Merge branch 'master' of github.com:elastic/kibana into issue-104094-update-legend-position

This commit is contained in:
Chris Cowan 2021-10-25 09:50:23 -06:00
commit b942c9781d
1094 changed files with 24554 additions and 10814 deletions

View file

@ -2,7 +2,7 @@
"upstream": "elastic/kibana",
"targetBranchChoices": [
{ "name": "master", "checked": true },
{ "name": "7.16", "checked": true },
"7.16",
"7.15",
"7.14",
"7.13",

View file

@ -17,12 +17,6 @@ const inputs = [
default: 0,
required: true,
},
{
key: 'ftsr-concurrency',
text: 'Max concurrency per step',
default: 20,
required: true,
},
];
for (let i = 1; i <= OSS_CI_GROUPS; i++) {
@ -36,7 +30,7 @@ for (let i = 1; i <= XPACK_CI_GROUPS; i++) {
const pipeline = {
steps: [
{
input: 'Number of Runs',
input: 'Number of Runs - Click Me',
fields: inputs,
},
{

View file

@ -9,8 +9,10 @@ const overrideCount = parseInt(
execSync(`buildkite-agent meta-data get 'ftsr-override-count'`).toString().trim()
);
const concurrency =
parseInt(execSync(`buildkite-agent meta-data get 'ftsr-concurrency'`).toString().trim()) || 20;
const concurrency = 25;
const initialJobs = 3;
let totalJobs = initialJobs;
const testSuites = [];
for (const key of keys) {
@ -21,12 +23,25 @@ for (const key of keys) {
const value =
overrideCount || execSync(`buildkite-agent meta-data get '${key}'`).toString().trim();
const count = value === '' ? defaultCount : parseInt(value);
totalJobs += count;
testSuites.push({
key: key.replace('ftsr-suite/', ''),
count: value === '' ? defaultCount : parseInt(value),
count: count,
});
}
if (totalJobs > 500) {
console.error('+++ Too many tests');
console.error(
`Buildkite builds can only contain 500 steps in total. Found ${totalJobs} in total. Make sure your test runs are less than ${
500 - initialJobs
}`
);
process.exit(1);
}
const steps = [];
const pipeline = {
env: {
@ -46,7 +61,7 @@ steps.push({
for (const testSuite of testSuites) {
const TEST_SUITE = testSuite.key;
const RUN_COUNT = testSuite.count;
const UUID = TEST_SUITE + process.env.UUID;
const UUID = process.env.UUID;
const JOB_PARTS = TEST_SUITE.split('/');
const IS_XPACK = JOB_PARTS[0] === 'xpack';
@ -65,6 +80,7 @@ for (const testSuite of testSuites) {
parallelism: RUN_COUNT,
concurrency: concurrency,
concurrency_group: UUID,
concurrency_method: 'eager',
});
} else {
steps.push({
@ -75,6 +91,7 @@ for (const testSuite of testSuites) {
parallelism: RUN_COUNT,
concurrency: concurrency,
concurrency_group: UUID,
concurrency_method: 'eager',
});
}
}

View file

@ -15,7 +15,8 @@
"build_on_comment": true,
"trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
"always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
"labels": ["buildkite-ci"]
"skip_ci_labels": ["skip-ci", "jenkins-ci"],
"skip_target_branches": ["6.8"]
}
]
}

View file

@ -8,4 +8,4 @@ if [[ "$(which docker)" != "" && "$(command uname -m)" != "aarch64" ]]; then
fi
./.ci/packer_cache_for_branch.sh master
./.ci/packer_cache_for_branch.sh 7.x
./.ci/packer_cache_for_branch.sh 7.16

View file

@ -23,16 +23,6 @@ However, if APM Server is slow to respond, is offline, reports an error, etc.,
APM agents will use local defaults until they're able to update the configuration.
For this reason, it is still essential to set custom default configurations locally in each of your agents.
[float]
==== APM Server setup
This feature requires {apm-server-ref}/setup-kibana-endpoint.html[Kibana endpoint configuration] in APM Server.
In addition, if an APM agent is using {apm-server-ref}/configuration-anonymous.html[anonymous authentication] to communicate with the APM Server,
the agent's service name must be included in the `apm-server.auth.anonymous.allow_service` list.
APM Server acts as a proxy between the agents and Kibana.
Kibana communicates any changed settings to APM Server so that your agents only need to poll APM Server to determine which settings have changed.
[float]
==== Supported configurations

View file

@ -563,9 +563,7 @@ More information on Kibana's API is available in <<api,REST API>>.
=== RUM source map API
IMPORTANT: This endpoint is only compatible with the
{apm-server-ref}/apm-integration.html[APM integration for Elastic Agent].
Users with a standalone APM Server should instead use the APM Server
{apm-server-ref}/sourcemap-api.html[source map upload API].
{apm-guide-ref}/index.html[APM integration for Elastic Agent].
A source map allows minified files to be mapped back to original source code --
allowing you to maintain the speed advantage of minified code,

View file

@ -56,8 +56,8 @@ To create an APM reader user:
include::./tab-widgets/apm-app-reader/widget.asciidoc[]
--
+
TIP: Using the {apm-server-ref-v}/apm-integration.html[APM integration for Elastic Agent]?
Add the privileges under the **Data streams** tab.
TIP: Using the deprecated APM Server binaries?
Add the privileges under the **Classic APM indices** tab above.
. Assign the `read-apm` role created in the previous step, and the following built-in roles to
any APM reader users:
@ -84,8 +84,8 @@ In some instances, you may wish to restrict certain Kibana apps that a user has
include::./tab-widgets/apm-app-reader/widget.asciidoc[]
--
+
TIP: Using the {apm-server-ref-v}/apm-integration.html[APM integration for Elastic Agent]?
Add the privileges under the **Data streams** tab.
TIP: Using the deprecated APM Server binaries?
Add the privileges under the **Classic APM indices** tab above.
. Assign feature privileges to any Kibana feature that the user needs access to.
Here are two examples:
@ -184,8 +184,8 @@ Central configuration users need to be able to view, create, update, and delete
include::./tab-widgets/central-config-users/widget.asciidoc[]
--
+
TIP: Using the {apm-server-ref-v}/apm-integration.html[APM integration for Elastic Agent]?
Add the privileges under the **Data streams** tab.
TIP: Using the deprecated APM Server binaries?
Add the privileges under the **Classic APM indices** tab above.
. Assign the `central-config-manager` role created in the previous step,
and the following Kibana feature privileges to anyone who needs to manage central configurations:
@ -211,8 +211,8 @@ but not create, update, or delete them.
include::./tab-widgets/central-config-users/widget.asciidoc[]
--
+
TIP: Using the {apm-server-ref-v}/apm-integration.html[APM integration for Elastic Agent]?
Add the privileges under the **Data streams** tab.
TIP: Using the deprecated APM Server binaries?
Add the privileges under the **Classic APM indices** tab above.
. Assign the `central-config-reader` role created in the previous step,
and the following Kibana feature privileges to anyone who needs to read central configurations:

View file

@ -2,7 +2,7 @@
[[errors]]
=== Errors
TIP: {apm-overview-ref-v}/errors.html[Errors] are groups of exceptions with a similar exception or log message.
TIP: {apm-guide-ref}/data-model-errors.html[Errors] are groups of exceptions with a similar exception or log message.
The *Errors* overview provides a high-level view of the exceptions that APM agents catch,
or that users manually report with APM agent APIs.

View file

@ -41,7 +41,7 @@ Notice something awry? Select a service or trace and dive deeper with:
* <<metrics>>
TIP: Want to learn more about the Elastic APM ecosystem?
See the {apm-get-started-ref}/overview.html[APM Overview].
See the {apm-guide-ref}/apm-overview.html[APM Overview].
include::services.asciidoc[]

View file

@ -41,7 +41,7 @@ We currently surface two types of service maps:
=== How do service maps work?
Service maps rely on distributed traces to draw connections between services.
As {apm-overview-ref-v}/distributed-tracing.html[distributed tracing] is enabled out-of-the-box for supported technologies, so are service maps.
As {apm-guide-ref}/apm-distributed-tracing.html[distributed tracing] is enabled out-of-the-box for supported technologies, so are service maps.
However, if a service isn't instrumented,
or a `traceparent` header isn't being propagated to it,
distributed tracing will not work, and the connection will not be drawn on the map.

View file

@ -16,7 +16,7 @@ You also get a stack trace, which shows the SQL query in your code.
Finally, APM knows which files are your code and which are just modules or libraries that you've installed.
These library frames will be minimized by default in order to show you the most relevant stack trace.
TIP: A {apm-overview-ref-v}/transaction-spans.html[span] is the duration of a single event.
TIP: A {apm-guide-ref}/data-model-spans.html[span] is the duration of a single event.
Spans are automatically captured by APM agents, and you can also define custom spans.
Each span has a type and is defined by a different color in the timeline/waterfall visualization.

View file

@ -2,28 +2,18 @@
<div class="tabs" data-tab-group="apm-app-reader">
<div role="tablist" aria-label="APM app reader">
<button role="tab"
aria-selected="true"
aria-selected="true"
aria-controls="data-streams-tab"
id="data-streams"
tabindex="-1">
Data streams
</button>
<button role="tab"
aria-selected="false"
aria-controls="classic-indices-tab"
id="classic-indices">
Classic APM indices
</button>
<button role="tab"
aria-selected="false"
aria-controls="data-streams-tab"
id="data-streams"
tabindex="-1">
Data streams
</button>
</div>
<div tabindex="0"
role="tabpanel"
id="classic-indices-tab"
aria-labelledby="classic-indices">
++++
include::content.asciidoc[tag=classic-indices]
++++
</div>
<div tabindex="0"
role="tabpanel"
@ -34,6 +24,16 @@ include::content.asciidoc[tag=classic-indices]
include::content.asciidoc[tag=data-streams]
++++
</div>
<div tabindex="0"
role="tabpanel"
id="classic-indices-tab"
aria-labelledby="classic-indices">
++++
include::content.asciidoc[tag=classic-indices]
++++
</div>
</div>

View file

@ -2,28 +2,18 @@
<div class="tabs" data-tab-group="central-config-manager">
<div role="tablist" aria-label="Central config manager">
<button role="tab"
aria-selected="true"
aria-selected="true"
aria-controls="data-streams-tab"
id="data-streams"
tabindex="-1">
Data streams
</button>
<button role="tab"
aria-selected="false"
aria-controls="classic-indices-tab"
id="classic-indices">
Classic APM indices
</button>
<button role="tab"
aria-selected="false"
aria-controls="data-streams-tab"
id="data-streams"
tabindex="-1">
Data streams
</button>
</div>
<div tabindex="0"
role="tabpanel"
id="classic-indices-tab"
aria-labelledby="classic-indices">
++++
include::content.asciidoc[tag=classic-indices]
++++
</div>
<div tabindex="0"
role="tabpanel"
@ -34,6 +24,16 @@ include::content.asciidoc[tag=classic-indices]
include::content.asciidoc[tag=data-streams]
++++
</div>
<div tabindex="0"
role="tabpanel"
id="classic-indices-tab"
aria-labelledby="classic-indices">
++++
include::content.asciidoc[tag=classic-indices]
++++
</div>
</div>

View file

@ -2,7 +2,7 @@
[[transactions]]
=== Transactions
TIP: A {apm-overview-ref-v}/transactions.html[transaction] describes an event captured by an Elastic APM agent instrumenting a service.
TIP: A {apm-guide-ref}/data-model-transactions.html[transaction] describes an event captured by an Elastic APM agent instrumenting a service.
APM agents automatically collect performance metrics on HTTP requests, database queries, and much more.
[role="screenshot"]

View file

@ -12,7 +12,7 @@ https://github.com/elastic/kibana/pulls[pull request] with your proposed changes
If your issue is potentially related to other components of the APM ecosystem,
don't forget to check our other troubleshooting guides or discussion forum:
* {apm-server-ref}/troubleshooting.html[APM Server troubleshooting]
* {apm-guide-ref}/troubleshoot-apm.html[APM Server troubleshooting]
* {apm-dotnet-ref}/troubleshooting.html[.NET agent troubleshooting]
* {apm-go-ref}/troubleshooting.html[Go agent troubleshooting]
* {apm-ios-ref}/troubleshooting.html[iOS agent troubleshooting]
@ -53,7 +53,7 @@ By default, this index template is created by APM Server on startup.
However, this only happens if `setup.template.enabled` is `true` in `apm-server.yml`.
You can create the index template manually by running `apm-server setup`.
Take note that index templates *cannot* be applied retroactively -- they are only applied at index creation time.
More information is available in {apm-server-ref}/apm-server-configuration.html[Set up and configure].
More information is available in {apm-guide-ref}/apm-server-configuration.html[Set up and configure].
You can check for the existence of an APM index template using the
{ref}/indices-get-template.html[Get index template API].
@ -68,12 +68,12 @@ GET /_template/apm-{version}
*Using Logstash, Kafka, etc.*
If you're not outputting data directly from APM Server to Elasticsearch (perhaps you're using Logstash or Kafka),
then the index template will not be set up automatically. Instead, you'll need to
{apm-server-ref}/apm-server-template.html[load the template manually].
{apm-guide-ref}/apm-server-template.html[load the template manually].
*Using custom index names*
This problem can also occur if you've customized the index name that you write APM data to.
If you change the default, you must also configure the `setup.template.name` and `setup.template.pattern` options.
See {apm-server-ref}/configuration-template.html[Load the Elasticsearch index template].
See {apm-guide-ref}/configuration-template.html[Load the Elasticsearch index template].
If the Elasticsearch index template has already been successfully loaded to the index,
you can customize the indices that the APM app uses to display data.
Navigate to *APM* > *Settings* > *Indices*, and change all `xpack.apm.indices.*` values to
@ -118,8 +118,8 @@ Instead, we should strip away the unique information and group our transactions
In this case, that means naming all blog transactions, `/blog`, and all documentation transactions, `/guide`.
If you feel like you'd be losing valuable information by following this naming convention, don't fret!
You can always add additional metadata to your transactions using {apm-overview-ref-v}/metadata.html#labels-fields[labels] (indexed) or
{apm-overview-ref-v}/metadata.html#custom-fields[custom context] (non-indexed).
You can always add additional metadata to your transactions using {apm-guide-ref-v}/metadata.html#labels-fields[labels] (indexed) or
{apm-guide-ref-v}/metadata.html#custom-fields[custom context] (non-indexed).
After ensuring you've correctly named your transactions,
you might still see an error in the APM app related to too many transaction names.
@ -182,10 +182,10 @@ Selecting the `apm-*` index pattern shows a listing of every field defined in th
*Ensure a field is searchable*
There are two things you can do to if you'd like to ensure a field is searchable:
1. Index your additional data as {apm-overview-ref-v}/metadata.html[labels] instead.
1. Index your additional data as {apm-guide-ref}/metadata.html[labels] instead.
These are dynamic by default, which means they will be indexed and become searchable and aggregatable.
2. Use the {apm-server-ref}/configuration-template.html[`append_fields`] feature. As an example,
2. Use the {apm-guide-ref}/configuration-template.html[`append_fields`] feature. As an example,
adding the following to `apm-server.yml` will enable dynamic indexing for `http.request.cookies`:
[source,yml]

View file

@ -37,6 +37,7 @@ Use the <<action-settings, Action configuration settings>> to customize connecto
actionTypeId: .servicenow-sir
config:
apiUrl: https://example.service-now.com/
isLegacy: false
secrets:
username: testuser
password: passwordkeystorevalue
@ -45,6 +46,9 @@ Use the <<action-settings, Action configuration settings>> to customize connecto
Config defines information for the connector type.
`apiUrl`:: An address that corresponds to *URL*.
`isLegacy`:: A boolean that indicates if the connector should use the Table API (legacy) or the Import Set API.
Note: If `isLegacy` is set to false, the Elastic application should be installed in ServiceNow.
Secrets defines sensitive information for the connector type.

View file

@ -37,6 +37,7 @@ Use the <<action-settings, Action configuration settings>> to customize connecto
actionTypeId: .servicenow
config:
apiUrl: https://example.service-now.com/
isLegacy: false
secrets:
username: testuser
password: passwordkeystorevalue
@ -45,6 +46,9 @@ Use the <<action-settings, Action configuration settings>> to customize connecto
Config defines information for the connector type.
`apiUrl`:: An address that corresponds to *URL*.
`isLegacy`:: A boolean that indicates if the connector should use the Table API (legacy) or the Import Set API.
Note: If `isLegacy` is set to false, the Elastic application should be installed in ServiceNow.
Secrets defines sensitive information for the connector type.

View file

@ -75,7 +75,7 @@ Changing these settings may disable features of the APM App.
| `xpack.apm.searchAggregatedTransactions` {ess-icon}
| experimental[] Enables Transaction histogram metrics. Defaults to `never` and aggregated transactions are not used. When set to `auto`, the UI will use metric indices over transaction indices for transactions if aggregated transactions are found. When set to `always`, additional configuration in APM Server is required.
See {apm-server-ref-v}/transaction-metrics.html[Configure transaction metrics] for more information.
See {apm-guide-ref}/transaction-metrics.html[Configure transaction metrics] for more information.
| `xpack.apm.metricsInterval` {ess-icon}
| Sets a `fixed_interval` for date histograms in metrics aggregations. Defaults to `30`.
@ -84,22 +84,22 @@ Changing these settings may disable features of the APM App.
| Set to `false` to disable cloud APM migrations. Defaults to `true`.
| `xpack.apm.indices.error` {ess-icon}
| Matcher for all {apm-server-ref}/error-indices.html[error indices]. Defaults to `logs-apm*,apm-*`.
| Matcher for all error indices. Defaults to `logs-apm*,apm-*`.
| `xpack.apm.indices.onboarding` {ess-icon}
| Matcher for all onboarding indices. Defaults to `apm-*`.
| `xpack.apm.indices.span` {ess-icon}
| Matcher for all {apm-server-ref}/span-indices.html[span indices]. Defaults to `traces-apm*,apm-*`.
| Matcher for all span indices. Defaults to `traces-apm*,apm-*`.
| `xpack.apm.indices.transaction` {ess-icon}
| Matcher for all {apm-server-ref}/transaction-indices.html[transaction indices]. Defaults to `traces-apm*,apm-*`.
| Matcher for all transaction indices. Defaults to `traces-apm*,apm-*`.
| `xpack.apm.indices.metric` {ess-icon}
| Matcher for all {apm-server-ref}/metricset-indices.html[metrics indices]. Defaults to `metrics-apm*,apm-*`.
| Matcher for all metrics indices. Defaults to `metrics-apm*,apm-*`.
| `xpack.apm.indices.sourcemap` {ess-icon}
| Matcher for all {apm-server-ref}/sourcemap-indices.html[source map indices]. Defaults to `apm-*`.
| Matcher for all source map indices. Defaults to `apm-*`.
|===

View file

@ -13,7 +13,7 @@ This guide introduces you to three of {kib}'s security features: spaces, roles,
Do you have multiple teams or tenants using {kib}? Do you want a “playground” to experiment with new visualizations or alerts? If so, then <<xpack-spaces,{kib} Spaces>> can help.
Think of a space as another instance of {kib}. A space allows you to organize your <<dashboard, dashboards>>, <<alerting-getting-started, alerts>>, <<xpack-ml, machine learning jobs>>, and much more into their own categories. For example, you might have a Marketing space for your marketeers to track the results of their campaigns, and an Engineering space for your developers to {apm-get-started-ref}/overview.html[monitor application performance].
Think of a space as another instance of {kib}. A space allows you to organize your <<dashboard, dashboards>>, <<alerting-getting-started, alerts>>, <<xpack-ml, machine learning jobs>>, and much more into their own categories. For example, you might have a Marketing space for your marketeers to track the results of their campaigns, and an Engineering space for your developers to {apm-guide-ref}/apm-overview.html[monitor application performance].
The assets you create in one space are isolated from other spaces, so when you enter a space, you only see the assets that belong to that space.

View file

@ -98,7 +98,7 @@
"@elastic/apm-generator": "link:bazel-bin/packages/elastic-apm-generator",
"@elastic/apm-rum": "^5.9.1",
"@elastic/apm-rum-react": "^1.3.1",
"@elastic/charts": "37.0.0",
"@elastic/charts": "38.0.1",
"@elastic/datemath": "link:bazel-bin/packages/elastic-datemath",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.21",
"@elastic/ems-client": "7.16.0",
@ -197,7 +197,7 @@
"chroma-js": "^1.4.1",
"classnames": "2.2.6",
"color": "1.0.3",
"commander": "^3.0.2",
"commander": "^4.1.1",
"compare-versions": "3.5.1",
"concat-stream": "1.6.2",
"constate": "^1.3.2",
@ -248,6 +248,7 @@
"idx": "^2.5.6",
"immer": "^9.0.6",
"inline-style": "^2.0.0",
"inquirer": "^7.3.3",
"intl": "^1.2.5",
"intl-format-cache": "^2.1.0",
"intl-messageformat": "^2.2.0",
@ -288,7 +289,6 @@
"moment-timezone": "^0.5.27",
"monaco-editor": "^0.22.3",
"mustache": "^2.3.2",
"ngreact": "^0.5.1",
"nock": "12.0.3",
"node-fetch": "^2.6.1",
"node-forge": "^0.10.0",
@ -297,6 +297,7 @@
"object-hash": "^1.3.1",
"object-path-immutable": "^3.1.1",
"opn": "^5.5.0",
"ora": "^4.0.4",
"p-limit": "^3.0.1",
"p-map": "^4.0.0",
"p-retry": "^4.2.0",
@ -309,7 +310,7 @@
"prop-types": "^15.7.2",
"proxy-from-env": "1.0.0",
"puid": "1.0.7",
"puppeteer": "^8.0.0",
"puppeteer": "^10.2.0",
"query-string": "^6.13.2",
"random-word-slugs": "^0.0.5",
"raw-loader": "^3.1.0",
@ -720,7 +721,6 @@
"html": "1.0.0",
"html-loader": "^0.5.5",
"http-proxy": "^1.18.1",
"inquirer": "^7.3.3",
"is-glob": "^4.0.1",
"is-path-inside": "^3.0.2",
"istanbul-instrumenter-loader": "^3.0.1",
@ -743,7 +743,7 @@
"jsondiffpatch": "0.4.1",
"license-checker": "^16.0.0",
"listr": "^0.14.1",
"lmdb-store": "^1.6.8",
"lmdb-store": "^1.6.11",
"marge": "^1.0.1",
"micromatch": "3.1.10",
"minimist": "^1.2.5",
@ -762,9 +762,9 @@
"null-loader": "^3.0.0",
"nyc": "^15.0.1",
"oboe": "^2.1.4",
"ora": "^4.0.4",
"parse-link-header": "^1.0.1",
"pbf": "3.2.1",
"pdf-to-img": "^1.1.1",
"pirates": "^4.0.1",
"pixelmatch": "^5.1.0",
"postcss": "^7.0.32",

View file

@ -25,6 +25,7 @@ NPM_MODULE_EXTRA_FILES = [
]
RUNTIME_DEPS = [
"//packages/elastic-datemath",
"@npm//@elastic/elasticsearch",
"@npm//lodash",
"@npm//moment",
@ -36,6 +37,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
"//packages/elastic-datemath",
"@npm//@elastic/elasticsearch",
"@npm//moment",
"@npm//p-limit",

View file

@ -11,7 +11,7 @@ This section assumes that you've installed Kibana's dependencies by running `yar
This library can currently be used in two ways:
- Imported as a Node.js module, for instance to be used in Kibana's functional test suite.
- With a command line interface, to index data based on some example scenarios.
- With a command line interface, to index data based on a specified scenario.
### Using the Node.js module
@ -32,7 +32,7 @@ const instance = service('synth-go', 'production', 'go')
.instance('instance-a');
const from = new Date('2021-01-01T12:00:00.000Z').getTime();
const to = new Date('2021-01-01T12:00:00.000Z').getTime() - 1;
const to = new Date('2021-01-01T12:00:00.000Z').getTime();
const traceEvents = timerange(from, to)
.interval('1m')
@ -82,12 +82,26 @@ const esEvents = toElasticsearchOutput([
### CLI
Via the CLI, you can upload examples. The supported examples are listed in `src/lib/es.ts`. A `--target` option that specifies the Elasticsearch URL should be defined when running the `example` command. Here's an example:
Via the CLI, you can upload scenarios, either using a fixed time range or continuously generating data. Some examples are available in `src/scripts/examples`. Here's an example for live data:
`$ node packages/elastic-apm-generator/src/scripts/es.js example simple-trace --target=http://admin:changeme@localhost:9200`
`$ node packages/elastic-apm-generator/src/scripts/run packages/elastic-apm-generator/src/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --live`
For a fixed time window:
`$ node packages/elastic-apm-generator/src/scripts/run packages/elastic-apm-generator/src/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now`
The script will try to automatically find bootstrapped APM indices. __If these indices do not exist, the script will exit with an error. It will not bootstrap the indices itself.__
The following options are supported:
- `to`: the end of the time range, in ISO format. By default, the current time will be used.
- `from`: the start of the time range, in ISO format. By default, `to` minus 15 minutes will be used.
- `apm-server-version`: the version used in the index names bootstrapped by APM Server, e.g. `7.16.0`. __If these indices do not exist, the script will exit with an error. It will not bootstrap the indices itself.__
| Option | Description | Default |
| -------------- | ------------------------------------------------------- | ------------ |
| `--from` | The start of the time window. | `now - 15m` |
| `--to` | The end of the time window. | `now` |
| `--live`       | Continuously ingest data                                | `false`      |
| `--bucketSize` | Size of bucket for which to generate data. | `15m` |
| `--clean` | Clean APM indices before indexing new data. | `false` |
| `--interval` | The interval at which to index data. | `10s` |
| `--logLevel` | Log level. | `info` |
| `--lookback` | The lookback window for which data should be generated. | `15m` |
| `--target` | Elasticsearch target, including username/password. | **Required** |
| `--workers` | Amount of simultaneously connected ES clients. | `1` |

View file

@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
module.exports = {
rules: {
'import/no-default-export': 'off',
},
};

View file

@ -21,7 +21,7 @@ export class Interval {
throw new Error('Failed to parse interval');
}
const timestamps: number[] = [];
while (now <= this.to) {
while (now < this.to) {
timestamps.push(...new Array<number>(rate).fill(now));
now = moment(now)
.add(Number(args[1]), args[2] as any)

View file

@ -10,7 +10,25 @@ import { set } from 'lodash';
import { getObserverDefaults } from '../..';
import { Fields } from '../entity';
export function toElasticsearchOutput(events: Fields[], versionOverride?: string) {
export interface ElasticsearchOutput {
_index: string;
_source: unknown;
}
export interface ElasticsearchOutputWriteTargets {
transaction: string;
span: string;
error: string;
metric: string;
}
export function toElasticsearchOutput({
events,
writeTargets,
}: {
events: Fields[];
writeTargets: ElasticsearchOutputWriteTargets;
}): ElasticsearchOutput[] {
return events.map((event) => {
const values = {
...event,
@ -29,7 +47,7 @@ export function toElasticsearchOutput(events: Fields[], versionOverride?: string
set(document, key, val);
}
return {
_index: `apm-${versionOverride || values['observer.version']}-${values['processor.event']}`,
_index: writeTargets[event['processor.event'] as keyof ElasticsearchOutputWriteTargets],
_source: document,
};
});

View file

@ -1,113 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { inspect } from 'util';
import { Client } from '@elastic/elasticsearch';
import { chunk } from 'lodash';
import pLimit from 'p-limit';
import yargs from 'yargs/yargs';
import { toElasticsearchOutput } from '..';
import { simpleTrace } from './examples/01_simple_trace';
yargs(process.argv.slice(2))
.command(
'example',
'run an example scenario',
(y) => {
return y
.positional('scenario', {
describe: 'scenario to run',
choices: ['simple-trace'],
demandOption: true,
})
.option('target', {
describe: 'elasticsearch target, including username/password',
})
.option('from', { describe: 'start of timerange' })
.option('to', { describe: 'end of timerange' })
.option('workers', {
default: 1,
describe: 'number of concurrently connected ES clients',
})
.option('apm-server-version', {
describe: 'APM Server version override',
})
.demandOption('target');
},
(argv) => {
let events: any[] = [];
const toDateString = (argv.to as string | undefined) || new Date().toISOString();
const fromDateString =
(argv.from as string | undefined) ||
new Date(new Date(toDateString).getTime() - 15 * 60 * 1000).toISOString();
const to = new Date(toDateString).getTime();
const from = new Date(fromDateString).getTime();
switch (argv._[1]) {
case 'simple-trace':
events = simpleTrace(from, to);
break;
}
const docs = toElasticsearchOutput(events, argv['apm-server-version'] as string);
const client = new Client({
node: argv.target as string,
});
const fn = pLimit(argv.workers);
const batches = chunk(docs, 1000);
// eslint-disable-next-line no-console
console.log(
'Uploading',
docs.length,
'docs in',
batches.length,
'batches',
'from',
fromDateString,
'to',
toDateString
);
Promise.all(
batches.map((batch) =>
fn(() => {
return client.bulk({
require_alias: true,
body: batch.flatMap((doc) => {
return [{ index: { _index: doc._index } }, doc._source];
}),
});
})
)
)
.then((results) => {
const errors = results
.flatMap((result) => result.body.items)
.filter((item) => !!item.index?.error)
.map((item) => item.index?.error);
if (errors.length) {
// eslint-disable-next-line no-console
console.error(inspect(errors.slice(0, 10), { depth: null }));
throw new Error('Failed to upload some items');
}
process.exit();
})
.catch((err) => {
// eslint-disable-next-line no-console
console.error(err);
process.exit(1);
});
}
)
.parse();

View file

@ -9,12 +9,12 @@
import { service, timerange, getTransactionMetrics, getSpanDestinationMetrics } from '../..';
import { getBreakdownMetrics } from '../../lib/utils/get_breakdown_metrics';
export function simpleTrace(from: number, to: number) {
export default function ({ from, to }: { from: number; to: number }) {
const instance = service('opbeans-go', 'production', 'go').instance('instance');
const range = timerange(from, to);
const transactionName = '240rpm/60% 1000ms';
const transactionName = '240rpm/75% 1000ms';
const successfulTraceEvents = range
.interval('1s')

View file

@ -12,4 +12,4 @@ require('@babel/register')({
presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'],
});
require('./es.ts');
require('./run.ts');

View file

@ -0,0 +1,117 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import datemath from '@elastic/datemath';
import yargs from 'yargs/yargs';
import { cleanWriteTargets } from './utils/clean_write_targets';
import {
bucketSizeOption,
cleanOption,
fileOption,
intervalOption,
targetOption,
workerOption,
logLevelOption,
} from './utils/common_options';
import { intervalToMs } from './utils/interval_to_ms';
import { getCommonResources } from './utils/get_common_resources';
import { startHistoricalDataUpload } from './utils/start_historical_data_upload';
import { startLiveDataUpload } from './utils/start_live_data_upload';
// CLI entry point: parses flags, builds the shared resources (ES client,
// logger, scenario, write targets) and kicks off historical and/or live
// data uploads.
yargs(process.argv.slice(2))
  .command(
    '*',
    'Generate data and index into Elasticsearch',
    (y) => {
      return y
        .positional('file', fileOption)
        .option('bucketSize', bucketSizeOption)
        .option('workers', workerOption)
        .option('interval', intervalOption)
        .option('clean', cleanOption)
        .option('target', targetOption)
        .option('logLevel', logLevelOption)
        .option('from', {
          description: 'The start of the time window',
        })
        .option('to', {
          description: 'The end of the time window',
        })
        .option('live', {
          description: 'Generate and index data continuously',
          boolean: true,
        })
        // `--to` and `--live` are mutually exclusive: live mode streams forward from "now".
        .conflicts('to', 'live');
    },
    async (argv) => {
      const {
        scenario,
        intervalInMs,
        bucketSizeInMs,
        target,
        workers,
        clean,
        logger,
        writeTargets,
        client,
      } = await getCommonResources(argv);

      // Optionally wipe the target indices/data streams before indexing new data.
      if (clean) {
        await cleanWriteTargets({ writeTargets, client, logger });
      }

      // Defaults: end now, start 15 minutes before the end.
      const to = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
      const from = argv.from
        ? datemath.parse(String(argv.from))!.valueOf()
        : to - intervalToMs('15m');

      const live = argv.live;

      logger.info(
        `Starting data generation\n: ${JSON.stringify(
          {
            intervalInMs,
            bucketSizeInMs,
            workers,
            target,
            writeTargets,
            from: new Date(from).toISOString(),
            to: new Date(to).toISOString(),
            live,
          },
          null,
          2
        )}`
      );

      // Backfill the [from, to] window in bucket-sized batches.
      // NOTE(review): this promise is not awaited; when --live is set the
      // live upload below starts immediately alongside the backfill.
      startHistoricalDataUpload({
        from,
        to,
        scenario,
        intervalInMs,
        bucketSizeInMs,
        client,
        workers,
        writeTargets,
        logger,
      });

      // Keep generating data forward from `to` when --live is set.
      if (live) {
        startLiveDataUpload({
          bucketSizeInMs,
          client,
          intervalInMs,
          logger,
          scenario,
          start: to,
          workers,
          writeTargets,
        });
      }
    }
  )
  .parse();

View file

@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { Logger } from './logger';
/**
 * Deletes all documents from the given write targets via a fire-and-forget
 * delete-by-query, then polls the resulting task until it completes.
 *
 * Resolves when the delete task has finished; rejects if the task reports an
 * error or if polling the task status fails.
 */
export async function cleanWriteTargets({
  writeTargets,
  client,
  logger,
}: {
  writeTargets: ElasticsearchOutputWriteTargets;
  client: Client;
  logger: Logger;
}) {
  const targets = Object.values(writeTargets);

  logger.info(`Cleaning indices: ${targets.join(', ')}`);

  // `wait_for_completion: false` makes ES return a task id immediately
  // instead of blocking the request for the whole deletion.
  const response = await client.deleteByQuery({
    index: targets,
    allow_no_indices: true,
    conflicts: 'proceed',
    body: {
      query: {
        match_all: {},
      },
    },
    wait_for_completion: false,
  });

  const task = response.body.task;

  if (task) {
    await new Promise<void>((resolve, reject) => {
      const pollForTaskCompletion = async () => {
        try {
          const taskResponse = await client.tasks.get({
            task_id: String(task),
          });

          logger.debug(
            `Polled for task:\n${JSON.stringify(taskResponse.body, ['completed', 'error'], 2)}`
          );

          if (taskResponse.body.completed) {
            resolve();
          } else if (taskResponse.body.error) {
            reject(taskResponse.body.error);
          } else {
            setTimeout(pollForTaskCompletion, 2500);
          }
        } catch (err) {
          // BUG FIX: previously a transport error from tasks.get became an
          // unhandled rejection and left this promise pending forever.
          reject(err);
        }
      };

      pollForTaskCompletion();
    });
  }
}

View file

@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Shared yargs option definitions for the synthtrace CLI.

// Path to the scenario file, resolved relative to the current working directory.
const fileOption = {
  describe: 'File that contains the trace scenario',
  demandOption: true,
};

const intervalOption = {
  describe: 'The interval at which to index data',
  default: '10s',
};

const targetOption = {
  describe: 'Elasticsearch target, including username/password',
  demandOption: true,
};

const bucketSizeOption = {
  describe: 'Size of bucket for which to generate data',
  default: '15m',
};

const workerOption = {
  describe: 'Amount of simultaneously connected ES clients',
  default: 1,
};

const cleanOption = {
  describe: 'Clean APM indices before indexing new data',
  default: false,
  boolean: true as const,
};

const logLevelOption = {
  describe: 'Log level',
  default: 'info',
};

export {
  fileOption,
  intervalOption,
  targetOption,
  bucketSizeOption,
  workerOption,
  cleanOption,
  logLevelOption,
};

View file

@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { getScenario } from './get_scenario';
import { getWriteTargets } from './get_write_targets';
import { intervalToMs } from './interval_to_ms';
import { createLogger, LogLevel } from './logger';
/**
 * Resolves everything the CLI commands share: a logger at the requested
 * level, parsed interval/bucket sizes, an Elasticsearch client, the loaded
 * scenario function, and the write targets to index into.
 *
 * Throws if the interval or bucket size cannot be parsed.
 */
export async function getCommonResources({
  file,
  interval,
  bucketSize,
  workers,
  target,
  clean,
  logLevel,
}: {
  file: unknown;
  interval: unknown;
  bucketSize: unknown;
  workers: unknown;
  target: unknown;
  clean: boolean;
  logLevel: unknown;
}) {
  // Anything other than 'debug'/'quiet' (including unknown values)
  // falls back to the default 'info' level.
  const parsedLogLevel =
    logLevel === 'debug' ? LogLevel.debug : logLevel === 'quiet' ? LogLevel.quiet : LogLevel.info;

  const logger = createLogger(parsedLogLevel);

  const intervalInMs = intervalToMs(interval);
  if (!intervalInMs) {
    throw new Error('Invalid interval');
  }

  const bucketSizeInMs = intervalToMs(bucketSize);
  if (!bucketSizeInMs) {
    throw new Error('Invalid bucket size');
  }

  const client = new Client({
    node: String(target),
  });

  // The scenario file load and the write-target lookup are independent,
  // so resolve them concurrently.
  const [scenario, writeTargets] = await Promise.all([
    getScenario({ file, logger }),
    getWriteTargets({ client }),
  ]);

  return {
    scenario,
    writeTargets,
    logger,
    client,
    intervalInMs,
    bucketSizeInMs,
    workers: Number(workers),
    target: String(target),
    clean,
  };
}

View file

@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import { Fields } from '../../lib/entity';
import { Logger } from './logger';
export type Scenario = (options: { from: number; to: number }) => Fields[];

/**
 * Dynamically imports the scenario module referenced by `file` (resolved
 * against the current working directory) and returns its default export.
 *
 * Rejects if the module has no default export.
 */
export function getScenario({ file, logger }: { file: unknown; logger: Logger }) {
  const location = Path.join(process.cwd(), String(file));

  logger.debug(`Loading scenario from ${location}`);

  const loadDefaultExport = async (): Promise<Scenario> => {
    const mod = await import(location);
    if (mod && mod.default) {
      return mod.default;
    }
    throw new Error(`Could not find scenario at ${location}`);
  };

  return loadDefaultExport();
}

View file

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
/**
 * Determines the index/data stream each event type should be written to.
 * Prefers APM data streams when they exist, otherwise falls back to the
 * legacy `apm-*` write-index aliases.
 *
 * Throws if any of the four targets cannot be resolved.
 */
export async function getWriteTargets({
  client,
}: {
  client: Client;
}): Promise<ElasticsearchOutputWriteTargets> {
  // Fetch legacy aliases and data streams concurrently.
  const [indicesResponse, datastreamsResponse] = await Promise.all([
    client.indices.getAlias({
      index: 'apm-*',
    }),
    client.indices.getDataStream({
      name: '*apm',
    }),
  ]);

  // First data stream whose name contains `filter`, if any.
  function getDataStreamName(filter: string) {
    return datastreamsResponse.body.data_streams.find((stream) => stream.name.includes(filter))
      ?.name;
  }

  // Write-index alias of the first matching legacy index.
  // NOTE(review): the trailing non-null assertion can hide an undefined
  // here; the explicit check on `targets` below is the real safety net.
  function getAlias(filter: string) {
    return Object.keys(indicesResponse.body)
      .map((key) => {
        return {
          key,
          writeIndexAlias: Object.entries(indicesResponse.body[key].aliases).find(
            ([_, alias]) => alias.is_write_index
          )?.[0],
        };
      })
      .find(({ key }) => key.includes(filter))?.writeIndexAlias!;
  }

  const targets = {
    transaction: getDataStreamName('traces-apm') || getAlias('-transaction'),
    span: getDataStreamName('traces-apm') || getAlias('-span'),
    metric: getDataStreamName('metrics-apm') || getAlias('-metric'),
    error: getDataStreamName('logs-apm') || getAlias('-error'),
  };

  if (!targets.transaction || !targets.span || !targets.metric || !targets.error) {
    throw new Error('Write targets could not be determined');
  }

  return targets;
}

View file

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/**
 * Converts an interval expression like "10s", "15m", "2h", "1d" or "1w"
 * into milliseconds.
 *
 * Throws 'Could not parse interval' when the unit is unknown or the numeric
 * part is not a number. (Previously an input such as "ms" yielded NaN * 1000
 * and silently returned NaN instead of throwing.)
 */
export function intervalToMs(interval: unknown) {
  const match = String(interval).match(/^(.*)(s|m|h|d|w)$/);

  if (match) {
    const value = Number(match[1]);

    // BUG FIX: reject non-numeric values instead of returning NaN.
    if (!Number.isNaN(value)) {
      const msPerUnit: Record<string, number> = {
        s: 1000,
        m: 60 * 1000,
        h: 60 * 60 * 1000,
        d: 24 * 60 * 60 * 1000,
        w: 7 * 24 * 60 * 60 * 1000,
      };
      return value * msPerUnit[match[2]];
    }
  }

  throw new Error('Could not parse interval');
}

View file

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Log levels in increasing order of verbosity suppression:
// debug < info < quiet.
export enum LogLevel {
  debug = 0,
  info = 1,
  quiet = 2,
}

/**
 * Creates a minimal console-backed logger whose output is gated by the
 * configured level: `debug` prints only at the debug level, `info` prints
 * at debug and info levels, and `quiet` silences both.
 */
export function createLogger(logLevel: LogLevel) {
  const isEnabledAt = (level: LogLevel) => logLevel <= level;

  return {
    debug: (...args: any[]) => {
      if (isEnabledAt(LogLevel.debug)) {
        // eslint-disable-next-line no-console
        console.debug(...args);
      }
    },
    info: (...args: any[]) => {
      if (isEnabledAt(LogLevel.info)) {
        // eslint-disable-next-line no-console
        console.log(...args);
      }
    },
  };
}

export type Logger = ReturnType<typeof createLogger>;

View file

@ -0,0 +1,64 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { Scenario } from './get_scenario';
import { Logger } from './logger';
import { uploadEvents } from './upload_events';
/**
 * Backfills the [from, to] window by generating and uploading one
 * bucket-sized batch at a time; each batch starts only after the previous
 * one has been indexed.
 *
 * Resolves when the final bucket (the one reaching `to`) has been uploaded.
 */
export async function startHistoricalDataUpload({
  from,
  to,
  scenario,
  intervalInMs,
  bucketSizeInMs,
  client,
  workers,
  writeTargets,
  logger,
}: {
  from: number;
  to: number;
  scenario: Scenario;
  intervalInMs: number;
  bucketSizeInMs: number;
  client: Client;
  workers: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  logger: Logger;
}) {
  // Upper bound of the range already handed to the scenario.
  let requestedUntil: number = from;

  function uploadNextBatch(): Promise<void> {
    const bucketFrom = requestedUntil;
    const bucketTo = Math.min(to, bucketFrom + bucketSizeInMs);

    const events = scenario({ from: bucketFrom, to: bucketTo });

    logger.info(
      `Uploading: ${new Date(bucketFrom).toISOString()} to ${new Date(bucketTo).toISOString()}`
    );

    requestedUntil = bucketTo;

    return uploadEvents({
      events,
      client,
      workers,
      writeTargets,
      logger,
    }).then(() => {
      if (bucketTo >= to) {
        return;
      }
      // BUG FIX: the recursive call must be returned, otherwise the promise
      // returned to the caller resolved after the *first* batch and errors
      // from later batches became unhandled rejections.
      return uploadNextBatch();
    });
  }

  return uploadNextBatch();
}

View file

@ -0,0 +1,75 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { partition } from 'lodash';
import { Fields } from '../../lib/entity';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { Scenario } from './get_scenario';
import { Logger } from './logger';
import { uploadEvents } from './upload_events';
/**
 * Continuously generates and uploads events from `start` onwards. Every
 * `intervalInMs` a tick generates the next bucket of events (when wall-clock
 * time has caught up) and uploads all queued events whose timestamp has
 * already passed.
 */
export function startLiveDataUpload({
  start,
  bucketSizeInMs,
  intervalInMs,
  workers,
  writeTargets,
  scenario,
  client,
  logger,
}: {
  start: number;
  bucketSizeInMs: number;
  intervalInMs: number;
  workers: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  scenario: Scenario;
  client: Client;
  logger: Logger;
}) {
  // Events generated ahead of time that are not yet due for upload.
  let queuedEvents: Fields[] = [];
  // Upper bound of the time range the scenario has already been asked for.
  let requestedUntil: number = start;

  function uploadNextBatch() {
    const end = new Date().getTime();
    // Generate the next bucket once wall-clock time passes the end of the
    // previously requested range.
    if (end > requestedUntil) {
      const bucketFrom = requestedUntil;
      const bucketTo = requestedUntil + bucketSizeInMs;
      const nextEvents = scenario({ from: bucketFrom, to: bucketTo });
      logger.debug(
        `Requesting ${new Date(bucketFrom).toISOString()} to ${new Date(
          bucketTo
        ).toISOString()}, events: ${nextEvents.length}`
      );
      queuedEvents.push(...nextEvents);
      requestedUntil = bucketTo;
    }

    // Upload only events whose timestamp has already passed; keep the rest
    // queued for a later tick.
    const [eventsToUpload, eventsToRemainInQueue] = partition(
      queuedEvents,
      (event) => event['@timestamp']! <= end
    );

    logger.info(`Uploading until ${new Date(end).toISOString()}, events: ${eventsToUpload.length}`);

    queuedEvents = eventsToRemainInQueue;

    // NOTE(review): intentionally not awaited — uploadEvents handles its own
    // failures by logging and exiting the process.
    uploadEvents({
      events: eventsToUpload,
      client,
      workers,
      writeTargets,
      logger,
    });
  }

  setInterval(uploadNextBatch, intervalInMs);

  uploadNextBatch();
}

View file

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Client } from '@elastic/elasticsearch';
import { chunk } from 'lodash';
import pLimit from 'p-limit';
import { inspect } from 'util';
import { Fields } from '../../lib/entity';
import {
ElasticsearchOutputWriteTargets,
toElasticsearchOutput,
} from '../../lib/output/to_elasticsearch_output';
import { Logger } from './logger';
/**
 * Converts the given events to Elasticsearch bulk documents and indexes
 * them in batches of 5000, with at most `workers` concurrent bulk requests.
 *
 * If any item fails to index, the first few errors are printed and the
 * process exits with code 1.
 */
export function uploadEvents({
  events,
  client,
  workers,
  writeTargets,
  logger,
}: {
  events: Fields[];
  client: Client;
  workers: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  logger: Logger;
}) {
  const esDocuments = toElasticsearchOutput({ events, writeTargets });
  const limiter = pLimit(workers);
  const batches = chunk(esDocuments, 5000);

  logger.debug(`Uploading ${esDocuments.length} in ${batches.length} batches`);

  const time = new Date().getTime();

  // One bulk request per batch, throttled through the concurrency limiter.
  const uploadBatch = (batch: typeof esDocuments) =>
    limiter(() =>
      client.bulk({
        require_alias: true,
        body: batch.flatMap((doc) => [{ index: { _index: doc._index } }, doc._source]),
      })
    );

  return Promise.all(batches.map(uploadBatch))
    .then((results) => {
      const errors = results
        .flatMap((result) => result.body.items)
        .filter((item) => !!item.index?.error)
        .map((item) => item.index?.error);

      if (errors.length) {
        // eslint-disable-next-line no-console
        console.error(inspect(errors.slice(0, 10), { depth: null }));
        throw new Error('Failed to upload some items');
      }

      logger.debug(`Uploaded ${events.length} in ${new Date().getTime() - time}ms`);
    })
    .catch((err) => {
      // eslint-disable-next-line no-console
      console.error(err);
      process.exit(1);
    });
}

View file

@ -18,7 +18,7 @@ describe('simple trace', () => {
const range = timerange(
new Date('2021-01-01T00:00:00.000Z').getTime(),
new Date('2021-01-01T00:15:00.000Z').getTime() - 1
new Date('2021-01-01T00:15:00.000Z').getTime()
);
events = range

View file

@ -19,7 +19,7 @@ describe('transaction metrics', () => {
const range = timerange(
new Date('2021-01-01T00:00:00.000Z').getTime(),
new Date('2021-01-01T00:15:00.000Z').getTime() - 1
new Date('2021-01-01T00:15:00.000Z').getTime()
);
events = getTransactionMetrics(

View file

@ -19,7 +19,7 @@ describe('span destination metrics', () => {
const range = timerange(
new Date('2021-01-01T00:00:00.000Z').getTime(),
new Date('2021-01-01T00:15:00.000Z').getTime() - 1
new Date('2021-01-01T00:15:00.000Z').getTime()
);
events = getSpanDestinationMetrics(

View file

@ -26,7 +26,7 @@ describe('breakdown metrics', () => {
const start = new Date('2021-01-01T00:00:00.000Z').getTime();
const range = timerange(start, start + INTERVALS * 30 * 1000 - 1);
const range = timerange(start, start + INTERVALS * 30 * 1000);
events = getBreakdownMetrics([
...range

View file

@ -9,6 +9,13 @@
import { Fields } from '../lib/entity';
import { toElasticsearchOutput } from '../lib/output/to_elasticsearch_output';
const writeTargets = {
transaction: 'apm-8.0.0-transaction',
span: 'apm-8.0.0-span',
metric: 'apm-8.0.0-metric',
error: 'apm-8.0.0-error',
};
describe('output to elasticsearch', () => {
let event: Fields;
@ -21,13 +28,13 @@ describe('output to elasticsearch', () => {
});
it('properly formats @timestamp', () => {
const doc = toElasticsearchOutput([event])[0] as any;
const doc = toElasticsearchOutput({ events: [event], writeTargets })[0] as any;
expect(doc._source['@timestamp']).toEqual('2020-12-31T23:00:00.000Z');
});
it('formats a nested object', () => {
const doc = toElasticsearchOutput([event])[0] as any;
const doc = toElasticsearchOutput({ events: [event], writeTargets })[0] as any;
expect(doc._source.processor).toEqual({
event: 'transaction',

View file

@ -20,21 +20,23 @@ export const MANAGED_CONFIG_KEYS: ManagedConfigKey[] = [
{
key: 'files.watcherExclude',
value: {
['**/.eslintcache']: true,
['**/.es']: true,
['**/.yarn-local-mirror']: true,
['**/.chromium']: true,
['**/packages/kbn-pm/dist/index.js']: true,
['**/.es']: true,
['**/.eslintcache']: true,
['**/.yarn-local-mirror']: true,
['**/*.log']: true,
['**/api_docs']: true,
['**/bazel-*']: true,
['**/node_modules']: true,
['**/packages/kbn-pm/dist/index.js']: true,
['**/target']: true,
['**/*.log']: true,
},
},
{
key: 'search.exclude',
value: {
['**/packages/kbn-pm/dist/index.js']: true,
['**/api_docs']: true,
},
},
{

View file

@ -56,6 +56,8 @@ export type FilterMeta = {
negate?: boolean;
// controlledBy is there to identify who owns the filter
controlledBy?: string;
// allows grouping of filters
group?: string;
// index and type are optional only because when you create a new filter, there are no defaults
index?: string;
isMultiIndex?: boolean;

View file

@ -0,0 +1,189 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { createParser } from './grammar';
describe('createParser', () => {
let parser: ReturnType<typeof createParser>;
beforeEach(() => {
parser = createParser();
});
test('should create a xjson grammar parser', () => {
expect(createParser()).toBeInstanceOf(Function);
});
test('should return no annotations in case of valid json', () => {
expect(
parser(`
{"menu": {
"id": "file",
"value": "File",
"quotes": "'\\"",
"popup": {
"actions": [
"new",
"open",
"close"
],
"menuitem": [
{"value": "New"},
{"value": "Open"},
{"value": "Close"}
]
}
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [],
}
`);
});
test('should support triple quotes', () => {
expect(
parser(`
{"menu": {
"id": """
file
""",
"value": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [],
}
`);
});
test('triple quotes should be correctly closed', () => {
expect(
parser(`
{"menu": {
"id": """"
file
"",
"value": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [
Object {
"at": 36,
"text": "Expected ',' instead of '\\"'",
"type": "error",
},
],
}
`);
});
test('an escaped quote can be appended to the end of triple quotes', () => {
expect(
parser(`
{"menu": {
"id": """
file
\\"""",
"value": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [],
}
`);
});
test('text values should be wrapper into quotes', () => {
expect(
parser(`
{"menu": {
"id": id,
"value": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [
Object {
"at": 36,
"text": "Unexpected 'i'",
"type": "error",
},
],
}
`);
});
test('check for close quotes', () => {
expect(
parser(`
{"menu": {
"id": "id,
"value": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [
Object {
"at": 52,
"text": "Expected ',' instead of 'v'",
"type": "error",
},
],
}
`);
});
test('no duplicate keys', () => {
expect(
parser(`
{"menu": {
"id": "id",
"id": "File"
}}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [
Object {
"at": 53,
"text": "Duplicate key \\"id\\"",
"type": "warning",
},
],
}
`);
});
test('all curly quotes should be closed', () => {
expect(
parser(`
{"menu": {
"id": "id",
"name": "File"
}
`)
).toMatchInlineSnapshot(`
Object {
"annotations": Array [
Object {
"at": 82,
"text": "Expected ',' instead of ''",
"type": "error",
},
],
}
`);
});
});

View file

@ -57,10 +57,6 @@ export const createParser = () => {
text: m,
});
},
reset = function (newAt: number) {
ch = text.charAt(newAt);
at = newAt + 1;
},
next = function (c?: string) {
return (
c && c !== ch && error("Expected '" + c + "' instead of '" + ch + "'"),
@ -69,15 +65,6 @@ export const createParser = () => {
ch
);
},
nextUpTo = function (upTo: any, errorMessage: string) {
let currentAt = at,
i = text.indexOf(upTo, currentAt);
if (i < 0) {
error(errorMessage || "Expected '" + upTo + "'");
}
reset(i + upTo.length);
return text.substring(currentAt, i);
},
peek = function (c: string) {
return text.substr(at, c.length) === c; // nocommit - double check
},
@ -96,37 +83,50 @@ export const createParser = () => {
(string += ch), next();
return (number = +string), isNaN(number) ? (error('Bad number'), void 0) : number;
},
stringLiteral = function () {
let quotes = '"""';
let end = text.indexOf('\\"' + quotes, at + quotes.length);
if (end >= 0) {
quotes = '\\"' + quotes;
} else {
end = text.indexOf(quotes, at + quotes.length);
}
if (end >= 0) {
for (let l = end - at + quotes.length; l > 0; l--) {
next();
}
}
return next();
},
string = function () {
let hex: any,
i: any,
uffff: any,
string = '';
if ('"' === ch) {
if (peek('""')) {
// literal
next('"');
next('"');
return nextUpTo('"""', 'failed to find closing \'"""\'');
} else {
for (; next(); ) {
if ('"' === ch) return next(), string;
if ('\\' === ch)
if ((next(), 'u' === ch)) {
for (
uffff = 0, i = 0;
4 > i && ((hex = parseInt(next(), 16)), isFinite(hex));
i += 1
)
uffff = 16 * uffff + hex;
string += String.fromCharCode(uffff);
} else {
if ('string' != typeof escapee[ch]) break;
string += escapee[ch];
}
else string += ch;
}
for (; next(); ) {
if ('"' === ch) return next(), string;
if ('\\' === ch)
if ((next(), 'u' === ch)) {
for (
uffff = 0, i = 0;
4 > i && ((hex = parseInt(next(), 16)), isFinite(hex));
i += 1
)
uffff = 16 * uffff + hex;
string += String.fromCharCode(uffff);
} else {
if ('string' != typeof escapee[ch]) break;
string += escapee[ch];
}
else string += ch;
}
}
error('Bad string');
},
white = function () {
@ -165,9 +165,9 @@ export const createParser = () => {
((key = string()),
white(),
next(':'),
Object.hasOwnProperty.call(object, key) &&
Object.hasOwnProperty.call(object, key!) &&
warning('Duplicate key "' + key + '"', latchKeyStart),
(object[key] = value()),
(object[key!] = value()),
white(),
'}' === ch)
)
@ -179,6 +179,9 @@ export const createParser = () => {
};
return (
(value = function () {
if (peek('"""')) {
return stringLiteral();
}
switch ((white(), ch)) {
case '{':
return object();

View file

@ -103,6 +103,7 @@ export const lexerRules: monaco.languages.IMonarchLanguage = {
string_literal: [
[/"""/, { token: 'punctuation.end_triple_quote', next: '@pop' }],
[/\\""""/, { token: 'punctuation.end_triple_quote', next: '@pop' }],
[/./, { token: 'multi_string' }],
],
},

File diff suppressed because one or more lines are too long

View file

@ -6,23 +6,30 @@
* Side Public License, v 1.
*/
const Fs = require('fs');
const Path = require('path');
const { REPO_ROOT } = require('@kbn/dev-utils');
const { REPO_ROOT: REPO_ROOT_FOLLOWING_SYMLINKS } = require('@kbn/dev-utils');
const BASE_REPO_ROOT = Path.resolve(
Fs.realpathSync(Path.resolve(REPO_ROOT_FOLLOWING_SYMLINKS, 'package.json')),
'..'
);
const transpileKbnPaths = [
'test',
'x-pack/test',
'examples',
'x-pack/examples',
// TODO: should should probably remove this link back to the source
'x-pack/plugins/task_manager/server/config.ts',
'src/core/utils/default_app_categories.ts',
].map((path) => Path.resolve(BASE_REPO_ROOT, path));
// modifies all future calls to require() to automatically
// compile the required source with babel
require('@babel/register')({
ignore: [/[\/\\](node_modules|target|dist)[\/\\]/],
only: [
Path.resolve(REPO_ROOT, 'test'),
Path.resolve(REPO_ROOT, 'x-pack/test'),
Path.resolve(REPO_ROOT, 'examples'),
Path.resolve(REPO_ROOT, 'x-pack/examples'),
// TODO: should should probably remove this link back to the source
Path.resolve(REPO_ROOT, 'x-pack/plugins/task_manager/server/config.ts'),
Path.resolve(REPO_ROOT, 'src/core/utils/default_app_categories.ts'),
],
only: transpileKbnPaths,
babelrc: false,
presets: [require.resolve('@kbn/babel-preset/node_preset')],
extensions: ['.js', '.ts', '.tsx'],

View file

@ -12,7 +12,7 @@
],
baseBranches: [
'master',
'7.x',
'7.16',
'7.15',
],
prConcurrentLimit: 0,
@ -55,7 +55,7 @@
groupName: '@elastic/elasticsearch',
packageNames: ['@elastic/elasticsearch'],
reviewers: ['team:kibana-operations', 'team:kibana-core'],
matchBaseBranches: ['7.x'],
matchBaseBranches: ['7.16'],
labels: ['release_note:skip', 'Team:Operations', 'Team:Core', 'backport:skip'],
enabled: true,
},

9
scripts/kibana_setup.js Normal file
View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
require('../src/cli_setup/dev');

20
src/cli_plugin/lib/logger.d.ts vendored Normal file
View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/** Options accepted by the CLI-plugin Logger constructor. */
interface LoggerOptions {
  silent?: boolean;
  quiet?: boolean;
}

/**
 * Hand-written type declarations for the JavaScript `Logger` implementation
 * in src/cli_plugin/lib/logger, so TypeScript consumers (e.g. cli_setup)
 * can import it.
 */
export declare class Logger {
  constructor(settings?: LoggerOptions);
  log(data: string, sameLine?: boolean): void;
  error(data: string): void;
}

118
src/cli_setup/cli_setup.ts Normal file
View file

@ -0,0 +1,118 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { kibanaPackageJson } from '@kbn/utils';
import chalk from 'chalk';
import ora from 'ora';
import { Command } from 'commander';
import { getConfigPath } from '@kbn/utils';
import {
ElasticsearchService,
EnrollResult,
} from '../plugins/interactive_setup/server/elasticsearch_service';
import { getDetailedErrorMessage } from '../plugins/interactive_setup/server/errors';
import {
promptToken,
getCommand,
decodeEnrollmentToken,
kibanaConfigWriter,
elasticsearch,
} from './utils';
import { Logger } from '../cli_plugin/lib/logger';
// Interactive `bin/kibana-setup` command: enrolls this Kibana with an
// Elasticsearch cluster using an enrollment token and writes the resulting
// connection settings to the Kibana config file.
const program = new Command('bin/kibana-setup');

program
  .version(kibanaPackageJson.version)
  .description(
    'This command walks you through all required steps to securely connect Kibana with Elasticsearch'
  )
  .option('-t, --token <token>', 'Elasticsearch enrollment token')
  .option('-s, --silent', 'Prevent all logging');

program.parse(process.argv);

// Shape of the parsed CLI flags.
interface SetupOptions {
  token?: string;
  silent?: boolean;
}

const options = program.opts() as SetupOptions;
const spinner = ora();
const logger = new Logger(options);

async function initCommand() {
  // Take the token from --token; otherwise prompt for it unless --silent.
  const token = decodeEnrollmentToken(
    options.token ?? (options.silent ? undefined : await promptToken())
  );
  if (!token) {
    logger.error(chalk.red('Invalid enrollment token provided.'));
    logger.error('');
    logger.error('To generate a new enrollment token run:');
    logger.error(` ${getCommand('elasticsearch-create-enrollment-token', '-s kibana')}`);
    process.exit(1);
  }

  // Bail out early if the configuration cannot be persisted anyway.
  if (!(await kibanaConfigWriter.isConfigWritable())) {
    logger.error(chalk.red('Kibana does not have enough permissions to write to the config file.'));
    logger.error('');
    logger.error('To grant write access run:');
    logger.error(` chmod +w ${getConfigPath()}`);
    process.exit(1);
  }

  logger.log('');

  if (!options.silent) {
    spinner.start(chalk.dim('Configuring Kibana...'));
  }

  // Enroll with Elasticsearch to obtain the config values to write.
  let configToWrite: EnrollResult;
  try {
    configToWrite = await elasticsearch.enroll({
      hosts: token.adr,
      apiKey: token.key,
      caFingerprint: ElasticsearchService.formatFingerprint(token.fgr),
    });
  } catch (error) {
    if (!options.silent) {
      spinner.fail(
        `${chalk.bold('Unable to enroll with Elasticsearch:')} ${chalk.red(
          `${getDetailedErrorMessage(error)}`
        )}`
      );
    }
    logger.error('');
    logger.error('To generate a new enrollment token run:');
    logger.error(` ${getCommand('elasticsearch-create-enrollment-token', '-s kibana')}`);
    process.exit(1);
  }

  // Persist the enrollment result to kibana.yml.
  try {
    await kibanaConfigWriter.writeConfig(configToWrite);
  } catch (error) {
    if (!options.silent) {
      spinner.fail(
        `${chalk.bold('Unable to configure Kibana:')} ${chalk.red(
          `${getDetailedErrorMessage(error)}`
        )}`
      );
    }
    logger.error(chalk.red(`${getDetailedErrorMessage(error)}`));
    process.exit(1);
  }

  if (!options.silent) {
    spinner.succeed(chalk.bold('Kibana configured successfully.'));
  }

  logger.log('');
  logger.log('To start Kibana run:');
  logger.log(` ${getCommand('kibana')}`);
}

// NOTE(review): returned promise is not awaited; all failure paths above
// terminate via process.exit.
initCommand();

10
src/cli_setup/dev.js Normal file
View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Dev entry point: set up the node environment (source transpilation),
// then run the setup CLI.
require('../setup_node_env');
require('./cli_setup');

10
src/cli_setup/dist.js Normal file
View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Distribution entry point: set up the node environment for built output,
// then run the setup CLI.
require('../setup_node_env/dist');
require('./cli_setup');

View file

@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Jest configuration scoped to the src/cli_setup unit tests.
module.exports = {
  preset: '@kbn/test',
  rootDir: '../..',
  roots: ['<rootDir>/src/cli_setup'],
};

View file

@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { decodeEnrollmentToken, getCommand } from './utils';
import type { EnrollmentToken } from '../plugins/interactive_setup/common';
describe('kibana setup cli', () => {
describe('getCommand', () => {
const originalPlatform = process.platform;
it('should format windows correctly', () => {
Object.defineProperty(process, 'platform', {
value: 'win32',
});
expect(getCommand('kibana')).toEqual('bin\\kibana.bat');
expect(getCommand('kibana', '--silent')).toEqual('bin\\kibana.bat --silent');
});
it('should format unix correctly', () => {
Object.defineProperty(process, 'platform', {
value: 'linux',
});
expect(getCommand('kibana')).toEqual('bin/kibana');
expect(getCommand('kibana', '--silent')).toEqual('bin/kibana --silent');
});
afterAll(function () {
Object.defineProperty(process, 'platform', {
value: originalPlatform,
});
});
});
describe('decodeEnrollmentToken', () => {
const token: EnrollmentToken = {
ver: '8.0.0',
adr: ['localhost:9200'],
fgr: 'AA:C8:2C:2E:09:58:F4:FE:A1:D2:AB:7F:13:70:C2:7D:EB:FD:A2:23:88:13:E4:DA:3A:D0:59:D0:09:00:07:36',
key: 'JH-36HoBo4EYIoVhHh2F:uEo4dksARMq_BSHaAHUr8Q',
};
it('should decode a valid token', () => {
expect(decodeEnrollmentToken(btoa(JSON.stringify(token)))).toEqual({
adr: ['https://localhost:9200'],
fgr: 'AA:C8:2C:2E:09:58:F4:FE:A1:D2:AB:7F:13:70:C2:7D:EB:FD:A2:23:88:13:E4:DA:3A:D0:59:D0:09:00:07:36',
key: 'SkgtMzZIb0JvNEVZSW9WaEhoMkY6dUVvNGRrc0FSTXFfQlNIYUFIVXI4UQ==',
ver: '8.0.0',
});
});
it('should not decode an invalid token', () => {
expect(decodeEnrollmentToken(JSON.stringify(token))).toBeUndefined();
expect(
decodeEnrollmentToken(
btoa(
JSON.stringify({
ver: [''],
adr: null,
fgr: false,
key: undefined,
})
)
)
).toBeUndefined();
expect(decodeEnrollmentToken(btoa(JSON.stringify({})))).toBeUndefined();
expect(decodeEnrollmentToken(btoa(JSON.stringify([])))).toBeUndefined();
expect(decodeEnrollmentToken(btoa(JSON.stringify(null)))).toBeUndefined();
expect(decodeEnrollmentToken(btoa(JSON.stringify('')))).toBeUndefined();
});
});
});

91
src/cli_setup/utils.ts Normal file
View file

@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { getConfigPath } from '@kbn/utils';
import inquirer from 'inquirer';
import { duration } from 'moment';
import { merge } from 'lodash';
import { Logger } from '../core/server';
import { ClusterClient } from '../core/server/elasticsearch/client';
import { configSchema } from '../core/server/elasticsearch';
import { ElasticsearchService } from '../plugins/interactive_setup/server/elasticsearch_service';
import { KibanaConfigWriter } from '../plugins/interactive_setup/server/kibana_config_writer';
import type { EnrollmentToken } from '../plugins/interactive_setup/common';
const noop = () => {};
const logger: Logger = {
debug: noop,
error: noop,
warn: noop,
trace: noop,
info: noop,
fatal: noop,
log: noop,
get: () => logger,
};
export const kibanaConfigWriter = new KibanaConfigWriter(getConfigPath(), logger);
export const elasticsearch = new ElasticsearchService(logger).setup({
connectionCheckInterval: duration(Infinity),
elasticsearch: {
createClient: (type, config) => {
const defaults = configSchema.validate({});
return new ClusterClient(
merge(
defaults,
{
hosts: Array.isArray(defaults.hosts) ? defaults.hosts : [defaults.hosts],
},
config
),
logger,
type
);
},
},
});
export async function promptToken() {
const answers = await inquirer.prompt({
type: 'input',
name: 'token',
message: 'Enter enrollment token:',
validate: (value = '') => (decodeEnrollmentToken(value) ? true : 'Invalid enrollment token'),
});
return answers.token;
}
export function decodeEnrollmentToken(enrollmentToken: string): EnrollmentToken | undefined {
try {
const json = JSON.parse(atob(enrollmentToken)) as EnrollmentToken;
if (
!Array.isArray(json.adr) ||
json.adr.some((adr) => typeof adr !== 'string') ||
typeof json.fgr !== 'string' ||
typeof json.key !== 'string' ||
typeof json.ver !== 'string'
) {
return;
}
return { ...json, adr: json.adr.map((adr) => `https://${adr}`), key: btoa(json.key) };
} catch (error) {} // eslint-disable-line no-empty
}
function btoa(str: string) {
return Buffer.from(str, 'binary').toString('base64');
}
function atob(str: string) {
return Buffer.from(str, 'base64').toString('binary');
}
export function getCommand(command: string, args?: string) {
const isWindows = process.platform === 'win32';
return `${isWindows ? `bin\\${command}.bat` : `bin/${command}`}${args ? ` ${args}` : ''}`;
}

View file

@ -30,6 +30,7 @@ interface StartDeps {
export class ApmSystem {
private readonly enabled: boolean;
private pageLoadTransaction?: Transaction;
/**
* `apmConfig` would be populated with relevant APM RUM agent
* configuration if server is started with elastic.apm.* config.
@ -49,10 +50,23 @@ export class ApmSystem {
this.addHttpRequestNormalization(apm);
init(apmConfig);
this.pageLoadTransaction = apm.getCurrentTransaction();
// Keep the page load transaction open until all resources finished loading
if (this.pageLoadTransaction && this.pageLoadTransaction.type === 'page-load') {
// @ts-expect-error 2339
this.pageLoadTransaction.block(true);
this.pageLoadTransaction.mark('apm-setup');
}
}
async start(start?: StartDeps) {
if (!this.enabled || !start) return;
if (this.pageLoadTransaction && this.pageLoadTransaction.type === 'page-load') {
this.pageLoadTransaction.mark('apm-start');
}
/**
* Register listeners for navigation changes and capture them as
* route-change transactions after Kibana app is bootstrapped
@ -60,6 +74,11 @@ export class ApmSystem {
start.application.currentAppId$.subscribe((appId) => {
const apmInstance = (window as any).elasticApm;
if (appId && apmInstance && typeof apmInstance.startTransaction === 'function') {
// Close the page load transaction
if (this.pageLoadTransaction && this.pageLoadTransaction.type === 'page-load') {
this.pageLoadTransaction.end();
this.pageLoadTransaction = undefined;
}
apmInstance.startTransaction(`/app/${appId}`, 'route-change', {
managed: true,
canReuse: true,

View file

@ -499,7 +499,7 @@ export class DocLinksService {
netGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/net-api/${DOC_LINK_VERSION}/index.html`,
perlGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/perl-api/${DOC_LINK_VERSION}/index.html`,
phpGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/php-api/${DOC_LINK_VERSION}/index.html`,
pythonGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/net-api/${DOC_LINK_VERSION}/index.html`,
pythonGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/python-api/${DOC_LINK_VERSION}/index.html`,
rubyOverview: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/ruby-api/${DOC_LINK_VERSION}/ruby_client.html`,
rustGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/rust-api/${DOC_LINK_VERSION}/index.html`,
},

View file

@ -43,10 +43,12 @@
top: $headerHeight;
}
.kbnStickyMenu {
position: sticky;
max-height: calc(100vh - #{$headerHeight + $euiSize});
top: $headerHeight + $euiSize;
@include euiBreakpoint('xl', 'l') {
.kbnStickyMenu {
position: sticky;
max-height: calc(100vh - #{$headerHeight + $euiSize});
top: $headerHeight + $euiSize;
}
}
}

View file

@ -13,164 +13,16 @@
*/
import type { estypes } from '@elastic/elasticsearch';
import { IndexMapping } from '../../mappings';
export interface CallCluster {
(path: 'bulk', opts: { body: object[] }): Promise<BulkResult>;
(path: 'count', opts: CountOpts): Promise<{ count: number; _shards: estypes.ShardStatistics }>;
(path: 'clearScroll', opts: { scrollId: string }): Promise<any>;
(path: 'indices.create', opts: IndexCreationOpts): Promise<any>;
(path: 'indices.exists', opts: IndexOpts): Promise<boolean>;
(path: 'indices.existsAlias', opts: { name: string }): Promise<boolean>;
(path: 'indices.get', opts: IndexOpts & Ignorable): Promise<IndicesInfo | NotFound>;
(path: 'indices.getAlias', opts: { name: string } & Ignorable): Promise<AliasResult | NotFound>;
(path: 'indices.getMapping', opts: IndexOpts): Promise<MappingResult>;
(path: 'indices.getSettings', opts: IndexOpts): Promise<IndexSettingsResult>;
(path: 'indices.refresh', opts: IndexOpts): Promise<any>;
(path: 'indices.updateAliases', opts: UpdateAliasesOpts): Promise<any>;
(path: 'indices.deleteTemplate', opts: { name: string }): Promise<any>;
(path: 'cat.templates', opts: { format: 'json'; name: string }): Promise<Array<{ name: string }>>;
(path: 'reindex', opts: ReindexOpts): Promise<any>;
(path: 'scroll', opts: ScrollOpts): Promise<SearchResults>;
(path: 'search', opts: SearchOpts): Promise<SearchResults>;
(path: 'tasks.get', opts: { taskId: string }): Promise<{
completed: boolean;
error?: ErrorResponse;
}>;
}
///////////////////////////////////////////////////////////////////
// callCluster argument type definitions
///////////////////////////////////////////////////////////////////
export interface Ignorable {
ignore: number[];
}
export interface CountOpts {
body: {
query: object;
};
index: string;
}
export interface PutMappingOpts {
body: IndexMapping;
index: string;
}
export interface IndexOpts {
index: string;
}
export interface IndexCreationOpts {
index: string;
body?: {
mappings?: IndexMapping;
settings?: {
number_of_shards: number;
auto_expand_replicas: string;
};
};
}
export interface ReindexOpts {
body: {
dest: IndexOpts;
source: IndexOpts & { size: number };
script?: {
source: string;
lang: 'painless';
};
};
refresh: boolean;
waitForCompletion: boolean;
}
export type AliasAction =
| { remove_index: IndexOpts }
| {
remove_index: { index: string };
}
| { remove: { index: string; alias: string } }
| { add: { index: string; alias: string } };
export interface UpdateAliasesOpts {
body: {
actions: AliasAction[];
};
}
export interface SearchOpts {
body: object;
index: string;
scroll?: string;
}
export interface ScrollOpts {
scroll: string;
scrollId: string;
}
///////////////////////////////////////////////////////////////////
// callCluster result type definitions
///////////////////////////////////////////////////////////////////
export interface NotFound {
status: 404;
}
export interface MappingResult {
[index: string]: {
mappings: IndexMapping;
};
}
export interface AliasResult {
[alias: string]: object;
}
export interface IndexSettingsResult {
[indexName: string]: {
settings: {
index: {
number_of_shards: string;
auto_expand_replicas: string;
provided_name: string;
creation_date: string;
number_of_replicas: string;
uuid: string;
version: { created: '7000001' };
};
};
};
}
export interface RawDoc {
_id: estypes.Id;
_source: any;
_type?: string;
}
export interface SearchResults {
hits: {
hits: RawDoc[];
};
_scroll_id?: string;
_shards: estypes.ShardStatistics;
}
export interface ErrorResponse {
type: string;
reason: string;
}
export interface BulkResult {
items: Array<{ index: { error?: ErrorResponse } }>;
}
export interface IndexInfo {
aliases: AliasResult;
mappings: IndexMapping;
}
export interface IndicesInfo {
[index: string]: IndexInfo;
}

View file

@ -306,7 +306,7 @@ export async function convertToAlias(
* alias, meaning that it will only point to one index at a time, so we
* remove any other indices from the alias.
*
* @param {CallCluster} client
* @param {MigrationEsClient} client
* @param {string} index
* @param {string} alias
* @param {AliasAction[]} aliasActions - Optional actions to be added to the updateAliases call

View file

@ -9,7 +9,6 @@
export { DocumentMigrator } from './document_migrator';
export { IndexMigrator } from './index_migrator';
export { buildActiveMappings } from './build_active_mappings';
export type { CallCluster } from './call_cluster';
export type { LogFn, SavedObjectsMigrationLogger } from './migration_logger';
export type { MigrationResult, MigrationStatus } from './migration_coordinator';
export { createMigrationEsClient } from './migration_es_client';

View file

@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { savedObjectsMigrationConfig } from './saved_objects_config';
import { getDeprecationsFor } from '../config/test_utils';
const applyMigrationsDeprecations = (settings: Record<string, any> = {}) =>
getDeprecationsFor({
provider: savedObjectsMigrationConfig.deprecations!,
settings,
path: 'migrations',
});
describe('migrations config', function () {
describe('deprecations', () => {
it('logs a warning if migrations.enableV2 is set: true', () => {
const { messages } = applyMigrationsDeprecations({ enableV2: true });
expect(messages).toMatchInlineSnapshot(`
Array [
"You no longer need to configure \\"migrations.enableV2\\".",
]
`);
});
it('logs a warning if migrations.enableV2 is set: false', () => {
const { messages } = applyMigrationsDeprecations({ enableV2: false });
expect(messages).toMatchInlineSnapshot(`
Array [
"You no longer need to configure \\"migrations.enableV2\\".",
]
`);
});
});
it('does not log a warning if migrations.enableV2 is not set', () => {
const { messages } = applyMigrationsDeprecations({ batchSize: 1_000 });
expect(messages).toMatchInlineSnapshot(`Array []`);
});
});

View file

@ -7,7 +7,6 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
import { ConfigDeprecationProvider } from '../config';
import type { ServiceConfigDescriptor } from '../internal_types';
const migrationSchema = schema.object({
@ -21,13 +20,10 @@ const migrationSchema = schema.object({
export type SavedObjectsMigrationConfigType = TypeOf<typeof migrationSchema>;
const migrationDeprecations: ConfigDeprecationProvider = ({ unused }) => [unused('enableV2')];
export const savedObjectsMigrationConfig: ServiceConfigDescriptor<SavedObjectsMigrationConfigType> =
{
path: 'migrations',
schema: migrationSchema,
deprecations: migrationDeprecations,
};
const soSchema = schema.object({

View file

@ -0,0 +1,29 @@
#!/bin/sh
SCRIPT=$0
# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
ls=$(ls -ld "$SCRIPT")
# Drop everything prior to ->
link=$(expr "$ls" : '.*-> \(.*\)$')
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=$(dirname "$SCRIPT")/"$link"
fi
done
DIR="$(dirname "${SCRIPT}")/.."
CONFIG_DIR=${KBN_PATH_CONF:-"$DIR/config"}
NODE="${DIR}/node/bin/node"
test -x "$NODE"
if [ ! -x "$NODE" ]; then
echo "unable to find usable node.js executable."
exit 1
fi
if [ -f "${CONFIG_DIR}/node.options" ]; then
KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
fi
NODE_OPTIONS="$KBN_NODE_OPTS $NODE_OPTIONS" "${NODE}" "${DIR}/src/cli_setup/dist" "$@"

View file

@ -0,0 +1,35 @@
@echo off
SETLOCAL ENABLEDELAYEDEXPANSION
set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
set NODE=%DIR%\node\node.exe
If Not Exist "%NODE%" (
Echo unable to find usable node.js executable.
Exit /B 1
)
set CONFIG_DIR=%KBN_PATH_CONF%
If ["%KBN_PATH_CONF%"] == [] (
set "CONFIG_DIR=%DIR%\config"
)
IF EXIST "%CONFIG_DIR%\node.options" (
for /F "usebackq eol=# tokens=*" %%i in ("%CONFIG_DIR%\node.options") do (
If [!NODE_OPTIONS!] == [] (
set "NODE_OPTIONS=%%i"
) Else (
set "NODE_OPTIONS=!NODE_OPTIONS! %%i"
)
)
)
TITLE Kibana Setup
"%NODE%" "%DIR%\src\cli_setup\dist" %*
:finally
ENDLOCAL

View file

@ -26,7 +26,7 @@ export const CopySource: Task = {
'!src/test_utils/**',
'!src/fixtures/**',
'!src/cli/repl/**',
'!src/cli/dev.js',
'!src/cli*/dev.js',
'!src/functional_test_runner/**',
'!src/dev/**',
'!**/jest.config.js',

View file

@ -22,11 +22,7 @@ async function getDependencies(cwd: string, entries: string[]) {
export async function findUsedDependencies(listedPkgDependencies: any, baseDir: any) {
// Define the entry points for the server code in order to
// start here later looking for the server side dependencies
const mainCodeEntries = [
Path.resolve(baseDir, `src/cli/dist.js`),
Path.resolve(baseDir, `src/cli_keystore/dist.js`),
Path.resolve(baseDir, `src/cli_plugin/dist.js`),
];
const mainCodeEntries = await globby(normalize(Path.resolve(baseDir, `src/cli*/dist.js`)));
const discoveredPluginEntries = await globby([
normalize(Path.resolve(baseDir, `src/plugins/**/server/index.js`)),

View file

@ -206,7 +206,9 @@ describe('Dashboard container lifecycle', () => {
});
});
describe('Dashboard initial state', () => {
// FLAKY: https://github.com/elastic/kibana/issues/116050
// FLAKY: https://github.com/elastic/kibana/issues/105018
describe.skip('Dashboard initial state', () => {
it('Extracts state from Dashboard Saved Object', async () => {
const { renderHookResult, embeddableFactoryResult } = renderDashboardAppStateHook({});
const getResult = () => renderHookResult.result.current;
@ -276,7 +278,8 @@ describe('Dashboard initial state', () => {
});
});
describe('Dashboard state sync', () => {
// FLAKY: https://github.com/elastic/kibana/issues/116043
describe.skip('Dashboard state sync', () => {
let defaultDashboardAppStateHookResult: RenderDashboardStateHookReturn;
const getResult = () => defaultDashboardAppStateHookResult.renderHookResult.result.current;

View file

@ -8,6 +8,7 @@
import { extract, inject } from './persistable_state';
import { Filter } from '@kbn/es-query';
import { DATA_VIEW_SAVED_OBJECT_TYPE } from '../../common';
describe('filter manager persistable state tests', () => {
const filters: Filter[] = [
@ -15,13 +16,15 @@ describe('filter manager persistable state tests', () => {
];
describe('reference injection', () => {
test('correctly inserts reference to filter', () => {
const updatedFilters = inject(filters, [{ type: 'index_pattern', name: 'test', id: '123' }]);
const updatedFilters = inject(filters, [
{ type: DATA_VIEW_SAVED_OBJECT_TYPE, name: 'test', id: '123' },
]);
expect(updatedFilters[0]).toHaveProperty('meta.index', '123');
});
test('drops index setting if reference is missing', () => {
const updatedFilters = inject(filters, [
{ type: 'index_pattern', name: 'test123', id: '123' },
{ type: DATA_VIEW_SAVED_OBJECT_TYPE, name: 'test123', id: '123' },
]);
expect(updatedFilters[0]).toHaveProperty('meta.index', undefined);
});

View file

@ -8,7 +8,9 @@
import uuid from 'uuid';
import { Filter } from '@kbn/es-query';
import { DATA_VIEW_SAVED_OBJECT_TYPE } from '../../common';
import { SavedObjectReference } from '../../../../core/types';
import { MigrateFunctionsObject } from '../../../kibana_utils/common';
export const extract = (filters: Filter[]) => {
const references: SavedObjectReference[] = [];
@ -16,7 +18,7 @@ export const extract = (filters: Filter[]) => {
if (filter.meta?.index) {
const id = uuid();
references.push({
type: 'index_pattern',
type: DATA_VIEW_SAVED_OBJECT_TYPE,
name: id,
id: filter.meta.index,
});
@ -54,6 +56,10 @@ export const telemetry = (filters: Filter[], collector: unknown) => {
return {};
};
export const getAllMigrations = () => {
export const migrateToLatest = (filters: Filter[], version: string) => {
return filters;
};
export const getAllMigrations = (): MigrateFunctionsObject => {
return {};
};

View file

@ -6,6 +6,25 @@
* Side Public License, v 1.
*/
export * from './timefilter/types';
import type { Query, Filter } from '@kbn/es-query';
import type { RefreshInterval, TimeRange } from './timefilter/types';
export { Query } from '@kbn/es-query';
export type { RefreshInterval, TimeRange, TimeRangeBounds } from './timefilter/types';
export type { Query } from '@kbn/es-query';
export type SavedQueryTimeFilter = TimeRange & {
refreshInterval: RefreshInterval;
};
export interface SavedQuery {
id: string;
attributes: SavedQueryAttributes;
}
export interface SavedQueryAttributes {
title: string;
description: string;
query: Query;
filters?: Filter[];
timefilter?: SavedQueryTimeFilter;
}

View file

@ -56,7 +56,7 @@ export class FieldParamType extends BaseParamType {
'data.search.aggs.paramTypes.field.notFoundSavedFieldParameterErrorMessage',
{
defaultMessage:
'The field "{fieldParameter}" associated with this object no longer exists in the index pattern. Please use another field.',
'The field "{fieldParameter}" associated with this object no longer exists in the data view. Please use another field.',
values: {
fieldParameter: field.name,
},
@ -75,7 +75,7 @@ export class FieldParamType extends BaseParamType {
'data.search.aggs.paramTypes.field.invalidSavedFieldParameterErrorMessage',
{
defaultMessage:
'Saved field "{fieldParameter}" of index pattern "{indexPatternTitle}" is invalid for use with the "{aggType}" aggregation. Please select a new field.',
'Saved field "{fieldParameter}" of data view "{indexPatternTitle}" is invalid for use with the "{aggType}" aggregation. Please select a new field.',
values: {
fieldParameter: field.name,
aggType: aggConfig?.type?.title,

View file

@ -67,10 +67,34 @@ describe('JSON', function () {
aggParam.write(aggConfig, output);
expect(aggConfig.params).toHaveProperty(paramName);
expect(output.params).toEqual({
existing: 'true',
new_param: 'should exist in output',
});
expect(output.params).toMatchInlineSnapshot(`
Object {
"existing": "true",
"new_param": "should exist in output",
}
`);
});
it('should append param when valid JSON with triple quotes', () => {
const aggParam = initAggParam();
const jsonData = `{
"a": """
multiline string - line 1
"""
}`;
aggConfig.params[paramName] = jsonData;
aggParam.write(aggConfig, output);
expect(aggConfig.params).toHaveProperty(paramName);
expect(output.params).toMatchInlineSnapshot(`
Object {
"a": "
multiline string - line 1
",
}
`);
});
it('should not overwrite existing params', () => {

View file

@ -11,6 +11,17 @@ import _ from 'lodash';
import { IAggConfig } from '../agg_config';
import { BaseParamType } from './base';
function collapseLiteralStrings(xjson: string) {
const tripleQuotes = '"""';
const splitData = xjson.split(tripleQuotes);
for (let idx = 1; idx < splitData.length - 1; idx += 2) {
splitData[idx] = JSON.stringify(splitData[idx]);
}
return splitData.join('');
}
export class JsonParamType extends BaseParamType {
constructor(config: Record<string, any>) {
super(config);
@ -26,9 +37,8 @@ export class JsonParamType extends BaseParamType {
return;
}
// handle invalid Json input
try {
paramJson = JSON.parse(param);
paramJson = JSON.parse(collapseLiteralStrings(param));
} catch (err) {
return;
}

View file

@ -62,7 +62,7 @@ export const getEsaggsMeta: () => Omit<EsaggsExpressionFunctionDefinition, 'fn'>
types: ['index_pattern'],
required: true,
help: i18n.translate('data.search.functions.esaggs.index.help', {
defaultMessage: 'Index pattern retrieved with indexPatternLoad',
defaultMessage: 'Data view retrieved with indexPatternLoad',
}),
},
aggs: {

View file

@ -115,12 +115,12 @@ export const getEsdslFn = ({
request.stats({
indexPattern: {
label: i18n.translate('data.search.es_search.indexPatternLabel', {
defaultMessage: 'Index pattern',
label: i18n.translate('data.search.es_search.dataViewLabel', {
defaultMessage: 'Data view',
}),
value: args.index,
description: i18n.translate('data.search.es_search.indexPatternDescription', {
defaultMessage: 'The index pattern that connected to the Elasticsearch indices.',
defaultMessage: 'The data view that connected to the Elasticsearch indices.',
}),
},
});

View file

@ -36,6 +36,8 @@ export * from './field';
export * from './phrase_filter';
export * from './exists_filter';
export * from './range_filter';
export * from './remove_filter';
export * from './select_filter';
export * from './kibana_filter';
export * from './filters_to_ast';
export * from './timerange';

View file

@ -0,0 +1,206 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { createMockContext } from '../../../../expressions/common';
import { functionWrapper } from './utils';
import { removeFilterFunction } from './remove_filter';
import { KibanaContext } from './kibana_context_type';
describe('interpreter/functions#removeFilter', () => {
const fn = functionWrapper(removeFilterFunction);
const kibanaContext: KibanaContext = {
type: 'kibana_context',
filters: [
{
meta: {
group: 'g1',
},
query: {},
},
{
meta: {
group: 'g2',
},
query: {},
},
{
meta: {
group: 'g1',
controlledBy: 'i1',
},
query: {},
},
{
meta: {
group: 'g1',
controlledBy: 'i2',
},
query: {},
},
{
meta: {
controlledBy: 'i1',
},
query: {},
},
],
};
it('removes all filters when called without arguments', () => {
const actual = fn(kibanaContext, {}, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [],
"type": "kibana_context",
}
`);
});
it('removes filters belonging to certain group', () => {
const actual = fn(kibanaContext, { group: 'g1' }, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [
Object {
"meta": Object {
"group": "g2",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i1",
},
"query": Object {},
},
],
"type": "kibana_context",
}
`);
});
it('removes ungrouped filters', () => {
const actual = fn(kibanaContext, { ungrouped: true }, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [
Object {
"meta": Object {
"group": "g1",
},
"query": Object {},
},
Object {
"meta": Object {
"group": "g2",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i1",
"group": "g1",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i2",
"group": "g1",
},
"query": Object {},
},
],
"type": "kibana_context",
}
`);
});
it('removes ungrouped filters and filters matching a group', () => {
const actual = fn(kibanaContext, { group: 'g1', ungrouped: true }, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [
Object {
"meta": Object {
"group": "g2",
},
"query": Object {},
},
],
"type": "kibana_context",
}
`);
});
it('removes filters controlled by specified id', () => {
const actual = fn(kibanaContext, { from: 'i1' }, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [
Object {
"meta": Object {
"group": "g1",
},
"query": Object {},
},
Object {
"meta": Object {
"group": "g2",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i2",
"group": "g1",
},
"query": Object {},
},
],
"type": "kibana_context",
}
`);
});
it('removes filters controlled by specified id and matching a group', () => {
const actual = fn(kibanaContext, { group: 'g1', from: 'i1' }, createMockContext());
expect(actual).toMatchInlineSnapshot(`
Object {
"filters": Array [
Object {
"meta": Object {
"group": "g1",
},
"query": Object {},
},
Object {
"meta": Object {
"group": "g2",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i2",
"group": "g1",
},
"query": Object {},
},
Object {
"meta": Object {
"controlledBy": "i1",
},
"query": Object {},
},
],
"type": "kibana_context",
}
`);
});
});

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { i18n } from '@kbn/i18n';
import { ExpressionFunctionDefinition } from 'src/plugins/expressions/common';
import { KibanaContext } from './kibana_context_type';
interface Arguments {
group?: string;
from?: string;
ungrouped?: boolean;
}
export type ExpressionFunctionRemoveFilter = ExpressionFunctionDefinition<
'removeFilter',
KibanaContext,
Arguments,
KibanaContext
>;
export const removeFilterFunction: ExpressionFunctionRemoveFilter = {
name: 'removeFilter',
type: 'kibana_context',
inputTypes: ['kibana_context'],
help: i18n.translate('data.search.functions.removeFilter.help', {
defaultMessage: 'Removes filters from context',
}),
args: {
group: {
types: ['string'],
aliases: ['_'],
help: i18n.translate('data.search.functions.removeFilter.group.help', {
defaultMessage: 'Removes only filters belonging to the provided group',
}),
},
from: {
types: ['string'],
help: i18n.translate('data.search.functions.removeFilter.from.help', {
defaultMessage: 'Removes only filters owned by the provided id',
}),
},
ungrouped: {
types: ['boolean'],
aliases: ['nogroup', 'nogroups'],
default: false,
help: i18n.translate('data.search.functions.removeFilter.ungrouped.help', {
defaultMessage: 'Should filters without group be removed',
}),
},
},
fn(input, { group, from, ungrouped }) {
return {
...input,
filters:
input.filters?.filter(({ meta }) => {
const isGroupMatching =
(!group && !ungrouped) || group === meta.group || (ungrouped && !meta.group);
const isOriginMatching = !from || from === meta.controlledBy;
return !isGroupMatching || !isOriginMatching;
}) || [],
};
},
};

View file

@ -0,0 +1,223 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { createMockContext } from '../../../../expressions/common';
import { functionWrapper } from './utils';
import { selectFilterFunction } from './select_filter';
import { KibanaContext } from './kibana_context_type';
// Unit tests for the `selectFilter` expression function: given a
// kibana_context, it keeps only the filters matching the requested
// group / ungrouped / from (controlledBy) criteria.
describe('interpreter/functions#selectFilter', () => {
  const fn = functionWrapper(selectFilterFunction);
  // Fixture context with five filters covering every case the function
  // distinguishes: grouped only (g1, g2), grouped + controlled (g1/i1,
  // g1/i2), and controlled but ungrouped (i1).
  const kibanaContext: KibanaContext = {
    type: 'kibana_context',
    filters: [
      {
        meta: {
          group: 'g1',
        },
        query: {},
      },
      {
        meta: {
          group: 'g2',
        },
        query: {},
      },
      {
        meta: {
          group: 'g1',
          controlledBy: 'i1',
        },
        query: {},
      },
      {
        meta: {
          group: 'g1',
          controlledBy: 'i2',
        },
        query: {},
      },
      {
        meta: {
          controlledBy: 'i1',
        },
        query: {},
      },
    ],
  };
  it('selects all filters when called without arguments', () => {
    const actual = fn(kibanaContext, {}, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "group": "g2",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i2",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
  it('selects filters belonging to certain group', () => {
    const actual = fn(kibanaContext, { group: 'g1' }, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i2",
              "group": "g1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
  it('selects ungrouped filters', () => {
    const actual = fn(kibanaContext, { ungrouped: true }, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "controlledBy": "i1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
  it('selects ungrouped filters and filters matching a group', () => {
    const actual = fn(kibanaContext, { group: 'g1', ungrouped: true }, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i2",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
  it('selects filters controlled by specified id', () => {
    const actual = fn(kibanaContext, { from: 'i1' }, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "controlledBy": "i1",
              "group": "g1",
            },
            "query": Object {},
          },
          Object {
            "meta": Object {
              "controlledBy": "i1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
  it('selects filters controlled by specified id and matching a group', () => {
    const actual = fn(kibanaContext, { group: 'g1', from: 'i1' }, createMockContext());
    expect(actual).toMatchInlineSnapshot(`
      Object {
        "filters": Array [
          Object {
            "meta": Object {
              "controlledBy": "i1",
              "group": "g1",
            },
            "query": Object {},
          },
        ],
        "type": "kibana_context",
      }
    `);
  });
});

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { i18n } from '@kbn/i18n';
import { ExpressionFunctionDefinition } from 'src/plugins/expressions/common';
import { KibanaContext } from './kibana_context_type';
/** Arguments accepted by the `selectFilter` expression function. */
interface Arguments {
  // Keep only filters whose `meta.group` equals this value.
  group?: string;
  // Keep only filters whose `meta.controlledBy` equals this value.
  from?: string;
  // When true, filters without any `meta.group` are also kept.
  ungrouped?: boolean;
}
/** Expression function that narrows the filters carried on a kibana_context. */
export type ExpressionFunctionSelectFilter = ExpressionFunctionDefinition<
  'selectFilter',
  KibanaContext,
  Arguments,
  KibanaContext
>;
/**
 * `selectFilter` expression function definition: takes a kibana_context as
 * input and returns a copy of it whose `filters` array is narrowed down by
 * group membership and/or the id of the entity controlling the filter.
 */
export const selectFilterFunction: ExpressionFunctionSelectFilter = {
  name: 'selectFilter',
  type: 'kibana_context',
  inputTypes: ['kibana_context'],
  help: i18n.translate('data.search.functions.selectFilter.help', {
    defaultMessage: 'Selects filters from context',
  }),
  args: {
    group: {
      types: ['string'],
      aliases: ['_'],
      help: i18n.translate('data.search.functions.selectFilter.group.help', {
        defaultMessage: 'Select only filters belonging to the provided group',
      }),
    },
    from: {
      types: ['string'],
      help: i18n.translate('data.search.functions.selectFilter.from.help', {
        defaultMessage: 'Select only filters owned by the provided id',
      }),
    },
    ungrouped: {
      types: ['boolean'],
      aliases: ['nogroup', 'nogroups'],
      default: false,
      help: i18n.translate('data.search.functions.selectFilter.ungrouped.help', {
        defaultMessage: 'Should filters without group be included',
      }),
    },
  },
  fn(input, { group, ungrouped, from }) {
    // A filter survives only if both predicates hold: with neither `group`
    // nor `ungrouped` given, every group matches; with no `from` given,
    // every origin matches.
    const existingFilters = input.filters ?? [];
    const selected = existingFilters.filter(({ meta }) => {
      const groupMatches =
        (!group && !ungrouped) || meta.group === group || (ungrouped && !meta.group);
      const originMatches = !from || meta.controlledBy === from;
      return groupMatches && originMatches;
    });
    return { ...input, filters: selected };
  },
};

View file

@ -25,17 +25,17 @@ export function getRequestInspectorStats(searchSource: ISearchSource) {
if (index) {
stats.indexPattern = {
label: i18n.translate('data.search.searchSource.indexPatternLabel', {
defaultMessage: 'Index pattern',
label: i18n.translate('data.search.searchSource.dataViewLabel', {
defaultMessage: 'Data view',
}),
value: index.title,
description: i18n.translate('data.search.searchSource.indexPatternDescription', {
defaultMessage: 'The index pattern that connected to the Elasticsearch indices.',
description: i18n.translate('data.search.searchSource.dataViewDescription', {
defaultMessage: 'The data view that was queried.',
}),
};
stats.indexPatternId = {
label: i18n.translate('data.search.searchSource.indexPatternIdLabel', {
defaultMessage: 'Index pattern ID',
label: i18n.translate('data.search.searchSource.dataViewIdLabel', {
defaultMessage: 'Data view ID',
}),
value: index.id!,
description: i18n.translate('data.search.searchSource.indexPatternIdDescription', {

View file

@ -130,7 +130,7 @@ export class DataPublicPlugin
core: CoreStart,
{ uiActions, fieldFormats, dataViews }: DataStartDependencies
): DataPublicPluginStart {
const { uiSettings, notifications, savedObjects, overlays } = core;
const { uiSettings, notifications, overlays } = core;
setNotifications(notifications);
setOverlays(overlays);
setUiSettings(uiSettings);
@ -138,7 +138,7 @@ export class DataPublicPlugin
const query = this.queryService.start({
storage: this.storage,
savedObjectsClient: savedObjects.client,
http: core.http,
uiSettings,
});

View file

@ -7,7 +7,7 @@
*/
import { share } from 'rxjs/operators';
import { IUiSettingsClient, SavedObjectsClientContract } from 'src/core/public';
import { HttpStart, IUiSettingsClient } from 'src/core/public';
import { IStorageWrapper } from 'src/plugins/kibana_utils/public';
import { buildEsQuery } from '@kbn/es-query';
import { FilterManager } from './filter_manager';
@ -15,7 +15,7 @@ import { createAddToQueryLog } from './lib';
import { TimefilterService, TimefilterSetup } from './timefilter';
import { createSavedQueryService } from './saved_query/saved_query_service';
import { createQueryStateObservable } from './state_sync/create_global_query_observable';
import { QueryStringManager, QueryStringContract } from './query_string';
import { QueryStringContract, QueryStringManager } from './query_string';
import { getEsQueryConfig, TimeRange } from '../../common';
import { getUiSettings } from '../services';
import { NowProviderInternalContract } from '../now_provider';
@ -33,9 +33,9 @@ interface QueryServiceSetupDependencies {
}
interface QueryServiceStartDependencies {
savedObjectsClient: SavedObjectsClientContract;
storage: IStorageWrapper;
uiSettings: IUiSettingsClient;
http: HttpStart;
}
export class QueryService {
@ -70,7 +70,7 @@ export class QueryService {
};
}
public start({ savedObjectsClient, storage, uiSettings }: QueryServiceStartDependencies) {
public start({ storage, uiSettings, http }: QueryServiceStartDependencies) {
return {
addToQueryLog: createAddToQueryLog({
storage,
@ -78,7 +78,7 @@ export class QueryService {
}),
filterManager: this.filterManager,
queryString: this.queryStringManager,
savedQueries: createSavedQueryService(savedObjectsClient),
savedQueries: createSavedQueryService(http),
state$: this.state$,
timefilter: this.timefilter,
getEsQuery: (indexPattern: IndexPattern, timeRange?: TimeRange) => {

View file

@ -7,8 +7,20 @@
*/
import { createSavedQueryService } from './saved_query_service';
import { FilterStateStore } from '../../../common';
import { SavedQueryAttributes } from './types';
import { httpServiceMock } from '../../../../../core/public/mocks';
import { SavedQueryAttributes } from '../../../common';
const http = httpServiceMock.createStartContract();
const {
deleteSavedQuery,
getSavedQuery,
findSavedQueries,
createQuery,
updateQuery,
getAllSavedQueries,
getSavedQueryCount,
} = createSavedQueryService(http);
const savedQueryAttributes: SavedQueryAttributes = {
title: 'foo',
@ -17,416 +29,90 @@ const savedQueryAttributes: SavedQueryAttributes = {
language: 'kuery',
query: 'response:200',
},
filters: [],
};
const savedQueryAttributesBar: SavedQueryAttributes = {
title: 'bar',
description: 'baz',
query: {
language: 'kuery',
query: 'response:200',
},
};
const savedQueryAttributesWithFilters: SavedQueryAttributes = {
...savedQueryAttributes,
filters: [
{
query: { match_all: {} },
$state: { store: FilterStateStore.APP_STATE },
meta: {
disabled: false,
negate: false,
alias: null,
},
},
],
timefilter: {
to: 'now',
from: 'now-15m',
refreshInterval: {
pause: false,
value: 0,
},
},
};
const mockSavedObjectsClient = {
create: jest.fn(),
error: jest.fn(),
find: jest.fn(),
resolve: jest.fn(),
delete: jest.fn(),
};
const {
deleteSavedQuery,
getSavedQuery,
findSavedQueries,
saveQuery,
getAllSavedQueries,
getSavedQueryCount,
} = createSavedQueryService(
// @ts-ignore
mockSavedObjectsClient
);
describe('saved query service', () => {
afterEach(() => {
mockSavedObjectsClient.create.mockReset();
mockSavedObjectsClient.find.mockReset();
mockSavedObjectsClient.resolve.mockReset();
mockSavedObjectsClient.delete.mockReset();
http.post.mockReset();
http.get.mockReset();
http.delete.mockReset();
});
describe('saveQuery', function () {
it('should create a saved object for the given attributes', async () => {
mockSavedObjectsClient.create.mockReturnValue({
id: 'foo',
attributes: savedQueryAttributes,
describe('createQuery', function () {
it('should post the stringified given attributes', async () => {
await createQuery(savedQueryAttributes);
expect(http.post).toBeCalled();
expect(http.post).toHaveBeenCalledWith('/api/saved_query/_create', {
body: '{"title":"foo","description":"bar","query":{"language":"kuery","query":"response:200"},"filters":[]}',
});
const response = await saveQuery(savedQueryAttributes);
expect(mockSavedObjectsClient.create).toHaveBeenCalledWith('query', savedQueryAttributes, {
id: 'foo',
});
expect(response).toEqual({ id: 'foo', attributes: savedQueryAttributes });
});
it('should allow overwriting an existing saved query', async () => {
mockSavedObjectsClient.create.mockReturnValue({
id: 'foo',
attributes: savedQueryAttributes,
});
const response = await saveQuery(savedQueryAttributes, { overwrite: true });
expect(mockSavedObjectsClient.create).toHaveBeenCalledWith('query', savedQueryAttributes, {
id: 'foo',
overwrite: true,
});
expect(response).toEqual({ id: 'foo', attributes: savedQueryAttributes });
});
it('should optionally accept filters and timefilters in object format', async () => {
const serializedSavedQueryAttributesWithFilters = {
...savedQueryAttributesWithFilters,
filters: savedQueryAttributesWithFilters.filters,
timefilter: savedQueryAttributesWithFilters.timefilter,
};
mockSavedObjectsClient.create.mockReturnValue({
id: 'foo',
attributes: serializedSavedQueryAttributesWithFilters,
});
const response = await saveQuery(savedQueryAttributesWithFilters);
expect(mockSavedObjectsClient.create).toHaveBeenCalledWith(
'query',
serializedSavedQueryAttributesWithFilters,
{ id: 'foo' }
);
expect(response).toEqual({ id: 'foo', attributes: savedQueryAttributesWithFilters });
});
it('should throw an error when saved objects client returns error', async () => {
mockSavedObjectsClient.create.mockReturnValue({
error: {
error: '123',
message: 'An Error',
},
});
let error = null;
try {
await saveQuery(savedQueryAttributes);
} catch (e) {
error = e;
}
expect(error).not.toBe(null);
});
it('should throw an error if the saved query does not have a title', async () => {
let error = null;
try {
await saveQuery({ ...savedQueryAttributes, title: '' });
} catch (e) {
error = e;
}
expect(error).not.toBe(null);
});
});
describe('findSavedQueries', function () {
it('should find and return saved queries without search text or pagination parameters', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: savedQueryAttributes }],
total: 5,
});
const response = await findSavedQueries();
expect(response.queries).toEqual([{ id: 'foo', attributes: savedQueryAttributes }]);
});
it('should return the total count along with the requested queries', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: savedQueryAttributes }],
total: 5,
});
const response = await findSavedQueries();
expect(response.total).toEqual(5);
});
it('should find and return saved queries with search text matching the title field', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: savedQueryAttributes }],
total: 5,
});
const response = await findSavedQueries('foo');
expect(mockSavedObjectsClient.find).toHaveBeenCalledWith({
page: 1,
perPage: 50,
search: 'foo',
searchFields: ['title^5', 'description'],
sortField: '_score',
type: 'query',
});
expect(response.queries).toEqual([{ id: 'foo', attributes: savedQueryAttributes }]);
});
it('should find and return parsed filters and timefilters items', async () => {
const serializedSavedQueryAttributesWithFilters = {
...savedQueryAttributesWithFilters,
filters: savedQueryAttributesWithFilters.filters,
timefilter: savedQueryAttributesWithFilters.timefilter,
};
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: serializedSavedQueryAttributesWithFilters }],
total: 5,
});
const response = await findSavedQueries('bar');
expect(response.queries).toEqual([
{ id: 'foo', attributes: savedQueryAttributesWithFilters },
]);
});
it('should return an array of saved queries', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: savedQueryAttributes }],
total: 5,
});
const response = await findSavedQueries();
expect(response.queries).toEqual(
expect.objectContaining([
{
attributes: {
description: 'bar',
query: { language: 'kuery', query: 'response:200' },
title: 'foo',
},
id: 'foo',
},
])
);
});
it('should accept perPage and page properties', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [
{ id: 'foo', attributes: savedQueryAttributes },
{ id: 'bar', attributes: savedQueryAttributesBar },
],
total: 5,
});
const response = await findSavedQueries(undefined, 2, 1);
expect(mockSavedObjectsClient.find).toHaveBeenCalledWith({
page: 1,
perPage: 2,
search: '',
searchFields: ['title^5', 'description'],
sortField: '_score',
type: 'query',
});
expect(response.queries).toEqual(
expect.objectContaining([
{
attributes: {
description: 'bar',
query: { language: 'kuery', query: 'response:200' },
title: 'foo',
},
id: 'foo',
},
{
attributes: {
description: 'baz',
query: { language: 'kuery', query: 'response:200' },
title: 'bar',
},
id: 'bar',
},
])
);
});
});
describe('getSavedQuery', function () {
it('should retrieve a saved query by id', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'foo',
attributes: savedQueryAttributes,
},
outcome: 'exactMatch',
describe('updateQuery', function () {
it('should put the ID & stringified given attributes', async () => {
await updateQuery('foo', savedQueryAttributes);
expect(http.put).toBeCalled();
expect(http.put).toHaveBeenCalledWith('/api/saved_query/foo', {
body: '{"title":"foo","description":"bar","query":{"language":"kuery","query":"response:200"},"filters":[]}',
});
const response = await getSavedQuery('foo');
expect(response).toEqual({ id: 'foo', attributes: savedQueryAttributes });
});
it('should only return saved queries', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'foo',
attributes: savedQueryAttributes,
},
outcome: 'exactMatch',
});
await getSavedQuery('foo');
expect(mockSavedObjectsClient.resolve).toHaveBeenCalledWith('query', 'foo');
});
it('should parse a json query', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'food',
attributes: {
title: 'food',
description: 'bar',
query: {
language: 'kuery',
query: '{"x": "y"}',
},
},
},
outcome: 'exactMatch',
});
const response = await getSavedQuery('food');
expect(response.attributes.query.query).toEqual({ x: 'y' });
});
it('should handle null string', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'food',
attributes: {
title: 'food',
description: 'bar',
query: {
language: 'kuery',
query: 'null',
},
},
},
outcome: 'exactMatch',
});
const response = await getSavedQuery('food');
expect(response.attributes.query.query).toEqual('null');
});
it('should handle null quoted string', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'food',
attributes: {
title: 'food',
description: 'bar',
query: {
language: 'kuery',
query: '"null"',
},
},
},
outcome: 'exactMatch',
});
const response = await getSavedQuery('food');
expect(response.attributes.query.query).toEqual('"null"');
});
it('should not lose quotes', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'food',
attributes: {
title: 'food',
description: 'bar',
query: {
language: 'kuery',
query: '"Bob"',
},
},
},
outcome: 'exactMatch',
});
const response = await getSavedQuery('food');
expect(response.attributes.query.query).toEqual('"Bob"');
});
it('should throw if conflict', async () => {
mockSavedObjectsClient.resolve.mockReturnValue({
saved_object: {
id: 'foo',
attributes: savedQueryAttributes,
},
outcome: 'conflict',
});
const result = getSavedQuery('food');
expect(result).rejects.toMatchInlineSnapshot(
`[Error: Multiple saved queries found with ID: food (legacy URL alias conflict)]`
);
});
});
describe('deleteSavedQuery', function () {
it('should delete the saved query for the given ID', async () => {
await deleteSavedQuery('foo');
expect(mockSavedObjectsClient.delete).toHaveBeenCalledWith('query', 'foo');
});
});
describe('getAllSavedQueries', function () {
it('should return all the saved queries', async () => {
mockSavedObjectsClient.find.mockReturnValue({
savedObjects: [{ id: 'foo', attributes: savedQueryAttributes }],
it('should post and extract the saved queries from the response', async () => {
http.post.mockResolvedValue({
total: 0,
savedQueries: [{ attributes: savedQueryAttributes }],
});
const response = await getAllSavedQueries();
expect(response).toEqual(
expect.objectContaining([
{
attributes: {
description: 'bar',
query: { language: 'kuery', query: 'response:200' },
title: 'foo',
},
id: 'foo',
},
])
);
expect(mockSavedObjectsClient.find).toHaveBeenCalledWith({
page: 1,
perPage: 0,
type: 'query',
const result = await getAllSavedQueries();
expect(http.post).toBeCalled();
expect(http.post).toHaveBeenCalledWith('/api/saved_query/_find', {
body: '{"perPage":10000}',
});
expect(result).toEqual([{ attributes: savedQueryAttributes }]);
});
});
describe('findSavedQueries', function () {
it('should post and return the total & saved queries', async () => {
http.post.mockResolvedValue({
total: 0,
savedQueries: [{ attributes: savedQueryAttributes }],
});
const result = await findSavedQueries();
expect(http.post).toBeCalled();
expect(http.post).toHaveBeenCalledWith('/api/saved_query/_find', {
body: '{"page":1,"perPage":50,"search":""}',
});
expect(result).toEqual({
queries: [{ attributes: savedQueryAttributes }],
total: 0,
});
});
});
describe('getSavedQuery', function () {
it('should get the given ID', async () => {
await getSavedQuery('my_id');
expect(http.get).toBeCalled();
expect(http.get).toHaveBeenCalledWith('/api/saved_query/my_id');
});
});
describe('deleteSavedQuery', function () {
it('should delete the given ID', async () => {
await deleteSavedQuery('my_id');
expect(http.delete).toBeCalled();
expect(http.delete).toHaveBeenCalledWith('/api/saved_query/my_id');
});
});
describe('getSavedQueryCount', function () {
it('should return the total number of saved queries', async () => {
mockSavedObjectsClient.find.mockReturnValue({
total: 1,
});
const response = await getSavedQueryCount();
expect(response).toEqual(1);
it('should get the total', async () => {
await getSavedQueryCount();
expect(http.get).toBeCalled();
expect(http.get).toHaveBeenCalledWith('/api/saved_query/_count');
});
});
});

View file

@ -6,163 +6,61 @@
* Side Public License, v 1.
*/
import { isObject } from 'lodash';
import { SavedObjectsClientContract, SavedObjectAttributes } from 'src/core/public';
import { SavedQueryAttributes, SavedQuery, SavedQueryService } from './types';
import { HttpStart } from 'src/core/public';
import { SavedQuery } from './types';
import { SavedQueryAttributes } from '../../../common';
type SerializedSavedQueryAttributes = SavedObjectAttributes &
SavedQueryAttributes & {
query: {
query: string;
language: string;
};
export const createSavedQueryService = (http: HttpStart) => {
const createQuery = async (attributes: SavedQueryAttributes, { overwrite = false } = {}) => {
const savedQuery = await http.post('/api/saved_query/_create', {
body: JSON.stringify(attributes),
});
return savedQuery;
};
export const createSavedQueryService = (
savedObjectsClient: SavedObjectsClientContract
): SavedQueryService => {
const saveQuery = async (attributes: SavedQueryAttributes, { overwrite = false } = {}) => {
if (!attributes.title.length) {
// title is required extra check against circumventing the front end
throw new Error('Cannot create saved query without a title');
}
const query = {
query:
typeof attributes.query.query === 'string'
? attributes.query.query
: JSON.stringify(attributes.query.query),
language: attributes.query.language,
};
const queryObject: SerializedSavedQueryAttributes = {
title: attributes.title.trim(), // trim whitespace before save as an extra precaution against circumventing the front end
description: attributes.description,
query,
};
if (attributes.filters) {
queryObject.filters = attributes.filters;
}
if (attributes.timefilter) {
queryObject.timefilter = attributes.timefilter;
}
let rawQueryResponse;
if (!overwrite) {
rawQueryResponse = await savedObjectsClient.create('query', queryObject, {
id: attributes.title,
});
} else {
rawQueryResponse = await savedObjectsClient.create('query', queryObject, {
id: attributes.title,
overwrite: true,
});
}
if (rawQueryResponse.error) {
throw new Error(rawQueryResponse.error.message);
}
return parseSavedQueryObject(rawQueryResponse);
const updateQuery = async (id: string, attributes: SavedQueryAttributes) => {
const savedQuery = await http.put(`/api/saved_query/${id}`, {
body: JSON.stringify(attributes),
});
return savedQuery;
};
// we have to tell the saved objects client how many to fetch, otherwise it defaults to fetching 20 per page
const getAllSavedQueries = async (): Promise<SavedQuery[]> => {
const count = await getSavedQueryCount();
const response = await savedObjectsClient.find<SerializedSavedQueryAttributes>({
type: 'query',
perPage: count,
page: 1,
const { savedQueries } = await http.post('/api/saved_query/_find', {
body: JSON.stringify({ perPage: 10000 }),
});
return response.savedObjects.map(
(savedObject: { id: string; attributes: SerializedSavedQueryAttributes }) =>
parseSavedQueryObject(savedObject)
);
return savedQueries;
};
// findSavedQueries will do a 'match_all' if no search string is passed in
const findSavedQueries = async (
searchText: string = '',
search: string = '',
perPage: number = 50,
activePage: number = 1
page: number = 1
): Promise<{ total: number; queries: SavedQuery[] }> => {
const response = await savedObjectsClient.find<SerializedSavedQueryAttributes>({
type: 'query',
search: searchText,
searchFields: ['title^5', 'description'],
sortField: '_score',
perPage,
page: activePage,
const { total, savedQueries: queries } = await http.post('/api/saved_query/_find', {
body: JSON.stringify({ page, perPage, search }),
});
return {
total: response.total,
queries: response.savedObjects.map(
(savedObject: { id: string; attributes: SerializedSavedQueryAttributes }) =>
parseSavedQueryObject(savedObject)
),
};
return { total, queries };
};
const getSavedQuery = async (id: string): Promise<SavedQuery> => {
const { saved_object: savedObject, outcome } =
await savedObjectsClient.resolve<SerializedSavedQueryAttributes>('query', id);
if (outcome === 'conflict') {
throw new Error(`Multiple saved queries found with ID: ${id} (legacy URL alias conflict)`);
} else if (savedObject.error) {
throw new Error(savedObject.error.message);
}
return parseSavedQueryObject(savedObject);
const getSavedQuery = (id: string): Promise<SavedQuery> => {
return http.get(`/api/saved_query/${id}`);
};
const deleteSavedQuery = async (id: string) => {
return await savedObjectsClient.delete('query', id);
};
const parseSavedQueryObject = (savedQuery: {
id: string;
attributes: SerializedSavedQueryAttributes;
}) => {
let queryString: string | object = savedQuery.attributes.query.query;
try {
const parsedQueryString: object = JSON.parse(savedQuery.attributes.query.query);
if (isObject(parsedQueryString)) {
queryString = parsedQueryString;
}
} catch (e) {} // eslint-disable-line no-empty
const savedQueryItems: SavedQueryAttributes = {
title: savedQuery.attributes.title || '',
description: savedQuery.attributes.description || '',
query: {
query: queryString,
language: savedQuery.attributes.query.language,
},
};
if (savedQuery.attributes.filters) {
savedQueryItems.filters = savedQuery.attributes.filters;
}
if (savedQuery.attributes.timefilter) {
savedQueryItems.timefilter = savedQuery.attributes.timefilter;
}
return {
id: savedQuery.id,
attributes: savedQueryItems,
};
const deleteSavedQuery = (id: string) => {
return http.delete(`/api/saved_query/${id}`);
};
const getSavedQueryCount = async (): Promise<number> => {
const response = await savedObjectsClient.find<SerializedSavedQueryAttributes>({
type: 'query',
perPage: 0,
page: 1,
});
return response.total;
return http.get('/api/saved_query/_count');
};
return {
saveQuery,
createQuery,
updateQuery,
getAllSavedQueries,
findSavedQueries,
getSavedQuery,

View file

@ -26,10 +26,8 @@ export interface SavedQueryAttributes {
}
export interface SavedQueryService {
saveQuery: (
attributes: SavedQueryAttributes,
config?: { overwrite: boolean }
) => Promise<SavedQuery>;
createQuery: (attributes: SavedQueryAttributes) => Promise<SavedQuery>;
updateQuery: (id: string, attributes: SavedQueryAttributes) => Promise<SavedQuery>;
getAllSavedQueries: () => Promise<SavedQuery[]>;
findSavedQueries: (
searchText?: string,

View file

@ -74,7 +74,7 @@ describe('connect_to_global_state', () => {
queryServiceStart = queryService.start({
uiSettings: setupMock.uiSettings,
storage: new Storage(new StubBrowserStorage()),
savedObjectsClient: startMock.savedObjects.client,
http: startMock.http,
});
filterManager = queryServiceStart.filterManager;
timeFilter = queryServiceStart.timefilter.timefilter;
@ -308,7 +308,7 @@ describe('connect_to_app_state', () => {
queryServiceStart = queryService.start({
uiSettings: setupMock.uiSettings,
storage: new Storage(new StubBrowserStorage()),
savedObjectsClient: startMock.savedObjects.client,
http: startMock.http,
});
filterManager = queryServiceStart.filterManager;
@ -487,7 +487,7 @@ describe('filters with different state', () => {
queryServiceStart = queryService.start({
uiSettings: setupMock.uiSettings,
storage: new Storage(new StubBrowserStorage()),
savedObjectsClient: startMock.savedObjects.client,
http: startMock.http,
});
filterManager = queryServiceStart.filterManager;

View file

@ -68,7 +68,7 @@ describe('sync_query_state_with_url', () => {
queryServiceStart = queryService.start({
uiSettings: startMock.uiSettings,
storage: new Storage(new StubBrowserStorage()),
savedObjectsClient: startMock.savedObjects.client,
http: startMock.http,
});
filterManager = queryServiceStart.filterManager;
timefilter = queryServiceStart.timefilter.timefilter;

View file

@ -39,6 +39,8 @@ import {
geoPointFunction,
queryFilterFunction,
rangeFilterFunction,
removeFilterFunction,
selectFilterFunction,
kibanaFilterFunction,
phraseFilterFunction,
esRawResponse,
@ -139,6 +141,8 @@ export class SearchService implements Plugin<ISearchSetup, ISearchStart> {
expressions.registerFunction(existsFilterFunction);
expressions.registerFunction(queryFilterFunction);
expressions.registerFunction(rangeFilterFunction);
expressions.registerFunction(removeFilterFunction);
expressions.registerFunction(selectFilterFunction);
expressions.registerFunction(phraseFilterFunction);
expressions.registerType(kibanaContext);

View file

@ -24,10 +24,9 @@ import {
import { i18n } from '@kbn/i18n';
import { sortBy, isEqual } from 'lodash';
import { SavedQuery, SavedQueryService } from '../..';
import { SavedQueryAttributes } from '../../query';
interface Props {
savedQuery?: SavedQueryAttributes;
savedQuery?: SavedQuery;
savedQueryService: SavedQueryService;
onSave: (savedQueryMeta: SavedQueryMeta) => void;
onClose: () => void;
@ -36,6 +35,7 @@ interface Props {
}
export interface SavedQueryMeta {
id?: string;
title: string;
description: string;
shouldIncludeFilters: boolean;
@ -50,18 +50,18 @@ export function SaveQueryForm({
showFilterOption = true,
showTimeFilterOption = true,
}: Props) {
const [title, setTitle] = useState(savedQuery ? savedQuery.title : '');
const [title, setTitle] = useState(savedQuery?.attributes.title ?? '');
const [enabledSaveButton, setEnabledSaveButton] = useState(Boolean(savedQuery));
const [description, setDescription] = useState(savedQuery ? savedQuery.description : '');
const [description, setDescription] = useState(savedQuery?.attributes.description ?? '');
const [savedQueries, setSavedQueries] = useState<SavedQuery[]>([]);
const [shouldIncludeFilters, setShouldIncludeFilters] = useState(
savedQuery ? !!savedQuery.filters : true
Boolean(savedQuery?.attributes.filters ?? true)
);
// Defaults to false because saved queries are meant to be as portable as possible and loading
// a saved query with a time filter will override whatever the current value of the global timepicker
// is. We expect this option to be used rarely and only when the user knows they want this behavior.
const [shouldIncludeTimefilter, setIncludeTimefilter] = useState(
savedQuery ? !!savedQuery.timefilter : false
Boolean(savedQuery?.attributes.timefilter ?? false)
);
const [formErrors, setFormErrors] = useState<string[]>([]);
@ -82,7 +82,7 @@ export function SaveQueryForm({
useEffect(() => {
const fetchQueries = async () => {
const allSavedQueries = await savedQueryService.getAllSavedQueries();
const sortedAllSavedQueries = sortBy(allSavedQueries, 'attributes.title') as SavedQuery[];
const sortedAllSavedQueries = sortBy(allSavedQueries, 'attributes.title');
setSavedQueries(sortedAllSavedQueries);
};
fetchQueries();
@ -109,13 +109,22 @@ export function SaveQueryForm({
const onClickSave = useCallback(() => {
if (validate()) {
onSave({
id: savedQuery?.id,
title,
description,
shouldIncludeFilters,
shouldIncludeTimefilter,
});
}
}, [validate, onSave, title, description, shouldIncludeFilters, shouldIncludeTimefilter]);
}, [
validate,
onSave,
savedQuery?.id,
title,
description,
shouldIncludeFilters,
shouldIncludeTimefilter,
]);
const onInputChange = useCallback((event) => {
setEnabledSaveButton(Boolean(event.target.value));

View file

@ -245,11 +245,12 @@ class SearchBarUI extends Component<SearchBarProps, State> {
try {
let response;
if (this.props.savedQuery && !saveAsNew) {
response = await this.savedQueryService.saveQuery(savedQueryAttributes, {
overwrite: true,
});
response = await this.savedQueryService.updateQuery(
savedQueryMeta.id!,
savedQueryAttributes
);
} else {
response = await this.savedQueryService.saveQuery(savedQueryAttributes);
response = await this.savedQueryService.createQuery(savedQueryAttributes);
}
this.services.notifications.toasts.addSuccess(
@ -423,7 +424,7 @@ class SearchBarUI extends Component<SearchBarProps, State> {
{this.state.showSaveQueryModal ? (
<SaveQueryForm
savedQuery={this.props.savedQuery ? this.props.savedQuery.attributes : undefined}
savedQuery={this.props.savedQuery ? this.props.savedQuery : undefined}
savedQueryService={this.savedQueryService}
onSave={this.onSave}
onClose={() => this.setState({ showSaveQueryModal: false })}

View file

@ -8,11 +8,21 @@
import { CoreSetup, Plugin } from 'kibana/server';
import { querySavedObjectType } from '../saved_objects';
import { extract, inject, telemetry, getAllMigrations } from '../../common/query/persistable_state';
import { extract, getAllMigrations, inject, telemetry } from '../../common/query/persistable_state';
import { registerSavedQueryRoutes } from './routes';
import {
registerSavedQueryRouteHandlerContext,
SavedQueryRouteHandlerContext,
} from './route_handler_context';
export class QueryService implements Plugin<void> {
public setup(core: CoreSetup) {
core.savedObjects.registerType(querySavedObjectType);
core.http.registerRouteHandlerContext<SavedQueryRouteHandlerContext, 'savedQuery'>(
'savedQuery',
registerSavedQueryRouteHandlerContext
);
registerSavedQueryRoutes(core);
return {
filterManager: {

View file

@ -0,0 +1,566 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { coreMock } from '../../../../core/server/mocks';
import {
DATA_VIEW_SAVED_OBJECT_TYPE,
FilterStateStore,
SavedObject,
SavedQueryAttributes,
} from '../../common';
import { registerSavedQueryRouteHandlerContext } from './route_handler_context';
import { SavedObjectsFindResponse, SavedObjectsUpdateResponse } from 'kibana/server';
// Shared test fixtures for the saved-query route handler context suite below.
// A mocked core request-handler context supplies the saved-objects client that
// every test inspects/stubs.
const mockContext = {
  core: coreMock.createRequestHandlerContext(),
};
// Pull the mocked SavedObjectsClient out of the context so tests can stub
// create/resolve/find/update/delete and assert on their calls.
const {
  core: {
    savedObjects: { client: mockSavedObjectsClient },
  },
} = mockContext;
// The context under test: the savedQuery route handler context wired to the mocks.
const context = registerSavedQueryRouteHandlerContext(mockContext);

// Minimal valid saved-query attributes (KQL query, no filters).
const savedQueryAttributes: SavedQueryAttributes = {
  title: 'foo',
  description: 'bar',
  query: {
    language: 'kuery',
    query: 'response:200',
  },
  filters: [],
};
// Second fixture with a different title/description, used for paging tests.
const savedQueryAttributesBar: SavedQueryAttributes = {
  title: 'bar',
  description: 'baz',
  query: {
    language: 'kuery',
    query: 'response:200',
  },
};
// Fixture exercising the optional filters + timefilter fields.
const savedQueryAttributesWithFilters: SavedQueryAttributes = {
  ...savedQueryAttributes,
  filters: [
    {
      query: { match_all: {} },
      $state: { store: FilterStateStore.APP_STATE },
      meta: {
        index: 'my-index',
        disabled: false,
        negate: false,
        alias: null,
      },
    },
  ],
  timefilter: {
    to: 'now',
    from: 'now-15m',
    refreshInterval: {
      pause: false,
      value: 0,
    },
  },
};
// Saved-object references matching the filter's `meta.index` above; used when
// testing reference injection on `get`.
const savedQueryReferences = [
  {
    type: DATA_VIEW_SAVED_OBJECT_TYPE,
    name: 'my-index',
    id: 'my-index',
  },
];
// Jest suite for the savedQuery route handler context: verifies that the
// context's create/update/find/get/delete/count methods translate to the
// correct SavedObjectsClient calls and (de)serialize attributes correctly.
describe('saved query route handler context', () => {
  beforeEach(() => {
    // Reset call history on the shared mocked client between tests.
    // NOTE(review): `update` is stubbed in the update suite but not cleared
    // here — confirm whether it should be added to this list.
    mockSavedObjectsClient.create.mockClear();
    mockSavedObjectsClient.resolve.mockClear();
    mockSavedObjectsClient.find.mockClear();
    mockSavedObjectsClient.delete.mockClear();
  });

  describe('create', function () {
    it('should create a saved object for the given attributes', async () => {
      const mockResponse: SavedObject<SavedQueryAttributes> = {
        id: 'foo',
        type: 'query',
        attributes: savedQueryAttributes,
        references: [],
      };
      mockSavedObjectsClient.create.mockResolvedValue(mockResponse);

      const response = await context.create(savedQueryAttributes);

      // The context should create a saved object of type 'query' and strip
      // the saved-object envelope down to { id, attributes } in its response.
      expect(mockSavedObjectsClient.create).toHaveBeenCalledWith('query', savedQueryAttributes, {
        references: [],
      });
      expect(response).toEqual({
        id: 'foo',
        attributes: savedQueryAttributes,
      });
    });

    it('should optionally accept query in object format', async () => {
      // Lucene queries may carry an object body rather than a string.
      const savedQueryAttributesWithQueryObject: SavedQueryAttributes = {
        ...savedQueryAttributes,
        query: {
          language: 'lucene',
          query: { match_all: {} },
        },
      };
      const mockResponse: SavedObject<SavedQueryAttributes> = {
        id: 'foo',
        type: 'query',
        attributes: savedQueryAttributesWithQueryObject,
        references: [],
      };
      mockSavedObjectsClient.create.mockResolvedValue(mockResponse);

      const { attributes } = await context.create(savedQueryAttributesWithQueryObject);

      expect(attributes).toEqual(savedQueryAttributesWithQueryObject);
    });

    it('should optionally accept filters and timefilters in object format', async () => {
      const serializedSavedQueryAttributesWithFilters = {
        ...savedQueryAttributesWithFilters,
        filters: savedQueryAttributesWithFilters.filters,
        timefilter: savedQueryAttributesWithFilters.timefilter,
      };
      const mockResponse: SavedObject<SavedQueryAttributes> = {
        id: 'foo',
        type: 'query',
        attributes: serializedSavedQueryAttributesWithFilters,
        references: [],
      };
      mockSavedObjectsClient.create.mockResolvedValue(mockResponse);

      await context.create(savedQueryAttributesWithFilters);

      // Inspect the raw create() call: filters/timefilter must survive
      // serialization of the attributes.
      const [[type, attributes]] = mockSavedObjectsClient.create.mock.calls;
      const { filters = [], timefilter } = attributes as SavedQueryAttributes;
      expect(type).toEqual('query');
      expect(filters.length).toBe(1);
      expect(timefilter).toEqual(savedQueryAttributesWithFilters.timefilter);
    });

    it('should throw an error when saved objects client returns error', async () => {
      mockSavedObjectsClient.create.mockResolvedValue({
        error: {
          error: '123',
          message: 'An Error',
        },
      } as SavedObject);

      const response = context.create(savedQueryAttributes);

      // NOTE(review): `.rejects` assertions are not awaited/returned here (or
      // in the similar tests below), so a failure may escape jest — consider
      // `await expect(...)`.
      expect(response).rejects.toMatchInlineSnapshot(`[Error: An Error]`);
    });

    it('should throw an error if the saved query does not have a title', async () => {
      const response = context.create({ ...savedQueryAttributes, title: '' });
      expect(response).rejects.toMatchInlineSnapshot(
        `[Error: Cannot create saved query without a title]`
      );
    });
  });

  describe('update', function () {
    it('should update a saved object for the given attributes', async () => {
      const mockResponse: SavedObject<SavedQueryAttributes> = {
        id: 'foo',
        type: 'query',
        attributes: savedQueryAttributes,
        references: [],
      };
      mockSavedObjectsClient.update.mockResolvedValue(mockResponse);

      const response = await context.update('foo', savedQueryAttributes);

      expect(mockSavedObjectsClient.update).toHaveBeenCalledWith(
        'query',
        'foo',
        savedQueryAttributes,
        {
          references: [],
        }
      );
      expect(response).toEqual({
        id: 'foo',
        attributes: savedQueryAttributes,
      });
    });

    it('should throw an error when saved objects client returns error', async () => {
      mockSavedObjectsClient.update.mockResolvedValue({
        error: {
          error: '123',
          message: 'An Error',
        },
      } as SavedObjectsUpdateResponse);

      const response = context.update('foo', savedQueryAttributes);

      expect(response).rejects.toMatchInlineSnapshot(`[Error: An Error]`);
    });

    it('should throw an error if the saved query does not have a title', async () => {
      // NOTE(review): this calls context.create, not context.update — likely a
      // copy/paste carry-over from the create suite; confirm intent.
      const response = context.create({ ...savedQueryAttributes, title: '' });
      expect(response).rejects.toMatchInlineSnapshot(
        `[Error: Cannot create saved query without a title]`
      );
    });
  });

  describe('find', function () {
    it('should find and return saved queries without search text or pagination parameters', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          {
            id: 'foo',
            type: 'query',
            score: 0,
            attributes: savedQueryAttributes,
            references: [],
          },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find();
      expect(response.savedQueries).toEqual([{ id: 'foo', attributes: savedQueryAttributes }]);
    });

    it('should return the total count along with the requested queries', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          { id: 'foo', type: 'query', score: 0, attributes: savedQueryAttributes, references: [] },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find();
      // `total` is passed through untouched from the saved-objects response.
      expect(response.total).toEqual(5);
    });

    it('should find and return saved queries with search text matching the title field', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          { id: 'foo', type: 'query', score: 0, attributes: savedQueryAttributes, references: [] },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find({ search: 'foo' });

      // Defaults applied by the context when only `search` is provided:
      // page 1, perPage 50.
      expect(mockSavedObjectsClient.find).toHaveBeenCalledWith({
        page: 1,
        perPage: 50,
        search: 'foo',
        type: 'query',
      });
      expect(response.savedQueries).toEqual([{ id: 'foo', attributes: savedQueryAttributes }]);
    });

    it('should find and return parsed filters and timefilters items', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          {
            id: 'foo',
            type: 'query',
            score: 0,
            attributes: savedQueryAttributesWithFilters,
            references: savedQueryReferences,
          },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find({ search: 'bar' });
      expect(response.savedQueries).toEqual([
        { id: 'foo', attributes: savedQueryAttributesWithFilters },
      ]);
    });

    it('should return an array of saved queries', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          { id: 'foo', type: 'query', score: 0, attributes: savedQueryAttributes, references: [] },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find();
      expect(response.savedQueries).toEqual(
        expect.objectContaining([
          {
            attributes: {
              description: 'bar',
              query: { language: 'kuery', query: 'response:200' },
              filters: [],
              title: 'foo',
            },
            id: 'foo',
          },
        ])
      );
    });

    it('should accept perPage and page properties', async () => {
      const mockResponse: SavedObjectsFindResponse<SavedQueryAttributes> = {
        page: 0,
        per_page: 0,
        saved_objects: [
          { id: 'foo', type: 'query', score: 0, attributes: savedQueryAttributes, references: [] },
          {
            id: 'bar',
            type: 'query',
            score: 0,
            attributes: savedQueryAttributesBar,
            references: [],
          },
        ],
        total: 5,
      };
      mockSavedObjectsClient.find.mockResolvedValue(mockResponse);

      const response = await context.find({
        page: 1,
        perPage: 2,
      });

      // Explicit paging params are forwarded; `search` defaults to ''.
      expect(mockSavedObjectsClient.find).toHaveBeenCalledWith({
        page: 1,
        perPage: 2,
        search: '',
        type: 'query',
      });
      expect(response.savedQueries).toEqual(
        expect.objectContaining([
          {
            attributes: {
              description: 'bar',
              query: { language: 'kuery', query: 'response:200' },
              filters: [],
              title: 'foo',
            },
            id: 'foo',
          },
          {
            attributes: {
              description: 'baz',
              query: { language: 'kuery', query: 'response:200' },
              filters: [],
              title: 'bar',
            },
            id: 'bar',
          },
        ])
      );
    });
  });

  describe('get', function () {
    it('should retrieve a saved query by id', async () => {
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'foo',
          type: 'query',
          attributes: savedQueryAttributes,
          references: [],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('foo');
      expect(response).toEqual({ id: 'foo', attributes: savedQueryAttributes });
    });

    it('should only return saved queries', async () => {
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'foo',
          type: 'query',
          attributes: savedQueryAttributes,
          references: [],
        },
        outcome: 'exactMatch',
      });

      await context.get('foo');
      // `get` uses resolve() (legacy-URL-alias aware) scoped to type 'query'.
      expect(mockSavedObjectsClient.resolve).toHaveBeenCalledWith('query', 'foo');
    });

    it('should parse a json query', async () => {
      // A query string holding serialized JSON is parsed back into an object.
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'food',
          type: 'query',
          attributes: {
            title: 'food',
            description: 'bar',
            query: {
              language: 'kuery',
              query: '{"x": "y"}',
            },
          },
          references: [],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('food');
      expect(response.attributes.query.query).toEqual({ x: 'y' });
    });

    it('should handle null string', async () => {
      // The literal string 'null' must not be JSON-parsed into null.
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'food',
          type: 'query',
          attributes: {
            title: 'food',
            description: 'bar',
            query: {
              language: 'kuery',
              query: 'null',
            },
          },
          references: [],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('food');
      expect(response.attributes.query.query).toEqual('null');
    });

    it('should handle null quoted string', async () => {
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'food',
          type: 'query',
          attributes: {
            title: 'food',
            description: 'bar',
            query: {
              language: 'kuery',
              query: '"null"',
            },
          },
          references: [],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('food');
      expect(response.attributes.query.query).toEqual('"null"');
    });

    it('should not lose quotes', async () => {
      // Quoted KQL phrases must round-trip with their quotes intact.
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'food',
          type: 'query',
          attributes: {
            title: 'food',
            description: 'bar',
            query: {
              language: 'kuery',
              query: '"Bob"',
            },
          },
          references: [],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('food');
      expect(response.attributes.query.query).toEqual('"Bob"');
    });

    it('should inject references', async () => {
      // Filter `meta.index` should be rewritten to the referenced data view id.
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'food',
          type: 'query',
          attributes: savedQueryAttributesWithFilters,
          references: [
            {
              id: 'my-new-index',
              type: DATA_VIEW_SAVED_OBJECT_TYPE,
              name: 'my-index',
            },
          ],
        },
        outcome: 'exactMatch',
      });

      const response = await context.get('food');
      expect(response.attributes.filters[0].meta.index).toBe('my-new-index');
    });

    it('should throw if conflict', async () => {
      // A resolve() outcome other than 'exactMatch' means the id is ambiguous.
      mockSavedObjectsClient.resolve.mockResolvedValue({
        saved_object: {
          id: 'foo',
          type: 'query',
          attributes: savedQueryAttributes,
          references: [],
        },
        outcome: 'conflict',
      });

      const result = context.get('food');
      expect(result).rejects.toMatchInlineSnapshot(
        `[Error: Multiple saved queries found with ID: food (legacy URL alias conflict)]`
      );
    });
  });

  describe('delete', function () {
    it('should delete the saved query for the given ID', async () => {
      await context.delete('foo');
      expect(mockSavedObjectsClient.delete).toHaveBeenCalledWith('query', 'foo');
    });
  });

  describe('count', function () {
    it('should return the total number of saved queries', async () => {
      mockSavedObjectsClient.find.mockResolvedValue({
        total: 1,
        page: 0,
        per_page: 0,
        saved_objects: [],
      });

      const response = await context.count();
      expect(response).toEqual(1);
    });
  });
});

Some files were not shown because too many files have changed in this diff Show more