Merge branch 'master' into xpack-es-to-kbn-archiver-apm
This commit is contained in:
commit
fedfa17eb5
|
@ -4,7 +4,6 @@ library 'kibana-pipeline-library'
|
|||
kibanaLibrary.load()
|
||||
|
||||
def TASK_PARAM = params.TASK ?: params.CI_GROUP
|
||||
|
||||
// Looks like 'oss:ciGroup:1', 'oss:firefoxSmoke'
|
||||
def JOB_PARTS = TASK_PARAM.split(':')
|
||||
def IS_XPACK = JOB_PARTS[0] == 'xpack'
|
||||
|
@ -111,6 +110,8 @@ def getWorkerFromParams(isXpack, job, ciGroup) {
|
|||
return kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh')
|
||||
} else if (job == 'apiIntegration') {
|
||||
return kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh')
|
||||
} else if (job == 'pluginFunctional') {
|
||||
return kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh')
|
||||
} else {
|
||||
return kibanaPipeline.ossCiGroupProcess(ciGroup)
|
||||
}
|
||||
|
|
|
@ -13,12 +13,12 @@ pipeline {
|
|||
BASE_DIR = 'src/github.com/elastic/kibana'
|
||||
HOME = "${env.WORKSPACE}"
|
||||
E2E_DIR = 'x-pack/plugins/apm/e2e'
|
||||
PIPELINE_LOG_LEVEL = 'DEBUG'
|
||||
PIPELINE_LOG_LEVEL = 'INFO'
|
||||
KBN_OPTIMIZER_THEMES = 'v7light'
|
||||
}
|
||||
options {
|
||||
timeout(time: 1, unit: 'HOURS')
|
||||
buildDiscarder(logRotator(numToKeepStr: '40', artifactNumToKeepStr: '20', daysToKeepStr: '30'))
|
||||
buildDiscarder(logRotator(numToKeepStr: '30', artifactNumToKeepStr: '10', daysToKeepStr: '30'))
|
||||
timestamps()
|
||||
ansiColor('xterm')
|
||||
disableResume()
|
||||
|
|
|
@ -241,35 +241,136 @@ There are some exceptions where a separate repo makes sense. However, they are e
|
|||
|
||||
It may be tempting to get caught up in the dream of writing the next package which is published to npm and downloaded millions of times a week. Knowing the quality of developers that are working on Kibana, this is a real possibility. However, knowing which packages will see mass adoption is impossible to predict. Instead of jumping directly to writing code in a separate repo and accepting all of the complications that come along with it, prefer keeping code inside the Kibana repo. A [Kibana package](https://github.com/elastic/kibana/tree/master/packages) can be used to publish a package to npm, while still keeping the code inside the Kibana repo. Move code to an external repo only when there is a good reason, for example to enable external contributions.
|
||||
|
||||
## Hardening
|
||||
## Security best practices
|
||||
|
||||
Review the following items related to vulnerability and security risks.
|
||||
When writing code for Kibana, be sure to follow these best practices to avoid common vulnerabilities. Refer to the included Open Web
|
||||
Application Security Project (OWASP) references to learn more about these types of attacks.
|
||||
|
||||
- XSS
|
||||
- Check for usages of `dangerouslySetInnerHtml`, `Element.innerHTML`, `Element.outerHTML`
|
||||
- Ensure all user input is properly escaped.
|
||||
  - Ensure any input in `$.html`, `$.append`, `$.appendTo`, `$.prepend`, `$.prependTo` is escaped. Instead use `$.text`, or don't use jQuery at all.
|
||||
- CSRF
|
||||
- Ensure all APIs are running inside the Kibana HTTP service.
|
||||
- RCE
|
||||
- Ensure no usages of `eval`
|
||||
- Ensure no usages of dynamic requires
|
||||
- Check for template injection
|
||||
- Check for usages of templating libraries, including `_.template`, and ensure that user provided input isn't influencing the template and is only used as data for rendering the template.
|
||||
- Check for possible prototype pollution.
|
||||
- Prototype Pollution - more info [here](https://docs.google.com/document/d/19V-d9sb6IF-fbzF4iyiPpAropQNydCnoJApzSX5FdcI/edit?usp=sharing)
|
||||
- Check for instances of `anObject[a][b] = c` where a, b, and c are user defined. This includes code paths where the following logical code steps could be performed in separate files by completely different operations, or recursively using dynamic operations.
|
||||
  - Validate any user input, including API url-parameters/query-parameters/payloads, preferably against a schema which only allows specific keys/values. At a very minimum, black-list `__proto__` and `prototype.constructor` for use within keys
|
||||
- When calling APIs which spawn new processes or potentially perform code generation from strings, defensively protect against Prototype Pollution by checking `Object.hasOwnProperty` if the arguments to the APIs originate from an Object. An example is the Code app's [spawnProcess](https://github.com/elastic/kibana/blob/b49192626a8528af5d888545fb14cd1ce66a72e7/x-pack/legacy/plugins/code/server/lsp/workspace_command.ts#L40-L44).
|
||||
- Common Node.js offenders: `child_process.spawn`, `child_process.exec`, `eval`, `Function('some string')`, `vm.runIn*Context(x)`
|
||||
- Common Client-side offenders: `eval`, `Function('some string')`, `setTimeout('some string', num)`, `setInterval('some string', num)`
|
||||
- Check for accidental reveal of sensitive information
|
||||
- The biggest culprit is errors which contain stack traces or other sensitive information which end up in the HTTP Response
|
||||
- Check for mishandled API requests
|
||||
- Ensure no sensitive cookies are forwarded to external resources.
|
||||
- Ensure that all user controllable variables that are used in constructing a URL are escaped properly. This is relevant when using `transport.request` with the Elasticsearch client as no automatic escaping is performed.
|
||||
- Reverse tabnabbing - https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing
|
||||
- When there are user controllable links or hard-coded links to third-party domains that specify target="\_blank" or target="\_window", the `a` tag should have the rel="noreferrer noopener" attribute specified.
|
||||
- Allowing users to input markdown is a common culprit, a custom link renderer should be used
|
||||
- SSRF - https://www.owasp.org/index.php/Server_Side_Request_Forgery
|
||||
- All network requests made from the Kibana server should use an explicit configuration or white-list specified in the `kibana.yml`
|
||||
### Cross-site Scripting (XSS)
|
||||
|
||||
[_OWASP reference for XSS_](https://owasp.org/www-community/attacks/xss)
|
||||
|
||||
XSS is a class of attacks where malicious scripts are injected into vulnerable websites. Kibana defends against this by using the React
|
||||
framework to safely encode data that is rendered in pages, the EUI framework to [automatically sanitize
|
||||
links](https://elastic.github.io/eui/#/navigation/link#link-validation), and a restrictive `Content-Security-Policy` header.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Check for dangerous functions or assignments that can result in unescaped user input in the browser DOM. Avoid using:
|
||||
* **React:** [`dangerouslySetInnerHtml`](https://reactjs.org/docs/dom-elements.html#dangerouslysetinnerhtml).
|
||||
* **Browser DOM:** `Element.innerHTML` and `Element.outerHTML`.
|
||||
* If using the aforementioned unsafe functions or assignments is absolutely necessary, follow [these XSS prevention
|
||||
rules](https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html#xss-prevention-rules) to ensure that
|
||||
user input is not inserted into unsafe locations and that it is escaped properly.
|
||||
* Use EUI components to build your UI, particularly when rendering `href` links. Otherwise, sanitize user input before rendering links to
|
||||
ensure that they do not use the `javascript:` protocol.
|
||||
* Don't use the `eval`, `Function`, and `_.template` functions -- these are restricted by ESLint rules.
|
||||
* Be careful when using `setTimeout` and `setInterval` in client-side code. If an attacker can manipulate the arguments and pass a string to
|
||||
one of these, it is evaluated dynamically, which is equivalent to the dangerous `eval` function.
|
||||
|
||||
### Cross-Site Request Forgery (CSRF/XSRF)
|
||||
|
||||
[_OWASP reference for CSRF_](https://owasp.org/www-community/attacks/csrf)
|
||||
|
||||
CSRF is a class of attacks where a user is forced to execute an action on a vulnerable website that they're logged into, usually without
|
||||
their knowledge. Kibana defends against this by requiring [custom request
|
||||
headers](https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#use-of-custom-request-headers)
|
||||
for API endpoints. For more information, see [API Request
|
||||
Headers](https://www.elastic.co/guide/en/kibana/master/api.html#api-request-headers).
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Ensure all HTTP routes are registered with the [Kibana HTTP service](https://www.elastic.co/guide/en/kibana/master/http-service.html) to
|
||||
take advantage of the custom request header security control.
|
||||
* Note that HTTP GET requests do **not** require the custom request header; any routes that change data should [adhere to the HTTP
|
||||
specification and use a different method (PUT, POST, etc.)](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods)
|
||||
|
||||
### Remote Code Execution (RCE)
|
||||
|
||||
[_OWASP reference for Command Injection_](https://owasp.org/www-community/attacks/Command_Injection),
|
||||
[_OWASP reference for Code Injection_](https://owasp.org/www-community/attacks/Code_Injection)
|
||||
|
||||
RCE is a class of attacks where an attacker executes malicious code or commands on a vulnerable server. Kibana defends against this by using
|
||||
ESLint rules to restrict vulnerable functions, and by hooking into or hardening usage of these in third-party dependencies.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Don't use the `eval`, `Function`, and `_.template` functions -- these are restricted by ESLint rules.
|
||||
* Don't use dynamic `require`.
|
||||
* Check for usages of templating libraries. Ensure that user-provided input doesn't influence the template and is used only as data for
|
||||
rendering the template.
|
||||
* Take extra caution when spawning child processes with any user input or parameters that are user-controlled.
|
||||
|
||||
### Prototype Pollution
|
||||
|
||||
Prototype Pollution is an attack that is unique to JavaScript environments. Attackers can abuse JavaScript's prototype inheritance to
|
||||
"pollute" objects in the application, which is often used as a vector for XSS or RCE vulnerabilities. Kibana defends against this by
|
||||
hardening sensitive functions (such as those exposed by `child_process`), and by requiring validation on all HTTP routes by default.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Check for instances of `anObject[a][b] = c` where `a`, `b`, and `c` are controlled by user input. This includes code paths where the
|
||||
following logical code steps could be performed in separate files by completely different operations, or by recursively using dynamic
|
||||
operations.
|
||||
* Validate all user input, including API URL parameters, query parameters, and payloads. Preferably, use a schema that only allows specific
|
||||
keys and values. At a minimum, implement a deny-list that prevents `__proto__` and `prototype.constructor` from being used within object
|
||||
keys.
|
||||
* When calling APIs that spawn new processes or perform code generation from strings, protect against Prototype Pollution by checking
|
||||
`Object.hasOwnProperty` if the arguments to the APIs originate from an Object. An example is the defunct Code app's
|
||||
[`spawnProcess`](https://github.com/elastic/kibana/blob/b49192626a8528af5d888545fb14cd1ce66a72e7/x-pack/legacy/plugins/code/server/lsp/workspace_command.ts#L40-L44)
|
||||
function.
|
||||
* Common Node.js offenders: `child_process.spawn`, `child_process.exec`, `eval`, `Function('some string')`, `vm.runInContext(x)`,
|
||||
`vm.runInNewContext(x)`, `vm.runInThisContext()`
|
||||
* Common client-side offenders: `eval`, `Function('some string')`, `setTimeout('some string', num)`, `setInterval('some string', num)`
|
||||
|
||||
See also:
|
||||
|
||||
* [Prototype pollution: The dangerous and underrated vulnerability impacting JavaScript applications |
|
||||
portswigger.net](https://portswigger.net/daily-swig/prototype-pollution-the-dangerous-and-underrated-vulnerability-impacting-javascript-applications)
|
||||
* [Prototype pollution attack in NodeJS application | Olivier
|
||||
Arteau](https://github.com/HoLyVieR/prototype-pollution-nsec18/blob/master/paper/JavaScript_prototype_pollution_attack_in_NodeJS.pdf)
|
||||
|
||||
### Server-Side Request Forgery (SSRF)
|
||||
|
||||
[_OWASP reference for SSRF_](https://owasp.org/www-community/attacks/Server_Side_Request_Forgery)
|
||||
|
||||
SSRF is a class of attacks where a vulnerable server is forced to make an unintended request, usually to an HTTP API. This is often used as
|
||||
a vector for information disclosure or injection attacks.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Ensure that all outbound requests from the Kibana server use hard-coded URLs.
|
||||
* If user input is used to construct a URL for an outbound request, ensure that an allow-list is used to validate the endpoints and that
|
||||
user input is escaped properly. Ideally, the allow-list should be set in `kibana.yml`, so only server administrators can change it.
|
||||
* This is particularly relevant when using `transport.request` with the Elasticsearch client, as no automatic escaping is performed.
|
||||
* Note that URLs are very hard to validate properly; exact match validation for user input is most preferable, while URL parsing or RegEx
|
||||
validation should only be used if absolutely necessary.
|
||||
|
||||
### Reverse tabnabbing
|
||||
|
||||
[_OWASP reference for Reverse Tabnabbing_](https://owasp.org/www-community/attacks/Reverse_Tabnabbing)
|
||||
|
||||
Reverse tabnabbing is an attack where a link to a malicious page is used to rewrite a vulnerable parent page. This is often used as a vector
|
||||
for phishing attacks. Kibana defends against this by using the EUI framework, which automatically adds the `rel` attribute to anchor tags,
|
||||
buttons, and other vulnerable DOM elements.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Use EUI components to build your UI whenever possible. Otherwise, ensure that any DOM elements that have an `href` attribute also have the
|
||||
`rel="noreferrer noopener"` attribute specified. For more information, refer to the [OWASP HTML5 Security Cheat
|
||||
Sheet](https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing).
|
||||
* If using a non-EUI markdown renderer, use a custom link renderer for rendered links.
|
||||
|
||||
### Information disclosure
|
||||
|
||||
Information disclosure is not an attack, but it describes whenever sensitive information is accidentally revealed. This can be configuration
|
||||
info, stack traces, or other data that the user is not authorized to access. This concern cannot be addressed with a single security
|
||||
control, but at a high level, Kibana relies on the hapi framework to automatically redact stack traces and detailed error messages in HTTP
|
||||
5xx response payloads.
|
||||
|
||||
**Best practices**
|
||||
|
||||
* Look for instances where sensitive information might accidentally be revealed, particularly in error messages, in the UI, and URL
|
||||
parameters that are exposed to users.
|
||||
* Make sure that sensitive request data is not forwarded to external resources. For example, copying client request headers and using them
|
||||
to make another request could accidentally expose the user's credentials.
|
||||
|
|
|
@ -71,7 +71,7 @@ Alias: `condition`
|
|||
[[alterColumn_fn]]
|
||||
=== `alterColumn`
|
||||
|
||||
Converts between core types, including `string`, `number`, `null`, `boolean`, and `date`, and renames columns. See also <<mapColumn_fn>> and <<staticColumn_fn>>.
|
||||
Converts between core types, including `string`, `number`, `null`, `boolean`, and `date`, and renames columns. See also <<mapColumn_fn>>, <<mathColumn_fn>>, and <<staticColumn_fn>>.
|
||||
|
||||
*Expression syntax*
|
||||
[source,js]
|
||||
|
@ -1717,11 +1717,16 @@ Adds a column calculated as the result of other columns. Changes are made only w
|
|||
|===
|
||||
|Argument |Type |Description
|
||||
|
||||
|`id`
|
||||
|
||||
|`string`, `null`
|
||||
|An optional id of the resulting column. When no id is provided, the id will be looked up from the existing column by the provided name argument. If no column with this name exists yet, a new column with this name and an identical id will be added to the table.
|
||||
|
||||
|_Unnamed_ ***
|
||||
|
||||
Aliases: `column`, `name`
|
||||
|`string`
|
||||
|The name of the resulting column.
|
||||
|The name of the resulting column. Names are not required to be unique.
|
||||
|
||||
|`expression` ***
|
||||
|
||||
|
@ -1729,11 +1734,6 @@ Aliases: `exp`, `fn`, `function`
|
|||
|`boolean`, `number`, `string`, `null`
|
||||
|A Canvas expression that is passed to each row as a single row `datatable`.
|
||||
|
||||
|`id`
|
||||
|
||||
|`string`, `null`
|
||||
|An optional id of the resulting column. When not specified or `null` the name argument is used as id.
|
||||
|
||||
|`copyMetaFrom`
|
||||
|
||||
|`string`, `null`
|
||||
|
@ -1808,6 +1808,47 @@ Default: `"throw"`
|
|||
*Returns:* `number` | `boolean` | `null`
|
||||
|
||||
|
||||
[float]
|
||||
[[mathColumn_fn]]
|
||||
=== `mathColumn`
|
||||
|
||||
Adds a column by evaluating `TinyMath` on each row. This function is optimized for math, so it performs better than the <<mapColumn_fn>> with a <<math_fn>>.
|
||||
*Accepts:* `datatable`
|
||||
|
||||
[cols="3*^<"]
|
||||
|===
|
||||
|Argument |Type |Description
|
||||
|
||||
|id ***
|
||||
|`string`
|
||||
|id of the resulting column. Must be unique.
|
||||
|
||||
|name ***
|
||||
|`string`
|
||||
|The name of the resulting column. Names are not required to be unique.
|
||||
|
||||
|_Unnamed_
|
||||
|
||||
Alias: `expression`
|
||||
|`string`
|
||||
|A `TinyMath` expression evaluated on each row. See https://www.elastic.co/guide/en/kibana/current/canvas-tinymath-functions.html.
|
||||
|
||||
|`onError`
|
||||
|
||||
|`string`
|
||||
|In case the `TinyMath` evaluation fails or returns NaN, the return value is specified by onError. For example, `"null"`, `"zero"`, `"false"`, `"throw"`. When `"throw"`, it will throw an exception, terminating expression execution.
|
||||
|
||||
Default: `"throw"`
|
||||
|
||||
|`copyMetaFrom`
|
||||
|
||||
|`string`, `null`
|
||||
|If set, the meta object from the specified column id is copied over to the specified target column. Throws an exception if the column doesn't exist
|
||||
|===
|
||||
|
||||
*Returns:* `datatable`
|
||||
|
||||
|
||||
[float]
|
||||
[[metric_fn]]
|
||||
=== `metric`
|
||||
|
@ -2581,7 +2622,7 @@ Default: `false`
|
|||
[[staticColumn_fn]]
|
||||
=== `staticColumn`
|
||||
|
||||
Adds a column with the same static value in every row. See also <<alterColumn_fn>> and <<mapColumn_fn>>.
|
||||
Adds a column with the same static value in every row. See also <<alterColumn_fn>>, <<mapColumn_fn>>, and <<mathColumn_fn>>.
|
||||
|
||||
*Accepts:* `datatable`
|
||||
|
||||
|
|
|
@ -1,55 +1,135 @@
|
|||
[[security-best-practices]]
|
||||
== Security best practices
|
||||
|
||||
* XSS
|
||||
** Check for usages of `dangerouslySetInnerHtml`, `Element.innerHTML`,
|
||||
`Element.outerHTML`
|
||||
** Ensure all user input is properly escaped.
|
||||
** Ensure any input in `$.html`, `$.append`, `$.appendTo`,
|
||||
`$.prepend`, `$.prependTo` is escaped. Instead use `$.text`, or
|
||||
don’t use jQuery at all.
|
||||
* CSRF
|
||||
** Ensure all APIs are running inside the {kib} HTTP service.
|
||||
* RCE
|
||||
** Ensure no usages of `eval`
|
||||
** Ensure no usages of dynamic requires
|
||||
** Check for template injection
|
||||
** Check for usages of templating libraries, including `_.template`, and
|
||||
ensure that user provided input isn’t influencing the template and is
|
||||
only used as data for rendering the template.
|
||||
** Check for possible prototype pollution.
|
||||
* Prototype Pollution
|
||||
** Check for instances of `anObject[a][b] = c` where a, b, and c are
|
||||
user defined. This includes code paths where the following logical code
|
||||
steps could be performed in separate files by completely different
|
||||
operations, or recursively using dynamic operations.
|
||||
** Validate any user input, including API
|
||||
url-parameters/query-parameters/payloads, preferably against a schema
|
||||
which only allows specific keys/values. At a very minimum, black-list
|
||||
`__proto__` and `prototype.constructor` for use within keys
|
||||
** When calling APIs which spawn new processes or potentially perform
|
||||
code generation from strings, defensively protect against Prototype
|
||||
Pollution by checking `Object.hasOwnProperty` if the arguments to the
|
||||
APIs originate from an Object. An example is the Code app’s
|
||||
https://github.com/elastic/kibana/blob/b49192626a8528af5d888545fb14cd1ce66a72e7/x-pack/legacy/plugins/code/server/lsp/workspace_command.ts#L40-L44[spawnProcess].
|
||||
*** Common Node.js offenders: `child_process.spawn`,
|
||||
`child_process.exec`, `eval`, `Function('some string')`,
|
||||
`vm.runIn*Context(x)`
|
||||
*** Common Client-side offenders: `eval`, `Function('some string')`,
|
||||
`setTimeout('some string', num)`, `setInterval('some string', num)`
|
||||
* Check for accidental reveal of sensitive information
|
||||
** The biggest culprit is errors which contain stack traces or other
|
||||
sensitive information which end up in the HTTP Response
|
||||
* Check for mishandled API requests
|
||||
** Ensure no sensitive cookies are forwarded to external resources.
|
||||
** Ensure that all user controllable variables that are used in
|
||||
constructing a URL are escaped properly. This is relevant when using
|
||||
`transport.request` with the {es} client as no automatic
|
||||
escaping is performed.
|
||||
* Reverse tabnabbing -
|
||||
https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing
|
||||
** When there are user controllable links or hard-coded links to
|
||||
third-party domains that specify target="_blank" or target="_window", the a tag should have the rel="noreferrer noopener" attribute specified.
|
||||
Allowing users to input markdown is a common culprit, a custom link renderer should be used
|
||||
* SSRF - https://www.owasp.org/index.php/Server_Side_Request_Forgery
|
||||
All network requests made from the {kib} server should use an explicit configuration or white-list specified in the kibana.yml
|
||||
When writing code for {kib}, be sure to follow these best practices to avoid common vulnerabilities. Refer to the included Open Web
|
||||
Application Security Project (OWASP) references to learn more about these types of attacks.
|
||||
|
||||
=== Cross-site Scripting (XSS) ===
|
||||
|
||||
https://owasp.org/www-community/attacks/xss[_OWASP reference for XSS_]
|
||||
|
||||
XSS is a class of attacks where malicious scripts are injected into vulnerable websites. {kib} defends against this by using the React
|
||||
framework to safely encode data that is rendered in pages, the EUI framework to
|
||||
https://elastic.github.io/eui/#/navigation/link#link-validation[automatically sanitize links], and a restrictive `Content-Security-Policy`
|
||||
header.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Check for dangerous functions or assignments that can result in unescaped user input in the browser DOM. Avoid using:
|
||||
** *React:* https://reactjs.org/docs/dom-elements.html#dangerouslysetinnerhtml[`dangerouslySetInnerHtml`].
|
||||
** *Browser DOM:* `Element.innerHTML` and `Element.outerHTML`.
|
||||
* If using the aforementioned unsafe functions or assignments is absolutely necessary, follow
|
||||
https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html#xss-prevention-rules[these XSS prevention
|
||||
rules] to ensure that user input is not inserted into unsafe locations and that it is escaped properly.
|
||||
* Use EUI components to build your UI, particularly when rendering `href` links. Otherwise, sanitize user input before rendering links to
|
||||
ensure that they do not use the `javascript:` protocol.
|
||||
* Don't use the `eval`, `Function`, and `_.template` functions -- these are restricted by ESLint rules.
|
||||
* Be careful when using `setTimeout` and `setInterval` in client-side code. If an attacker can manipulate the arguments and pass a string to
|
||||
one of these, it is evaluated dynamically, which is equivalent to the dangerous `eval` function.
|
||||
|
||||
=== Cross-Site Request Forgery (CSRF/XSRF) ===
|
||||
|
||||
https://owasp.org/www-community/attacks/csrf[_OWASP reference for CSRF_]
|
||||
|
||||
CSRF is a class of attacks where a user is forced to execute an action on a vulnerable website that they're logged into, usually without
|
||||
their knowledge. {kib} defends against this by requiring
|
||||
https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#use-of-custom-request-headers[custom
|
||||
request headers] for API endpoints. For more information, see <<api-request-headers, API Request Headers>>.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Ensure all HTTP routes are registered with the <<http-service, {kib} HTTP service>> to take advantage of the custom request header
|
||||
security control.
|
||||
** Note that HTTP GET requests do *not* require the custom request header; any routes that change data should
|
||||
https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods[adhere to the HTTP specification and use a different method (PUT, POST, etc.)]
|
||||
|
||||
=== Remote Code Execution (RCE) ===
|
||||
|
||||
https://owasp.org/www-community/attacks/Command_Injection[_OWASP reference for Command Injection_],
|
||||
https://owasp.org/www-community/attacks/Code_Injection[_OWASP reference for Code Injection_]
|
||||
|
||||
RCE is a class of attacks where an attacker executes malicious code or commands on a vulnerable server. {kib} defends against this by using
|
||||
ESLint rules to restrict vulnerable functions, and by hooking into or hardening usage of these in third-party dependencies.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Don't use the `eval`, `Function`, and `_.template` functions -- these are restricted by ESLint rules.
|
||||
* Don't use dynamic `require`.
|
||||
* Check for usages of templating libraries. Ensure that user-provided input doesn't influence the template and is used only as data for
|
||||
rendering the template.
|
||||
* Take extra caution when spawning child processes with any user input or parameters that are user-controlled.
|
||||
|
||||
=== Prototype Pollution ===
|
||||
|
||||
Prototype Pollution is an attack that is unique to JavaScript environments. Attackers can abuse JavaScript's prototype inheritance to
|
||||
"pollute" objects in the application, which is often used as a vector for XSS or RCE vulnerabilities. {kib} defends against this by
|
||||
hardening sensitive functions (such as those exposed by `child_process`), and by requiring validation on all HTTP routes by default.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Check for instances of `anObject[a][b] = c` where `a`, `b`, and `c` are controlled by user input. This includes code paths where the
|
||||
following logical code steps could be performed in separate files by completely different operations, or by recursively using dynamic
|
||||
operations.
|
||||
* Validate all user input, including API URL parameters, query parameters, and payloads. Preferably, use a schema that only allows specific
|
||||
keys and values. At a minimum, implement a deny-list that prevents `__proto__` and `prototype.constructor` from being used within object
|
||||
keys.
|
||||
* When calling APIs that spawn new processes or perform code generation from strings, protect against Prototype Pollution by checking
|
||||
`Object.hasOwnProperty` if the arguments to the APIs originate from an Object. An example is the defunct Code app's
|
||||
https://github.com/elastic/kibana/blob/b49192626a8528af5d888545fb14cd1ce66a72e7/x-pack/legacy/plugins/code/server/lsp/workspace_command.ts#L40-L44[`spawnProcess`]
|
||||
function.
|
||||
** Common Node.js offenders: `child_process.spawn`, `child_process.exec`, `eval`, `Function('some string')`, `vm.runInContext(x)`,
|
||||
`vm.runInNewContext(x)`, `vm.runInThisContext()`
|
||||
** Common client-side offenders: `eval`, `Function('some string')`, `setTimeout('some string', num)`, `setInterval('some string', num)`
|
||||
|
||||
See also:
|
||||
|
||||
* https://portswigger.net/daily-swig/prototype-pollution-the-dangerous-and-underrated-vulnerability-impacting-javascript-applications[Prototype
|
||||
pollution: The dangerous and underrated vulnerability impacting JavaScript applications | portswigger.net]
|
||||
* https://github.com/HoLyVieR/prototype-pollution-nsec18/blob/master/paper/JavaScript_prototype_pollution_attack_in_NodeJS.pdf[Prototype
|
||||
pollution attack in NodeJS application | Olivier Arteau]
|
||||
|
||||
=== Server-Side Request Forgery (SSRF) ===
|
||||
|
||||
https://owasp.org/www-community/attacks/Server_Side_Request_Forgery[_OWASP reference for SSRF_]
|
||||
|
||||
SSRF is a class of attacks where a vulnerable server is forced to make an unintended request, usually to an HTTP API. This is often used as
|
||||
a vector for information disclosure or injection attacks.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Ensure that all outbound requests from the {kib} server use hard-coded URLs.
|
||||
* If user input is used to construct a URL for an outbound request, ensure that an allow-list is used to validate the endpoints and that
|
||||
user input is escaped properly. Ideally, the allow-list should be set in `kibana.yml`, so only server administrators can change it.
|
||||
** This is particularly relevant when using `transport.request` with the {es} client, as no automatic escaping is performed.
|
||||
** Note that URLs are very hard to validate properly; exact match validation for user input is most preferable, while URL parsing or RegEx
|
||||
validation should only be used if absolutely necessary.
|
||||
|
||||
=== Reverse tabnabbing ===
|
||||
|
||||
https://owasp.org/www-community/attacks/Reverse_Tabnabbing[_OWASP reference for Reverse Tabnabbing_]
|
||||
|
||||
Reverse tabnabbing is an attack where a link to a malicious page is used to rewrite a vulnerable parent page. This is often used as a vector
|
||||
for phishing attacks. {kib} defends against this by using the EUI framework, which automatically adds the `rel` attribute to anchor tags,
|
||||
buttons, and other vulnerable DOM elements.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Use EUI components to build your UI whenever possible. Otherwise, ensure that any DOM elements that have an `href` attribute also have the
|
||||
`rel="noreferrer noopener"` attribute specified. For more information, refer to the
|
||||
https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing[OWASP HTML5 Security Cheat
|
||||
Sheet].
|
||||
* If using a non-EUI markdown renderer, use a custom link renderer for rendered links.
|
||||
|
||||
=== Information disclosure ===
|
||||
|
||||
Information disclosure is not an attack, but it describes whenever sensitive information is accidentally revealed. This can be configuration
|
||||
info, stack traces, or other data that the user is not authorized to access. This concern cannot be addressed with a single security
|
||||
control, but at a high level, {kib} relies on the hapi framework to automatically redact stack traces and detailed error messages in HTTP
|
||||
5xx response payloads.
|
||||
|
||||
*Best practices*
|
||||
|
||||
* Look for instances where sensitive information might accidentally be revealed, particularly in error messages, in the UI, and URL
|
||||
parameters that are exposed to users.
|
||||
* Make sure that sensitive request data is not forwarded to external resources. For example, copying client request headers and using them
|
||||
to make another request could accidentally expose the user's credentials.
|
||||
|
|
|
@ -261,52 +261,48 @@ The plugin exposes the static DefaultEditorController class to consume.
|
|||
|The markdown visualization that can be used to place text panels on dashboards.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_metric/README.md[visTypeMetric]
|
||||
|Contains the metric visualization.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_metric[visTypeMetric]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_pie/README.md[visTypePie]
|
||||
|Contains the pie chart implementation using the elastic-charts library. The goal is to eventually deprecate the old implementation and keep only this. Until then, the library used is defined by the Legacy charts library advanced setting.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_pie[visTypePie]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_table/README.md[visTypeTable]
|
||||
|Contains the data table visualization, that allows presenting data in a simple table format.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_tagcloud/README.md[visTypeTagcloud]
|
||||
|Contains the tagcloud visualization.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_tagcloud[visTypeTagcloud]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_timelion/README.md[visTypeTimelion]
|
||||
|Contains the timelion visualization and the timelion backend.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_timeseries/README.md[visTypeTimeseries]
|
||||
|Contains everything around TSVB (the editor, visualization implementations, and backends).
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_timeseries[visTypeTimeseries]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_vega/README.md[visTypeVega]
|
||||
|Contains the Vega visualization.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_vega[visTypeVega]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_vislib/README.md[visTypeVislib]
|
||||
|Contains the vislib visualizations. These are the classical area/line/bar, pie, gauge/goal and
|
||||
heatmap charts.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_vislib[visTypeVislib]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_xy/README.md[visTypeXy]
|
||||
|Contains the new xy-axis chart using the elastic-charts library, which will eventually
|
||||
replace the vislib xy-axis charts including bar, area, and line.
|
||||
|{kib-repo}blob/{branch}/src/plugins/vis_type_xy[visTypeXy]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/visualizations/README.md[visualizations]
|
||||
|Contains most of the visualization infrastructure, e.g. the visualization type registry or the
|
||||
visualization embeddable.
|
||||
|{kib-repo}blob/{branch}/src/plugins/visualizations[visualizations]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|{kib-repo}blob/{branch}/src/plugins/visualize/README.md[visualize]
|
||||
|Contains the visualize application which includes the listing page and the app frame,
|
||||
which will load the visualization's editor.
|
||||
|{kib-repo}blob/{branch}/src/plugins/visualize[visualize]
|
||||
|WARNING: Missing README.
|
||||
|
||||
|
||||
|===
|
||||
|
|
|
@ -16,6 +16,7 @@ Note that when generating absolute urls, the origin (protocol, host and port) ar
|
|||
getUrlForApp(appId: string, options?: {
|
||||
path?: string;
|
||||
absolute?: boolean;
|
||||
deepLinkId?: string;
|
||||
}): string;
|
||||
```
|
||||
|
||||
|
@ -24,7 +25,7 @@ getUrlForApp(appId: string, options?: {
|
|||
| Parameter | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| appId | <code>string</code> | |
|
||||
| options | <code>{</code><br/><code> path?: string;</code><br/><code> absolute?: boolean;</code><br/><code> }</code> | |
|
||||
| options | <code>{</code><br/><code> path?: string;</code><br/><code> absolute?: boolean;</code><br/><code> deepLinkId?: string;</code><br/><code> }</code> | |
|
||||
|
||||
<b>Returns:</b>
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [Plugin](./kibana-plugin-plugins-data-public.plugin.md) > [(constructor)](./kibana-plugin-plugins-data-public.plugin._constructor_.md)
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md) > [(constructor)](./kibana-plugin-plugins-data-public.dataplugin._constructor_.md)
|
||||
|
||||
## Plugin.(constructor)
|
||||
## DataPlugin.(constructor)
|
||||
|
||||
Constructs a new instance of the `DataPublicPlugin` class
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md)
|
||||
|
||||
## DataPlugin class
|
||||
|
||||
<b>Signature:</b>
|
||||
|
||||
```typescript
|
||||
export declare class DataPublicPlugin implements Plugin<DataPublicPluginSetup, DataPublicPluginStart, DataSetupDependencies, DataStartDependencies>
|
||||
```
|
||||
|
||||
## Constructors
|
||||
|
||||
| Constructor | Modifiers | Description |
|
||||
| --- | --- | --- |
|
||||
| [(constructor)(initializerContext)](./kibana-plugin-plugins-data-public.dataplugin._constructor_.md) | | Constructs a new instance of the <code>DataPublicPlugin</code> class |
|
||||
|
||||
## Methods
|
||||
|
||||
| Method | Modifiers | Description |
|
||||
| --- | --- | --- |
|
||||
| [setup(core, { bfetch, expressions, uiActions, usageCollection, inspector })](./kibana-plugin-plugins-data-public.dataplugin.setup.md) | | |
|
||||
| [start(core, { uiActions })](./kibana-plugin-plugins-data-public.dataplugin.start.md) | | |
|
||||
| [stop()](./kibana-plugin-plugins-data-public.dataplugin.stop.md) | | |
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [Plugin](./kibana-plugin-plugins-data-public.plugin.md) > [setup](./kibana-plugin-plugins-data-public.plugin.setup.md)
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md) > [setup](./kibana-plugin-plugins-data-public.dataplugin.setup.md)
|
||||
|
||||
## Plugin.setup() method
|
||||
## DataPlugin.setup() method
|
||||
|
||||
<b>Signature:</b>
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [Plugin](./kibana-plugin-plugins-data-public.plugin.md) > [start](./kibana-plugin-plugins-data-public.plugin.start.md)
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md) > [start](./kibana-plugin-plugins-data-public.dataplugin.start.md)
|
||||
|
||||
## Plugin.start() method
|
||||
## DataPlugin.start() method
|
||||
|
||||
<b>Signature:</b>
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [Plugin](./kibana-plugin-plugins-data-public.plugin.md) > [stop](./kibana-plugin-plugins-data-public.plugin.stop.md)
|
||||
[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md) > [stop](./kibana-plugin-plugins-data-public.dataplugin.stop.md)
|
||||
|
||||
## Plugin.stop() method
|
||||
## DataPlugin.stop() method
|
||||
|
||||
<b>Signature:</b>
|
||||
|
|
@ -10,6 +10,6 @@
|
|||
esKuery: {
|
||||
nodeTypes: import("../common/es_query/kuery/node_types").NodeTypes;
|
||||
fromKueryExpression: (expression: any, parseOptions?: Partial<import("../common").KueryParseOptions>) => import("../common").KueryNode;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("../../kibana_utils/common").JsonObject;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("@kbn/common-utils").JsonObject;
|
||||
}
|
||||
```
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
| [AggConfig](./kibana-plugin-plugins-data-public.aggconfig.md) | |
|
||||
| [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) | |
|
||||
| [AggParamType](./kibana-plugin-plugins-data-public.aggparamtype.md) | |
|
||||
| [DataPlugin](./kibana-plugin-plugins-data-public.dataplugin.md) | |
|
||||
| [DuplicateIndexPatternError](./kibana-plugin-plugins-data-public.duplicateindexpatternerror.md) | |
|
||||
| [FieldFormat](./kibana-plugin-plugins-data-public.fieldformat.md) | |
|
||||
| [FilterManager](./kibana-plugin-plugins-data-public.filtermanager.md) | |
|
||||
|
@ -19,7 +20,6 @@
|
|||
| [IndexPatternsService](./kibana-plugin-plugins-data-public.indexpatternsservice.md) | |
|
||||
| [OptionedParamType](./kibana-plugin-plugins-data-public.optionedparamtype.md) | |
|
||||
| [PainlessError](./kibana-plugin-plugins-data-public.painlesserror.md) | |
|
||||
| [Plugin](./kibana-plugin-plugins-data-public.plugin.md) | |
|
||||
| [SearchInterceptor](./kibana-plugin-plugins-data-public.searchinterceptor.md) | |
|
||||
| [SearchSource](./kibana-plugin-plugins-data-public.searchsource.md) | \* |
|
||||
| [SearchTimeoutError](./kibana-plugin-plugins-data-public.searchtimeouterror.md) | Request Failure - When an entire multi request fails |
|
||||
|
|
|
@ -10,6 +10,6 @@
|
|||
esKuery: {
|
||||
nodeTypes: import("../common/es_query/kuery/node_types").NodeTypes;
|
||||
fromKueryExpression: (expression: any, parseOptions?: Partial<import("../common").KueryParseOptions>) => import("../common").KueryNode;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("../../kibana_utils/common").JsonObject;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("@kbn/common-utils").JsonObject;
|
||||
}
|
||||
```
|
||||
|
|
|
@ -9,7 +9,7 @@ Constructs a new instance of the `AddPanelAction` class
|
|||
<b>Signature:</b>
|
||||
|
||||
```typescript
|
||||
constructor(getFactory: EmbeddableStart['getEmbeddableFactory'], getAllFactories: EmbeddableStart['getEmbeddableFactories'], overlays: OverlayStart, notifications: NotificationsStart, SavedObjectFinder: React.ComponentType<any>);
|
||||
constructor(getFactory: EmbeddableStart['getEmbeddableFactory'], getAllFactories: EmbeddableStart['getEmbeddableFactories'], overlays: OverlayStart, notifications: NotificationsStart, SavedObjectFinder: React.ComponentType<any>, reportUiCounter?: ((appName: string, type: import("@kbn/analytics").UiCounterMetricType, eventNames: string | string[], count?: number | undefined) => void) | undefined);
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
@ -21,4 +21,5 @@ constructor(getFactory: EmbeddableStart['getEmbeddableFactory'], getAllFactories
|
|||
| overlays | <code>OverlayStart</code> | |
|
||||
| notifications | <code>NotificationsStart</code> | |
|
||||
| SavedObjectFinder | <code>React.ComponentType<any></code> | |
|
||||
| reportUiCounter | <code>((appName: string, type: import("@kbn/analytics").UiCounterMetricType, eventNames: string | string[], count?: number | undefined) => void) | undefined</code> | |
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ export declare class AddPanelAction implements Action<ActionContext>
|
|||
|
||||
| Constructor | Modifiers | Description |
|
||||
| --- | --- | --- |
|
||||
| [(constructor)(getFactory, getAllFactories, overlays, notifications, SavedObjectFinder)](./kibana-plugin-plugins-embeddable-public.addpanelaction._constructor_.md) | | Constructs a new instance of the <code>AddPanelAction</code> class |
|
||||
| [(constructor)(getFactory, getAllFactories, overlays, notifications, SavedObjectFinder, reportUiCounter)](./kibana-plugin-plugins-embeddable-public.addpanelaction._constructor_.md) | | Constructs a new instance of the <code>AddPanelAction</code> class |
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ export declare function openAddPanelFlyout(options: {
|
|||
notifications: NotificationsStart;
|
||||
SavedObjectFinder: React.ComponentType<any>;
|
||||
showCreateNewMenu?: boolean;
|
||||
reportUiCounter?: UsageCollectionStart['reportUiCounter'];
|
||||
}): OverlayRef;
|
||||
```
|
||||
|
||||
|
@ -22,7 +23,7 @@ export declare function openAddPanelFlyout(options: {
|
|||
|
||||
| Parameter | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| options | <code>{</code><br/><code> embeddable: IContainer;</code><br/><code> getFactory: EmbeddableStart['getEmbeddableFactory'];</code><br/><code> getAllFactories: EmbeddableStart['getEmbeddableFactories'];</code><br/><code> overlays: OverlayStart;</code><br/><code> notifications: NotificationsStart;</code><br/><code> SavedObjectFinder: React.ComponentType<any>;</code><br/><code> showCreateNewMenu?: boolean;</code><br/><code>}</code> | |
|
||||
| options | <code>{</code><br/><code> embeddable: IContainer;</code><br/><code> getFactory: EmbeddableStart['getEmbeddableFactory'];</code><br/><code> getAllFactories: EmbeddableStart['getEmbeddableFactories'];</code><br/><code> overlays: OverlayStart;</code><br/><code> notifications: NotificationsStart;</code><br/><code> SavedObjectFinder: React.ComponentType<any>;</code><br/><code> showCreateNewMenu?: boolean;</code><br/><code> reportUiCounter?: UsageCollectionStart['reportUiCounter'];</code><br/><code>}</code> | |
|
||||
|
||||
<b>Returns:</b>
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ export interface ExpressionFunctionDefinitions
|
|||
| [derivative](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.derivative.md) | <code>ExpressionFunctionDerivative</code> | |
|
||||
| [font](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.font.md) | <code>ExpressionFunctionFont</code> | |
|
||||
| [moving\_average](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.moving_average.md) | <code>ExpressionFunctionMovingAverage</code> | |
|
||||
| [overall\_metric](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.overall_metric.md) | <code>ExpressionFunctionOverallMetric</code> | |
|
||||
| [theme](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.theme.md) | <code>ExpressionFunctionTheme</code> | |
|
||||
| [var\_set](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.var_set.md) | <code>ExpressionFunctionVarSet</code> | |
|
||||
| [var](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.var.md) | <code>ExpressionFunctionVar</code> | |
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-expressions-public](./kibana-plugin-plugins-expressions-public.md) > [ExpressionFunctionDefinitions](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.md) > [overall\_metric](./kibana-plugin-plugins-expressions-public.expressionfunctiondefinitions.overall_metric.md)
|
||||
|
||||
## ExpressionFunctionDefinitions.overall\_metric property
|
||||
|
||||
<b>Signature:</b>
|
||||
|
||||
```typescript
|
||||
overall_metric: ExpressionFunctionOverallMetric;
|
||||
```
|
|
@ -21,6 +21,7 @@ export interface ExpressionFunctionDefinitions
|
|||
| [derivative](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.derivative.md) | <code>ExpressionFunctionDerivative</code> | |
|
||||
| [font](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.font.md) | <code>ExpressionFunctionFont</code> | |
|
||||
| [moving\_average](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.moving_average.md) | <code>ExpressionFunctionMovingAverage</code> | |
|
||||
| [overall\_metric](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.overall_metric.md) | <code>ExpressionFunctionOverallMetric</code> | |
|
||||
| [theme](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.theme.md) | <code>ExpressionFunctionTheme</code> | |
|
||||
| [var\_set](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.var_set.md) | <code>ExpressionFunctionVarSet</code> | |
|
||||
| [var](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.var.md) | <code>ExpressionFunctionVar</code> | |
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
|
||||
|
||||
[Home](./index.md) > [kibana-plugin-plugins-expressions-server](./kibana-plugin-plugins-expressions-server.md) > [ExpressionFunctionDefinitions](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.md) > [overall\_metric](./kibana-plugin-plugins-expressions-server.expressionfunctiondefinitions.overall_metric.md)
|
||||
|
||||
## ExpressionFunctionDefinitions.overall\_metric property
|
||||
|
||||
<b>Signature:</b>
|
||||
|
||||
```typescript
|
||||
overall_metric: ExpressionFunctionOverallMetric;
|
||||
```
|
|
@ -68,3 +68,19 @@ behaves differently:
|
|||
* Relative dates are converted to absolute dates.
|
||||
* Panning and zooming is disabled for maps.
|
||||
* Changing a filter, query, or drilldown starts a new search session, which can be slow.
|
||||
|
||||
[float]
|
||||
==== Limitations
|
||||
|
||||
Certain visualization features do not fully support background search sessions yet. If a dashboard using these features gets restored,
|
||||
all panels using unsupported features won't load immediately, but instead send out additional data requests which can take a while to complete.
|
||||
In this case a warning *Your search session is still running* will be shown.
|
||||
|
||||
You can either wait for these additional requests to complete or come back to the dashboard later when all data requests have been finished.
|
||||
|
||||
A panel on a dashboard can behave like this if one of the following features is used:
|
||||
* *Lens* - A *top values* dimension with an enabled setting *Group other values as "Other"* (configurable in the *Advanced* section of the dimension)
|
||||
* *Lens* - An *intervals* dimension is used
|
||||
* *Aggregation based* visualizations - A *terms* aggregation is used with an enabled setting *Group other values in separate bucket*
|
||||
* *Aggregation based* visualizations - A *histogram* aggregation is used
|
||||
* *Maps* - Layers using joins, blended layers or tracks layers are used
|
||||
|
|
|
@ -28,6 +28,9 @@ Task Manager runs background tasks by polling for work on an interval. You can
|
|||
| `xpack.task_manager.max_workers`
|
||||
| The maximum number of tasks that this Kibana instance will run simultaneously. Defaults to 10.
|
||||
Starting in 8.0, it will not be possible to set this value to anything greater than 100.
|
||||
|
||||
| `xpack.task_manager.monitored_stats_warn_delayed_task_start_in_seconds`
|
||||
| The number of seconds we allow a task to delay before printing a warning to the server log. Defaults to 60.
|
||||
|===
|
||||
|
||||
[float]
|
||||
|
|
BIN
docs/siem/images/workflow.png
Normal file
BIN
docs/siem/images/workflow.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 301 KiB |
|
@ -1,102 +1,160 @@
|
|||
[role="xpack"]
|
||||
[[siem-ui]]
|
||||
== Using Elastic Security
|
||||
== Elastic Security Overview
|
||||
|
||||
Elastic Security is a highly interactive workspace designed for security
|
||||
analysts. It provides a clear overview of events and alerts from your
|
||||
environment, and you can use the interactive UI to drill down into areas of
|
||||
interest.
|
||||
Elastic Security combines SIEM threat detection features with endpoint
|
||||
prevention and response capabilities in one solution. These analytical and
|
||||
protection capabilities, leveraged by the speed and extensibility of
|
||||
Elasticsearch, enable analysts to defend their organization from threats before
|
||||
damage and loss occur.
|
||||
|
||||
[float]
|
||||
[[hosts-ui]]
|
||||
=== Hosts
|
||||
Elastic Security provides the following security benefits and capabilities:
|
||||
|
||||
The Hosts page provides key metrics regarding host-related security events, and
|
||||
data tables and histograms that let you interact with the Timeline Event Viewer.
|
||||
You can drill down for deeper insights, and drag and drop items of interest from
|
||||
the Hosts page to Timeline for further investigation.
|
||||
* A detection engine to identify attacks and system misconfigurations
|
||||
* A workspace for event triage and investigations
|
||||
* Interactive visualizations to investigate process relationships
|
||||
* Inbuilt case management with automated actions
|
||||
* Detection of signatureless attacks with prebuilt machine learning anomaly jobs
|
||||
and detection rules
|
||||
|
||||
[discrete]
|
||||
== Elastic Security components and workflow
|
||||
|
||||
The following diagram provides a comprehensive illustration of the Elastic Security workflow.
|
||||
|
||||
[role="screenshot"]
|
||||
image::siem/images/hosts-ui.png[]
|
||||
image::../siem/images/workflow.png[Elastic Security workflow]
|
||||
|
||||
Here's an overview of the flow and its components:
|
||||
|
||||
* Data is shipped from your hosts to {es} via beat modules and the Elastic https://www.elastic.co/endpoint-security/[Endpoint Security agent integration]. This integration provides capabilities such as collecting events, detecting and preventing {security-guide}/detection-engine-overview.html#malware-prevention[malicious activity], and artifact delivery. The {fleet-guide}/fleet-overview.html[{fleet}] app is used to
|
||||
install and manage agents and integrations on your hosts.
|
||||
+
|
||||
The Endpoint Security integration ships the following data sets:
|
||||
+
|
||||
*** *Windows*: Process, network, file, DNS, registry, DLL and driver loads,
|
||||
malware security detections
|
||||
*** *Linux/macOS*: Process, network, file
|
||||
+
|
||||
* https://www.elastic.co/integrations?solution=security[Beat modules]: {beats}
|
||||
are lightweight data shippers. Beat modules provide a way of collecting and
|
||||
parsing specific data sets from common sources, such as cloud and OS events,
|
||||
logs, and metrics. Common security-related modules are listed {security-guide}/ingest-data.html#enable-beat-modules[here].
|
||||
* The {security-app} in {kib} is used to manage the *Detection engine*,
|
||||
*Cases*, and *Timeline*, as well as administer hosts running Endpoint Security:
|
||||
** Detection engine: Automatically searches for suspicious host and network
|
||||
activity via the following:
|
||||
*** {security-guide}/detection-engine-overview.html#detection-engine-overview[Detection rules]: Periodically search the data
|
||||
({es} indices) sent from your hosts for suspicious events. When a suspicious
|
||||
event is discovered, a detection alert is generated. External systems, such as
|
||||
Slack and email, can be used to send notifications when alerts are generated.
|
||||
You can create your own rules and make use of our {security-guide}/prebuilt-rules.html[prebuilt ones].
|
||||
*** {security-guide}/detections-ui-exceptions.html[Exceptions]: Reduce noise and the number of
|
||||
false positives. Exceptions are associated with rules and prevent alerts when
|
||||
an exception's conditions are met. *Value lists* contain source event
|
||||
values that can be used as part of an exception's conditions. When
|
||||
Elastic {endpoint-sec} is installed on your hosts, you can add malware exceptions
|
||||
directly to the endpoint from the Security app.
|
||||
*** {security-guide}/machine-learning.html#included-jobs[{ml-cap} jobs]: Automatic anomaly detection of host and
|
||||
network events. Anomaly scores are provided per host and can be used with
|
||||
detection rules.
|
||||
** {security-guide}/timelines-ui.html[Timeline]: Workspace for investigating alerts and events.
|
||||
Timelines use queries and filters to drill down into events related to
|
||||
a specific incident. Timeline templates are attached to rules and use predefined
|
||||
queries when alerts are investigated. Timelines can be saved and shared with
|
||||
others, as well as attached to Cases.
|
||||
** {security-guide}/cases-overview.html[Cases]: An internal system for opening, tracking, and sharing
|
||||
security issues directly in the Security app. Cases can be integrated with
|
||||
external ticketing systems.
|
||||
** {security-guide}/admin-page-ov.html[Administration]: View and manage hosts running {endpoint-sec}.
|
||||
|
||||
{security-guide}/ingest-data.html[Ingest data to Elastic Security] and {security-guide}/install-endpoint.html[Configure and install the Elastic Endpoint integration] describe how to ship security-related
|
||||
data to {es}.
|
||||
|
||||
|
||||
[float]
|
||||
[[network-ui]]
|
||||
=== Network
|
||||
For more background information, see:
|
||||
|
||||
The Network page displays key network activity metrics in an interactive map,
|
||||
and provides network event tables that enable interaction with Timeline.
|
||||
* https://www.elastic.co/products/elasticsearch[{es}]: A real-time,
|
||||
distributed storage, search, and analytics engine. {es} excels at indexing
|
||||
streams of semi-structured data, such as logs or metrics.
|
||||
* https://www.elastic.co/products/kibana[{kib}]: An open-source analytics and
|
||||
visualization platform designed to work with {es}. You use {kib} to search,
|
||||
view, and interact with data stored in {es} indices. You can easily compile
|
||||
advanced data analysis and visualize your data in a variety of charts, tables,
|
||||
and maps.
|
||||
|
||||
[role="screenshot"]
|
||||
image::siem/images/network-ui.png[]
|
||||
[discrete]
|
||||
=== Compatibility with cold tier nodes
|
||||
|
||||
[float]
|
||||
[[detections-ui]]
|
||||
=== Detections (beta)
|
||||
Cold tier is a {ref}/data-tiers.html[data tier] that holds time series data that is accessed only occasionally. In {stack} version >=7.11.0, {elastic-sec} supports cold tier data for the following {es} indices:
|
||||
|
||||
The Detections feature automatically searches for threats and creates
|
||||
alerts when they are detected. Detection rules define the conditions
|
||||
for when alerts are created. Elastic Security comes with prebuilt rules that
|
||||
search for suspicious activity on your network and hosts. Additionally, you can
|
||||
create your own rules.
|
||||
* Index patterns specified in `securitySolution:defaultIndex`
|
||||
* Index patterns specified in the definitions of detection rules, except for indicator match rules
|
||||
* Index patterns specified in the data sources selector on various {security-app} pages
|
||||
|
||||
See {security-guide}/detection-engine-overview.html[Detections] for information
|
||||
on managing detection rules and alerts.
|
||||
{elastic-sec} does NOT support cold tier data for the following {es} indices:
|
||||
|
||||
[role="screenshot"]
|
||||
image::siem/images/detections-ui.png[]
|
||||
* Index patterns controlled by {elastic-sec}, including signals and list indices
|
||||
* Index patterns specified in indicator match rules
|
||||
|
||||
[float]
|
||||
[[cases-ui]]
|
||||
=== Cases (beta)
|
||||
Using cold tier data for unsupported indices may result in detection rule timeouts and overall performance degradation.
|
||||
|
||||
Cases are used to open and track security issues directly in Elastic Security.
|
||||
Cases list the original reporter and all users who contribute to a case
|
||||
(`participants`). Case comments support Markdown syntax, and allow linking to
|
||||
saved Timelines. Additionally, you can send cases to external systems from
|
||||
within Elastic Security.
|
||||
[discrete]
|
||||
[[self-protection]]
|
||||
==== Elastic Endpoint self-protection
|
||||
|
||||
For information about opening, updating, and closing cases, see
|
||||
{security-guide}/cases-overview.html[Cases] in the Elastic Security Guide.
|
||||
Self-protection means that {elastic-endpoint} has guards against users and attackers that may try to interfere with its functionality. This protection feature is consistently enhanced to prevent attackers who may attempt to use newer, more sophisticated tactics to interfere with the {elastic-endpoint}. Self-protection is enabled by default when {elastic-endpoint} installs on supported platforms, listed below.
|
||||
|
||||
[role="screenshot"]
|
||||
image::siem/images/cases-ui.png[]
|
||||
Self-protection is enabled on the following 64-bit Windows versions:
|
||||
|
||||
[float]
|
||||
[[timelines-ui]]
|
||||
=== Timeline
|
||||
* Windows 8.1
|
||||
* Windows 10
|
||||
* Windows Server 2012 R2
|
||||
* Windows Server 2016
|
||||
* Windows Server 2019
|
||||
|
||||
Timeline is your workspace for threat hunting and alert investigations.
|
||||
And on the following macOS versions:
|
||||
|
||||
[role="screenshot"]
|
||||
image::siem/images/timeline-ui.png[Elastic Security Timeline]
|
||||
* macOS 10.15 (Catalina)
|
||||
* macOS 11 (Big Sur)
|
||||
|
||||
You can drag objects of interest into the Timeline Event Viewer to create
|
||||
exactly the query filter you need. You can drag items from table widgets within
|
||||
Hosts and Network pages, or even from within Timeline itself.
|
||||
NOTE: Other Windows and macOS variants (and all Linux distributions) do not have self-protection.
|
||||
|
||||
A timeline is responsive and persists as you move through Elastic Security
|
||||
collecting data.
|
||||
For {stack} version >= 7.11.0, self-protection defines the following permissions:
|
||||
|
||||
For detailed information about Timeline, see
|
||||
{security-guide}/timelines-ui.html[Investigating events in Timeline].
|
||||
|
||||
[float]
|
||||
[[sample-workflow]]
|
||||
=== Sample workflow
|
||||
|
||||
An analyst notices a suspicious user ID that warrants further investigation, and
|
||||
clicks a URL that links to Elastic Security.
|
||||
|
||||
The analyst uses the tables, histograms, and filtering and search capabilities in
|
||||
Elastic Security to get to the bottom of the alert. The analyst can drag items of
|
||||
interest to Timeline for further analysis.
|
||||
|
||||
Within Timeline, the analyst can investigate further - drilling down,
|
||||
searching, and filtering - and add notes and pin items of interest.
|
||||
|
||||
The analyst can name the timeline, write summary notes, and share it with others
|
||||
if appropriate.
|
||||
* Users -- even Administrator/root -- *cannot* delete {elastic-endpoint} files (located at `c:\Program Files\Elastic\Endpoint` on Windows, and `/Library/Elastic/Endpoint` on macOS).
|
||||
* Users *cannot* terminate the {elastic-endpoint} program or service.
|
||||
* Administrator/root users *can* read the endpoint's files. On Windows, the easiest way to read Endpoint files is to start an Administrator `cmd.exe` prompt. On macOS, an Administrator can use the `sudo` command.
|
||||
* Administrator/root users *can* stop the {elastic-agent}'s service. On Windows, run the `sc stop "Elastic Agent"` command. On macOS, run the `sudo launchctl stop elastic-agent` command.
|
||||
|
||||
|
||||
[discrete]
|
||||
[[siem-integration]]
|
||||
=== Integration with other Elastic products
|
||||
|
||||
You can use {elastic-sec} with other Elastic products and features to help you
|
||||
identify and investigate suspicious activity:
|
||||
|
||||
* https://www.elastic.co/products/stack/machine-learning[{ml-cap}]
|
||||
* https://www.elastic.co/products/stack/alerting[Alerting]
|
||||
* https://www.elastic.co/products/stack/canvas[Canvas]
|
||||
|
||||
[discrete]
|
||||
[[data-sources]]
|
||||
=== APM transaction data sources
|
||||
|
||||
By default, {elastic-sec} monitors {apm-app-ref}/apm-getting-started.html[APM]
|
||||
`apm-*-transaction*` indices. To add additional APM indices, update the
|
||||
index patterns in the `securitySolution:defaultIndex` setting ({kib} -> Stack Management -> Advanced Settings -> `securitySolution:defaultIndex`).
|
||||
|
||||
[discrete]
|
||||
[[ecs-compliant-reqs]]
|
||||
=== ECS compliance data requirements
|
||||
|
||||
The {ecs-ref}[Elastic Common Schema (ECS)] defines a common set of fields to be used for
|
||||
storing event data in Elasticsearch. ECS helps users normalize their event data
|
||||
to better analyze, visualize, and correlate the data represented in their
|
||||
events. {elastic-sec} supports events and indicator index data from any ECS-compliant data source.
|
||||
|
||||
IMPORTANT: {elastic-sec} requires {ecs-ref}[ECS-compliant data]. If you use third-party data collectors to ship data to {es}, the data must be mapped to ECS.
|
||||
{security-guide}/siem-field-reference.html[Elastic Security ECS field reference] lists ECS fields used in {elastic-sec}.
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
[role="xpack"]
|
||||
[[domain-specific-rules]]
|
||||
== Domain-specific rules
|
||||
|
||||
For domain-specific rules, refer to the documentation for that app.
|
||||
{kib} supports these rules:
|
||||
|
||||
* {observability-guide}/create-alerts.html[Observability rules]
|
||||
* {security-guide}/prebuilt-rules.html[Security rules]
|
||||
* <<geo-alerting, Maps rules>>
|
||||
* {ml-docs}/ml-configuring-alerts.html[{ml-cap} rules] beta:[]
|
||||
|
||||
[NOTE]
|
||||
==============================================
|
||||
Some rule types are subscription features, while others are free features.
|
||||
For a comparison of the Elastic subscription levels,
|
||||
see {subscriptions}[the subscription page].
|
||||
==============================================
|
||||
|
||||
include::map-rules/geo-rule-types.asciidoc[]
|
|
@ -3,6 +3,5 @@ include::alerting-setup.asciidoc[]
|
|||
include::create-and-manage-rules.asciidoc[]
|
||||
include::defining-rules.asciidoc[]
|
||||
include::rule-management.asciidoc[]
|
||||
include::stack-rules.asciidoc[]
|
||||
include::domain-specific-rules.asciidoc[]
|
||||
include::rule-types.asciidoc[]
|
||||
include::alerting-troubleshooting.asciidoc[]
|
||||
|
|
56
docs/user/alerting/rule-types.asciidoc
Normal file
56
docs/user/alerting/rule-types.asciidoc
Normal file
|
@ -0,0 +1,56 @@
|
|||
[role="xpack"]
|
||||
[[rule-types]]
|
||||
== Rule types
|
||||
|
||||
A rule is a set of <<alerting-concepts-conditions, conditions>>, <<alerting-concepts-scheduling, schedules>>, and <<alerting-concepts-actions, actions>> that enable notifications. {kib} provides two types of rules: rules specific to the Elastic Stack and rules specific to a domain.
|
||||
|
||||
[NOTE]
|
||||
==============================================
|
||||
Some rule types are subscription features, while others are free features.
|
||||
For a comparison of the Elastic subscription levels,
|
||||
see {subscriptions}[the subscription page].
|
||||
==============================================
|
||||
|
||||
[float]
|
||||
[[stack-rules]]
|
||||
=== Stack rules
|
||||
|
||||
<<alert-management, Stack rules>> are built into {kib}. To access the *Stack Rules* feature and create and edit rules, users require the `all` privilege. See <<kibana-feature-privileges, feature privileges>> for more information.
|
||||
|
||||
[cols="2*<"]
|
||||
|===
|
||||
|
||||
| <<rule-type-index-threshold>>
|
||||
| Aggregate field values from documents using {es} queries, compare them to threshold values, and schedule actions to run when the thresholds are met.
|
||||
|
||||
| <<rule-type-es-query>>
|
||||
| Run a user-configured {es} query, compare the number of matches to a configured threshold, and schedule actions to run when the threshold condition is met.
|
||||
|
||||
|===
|
||||
|
||||
[float]
|
||||
[[domain-specific-rules]]
|
||||
=== Domain rules
|
||||
|
||||
Domain rules are registered by *Observability*, *Security*, <<maps, Maps>> and <<xpack-ml, Machine Learning>>.
|
||||
|
||||
[cols="2*<"]
|
||||
|===
|
||||
|
||||
| {observability-guide}/create-alerts.html[Observability rules]
|
||||
| Detect complex conditions in the *Logs*, *Metrics*, and *Uptime* apps.
|
||||
|
||||
| {security-guide}/prebuilt-rules.html[Security rules]
|
||||
| Detect suspicous source events with pre-built or custom rules and create alerts when a rule’s conditions are met.
|
||||
|
||||
| <<geo-alerting, Maps rules>>
|
||||
| Run an {es} query to determine if any documents are currently contained in any boundaries from a specified boundary index and generate alerts when a rule's conditions are met.
|
||||
|
||||
| {ml-docs}/ml-configuring-alerts.html[{ml-cap} rules] beta:[]
|
||||
| Run scheduled checks on an anomaly detection job to detect anomalies with certain conditions. If an anomaly meets the conditions, an alert is created and the associated action is triggered.
|
||||
|
||||
|===
|
||||
|
||||
include::rule-types/index-threshold.asciidoc[]
|
||||
include::rule-types/es-query.asciidoc[]
|
||||
include::rule-types/geo-rule-types.asciidoc[]
|
|
@ -1,16 +1,14 @@
|
|||
[role="xpack"]
|
||||
[[geo-alerting]]
|
||||
=== Geo rule type
|
||||
=== Tracking containment
|
||||
|
||||
Alerting now includes one additional stack rule: <<rule-type-tracking-containment>>.
|
||||
|
||||
As with other stack rules, you need `all` access to the *Stack Rules* feature
|
||||
to be able to create and edit a geo rule.
|
||||
See <<kibana-feature-privileges, feature privileges>> for more information on configuring roles that provide access to this feature.
|
||||
<<maps, Maps>> offers the Tracking containment rule type which runs an {es} query over indices to determine whether any
|
||||
documents are currently contained within any boundaries from the specified boundary index.
|
||||
In the event that an entity is contained within a boundary, an alert may be generated.
|
||||
|
||||
[float]
|
||||
==== Geo alerting requirements
|
||||
To create a *Tracking containment* rule, the following requirements must be present:
|
||||
==== Requirements
|
||||
To create a Tracking containment rule, the following requirements must be present:
|
||||
|
||||
- *Tracks index or index pattern*: An index containing a `geo_point` field, `date` field,
|
||||
and some form of entity identifier. An entity identifier is a `keyword` or `number`
|
||||
|
@ -29,22 +27,12 @@ than the current time minus the amount of the interval. If data older than
|
|||
`now - <current interval>` is ingested, it won't trigger a rule.
|
||||
|
||||
[float]
|
||||
==== Creating a geo rule
|
||||
Click the *Create* button in the <<create-and-manage-rules, rule management UI>>.
|
||||
Complete the <<defining-rules-general-details, general rule details>>.
|
||||
==== Create the rule
|
||||
|
||||
[role="screenshot"]
|
||||
image::user/alerting/images/alert-types-tracking-select.png[Choosing a tracking rule type]
|
||||
Fill in the <<defining-rules-general-details, rule details>>, then select Tracking containment.
|
||||
|
||||
[float]
|
||||
[[rule-type-tracking-containment]]
|
||||
==== Tracking containment
|
||||
The Tracking containment rule type runs an {es} query over indices, determining if any
|
||||
documents are currently contained within any boundaries from the specified boundary index.
|
||||
In the event that an entity is contained within a boundary, an alert may be generated.
|
||||
|
||||
[float]
|
||||
===== Defining the conditions
|
||||
==== Define the conditions
|
||||
Tracking containment rules have 3 clauses that define the condition to detect,
|
||||
as well as 2 Kuery bars used to provide additional filtering context for each of the indices.
|
||||
|
||||
|
@ -61,6 +49,9 @@ Index (Boundary):: This clause requires an *index or index pattern*, a *`geo_sha
|
|||
identifying boundaries, and an optional *Human-readable boundary name* for better alerting
|
||||
messages.
|
||||
|
||||
[float]
|
||||
==== Add action
|
||||
|
||||
Conditions for how a rule is tracked can be specified uniquely for each individual action.
|
||||
A rule can be triggered either when a containment condition is met or when an entity
|
||||
is no longer contained.
|
|
@ -1,27 +0,0 @@
|
|||
[role="xpack"]
|
||||
[[stack-rules]]
|
||||
== Stack rule types
|
||||
|
||||
Kibana provides two types of rules:
|
||||
|
||||
* Stack rules, which are built into {kib}
|
||||
* <<domain-specific-rules, Domain-specific rules>>, which are registered by {kib} apps.
|
||||
|
||||
{kib} provides two stack rules:
|
||||
|
||||
* <<rule-type-index-threshold>>
|
||||
* <<rule-type-es-query>>
|
||||
|
||||
Users require the `all` privilege to access the *Stack Rules* feature and create and edit rules.
|
||||
See <<kibana-feature-privileges, feature privileges>> for more information.
|
||||
|
||||
[NOTE]
|
||||
==============================================
|
||||
Some rule types are subscription features, while others are free features.
|
||||
For a comparison of the Elastic subscription levels,
|
||||
see {subscriptions}[the subscription page].
|
||||
==============================================
|
||||
|
||||
|
||||
include::stack-rules/index-threshold.asciidoc[]
|
||||
include::stack-rules/es-query.asciidoc[]
|
|
@ -23,7 +23,7 @@ This reference can help simplify the comparison if you need a specific feature.
|
|||
|
||||
| Table with summary row
|
||||
^| X
|
||||
|
|
||||
^| X
|
||||
|
|
||||
|
|
||||
|
|
||||
|
@ -65,7 +65,7 @@ This reference can help simplify the comparison if you need a specific feature.
|
|||
|
||||
| Heat map
|
||||
^| X
|
||||
|
|
||||
^| X
|
||||
|
|
||||
|
|
||||
^| X
|
||||
|
@ -190,8 +190,8 @@ For information about {es} metrics aggregations, refer to {ref}/search-aggregati
|
|||
|
||||
| Metrics with filters
|
||||
|
|
||||
^| X
|
||||
|
|
||||
^| X
|
||||
|
|
||||
|
||||
| Average
|
||||
|
@ -333,7 +333,7 @@ build their advanced visualization.
|
|||
|
||||
| Math on aggregated data
|
||||
|
|
||||
|
|
||||
^| X
|
||||
^| X
|
||||
^| X
|
||||
^| X
|
||||
|
@ -352,6 +352,13 @@ build their advanced visualization.
|
|||
^| X
|
||||
^| X
|
||||
|
||||
| Time shifts
|
||||
|
|
||||
^| X
|
||||
^| X
|
||||
^| X
|
||||
^| X
|
||||
|
||||
| Fully custom {es} queries
|
||||
|
|
||||
|
|
||||
|
|
|
@ -30,13 +30,16 @@
|
|||
[[lens-editor]]
|
||||
=== Lens
|
||||
|
||||
*Lens* is the drag and drop editor that creates visualizations of your data.
|
||||
*Lens* is the drag and drop editor that creates visualizations of your data, recommended for most
|
||||
users.
|
||||
|
||||
With *Lens*, you can:
|
||||
|
||||
* Use the automatically generated suggestions to change the visualization type.
|
||||
* Create visualizations with multiple layers and indices.
|
||||
* Change the aggregation and labels to customize the data.
|
||||
* Perform math on aggregations using *Formula*.
|
||||
* Use time shifts to compare data at two times, such as month over month.
|
||||
|
||||
[role="screenshot"]
|
||||
image:dashboard/images/lens_advanced_1_1.png[Lens]
|
||||
|
|
BIN
docs/user/dashboard/images/lens_time_shift.png
Normal file
BIN
docs/user/dashboard/images/lens_time_shift.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 130 KiB |
|
@ -295,6 +295,41 @@ image::images/lens_advanced_5_2.png[Line chart with cumulative sum of orders mad
|
|||
|
||||
. Click *Save and return*.
|
||||
|
||||
[discrete]
|
||||
[[compare-time-ranges]]
|
||||
=== Compare time ranges
|
||||
|
||||
*Lens* allows you to compare the currently selected time range with historical data using the *Time shift* option.
|
||||
|
||||
Time shifts can be used on any metric. The special shift *previous* will show the time window preceding the currently selected one, spanning the same duration.
|
||||
For example, if *Last 7 days* is selected in the time filter, *previous* will show data from 14 days ago to 7 days ago.
|
||||
|
||||
If multiple time shifts are used in a single chart, a multiple of the date histogram interval should be chosen - otherwise data points might not line up in the chart and empty spots can occur.
|
||||
For example, if a daily interval is used, shifting one series by *36h*, and another one by *1d*, is not recommended. In this scenario, either reduce the interval to *12h*, or create two separate charts.
|
||||
|
||||
To compare current sales numbers with sales from a week ago, follow these steps:
|
||||
|
||||
. Open *Lens*.
|
||||
|
||||
. From the *Chart Type* dropdown, select *Line*.
|
||||
|
||||
. From the *Available fields* list, drag and drop *Records* to the visualization builder.
|
||||
|
||||
. Copy the *Count of Records* series by dragging it to the empty drop target of the *Vertical axis* dimension group (*Drop a field or click to add*)
|
||||
|
||||
. Shift the second *Count of Records* series by one week to do a week-over-week comparison
|
||||
|
||||
.. Click the new *Count of Records [1]* dimension
|
||||
|
||||
.. Click *Add advanced options* below the field selector
|
||||
|
||||
.. Click *Time shift*
|
||||
|
||||
.. Click the *1 week* option. You can also define custom shifts by typing amount followed by time unit (like *1w* for a one week shift), then hit enter.
|
||||
|
||||
[role="screenshot"]
|
||||
image::images/lens_time_shift.png[Line chart with week-over-week sales comparison]
|
||||
|
||||
[discrete]
|
||||
[[view-customers-over-time-by-continents]]
|
||||
=== View table of customers by category over time
|
||||
|
|
|
@ -300,7 +300,9 @@ image::images/lens_missing_values_strategy.png[Lens Missing values strategies me
|
|||
[[is-it-possible-to-change-the-scale-of-Y-axis]]
|
||||
===== Is it possible to statically define the scale of the y-axis in a visualization?
|
||||
|
||||
The ability to start the y-axis from another value than 0, or use a logarithmic scale, is unsupported in *Lens*.
|
||||
Yes, you can set the bounds on bar, line and area chart types in Lens, unless using percentage mode. Bar
|
||||
and area charts must have 0 in the bounds. Logarithmic scales are unsupported in *Lens*.
|
||||
To set the y-axis bounds, click the icon representing the axis you want to customize.
|
||||
|
||||
[float]
|
||||
[[is-it-possible-to-have-pagination-for-datatable]]
|
||||
|
@ -313,3 +315,22 @@ Pagination in a data table is unsupported in *Lens*. However, the <<types-of-vis
|
|||
===== Is it possible to have more than one y-axis scale in visualizations?
|
||||
|
||||
*Lens* lets you pick, for each Y dimension, up to two distinct axis: *left* and *right*. Each axis can have a different scale.
|
||||
|
||||
[float]
|
||||
[[why-is-my-value-with-the-right-color-using-value-based-coloring]]
|
||||
===== Why is my value with the incorrect color when using value-based coloring?
|
||||
|
||||
There could be various reasons for a specific value in the table to have a different color than expected.
|
||||
|
||||
Here's a short list of few different aspects to check:
|
||||
* Make sure the value falls within the desired color stop value defined in the panel. Color stop values are "inclusive".
|
||||
|
||||
* Make sure you have the right value precision setup: value formatters could round the numeric values up or down.
|
||||
|
||||
* Make sure the right color continuity option is selected: if the number is below the first color stop value, a continuity of type `Below` or `Above and below range` is required.
|
||||
|
||||
* The default values set by the Value type are based on the current data range displayed on the data table.
|
||||
|
||||
** If a custom `Number` configuration is used, check that the color stop values are covering the current data range.
|
||||
|
||||
** If a `Percent` configuration is used, and the data range changes, the colors displayed are affected.
|
||||
|
|
|
@ -122,8 +122,6 @@ active in case of failure from the currently used instance.
|
|||
Kibana can be configured to connect to multiple Elasticsearch nodes in the same cluster. In situations where a node becomes unavailable,
|
||||
Kibana will transparently connect to an available node and continue operating. Requests to available hosts will be routed in a round robin fashion.
|
||||
|
||||
Currently the Console application is limited to connecting to the first node listed.
|
||||
|
||||
In kibana.yml:
|
||||
[source,js]
|
||||
--------
|
||||
|
|
|
@ -92,6 +92,32 @@ the first time Kibana starts when verbose logging is enabled.
|
|||
Whenever possible, a Reporting error message tries to be as self-explanatory as possible. Here are some error messages you might encounter,
|
||||
along with the solution.
|
||||
|
||||
[float]
|
||||
==== `StatusCodeError: [version_conflict_engine_exception]`
|
||||
If you are running multiple instances of {kib} in a cluster, the instances share the work of executing report jobs to evenly distribute
|
||||
the work load. Each instance searches the reporting index for "pending" jobs that the user has requested. It is possible for
|
||||
multiple instances to find the same job in these searches. Only the instance that successfully updated the job status to
|
||||
"processing" will actually execute the report job. The other instances that unsuccessfully tried to make the same update will log
|
||||
something similar to this:
|
||||
|
||||
[source]
|
||||
--------------------------------------------------------------------------------
|
||||
StatusCodeError: [version_conflict_engine_exception] [...]: version conflict, required seqNo [6124], primary term [1]. current document has seqNo [6125] and primary term [1], with { ... }
|
||||
status: 409,
|
||||
displayName: 'Conflict',
|
||||
path: '/.reporting-...',
|
||||
body: {
|
||||
error: {
|
||||
type: 'version_conflict_engine_exception',
|
||||
reason: '[...]: version conflict, required seqNo [6124], primary term [1]. current document has seqNo [6125] and primary term [1]',
|
||||
},
|
||||
},
|
||||
statusCode: 409
|
||||
}
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
These messages alone don't indicate a problem. They show normal events that happen in a healthy system.
|
||||
|
||||
[float]
|
||||
==== Max attempts reached
|
||||
There are two primary causes of this error:
|
||||
|
|
|
@ -156,6 +156,7 @@
|
|||
"@kbn/ui-framework": "link:packages/kbn-ui-framework",
|
||||
"@kbn/ui-shared-deps": "link:packages/kbn-ui-shared-deps",
|
||||
"@kbn/utility-types": "link:bazel-bin/packages/kbn-utility-types",
|
||||
"@kbn/common-utils": "link:bazel-bin/packages/kbn-common-utils",
|
||||
"@kbn/utils": "link:bazel-bin/packages/kbn-utils",
|
||||
"@loaders.gl/core": "^2.3.1",
|
||||
"@loaders.gl/json": "^2.3.1",
|
||||
|
@ -215,7 +216,6 @@
|
|||
"cytoscape-dagre": "^2.2.2",
|
||||
"d3": "3.5.17",
|
||||
"d3-array": "1.2.4",
|
||||
"d3-cloud": "1.2.5",
|
||||
"d3-scale": "1.0.7",
|
||||
"d3-shape": "^1.1.0",
|
||||
"d3-time": "^1.1.0",
|
||||
|
@ -671,7 +671,7 @@
|
|||
"callsites": "^3.1.0",
|
||||
"chai": "3.5.0",
|
||||
"chance": "1.0.18",
|
||||
"chromedriver": "^90.0.0",
|
||||
"chromedriver": "^91.0.1",
|
||||
"clean-webpack-plugin": "^3.0.0",
|
||||
"cmd-shim": "^2.1.0",
|
||||
"compression-webpack-plugin": "^4.0.0",
|
||||
|
@ -839,4 +839,4 @@
|
|||
"yargs": "^15.4.1",
|
||||
"zlib": "^1.0.5"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -12,6 +12,7 @@ filegroup(
|
|||
"//packages/kbn-apm-utils:build",
|
||||
"//packages/kbn-babel-code-parser:build",
|
||||
"//packages/kbn-babel-preset:build",
|
||||
"//packages/kbn-common-utils:build",
|
||||
"//packages/kbn-config:build",
|
||||
"//packages/kbn-config-schema:build",
|
||||
"//packages/kbn-crypto:build",
|
||||
|
|
|
@ -14,6 +14,7 @@ export interface SpanOptions {
|
|||
type?: string;
|
||||
subtype?: string;
|
||||
labels?: Record<string, string>;
|
||||
intercept?: boolean;
|
||||
}
|
||||
|
||||
type Span = Exclude<typeof agent.currentSpan, undefined | null>;
|
||||
|
@ -36,23 +37,27 @@ export async function withSpan<T>(
|
|||
): Promise<T> {
|
||||
const options = parseSpanOptions(optionsOrName);
|
||||
|
||||
const { name, type, subtype, labels } = options;
|
||||
const { name, type, subtype, labels, intercept } = options;
|
||||
|
||||
if (!agent.isStarted()) {
|
||||
return cb();
|
||||
}
|
||||
|
||||
let createdSpan: Span | undefined;
|
||||
|
||||
// When a span starts, it's marked as the active span in its context.
|
||||
// When it ends, it's not untracked, which means that if a span
|
||||
// starts directly after this one ends, the newly started span is a
|
||||
// child of this span, even though it should be a sibling.
|
||||
// To mitigate this, we queue a microtask by awaiting a promise.
|
||||
await Promise.resolve();
|
||||
if (!intercept) {
|
||||
await Promise.resolve();
|
||||
|
||||
const span = agent.startSpan(name);
|
||||
createdSpan = agent.startSpan(name) ?? undefined;
|
||||
|
||||
if (!span) {
|
||||
return cb();
|
||||
if (!createdSpan) {
|
||||
return cb();
|
||||
}
|
||||
}
|
||||
|
||||
// If a span is created in the same context as the span that we just
|
||||
|
@ -61,33 +66,51 @@ export async function withSpan<T>(
|
|||
// mitigate this we create a new context.
|
||||
|
||||
return runInNewContext(() => {
|
||||
const promise = cb(createdSpan);
|
||||
|
||||
let span: Span | undefined = createdSpan;
|
||||
|
||||
if (intercept) {
|
||||
span = agent.currentSpan ?? undefined;
|
||||
}
|
||||
|
||||
if (!span) {
|
||||
return promise;
|
||||
}
|
||||
|
||||
const targetedSpan = span;
|
||||
|
||||
if (name) {
|
||||
targetedSpan.name = name;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
if (type) {
|
||||
span.type = type;
|
||||
targetedSpan.type = type;
|
||||
}
|
||||
if (subtype) {
|
||||
span.subtype = subtype;
|
||||
targetedSpan.subtype = subtype;
|
||||
}
|
||||
|
||||
if (labels) {
|
||||
span.addLabels(labels);
|
||||
targetedSpan.addLabels(labels);
|
||||
}
|
||||
|
||||
return cb(span)
|
||||
return promise
|
||||
.then((res) => {
|
||||
if (!span.outcome || span.outcome === 'unknown') {
|
||||
span.outcome = 'success';
|
||||
if (!targetedSpan.outcome || targetedSpan.outcome === 'unknown') {
|
||||
targetedSpan.outcome = 'success';
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch((err) => {
|
||||
if (!span.outcome || span.outcome === 'unknown') {
|
||||
span.outcome = 'failure';
|
||||
if (!targetedSpan.outcome || targetedSpan.outcome === 'unknown') {
|
||||
targetedSpan.outcome = 'failure';
|
||||
}
|
||||
throw err;
|
||||
})
|
||||
.finally(() => {
|
||||
span.end();
|
||||
targetedSpan.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
82
packages/kbn-common-utils/BUILD.bazel
Normal file
82
packages/kbn-common-utils/BUILD.bazel
Normal file
|
@ -0,0 +1,82 @@
|
|||
load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
|
||||
|
||||
PKG_BASE_NAME = "kbn-common-utils"
|
||||
PKG_REQUIRE_NAME = "@kbn/common-utils"
|
||||
|
||||
SOURCE_FILES = glob(
|
||||
[
|
||||
"src/**/*.ts",
|
||||
],
|
||||
exclude = ["**/*.test.*"],
|
||||
)
|
||||
|
||||
SRCS = SOURCE_FILES
|
||||
|
||||
filegroup(
|
||||
name = "srcs",
|
||||
srcs = SRCS,
|
||||
)
|
||||
|
||||
NPM_MODULE_EXTRA_FILES = [
|
||||
"package.json",
|
||||
"README.md"
|
||||
]
|
||||
|
||||
SRC_DEPS = [
|
||||
"//packages/kbn-config-schema",
|
||||
"@npm//load-json-file",
|
||||
"@npm//tslib",
|
||||
]
|
||||
|
||||
TYPES_DEPS = [
|
||||
"@npm//@types/jest",
|
||||
"@npm//@types/node",
|
||||
]
|
||||
|
||||
DEPS = SRC_DEPS + TYPES_DEPS
|
||||
|
||||
ts_config(
|
||||
name = "tsconfig",
|
||||
src = "tsconfig.json",
|
||||
deps = [
|
||||
"//:tsconfig.base.json",
|
||||
],
|
||||
)
|
||||
|
||||
ts_project(
|
||||
name = "tsc",
|
||||
args = ['--pretty'],
|
||||
srcs = SRCS,
|
||||
deps = DEPS,
|
||||
declaration = True,
|
||||
declaration_map = True,
|
||||
incremental = True,
|
||||
out_dir = "target",
|
||||
source_map = True,
|
||||
root_dir = "src",
|
||||
tsconfig = ":tsconfig",
|
||||
)
|
||||
|
||||
js_library(
|
||||
name = PKG_BASE_NAME,
|
||||
srcs = NPM_MODULE_EXTRA_FILES,
|
||||
deps = DEPS + [":tsc"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm(
|
||||
name = "npm_module",
|
||||
deps = [
|
||||
":%s" % PKG_BASE_NAME,
|
||||
]
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build",
|
||||
srcs = [
|
||||
":npm_module",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
3
packages/kbn-common-utils/README.md
Normal file
3
packages/kbn-common-utils/README.md
Normal file
|
@ -0,0 +1,3 @@
|
|||
# @kbn/common-utils
|
||||
|
||||
Shared common (client and server sie) utilities shared across packages and plugins.
|
|
@ -6,12 +6,8 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { LoadingStatus, LoadingStatusState } from '../../context_app_state';
|
||||
|
||||
export function createInitialLoadingStatusState(): LoadingStatusState {
|
||||
return {
|
||||
anchor: LoadingStatus.UNINITIALIZED,
|
||||
predecessors: LoadingStatus.UNINITIALIZED,
|
||||
successors: LoadingStatus.UNINITIALIZED,
|
||||
};
|
||||
}
|
||||
module.exports = {
|
||||
preset: '@kbn/test',
|
||||
rootDir: '../..',
|
||||
roots: ['<rootDir>/packages/kbn-common-utils'],
|
||||
};
|
9
packages/kbn-common-utils/package.json
Normal file
9
packages/kbn-common-utils/package.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"name": "@kbn/common-utils",
|
||||
"main": "./target/index.js",
|
||||
"browser": "./target/index.js",
|
||||
"types": "./target/index.d.ts",
|
||||
"version": "1.0.0",
|
||||
"license": "SSPL-1.0 OR Elastic License 2.0",
|
||||
"private": true
|
||||
}
|
|
@ -6,5 +6,4 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { QueryActionsProvider } from './actions';
|
||||
export { createInitialLoadingStatusState } from './state';
|
||||
export * from './json';
|
|
@ -6,6 +6,4 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { getQueryParameterActions } from './actions';
|
||||
export { MAX_CONTEXT_SIZE, MIN_CONTEXT_SIZE, QUERY_PARAMETER_KEYS } from './constants';
|
||||
export { createInitialQueryParametersState } from './state';
|
||||
export { JsonArray, JsonValue, JsonObject } from './typed_json';
|
18
packages/kbn-common-utils/tsconfig.json
Normal file
18
packages/kbn-common-utils/tsconfig.json
Normal file
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"incremental": true,
|
||||
"outDir": "target",
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"sourceRoot": "../../../../packages/kbn-common-utils/src",
|
||||
"types": [
|
||||
"jest",
|
||||
"node"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
|
@ -187,6 +187,19 @@ export const schema = Joi.object()
|
|||
sourceArgs: Joi.array(),
|
||||
serverArgs: Joi.array(),
|
||||
installDir: Joi.string(),
|
||||
/** Options for how FTR should execute and interact with Kibana */
|
||||
runOptions: Joi.object()
|
||||
.keys({
|
||||
/**
|
||||
* Log message to wait for before initiating tests, defaults to waiting for Kibana status to be `available`.
|
||||
* Note that this log message must not be filtered out by the current logging config, for example by the
|
||||
* log level. If needed, you can adjust the logging level via `kbnTestServer.serverArgs`.
|
||||
*/
|
||||
wait: Joi.object()
|
||||
.regex()
|
||||
.default(/Kibana is now available/),
|
||||
})
|
||||
.default(),
|
||||
})
|
||||
.default(),
|
||||
|
||||
|
|
|
@ -28,6 +28,7 @@ function extendNodeOptions(installDir) {
|
|||
|
||||
export async function runKibanaServer({ procs, config, options }) {
|
||||
const { installDir } = options;
|
||||
const runOptions = config.get('kbnTestServer.runOptions');
|
||||
|
||||
await procs.run('kibana', {
|
||||
cmd: getKibanaCmd(installDir),
|
||||
|
@ -38,7 +39,7 @@ export async function runKibanaServer({ procs, config, options }) {
|
|||
...extendNodeOptions(installDir),
|
||||
},
|
||||
cwd: installDir || KIBANA_ROOT,
|
||||
wait: /\[Kibana\]\[http\] http server running/,
|
||||
wait: runOptions.wait,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -97,8 +97,6 @@ export async function runTests(options) {
|
|||
try {
|
||||
es = await runElasticsearch({ config, options: opts });
|
||||
await runKibanaServer({ procs, config, options: opts });
|
||||
// workaround until https://github.com/elastic/kibana/issues/89828 is addressed
|
||||
await delay(5000);
|
||||
await runFtr({ configPath, options: opts });
|
||||
} finally {
|
||||
try {
|
||||
|
@ -164,7 +162,3 @@ async function silence(log, milliseconds) {
|
|||
)
|
||||
.toPromise();
|
||||
}
|
||||
|
||||
async function delay(ms) {
|
||||
await new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
|
|
@ -8,18 +8,39 @@
|
|||
|
||||
import React, { Component, ComponentType } from 'react';
|
||||
import { MemoryRouter, Route, withRouter } from 'react-router-dom';
|
||||
import * as H from 'history';
|
||||
import { History, LocationDescriptor } from 'history';
|
||||
|
||||
export const WithMemoryRouter = (initialEntries: string[] = ['/'], initialIndex: number = 0) => (
|
||||
WrappedComponent: ComponentType
|
||||
) => (props: any) => (
|
||||
const stringifyPath = (path: LocationDescriptor): string => {
|
||||
if (typeof path === 'string') {
|
||||
return path;
|
||||
}
|
||||
|
||||
return path.pathname || '/';
|
||||
};
|
||||
|
||||
const locationDescriptorToRoutePath = (
|
||||
paths: LocationDescriptor | LocationDescriptor[]
|
||||
): string | string[] => {
|
||||
if (Array.isArray(paths)) {
|
||||
return paths.map((path: LocationDescriptor) => {
|
||||
return stringifyPath(path);
|
||||
});
|
||||
}
|
||||
|
||||
return stringifyPath(paths);
|
||||
};
|
||||
|
||||
export const WithMemoryRouter = (
|
||||
initialEntries: LocationDescriptor[] = ['/'],
|
||||
initialIndex: number = 0
|
||||
) => (WrappedComponent: ComponentType) => (props: any) => (
|
||||
<MemoryRouter initialEntries={initialEntries} initialIndex={initialIndex}>
|
||||
<WrappedComponent {...props} />
|
||||
</MemoryRouter>
|
||||
);
|
||||
|
||||
export const WithRoute = (
|
||||
componentRoutePath: string | string[] = '/',
|
||||
componentRoutePath: LocationDescriptor | LocationDescriptor[] = ['/'],
|
||||
onRouter = (router: any) => {}
|
||||
) => (WrappedComponent: ComponentType) => {
|
||||
// Create a class component that will catch the router
|
||||
|
@ -40,16 +61,16 @@ export const WithRoute = (
|
|||
|
||||
return (props: any) => (
|
||||
<Route
|
||||
path={componentRoutePath}
|
||||
path={locationDescriptorToRoutePath(componentRoutePath)}
|
||||
render={(routerProps) => <CatchRouter {...routerProps} {...props} />}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
interface Router {
|
||||
history: Partial<H.History>;
|
||||
history: Partial<History>;
|
||||
route: {
|
||||
location: H.Location;
|
||||
location: LocationDescriptor;
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
|
||||
import { Store } from 'redux';
|
||||
import { ReactWrapper } from 'enzyme';
|
||||
import { LocationDescriptor } from 'history';
|
||||
|
||||
export type SetupFunc<T> = (props?: any) => TestBed<T> | Promise<TestBed<T>>;
|
||||
|
||||
|
@ -161,11 +162,11 @@ export interface MemoryRouterConfig {
|
|||
/** Flag to add or not the `MemoryRouter`. If set to `false`, there won't be any router and the component won't be wrapped on a `<Route />`. */
|
||||
wrapComponent?: boolean;
|
||||
/** The React Router **initial entries** setting ([see documentation](https://github.com/ReactTraining/react-router/blob/master/packages/react-router/docs/api/MemoryRouter.md)) */
|
||||
initialEntries?: string[];
|
||||
initialEntries?: LocationDescriptor[];
|
||||
/** The React Router **initial index** setting ([see documentation](https://github.com/ReactTraining/react-router/blob/master/packages/react-router/docs/api/MemoryRouter.md)) */
|
||||
initialIndex?: number;
|
||||
/** The route **path** for the mounted component (defaults to `"/"`) */
|
||||
componentRoutePath?: string | string[];
|
||||
componentRoutePath?: LocationDescriptor | LocationDescriptor[];
|
||||
/** A callBack that will be called with the React Router instance once mounted */
|
||||
onRouter?: (router: any) => void;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { parseArchive } from './parse_archive';
|
||||
|
||||
jest.mock('fs/promises', () => ({
|
||||
readFile: jest.fn(),
|
||||
}));
|
||||
|
||||
const mockReadFile = jest.requireMock('fs/promises').readFile;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('parses archives with \\n', async () => {
|
||||
mockReadFile.mockResolvedValue(
|
||||
`{
|
||||
"foo": "abc"
|
||||
}\n\n{
|
||||
"foo": "xyz"
|
||||
}`
|
||||
);
|
||||
|
||||
const archive = await parseArchive('mock');
|
||||
expect(archive).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"foo": "abc",
|
||||
},
|
||||
Object {
|
||||
"foo": "xyz",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('parses archives with \\r\\n', async () => {
|
||||
mockReadFile.mockResolvedValue(
|
||||
`{
|
||||
"foo": "123"
|
||||
}\r\n\r\n{
|
||||
"foo": "456"
|
||||
}`
|
||||
);
|
||||
|
||||
const archive = await parseArchive('mock');
|
||||
expect(archive).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"foo": "123",
|
||||
},
|
||||
Object {
|
||||
"foo": "456",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
|
@ -6,19 +6,17 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export function createInitialQueryParametersState(
|
||||
defaultStepSize: number = 5,
|
||||
tieBreakerField: string = '_doc'
|
||||
) {
|
||||
return {
|
||||
anchorId: null,
|
||||
columns: [],
|
||||
defaultStepSize,
|
||||
filters: [],
|
||||
indexPatternId: null,
|
||||
predecessorCount: 0,
|
||||
successorCount: 0,
|
||||
sort: [],
|
||||
tieBreakerField,
|
||||
};
|
||||
import Fs from 'fs/promises';
|
||||
|
||||
export interface SavedObject {
|
||||
id: string;
|
||||
type: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export async function parseArchive(path: string): Promise<SavedObject[]> {
|
||||
return (await Fs.readFile(path, 'utf-8'))
|
||||
.split(/\r?\n\r?\n/)
|
||||
.filter((line) => !!line)
|
||||
.map((line) => JSON.parse(line));
|
||||
}
|
|
@ -16,25 +16,12 @@ import { ToolingLog, isAxiosResponseError, createFailError, REPO_ROOT } from '@k
|
|||
|
||||
import { KbnClientRequester, uriencode, ReqOptions } from './kbn_client_requester';
|
||||
import { KbnClientSavedObjects } from './kbn_client_saved_objects';
|
||||
import { parseArchive } from './import_export/parse_archive';
|
||||
|
||||
interface ImportApiResponse {
|
||||
success: boolean;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface SavedObject {
|
||||
id: string;
|
||||
type: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
async function parseArchive(path: string): Promise<SavedObject[]> {
|
||||
return (await Fs.readFile(path, 'utf-8'))
|
||||
.split('\n\n')
|
||||
.filter((line) => !!line)
|
||||
.map((line) => JSON.parse(line));
|
||||
}
|
||||
|
||||
export class KbnClientImportExport {
|
||||
constructor(
|
||||
public readonly log: ToolingLog,
|
||||
|
|
|
@ -7,12 +7,11 @@
|
|||
*/
|
||||
|
||||
const { get } = require('lodash');
|
||||
const memoizeOne = require('memoize-one');
|
||||
// eslint-disable-next-line import/no-unresolved
|
||||
const { parse: parseFn } = require('../grammar');
|
||||
const { functions: includedFunctions } = require('./functions');
|
||||
|
||||
module.exports = { parse, evaluate, interpret };
|
||||
|
||||
function parse(input, options) {
|
||||
if (input == null) {
|
||||
throw new Error('Missing expression');
|
||||
|
@ -29,9 +28,11 @@ function parse(input, options) {
|
|||
}
|
||||
}
|
||||
|
||||
const memoizedParse = memoizeOne(parse);
|
||||
|
||||
function evaluate(expression, scope = {}, injectedFunctions = {}) {
|
||||
scope = scope || {};
|
||||
return interpret(parse(expression), scope, injectedFunctions);
|
||||
return interpret(memoizedParse(expression), scope, injectedFunctions);
|
||||
}
|
||||
|
||||
function interpret(node, scope, injectedFunctions) {
|
||||
|
@ -79,3 +80,5 @@ function isOperable(args) {
|
|||
return typeof arg === 'number' && !isNaN(arg);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { parse: memoizedParse, evaluate, interpret };
|
||||
|
|
|
@ -39,7 +39,7 @@
|
|||
packageNames: ['@elastic/charts'],
|
||||
reviewers: ['markov00', 'nickofthyme'],
|
||||
matchBaseBranches: ['master'],
|
||||
labels: ['release_note:skip', 'v8.0.0', 'v7.14.0'],
|
||||
labels: ['release_note:skip', 'v8.0.0', 'v7.14.0', 'auto-backport'],
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
|
|
|
@ -7,8 +7,6 @@
|
|||
A new Kibana plugin exposing an API on both public and server side, to allow consumers to search for various objects and
|
||||
register result providers.
|
||||
|
||||
Note: whether this will be an oss or xpack plugin still depends on https://github.com/elastic/dev/issues/1404.
|
||||
|
||||
# Basic example
|
||||
|
||||
- registering a result provider:
|
||||
|
@ -43,8 +41,7 @@ Kibana should do its best to assist users searching for and navigating to the va
|
|||
|
||||
We should expose an API to make it possible for plugins to search for the various objects present on a Kibana instance.
|
||||
|
||||
The first consumer of this API will be the global search bar [#57576](https://github.com/elastic/kibana/issues/57576). This API
|
||||
should still be generic to answer similar needs from any other consumer, either client or server side.
|
||||
The first consumer of this API will be the global search bar [#57576](https://github.com/elastic/kibana/issues/57576). This API should still be generic to answer similar needs from any other consumer, either client or server side.
|
||||
|
||||
# Detailed design
|
||||
|
||||
|
@ -84,7 +81,7 @@ interface GlobalSearchProviderFindOptions {
|
|||
aborted$: Observable<void>;
|
||||
/**
|
||||
* The total maximum number of results (including all batches / emissions) that should be returned by the provider for a given `find` request.
|
||||
* Any result emitted exceeding this quota will be ignored by the service and not emitted to the consumer.
|
||||
* Any result emitted exceeding this quota will be ignored by the service and not emitted to the consumer.
|
||||
*/
|
||||
maxResults: number;
|
||||
}
|
||||
|
@ -462,8 +459,8 @@ search(
|
|||
|
||||
Notes:
|
||||
|
||||
- The example implementation is not streaming results from the server, meaning that all results from server-side
|
||||
registered providers will all be fetched and emitted in a single batch. Ideally, we would leverage the `bfetch` plugin
|
||||
- The example implementation is not streaming results from the server, meaning that all results from server-side
|
||||
registered providers will all be fetched and emitted in a single batch. Ideally, we would leverage the `bfetch` plugin
|
||||
to stream the results to the client instead.
|
||||
|
||||
### results sorting
|
||||
|
|
|
@ -497,6 +497,56 @@ describe('#start()', () => {
|
|||
expect(getUrlForApp('app1', { path: 'deep/link///' })).toBe('/base-path/app/app1/deep/link');
|
||||
});
|
||||
|
||||
describe('deepLinkId option', () => {
|
||||
it('ignores the deepLinkId parameter if it is unknown', async () => {
|
||||
service.setup(setupDeps);
|
||||
|
||||
service.setup(setupDeps);
|
||||
const { getUrlForApp } = await service.start(startDeps);
|
||||
|
||||
expect(getUrlForApp('app1', { deepLinkId: 'unkown-deep-link' })).toBe(
|
||||
'/base-path/app/app1'
|
||||
);
|
||||
});
|
||||
|
||||
it('creates URLs with deepLinkId parameter', async () => {
|
||||
const { register } = service.setup(setupDeps);
|
||||
|
||||
register(
|
||||
Symbol(),
|
||||
createApp({
|
||||
id: 'app1',
|
||||
appRoute: '/custom/app-path',
|
||||
deepLinks: [{ id: 'dl1', title: 'deep link 1', path: '/deep-link' }],
|
||||
})
|
||||
);
|
||||
|
||||
const { getUrlForApp } = await service.start(startDeps);
|
||||
|
||||
expect(getUrlForApp('app1', { deepLinkId: 'dl1' })).toBe(
|
||||
'/base-path/custom/app-path/deep-link'
|
||||
);
|
||||
});
|
||||
|
||||
it('creates URLs with deepLinkId and path parameters', async () => {
|
||||
const { register } = service.setup(setupDeps);
|
||||
|
||||
register(
|
||||
Symbol(),
|
||||
createApp({
|
||||
id: 'app1',
|
||||
appRoute: '/custom/app-path',
|
||||
deepLinks: [{ id: 'dl1', title: 'deep link 1', path: '/deep-link' }],
|
||||
})
|
||||
);
|
||||
|
||||
const { getUrlForApp } = await service.start(startDeps);
|
||||
expect(getUrlForApp('app1', { deepLinkId: 'dl1', path: 'foo/bar' })).toBe(
|
||||
'/base-path/custom/app-path/deep-link/foo/bar'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('does not append trailing slash if hash is provided in path parameter', async () => {
|
||||
service.setup(setupDeps);
|
||||
const { getUrlForApp } = await service.start(startDeps);
|
||||
|
|
|
@ -282,8 +282,19 @@ export class ApplicationService {
|
|||
history: this.history!,
|
||||
getUrlForApp: (
|
||||
appId,
|
||||
{ path, absolute = false }: { path?: string; absolute?: boolean } = {}
|
||||
{
|
||||
path,
|
||||
absolute = false,
|
||||
deepLinkId,
|
||||
}: { path?: string; absolute?: boolean; deepLinkId?: string } = {}
|
||||
) => {
|
||||
if (deepLinkId) {
|
||||
const deepLinkPath = getAppDeepLinkPath(availableMounters, appId, deepLinkId);
|
||||
if (deepLinkPath) {
|
||||
path = appendAppPath(deepLinkPath, path);
|
||||
}
|
||||
}
|
||||
|
||||
const relUrl = http.basePath.prepend(getAppUrl(availableMounters, appId, path));
|
||||
return absolute ? relativeToAbsolute(relUrl) : relUrl;
|
||||
},
|
||||
|
|
|
@ -780,7 +780,10 @@ export interface ApplicationStart {
|
|||
* @param options.path - optional path inside application to deep link to
|
||||
* @param options.absolute - if true, will returns an absolute url instead of a relative one
|
||||
*/
|
||||
getUrlForApp(appId: string, options?: { path?: string; absolute?: boolean }): string;
|
||||
getUrlForApp(
|
||||
appId: string,
|
||||
options?: { path?: string; absolute?: boolean; deepLinkId?: string }
|
||||
): string;
|
||||
|
||||
/**
|
||||
* An observable that emits the current application id and each subsequent id update.
|
||||
|
|
|
@ -142,7 +142,7 @@ export class DocLinksService {
|
|||
dataStreams: `${ELASTICSEARCH_DOCS}data-streams.html`,
|
||||
indexModules: `${ELASTICSEARCH_DOCS}index-modules.html`,
|
||||
indexSettings: `${ELASTICSEARCH_DOCS}index-modules.html#index-modules-settings`,
|
||||
indexTemplates: `${ELASTICSEARCH_DOCS}indices-templates.html`,
|
||||
indexTemplates: `${ELASTICSEARCH_DOCS}index-templates.html`,
|
||||
mapping: `${ELASTICSEARCH_DOCS}mapping.html`,
|
||||
mappingAnalyzer: `${ELASTICSEARCH_DOCS}analyzer.html`,
|
||||
mappingCoerce: `${ELASTICSEARCH_DOCS}coerce.html`,
|
||||
|
|
|
@ -150,6 +150,7 @@ export interface ApplicationStart {
|
|||
getUrlForApp(appId: string, options?: {
|
||||
path?: string;
|
||||
absolute?: boolean;
|
||||
deepLinkId?: string;
|
||||
}): string;
|
||||
navigateToApp(appId: string, options?: NavigateToAppOptions): Promise<void>;
|
||||
navigateToUrl(url: string): Promise<void>;
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@ -21,13 +21,37 @@ import { Root } from '../../../root';
|
|||
|
||||
const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
|
||||
|
||||
const logFilePath = Path.join(__dirname, 'migration_test_kibana.log');
|
||||
const logFilePath = Path.join(__dirname, 'migration_test_kibana_from_v1.log');
|
||||
|
||||
const asyncUnlink = Util.promisify(Fs.unlink);
|
||||
async function removeLogFile() {
|
||||
// ignore errors if it doesn't exist
|
||||
await asyncUnlink(logFilePath).catch(() => void 0);
|
||||
}
|
||||
const assertMigratedDocuments = (arr: any[], target: any[]) => target.every((v) => arr.includes(v));
|
||||
|
||||
function sortByTypeAndId(a: { type: string; id: string }, b: { type: string; id: string }) {
|
||||
return a.type.localeCompare(b.type) || a.id.localeCompare(b.id);
|
||||
}
|
||||
|
||||
async function fetchDocuments(esClient: ElasticsearchClient, index: string) {
|
||||
const { body } = await esClient.search<any>({
|
||||
index,
|
||||
body: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
_source: ['type', 'id'],
|
||||
},
|
||||
});
|
||||
|
||||
return body.hits.hits
|
||||
.map((h) => ({
|
||||
...h._source,
|
||||
id: h._id,
|
||||
}))
|
||||
.sort(sortByTypeAndId);
|
||||
}
|
||||
|
||||
describe('migration v2', () => {
|
||||
let esServer: kbnTestServer.TestElasticsearchUtils;
|
||||
|
@ -40,7 +64,7 @@ describe('migration v2', () => {
|
|||
adjustTimeout: (t: number) => jest.setTimeout(t),
|
||||
settings: {
|
||||
es: {
|
||||
license: 'trial',
|
||||
license: 'basic',
|
||||
dataArchive,
|
||||
},
|
||||
},
|
||||
|
@ -51,8 +75,8 @@ describe('migration v2', () => {
|
|||
migrations: {
|
||||
skip: false,
|
||||
enableV2: true,
|
||||
// There are 53 docs in fixtures. Batch size configured to enforce 3 migration steps.
|
||||
batchSize: 20,
|
||||
// There are 40 docs in fixtures. Batch size configured to enforce 3 migration steps.
|
||||
batchSize: 15,
|
||||
},
|
||||
logging: {
|
||||
appenders: {
|
||||
|
@ -85,8 +109,7 @@ describe('migration v2', () => {
|
|||
coreStart = start;
|
||||
esClient = coreStart.elasticsearch.client.asInternalUser;
|
||||
});
|
||||
|
||||
await Promise.all([startEsPromise, startKibanaPromise]);
|
||||
return await Promise.all([startEsPromise, startKibanaPromise]);
|
||||
};
|
||||
|
||||
const getExpectedVersionPerType = () =>
|
||||
|
@ -192,15 +215,19 @@ describe('migration v2', () => {
|
|||
});
|
||||
});
|
||||
|
||||
// FLAKY: https://github.com/elastic/kibana/issues/91107
|
||||
describe.skip('migrating from the same Kibana version', () => {
|
||||
describe('migrating from the same Kibana version that used v1 migrations', () => {
|
||||
const originalIndex = `.kibana_1`; // v1 migrations index
|
||||
const migratedIndex = `.kibana_${kibanaVersion}_001`;
|
||||
|
||||
beforeAll(async () => {
|
||||
await removeLogFile();
|
||||
await startServers({
|
||||
oss: true,
|
||||
dataArchive: Path.join(__dirname, 'archives', '8.0.0_oss_sample_saved_objects.zip'),
|
||||
oss: false,
|
||||
dataArchive: Path.join(
|
||||
__dirname,
|
||||
'archives',
|
||||
'8.0.0_v1_migrations_sample_data_saved_objects.zip'
|
||||
),
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -215,7 +242,6 @@ describe('migration v2', () => {
|
|||
},
|
||||
{ ignore: [404] }
|
||||
);
|
||||
|
||||
const response = body[migratedIndex];
|
||||
|
||||
expect(response).toBeDefined();
|
||||
|
@ -225,17 +251,23 @@ describe('migration v2', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
it('copies all the document of the previous index to the new one', async () => {
|
||||
it('copies the documents from the previous index to the new one', async () => {
|
||||
// original assertion on document count comparison (how atteched are we to this assertion?)
|
||||
const migratedIndexResponse = await esClient.count({
|
||||
index: migratedIndex,
|
||||
});
|
||||
const oldIndexResponse = await esClient.count({
|
||||
index: '.kibana_1',
|
||||
index: originalIndex,
|
||||
});
|
||||
|
||||
// Use a >= comparison since once Kibana has started it might create new
|
||||
// documents like telemetry tasks
|
||||
expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
|
||||
|
||||
// new assertion against a document array comparison
|
||||
const originalDocs = await fetchDocuments(esClient, originalIndex);
|
||||
const migratedDocs = await fetchDocuments(esClient, migratedIndex);
|
||||
expect(assertMigratedDocuments(migratedDocs, originalDocs));
|
||||
});
|
||||
|
||||
it('migrates the documents to the highest version', async () => {
|
||||
|
|
|
@ -261,6 +261,7 @@ export class Server {
|
|||
|
||||
await this.plugins.start(this.coreStart);
|
||||
|
||||
this.status.start();
|
||||
await this.http.start();
|
||||
|
||||
startTransaction?.end();
|
||||
|
|
103
src/core/server/status/log_overall_status.test.ts
Normal file
103
src/core/server/status/log_overall_status.test.ts
Normal file
|
@ -0,0 +1,103 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { TestScheduler } from 'rxjs/testing';
|
||||
import { ServiceStatus, ServiceStatusLevels } from './types';
|
||||
import { getOverallStatusChanges } from './log_overall_status';
|
||||
|
||||
const getTestScheduler = () =>
|
||||
new TestScheduler((actual, expected) => {
|
||||
expect(actual).toEqual(expected);
|
||||
});
|
||||
|
||||
const createStatus = (parts: Partial<ServiceStatus> = {}): ServiceStatus => ({
|
||||
level: ServiceStatusLevels.available,
|
||||
summary: 'summary',
|
||||
...parts,
|
||||
});
|
||||
|
||||
describe('getOverallStatusChanges', () => {
|
||||
it('emits an initial message after first overall$ emission', () => {
|
||||
getTestScheduler().run(({ expectObservable, hot }) => {
|
||||
const overall$ = hot<ServiceStatus>('--a', {
|
||||
a: createStatus(),
|
||||
});
|
||||
const stop$ = hot<void>('');
|
||||
const expected = '--a';
|
||||
|
||||
expectObservable(getOverallStatusChanges(overall$, stop$)).toBe(expected, {
|
||||
a: 'Kibana is now available',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('emits a new message every time the status level changes', () => {
|
||||
getTestScheduler().run(({ expectObservable, hot }) => {
|
||||
const overall$ = hot<ServiceStatus>('--a--b', {
|
||||
a: createStatus({
|
||||
level: ServiceStatusLevels.degraded,
|
||||
}),
|
||||
b: createStatus({
|
||||
level: ServiceStatusLevels.available,
|
||||
}),
|
||||
});
|
||||
const stop$ = hot<void>('');
|
||||
const expected = '--a--b';
|
||||
|
||||
expectObservable(getOverallStatusChanges(overall$, stop$)).toBe(expected, {
|
||||
a: 'Kibana is now degraded',
|
||||
b: 'Kibana is now available (was degraded)',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('does not emit when the status stays the same', () => {
|
||||
getTestScheduler().run(({ expectObservable, hot }) => {
|
||||
const overall$ = hot<ServiceStatus>('--a--b--c', {
|
||||
a: createStatus({
|
||||
level: ServiceStatusLevels.degraded,
|
||||
summary: 'summary 1',
|
||||
}),
|
||||
b: createStatus({
|
||||
level: ServiceStatusLevels.degraded,
|
||||
summary: 'summary 2',
|
||||
}),
|
||||
c: createStatus({
|
||||
level: ServiceStatusLevels.available,
|
||||
summary: 'summary 2',
|
||||
}),
|
||||
});
|
||||
const stop$ = hot<void>('');
|
||||
const expected = '--a-----b';
|
||||
|
||||
expectObservable(getOverallStatusChanges(overall$, stop$)).toBe(expected, {
|
||||
a: 'Kibana is now degraded',
|
||||
b: 'Kibana is now available (was degraded)',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('stops emitting once `stop$` emits', () => {
|
||||
getTestScheduler().run(({ expectObservable, hot }) => {
|
||||
const overall$ = hot<ServiceStatus>('--a--b', {
|
||||
a: createStatus({
|
||||
level: ServiceStatusLevels.degraded,
|
||||
}),
|
||||
b: createStatus({
|
||||
level: ServiceStatusLevels.available,
|
||||
}),
|
||||
});
|
||||
const stop$ = hot<void>('----(s|)');
|
||||
const expected = '--a-|';
|
||||
|
||||
expectObservable(getOverallStatusChanges(overall$, stop$)).toBe(expected, {
|
||||
a: 'Kibana is now degraded',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
31
src/core/server/status/log_overall_status.ts
Normal file
31
src/core/server/status/log_overall_status.ts
Normal file
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Observable } from 'rxjs';
|
||||
import { distinctUntilChanged, pairwise, startWith, takeUntil, map } from 'rxjs/operators';
|
||||
import { ServiceStatus } from './types';
|
||||
|
||||
export const getOverallStatusChanges = (
|
||||
overall$: Observable<ServiceStatus>,
|
||||
stop$: Observable<void>
|
||||
) => {
|
||||
return overall$.pipe(
|
||||
takeUntil(stop$),
|
||||
distinctUntilChanged((previous, next) => {
|
||||
return previous.level.toString() === next.level.toString();
|
||||
}),
|
||||
startWith(undefined),
|
||||
pairwise(),
|
||||
map(([oldStatus, newStatus]) => {
|
||||
if (oldStatus) {
|
||||
return `Kibana is now ${newStatus!.level.toString()} (was ${oldStatus!.level.toString()})`;
|
||||
}
|
||||
return `Kibana is now ${newStatus!.level.toString()}`;
|
||||
})
|
||||
);
|
||||
};
|
|
@ -301,9 +301,9 @@ describe('PluginStatusService', () => {
|
|||
pluginA$.next(available);
|
||||
pluginA$.next(degraded);
|
||||
// Waiting for the debounce timeout should cut a new update
|
||||
await delay(500);
|
||||
await delay(25);
|
||||
pluginA$.next(available);
|
||||
await delay(500);
|
||||
await delay(25);
|
||||
subscription.unsubscribe();
|
||||
|
||||
expect(statusUpdates).toMatchInlineSnapshot(`
|
||||
|
|
|
@ -51,7 +51,7 @@ export class PluginsStatusService {
|
|||
|
||||
return this.getPluginStatuses$(dependencies).pipe(
|
||||
// Prevent many emissions at once from dependency status resolution from making this too noisy
|
||||
debounceTime(500)
|
||||
debounceTime(25)
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Observable, combineLatest, Subscription } from 'rxjs';
|
||||
import { Observable, combineLatest, Subscription, Subject } from 'rxjs';
|
||||
import { map, distinctUntilChanged, shareReplay, take, debounceTime } from 'rxjs/operators';
|
||||
import { isDeepStrictEqual } from 'util';
|
||||
|
||||
|
@ -25,6 +25,7 @@ import { config, StatusConfigType } from './status_config';
|
|||
import { ServiceStatus, CoreStatus, InternalStatusServiceSetup } from './types';
|
||||
import { getSummaryStatus } from './get_summary_status';
|
||||
import { PluginsStatusService } from './plugins_status';
|
||||
import { getOverallStatusChanges } from './log_overall_status';
|
||||
|
||||
interface StatusLogMeta extends LogMeta {
|
||||
kibana: { status: ServiceStatus };
|
||||
|
@ -42,7 +43,9 @@ interface SetupDeps {
|
|||
export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
||||
private readonly logger: Logger;
|
||||
private readonly config$: Observable<StatusConfigType>;
|
||||
private readonly stop$ = new Subject<void>();
|
||||
|
||||
private overall$?: Observable<ServiceStatus>;
|
||||
private pluginsStatus?: PluginsStatusService;
|
||||
private overallSubscription?: Subscription;
|
||||
|
||||
|
@ -63,10 +66,7 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
|||
const core$ = this.setupCoreStatus({ elasticsearch, savedObjects });
|
||||
this.pluginsStatus = new PluginsStatusService({ core$, pluginDependencies });
|
||||
|
||||
const overall$: Observable<ServiceStatus> = combineLatest([
|
||||
core$,
|
||||
this.pluginsStatus.getAll$(),
|
||||
]).pipe(
|
||||
this.overall$ = combineLatest([core$, this.pluginsStatus.getAll$()]).pipe(
|
||||
// Prevent many emissions at once from dependency status resolution from making this too noisy
|
||||
debounceTime(500),
|
||||
map(([coreStatus, pluginsStatus]) => {
|
||||
|
@ -86,7 +86,7 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
|||
);
|
||||
|
||||
// Create an unused subscription to ensure all underlying lazy observables are started.
|
||||
this.overallSubscription = overall$.subscribe();
|
||||
this.overallSubscription = this.overall$.subscribe();
|
||||
|
||||
const commonRouteDeps = {
|
||||
config: {
|
||||
|
@ -97,7 +97,7 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
|||
},
|
||||
metrics,
|
||||
status: {
|
||||
overall$,
|
||||
overall$: this.overall$,
|
||||
plugins$: this.pluginsStatus.getAll$(),
|
||||
core$,
|
||||
},
|
||||
|
@ -124,7 +124,7 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
|||
|
||||
return {
|
||||
core$,
|
||||
overall$,
|
||||
overall$: this.overall$,
|
||||
plugins: {
|
||||
set: this.pluginsStatus.set.bind(this.pluginsStatus),
|
||||
getDependenciesStatus$: this.pluginsStatus.getDependenciesStatus$.bind(this.pluginsStatus),
|
||||
|
@ -134,9 +134,19 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
|
|||
};
|
||||
}
|
||||
|
||||
public start() {}
|
||||
public start() {
|
||||
if (!this.overall$) {
|
||||
throw new Error('cannot call `start` before `setup`');
|
||||
}
|
||||
getOverallStatusChanges(this.overall$, this.stop$).subscribe((message) => {
|
||||
this.logger.info(message);
|
||||
});
|
||||
}
|
||||
|
||||
public stop() {
|
||||
this.stop$.next();
|
||||
this.stop$.complete();
|
||||
|
||||
if (this.overallSubscription) {
|
||||
this.overallSubscription.unsubscribe();
|
||||
this.overallSubscription = undefined;
|
||||
|
|
|
@ -42,7 +42,7 @@ export async function bundleDockerFiles(config: Config, log: ToolingLog, scope:
|
|||
await copyAll(resolve(scope.dockerBuildDir), resolve(dockerFilesBuildDir), {
|
||||
select: ['LICENSE'],
|
||||
});
|
||||
const templates = ['hardening_manifest.yml', 'README.md'];
|
||||
const templates = ['hardening_manifest.yaml', 'README.md'];
|
||||
for (const template of templates) {
|
||||
const file = readFileSync(resolve(__dirname, 'templates/ironbank', template));
|
||||
const output = Mustache.render(file.toString(), scope);
|
||||
|
|
|
@ -31,6 +31,8 @@ kibana_vars=(
|
|||
csp.rules
|
||||
csp.strict
|
||||
csp.warnLegacyBrowsers
|
||||
data.autocomplete.valueSuggestions.terminateAfter
|
||||
data.autocomplete.valueSuggestions.timeout
|
||||
elasticsearch.customHeaders
|
||||
elasticsearch.hosts
|
||||
elasticsearch.logQueries
|
||||
|
@ -57,15 +59,27 @@ kibana_vars=(
|
|||
enterpriseSearch.accessCheckTimeoutWarning
|
||||
enterpriseSearch.enabled
|
||||
enterpriseSearch.host
|
||||
externalUrl.policy
|
||||
i18n.locale
|
||||
interpreter.enableInVisualize
|
||||
kibana.autocompleteTerminateAfter
|
||||
kibana.autocompleteTimeout
|
||||
kibana.defaultAppId
|
||||
kibana.index
|
||||
logging.appenders
|
||||
logging.appenders.console
|
||||
logging.appenders.file
|
||||
logging.appenders.rolling-file
|
||||
logging.dest
|
||||
logging.json
|
||||
logging.loggers
|
||||
logging.loggers.appenders
|
||||
logging.loggers.level
|
||||
logging.loggers.name
|
||||
logging.quiet
|
||||
logging.root
|
||||
logging.root.appenders
|
||||
logging.root.level
|
||||
logging.rotate.enabled
|
||||
logging.rotate.everyBytes
|
||||
logging.rotate.keepFiles
|
||||
|
@ -85,6 +99,7 @@ kibana_vars=(
|
|||
migrations.batchSize
|
||||
migrations.enableV2
|
||||
migrations.pollInterval
|
||||
migrations.retryAttempts
|
||||
migrations.scrollDuration
|
||||
migrations.skip
|
||||
monitoring.cluster_alerts.email_notifications.email_address
|
||||
|
@ -101,6 +116,7 @@ kibana_vars=(
|
|||
monitoring.ui.elasticsearch.ssl.verificationMode
|
||||
monitoring.ui.elasticsearch.username
|
||||
monitoring.ui.enabled
|
||||
monitoring.ui.logs.index
|
||||
monitoring.ui.max_bucket_size
|
||||
monitoring.ui.min_interval_seconds
|
||||
newsfeed.enabled
|
||||
|
@ -110,26 +126,35 @@ kibana_vars=(
|
|||
path.data
|
||||
pid.file
|
||||
regionmap
|
||||
savedObjects.maxImportExportSize
|
||||
savedObjects.maxImportPayloadBytes
|
||||
security.showInsecureClusterWarning
|
||||
server.basePath
|
||||
server.compression.enabled
|
||||
server.compression.referrerWhitelist
|
||||
server.cors
|
||||
server.cors.allowCredentials
|
||||
server.cors.allowOrigin
|
||||
server.cors.enabled
|
||||
server.cors.origin
|
||||
server.securityResponseHeaders.strictTransportSecurity
|
||||
server.securityResponseHeaders.xContentTypeOptions
|
||||
server.securityResponseHeaders.referrerPolicy
|
||||
server.securityResponseHeaders.permissionsPolicy
|
||||
server.securityResponseHeaders.disableEmbedding
|
||||
server.customResponseHeaders
|
||||
server.defaultRoute
|
||||
server.host
|
||||
server.keepAliveTimeout
|
||||
server.maxPayloadBytes
|
||||
server.maxPayload
|
||||
server.maxPayloadBytes
|
||||
server.name
|
||||
server.port
|
||||
server.publicBaseUrl
|
||||
server.requestId.allowFromAnyIp
|
||||
server.requestId.ipAllowlist
|
||||
server.rewriteBasePath
|
||||
server.securityResponseHeaders.disableEmbedding
|
||||
server.securityResponseHeaders.permissionsPolicy
|
||||
server.securityResponseHeaders.referrerPolicy
|
||||
server.securityResponseHeaders.strictTransportSecurity
|
||||
server.securityResponseHeaders.xContentTypeOptions
|
||||
server.shutdownTimeout
|
||||
server.socketTimeout
|
||||
server.ssl.cert
|
||||
server.ssl.certificate
|
||||
|
@ -145,6 +170,8 @@ kibana_vars=(
|
|||
server.ssl.supportedProtocols
|
||||
server.ssl.truststore.password
|
||||
server.ssl.truststore.path
|
||||
server.uuid
|
||||
server.xsrf.allowlist
|
||||
server.xsrf.disableProtection
|
||||
server.xsrf.whitelist
|
||||
status.allowAnonymous
|
||||
|
@ -160,34 +187,45 @@ kibana_vars=(
|
|||
tilemap.options.subdomains
|
||||
tilemap.url
|
||||
timelion.enabled
|
||||
url_drilldown.enabled
|
||||
vega.enableExternalUrls
|
||||
vis_type_vega.enableExternalUrls
|
||||
xpack.actions.allowedHosts
|
||||
xpack.actions.customHostSettings
|
||||
xpack.actions.enabled
|
||||
xpack.actions.enabledActionTypes
|
||||
xpack.actions.preconfiguredAlertHistoryEsIndex
|
||||
xpack.actions.maxResponseContentLength
|
||||
xpack.actions.preconfigured
|
||||
xpack.actions.preconfiguredAlertHistoryEsIndex
|
||||
xpack.actions.proxyBypassHosts
|
||||
xpack.actions.proxyHeaders
|
||||
xpack.actions.proxyOnlyHosts
|
||||
xpack.actions.proxyRejectUnauthorizedCertificates
|
||||
xpack.actions.proxyUrl
|
||||
xpack.actions.proxyBypassHosts
|
||||
xpack.actions.proxyOnlyHosts
|
||||
xpack.actions.rejectUnauthorized
|
||||
xpack.actions.maxResponseContentLength
|
||||
xpack.actions.responseTimeout
|
||||
xpack.actions.tls.verificationMode
|
||||
xpack.actions.tls.proxyVerificationMode
|
||||
xpack.alerts.healthCheck.interval
|
||||
xpack.alerts.invalidateApiKeysTask.interval
|
||||
xpack.alerts.invalidateApiKeysTask.removalDelay
|
||||
xpack.actions.tls.verificationMode
|
||||
xpack.alerting.healthCheck.interval
|
||||
xpack.alerting.invalidateApiKeysTask.interval
|
||||
xpack.alerting.invalidateApiKeysTask.removalDelay
|
||||
xpack.alerts.healthCheck.interval
|
||||
xpack.alerts.invalidateApiKeysTask.interval
|
||||
xpack.alerts.invalidateApiKeysTask.removalDelay
|
||||
xpack.apm.enabled
|
||||
xpack.apm.maxServiceEnvironments
|
||||
xpack.apm.searchAggregatedTransactions
|
||||
xpack.apm.serviceMapEnabled
|
||||
xpack.apm.serviceMapFingerprintBucketSize
|
||||
xpack.apm.serviceMapFingerprintGlobalBucketSize
|
||||
xpack.apm.ui.enabled
|
||||
xpack.apm.ui.maxTraceItems
|
||||
xpack.apm.ui.transactionGroupBucketSize
|
||||
xpack.banners.backgroundColor
|
||||
xpack.banners.disableSpaceBanners
|
||||
xpack.banners.placement
|
||||
xpack.banners.textColor
|
||||
xpack.banners.textContent
|
||||
xpack.canvas.enabled
|
||||
xpack.code.disk.thresholdEnabled
|
||||
xpack.code.disk.watermarkLow
|
||||
|
@ -200,15 +238,28 @@ kibana_vars=(
|
|||
xpack.code.ui.enabled
|
||||
xpack.code.updateRepoFrequencyMs
|
||||
xpack.code.verbose
|
||||
xpack.data_enhanced.search.sessions.defaultExpiration
|
||||
xpack.data_enhanced.search.sessions.enabled
|
||||
xpack.data_enhanced.search.sessions.maxUpdateRetries
|
||||
xpack.data_enhanced.search.sessions.notTouchedInProgressTimeout
|
||||
xpack.data_enhanced.search.sessions.notTouchedTimeout
|
||||
xpack.data_enhanced.search.sessions.pageSize
|
||||
xpack.data_enhanced.search.sessions.trackingInterval
|
||||
xpack.discoverEnhanced.actions.exploreDataInChart.enabled
|
||||
xpack.discoverEnhanced.actions.exploreDataInContextMenu.enabled
|
||||
xpack.encryptedSavedObjects.encryptionKey
|
||||
xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys
|
||||
xpack.event_log.enabled
|
||||
xpack.event_log.indexEntries
|
||||
xpack.event_log.logEntries
|
||||
xpack.fleet.agentPolicies
|
||||
xpack.fleet.agents.elasticsearch.host
|
||||
xpack.fleet.agents.elasticsearch.hosts
|
||||
xpack.fleet.agents.enabled
|
||||
xpack.fleet.agents.fleet_server.hosts
|
||||
xpack.fleet.agents.kibana.host
|
||||
xpack.fleet.agents.tlsCheckDisabled
|
||||
xpack.fleet.agentPolicies
|
||||
xpack.fleet.enabled
|
||||
xpack.fleet.packages
|
||||
xpack.fleet.registryUrl
|
||||
xpack.graph.canEditDrillDownUrls
|
||||
|
@ -232,8 +283,10 @@ kibana_vars=(
|
|||
xpack.maps.enabled
|
||||
xpack.maps.showMapVisualizationTypes
|
||||
xpack.ml.enabled
|
||||
xpack.observability.annotations.index
|
||||
xpack.observability.unsafe.alertingExperience.enabled
|
||||
xpack.observability.unsafe.cases.enabled
|
||||
xpack.painless_lab.enabled
|
||||
xpack.reporting.capture.browser.autoDownload
|
||||
xpack.reporting.capture.browser.chromium.disableSandbox
|
||||
xpack.reporting.capture.browser.chromium.inspect
|
||||
|
@ -245,9 +298,11 @@ kibana_vars=(
|
|||
xpack.reporting.capture.concurrency
|
||||
xpack.reporting.capture.loadDelay
|
||||
xpack.reporting.capture.maxAttempts
|
||||
xpack.reporting.capture.networkPolicy
|
||||
xpack.reporting.capture.settleTime
|
||||
xpack.reporting.capture.timeout
|
||||
xpack.reporting.capture.timeouts.openUrl
|
||||
xpack.reporting.capture.timeouts.openUrl
|
||||
xpack.reporting.capture.timeouts.renderComplete
|
||||
xpack.reporting.capture.timeouts.waitForElements
|
||||
xpack.reporting.capture.viewport.height
|
||||
|
@ -281,21 +336,24 @@ kibana_vars=(
|
|||
xpack.rollup.enabled
|
||||
xpack.ruleRegistry.write.enabled
|
||||
xpack.searchprofiler.enabled
|
||||
xpack.security.audit.enabled
|
||||
xpack.security.audit.appender.type
|
||||
xpack.security.audit.appender.layout.type
|
||||
xpack.security.audit.appender.fileName
|
||||
xpack.security.audit.appender.layout.highlight
|
||||
xpack.security.audit.appender.layout.pattern
|
||||
xpack.security.audit.appender.layout.type
|
||||
xpack.security.audit.appender.legacyLoggingConfig
|
||||
xpack.security.audit.appender.fileName
|
||||
xpack.security.audit.appender.policy.type
|
||||
xpack.security.audit.appender.policy.interval
|
||||
xpack.security.audit.appender.policy.modulate
|
||||
xpack.security.audit.appender.policy.size
|
||||
xpack.security.audit.appender.strategy.type
|
||||
xpack.security.audit.appender.policy.type
|
||||
xpack.security.audit.appender.strategy.max
|
||||
xpack.security.audit.appender.strategy.pattern
|
||||
xpack.security.audit.appender.strategy.type
|
||||
xpack.security.audit.appender.type
|
||||
xpack.security.audit.enabled
|
||||
xpack.security.audit.ignore_filters
|
||||
xpack.security.authc.http.autoSchemesEnabled
|
||||
xpack.security.authc.http.enabled
|
||||
xpack.security.authc.http.schemes
|
||||
xpack.security.authc.oidc.realm
|
||||
xpack.security.authc.providers
|
||||
xpack.security.authc.saml.maxRedirectURLSize
|
||||
|
@ -322,6 +380,7 @@ kibana_vars=(
|
|||
xpack.task_manager.monitored_aggregated_stats_refresh_rate
|
||||
xpack.task_manager.monitored_stats_required_freshness
|
||||
xpack.task_manager.monitored_stats_running_average_window
|
||||
xpack.task_manager.monitored_stats_warn_delayed_task_start_in_seconds
|
||||
xpack.task_manager.monitored_task_execution_thresholds
|
||||
xpack.task_manager.poll_interval
|
||||
xpack.task_manager.request_capacity
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
################################################################################
|
||||
ARG BASE_REGISTRY=registry1.dsop.io
|
||||
ARG BASE_IMAGE=redhat/ubi/ubi8
|
||||
ARG BASE_TAG=8.3
|
||||
ARG BASE_TAG=8.4
|
||||
|
||||
FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG} as prep_files
|
||||
|
||||
|
@ -59,7 +59,7 @@ COPY --chown=1000:0 config/kibana.yml /usr/share/kibana/config/kibana.yml
|
|||
|
||||
# Add the launcher/wrapper script. It knows how to interpret environment
|
||||
# variables and translate them to Kibana CLI options.
|
||||
COPY --chown=1000:0 scripts/kibana-docker /usr/local/bin/
|
||||
COPY --chown=1000:0 bin/kibana-docker /usr/local/bin/
|
||||
|
||||
# Remove the suid bit everywhere to mitigate "Stack Clash"
|
||||
RUN find / -xdev -perm -4000 -exec chmod u-s {} +
|
||||
|
|
|
@ -14,7 +14,7 @@ tags:
|
|||
# Build args passed to Dockerfile ARGs
|
||||
args:
|
||||
BASE_IMAGE: 'redhat/ubi/ubi8'
|
||||
BASE_TAG: '8.3'
|
||||
BASE_TAG: '8.4'
|
||||
|
||||
# Docker image labels
|
||||
labels:
|
|
@ -13,12 +13,20 @@ import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
|
|||
|
||||
export const REF_CONFIG_PATHS = [Path.resolve(REPO_ROOT, 'tsconfig.refs.json')];
|
||||
|
||||
export async function buildAllTsRefs(log: ToolingLog) {
|
||||
export async function buildAllTsRefs(log: ToolingLog): Promise<{ failed: boolean }> {
|
||||
for (const path of REF_CONFIG_PATHS) {
|
||||
const relative = Path.relative(REPO_ROOT, path);
|
||||
log.debug(`Building TypeScript projects refs for ${relative}...`);
|
||||
await execa(require.resolve('typescript/bin/tsc'), ['-b', relative, '--pretty'], {
|
||||
cwd: REPO_ROOT,
|
||||
});
|
||||
const { failed, stdout } = await execa(
|
||||
require.resolve('typescript/bin/tsc'),
|
||||
['-b', relative, '--pretty'],
|
||||
{
|
||||
cwd: REPO_ROOT,
|
||||
reject: false,
|
||||
}
|
||||
);
|
||||
log.info(stdout);
|
||||
if (failed) return { failed };
|
||||
}
|
||||
return { failed: false };
|
||||
}
|
||||
|
|
|
@ -69,7 +69,11 @@ export async function runTypeCheckCli() {
|
|||
process.exit();
|
||||
}
|
||||
|
||||
await buildAllTsRefs(log);
|
||||
const { failed } = await buildAllTsRefs(log);
|
||||
if (failed) {
|
||||
log.error('Unable to build TS project refs');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const tscArgs = [
|
||||
// composite project cannot be used with --noEmit
|
||||
|
|
|
@ -132,7 +132,7 @@ export function DashboardTopNav({
|
|||
|
||||
const trackUiMetric = usageCollection?.reportUiCounter.bind(
|
||||
usageCollection,
|
||||
DashboardConstants.DASHBOARDS_ID
|
||||
DashboardConstants.DASHBOARD_ID
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
|
@ -163,6 +163,7 @@ export function DashboardTopNav({
|
|||
notifications: core.notifications,
|
||||
overlays: core.overlays,
|
||||
SavedObjectFinder: getSavedObjectFinder(core.savedObjects, uiSettings),
|
||||
reportUiCounter: usageCollection?.reportUiCounter,
|
||||
}),
|
||||
}));
|
||||
}
|
||||
|
@ -174,6 +175,7 @@ export function DashboardTopNav({
|
|||
core.savedObjects,
|
||||
core.overlays,
|
||||
uiSettings,
|
||||
usageCollection,
|
||||
]);
|
||||
|
||||
const createNewVisType = useCallback(
|
||||
|
@ -183,7 +185,7 @@ export function DashboardTopNav({
|
|||
|
||||
if (visType) {
|
||||
if (trackUiMetric) {
|
||||
trackUiMetric(METRIC_TYPE.CLICK, visType.name);
|
||||
trackUiMetric(METRIC_TYPE.CLICK, `${visType.name}:create`);
|
||||
}
|
||||
|
||||
if ('aliasPath' in visType) {
|
||||
|
|
|
@ -51,7 +51,7 @@ export const EditorMenu = ({ dashboardContainer, createNewVisType }: Props) => {
|
|||
|
||||
const trackUiMetric = usageCollection?.reportUiCounter.bind(
|
||||
usageCollection,
|
||||
DashboardConstants.DASHBOARDS_ID
|
||||
DashboardConstants.DASHBOARD_ID
|
||||
);
|
||||
|
||||
const createNewAggsBasedVis = useCallback(
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { JsonObject } from '@kbn/common-utils';
|
||||
import { nodeTypes } from '../node_types/index';
|
||||
import { KQLSyntaxError } from '../kuery_syntax_error';
|
||||
import { KueryNode, DslQuery, KueryParseOptions } from '../types';
|
||||
|
@ -13,7 +14,6 @@ import { IIndexPattern } from '../../../index_patterns/types';
|
|||
|
||||
// @ts-ignore
|
||||
import { parse as parseKuery } from './_generated_/kuery';
|
||||
import { JsonObject } from '../../../../../kibana_utils/common';
|
||||
|
||||
const fromExpression = (
|
||||
expression: string | DslQuery,
|
||||
|
|
|
@ -7,10 +7,10 @@
|
|||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import { JsonObject } from '@kbn/common-utils';
|
||||
import * as ast from '../ast';
|
||||
import { nodeTypes } from '../node_types';
|
||||
import { NamedArgTypeBuildNode } from './types';
|
||||
import { JsonObject } from '../../../../../kibana_utils/common';
|
||||
|
||||
export function buildNode(name: string, value: any): NamedArgTypeBuildNode {
|
||||
const argumentNode =
|
||||
|
|
|
@ -10,8 +10,8 @@
|
|||
* WARNING: these typings are incomplete
|
||||
*/
|
||||
|
||||
import { JsonValue } from '@kbn/common-utils';
|
||||
import { IIndexPattern } from '../../../index_patterns';
|
||||
import { JsonValue } from '../../../../../kibana_utils/common';
|
||||
import { KueryNode } from '..';
|
||||
|
||||
export type FunctionName =
|
||||
|
|
|
@ -174,6 +174,57 @@ const nestedTermResponse = {
|
|||
status: 200,
|
||||
};
|
||||
|
||||
const exhaustiveNestedTermResponse = {
|
||||
took: 10,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
total: 1,
|
||||
successful: 1,
|
||||
skipped: 0,
|
||||
failed: 0,
|
||||
},
|
||||
hits: {
|
||||
total: 14005,
|
||||
max_score: 0,
|
||||
hits: [],
|
||||
},
|
||||
aggregations: {
|
||||
'1': {
|
||||
doc_count_error_upper_bound: 0,
|
||||
sum_other_doc_count: 8325,
|
||||
buckets: [
|
||||
{
|
||||
'2': {
|
||||
doc_count_error_upper_bound: 0,
|
||||
sum_other_doc_count: 0,
|
||||
buckets: [
|
||||
{ key: 'ios', doc_count: 2850 },
|
||||
{ key: 'win xp', doc_count: 2830 },
|
||||
{ key: '__missing__', doc_count: 1430 },
|
||||
],
|
||||
},
|
||||
key: 'US-with-dash',
|
||||
doc_count: 2850,
|
||||
},
|
||||
{
|
||||
'2': {
|
||||
doc_count_error_upper_bound: 0,
|
||||
sum_other_doc_count: 0,
|
||||
buckets: [
|
||||
{ key: 'ios', doc_count: 1850 },
|
||||
{ key: 'win xp', doc_count: 1830 },
|
||||
{ key: '__missing__', doc_count: 130 },
|
||||
],
|
||||
},
|
||||
key: 'IN-with-dash',
|
||||
doc_count: 2830,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
status: 200,
|
||||
};
|
||||
|
||||
const nestedTermResponseNoResults = {
|
||||
took: 10,
|
||||
timed_out: false,
|
||||
|
@ -326,6 +377,17 @@ describe('Terms Agg Other bucket helper', () => {
|
|||
}
|
||||
});
|
||||
|
||||
test('does not build query if sum_other_doc_count is 0 (exhaustive terms)', () => {
|
||||
const aggConfigs = getAggConfigs(nestedTerm.aggs);
|
||||
expect(
|
||||
buildOtherBucketAgg(
|
||||
aggConfigs,
|
||||
aggConfigs.aggs[1] as IBucketAggConfig,
|
||||
exhaustiveNestedTermResponse
|
||||
)
|
||||
).toBeFalsy();
|
||||
});
|
||||
|
||||
test('excludes exists filter for scripted fields', () => {
|
||||
const aggConfigs = getAggConfigs(nestedTerm.aggs);
|
||||
aggConfigs.aggs[1].params.field.scripted = true;
|
||||
|
|
|
@ -156,6 +156,7 @@ export const buildOtherBucketAgg = (
|
|||
};
|
||||
|
||||
let noAggBucketResults = false;
|
||||
let exhaustiveBuckets = true;
|
||||
|
||||
// recursively create filters for all parent aggregation buckets
|
||||
const walkBucketTree = (
|
||||
|
@ -175,6 +176,9 @@ export const buildOtherBucketAgg = (
|
|||
const newAggIndex = aggIndex + 1;
|
||||
const newAgg = bucketAggs[newAggIndex];
|
||||
const currentAgg = bucketAggs[aggIndex];
|
||||
if (aggIndex === index && agg && agg.sum_other_doc_count > 0) {
|
||||
exhaustiveBuckets = false;
|
||||
}
|
||||
if (aggIndex < index) {
|
||||
each(agg.buckets, (bucket: any, bucketObjKey) => {
|
||||
const bucketKey = currentAgg.getKey(
|
||||
|
@ -223,7 +227,7 @@ export const buildOtherBucketAgg = (
|
|||
walkBucketTree(0, response.aggregations, bucketAggs[0].id, [], '');
|
||||
|
||||
// bail if there were no bucket results
|
||||
if (noAggBucketResults) {
|
||||
if (noAggBucketResults || exhaustiveBuckets) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -12,11 +12,24 @@ import { mockAggTypesRegistry, mockGetFieldFormatsStart } from '../../test_helpe
|
|||
import { BUCKET_TYPES } from '../bucket_agg_types';
|
||||
import { IBucketAggConfig } from '../bucket_agg_type';
|
||||
import { createFilterHistogram } from './histogram';
|
||||
import { RangeFilter } from '../../../../es_query';
|
||||
|
||||
function validateFilter(filter: RangeFilter) {
|
||||
expect(mockGetFieldFormatsStart().deserialize).toHaveBeenCalledTimes(1);
|
||||
expect(filter).toHaveProperty('meta');
|
||||
expect(filter.meta).toHaveProperty('index', '1234');
|
||||
expect(filter).toHaveProperty('range');
|
||||
expect(filter.range).toHaveProperty('bytes');
|
||||
expect(filter.range.bytes).toHaveProperty('gte', 2048);
|
||||
expect(filter.range.bytes).toHaveProperty('lt', 3072);
|
||||
expect(filter.meta).toHaveProperty('formattedValue');
|
||||
}
|
||||
|
||||
describe('AggConfig Filters', () => {
|
||||
describe('histogram', () => {
|
||||
const getConfig = (() => {}) as FieldFormatsGetConfigFn;
|
||||
const getAggConfigs = () => {
|
||||
jest.clearAllMocks();
|
||||
const field = {
|
||||
name: 'bytes',
|
||||
format: new BytesFormat({}, getConfig),
|
||||
|
@ -56,14 +69,20 @@ describe('AggConfig Filters', () => {
|
|||
'2048'
|
||||
);
|
||||
|
||||
expect(mockGetFieldFormatsStart().deserialize).toHaveBeenCalledTimes(1);
|
||||
expect(filter).toHaveProperty('meta');
|
||||
expect(filter.meta).toHaveProperty('index', '1234');
|
||||
expect(filter).toHaveProperty('range');
|
||||
expect(filter.range).toHaveProperty('bytes');
|
||||
expect(filter.range.bytes).toHaveProperty('gte', 2048);
|
||||
expect(filter.range.bytes).toHaveProperty('lt', 3072);
|
||||
expect(filter.meta).toHaveProperty('formattedValue');
|
||||
validateFilter(filter);
|
||||
});
|
||||
|
||||
test('should work for auto histograms', () => {
|
||||
const aggConfigs = getAggConfigs();
|
||||
const histogramAggConfig = aggConfigs.aggs[0];
|
||||
histogramAggConfig.params.interval = 'auto';
|
||||
histogramAggConfig.params.used_interval = 1024;
|
||||
|
||||
const filter = createFilterHistogram(mockGetFieldFormatsStart)(
|
||||
histogramAggConfig as IBucketAggConfig,
|
||||
'2048'
|
||||
);
|
||||
validateFilter(filter);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -17,7 +17,14 @@ export const createFilterHistogram = (
|
|||
return (aggConfig: IBucketAggConfig, key: string) => {
|
||||
const { deserialize } = getFieldFormatsStart();
|
||||
const value = parseInt(key, 10);
|
||||
const params: RangeFilterParams = { gte: value, lt: value + aggConfig.params.interval };
|
||||
const params: RangeFilterParams = {
|
||||
gte: value,
|
||||
lt:
|
||||
value +
|
||||
(typeof aggConfig.params.used_interval === 'number'
|
||||
? aggConfig.params.used_interval
|
||||
: aggConfig.params.interval),
|
||||
};
|
||||
|
||||
return buildRangeFilter(
|
||||
aggConfig.params.field,
|
||||
|
|
|
@ -276,9 +276,8 @@ export { DuplicateIndexPatternError } from '../common/index_patterns/errors';
|
|||
* Autocomplete query suggestions:
|
||||
*/
|
||||
|
||||
export {
|
||||
export type {
|
||||
QuerySuggestion,
|
||||
QuerySuggestionTypes,
|
||||
QuerySuggestionGetFn,
|
||||
QuerySuggestionGetFnArgs,
|
||||
QuerySuggestionBasic,
|
||||
|
@ -286,6 +285,7 @@ export {
|
|||
AutocompleteStart,
|
||||
} from './autocomplete';
|
||||
|
||||
export { QuerySuggestionTypes } from './autocomplete';
|
||||
/*
|
||||
* Search:
|
||||
*/
|
||||
|
@ -320,25 +320,23 @@ import {
|
|||
tabifyGetColumns,
|
||||
} from '../common';
|
||||
|
||||
export {
|
||||
export { AggGroupLabels, AggGroupNames, METRIC_TYPES, BUCKET_TYPES } from '../common';
|
||||
|
||||
export type {
|
||||
// aggs
|
||||
AggConfigSerialized,
|
||||
AggGroupLabels,
|
||||
AggGroupName,
|
||||
AggGroupNames,
|
||||
AggFunctionsMapping,
|
||||
AggParam,
|
||||
AggParamOption,
|
||||
AggParamType,
|
||||
AggConfigOptions,
|
||||
BUCKET_TYPES,
|
||||
EsaggsExpressionFunctionDefinition,
|
||||
IAggConfig,
|
||||
IAggConfigs,
|
||||
IAggType,
|
||||
IFieldParamType,
|
||||
IMetricAggType,
|
||||
METRIC_TYPES,
|
||||
OptionedParamType,
|
||||
OptionedValueProp,
|
||||
ParsedInterval,
|
||||
|
@ -352,30 +350,23 @@ export {
|
|||
|
||||
export type { AggConfigs, AggConfig } from '../common';
|
||||
|
||||
export {
|
||||
export type {
|
||||
// search
|
||||
ES_SEARCH_STRATEGY,
|
||||
EsQuerySortValue,
|
||||
extractSearchSourceReferences,
|
||||
getEsPreference,
|
||||
getSearchParamsFromRequest,
|
||||
IEsSearchRequest,
|
||||
IEsSearchResponse,
|
||||
IKibanaSearchRequest,
|
||||
IKibanaSearchResponse,
|
||||
injectSearchSourceReferences,
|
||||
ISearchSetup,
|
||||
ISearchStart,
|
||||
ISearchStartSearchSource,
|
||||
ISearchGeneric,
|
||||
ISearchSource,
|
||||
parseSearchSourceJSON,
|
||||
SearchInterceptor,
|
||||
SearchInterceptorDeps,
|
||||
SearchRequest,
|
||||
SearchSourceFields,
|
||||
SortDirection,
|
||||
SearchSessionState,
|
||||
// expression functions and types
|
||||
EsdslExpressionFunctionDefinition,
|
||||
EsRawResponseExpressionTypeDefinition,
|
||||
|
@ -386,11 +377,21 @@ export {
|
|||
TimeoutErrorMode,
|
||||
PainlessError,
|
||||
Reason,
|
||||
WaitUntilNextSessionCompletesOptions,
|
||||
} from './search';
|
||||
|
||||
export {
|
||||
parseSearchSourceJSON,
|
||||
injectSearchSourceReferences,
|
||||
extractSearchSourceReferences,
|
||||
getEsPreference,
|
||||
getSearchParamsFromRequest,
|
||||
noSearchSessionStorageCapabilityMessage,
|
||||
SEARCH_SESSIONS_MANAGEMENT_ID,
|
||||
waitUntilNextSessionCompletes$,
|
||||
WaitUntilNextSessionCompletesOptions,
|
||||
isEsError,
|
||||
SearchSessionState,
|
||||
SortDirection,
|
||||
} from './search';
|
||||
|
||||
export type {
|
||||
|
@ -438,33 +439,36 @@ export const search = {
|
|||
* UI components
|
||||
*/
|
||||
|
||||
export {
|
||||
SearchBar,
|
||||
export type {
|
||||
SearchBarProps,
|
||||
StatefulSearchBarProps,
|
||||
IndexPatternSelectProps,
|
||||
QueryStringInput,
|
||||
QueryStringInputProps,
|
||||
} from './ui';
|
||||
|
||||
export { QueryStringInput, SearchBar } from './ui';
|
||||
|
||||
/**
|
||||
* Types to be shared externally
|
||||
* @public
|
||||
*/
|
||||
export { Filter, Query, RefreshInterval, TimeRange } from '../common';
|
||||
export type { Filter, Query, RefreshInterval, TimeRange } from '../common';
|
||||
|
||||
export {
|
||||
createSavedQueryService,
|
||||
connectToQueryState,
|
||||
syncQueryStateWithUrl,
|
||||
QueryState,
|
||||
getDefaultQuery,
|
||||
FilterManager,
|
||||
TimeHistory,
|
||||
} from './query';
|
||||
|
||||
export type {
|
||||
QueryState,
|
||||
SavedQuery,
|
||||
SavedQueryService,
|
||||
SavedQueryTimeFilter,
|
||||
InputTimeRange,
|
||||
TimeHistory,
|
||||
TimefilterContract,
|
||||
TimeHistoryContract,
|
||||
QueryStateChange,
|
||||
|
@ -472,7 +476,7 @@ export {
|
|||
AutoRefreshDoneFn,
|
||||
} from './query';
|
||||
|
||||
export { AggsStart } from './search/aggs';
|
||||
export type { AggsStart } from './search/aggs';
|
||||
|
||||
export {
|
||||
getTime,
|
||||
|
@ -496,7 +500,7 @@ export function plugin(initializerContext: PluginInitializerContext<ConfigSchema
|
|||
return new DataPublicPlugin(initializerContext);
|
||||
}
|
||||
|
||||
export {
|
||||
export type {
|
||||
DataPublicPluginSetup,
|
||||
DataPublicPluginStart,
|
||||
IDataPluginServices,
|
||||
|
@ -505,4 +509,4 @@ export {
|
|||
} from './types';
|
||||
|
||||
// Export plugin after all other imports
|
||||
export { DataPublicPlugin as Plugin };
|
||||
export type { DataPublicPlugin as DataPlugin };
|
||||
|
|
|
@ -6,15 +6,15 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Plugin, IndexPatternsContract } from '.';
|
||||
import { DataPlugin, IndexPatternsContract } from '.';
|
||||
import { fieldFormatsServiceMock } from './field_formats/mocks';
|
||||
import { searchServiceMock } from './search/mocks';
|
||||
import { queryServiceMock } from './query/mocks';
|
||||
import { AutocompleteStart, AutocompleteSetup } from './autocomplete';
|
||||
import { createNowProviderMock } from './now_provider/mocks';
|
||||
|
||||
export type Setup = jest.Mocked<ReturnType<Plugin['setup']>>;
|
||||
export type Start = jest.Mocked<ReturnType<Plugin['start']>>;
|
||||
export type Setup = jest.Mocked<ReturnType<DataPlugin['setup']>>;
|
||||
export type Start = jest.Mocked<ReturnType<DataPlugin['start']>>;
|
||||
|
||||
const autocompleteSetupMock: jest.Mocked<AutocompleteSetup> = {
|
||||
getQuerySuggestions: jest.fn(),
|
||||
|
|
|
@ -53,6 +53,7 @@ import { ISearchOptions as ISearchOptions_2 } from 'src/plugins/data/public';
|
|||
import { ISearchSource as ISearchSource_2 } from 'src/plugins/data/public';
|
||||
import { IStorageWrapper } from 'src/plugins/kibana_utils/public';
|
||||
import { IUiSettingsClient } from 'src/core/public';
|
||||
import { JsonValue } from '@kbn/common-utils';
|
||||
import { KibanaClient } from '@elastic/elasticsearch/api/kibana';
|
||||
import { Location } from 'history';
|
||||
import { LocationDescriptorObject } from 'history';
|
||||
|
@ -67,7 +68,7 @@ import { Observable } from 'rxjs';
|
|||
import { PackageInfo } from '@kbn/config';
|
||||
import { Path } from 'history';
|
||||
import { PeerCertificate } from 'tls';
|
||||
import { Plugin as Plugin_2 } from 'src/core/public';
|
||||
import { Plugin } from 'src/core/public';
|
||||
import { PluginInitializerContext as PluginInitializerContext_2 } from 'src/core/public';
|
||||
import { PluginInitializerContext as PluginInitializerContext_3 } from 'kibana/public';
|
||||
import { PopoverAnchorPosition } from '@elastic/eui';
|
||||
|
@ -621,6 +622,22 @@ export type CustomFilter = Filter & {
|
|||
query: any;
|
||||
};
|
||||
|
||||
// Warning: (ae-forgotten-export) The symbol "DataSetupDependencies" needs to be exported by the entry point index.d.ts
|
||||
// Warning: (ae-forgotten-export) The symbol "DataStartDependencies" needs to be exported by the entry point index.d.ts
|
||||
// Warning: (ae-missing-release-tag) "DataPublicPlugin" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public (undocumented)
|
||||
export class DataPlugin implements Plugin<DataPublicPluginSetup, DataPublicPluginStart, DataSetupDependencies, DataStartDependencies> {
|
||||
// Warning: (ae-forgotten-export) The symbol "ConfigSchema" needs to be exported by the entry point index.d.ts
|
||||
constructor(initializerContext: PluginInitializerContext_2<ConfigSchema>);
|
||||
// (undocumented)
|
||||
setup(core: CoreSetup<DataStartDependencies, DataPublicPluginStart>, { bfetch, expressions, uiActions, usageCollection, inspector }: DataSetupDependencies): DataPublicPluginSetup;
|
||||
// (undocumented)
|
||||
start(core: CoreStart_2, { uiActions }: DataStartDependencies): DataPublicPluginStart;
|
||||
// (undocumented)
|
||||
stop(): void;
|
||||
}
|
||||
|
||||
// Warning: (ae-missing-release-tag) "DataPublicPluginSetup" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public
|
||||
|
@ -840,7 +857,7 @@ export const esFilters: {
|
|||
export const esKuery: {
|
||||
nodeTypes: import("../common/es_query/kuery/node_types").NodeTypes;
|
||||
fromKueryExpression: (expression: any, parseOptions?: Partial<import("../common").KueryParseOptions>) => import("../common").KueryNode;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("../../kibana_utils/common").JsonObject;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("@kbn/common-utils").JsonObject;
|
||||
};
|
||||
|
||||
// Warning: (ae-missing-release-tag) "esQuery" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
|
@ -2004,27 +2021,11 @@ export type PhrasesFilter = Filter & {
|
|||
meta: PhrasesFilterMeta;
|
||||
};
|
||||
|
||||
// Warning: (ae-forgotten-export) The symbol "DataSetupDependencies" needs to be exported by the entry point index.d.ts
|
||||
// Warning: (ae-forgotten-export) The symbol "DataStartDependencies" needs to be exported by the entry point index.d.ts
|
||||
// Warning: (ae-missing-release-tag) "DataPublicPlugin" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public (undocumented)
|
||||
export class Plugin implements Plugin_2<DataPublicPluginSetup, DataPublicPluginStart, DataSetupDependencies, DataStartDependencies> {
|
||||
// Warning: (ae-forgotten-export) The symbol "ConfigSchema" needs to be exported by the entry point index.d.ts
|
||||
constructor(initializerContext: PluginInitializerContext_2<ConfigSchema>);
|
||||
// (undocumented)
|
||||
setup(core: CoreSetup<DataStartDependencies, DataPublicPluginStart>, { bfetch, expressions, uiActions, usageCollection, inspector }: DataSetupDependencies): DataPublicPluginSetup;
|
||||
// (undocumented)
|
||||
start(core: CoreStart_2, { uiActions }: DataStartDependencies): DataPublicPluginStart;
|
||||
// (undocumented)
|
||||
stop(): void;
|
||||
}
|
||||
|
||||
// Warning: (ae-forgotten-export) The symbol "PluginInitializerContext" needs to be exported by the entry point index.d.ts
|
||||
// Warning: (ae-missing-release-tag) "plugin" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public (undocumented)
|
||||
export function plugin(initializerContext: PluginInitializerContext<ConfigSchema>): Plugin;
|
||||
export function plugin(initializerContext: PluginInitializerContext<ConfigSchema>): DataPlugin;
|
||||
|
||||
// Warning: (ae-missing-release-tag) "Query" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
|
@ -2772,20 +2773,20 @@ export interface WaitUntilNextSessionCompletesOptions {
|
|||
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "validateIndexPattern" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "flattenHitWrapper" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:238:27 - (ae-forgotten-export) The symbol "formatHitProvider" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:407:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:407:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:407:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:409:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:410:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:419:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:420:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:421:1 - (ae-forgotten-export) The symbol "IpAddress" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:422:1 - (ae-forgotten-export) The symbol "isDateHistogramBucketAggConfig" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:426:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:427:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:430:1 - (ae-forgotten-export) The symbol "parseInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:431:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:434:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:408:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:408:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:408:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:410:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:411:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:420:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:421:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:422:1 - (ae-forgotten-export) The symbol "IpAddress" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:423:1 - (ae-forgotten-export) The symbol "isDateHistogramBucketAggConfig" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:427:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:428:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:431:1 - (ae-forgotten-export) The symbol "parseInterval" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:432:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/index.ts:435:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/query/state_sync/connect_to_query_state.ts:34:5 - (ae-forgotten-export) The symbol "FilterStateStore" needs to be exported by the entry point index.d.ts
|
||||
// src/plugins/data/public/search/session/session_service.ts:56:5 - (ae-forgotten-export) The symbol "UrlGeneratorStateMapping" needs to be exported by the entry point index.d.ts
|
||||
|
||||
|
|
|
@ -38,6 +38,7 @@ import { ISearchOptions as ISearchOptions_2 } from 'src/plugins/data/public';
|
|||
import { ISearchSource } from 'src/plugins/data/public';
|
||||
import { IUiSettingsClient } from 'src/core/server';
|
||||
import { IUiSettingsClient as IUiSettingsClient_3 } from 'kibana/server';
|
||||
import { JsonValue } from '@kbn/common-utils';
|
||||
import { KibanaRequest } from 'src/core/server';
|
||||
import { KibanaRequest as KibanaRequest_2 } from 'kibana/server';
|
||||
import { Logger } from 'src/core/server';
|
||||
|
@ -460,7 +461,7 @@ export const esFilters: {
|
|||
export const esKuery: {
|
||||
nodeTypes: import("../common/es_query/kuery/node_types").NodeTypes;
|
||||
fromKueryExpression: (expression: any, parseOptions?: Partial<import("../common").KueryParseOptions>) => import("../common").KueryNode;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("../../kibana_utils/common").JsonObject;
|
||||
toElasticsearchQuery: (node: import("../common").KueryNode, indexPattern?: import("../common").IIndexPattern | undefined, config?: Record<string, any> | undefined, context?: Record<string, any> | undefined) => import("@kbn/common-utils").JsonObject;
|
||||
};
|
||||
|
||||
// Warning: (ae-missing-release-tag) "esQuery" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
|
|
|
@ -18,4 +18,5 @@ export const indexPatternsMock = ({
|
|||
return indexPatternMock;
|
||||
}
|
||||
},
|
||||
} as unknown) as IndexPatternsService;
|
||||
updateSavedObject: jest.fn(),
|
||||
} as unknown) as jest.Mocked<IndexPatternsService>;
|
||||
|
|
|
@ -7,7 +7,13 @@
|
|||
*/
|
||||
|
||||
import { IUiSettingsClient } from 'kibana/public';
|
||||
import { DEFAULT_COLUMNS_SETTING, DOC_TABLE_LEGACY, SAMPLE_SIZE_SETTING } from '../../common';
|
||||
import {
|
||||
CONTEXT_TIE_BREAKER_FIELDS_SETTING,
|
||||
DEFAULT_COLUMNS_SETTING,
|
||||
DOC_TABLE_LEGACY,
|
||||
SAMPLE_SIZE_SETTING,
|
||||
SEARCH_FIELDS_FROM_SOURCE,
|
||||
} from '../../common';
|
||||
|
||||
export const uiSettingsMock = ({
|
||||
get: (key: string) => {
|
||||
|
@ -17,6 +23,10 @@ export const uiSettingsMock = ({
|
|||
return ['default_column'];
|
||||
} else if (key === DOC_TABLE_LEGACY) {
|
||||
return true;
|
||||
} else if (key === CONTEXT_TIE_BREAKER_FIELDS_SETTING) {
|
||||
return ['_doc'];
|
||||
} else if (key === SEARCH_FIELDS_FROM_SOURCE) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
} as unknown) as IUiSettingsClient;
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
<context-app
|
||||
anchor-id="contextAppRoute.anchorId"
|
||||
columns="contextAppRoute.state.columns"
|
||||
<context-app-legacy
|
||||
index-pattern="contextAppRoute.indexPattern"
|
||||
app-state="contextAppRoute.state"
|
||||
state-container="contextAppRoute.stateContainer"
|
||||
filters="contextAppRoute.filters"
|
||||
predecessor-count="contextAppRoute.state.predecessorCount"
|
||||
successor-count="contextAppRoute.state.successorCount"
|
||||
sort="contextAppRoute.state.sort"></context-app>
|
||||
index-pattern-id="contextAppRoute.indexPatternId"
|
||||
anchor-id="contextAppRoute.anchorId">
|
||||
</context-app-legacy>
|
||||
|
|
|
@ -6,12 +6,8 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { CONTEXT_DEFAULT_SIZE_SETTING } from '../../../common';
|
||||
import { getAngularModule, getServices } from '../../kibana_services';
|
||||
import './context_app';
|
||||
import { getState } from './context_state';
|
||||
import contextAppRouteTemplate from './context.html';
|
||||
import { getRootBreadcrumbs } from '../helpers/breadcrumbs';
|
||||
|
||||
|
@ -28,9 +24,14 @@ const k7Breadcrumbs = () => {
|
|||
|
||||
getAngularModule().config(($routeProvider) => {
|
||||
$routeProvider.when('/context/:indexPatternId/:id*', {
|
||||
controller: ContextAppRouteController,
|
||||
controller: function ($routeParams, $scope, $route) {
|
||||
this.indexPattern = $route.current.locals.indexPattern.ip;
|
||||
this.anchorId = $routeParams.id;
|
||||
this.indexPatternId = $route.current.params.indexPatternId;
|
||||
},
|
||||
k7Breadcrumbs,
|
||||
controllerAs: 'contextAppRoute',
|
||||
reloadOnSearch: false,
|
||||
resolve: {
|
||||
indexPattern: ($route, Promise) => {
|
||||
const indexPattern = getServices().indexPatterns.get($route.current.params.indexPatternId);
|
||||
|
@ -40,57 +41,3 @@ getAngularModule().config(($routeProvider) => {
|
|||
template: contextAppRouteTemplate,
|
||||
});
|
||||
});
|
||||
|
||||
function ContextAppRouteController($routeParams, $scope, $route) {
|
||||
const filterManager = getServices().filterManager;
|
||||
const indexPattern = $route.current.locals.indexPattern.ip;
|
||||
const stateContainer = getState({
|
||||
defaultStepSize: getServices().uiSettings.get(CONTEXT_DEFAULT_SIZE_SETTING),
|
||||
timeFieldName: indexPattern.timeFieldName,
|
||||
storeInSessionStorage: getServices().uiSettings.get('state:storeInSessionStorage'),
|
||||
history: getServices().history(),
|
||||
toasts: getServices().core.notifications.toasts,
|
||||
uiSettings: getServices().core.uiSettings,
|
||||
});
|
||||
const {
|
||||
startSync: startStateSync,
|
||||
stopSync: stopStateSync,
|
||||
appState,
|
||||
getFilters,
|
||||
setFilters,
|
||||
setAppState,
|
||||
flushToUrl,
|
||||
} = stateContainer;
|
||||
this.stateContainer = stateContainer;
|
||||
this.state = { ...appState.getState() };
|
||||
this.anchorId = $routeParams.id;
|
||||
this.indexPattern = indexPattern;
|
||||
filterManager.setFilters(_.cloneDeep(getFilters()));
|
||||
startStateSync();
|
||||
|
||||
// take care of parameter changes in UI
|
||||
$scope.$watchGroup(
|
||||
[
|
||||
'contextAppRoute.state.columns',
|
||||
'contextAppRoute.state.predecessorCount',
|
||||
'contextAppRoute.state.successorCount',
|
||||
],
|
||||
(newValues) => {
|
||||
const [columns, predecessorCount, successorCount] = newValues;
|
||||
if (Array.isArray(columns) && predecessorCount >= 0 && successorCount >= 0) {
|
||||
setAppState({ columns, predecessorCount, successorCount });
|
||||
flushToUrl(true);
|
||||
}
|
||||
}
|
||||
);
|
||||
// take care of parameter filter changes
|
||||
const filterObservable = filterManager.getUpdates$().subscribe(() => {
|
||||
setFilters(filterManager);
|
||||
$route.reload();
|
||||
});
|
||||
|
||||
$scope.$on('$destroy', () => {
|
||||
stopStateSync();
|
||||
filterObservable.unsubscribe();
|
||||
});
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
import moment from 'moment';
|
||||
import { get, last } from 'lodash';
|
||||
import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
|
||||
import { EsHitRecordList, fetchContextProvider } from './context';
|
||||
import { EsHitRecordList, fetchContextProvider, SurrDocType } from './context';
|
||||
import { setServices, SortDirection } from '../../../../kibana_services';
|
||||
import { EsHitRecord } from './context';
|
||||
import { Query } from '../../../../../../data/public';
|
||||
|
@ -73,7 +73,7 @@ describe('context app', function () {
|
|||
};
|
||||
|
||||
return fetchContextProvider(createIndexPatternsStub()).fetchSurroundingDocs(
|
||||
'predecessors',
|
||||
SurrDocType.PREDECESSORS,
|
||||
indexPatternId,
|
||||
anchor as EsHitRecord,
|
||||
timeField,
|
||||
|
@ -265,7 +265,7 @@ describe('context app', function () {
|
|||
};
|
||||
|
||||
return fetchContextProvider(createIndexPatternsStub(), true).fetchSurroundingDocs(
|
||||
'predecessors',
|
||||
SurrDocType.PREDECESSORS,
|
||||
indexPatternId,
|
||||
anchor as EsHitRecord,
|
||||
timeField,
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue