Revert "Functional test setup with kbn-test package (#18568)"

This reverts commit c2265e3931.
spalger 2018-05-15 13:16:36 -07:00
parent 05e4db527d
commit 7df7ab92d6
65 changed files with 1587 additions and 3451 deletions


@ -26,7 +26,6 @@ module.exports = {
'packages/kbn-es/**/*',
'packages/kbn-datemath/**/*.js',
'packages/kbn-plugin-generator/**/*',
'packages/kbn-test/**/*',
'packages/kbn-eslint-import-resolver-kibana/**/*',
'x-pack/plugins/apm/**/*',
],


@ -355,7 +355,6 @@ yarn test:browser --dev # remove the --dev flag to run them once and close
[Read about the `FunctionalTestRunner`](https://www.elastic.co/guide/en/kibana/current/development-functional-tests.html) to learn more about how you can run and develop functional tests for Kibana core and plugins.
You can also look into the [Scripts README.md](./scripts/README.md) to learn more about using the node scripts we provide for building Kibana, running integration tests, and starting up Kibana and Elasticsearch while you develop.
### Building OS packages


@ -224,7 +224,6 @@
"@kbn/eslint-import-resolver-kibana": "link:packages/kbn-eslint-import-resolver-kibana",
"@kbn/eslint-plugin-license-header": "link:packages/kbn-eslint-plugin-license-header",
"@kbn/plugin-generator": "link:packages/kbn-plugin-generator",
"@kbn/test": "link:packages/kbn-test",
"angular-mocks": "1.4.7",
"babel-eslint": "8.1.2",
"babel-jest": "^22.4.3",


@ -78,7 +78,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
.map(code => {
// JVM exits with 143 on SIGTERM and 130 on SIGINT, don't treat them as errors
if (code > 0 && !(code === 143 || code === 130)) {
throw createCliError(`[${name}] exited with code ${code}`);
throw createCliError(`[${name}] exitted with code ${code}`);
}
return code;


@ -76,9 +76,7 @@ export class ProcRunner {
.first()
.catch(err => {
if (err.name !== 'EmptyError') {
throw createCliError(
`[${name}] exited without matching pattern: ${wait}`
);
throw createCliError(`[${name}] exitted without matching pattern: ${wait}`);
} else {
throw err;
}
@ -173,12 +171,7 @@ export class ProcRunner {
proc.outcome$.subscribe({
next: (code) => {
const duration = moment.duration(Date.now() - startMs);
this._log.info(
'[%s] exited with %s after %s',
name,
code,
duration.humanize()
);
this._log.info('[%s] exitted with %s after %s', name, code, duration.humanize());
},
complete: () => {
remove();


@ -1,3 +0,0 @@
{
"presets": ["@kbn/babel-preset/node_preset"]
}


@ -1,39 +0,0 @@
Kibana Testing Library
======================
The @kbn/test package provides ways to run tests. Currently only functional testing is provided by this library; unit and other kinds of testing may be added here later.
Functional Testing
-------------------
### Dependencies
Functional testing methods exist in the `src/functional_tests` directory. They depend on the Functional Test Runner, which is found in [`{KIBANA_ROOT}/src/functional_test_runner`](../../src/functional_test_runner). Ideally, libraries provided by Kibana packages such as this one should not depend on Kibana source code that lives in [`{KIBANA_ROOT}/src`](../../src). The goal is to start pulling test and development utilities out into packages so they can be used across Kibana and plugins. Accordingly, the Functional Test Runner itself will be pulled out into a package (or part of one), and this package's dependence on it will no longer be an issue.
### Exposed methods
#### runTests(configPaths: Array<string>)
For each config file specified in `configPaths`, starts Elasticsearch and Kibana, runs the tests specified in that config file, and shuts both servers down when the tests complete. This repeats for every config file.
`configPaths`: array of strings, each an absolute path to a config file that looks like [this](../../test/functional/config.js), following the config schema specified [here](../../src/functional_test_runner/lib/config/schema.js).
Internally the method that starts Elasticsearch comes from [kbn-es](../../packages/kbn-es).
#### startServers(configPath: string)
Starts Elasticsearch and Kibana servers given a specified config.
`configPath`: absolute path to a config file that looks like [this](../../test/functional/config.js), following the config schema specified [here](../../src/functional_test_runner/lib/config/schema.js).
This lets users keep the servers running with this method while starting a separate process that runs just the tests. Start servers _and_ run tests using the same config file ([see how](../../scripts/README.md)).
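As a point of reference, the `scripts/functional_tests` entry point (shown later in this diff) wires these helpers up roughly as follows; this is a minimal sketch of that wrapper script, with paths as they appear there:

```js
// scripts/functional_tests: bootstrap babel, then hand the default configs to @kbn/test
require('../src/babel-register');

require('../packages/kbn-test').runTestsCli([
  require.resolve('../test/functional/config.js'),
  require.resolve('../test/api_integration/config.js'),
]);
```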
## Rationale
### Single config per setup
We think it makes sense to specify the tests to run along with the particular server configuration for the Elasticsearch and Kibana servers, because the tests expect a particular configuration. For example, the SAML API integration tests expect certain XML files to exist in Elasticsearch's config directory, and certain SAML-specific options to be passed in via the command line (or alternatively via the `.yml` config file) to both Elasticsearch and Kibana. It makes sense to keep all these config options together with the list of test files.
### Multiple configs running in succession
We also think it makes sense to have a test runner intelligently (but simply) start servers, run tests, tear down servers, and repeat for each config, uninterrupted. There's nothing special about each kind of config that specifies running some set of functional tests against some kind of Elasticsearch/Kibana servers. There doesn't need to be a separate job to run each kind of setup/test/teardown. These can all be orchestrated sequentially via the current `runTests` implementation. This is how we envision tests to run on CI.
This inherently means that grouping test files in configs matters, such that a group of test files that depends on a particular server config appears together in that config's `testFiles` list. Given how quickly and easily we can start servers using [@kbn/es](../../packages/kbn-es), it should not impact performance to logically group tests by domain even if multiple groups of tests share the same server config. We can think about how to group test files together across domains when that time comes.
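For orientation, a pared-down config in the shape this package consumes might look like the sketch below. The field names are taken from the config schema and the configs under `test/` shown later in this diff; the suite path and report name are purely illustrative:

```js
// test/example/config.js -- illustrative sketch only
export default async function ({ readConfigFile }) {
  // reuse server and test-cluster settings from the shared common config
  const commonConfig = await readConfigFile(require.resolve('../common/config'));

  return {
    testFiles: [require.resolve('./my_suite')], // hypothetical test suite entry
    servers: commonConfig.get('servers'),
    esTestCluster: commonConfig.get('esTestCluster'),
    kibanaServerArgs: [
      ...commonConfig.get('kibanaServerArgs'),
      '--optimize.enabled=false',
    ],
    junit: { reportName: 'Example Tests' },
  };
}
```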


@ -1,27 +0,0 @@
{
"name": "@kbn/test",
"main": "./target/index.js",
"version": "1.0.0",
"license": "Apache-2.0",
"private": true,
"scripts": {
"build": "babel src --out-dir target",
"kbn:bootstrap": "yarn build",
"kbn:watch": "yarn build --watch"
},
"devDependencies": {
"@kbn/babel-preset": "link:../kbn-babel-preset",
"@kbn/dev-utils": "link:../kbn-dev-utils",
"babel-cli": "^6.26.0"
},
"dependencies": {
"chalk": "^2.4.1",
"dedent": "^0.7.0",
"getopts": "^2.0.6",
"glob": "^7.1.2",
"rxjs": "^5.4.3",
"tar-fs": "^1.16.2",
"tmp": "^0.0.33",
"zlib": "^1.0.5"
}
}


@ -1,2 +0,0 @@
export { createEsTestCluster } from './es_test_cluster.js';
export { esTestConfig } from './es_test_config';


@ -1,2 +0,0 @@
export { runTestsCli } from './run_tests_cli';
export { startServersCli } from './start_servers_cli';


@ -1,78 +0,0 @@
import dedent from 'dedent';
import getopts from 'getopts';
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
import { runTests } from '../../';
/**
* Run servers and tests for each config
* Only cares about --config option. Other options
* are passed directly to functional_test_runner, such as
* --bail, --verbose, etc.
* @param {string[]} defaultConfigPaths Array of paths to configs to use
* if no config option is passed
*/
export async function runTestsCli(defaultConfigPaths) {
const { configs, help, bail, log } = processArgs(defaultConfigPaths);
if (help) return displayHelp();
if (!configs || configs.length === 0) {
log.error(
`Run Tests requires at least one path to a config. Leave blank to use defaults.`
);
process.exit(9);
}
try {
await runTests(configs, { bail, log });
} catch (err) {
log.error('FATAL ERROR');
log.error(err);
process.exit(1);
}
}
function processArgs(defaultConfigPaths) {
// If no args are passed, use {}
const options = getopts(process.argv.slice(2)) || {};
// If --config is passed without paths, it's "true", so use default
const configs =
typeof options.config === 'string' || Array.isArray(options.config)
? [].concat(options.config)
: defaultConfigPaths;
const log = createToolingLog(pickLevelFromFlags(options));
log.pipe(process.stdout);
return {
configs,
log,
help: options.help,
bail: options.bail,
rest: options._,
};
}
function displayHelp() {
console.log(
dedent(`
Run Functional Tests
Usage: node scripts/functional_tests [options]
--config Option to pass in a config
Can pass in multiple configs with
--config file1 --config file2 --config file3
--bail Stop the test run at the first failure
--help Display this menu and exit
Log level options:
--verbose
--debug
--quiet Log errors
--silent
`)
);
}


@ -1,76 +0,0 @@
import chalk from 'chalk';
import dedent from 'dedent';
import getopts from 'getopts';
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
import { startServers } from '../../';
/**
* Start servers
* @param {string} configPath path to config
*/
export async function startServersCli(defaultConfigPath) {
const { config, log, help } = processArgv(defaultConfigPath);
if (help) return displayHelp();
if (!config) {
log.error(
`Start Servers requires one path to a config. Leave blank to use default.`
);
process.exit(1);
}
try {
await startServers(config, { log });
} catch (err) {
log.error('FATAL ERROR');
log.error(err);
process.exit(1);
}
}
function processArgv(defaultConfigPath) {
const options = getopts(process.argv.slice(2)) || {};
if (Array.isArray(options.config)) {
console.log(
chalk.red(
`Starting servers requires a single config path. Multiple were passed.`
)
);
process.exit(9);
}
const config =
typeof options.config === 'string' ? options.config : defaultConfigPath;
const log = createToolingLog(pickLevelFromFlags(options));
log.pipe(process.stdout);
return {
config,
log,
help: options.help,
rest: options._,
};
}
function displayHelp() {
console.log(
dedent(`
Start Functional Test Servers
Usage: node scripts/functional_tests_server [options]
--config Option to pass in a config
--help Display this menu and exit
Log level options:
--verbose
--debug
--quiet Log errors
--silent
`)
);
}


@ -1,9 +0,0 @@
export { runKibanaServer } from './run_kibana_server';
export { runElasticsearch } from './run_elasticsearch';
export { runFtr } from './run_ftr';
export {
KIBANA_ROOT,
KIBANA_FTR_SCRIPT,
FUNCTIONAL_CONFIG_PATH,
API_CONFIG_PATH,
} from './paths';


@ -1,25 +0,0 @@
import { resolve, relative } from 'path';
// resolve() treats relative paths as relative to process.cwd(),
// so to return a relative path we use relative()
function resolveRelative(path) {
return relative(process.cwd(), resolve(path));
}
export const KIBANA_EXEC = 'node';
export const KIBANA_EXEC_PATH = resolveRelative('scripts/kibana');
export const KIBANA_ROOT = resolve(__dirname, '../../../../../');
export const KIBANA_FTR_SCRIPT = resolve(
KIBANA_ROOT,
'scripts/functional_test_runner'
);
export const PROJECT_ROOT = resolve(__dirname, '../../../../../../');
export const FUNCTIONAL_CONFIG_PATH = resolve(
KIBANA_ROOT,
'test/functional/config'
);
export const API_CONFIG_PATH = resolve(
KIBANA_ROOT,
'test/api_integration/config'
);
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/bundles');


@ -1,30 +0,0 @@
import { resolve } from 'path';
import { KIBANA_ROOT } from './paths';
import { createEsTestCluster } from '../../es';
import { setupUsers, DEFAULT_SUPERUSER_PASS } from './auth';
export async function runElasticsearch({ config, log }) {
const isOss = config.get('esTestCluster.license') === 'oss';
const cluster = createEsTestCluster({
port: config.get('servers.elasticsearch.port'),
password: !isOss
? DEFAULT_SUPERUSER_PASS
: config.get('servers.elasticsearch.password'),
license: config.get('esTestCluster.license'),
log,
basePath: resolve(KIBANA_ROOT, '.es'),
from: config.get('esTestCluster.from'),
});
const esArgs = config.get('esTestCluster.serverArgs');
await cluster.start(esArgs);
if (!isOss) {
await setupUsers(log, config);
}
return cluster;
}


@ -1,29 +0,0 @@
import { KIBANA_FTR_SCRIPT, PROJECT_ROOT } from './paths';
export async function runFtr({
procs,
configPath,
bail,
log,
cwd = PROJECT_ROOT,
}) {
const args = [KIBANA_FTR_SCRIPT];
if (getLogFlag(log)) args.push(`--${getLogFlag(log)}`);
if (bail) args.push('--bail');
if (configPath) args.push('--config', configPath);
await procs.run('ftr', {
cmd: 'node',
args,
cwd,
wait: true,
});
}
function getLogFlag(log) {
const level = log.getLevel();
if (level === 'info') return null;
return level === 'error' ? 'quiet' : level;
}


@ -1,17 +0,0 @@
import { KIBANA_ROOT, KIBANA_EXEC, KIBANA_EXEC_PATH } from './paths';
export async function runKibanaServer({ procs, config }) {
const cliArgs = config.get('kibanaServerArgs') || [];
// start the kibana server and wait for it to log "Server running" before resolving
await procs.run('kibana', {
cmd: KIBANA_EXEC,
args: [KIBANA_EXEC_PATH, ...cliArgs],
env: {
FORCE_COLOR: 1,
...process.env,
},
cwd: KIBANA_ROOT,
wait: /Server running/,
});
}


@ -1,90 +0,0 @@
import { relative, resolve } from 'path';
import Rx from 'rxjs/Rx';
import { withProcRunner } from '@kbn/dev-utils';
import {
runElasticsearch,
runKibanaServer,
runFtr,
KIBANA_FTR_SCRIPT,
} from './lib';
import { readConfigFile } from '../../../../src/functional_test_runner/lib';
const SUCCESS_MESSAGE = `
Elasticsearch and Kibana are ready for functional testing. Start the functional tests
in another terminal session by running this command from this directory:
node ${relative(process.cwd(), KIBANA_FTR_SCRIPT)}
`;
/**
* Run servers and tests for each config
* @param {string[]} configPaths Array of paths to configs
* @param {boolean} bail Whether to exit test run at the first failure
* @param {Log} log Optional logger
*/
export async function runTests(configPaths, { bail, log }) {
for (const configPath of configPaths) {
await runSingleConfig(resolve(process.cwd(), configPath), { bail, log });
}
}
/**
* Start only servers using single config
* @param {string} configPath Path to a config file
* @param {Log} log Optional logger
*/
export async function startServers(configPath, { log }) {
configPath = resolve(process.cwd(), configPath);
await withProcRunner(log, async procs => {
const config = await readConfigFile(log, configPath);
const es = await runElasticsearch({ config, log });
await runKibanaServer({ procs, config, log });
// wait for 5 seconds of silence before logging the
// success message so that it doesn't get buried
await silence(5000, { log });
log.info(SUCCESS_MESSAGE);
await procs.waitForAllToStop();
await es.cleanup();
});
}
async function silence(milliseconds, { log }) {
await Rx.Observable.fromEvent(log, 'data')
.startWith(null)
.switchMap(() => Rx.Observable.timer(milliseconds))
.take(1)
.toPromise();
}
/*
* Start servers and run tests for single config
*/
async function runSingleConfig(configPath, { bail, log }) {
await withProcRunner(log, async procs => {
const config = await readConfigFile(log, configPath);
const es = await runElasticsearch({ config, log });
await runKibanaServer({ procs, config });
// Note: When solving how to incorporate functional_test_runner
// clean this up
await runFtr({
procs,
configPath,
bail,
log,
cwd: process.cwd(),
});
await procs.stop('kibana');
await es.cleanup();
});
}


@ -1,14 +0,0 @@
export { runTestsCli, startServersCli } from './functional_tests/cli';
export { runTests, startServers } from './functional_tests/tasks';
export { OPTIMIZE_BUNDLE_DIR, KIBANA_ROOT } from './functional_tests/lib/paths';
export { esTestConfig, createEsTestCluster } from './es';
export {
kbnTestConfig,
kibanaServerTestUser,
kibanaTestUser,
adminTestUser,
} from './kbn';


@ -1,2 +0,0 @@
export { kbnTestConfig } from './kbn_test_config';
export { kibanaTestUser, kibanaServerTestUser, adminTestUser } from './users';


@ -1,36 +0,0 @@
import { kibanaTestUser } from './users';
import url from 'url';
export const kbnTestConfig = new class KbnTestConfig {
getPort() {
return this.getUrlParts().port;
}
getUrlParts() {
// allow setting one complete TEST_KIBANA_URL, e.g. http://elastic:changeme@localhost:5620
if (process.env.TEST_KIBANA_URL) {
const testKibanaUrl = url.parse(process.env.TEST_KIBANA_URL);
return {
protocol: testKibanaUrl.protocol.slice(0, -1),
hostname: testKibanaUrl.hostname,
port: parseInt(testKibanaUrl.port, 10),
auth: testKibanaUrl.auth,
username: testKibanaUrl.auth.split(':')[0],
password: testKibanaUrl.auth.split(':')[1],
};
}
const username =
process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
const password =
process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
return {
protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5620,
auth: `${username}:${password}`,
username,
password,
};
}
}();


@ -1,16 +0,0 @@
const env = process.env;
export const kibanaTestUser = {
username: env.TEST_KIBANA_USER || 'elastic',
password: env.TEST_KIBANA_PASS || 'changeme',
};
export const kibanaServerTestUser = {
username: env.TEST_KIBANA_SERVER_USER || 'kibana',
password: env.TEST_KIBANA_SERVER_PASS || 'changeme',
};
export const adminTestUser = {
username: env.TEST_ES_USER || 'elastic',
password: env.TEST_ES_PASS || 'changeme',
};

File diff suppressed because it is too large.


@ -1,4 +1,4 @@
# Kibana Dev Scripts
# kibana dev scripts
This directory contains scripts useful for interacting with Kibana tools in development. Use the node executable and `--help` flag to learn about how they work:
@ -6,37 +6,11 @@ This directory contains scripts useful for interacting with Kibana tools in deve
node scripts/{{script name}} --help
```
## For Developers
## for developers
This directory is excluded from the build and tools within it should help users discover their capabilities. Each script in this directory must:
- require `src/babel-register` to bootstrap babel
- call out to source code in the [`src`](../src) or [`packages`](../packages) directories
- call out to source code that is in the `src` directory
- react to the `--help` flag
- run everywhere OR check and fail fast when a required OS or toolchain is not available
## Functional Test Scripts
**`node scripts/functional_tests [--config test/functional/config.js --config test/api_integration/config.js]`**
Runs all the functional tests: selenium tests and api integration tests. List configs with multiple `--config` arguments. Uses the [@kbn/test](../packages/kbn-test) library to run Elasticsearch and Kibana servers and tests against those servers, for multiple server+test setups. In particular, calls out to [`runTests()`](../packages/kbn-test/src/functional_tests/tasks.js). Can be run on a single config.
**`node scripts/functional_tests_server [--config test/functional/config.js]`**
Starts just the Elasticsearch and Kibana servers given a single config, i.e. via `--config test/functional/config.js` or `--config test/api_integration/config`. Allows the user to start just the servers with this script, and keep them running while running tests against these servers. The idea is that the same config file configures both Elasticsearch and Kibana servers. Uses the [`startServers()`](../packages/kbn-test/src/functional_tests/tasks.js#L52-L80) method from [@kbn/test](../packages/kbn-test) library.
Example: start servers _and_ run tests separately, using the same config:
```sh
# Just the servers
node scripts/functional_tests_server --config path/to/config
```
In another terminal:
```sh
# Just the tests--against the running servers
node scripts/functional_test_runner --config path/to/config
```
For details on how the internal methods work, [read this readme](../packages/kbn-test/README.md).
- run everywhere OR check and fail fast when a required OS or toolchain is not available


@ -1,5 +0,0 @@
require('../src/babel-register');
require('../packages/kbn-test').runTestsCli([
require.resolve('../test/functional/config.js'),
require.resolve('../test/api_integration/config.js'),
]);


@ -1,4 +0,0 @@
require('../src/babel-register');
require('../packages/kbn-test').startServersCli(
require.resolve('../test/functional/config.js'),
);


@ -3,7 +3,7 @@ import Promise from 'bluebird';
import sinon from 'sinon';
import expect from 'expect.js';
import { esTestConfig } from '@kbn/test';
import { esTestConfig } from '../../../../test_utils/es';
import { ensureEsVersion } from '../ensure_es_version';
describe('plugins/elasticsearch', () => {


@ -7,7 +7,7 @@ const NoConnections = require('elasticsearch').errors.NoConnections;
import mappings from './fixtures/mappings';
import healthCheck from '../health_check';
import kibanaVersion from '../kibana_version';
import { esTestConfig } from '@kbn/test';
import { esTestConfig } from '../../../../test_utils/es';
import * as patchKibanaIndexNS from '../patch_kibana_index';
const esPort = esTestConfig.getPort();


@ -88,15 +88,6 @@ export const schema = Joi.object().keys({
elasticsearch: urlPartsSchema(),
}).default(),
esTestCluster: Joi.object().keys({
license: Joi.string().default('oss'),
from: Joi.string().default('snapshot'),
serverArgs: Joi.array(),
}).default(),
kibanaServerArgs: Joi.array(),
// env allows generic data, but should be removed
env: Joi.object().default(),
chromedriver: Joi.object().keys({


@ -1,32 +1,27 @@
import { resolve } from 'path';
import { format } from 'url';
import { get } from 'lodash';
import { format } from 'url';
import elasticsearch from 'elasticsearch';
import toPath from 'lodash/internal/toPath';
import { Cluster } from '@kbn/es';
import { esTestConfig } from './es_test_config';
import { rmrfSync } from './rmrf_sync';
import { KIBANA_ROOT } from '../';
import elasticsearch from 'elasticsearch';
export function createEsTestCluster(options = {}) {
export function createTestCluster(options = {}) {
const {
port = esTestConfig.getPort(),
password = 'changeme',
license = 'oss',
log,
basePath = resolve(KIBANA_ROOT, '.es'),
// Use source when running on CI
from = esTestConfig.getBuildFrom(),
basePath = resolve(__dirname, '../../../.es'),
} = options;
const randomHash = Math.random()
.toString(36)
.substring(2);
const randomHash = Math.random().toString(36).substring(2);
const clusterName = `test-${randomHash}`;
const config = {
version: esTestConfig.getVersion(),
installPath: resolve(basePath, clusterName),
sourcePath: resolve(KIBANA_ROOT, '../elasticsearch'),
sourcePath: resolve(__dirname, '../../../../elasticsearch'),
password,
license,
basePath,
@ -34,6 +29,9 @@ export function createEsTestCluster(options = {}) {
const cluster = new Cluster(log);
// Use source when running on CI
const from = options.from || esTestConfig.getBuildFrom();
return new class EsTestCluster {
getStartTimeout() {
const second = 1000;
@ -53,20 +51,18 @@ export function createEsTestCluster(options = {}) {
`cluster.name=${clusterName}`,
`http.port=${port}`,
`discovery.zen.ping.unicast.hosts=localhost:${port}`,
...esArgs,
...esArgs
],
});
}
async stop() {
await cluster.stop();
log.info('[es] stopped');
}
async cleanup() {
await this.stop();
rmrfSync(config.installPath);
log.info('[es] cleanup complete');
}
/**
@ -88,7 +84,7 @@ export function createEsTestCluster(options = {}) {
return format(parts);
}
}();
};
}
/**


@ -1,6 +1,6 @@
import url, { format as formatUrl } from 'url';
import pkg from '../../../../package.json';
import { adminTestUser } from '../kbn';
import url, { format as formatUrl } from 'url';
import pkg from '../../../package.json';
import { admin } from '../../../test/shield';
export const esTestConfig = new class EsTestConfig {
getVersion() {
@ -30,21 +30,22 @@ export const esTestConfig = new class EsTestConfig {
port: parseInt(testEsUrl.port, 10),
username: testEsUrl.auth.split(':')[0],
password: testEsUrl.auth.split(':')[1],
auth: testEsUrl.auth,
auth: testEsUrl.auth
};
}
const username = process.env.TEST_KIBANA_USERNAME || adminTestUser.username;
const password = process.env.TEST_KIBANA_PASSWORD || adminTestUser.password;
const username = process.env.TEST_KIBANA_USERNAME || admin.username;
const password = process.env.TEST_KIBANA_PASSWORD || admin.password;
return {
// Allow setting any individual component(s) of the URL,
// or use default values (username and password from ../kbn/users.js)
// or use default values (username and password from shield.js)
protocol: process.env.TEST_ES_PROTOCOL || 'http',
hostname: process.env.TEST_ES_HOSTNAME || 'localhost',
port: parseInt(process.env.TEST_ES_PORT, 10) || 9220,
auth: username + ':' + password,
auth: `${username}:${password}`,
username: username,
password: password,
};
}
}();
};


@ -0,0 +1,2 @@
export { esTestConfig } from './es_test_config';
export { createTestCluster } from './es_test_cluster';


@ -18,4 +18,4 @@ export function rmrfSync(path) {
});
fs.rmdirSync(path);
}
}
}


@ -9,7 +9,7 @@ import url from 'url';
* protocol: 'http',
* hostname: 'localhost',
* port: 9220,
* auth: kibanaTestUser.username + ':' + kibanaTestUser.password
* auth: shield.kibanaUser.username + ':' + shield.kibanaUser.password
* }
* @param {object} app The params to append
* example:


@ -1,7 +1,8 @@
import { resolve } from 'path';
import { defaultsDeep, set } from 'lodash';
import { header as basicAuthHeader } from './base_auth';
import { esTestConfig, kibanaTestUser, kibanaServerTestUser } from '@kbn/test';
import { kibanaUser, kibanaServer } from '../../test/shield';
import { esTestConfig } from '../test_utils/es';
import KbnServer from '../../src/server/kbn_server';
const DEFAULTS_SETTINGS = {
@ -28,8 +29,8 @@ const DEFAULT_SETTINGS_WITH_CORE_PLUGINS = {
},
elasticsearch: {
url: esTestConfig.getUrl(),
username: kibanaServerTestUser.username,
password: kibanaServerTestUser.password
username: kibanaServer.username,
password: kibanaServer.password
}
};
@ -59,7 +60,7 @@ export function createServerWithCorePlugins(settings = {}) {
* Creates request configuration with a basic auth header
*/
export function authOptions() {
const { username, password } = kibanaTestUser;
const { username, password } = kibanaUser;
const authHeader = basicAuthHeader(username, password);
return set({}, 'headers.Authorization', authHeader);
}


@ -1,7 +1,7 @@
import sinon from 'sinon';
import expect from 'expect.js';
import { createEsTestCluster } from '@kbn/test';
import { createTestCluster } from '../../../../test_utils/es';
import { createServerWithCorePlugins } from '../../../../test_utils/kbn_server';
import { createToolingLog } from '../../../../dev';
import { createOrUpgradeSavedConfig } from '../create_or_upgrade_saved_config';
@ -19,7 +19,7 @@ describe('createOrUpgradeSavedConfig()', () => {
log.info('starting elasticsearch');
log.indent(4);
const es = createEsTestCluster({ log });
const es = createTestCluster({ log });
this.timeout(es.getStartTimeout());
log.indent(-4);
@ -47,7 +47,7 @@ describe('createOrUpgradeSavedConfig()', () => {
type: 'config',
attributes: {
buildNum: 54090,
'5.4.0-SNAPSHOT': true,
'5.4.0-SNAPSHOT': true
},
},
{
@ -55,7 +55,7 @@ describe('createOrUpgradeSavedConfig()', () => {
type: 'config',
attributes: {
buildNum: 54010,
'5.4.0-rc1': true,
'5.4.0-rc1': true
},
},
{
@ -63,7 +63,7 @@ describe('createOrUpgradeSavedConfig()', () => {
type: 'config',
attributes: {
buildNum: 99999,
'@@version': true,
'@@version': true
},
},
]);
@ -83,20 +83,18 @@ describe('createOrUpgradeSavedConfig()', () => {
savedObjectsClient,
version: '5.4.0',
buildNum: 54099,
log: sinon.stub(),
log: sinon.stub()
});
const config540 = await savedObjectsClient.get('config', '5.4.0');
expect(config540)
.to.have.property('attributes')
.eql({
// should have the new build number
buildNum: 54099,
expect(config540).to.have.property('attributes').eql({
// should have the new build number
buildNum: 54099,
// 5.4.0-SNAPSHOT and @@version were ignored so we only have the
// attributes from 5.4.0-rc1, even though the other build nums are greater
'5.4.0-rc1': true,
});
// 5.4.0-SNAPSHOT and @@version were ignored so we only have the
// attributes from 5.4.0-rc1, even though the other build nums are greater
'5.4.0-rc1': true,
});
// add the 5.4.0 flag to the 5.4.0 savedConfig
await savedObjectsClient.update('config', '5.4.0', {
@ -109,20 +107,18 @@ describe('createOrUpgradeSavedConfig()', () => {
savedObjectsClient,
version: '5.4.1',
buildNum: 54199,
log: sinon.stub(),
log: sinon.stub()
});
const config541 = await savedObjectsClient.get('config', '5.4.1');
expect(config541)
.to.have.property('attributes')
.eql({
// should have the new build number
buildNum: 54199,
expect(config541).to.have.property('attributes').eql({
// should have the new build number
buildNum: 54199,
// should also include properties from 5.4.0 and 5.4.0-rc1
'5.4.0': true,
'5.4.0-rc1': true,
});
// should also include properties from 5.4.0 and 5.4.0-rc1
'5.4.0': true,
'5.4.0-rc1': true,
});
// add the 5.4.1 flag to the 5.4.1 savedConfig
await savedObjectsClient.update('config', '5.4.1', {
@ -135,21 +131,19 @@ describe('createOrUpgradeSavedConfig()', () => {
savedObjectsClient,
version: '7.0.0-rc1',
buildNum: 70010,
log: sinon.stub(),
log: sinon.stub()
});
const config700rc1 = await savedObjectsClient.get('config', '7.0.0-rc1');
expect(config700rc1)
.to.have.property('attributes')
.eql({
// should have the new build number
buildNum: 70010,
expect(config700rc1).to.have.property('attributes').eql({
// should have the new build number
buildNum: 70010,
// should also include properties from 5.4.1, 5.4.0 and 5.4.0-rc1
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
// should also include properties from 5.4.1, 5.4.0 and 5.4.0-rc1
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
// tag the 7.0.0-rc1 doc
await savedObjectsClient.update('config', '7.0.0-rc1', {
@ -162,22 +156,20 @@ describe('createOrUpgradeSavedConfig()', () => {
savedObjectsClient,
version: '7.0.0',
buildNum: 70099,
log: sinon.stub(),
log: sinon.stub()
});
const config700 = await savedObjectsClient.get('config', '7.0.0');
expect(config700)
.to.have.property('attributes')
.eql({
// should have the new build number
buildNum: 70099,
expect(config700).to.have.property('attributes').eql({
// should have the new build number
buildNum: 70099,
// should also include properties from ancestors, including 7.0.0-rc1
'7.0.0-rc1': true,
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
// should also include properties from ancestors, including 7.0.0-rc1
'7.0.0-rc1': true,
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
// tag the 7.0.0 doc
await savedObjectsClient.update('config', '7.0.0', {
@ -190,20 +182,18 @@ describe('createOrUpgradeSavedConfig()', () => {
savedObjectsClient,
version: '6.2.3-rc1',
buildNum: 62310,
log: sinon.stub(),
log: sinon.stub()
});
const config623rc1 = await savedObjectsClient.get('config', '6.2.3-rc1');
expect(config623rc1)
.to.have.property('attributes')
.eql({
// should have the new build number
buildNum: 62310,
expect(config623rc1).to.have.property('attributes').eql({
// should have the new build number
buildNum: 62310,
// should also include properties from ancestors, but not 7.0.0-rc1 or 7.0.0
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
// should also include properties from ancestors, but not 7.0.0-rc1 or 7.0.0
'5.4.1': true,
'5.4.0': true,
'5.4.0-rc1': true,
});
});
});


@ -1,5 +1,4 @@
import { createEsTestCluster } from '@kbn/test';
import { createToolingLog } from '@kbn/dev-utils';
import { createTestCluster } from '../../../../../test_utils/es';
import * as kbnTestServer from '../../../../../test_utils/kbn_server';
let kbnServer;
@ -7,17 +6,9 @@ let services;
let es;
export async function startServers() {
const log = createToolingLog('debug');
log.pipe(process.stdout);
log.indent(6);
log.info('starting elasticsearch');
log.indent(4);
es = createEsTestCluster({ log });
es = createTestCluster();
this.timeout(es.getStartTimeout());
log.indent(-4);
await es.start();
kbnServer = kbnTestServer.createServerWithCorePlugins();
@ -44,7 +35,7 @@ export function getServices() {
kbnServer,
callCluster,
savedObjectsClient,
uiSettings,
uiSettings
};
return services;


@ -1,4 +1,5 @@
import { esTestConfig, kbnTestConfig } from '@kbn/test';
import { esTestConfig } from '../../src/test_utils/es';
import { kibanaTestServerUrlParts } from '../../test/kibana_test_server_url_parts';
import { resolve } from 'path';
const SECOND = 1000;
@ -46,14 +47,14 @@ module.exports = function (grunt) {
'--optimize.enabled=false',
'--elasticsearch.url=' + esTestConfig.getUrl(),
'--elasticsearch.healthCheck.delay=' + HOUR,
'--server.port=' + kbnTestConfig.getPort(),
'--server.port=' + kibanaTestServerUrlParts.port,
'--server.xsrf.disableProtection=true',
];
const funcTestServerFlags = [
'--server.maxPayloadBytes=1648576', //default is 1048576
'--elasticsearch.url=' + esTestConfig.getUrl(),
'--server.port=' + kbnTestConfig.getPort(),
'--server.port=' + kibanaTestServerUrlParts.port,
];
const browserTestServerFlags = [


@ -6,7 +6,6 @@ import {
export default async function ({ readConfigFile }) {
const commonConfig = await readConfigFile(require.resolve('../common/config'));
const functionalConfig = await readConfigFile(require.resolve('../functional/config'));
return {
testFiles: [
@ -23,14 +22,6 @@ export default async function ({ readConfigFile }) {
servers: commonConfig.get('servers'),
junit: {
reportName: 'API Integration Tests'
},
env: commonConfig.get('env'),
esTestCluster: commonConfig.get('esTestCluster'),
kibanaServerArgs: [
...functionalConfig.get('kibanaServerArgs'),
'--optimize.enabled=false',
'--elasticsearch.healthCheck.delay=3600000',
'--server.xsrf.disableProtection=true',
],
}
};
}


@ -1,5 +1,3 @@
import { format as formatUrl } from 'url';
import { OPTIMIZE_BUNDLE_DIR, esTestConfig, kbnTestConfig } from '@kbn/test';
import {
KibanaServerProvider,
EsProvider,
@ -7,37 +5,15 @@ import {
RetryProvider,
} from './services';
import { esTestConfig } from '../../src/test_utils/es';
import { kibanaTestServerUrlParts } from '../kibana_test_server_url_parts';
export default function () {
const servers = {
kibana: kbnTestConfig.getUrlParts(),
elasticsearch: esTestConfig.getUrlParts(),
};
return {
servers,
esTestCluster: {
license: 'oss',
from: 'snapshot',
serverArgs: [
],
servers: {
kibana: kibanaTestServerUrlParts,
elasticsearch: esTestConfig.getUrlParts(),
},
kibanaServerArgs: [
'--env=development',
'--logging.json=false',
'--no-base-path',
`--server.port=${kbnTestConfig.getPort()}`,
`--optimize.watchPort=${kbnTestConfig.getPort()}`,
'--optimize.watchPrebuild=true',
'--status.allowAnonymous=true',
'--optimize.enabled=true',
`--optimize.bundleDir=${OPTIMIZE_BUNDLE_DIR}`,
`--elasticsearch.url=${formatUrl(servers.elasticsearch)}`,
`--elasticsearch.username=${servers.elasticsearch.username}`,
`--elasticsearch.password=${servers.elasticsearch.password}`,
],
services: {
kibanaServer: KibanaServerProvider,
retry: RetryProvider,


@ -82,16 +82,6 @@ export default async function ({ readConfigFile }) {
dashboardAddPanel: DashboardAddPanelProvider,
},
servers: commonConfig.get('servers'),
env: commonConfig.get('env'),
esTestCluster: commonConfig.get('esTestCluster'),
kibanaServerArgs: [
...commonConfig.get('kibanaServerArgs'),
'--oss',
],
apps: {
status_page: {
pathname: '/status',


@ -0,0 +1,30 @@
import { kibanaUser } from './shield';
import url from 'url';
function getUrlParts() {
// allow setting one complete TEST_KIBANA_URL, e.g. http://elastic:changeme@localhost:5620
if (process.env.TEST_KIBANA_URL) {
const testKibanaUrl = url.parse(process.env.TEST_KIBANA_URL);
return {
protocol: testKibanaUrl.protocol.slice(0, -1),
hostname: testKibanaUrl.hostname,
port: parseInt(testKibanaUrl.port, 10),
auth: testKibanaUrl.auth,
username: testKibanaUrl.auth.split(':')[0],
password: testKibanaUrl.auth.split(':')[1]
};
}
const username = process.env.TEST_KIBANA_USERNAME || kibanaUser.username;
const password = process.env.TEST_KIBANA_PASSWORD || kibanaUser.password;
return {
protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5620,
auth: `${username}:${password}`,
username,
password,
};
}
export const kibanaTestServerUrlParts = getUrlParts();


@ -35,7 +35,14 @@ mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
echo " -> Running functional and api tests"
echo " -> Running api integration tests"
cd "$XPACK_DIR"
node scripts/functional_tests_api --kibana-install-dir "$installDir" --es-from=source
echo ""
echo ""
echo " -> Running functional tests"
cd "$XPACK_DIR"
xvfb-run node scripts/functional_tests --bail --kibana-install-dir "$installDir" --es-from=source
echo ""

test/shield.js (new file)

@ -0,0 +1,16 @@
const env = process.env;
export const kibanaUser = {
username: env.TEST_KIBANA_USER || 'elastic',
password: env.TEST_KIBANA_PASS || 'changeme'
};
export const kibanaServer = {
username: env.TEST_KIBANA_SERVER_USER || 'kibana',
password: env.TEST_KIBANA_SERVER_PASS || 'changeme'
};
export const admin = {
username: env.TEST_ES_USER || 'elastic',
password: env.TEST_ES_PASS || 'changeme'
};


@ -54,15 +54,7 @@ yarn test:server
#### Running functional tests
The functional UI tests, the API integration tests, and the SAML API integration tests are all run against a live browser, Kibana, and Elasticsearch install. Each set of tests is specified with a unique config that describes how to start the Elasticsearch server, the Kibana server, and what tests to run against them. The sets of tests that exist today are *functional UI tests* ([specified by this config](test/functional/config.js)), *API integration tests* ([specified by this config](test/api_integration/config.js)), and *SAML API integration tests* ([specified by this config](test/saml_api_integration/config.js)).
The script runs all sets of tests sequentially like so:
* builds Elasticsearch and X-Pack
* runs Elasticsearch with X-Pack
* starts up the Kibana server with X-Pack
* runs the functional UI tests against those servers
* tears down the servers
* repeats the same process for the API and SAML API integration test configs.
The functional tests are run against a live browser, Kibana, and Elasticsearch install. They build their own version of elasticsearch and x-pack-elasticsearch, run the builds automatically, startup the kibana server, and run the tests against them.
To do all of this in a single command run:
@ -70,59 +62,60 @@ To do all of this in a single command run:
node scripts/functional_tests
```
#### Running UI tests
The functional UI tests can be run separately like so:
```sh
node scripts/functional_tests --config test/functional/config
```
It does the same as the previous command, except that it only does setup/test/teardown for the UI tests.
#### Running API integration tests
API integration tests are run with a unique setup, usually without UI assets built for the Kibana server.
API integration tests are intended to test _only the programmatic APIs exposed by Kibana_. There is no need to run a browser and simulate user actions, which significantly reduces execution time. In addition, the configuration for API integration tests typically sets `optimize.enabled=false` for Kibana because UI assets are usually not needed for these tests.
The API integration tests can be run separately like so:
```sh
node scripts/functional_tests --config test/api_integration/config
```
#### Running SAML API integration tests
We also have SAML API integration tests which set up Elasticsearch and Kibana with SAML support. Run API integration tests separately with SAML support like so:
```sh
node scripts/functional_tests --config test/saml_api_integration/config
```
#### Developing functional tests
If you are **developing functional tests** then you probably don't want to rebuild Elasticsearch and wait for all that setup on every test run, so instead use this command to build and start just the Elasticsearch and Kibana servers:
If you are **developing functional tests** then you probably don't want to rebuild elasticsearch and wait for all that setup on every test run, so instead use this command to get started:
```sh
node scripts/functional_tests_server
```
After the servers are started, open a new terminal and run this command to run just the tests (without tearing down Elasticsearch or Kibana):
After both Elasticsearch and Kibana are running, open a new terminal (without tearing down Elasticsearch, Kibana, etc.) and use the following to run the tests:
```sh
# make sure you are in the x-pack-kibana project
cd x-pack
# invoke the functional_test_runner from kibana project. try sending --help to learn more
# this command accepts a bunch of arguments to tweak the run, try sending --help to learn more
node ../scripts/functional_test_runner
```
For both of the above commands, it's crucial that you pass in `--config` to specify the same config file to both commands. This makes sure that the right tests will run against the right servers. Typically a set of tests and server configuration go together.
#### Running API integration tests
Read more about how the scripts work [here](scripts/README.md).
API integration tests are very similar to functional tests in a sense that they are organized in the same way and run against live Kibana and Elasticsearch instances.
The difference is that API integration tests are intended to test only the programmatic APIs exposed by Kibana. There is no need to run a browser and simulate user actions, which significantly reduces execution time.
For a deeper dive, read more about the way functional tests and servers work [here](packages/kbn-test/README.md).
To build, run `x-pack-kibana` with `x-pack-elasticsearch` and then run API integration tests against them use the following command:
```sh
node scripts/functional_tests_api
```
If you are **developing api integration tests** then you probably don't want to rebuild `x-pack-elasticsearch` and wait for all that setup on every test run, so instead use this command to get started:
```sh
node scripts/functional_tests_server
```
Once Kibana and Elasticsearch are up and running open a new terminal and run this command to just run the tests (without tearing down Elasticsearch, Kibana, etc.)
```sh
# this command accepts a bunch of arguments to tweak the run, try sending --help to learn more
node ../scripts/functional_test_runner --config test/api_integration/config.js
```
You can also run API integration tests with SAML support. The `--saml` option configures both Kibana and Elasticsearch
with the SAML security realm, as required by the SAML security API.
Start the functional test server with SAML support:
```sh
node scripts/functional_tests_server --saml
```
Then run the tests with:
```sh
# make sure you are in the x-pack-kibana project
cd x-pack-kibana
# use a different config for SAML
node ../scripts/functional_test_runner --config test/saml_api_integration/config.js
```
### Issues starting dev mode or creating builds


@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export {
runFunctionTests,
runApiTests,
runFunctionalTestsServer,
} from './tasks';


@ -12,25 +12,20 @@ import { delay, fromNode as fcb } from 'bluebird';
export const DEFAULT_SUPERUSER_PASS = 'iamsuperuser';
async function updateCredentials(port, auth, username, password, retries = 10) {
const result = await fcb(cb =>
request(
{
method: 'PUT',
uri: formatUrl({
protocol: 'http:',
auth,
hostname: 'localhost',
port,
pathname: `/_xpack/security/user/${username}/_password`,
}),
json: true,
body: { password },
},
(err, httpResponse, body) => {
cb(err, { httpResponse, body });
}
)
);
const result = await fcb(cb => request({
method: 'PUT',
uri: formatUrl({
protocol: 'http:',
auth,
hostname: 'localhost',
port,
pathname: `/_xpack/security/user/${username}/_password`,
}),
json: true,
body: { password }
}, (err, httpResponse, body) => {
cb(err, { httpResponse, body });
}));
const { body, httpResponse } = result;
const { statusCode } = httpResponse;
@ -43,22 +38,20 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
return await updateCredentials(port, auth, username, password, retries - 1);
}
throw new Error(
`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`
);
throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
}
export async function setupUsers(log, config) {
const esPort = config.get('servers.elasticsearch.port');
export async function setupUsers(log, ftrConfig) {
const esPort = ftrConfig.get('servers.elasticsearch.port');
// track the current credentials for the `elastic` user as
// they will likely change as we apply updates
let auth = `elastic:${DEFAULT_SUPERUSER_PASS}`;
let auth = 'elastic:iamsuperuser';
// list of updates we need to apply
const updates = [
config.get('servers.elasticsearch'),
config.get('servers.kibana'),
ftrConfig.get('servers.elasticsearch'),
ftrConfig.get('servers.kibana'),
];
for (const { username, password } of updates) {

View file

@ -0,0 +1,17 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
const $isCliError = Symbol('isCliError');
export function createCliError(message) {
const error = new Error(message);
error[$isCliError] = true;
return error;
}
export function isCliError(error) {
return error && !!error[$isCliError];
}


@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { readFtrConfigFile } from '@kbn/plugin-helpers';
import { FTR_CONFIG_PATH } from './paths';
import { log } from './log';
export async function getFtrConfig() {
return await readFtrConfigFile(log, FTR_CONFIG_PATH);
}


@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { getFtrConfig } from './get_ftr_config';
export { runKibanaServer } from './run_kibana_server';
export { runEsWithXpack } from './run_es_with_xpack';
export { runFtr } from './run_ftr';
export { log } from './log';
export { KIBANA_FTR_SCRIPT } from './paths';
export { isCliError } from './errors';


@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { createToolingLog } from '@kbn/dev-utils';
export const log = createToolingLog('debug');
log.pipe(process.stdout);


@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { resolve } from 'path';
import { resolveKibanaPath } from '@kbn/plugin-helpers';
function useBat(bin) {
return process.platform.startsWith('win') ? `${bin}.bat` : bin;
}
export const KIBANA_BIN_PATH = useBat(resolveKibanaPath('bin/kibana'));
export const KIBANA_ROOT = resolveKibanaPath('');
export const XPACK_KIBANA_ROOT = resolve(KIBANA_ROOT, 'x-pack');
export const GULP_COMMAND_PATH = resolve(XPACK_KIBANA_ROOT, 'node_modules/.bin/gulp');
export const KIBANA_FTR_SCRIPT = resolve(KIBANA_ROOT, 'scripts/functional_test_runner');
export const PROJECT_ROOT = resolve(__dirname, '../../../');
export const FTR_CONFIG_PATH = resolve(PROJECT_ROOT, 'test/functional/config');
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/xpackTestUiServer');


@ -0,0 +1,50 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { resolve } from 'path';
import { createTestCluster } from '../../../../src/test_utils/es/es_test_cluster';
import { log } from './log';
import { setupUsers, DEFAULT_SUPERUSER_PASS } from './auth';
export async function runEsWithXpack({ ftrConfig, useSAML = false, from }) {
const cluster = createTestCluster({
port: ftrConfig.get('servers.elasticsearch.port'),
password: DEFAULT_SUPERUSER_PASS,
license: 'trial',
from,
log,
});
const kibanaPort = ftrConfig.get('servers.kibana.port');
const idpPath = resolve(
__dirname,
'../../../test/saml_api_integration/fixtures/idp_metadata.xml'
);
const esArgs = [
'xpack.security.enabled=true',
];
const samlEsArgs = [
...esArgs,
'xpack.security.authc.token.enabled=true',
'xpack.security.authc.token.timeout=15s',
'xpack.security.authc.realms.saml1.type=saml',
'xpack.security.authc.realms.saml1.order=0',
`xpack.security.authc.realms.saml1.idp.metadata.path=${idpPath}`,
'xpack.security.authc.realms.saml1.idp.entity_id=http://www.elastic.co',
`xpack.security.authc.realms.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
`xpack.security.authc.realms.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
`xpack.security.authc.realms.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/v1/saml`,
'xpack.security.authc.realms.saml1.attributes.principal=urn:oid:0.0.7',
];
await cluster.start(useSAML ? samlEsArgs : esArgs);
await setupUsers(log, ftrConfig);
return cluster;
}


@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
KIBANA_FTR_SCRIPT,
PROJECT_ROOT
} from './paths';
export async function runFtr({ procs, configPath, bail }) {
const args = [KIBANA_FTR_SCRIPT, '--debug'];
if (configPath) {
args.push('--config', configPath);
}
if (bail) {
args.push('--bail');
}
await procs.run('ftr', {
cmd: 'node',
args,
cwd: PROJECT_ROOT,
wait: true
});
}


@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { resolve, relative } from 'path';
import { format as formatUrl } from 'url';
import {
KIBANA_ROOT,
KIBANA_BIN_PATH,
OPTIMIZE_BUNDLE_DIR
} from './paths';
export async function runKibanaServer(options) {
const {
procs,
ftrConfig,
devMode = false,
enableUI = true,
useSAML = false,
existingInstallDir = null,
} = options;
if (devMode && existingInstallDir) {
throw new Error('Kibana installations can not be run in dev mode');
}
const runFromSourceArgs = existingInstallDir
? ['--optimize.useBundleCache=true']
: [
'--no-base-path',
`--optimize.bundleDir=${OPTIMIZE_BUNDLE_DIR}`,
];
const samlArgs = useSAML ? [
'--server.xsrf.whitelist=[\"/api/security/v1/saml\"]',
'--xpack.security.authProviders=[\"saml\"]',
] : [];
// start the kibana server and wait for it to log "Server running" before resolving
await procs.run('kibana', {
cwd: existingInstallDir || KIBANA_ROOT,
cmd: existingInstallDir
? resolve(existingInstallDir, relative(KIBANA_ROOT, KIBANA_BIN_PATH))
: KIBANA_BIN_PATH,
args: [
...runFromSourceArgs,
devMode ? '--dev' : '--env=development',
'--logging.json=false',
`--server.port=${ftrConfig.get('servers.kibana.port')}`,
`--server.uuid=${ftrConfig.get('env').kibana.server.uuid}`,
`--elasticsearch.url=${formatUrl(ftrConfig.get('servers.elasticsearch'))}`,
`--optimize.enabled=${enableUI}`,
`--optimize.lazyPort=${ftrConfig.get('servers.kibana.port') + 1}`,
'--optimize.lazyPrebuild=true',
'--status.allowAnonymous=true',
`--elasticsearch.username=${ftrConfig.get('servers.elasticsearch.username')}`,
`--elasticsearch.password=${ftrConfig.get('servers.elasticsearch.password')}`,
'--xpack.security.encryptionKey="wuGNaIhoMpk5sO4UBxgr3NyW1sFcLgIf"', // server restarts should not invalidate active sessions
'--xpack.monitoring.kibana.collection.enabled=false',
'--xpack.xpack_main.telemetry.enabled=false',
...samlArgs,
],
env: {
FORCE_COLOR: 1,
...process.env,
},
wait: /Server running/,
});
}


@ -0,0 +1,186 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { relative } from 'path';
import Rx from 'rxjs/Rx';
import { Command } from 'commander';
import { withProcRunner } from '@kbn/dev-utils';
import {
getFtrConfig,
runKibanaServer,
runEsWithXpack,
runFtr,
log,
KIBANA_FTR_SCRIPT,
isCliError,
} from './lib';
const SUCCESS_MESSAGE = `
Elasticsearch and Kibana are ready for functional testing. Start the functional tests
in another terminal session by running this command from this directory:
node ${relative(process.cwd(), KIBANA_FTR_SCRIPT)}
`;
export function fatalErrorHandler(err) {
log.error('FATAL ERROR');
log.error(isCliError(err) ? err.message : err);
process.exit(1);
}
export async function runFunctionTests() {
try {
const cmd = new Command('node scripts/functional_tests');
cmd
.option(
'--bail',
'Stop the functional_test_runner as soon as a failure occurs'
)
.option(
'--kibana-install-dir <path>',
'Run Kibana from an existing install directory'
)
.option(
'--es-from <from>',
'Run ES from either source or snapshot [default: snapshot]'
)
.parse(process.argv);
await withProcRunner(log, async procs => {
const ftrConfig = await getFtrConfig();
const es = await runEsWithXpack({ ftrConfig, from: cmd.esFrom });
await runKibanaServer({
procs,
ftrConfig,
existingInstallDir: cmd.kibanaInstallDir,
});
await runFtr({
procs,
bail: cmd.bail,
});
await procs.stop('kibana');
await es.cleanup();
});
} catch (err) {
fatalErrorHandler(err);
}
}
export async function runApiTests() {
const cmd = new Command('node scripts/functional_tests_api');
cmd
.option(
'--bail',
'Stop the functional_test_runner as soon as a failure occurs'
)
.option(
'--kibana-install-dir <path>',
'Run Kibana from an existing install directory'
)
.option(
'--es-from <from>',
'Run ES from either source or snapshot [default: snapshot]'
)
.parse(process.argv);
try {
await withProcRunner(log, async procs => {
const ftrConfig = await getFtrConfig();
const es = await runEsWithXpack({ ftrConfig, from: cmd.esFrom });
await runKibanaServer({
procs,
ftrConfig,
enableUI: true,
existingInstallDir: cmd.kibanaInstallDir,
});
await runFtr({
procs,
configPath: require.resolve('../../test/api_integration/config.js'),
bail: cmd.bail,
});
await procs.stop('kibana');
await es.cleanup();
// Run SAML specific API integration tests.
const samlEs = await runEsWithXpack({
ftrConfig,
useSAML: true,
from: cmd.esFrom,
});
await runKibanaServer({
procs,
ftrConfig,
enableUI: false,
useSAML: true,
existingInstallDir: cmd.kibanaInstallDir,
});
await runFtr({
procs,
configPath: require.resolve(
'../../test/saml_api_integration/config.js'
),
});
await procs.stop('kibana');
await samlEs.cleanup();
});
} catch (err) {
fatalErrorHandler(err);
}
}
export async function runFunctionalTestsServer() {
const cmd = new Command('node scripts/functional_test_server');
cmd
.option(
'--saml',
'Run Elasticsearch and Kibana with configured SAML security realm',
false
)
.option(
'--es-from <from>',
'Run ES from either source or snapshot [default: snapshot]'
)
.parse(process.argv);
const useSAML = cmd.saml;
try {
await withProcRunner(log, async procs => {
const ftrConfig = await getFtrConfig();
await runEsWithXpack({ ftrConfig, useSAML, from: cmd.esFrom });
await runKibanaServer({
devMode: true,
procs,
ftrConfig,
useSAML,
});
// wait for 5 seconds of silence before logging the
// success message so that it doesn't get buried
await Rx.Observable.fromEvent(log, 'data')
.startWith(null)
.switchMap(() => Rx.Observable.timer(5000))
.take(1)
.toPromise();
log.success(SUCCESS_MESSAGE);
await procs.waitForAllToStop();
});
} catch (err) {
fatalErrorHandler(err);
}
}

View file

@@ -24,9 +24,7 @@
},
"devDependencies": {
"@kbn/dev-utils": "link:../packages/kbn-dev-utils",
"@kbn/es": "link:../packages/kbn-es",
"@kbn/plugin-helpers": "link:../packages/kbn-plugin-helpers",
"@kbn/test": "link:../packages/kbn-test",
"@types/jest": "^22.2.3",
"abab": "^1.0.4",
"ansicolors": "0.3.2",

View file

@@ -5,8 +5,4 @@
*/
require('@kbn/plugin-helpers').babelRegister();
require('@kbn/test').runTestsCli([
require.resolve('../test/functional/config.js'),
require.resolve('../test/api_integration/config.js'),
require.resolve('../test/saml_api_integration/config.js'),
]);
require('../dev-tools/functional_tests').runFunctionTests();

View file

@@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
require('@kbn/plugin-helpers').babelRegister();
require('../dev-tools/functional_tests').runApiTests();

View file

@@ -5,6 +5,4 @@
*/
require('@kbn/plugin-helpers').babelRegister();
require('@kbn/test').startServersCli(
require.resolve('../test/functional/config.js'),
);
require('../dev-tools/functional_tests').runFunctionalTestsServer();

View file

@@ -11,7 +11,7 @@ export default async function ({ readConfigFile }) {
// Read the Kibana API integration tests config file so that we can utilize its services.
const kibanaAPITestsConfig = await readConfigFile(require.resolve('../../../test/api_integration/config.js'));
const xPackFunctionalTestsConfig = await readConfigFile(require.resolve('../functional/config.js'));
const kibanaCommonConfig = await readConfigFile(require.resolve('../../../test/common/config.js'));
const kibanaFunctionalConfig = await readConfigFile(require.resolve('../../../test/functional/config.js'));
return {
testFiles: [require.resolve('./apis')],
@@ -20,15 +20,12 @@ export default async function ({ readConfigFile }) {
supertest: kibanaAPITestsConfig.get('services.supertest'),
esSupertest: kibanaAPITestsConfig.get('services.esSupertest'),
supertestWithoutAuth: SupertestWithoutAuthProvider,
es: kibanaCommonConfig.get('services.es'),
esArchiver: kibanaCommonConfig.get('services.esArchiver'),
es: kibanaFunctionalConfig.get('services.es'),
esArchiver: kibanaFunctionalConfig.get('services.esArchiver'),
},
esArchiver: xPackFunctionalTestsConfig.get('esArchiver'),
junit: {
reportName: 'X-Pack API Integration Tests',
},
env: xPackFunctionalTestsConfig.get('env'),
kibanaServerArgs: xPackFunctionalTestsConfig.get('kibanaServerArgs'),
esTestCluster: xPackFunctionalTestsConfig.get('esTestCluster'),
};
}

View file

@@ -7,7 +7,6 @@
/* eslint-disable kibana-custom/no-default-export */
import { resolve } from 'path';
import { format as formatUrl } from 'url';
import {
SecurityPageProvider,
@@ -53,37 +52,11 @@ import {
// that returns an object with the project's config values
export default async function ({ readConfigFile }) {
const kibanaCommonConfig = await readConfigFile(require.resolve('../../../test/common/config.js'));
const kibanaFunctionalConfig = await readConfigFile(require.resolve('../../../test/functional/config.js'));
// read the Kibana config file so that we can utilize some of
// its services and PageObjects
const kibanaConfig = await readConfigFile(require.resolve('../../../test/functional/config.js'));
const kibanaAPITestsConfig = await readConfigFile(require.resolve('../../../test/api_integration/config.js'));
const servers = {
elasticsearch: {
protocol: process.env.TEST_ES_PROTOCOL || 'http',
hostname: process.env.TEST_ES_HOSTNAME || 'localhost',
port: parseInt(process.env.TEST_ES_PORT, 10) || 9240,
auth: 'elastic:changeme',
username: 'elastic',
password: 'changeme',
},
kibana: {
protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5640,
auth: 'elastic:changeme',
username: 'elastic',
password: 'changeme',
},
};
const env = {
kibana: {
server: {
uuid: '5b2de169-2785-441b-ae8c-186a1936b17d', // Kibana UUID for "primary" cluster in monitoring data
}
}
};
return {
// list paths to the files that contain your plugins tests
testFiles: [
@@ -101,7 +74,7 @@ export default async function ({ readConfigFile }) {
// available to your tests. If you don't specify anything here
// only the built-in services will be available
services: {
...kibanaFunctionalConfig.get('services'),
...kibanaConfig.get('services'),
esSupertest: kibanaAPITestsConfig.get('services.esSupertest'),
monitoringNoData: MonitoringNoDataProvider,
monitoringClusterList: MonitoringClusterListProvider,
@@ -134,7 +107,7 @@ export default async function ({ readConfigFile }) {
// just like services, PageObjects are defined as a map of
// names to Providers. Merge in Kibana's or pick specific ones
pageObjects: {
...kibanaFunctionalConfig.get('pageObjects'),
...kibanaConfig.get('pageObjects'),
security: SecurityPageProvider,
reporting: ReportingPageProvider,
monitoring: MonitoringPageProvider,
@@ -144,35 +117,34 @@ export default async function ({ readConfigFile }) {
watcher: WatcherPageProvider,
},
servers,
env,
esTestCluster: {
license: 'trial',
from: 'source',
serverArgs: [
'xpack.license.self_generated.type=trial',
'xpack.security.enabled=true',
],
servers: {
elasticsearch: {
port: 9240,
auth: 'elastic:changeme',
username: 'elastic',
password: 'changeme',
},
kibana: {
port: 5640,
auth: 'elastic:changeme',
username: 'elastic',
password: 'changeme',
},
},
env: {
kibana: {
server: {
uuid: '5b2de169-2785-441b-ae8c-186a1936b17d', // Kibana UUID for "primary" cluster in monitoring data
}
}
},
kibanaServerArgs: [
...kibanaCommonConfig.get('kibanaServerArgs'),
`--server.uuid=${env.kibana.server.uuid}`,
`--server.port=${servers.kibana.port}`,
`--elasticsearch.url=${formatUrl(servers.elasticsearch)}`,
'--xpack.monitoring.kibana.collection.enabled=false',
'--xpack.xpack_main.telemetry.enabled=false',
'--xpack.security.encryptionKey="wuGNaIhoMpk5sO4UBxgr3NyW1sFcLgIf"', // server restarts should not invalidate active sessions
],
// the apps section defines the urls that
// `PageObjects.common.navigateTo(appKey)` will use.
// Merge urls for your plugin with the urls defined in
// Kibana's config in order to use this helper
apps: {
...kibanaFunctionalConfig.get('apps'),
...kibanaConfig.get('apps'),
login: {
pathname: '/login'
},

View file

@@ -4,16 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { resolve } from 'path';
export default async function ({ readConfigFile }) {
// Read the Kibana API integration tests config file so that we can utilize its services.
const kibanaAPITestsConfig = await readConfigFile(require.resolve('../../../test/api_integration/config.js'));
const xPackAPITestsConfig = await readConfigFile(require.resolve('../api_integration/config.js'));
const kibanaPort = xPackAPITestsConfig.get('servers.kibana.port');
const idpPath = resolve(__dirname, '../../test/saml_api_integration/fixtures/idp_metadata.xml');
return {
testFiles: [require.resolve('./apis')],
servers: xPackAPITestsConfig.get('servers'),
@@ -24,30 +19,5 @@ export default async function ({ readConfigFile }) {
junit: {
reportName: 'X-Pack SAML API Integration Tests',
},
env: xPackAPITestsConfig.get('env'),
esTestCluster: {
...xPackAPITestsConfig.get('esTestCluster'),
serverArgs: [
...xPackAPITestsConfig.get('esTestCluster.serverArgs'),
'xpack.security.authc.token.enabled=true',
'xpack.security.authc.token.timeout=15s',
'xpack.security.authc.realms.saml1.type=saml',
'xpack.security.authc.realms.saml1.order=0',
`xpack.security.authc.realms.saml1.idp.metadata.path=${idpPath}`,
'xpack.security.authc.realms.saml1.idp.entity_id=http://www.elastic.co',
`xpack.security.authc.realms.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
`xpack.security.authc.realms.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
`xpack.security.authc.realms.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/v1/saml`,
'xpack.security.authc.realms.saml1.attributes.principal=urn:oid:0.0.7',
],
},
kibanaServerArgs: [
...xPackAPITestsConfig.get('kibanaServerArgs'),
'--optimize.enabled=false',
'--server.xsrf.whitelist=[\"/api/security/v1/saml\"]',
'--xpack.security.authProviders=[\"saml\"]',
],
};
}

File diff suppressed because it is too large

View file

@@ -162,10 +162,6 @@
version "0.0.0"
uid ""
"@kbn/test@link:packages/kbn-test":
version "0.0.0"
uid ""
"@kbn/ui-framework@link:packages/kbn-ui-framework":
version "0.0.0"
uid ""
@@ -2169,14 +2165,6 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3, chalk@~1.1.1:
strip-ansi "^3.0.0"
supports-color "^2.0.0"
chalk@^2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e"
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
chalk@~0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174"
@@ -5147,10 +5135,6 @@ getopts@^2.0.0:
version "2.0.5"
resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.0.5.tgz#e4d3948e87fd9fb50c8a0f2912f4de16301fb8ae"
getopts@^2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.0.6.tgz#4788d533a977527e79efd57b5e742ffa0dd33105"
getos@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/getos/-/getos-3.1.0.tgz#db3aa4df15a3295557ce5e81aa9e3e5cdfaa6567"
@@ -11048,12 +11032,6 @@ rxjs@5.4.3:
dependencies:
symbol-observable "^1.0.1"
rxjs@^5.4.3:
version "5.5.10"
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.5.10.tgz#fde02d7a614f6c8683d0d1957827f492e09db045"
dependencies:
symbol-observable "1.0.1"
safe-buffer@5.1.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
@@ -11978,10 +11956,6 @@ svgo@^0.7.0:
sax "~1.2.1"
whet.extend "~0.9.9"
symbol-observable@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4"
symbol-observable@^1.0.1, symbol-observable@^1.0.3:
version "1.2.0"
resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
@@ -12045,15 +12019,6 @@ tar-fs@^1.16.0:
pump "^1.0.0"
tar-stream "^1.1.2"
tar-fs@^1.16.2:
version "1.16.2"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.2.tgz#17e5239747e399f7e77344f5f53365f04af53577"
dependencies:
chownr "^1.0.1"
mkdirp "^0.5.1"
pump "^1.0.0"
tar-stream "^1.1.2"
tar-pack@^3.4.0:
version "3.4.1"
resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f"