[7.x] [CI] Add pipeline task queue framework and merge workers into one (#71268) (#74276)

This commit is contained in:
Brian Seeders 2020-08-14 16:41:47 -04:00 committed by GitHub
parent 3879409aaa
commit 2ece4f12a4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
58 changed files with 835 additions and 147 deletions

38
.ci/Dockerfile Normal file
View file

@ -0,0 +1,38 @@
# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts
ARG NODE_VERSION=10.21.0
FROM node:${NODE_VERSION} AS base
RUN apt-get update && \
apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
rm -rf /var/lib/apt/lists/*
RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
&& apt-get update \
&& apt-get install -y rsync jq bsdtar google-chrome-stable \
--no-install-recommends \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
&& curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
&& unzip vault.zip \
&& rm vault.zip \
&& chmod +x vault \
&& mv vault /usr/local/bin/vault
RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana
COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
RUN chmod +x /usr/local/bin/bash_standard_lib.sh
COPY ./runbld /usr/local/bin/runbld
RUN chmod +x /usr/local/bin/runbld
USER kibana

View file

@ -7,19 +7,23 @@ kibanaPipeline(timeoutMinutes: 120) {
githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') { githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
ciStats.trackBuild { ciStats.trackBuild {
catchError { catchError {
withEnv([
'CI_PARALLEL_PROCESS_NUMBER=1'
]) {
parallel([ parallel([
'oss-visualRegression': { 'oss-visualRegression': {
workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) { workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1) kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')()
} }
}, },
'xpack-visualRegression': { 'xpack-visualRegression': {
workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) { workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1) kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')()
} }
}, },
]) ])
} }
}
kibanaPipeline.sendMail() kibanaPipeline.sendMail()
slackNotifications.onFailure() slackNotifications.onFailure()

View file

@ -3,4 +3,4 @@
profiles: profiles:
- ".*": # Match any job - ".*": # Match any job
tests: tests:
junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist junit-filename-pattern: false

2
.gitignore vendored
View file

@ -49,6 +49,8 @@ npm-debug.log*
.tern-project .tern-project
.nyc_output .nyc_output
.ci/pipeline-library/build/ .ci/pipeline-library/build/
.ci/runbld
.ci/bash_standard_lib.sh
.gradle .gradle
# apm plugin # apm plugin

44
Jenkinsfile vendored
View file

@ -9,49 +9,7 @@ kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true)
ciStats.trackBuild { ciStats.trackBuild {
catchError { catchError {
retryable.enable() retryable.enable()
parallel([ kibanaPipeline.allCiTasks()
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
// 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'),
'xpack-securitySolutionCypress': { processNumber ->
whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/', 'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/', 'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx']) {
kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber)
}
},
// 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
]),
])
} }
} }
} }

View file

@ -57,7 +57,7 @@ const barCommand: Command<any> = {
usage: 'bar [...names]', usage: 'bar [...names]',
}; };
describe('getHelp()', () => { describe.skip('getHelp()', () => {
it('returns the expected output', () => { it('returns the expected output', () => {
expect( expect(
getHelp({ getHelp({
@ -95,7 +95,7 @@ describe('getHelp()', () => {
}); });
}); });
describe('getCommandLevelHelp()', () => { describe.skip('getCommandLevelHelp()', () => {
it('returns the expected output', () => { it('returns the expected output', () => {
expect( expect(
getCommandLevelHelp({ getCommandLevelHelp({
@ -141,7 +141,7 @@ describe('getCommandLevelHelp()', () => {
}); });
}); });
describe('getHelpForAllCommands()', () => { describe.skip('getHelpForAllCommands()', () => {
it('returns the expected output', () => { it('returns the expected output', () => {
expect( expect(
getHelpForAllCommands({ getHelpForAllCommands({

View file

@ -7,10 +7,11 @@ function checkout_sibling {
targetDir=$2 targetDir=$2
useExistingParamName=$3 useExistingParamName=$3
useExisting="$(eval "echo "\$$useExistingParamName"")" useExisting="$(eval "echo "\$$useExistingParamName"")"
repoAddress="https://github.com/"
if [ -z ${useExisting:+x} ]; then if [ -z ${useExisting:+x} ]; then
if [ -d "$targetDir" ]; then if [ -d "$targetDir" ]; then
echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$PARENT_DIR]!" echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!"
echo echo
echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling." echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling."
exit 1 exit 1
@ -21,8 +22,9 @@ function checkout_sibling {
cloneBranch="" cloneBranch=""
function clone_target_is_valid { function clone_target_is_valid {
echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}" echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}"
if [[ -n "$(git ls-remote --heads "git@github.com:${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
return 0 return 0
else else
return 1 return 1
@ -71,7 +73,7 @@ function checkout_sibling {
fi fi
echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..." echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1 git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1
echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)" echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
echo echo
} }
@ -87,12 +89,12 @@ function checkout_sibling {
fi fi
} }
checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES" checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES"
export TEST_ES_FROM=${TEST_ES_FROM:-snapshot} export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}
# Set the JAVA_HOME based on the Java property file in the ES repo # Set the JAVA_HOME based on the Java property file in the ES repo
# This assumes the naming convention used on CI (ex: ~/.java/java10) # This assumes the naming convention used on CI (ex: ~/.java/java10)
ES_DIR="$PARENT_DIR/elasticsearch" ES_DIR="$WORKSPACE/elasticsearch"
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties

View file

@ -53,6 +53,8 @@ export PARENT_DIR="$parentDir"
kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")" kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
export KIBANA_PKG_BRANCH="$kbnBranch" export KIBANA_PKG_BRANCH="$kbnBranch"
export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"
### ###
### download node ### download node
### ###
@ -162,7 +164,7 @@ export -f checks-reporter-with-killswitch
source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh" source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"
ES_DIR="$PARENT_DIR/elasticsearch" ES_DIR="$WORKSPACE/elasticsearch"
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then

View file

@ -49,8 +49,10 @@ export async function generateNoticeFromSource({ productName, directory, log }:
ignore: [ ignore: [
'{node_modules,build,dist,data,built_assets}/**', '{node_modules,build,dist,data,built_assets}/**',
'packages/*/{node_modules,build,dist}/**', 'packages/*/{node_modules,build,dist}/**',
'src/plugins/*/{node_modules,build,dist}/**',
'x-pack/{node_modules,build,dist,data}/**', 'x-pack/{node_modules,build,dist,data}/**',
'x-pack/packages/*/{node_modules,build,dist}/**', 'x-pack/packages/*/{node_modules,build,dist}/**',
'x-pack/plugins/*/{node_modules,build,dist}/**',
'**/target/**', '**/target/**',
], ],
}; };

View file

@ -22,7 +22,7 @@ const { resolve } = require('path');
module.exports = function (grunt) { module.exports = function (grunt) {
grunt.registerTask('test:jest', function () { grunt.registerTask('test:jest', function () {
const done = this.async(); const done = this.async();
runJest(resolve(__dirname, '../scripts/jest.js')).then(done, done); runJest(resolve(__dirname, '../scripts/jest.js'), ['--maxWorkers=10']).then(done, done);
}); });
grunt.registerTask('test:jest_integration', function () { grunt.registerTask('test:jest_integration', function () {
@ -30,10 +30,10 @@ module.exports = function (grunt) {
runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done); runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done);
}); });
function runJest(jestScript) { function runJest(jestScript, args = []) {
const serverCmd = { const serverCmd = {
cmd: 'node', cmd: 'node',
args: [jestScript, '--ci'], args: [jestScript, '--ci', ...args],
opts: { stdio: 'inherit' }, opts: { stdio: 'inherit' },
}; };

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:checkDocApiChanges

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:checkFileCasing

5
test/scripts/checks/i18n.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:i18nCheck

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:licenses

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:checkLockfileSymlinks

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:telemetryCheck

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_hardening

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_projects

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:checkTsProjects

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:typeCheck

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:verifyDependencyVersions

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:verifyNotice

View file

@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_oss.sh
checks-reporter-with-killswitch "Kibana accessibility tests" \ checks-reporter-with-killswitch "Kibana accessibility tests" \
node scripts/functional_tests \ node scripts/functional_tests \
--debug --bail \ --debug --bail \
--kibana-install-dir "$installDir" \ --kibana-install-dir "$KIBANA_INSTALL_DIR" \
--config test/accessibility/config.ts; --config test/accessibility/config.ts;

0
test/scripts/jenkins_build_kbn_sample_panel_action.sh Normal file → Executable file
View file

View file

@ -2,13 +2,9 @@
source src/dev/ci_setup/setup_env.sh source src/dev/ci_setup/setup_env.sh
echo " -> building kibana platform plugins" if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
node scripts/build_kibana_platform_plugins \ ./test/scripts/jenkins_build_plugins.sh
--oss \ fi
--filter '!alertingExample' \
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
--scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
--verbose;
# doesn't persist, also set in kibanaPipeline.groovy # doesn't persist, also set in kibanaPipeline.groovy
export KBN_NP_PLUGINS_BUILT=true export KBN_NP_PLUGINS_BUILT=true
@ -18,3 +14,6 @@ yarn run grunt functionalTests:ensureAllTestsInCiGroup;
echo " -> building and extracting OSS Kibana distributable for use in functional tests" echo " -> building and extracting OSS Kibana distributable for use in functional tests"
node scripts/build --debug --oss node scripts/build --debug --oss
mkdir -p "$WORKSPACE/kibana-build-oss"
cp -pR build/oss/kibana-*-SNAPSHOT-linux-x86_64/. $WORKSPACE/kibana-build-oss/

View file

@ -0,0 +1,12 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
echo " -> building kibana platform plugins"
node scripts/build_kibana_platform_plugins \
--oss \
--filter '!alertingExample' \
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
--scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
--workers 6 \
--verbose

View file

@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup_oss.sh
checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}"; checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}";
if [ "$CI_GROUP" == "1" ]; then if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
source test/scripts/jenkins_build_kbn_sample_panel_action.sh source test/scripts/jenkins_build_kbn_sample_panel_action.sh
yarn run grunt run:pluginFunctionalTestsRelease --from=source; yarn run grunt run:pluginFunctionalTestsRelease --from=source;
yarn run grunt run:exampleFunctionalTestsRelease --from=source; yarn run grunt run:exampleFunctionalTestsRelease --from=source;

View file

@ -5,6 +5,6 @@ source test/scripts/jenkins_test_setup_oss.sh
checks-reporter-with-killswitch "Firefox smoke test" \ checks-reporter-with-killswitch "Firefox smoke test" \
node scripts/functional_tests \ node scripts/functional_tests \
--bail --debug \ --bail --debug \
--kibana-install-dir "$installDir" \ --kibana-install-dir "$KIBANA_INSTALL_DIR" \
--include-tag "includeFirefox" \ --include-tag "includeFirefox" \
--config test/functional/config.firefox.js; --config test/functional/config.firefox.js;

View file

@ -0,0 +1,15 @@
#!/usr/bin/env bash
source test/scripts/jenkins_test_setup_oss.sh
cd test/plugin_functional/plugins/kbn_sample_panel_action;
if [[ ! -d "target" ]]; then
yarn build;
fi
cd -;
pwd
yarn run grunt run:pluginFunctionalTestsRelease --from=source;
yarn run grunt run:exampleFunctionalTestsRelease --from=source;
yarn run grunt run:interpreterFunctionalTestsRelease;

8
test/scripts/jenkins_security_solution_cypress.sh Normal file → Executable file
View file

@ -1,12 +1,6 @@
#!/usr/bin/env bash #!/usr/bin/env bash
source test/scripts/jenkins_test_setup.sh source test/scripts/jenkins_test_setup_xpack.sh
installDir="$PARENT_DIR/install/kibana"
destDir="${installDir}-${CI_WORKER_NUMBER}"
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir"
echo " -> Running security solution cypress tests" echo " -> Running security solution cypress tests"
cd "$XPACK_DIR" cd "$XPACK_DIR"

View file

@ -0,0 +1,32 @@
#!/usr/bin/env bash
set -e
CURRENT_DIR=$(pwd)
# Copy everything except node_modules into the current workspace
rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
rsync -a ${WORKSPACE}/kibana/.??* .
# Symlink all non-root, non-fixture node_modules into our new workspace
cd ${WORKSPACE}/kibana
find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
cd "${CURRENT_DIR}"
# Symlink all of the individual root-level node_modules into the node_modules/ directory
mkdir -p node_modules
ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/
# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
unlink node_modules/@kbn
unlink node_modules/css-loader
unlink node_modules/style-loader
# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
unlink node_modules/val-loader
cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/

6
test/scripts/jenkins_test_setup.sh Normal file → Executable file
View file

@ -14,3 +14,9 @@ trap 'post_work' EXIT
export TEST_BROWSER_HEADLESS=1 export TEST_BROWSER_HEADLESS=1
source src/dev/ci_setup/setup_env.sh source src/dev/ci_setup/setup_env.sh
# For parallel workspaces, we should copy the .es directory from the root, because it should already have downloaded snapshots in it
# This isn't part of jenkins_setup_parallel_workspace.sh just because not all tasks require ES
if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
cp -R $WORKSPACE/kibana/.es ./
fi

15
test/scripts/jenkins_test_setup_oss.sh Normal file → Executable file
View file

@ -2,10 +2,17 @@
source test/scripts/jenkins_test_setup.sh source test/scripts/jenkins_test_setup.sh
if [[ -z "$CODE_COVERAGE" ]] ; then if [[ -z "$CODE_COVERAGE" ]]; then
installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER} destDir="build/kibana-build-oss"
cp -R "$installDir" "$destDir" if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
fi
if [[ ! -d $destDir ]]; then
mkdir -p $destDir
cp -pR "$WORKSPACE/kibana-build-oss/." $destDir/
fi
export KIBANA_INSTALL_DIR="$destDir" export KIBANA_INSTALL_DIR="$destDir"
fi fi

15
test/scripts/jenkins_test_setup_xpack.sh Normal file → Executable file
View file

@ -3,11 +3,18 @@
source test/scripts/jenkins_test_setup.sh source test/scripts/jenkins_test_setup.sh
if [[ -z "$CODE_COVERAGE" ]]; then if [[ -z "$CODE_COVERAGE" ]]; then
installDir="$PARENT_DIR/install/kibana"
destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir" destDir="build/kibana-build-xpack"
if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
fi
if [[ ! -d $destDir ]]; then
mkdir -p $destDir
cp -pR "$WORKSPACE/kibana-build-xpack/." $destDir/
fi
export KIBANA_INSTALL_DIR="$(realpath $destDir)"
cd "$XPACK_DIR" cd "$XPACK_DIR"
fi fi

View file

@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh
checks-reporter-with-killswitch "X-Pack accessibility tests" \ checks-reporter-with-killswitch "X-Pack accessibility tests" \
node scripts/functional_tests \ node scripts/functional_tests \
--debug --bail \ --debug --bail \
--kibana-install-dir "$installDir" \ --kibana-install-dir "$KIBANA_INSTALL_DIR" \
--config test/accessibility/config.ts; --config test/accessibility/config.ts;

View file

@ -3,15 +3,9 @@
cd "$KIBANA_DIR" cd "$KIBANA_DIR"
source src/dev/ci_setup/setup_env.sh source src/dev/ci_setup/setup_env.sh
echo " -> building kibana platform plugins" if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
node scripts/build_kibana_platform_plugins \ ./test/scripts/jenkins_xpack_build_plugins.sh
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ fi
--scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
--scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
--scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
--scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
--scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
--verbose;
# doesn't persist, also set in kibanaPipeline.groovy # doesn't persist, also set in kibanaPipeline.groovy
export KBN_NP_PLUGINS_BUILT=true export KBN_NP_PLUGINS_BUILT=true
@ -34,6 +28,9 @@ echo " -> building and extracting default Kibana distributable for use in functi
cd "$KIBANA_DIR" cd "$KIBANA_DIR"
node scripts/build --debug --no-oss node scripts/build --debug --no-oss
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$PARENT_DIR/install/kibana" installDir="$KIBANA_DIR/install/kibana"
mkdir -p "$installDir" mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1 tar -xzf "$linuxBuild" -C "$installDir" --strip=1
mkdir -p "$WORKSPACE/kibana-build-xpack"
cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/

View file

@ -0,0 +1,14 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
echo " -> building kibana platform plugins"
node scripts/build_kibana_platform_plugins \
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
--scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
--scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
--scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
--scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
--scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
--workers 12 \
--verbose

View file

@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh
checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \ checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \
node scripts/functional_tests \ node scripts/functional_tests \
--debug --bail \ --debug --bail \
--kibana-install-dir "$installDir" \ --kibana-install-dir "$KIBANA_INSTALL_DIR" \
--config test/saved_objects_field_count/config.ts; --config test/saved_objects_field_count/config.ts;

View file

@ -7,19 +7,19 @@ echo " -> building and extracting default Kibana distributable"
cd "$KIBANA_DIR" cd "$KIBANA_DIR"
node scripts/build --debug --no-oss node scripts/build --debug --no-oss
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$PARENT_DIR/install/kibana" installDir="$KIBANA_DIR/install/kibana"
mkdir -p "$installDir" mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1 tar -xzf "$linuxBuild" -C "$installDir" --strip=1
mkdir -p "$WORKSPACE/kibana-build-xpack"
cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/
# cd "$KIBANA_DIR" # cd "$KIBANA_DIR"
# source "test/scripts/jenkins_xpack_page_load_metrics.sh" # source "test/scripts/jenkins_xpack_page_load_metrics.sh"
cd "$KIBANA_DIR" cd "$KIBANA_DIR"
source "test/scripts/jenkins_xpack_saved_objects_field_metrics.sh" source "test/scripts/jenkins_xpack_saved_objects_field_metrics.sh"
cd "$KIBANA_DIR"
source "test/scripts/jenkins_xpack_saved_objects_field_metrics.sh"
echo " -> running visual regression tests from x-pack directory" echo " -> running visual regression tests from x-pack directory"
cd "$XPACK_DIR" cd "$XPACK_DIR"
yarn percy exec -t 10000 -- -- \ yarn percy exec -t 10000 -- -- \

5
test/scripts/lint/eslint.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:eslint

5
test/scripts/lint/sasslint.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:sasslint

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:apiIntegrationTests

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_jest_integration

5
test/scripts/test/jest_unit.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_jest

5
test/scripts/test/karma_ci.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_karma_ci

5
test/scripts/test/mocha.sh Executable file
View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:mocha

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt run:test_package_safer_lodash_set

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
cd x-pack
checks-reporter-with-killswitch "X-Pack Jest" node --max-old-space-size=6144 scripts/jest --ci --verbose --maxWorkers=10

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
cd x-pack
checks-reporter-with-killswitch "X-Pack Karma Tests" yarn test:karma

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
cd x-pack
checks-reporter-with-killswitch "X-Pack List cyclic dependency test" node plugins/lists/scripts/check_circular_deps

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
cd x-pack
checks-reporter-with-killswitch "X-Pack SIEM cyclic dependency test" node plugins/security_solution/scripts/check_circular_deps

View file

@ -1,8 +1,15 @@
// Basically, this is a shortcut for catchError(catchInterruptions: false) {} // Basically, this is a shortcut for catchError(catchInterruptions: false) {}
// By default, catchError will swallow aborts/timeouts, which we almost never want // By default, catchError will swallow aborts/timeouts, which we almost never want
// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
def call(Map params = [:], Closure closure) { def call(Map params = [:], Closure closure) {
try {
closure()
} catch (ex) {
params.catchInterruptions = false params.catchInterruptions = false
return catchError(params, closure) catchError(params) {
throw ex
}
}
} }
return this return this

View file

@ -2,18 +2,61 @@ def withPostBuildReporting(Closure closure) {
try { try {
closure() closure()
} finally { } finally {
catchErrors { def parallelWorkspaces = []
runErrorReporter() try {
parallelWorkspaces = getParallelWorkspaces()
} catch(ex) {
print ex
} }
catchErrors { catchErrors {
runbld.junit() runErrorReporter([pwd()] + parallelWorkspaces)
} }
catchErrors { catchErrors {
publishJunit() publishJunit()
} }
catchErrors {
def parallelWorkspace = "${env.WORKSPACE}/parallel"
if (fileExists(parallelWorkspace)) {
dir(parallelWorkspace) {
def workspaceTasks = [:]
parallelWorkspaces.each { workspaceDir ->
workspaceTasks[workspaceDir] = {
dir(workspaceDir) {
catchErrors {
runbld.junit()
} }
}
}
}
if (workspaceTasks) {
parallel(workspaceTasks)
}
}
}
}
}
}
def getParallelWorkspaces() {
def workspaces = []
def parallelWorkspace = "${env.WORKSPACE}/parallel"
if (fileExists(parallelWorkspace)) {
dir(parallelWorkspace) {
// findFiles only returns files if you use glob, so look for a file that should be in every valid workspace
workspaces = findFiles(glob: '*/kibana/package.json')
.collect {
// get the paths to the kibana directories for the parallel workspaces
return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/')
}
}
}
return workspaces
} }
def notifyOnError(Closure closure) { def notifyOnError(Closure closure) {
@ -35,38 +78,45 @@ def notifyOnError(Closure closure) {
} }
} }
def functionalTestProcess(String name, Closure closure) { def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
return { processNumber -> // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
def kibanaPort = "61${processNumber}1" def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER
def esPort = "61${processNumber}2"
def esTransportPort = "61${processNumber}3" def kibanaPort = "61${parallelId}1"
def ingestManagementPackageRegistryPort = "61${processNumber}4" def esPort = "61${parallelId}2"
def esTransportPort = "61${parallelId}3"
def ingestManagementPackageRegistryPort = "61${parallelId}4"
withEnv([ withEnv([
"CI_PARALLEL_PROCESS_NUMBER=${processNumber}", "CI_GROUP=${parallelId}",
"REMOVE_KIBANA_INSTALL_DIR=1",
"CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
"TEST_KIBANA_HOST=localhost", "TEST_KIBANA_HOST=localhost",
"TEST_KIBANA_PORT=${kibanaPort}", "TEST_KIBANA_PORT=${kibanaPort}",
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}", "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}", "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
"TEST_ES_TRANSPORT_PORT=${esTransportPort}", "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
"INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
"IS_PIPELINE_JOB=1",
"JOB=${name}",
"KBN_NP_PLUGINS_BUILT=true", "KBN_NP_PLUGINS_BUILT=true",
]) { "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
notifyOnError { ] + additionalEnvs) {
closure() closure()
} }
} }
def functionalTestProcess(String name, Closure closure) {
return {
withFunctionalTestEnv(["JOB=${name}"], closure)
} }
} }
def functionalTestProcess(String name, String script) { def functionalTestProcess(String name, String script) {
return functionalTestProcess(name) { return functionalTestProcess(name) {
notifyOnError {
retryable(name) { retryable(name) {
runbld(script, "Execute ${name}") runbld(script, "Execute ${name}")
} }
} }
}
} }
def ossCiGroupProcess(ciGroup) { def ossCiGroupProcess(ciGroup) {
@ -110,11 +160,17 @@ def withGcsArtifactUpload(workerName, closure) {
def ARTIFACT_PATTERNS = [ def ARTIFACT_PATTERNS = [
'**/target/public/.kbn-optimizer-cache', '**/target/public/.kbn-optimizer-cache',
'target/kibana-*', 'target/kibana-*',
'target/test-metrics/*',
'target/kibana-security-solution/**/*.png', 'target/kibana-security-solution/**/*.png',
'target/junit/**/*', 'target/junit/**/*',
'test/**/screenshots/**/*.png', 'target/test-suites-ci-plan.json',
'test/**/screenshots/session/*.png',
'test/**/screenshots/failure/*.png',
'test/**/screenshots/diff/*.png',
'test/functional/failure_debug/html/*.html', 'test/functional/failure_debug/html/*.html',
'x-pack/test/**/screenshots/**/*.png', 'x-pack/test/**/screenshots/session/*.png',
'x-pack/test/**/screenshots/failure/*.png',
'x-pack/test/**/screenshots/diff/*.png',
'x-pack/test/functional/failure_debug/html/*.html', 'x-pack/test/functional/failure_debug/html/*.html',
'x-pack/test/functional/apps/reporting/reports/session/*.pdf', 'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
] ]
@ -129,6 +185,12 @@ def withGcsArtifactUpload(workerName, closure) {
ARTIFACT_PATTERNS.each { pattern -> ARTIFACT_PATTERNS.each { pattern ->
uploadGcsArtifact(uploadPrefix, pattern) uploadGcsArtifact(uploadPrefix, pattern)
} }
dir(env.WORKSPACE) {
ARTIFACT_PATTERNS.each { pattern ->
uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
}
}
} }
} }
}) })
@ -136,6 +198,10 @@ def withGcsArtifactUpload(workerName, closure) {
def publishJunit() { def publishJunit() {
junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
dir(env.WORKSPACE) {
junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
}
} }
def sendMail() { def sendMail() {
@ -201,26 +267,36 @@ def doSetup() {
} }
} }
def buildOss() { def buildOss(maxWorkers = '') {
notifyOnError { notifyOnError {
withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana") runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
} }
}
} }
def buildXpack() { def buildXpack(maxWorkers = '') {
notifyOnError { notifyOnError {
withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana") runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
} }
}
} }
def runErrorReporter() { def runErrorReporter() {
return runErrorReporter([pwd()])
}
def runErrorReporter(workspaces) {
def status = buildUtils.getBuildStatus() def status = buildUtils.getBuildStatus()
def dryRun = status != "ABORTED" ? "" : "--no-github-update" def dryRun = status != "ABORTED" ? "" : "--no-github-update"
def globs = workspaces.collect { "'${it}/target/junit/**/*.xml'" }.join(" ")
bash( bash(
""" """
source src/dev/ci_setup/setup_env.sh source src/dev/ci_setup/setup_env.sh
node scripts/report_failed_tests ${dryRun} target/junit/**/*.xml node scripts/report_failed_tests ${dryRun} ${globs}
""", """,
"Report failed tests, if necessary" "Report failed tests, if necessary"
) )
@ -259,6 +335,102 @@ def call(Map params = [:], Closure closure) {
} }
} }
// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
def withCiTaskQueue(Map options = [:], Closure closure) {
def setupClosure = {
// This can't use runbld, because it expects the source to be there, which isn't yet
bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
}
def config = [parallel: 24, setup: setupClosure] + options
withTaskQueue(config) {
closure.call()
}
}
def scriptTask(description, script) {
return {
withFunctionalTestEnv {
notifyOnError {
runbld(script, description)
}
}
}
}
def scriptTaskDocker(description, script) {
return {
withDocker(scriptTask(description, script))
}
}
def buildDocker() {
sh(
script: """
cp /usr/local/bin/runbld .ci/
cp /usr/local/bin/bash_standard_lib.sh .ci/
cd .ci
docker build -t kibana-ci -f ./Dockerfile .
""",
label: 'Build CI Docker image'
)
}
def withDocker(Closure closure) {
docker
.image('kibana-ci')
.inside(
"-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
closure
)
}
def buildOssPlugins() {
runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins')
}
def buildXpackPlugins() {
runbld('./test/scripts/jenkins_xpack_build_plugins.sh', 'Build X-Pack Plugins')
}
def withTasks(Map params = [worker: [:]], Closure closure) {
catchErrors {
def config = [name: 'ci-worker', size: 'xxl', ramDisk: true] + (params.worker ?: [:])
workers.ci(config) {
withCiTaskQueue(parallel: 24) {
parallel([
docker: {
retry(2) {
buildDocker()
}
},
// There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before set up the parallel workspaces
ossPlugins: { buildOssPlugins() },
xpackPlugins: { buildXpackPlugins() },
])
catchErrors {
closure()
}
}
}
}
}
def allCiTasks() {
withTasks {
tasks.check()
tasks.lint()
tasks.test()
tasks.functionalOss()
tasks.functionalXpack()
}
}
def pipelineLibraryTests() { def pipelineLibraryTests() {
whenChanged(['vars/', '.ci/pipeline-library/']) { whenChanged(['vars/', '.ci/pipeline-library/']) {
workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) { workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
@ -269,5 +441,4 @@ def pipelineLibraryTests() {
} }
} }
return this return this

5
vars/task.groovy Normal file
View file

@ -0,0 +1,5 @@
def call(Closure closure) {
withTaskQueue.addTask(closure)
}
return this

119
vars/tasks.groovy Normal file
View file

@ -0,0 +1,119 @@
def call(List<Closure> closures) {
withTaskQueue.addTasks(closures)
}
def check() {
tasks([
kibanaPipeline.scriptTask('Check Telemetry Schema', 'test/scripts/checks/telemetry.sh'),
kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'),
kibanaPipeline.scriptTask('Check Doc API Changes', 'test/scripts/checks/doc_api_changes.sh'),
kibanaPipeline.scriptTask('Check Types', 'test/scripts/checks/type_check.sh'),
kibanaPipeline.scriptTask('Check i18n', 'test/scripts/checks/i18n.sh'),
kibanaPipeline.scriptTask('Check File Casing', 'test/scripts/checks/file_casing.sh'),
kibanaPipeline.scriptTask('Check Lockfile Symlinks', 'test/scripts/checks/lock_file_symlinks.sh'),
kibanaPipeline.scriptTask('Check Licenses', 'test/scripts/checks/licenses.sh'),
kibanaPipeline.scriptTask('Verify Dependency Versions', 'test/scripts/checks/verify_dependency_versions.sh'),
kibanaPipeline.scriptTask('Verify NOTICE', 'test/scripts/checks/verify_notice.sh'),
kibanaPipeline.scriptTask('Test Projects', 'test/scripts/checks/test_projects.sh'),
kibanaPipeline.scriptTask('Test Hardening', 'test/scripts/checks/test_hardening.sh'),
])
}
def lint() {
tasks([
kibanaPipeline.scriptTask('Lint: eslint', 'test/scripts/lint/eslint.sh'),
kibanaPipeline.scriptTask('Lint: sasslint', 'test/scripts/lint/sasslint.sh'),
])
}
def test() {
tasks([
// These 2 tasks require isolation because of hard-coded, conflicting ports and such, so let's use Docker here
kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'),
kibanaPipeline.scriptTaskDocker('Mocha Tests', 'test/scripts/test/mocha.sh'),
kibanaPipeline.scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh'),
kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'),
kibanaPipeline.scriptTask('@elastic/safer-lodash-set Tests', 'test/scripts/test/safer_lodash_set.sh'),
kibanaPipeline.scriptTask('X-Pack SIEM cyclic dependency', 'test/scripts/test/xpack_siem_cyclic_dependency.sh'),
kibanaPipeline.scriptTask('X-Pack List cyclic dependency', 'test/scripts/test/xpack_list_cyclic_dependency.sh'),
kibanaPipeline.scriptTask('X-Pack Jest Unit Tests', 'test/scripts/test/xpack_jest_unit.sh'),
])
}
def functionalOss(Map params = [:]) {
def config = params ?: [ciGroups: true, firefox: true, accessibility: true, pluginFunctional: true, visualRegression: false]
task {
kibanaPipeline.buildOss(6)
if (config.ciGroups) {
def ciGroups = 1..12
tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it) })
}
if (config.firefox) {
task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh'))
}
if (config.accessibility) {
task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh'))
}
if (config.pluginFunctional) {
task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh'))
}
if (config.visualRegression) {
task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh'))
}
}
}
def functionalXpack(Map params = [:]) {
def config = params ?: [
ciGroups: true,
firefox: true,
accessibility: true,
pluginFunctional: true,
savedObjectsFieldMetrics:true,
pageLoadMetrics: false,
visualRegression: false,
]
task {
kibanaPipeline.buildXpack(10)
if (config.ciGroups) {
def ciGroups = 1..10
tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it) })
}
if (config.firefox) {
task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh'))
}
if (config.accessibility) {
task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'))
}
if (config.visualRegression) {
task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'))
}
if (config.savedObjectsFieldMetrics) {
task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'))
}
whenChanged([
'x-pack/plugins/security_solution/',
'x-pack/test/security_solution_cypress/',
'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/',
'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx',
]) {
task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh'))
}
}
}
return this

154
vars/withTaskQueue.groovy Normal file
View file

@ -0,0 +1,154 @@
import groovy.transform.Field
public static @Field TASK_QUEUES = [:]
public static @Field TASK_QUEUES_COUNTER = 0
/**
withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency.
This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once.
Each "process" will execute in a separate, unique, empty directory.
If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue
Using the queue currently requires an agent/worker.
Usage:
withTaskQueue(parallel: 10) {
task { print "This is a task" }
// This is the same as calling task() multiple times
tasks([ { print "Another task" }, { print "And another task" } ])
// Tasks can queue up subsequent tasks
task {
buildThing()
task { print "I depend on buildThing()" }
}
}
You can also define a setup task that each process should execute one time before executing tasks:
withTaskQueue(parallel: 10, setup: { sh "my-setup-scrupt.sh" }) {
...
}
*/
def call(Map options = [:], Closure closure) {
def config = [ parallel: 10 ] + options
def counter = ++TASK_QUEUES_COUNTER
// We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block
// This way, we could have multiple task queue instances in the same pipeline
withEnv(["TASK_QUEUE_ID=${counter}"]) {
withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [
tasks: [],
tmpFile: sh(script: 'mktemp', returnStdout: true).trim()
]
closure.call()
def processesExecuting = 0
def processes = [:]
def iterationId = 0
for(def i = 1; i <= config.parallel; i++) {
def j = i
processes["task-queue-process-${j}"] = {
catchErrors {
withEnv([
"TASK_QUEUE_PROCESS_ID=${j}",
"TASK_QUEUE_ITERATION_ID=${++iterationId}"
]) {
dir("${WORKSPACE}/parallel/${j}/kibana") {
if (config.setup) {
config.setup.call(j)
}
def isDone = false
while(!isDone) { // TODO some kind of timeout?
catchErrors {
if (!getTasks().isEmpty()) {
processesExecuting++
catchErrors {
def task
try {
task = getTasks().pop()
} catch (java.util.NoSuchElementException ex) {
return
}
task.call()
}
processesExecuting--
// If a task finishes, and no new tasks were queued up, and nothing else is executing
// Then all of the processes should wake up and exit
if (processesExecuting < 1 && getTasks().isEmpty()) {
taskNotify()
}
return
}
if (processesExecuting > 0) {
taskSleep()
return
}
// Queue is empty, no processes are executing
isDone = true
}
}
}
}
}
}
}
parallel(processes)
}
}
// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps
// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue
// There's a 20 minute timeout just in case something goes wrong,
// in which case this method will get called again if the process is actually supposed to be waiting.
def taskSleep() {
sh(script: """#!/bin/bash
TIMESTAMP=\$(date '+%s' -d "0 seconds ago")
for (( i=1; i<=240; i++ ))
do
if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ]
then
break
else
sleep 5
if [[ \$i == 240 ]]; then
echo "Waited for new tasks for 20 minutes, exiting in case something went wrong"
fi
fi
done
""", label: "Waiting for new tasks...")
}
// Used to let the task queue processes know that either a new task has been queued up, or work is complete
def taskNotify() {
sh "touch '${getTmpFile()}'"
}
def getTasks() {
return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tasks
}
def getTmpFile() {
return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tmpFile
}
def addTask(Closure closure) {
getTasks() << closure
taskNotify()
}
def addTasks(List<Closure> closures) {
closures.reverse().each {
getTasks() << it
}
taskNotify()
}

View file

@ -13,6 +13,8 @@ def label(size) {
return 'docker && tests-l' return 'docker && tests-l'
case 'xl': case 'xl':
return 'docker && tests-xl' return 'docker && tests-xl'
case 'xl-highmem':
return 'docker && tests-xl-highmem'
case 'xxl': case 'xxl':
return 'docker && tests-xxl' return 'docker && tests-xxl'
} }
@ -55,6 +57,11 @@ def base(Map params, Closure closure) {
} }
} }
sh(
script: "mkdir -p ${env.WORKSPACE}/tmp",
label: "Create custom temp directory"
)
def checkoutInfo = [:] def checkoutInfo = [:]
if (config.scm) { if (config.scm) {
@ -89,6 +96,7 @@ def base(Map params, Closure closure) {
"PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}", "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
"TEST_BROWSER_HEADLESS=1", "TEST_BROWSER_HEADLESS=1",
"GIT_BRANCH=${checkoutInfo.branch}", "GIT_BRANCH=${checkoutInfo.branch}",
"TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it
]) { ]) {
withCredentials([ withCredentials([
string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'), string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
@ -169,7 +177,9 @@ def parallelProcesses(Map params) {
sleep(delay) sleep(delay)
} }
processClosure(processNumber) withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) {
processClosure()
}
} }
} }

View file

@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import fs from 'fs';
import { ReactChildren } from 'react'; import { ReactChildren } from 'react';
import path from 'path'; import path from 'path';
import moment from 'moment'; import moment from 'moment';
@ -94,6 +95,12 @@ jest.mock('../shareable_runtime/components/rendered_element');
// @ts-expect-error // @ts-expect-error
RenderedElement.mockImplementation(() => 'RenderedElement'); RenderedElement.mockImplementation(() => 'RenderedElement');
// Some of the code requires that this directory exists, but the tests don't actually require any css to be present
const cssDir = path.resolve(__dirname, '../../../../built_assets/css');
if (!fs.existsSync(cssDir)) {
fs.mkdirSync(cssDir, { recursive: true });
}
addSerializer(styleSheetSerializer); addSerializer(styleSheetSerializer);
// Initialize Storyshots and build the Jest Snapshots // Initialize Storyshots and build the Jest Snapshots