[7.x] [src/dev/build] typescript-ify and convert tests to jest (#72525) (#73108)

Co-authored-by: spalger <spalger@users.noreply.github.com>

Co-authored-by: spalger <spalger@users.noreply.github.com>
This commit is contained in:
Spencer 2020-07-23 12:41:11 -07:00 committed by GitHub
parent 26d42f216a
commit 64ee94c2c0
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
103 changed files with 3011 additions and 2596 deletions

View file

@ -312,6 +312,7 @@
"@types/accept": "3.1.1",
"@types/angular": "^1.6.56",
"@types/angular-mocks": "^1.7.0",
"@types/archiver": "^3.1.0",
"@types/babel__core": "^7.1.2",
"@types/bluebird": "^3.1.1",
"@types/boom": "^7.2.0",
@ -393,6 +394,7 @@
"@types/testing-library__react-hooks": "^3.1.0",
"@types/type-detect": "^4.0.1",
"@types/uuid": "^3.4.4",
"@types/vinyl": "^2.0.4",
"@types/vinyl-fs": "^2.4.11",
"@types/zen-observable": "^0.8.0",
"@typescript-eslint/eslint-plugin": "^2.34.0",
@ -469,6 +471,7 @@
"license-checker": "^16.0.0",
"listr": "^0.14.1",
"load-grunt-config": "^3.0.1",
"load-json-file": "^6.2.0",
"mocha": "^7.1.1",
"mock-fs": "^4.12.0",
"mock-http-server": "1.3.0",

View file

@ -20,6 +20,7 @@
"normalize-path": "^3.0.0",
"moment": "^2.24.0",
"rxjs": "^6.5.5",
"strip-ansi": "^6.0.0",
"tree-kill": "^1.2.2",
"tslib": "^2.0.0"
},

View file

@ -19,7 +19,7 @@
export { withProcRunner, ProcRunner } from './proc_runner';
export * from './tooling_log';
export { createAbsolutePathSerializer } from './serializers';
export * from './serializers';
export {
CA_CERT_PATH,
ES_KEY_PATH,

View file

@ -21,7 +21,7 @@ import { REPO_ROOT } from '../repo_root';
export function createAbsolutePathSerializer(rootPath: string = REPO_ROOT) {
return {
serialize: (value: string) => value.replace(rootPath, '<absolute path>').replace(/\\/g, '/'),
test: (value: any) => typeof value === 'string' && value.startsWith(rootPath),
serialize: (value: string) => value.replace(rootPath, '<absolute path>').replace(/\\/g, '/'),
};
}

View file

@ -0,0 +1,25 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Build a jest snapshot serializer that replaces any instance of `Class`
 * with a stable placeholder like `<ClassName>` (or `<name>` when a custom
 * name is supplied), keeping snapshots free of instance-specific noise.
 */
export function createAnyInstanceSerializer(Class: Function, name?: string) {
  return {
    test(value: any) {
      return value instanceof Class;
    },
    serialize() {
      return `<${name ?? Class.name}>`;
    },
  };
}

View file

@ -17,4 +17,7 @@
* under the License.
*/
export { createAbsolutePathSerializer } from './absolute_path_serializer';
export * from './absolute_path_serializer';
export * from './strip_ansi_serializer';
export * from './recursive_serializer';
export * from './any_instance_serizlizer';

View file

@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Build a jest snapshot serializer that replaces values matching `test`
 * with `print(value)` and then hands the replacement back to jest's own
 * printer, so any matches nested inside the replacement are handled too.
 */
export function createRecursiveSerializer(test: (v: any) => boolean, print: (v: any) => string) {
  return {
    test(value: any) {
      return test(value);
    },
    serialize(value: any, ...rest: any[]) {
      const replacement = print(value);
      // jest passes its recursive printer as the final argument
      const printer = rest.pop()!;
      return printer(replacement, ...rest);
    },
  };
}

View file

@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import stripAnsi from 'strip-ansi';
import { createRecursiveSerializer } from './recursive_serializer';
/**
 * Build a jest snapshot serializer that strips ANSI escape sequences from
 * strings before they are written into snapshots.
 */
export function createStripAnsiSerializer() {
  const containsAnsi = (value: any) => typeof value === 'string' && stripAnsi(value) !== value;
  return createRecursiveSerializer(containsAnsi, (value) => stripAnsi(value));
}

View file

@ -17,4 +17,4 @@
* under the License.
*/
export * from './run';
export * from './src/index';

View file

@ -1,6 +1,7 @@
{
"extends": "../../tsconfig.json",
"include": [
"./index.d.ts",
"./src/**/*.ts",
"./dist/*.d.ts",
],

View file

@ -17,160 +17,158 @@
* under the License.
*/
import { ToolingLog } from '@kbn/dev-utils';
import { ToolingLog, createAnyInstanceSerializer } from '@kbn/dev-utils';
import { readCliArgs } from './args';
const fn = (...subArgs: string[]) => {
const result = readCliArgs(['node', 'scripts/build', ...subArgs]);
(result as any).log = result.log instanceof ToolingLog ? '<ToolingLog>' : String(result.log);
return result;
};
expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog));
it('renders help if `--help` passed', () => {
expect(fn('--help')).toMatchInlineSnapshot(`
Object {
"log": "undefined",
"showHelp": true,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--help'])).toMatchInlineSnapshot(`
Object {
"log": <ToolingLog>,
"showHelp": true,
"unknownFlags": Array [],
}
`);
});
it('build default and oss dist for current platform, without packages, by default', () => {
expect(fn()).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": false,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build'])).toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": false,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});
it('builds packages if --all-platforms is passed', () => {
expect(fn('--all-platforms')).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": true,
"createDockerPackage": true,
"createDockerUbiPackage": true,
"createRpmPackage": true,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--all-platforms'])).toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": true,
"createDockerPackage": true,
"createDockerUbiPackage": true,
"createRpmPackage": true,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});
it('limits packages if --rpm passed with --all-platforms', () => {
expect(fn('--all-platforms', '--rpm')).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": true,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--rpm'])).toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": true,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});
it('limits packages if --deb passed with --all-platforms', () => {
expect(fn('--all-platforms', '--deb')).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": true,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--deb'])).toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": true,
"createDockerPackage": false,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});
it('limits packages if --docker passed with --all-platforms', () => {
expect(fn('--all-platforms', '--docker')).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": true,
"createDockerUbiPackage": true,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker']))
.toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": true,
"createDockerUbiPackage": true,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});
it('limits packages if --docker passed with --skip-docker-ubi and --all-platforms', () => {
expect(fn('--all-platforms', '--docker', '--skip-docker-ubi')).toMatchInlineSnapshot(`
Object {
"buildArgs": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": true,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": "<ToolingLog>",
"showHelp": false,
"unknownFlags": Array [],
}
`);
expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker', '--skip-docker-ubi']))
.toMatchInlineSnapshot(`
Object {
"buildOptions": Object {
"buildDefaultDist": true,
"buildOssDist": true,
"createArchives": true,
"createDebPackage": false,
"createDockerPackage": true,
"createDockerUbiPackage": false,
"createRpmPackage": false,
"downloadFreshNode": true,
"isRelease": false,
"targetAllPlatforms": true,
"versionQualifier": "",
},
"log": <ToolingLog>,
"showHelp": false,
"unknownFlags": Array [],
}
`);
});

View file

@ -20,16 +20,9 @@
import getopts from 'getopts';
import { ToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
interface ParsedArgs {
showHelp: boolean;
unknownFlags: string[];
log?: ToolingLog;
buildArgs?: {
[key: string]: any;
};
}
import { BuildOptions } from './build_distributables';
export function readCliArgs(argv: string[]): ParsedArgs {
export function readCliArgs(argv: string[]) {
const unknownFlags: string[] = [];
const flags = getopts(argv, {
boolean: [
@ -70,8 +63,16 @@ export function readCliArgs(argv: string[]): ParsedArgs {
},
});
const log = new ToolingLog({
level: pickLevelFromFlags(flags, {
default: flags.debug === false ? 'info' : 'debug',
}),
writeTo: process.stdout,
});
if (unknownFlags.length || flags.help) {
return {
log,
showHelp: true,
unknownFlags,
};
@ -83,13 +84,6 @@ export function readCliArgs(argv: string[]): ParsedArgs {
flags['all-platforms'] = true;
}
const log = new ToolingLog({
level: pickLevelFromFlags(flags, {
default: flags.debug === false ? 'info' : 'debug',
}),
writeTo: process.stdout,
});
function isOsPackageDesired(name: string) {
if (flags['skip-os-packages'] || !flags['all-platforms']) {
return false;
@ -103,22 +97,24 @@ export function readCliArgs(argv: string[]): ParsedArgs {
return Boolean(flags[name]);
}
const buildOptions: BuildOptions = {
isRelease: Boolean(flags.release),
versionQualifier: flags['version-qualifier'],
buildOssDist: flags.oss !== false,
buildDefaultDist: !flags.oss,
downloadFreshNode: !Boolean(flags['skip-node-download']),
createArchives: !Boolean(flags['skip-archives']),
createRpmPackage: isOsPackageDesired('rpm'),
createDebPackage: isOsPackageDesired('deb'),
createDockerPackage: isOsPackageDesired('docker'),
createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']),
targetAllPlatforms: Boolean(flags['all-platforms']),
};
return {
log,
showHelp: false,
unknownFlags: [],
log,
buildArgs: {
isRelease: Boolean(flags.release),
versionQualifier: flags['version-qualifier'],
buildOssDist: flags.oss !== false,
buildDefaultDist: !flags.oss,
downloadFreshNode: !Boolean(flags['skip-node-download']),
createArchives: !Boolean(flags['skip-archives']),
createRpmPackage: isOsPackageDesired('rpm'),
createDebPackage: isOsPackageDesired('deb'),
createDockerPackage: isOsPackageDesired('docker'),
createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']),
targetAllPlatforms: Boolean(flags['all-platforms']),
},
buildOptions,
};
}

View file

@ -1,174 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { getConfig, createRunner } from './lib';
import {
BuildKibanaPlatformPluginsTask,
BuildPackagesTask,
CleanEmptyFoldersTask,
CleanExtraBinScriptsTask,
CleanExtraFilesFromModulesTask,
CleanNodeBuildsTask,
CleanPackagesTask,
CleanTask,
CleanTypescriptTask,
CopyBinScriptsTask,
CopySourceTask,
CreateArchivesSourcesTask,
CreateArchivesTask,
CreateDebPackageTask,
CreateDockerPackageTask,
CreateDockerUbiPackageTask,
CreateEmptyDirsAndFilesTask,
CreateNoticeFileTask,
CreatePackageJsonTask,
CreateReadmeTask,
CreateRpmPackageTask,
DownloadNodeBuildsTask,
ExtractNodeBuildsTask,
InstallChromiumTask,
InstallDependenciesTask,
OptimizeBuildTask,
PatchNativeModulesTask,
PathLengthTask,
RemovePackageJsonDepsTask,
RemoveWorkspacesTask,
TranspileBabelTask,
TranspileScssTask,
UpdateLicenseFileTask,
UuidVerificationTask,
VerifyEnvTask,
VerifyExistingNodeBuildsTask,
WriteShaSumsTask,
} from './tasks';
/**
 * Run every step of the Kibana distributable build, in order: verify and
 * reset the build environment, run the platform-generic build tasks, copy
 * results into platform-specific builds, package them, and finally write
 * checksums for the produced artifacts.
 *
 * @param {Object} options CLI-derived build flags; must include a tooling
 *   log as `options.log` plus the switches destructured below
 */
export async function buildDistributables(options) {
  const {
    log,
    isRelease,
    buildOssDist,
    buildDefaultDist,
    downloadFreshNode,
    createArchives,
    createRpmPackage,
    createDebPackage,
    createDockerPackage,
    createDockerUbiPackage,
    versionQualifier,
    targetAllPlatforms,
  } = options;

  // NOTE(review): the docker and targetAllPlatforms flags are not included
  // in this verbose log entry — confirm whether that omission is intentional
  log.verbose('building distributables with options:', {
    isRelease,
    buildOssDist,
    buildDefaultDist,
    downloadFreshNode,
    createArchives,
    createRpmPackage,
    createDebPackage,
    versionQualifier,
  });

  const config = await getConfig({
    isRelease,
    versionQualifier,
    targetAllPlatforms,
  });

  // runner decides per-task whether to execute for the oss/default builds
  const run = createRunner({
    config,
    log,
    buildOssDist,
    buildDefaultDist,
  });

  /**
   * verify, reset, and initialize the build environment
   */
  await run(VerifyEnvTask);
  await run(CleanTask);
  await run(downloadFreshNode ? DownloadNodeBuildsTask : VerifyExistingNodeBuildsTask);
  await run(ExtractNodeBuildsTask);

  /**
   * run platform-generic build tasks
   */
  await run(CopySourceTask);
  await run(CopyBinScriptsTask);
  await run(CreateEmptyDirsAndFilesTask);
  await run(CreateReadmeTask);
  await run(TranspileBabelTask);
  await run(BuildPackagesTask);
  await run(CreatePackageJsonTask);
  await run(InstallDependenciesTask);
  await run(RemoveWorkspacesTask);
  await run(CleanPackagesTask);
  await run(CreateNoticeFileTask);
  await run(UpdateLicenseFileTask);
  await run(RemovePackageJsonDepsTask);
  await run(TranspileScssTask);
  await run(BuildKibanaPlatformPluginsTask);
  await run(OptimizeBuildTask);
  await run(CleanTypescriptTask);
  await run(CleanExtraFilesFromModulesTask);
  await run(CleanEmptyFoldersTask);

  /**
   * copy generic build outputs into platform-specific build
   * directories and perform platform/architecture-specific steps
   */
  await run(CreateArchivesSourcesTask);
  await run(PatchNativeModulesTask);
  await run(InstallChromiumTask);
  await run(CleanExtraBinScriptsTask);
  await run(CleanNodeBuildsTask);
  await run(PathLengthTask);
  await run(UuidVerificationTask);

  /**
   * package platform-specific builds into archives
   * or os-specific packages in the target directory
   */
  if (createArchives) {
    // control w/ --skip-archives
    await run(CreateArchivesTask);
  }

  if (createDebPackage) {
    // control w/ --deb or --skip-os-packages
    await run(CreateDebPackageTask);
  }

  if (createRpmPackage) {
    // control w/ --rpm or --skip-os-packages
    await run(CreateRpmPackageTask);
  }

  if (createDockerPackage) {
    // control w/ --docker or --skip-docker-ubi or --skip-os-packages
    await run(CreateDockerPackageTask);

    if (createDockerUbiPackage) {
      await run(CreateDockerUbiPackageTask);
    }
  }

  /**
   * finalize artifacts by writing sha1sums of each into the target directory
   */
  await run(WriteShaSumsTask);
}

View file

@ -0,0 +1,123 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ToolingLog } from '@kbn/dev-utils';
import { Config, createRunner } from './lib';
import * as Tasks from './tasks';
/**
 * Flags that control which distributables are produced and how; built from
 * the CLI flags by readCliArgs() and consumed by buildDistributables().
 */
export interface BuildOptions {
  isRelease: boolean;
  buildOssDist: boolean;
  buildDefaultDist: boolean;
  downloadFreshNode: boolean;
  createArchives: boolean;
  createRpmPackage: boolean;
  createDebPackage: boolean;
  createDockerPackage: boolean;
  createDockerUbiPackage: boolean;
  // undefined when no --version-qualifier flag was passed on the CLI
  versionQualifier: string | undefined;
  targetAllPlatforms: boolean;
}
/**
 * Run every step of the Kibana distributable build, in order: verify and
 * reset the build environment, run the platform-generic build tasks, copy
 * results into platform-specific builds, package them, and finally write
 * checksums for the produced artifacts.
 *
 * @param log tooling log all tasks write their progress to
 * @param options CLI-derived flags selecting which artifacts to produce
 */
export async function buildDistributables(log: ToolingLog, options: BuildOptions) {
  log.verbose('building distributables with options:', options);

  const config = await Config.create(options);

  // runner decides per-task whether to execute for the oss/default builds
  const run = createRunner({
    config,
    log,
    buildDefaultDist: options.buildDefaultDist,
    buildOssDist: options.buildOssDist,
  });

  /**
   * verify, reset, and initialize the build environment
   */
  await run(Tasks.VerifyEnv);
  await run(Tasks.Clean);
  await run(options.downloadFreshNode ? Tasks.DownloadNodeBuilds : Tasks.VerifyExistingNodeBuilds);
  await run(Tasks.ExtractNodeBuilds);

  /**
   * run platform-generic build tasks
   */
  await run(Tasks.CopySource);
  await run(Tasks.CopyBinScripts);
  await run(Tasks.CreateEmptyDirsAndFiles);
  await run(Tasks.CreateReadme);
  await run(Tasks.TranspileBabel);
  await run(Tasks.BuildPackages);
  await run(Tasks.CreatePackageJson);
  await run(Tasks.InstallDependencies);
  await run(Tasks.RemoveWorkspaces);
  await run(Tasks.CleanPackages);
  await run(Tasks.CreateNoticeFile);
  await run(Tasks.UpdateLicenseFile);
  await run(Tasks.RemovePackageJsonDeps);
  await run(Tasks.TranspileScss);
  await run(Tasks.BuildKibanaPlatformPlugins);
  await run(Tasks.OptimizeBuild);
  await run(Tasks.CleanTypescript);
  await run(Tasks.CleanExtraFilesFromModules);
  await run(Tasks.CleanEmptyFolders);

  /**
   * copy generic build outputs into platform-specific build
   * directories and perform platform/architecture-specific steps
   */
  await run(Tasks.CreateArchivesSources);
  await run(Tasks.PatchNativeModules);
  await run(Tasks.InstallChromium);
  await run(Tasks.CleanExtraBinScripts);
  await run(Tasks.CleanNodeBuilds);
  await run(Tasks.PathLength);
  await run(Tasks.UuidVerification);

  /**
   * package platform-specific builds into archives
   * or os-specific packages in the target directory
   */
  if (options.createArchives) {
    // control w/ --skip-archives
    await run(Tasks.CreateArchives);
  }

  if (options.createDebPackage) {
    // control w/ --deb or --skip-os-packages
    await run(Tasks.CreateDebPackage);
  }

  if (options.createRpmPackage) {
    // control w/ --rpm or --skip-os-packages
    await run(Tasks.CreateRpmPackage);
  }

  if (options.createDockerPackage) {
    // control w/ --docker or --skip-docker-ubi or --skip-os-packages
    await run(Tasks.CreateDockerPackage);

    if (options.createDockerUbiPackage) {
      await run(Tasks.CreateDockerUbiPackage);
    }
  }

  /**
   * finalize artifacts by writing sha1sums of each into the target directory
   */
  await run(Tasks.WriteShaSums);
}

View file

@ -29,15 +29,15 @@ import { readCliArgs } from './args';
// ensure the cwd() is always the repo root
process.chdir(resolve(__dirname, '../../../'));
const { showHelp, unknownFlags, log, buildArgs } = readCliArgs(process.argv);
const { showHelp, unknownFlags, log, buildOptions } = readCliArgs(process.argv);
if (unknownFlags.length) {
const pluralized = unknownFlags.length > 1 ? 'flags' : 'flag';
console.log(chalk`\n{red Unknown ${pluralized}: ${unknownFlags.join(', ')}}\n`);
log.error(`Unknown ${pluralized}: ${unknownFlags.join(', ')}}`);
}
if (showHelp) {
console.log(
log.write(
dedent(chalk`
{dim usage:} node scripts/build
@ -63,7 +63,7 @@ if (showHelp) {
process.exit(1);
}
buildDistributables({ log, ...buildArgs }).catch((error) => {
buildDistributables(log, buildOptions!).catch((error) => {
if (!isErrorLogged(error)) {
log.error('Uncaught error');
log.error(error);

View file

@ -1,168 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from '@kbn/expect';
import sinon from 'sinon';
import { createBuild } from '../build';
// Legacy mocha suite for createBuild(); stubs the config dependency with
// sinon and asserts with @kbn/expect.
describe('dev/build/lib/build', () => {
  describe('Build instance', () => {
    describe('#isOss()', () => {
      it('returns true if passed oss: true', () => {
        const build = createBuild({
          oss: true,
        });
        expect(build.isOss()).to.be(true);
      });

      it('returns false if passed oss: false', () => {
        const build = createBuild({
          oss: false,
        });
        expect(build.isOss()).to.be(false);
      });
    });

    describe('#getName()', () => {
      it('returns kibana when oss: false', () => {
        const build = createBuild({
          oss: false,
        });
        expect(build.getName()).to.be('kibana');
      });

      it('returns kibana-oss when oss: true', () => {
        const build = createBuild({
          oss: true,
        });
        expect(build.getName()).to.be('kibana-oss');
      });
    });

    describe('#getLogTag()', () => {
      it('returns string with build name in it', () => {
        const build = createBuild({});
        expect(build.getLogTag()).to.contain(build.getName());
      });
    });

    describe('#resolvePath()', () => {
      it('uses passed config to resolve a path relative to the build', () => {
        const resolveFromRepo = sinon.stub();
        const build = createBuild({
          config: { resolveFromRepo },
        });
        build.resolvePath('bar');
        sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar');
      });

      it('passes all arguments to config.resolveFromRepo()', () => {
        const resolveFromRepo = sinon.stub();
        const build = createBuild({
          config: { resolveFromRepo },
        });
        build.resolvePath('bar', 'baz', 'box');
        sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar', 'baz', 'box');
      });
    });

    describe('#resolvePathForPlatform()', () => {
      it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => {
        const resolveFromRepo = sinon.stub();
        const getBuildVersion = sinon.stub().returns('buildVersion');
        const build = createBuild({
          oss: true,
          config: { resolveFromRepo, getBuildVersion },
        });
        const getBuildName = sinon.stub().returns('platformName');
        const platform = {
          getBuildName,
        };
        build.resolvePathForPlatform(platform, 'foo', 'bar');
        sinon.assert.calledWithExactly(getBuildName);
        sinon.assert.calledWithExactly(getBuildVersion);
        sinon.assert.calledWithExactly(
          resolveFromRepo,
          'build',
          'oss',
          `kibana-buildVersion-platformName`,
          'foo',
          'bar'
        );
      });
    });

    describe('#getPlatformArchivePath()', () => {
      // stubs are shared across the two its below; call history is reset
      // in beforeEach, but the isWindows return value set in the second
      // test intentionally persists only within that test's invocation
      const sandbox = sinon.createSandbox();
      const config = {
        resolveFromRepo: sandbox.stub(),
        getBuildVersion: sandbox.stub().returns('buildVersion'),
      };
      const build = createBuild({
        oss: false,
        config,
      });
      const platform = {
        getBuildName: sandbox.stub().returns('platformName'),
        isWindows: sandbox.stub().returns(false),
      };
      beforeEach(() => {
        sandbox.resetHistory();
      });

      it('uses config.resolveFromRepo(), config.getBuildVersion, and platform.getBuildName() to create path', () => {
        build.getPlatformArchivePath(platform);
        sinon.assert.calledWithExactly(platform.getBuildName);
        sinon.assert.calledWithExactly(platform.isWindows);
        sinon.assert.calledWithExactly(config.getBuildVersion);
        sinon.assert.calledWithExactly(
          config.resolveFromRepo,
          'target',
          `kibana-buildVersion-platformName.tar.gz`
        );
      });

      it('creates .zip path if platform is windows', () => {
        platform.isWindows.returns(true);
        build.getPlatformArchivePath(platform);
        sinon.assert.calledWithExactly(platform.getBuildName);
        sinon.assert.calledWithExactly(platform.isWindows);
        sinon.assert.calledWithExactly(config.getBuildVersion);
        sinon.assert.calledWithExactly(
          config.resolveFromRepo,
          'target',
          `kibana-buildVersion-platformName.zip`
        );
      });
    });
  });
});

View file

@ -1,174 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { resolve } from 'path';
import expect from '@kbn/expect';
import pkg from '../../../../../package.json';
import { getConfig } from '../config';
import { getVersionInfo } from '../version_info';
// Legacy mocha suite for getConfig(); asserts with @kbn/expect.
describe('dev/build/lib/config', () => {
  const setup = async function ({ targetAllPlatforms = true } = {}) {
    // NOTE(review): isRelease is randomized per setup() call, so every
    // assertion below must hold for both release and non-release configs
    const isRelease = Boolean(Math.round(Math.random()));
    const config = await getConfig({
      isRelease,
      targetAllPlatforms,
    });
    const buildInfo = await getVersionInfo({
      isRelease,
      pkg,
    });
    return { config, buildInfo };
  };

  describe('#getKibanaPkg()', () => {
    it('returns the parsed package.json from the Kibana repo', async () => {
      const { config } = await setup();
      expect(config.getKibanaPkg()).to.eql(pkg);
    });
  });

  describe('#getNodeVersion()', () => {
    it('returns the node version from the kibana package.json', async () => {
      const { config } = await setup();
      expect(config.getNodeVersion()).to.eql(pkg.engines.node);
    });
  });

  describe('#getRepoRelativePath()', () => {
    it('converts an absolute path to relative path, from the root of the repo', async () => {
      const { config } = await setup();
      expect(config.getRepoRelativePath(__dirname)).to.match(/^src[\/\\]dev[\/\\]build/);
    });
  });

  describe('#resolveFromRepo()', () => {
    it('resolves a relative path', async () => {
      const { config } = await setup();
      expect(config.resolveFromRepo('src/dev/build/lib/__tests__')).to.be(__dirname);
    });

    it('resolves a series of relative paths', async () => {
      const { config } = await setup();
      expect(config.resolveFromRepo('src', 'dev', 'build', 'lib', '__tests__')).to.be(__dirname);
    });
  });

  describe('#getPlatform()', () => {
    it('throws error when platform does not exist', async () => {
      const { config } = await setup();
      const fn = () => config.getPlatform('foo', 'x64');
      expect(fn).to.throwException(/Unable to find platform/);
    });

    it('throws error when architecture does not exist', async () => {
      const { config } = await setup();
      const fn = () => config.getPlatform('linux', 'foo');
      expect(fn).to.throwException(/Unable to find platform/);
    });
  });

  describe('#getTargetPlatforms()', () => {
    it('returns an array of all platform objects', async () => {
      const { config } = await setup();
      expect(
        config
          .getTargetPlatforms()
          .map((p) => p.getNodeArch())
          .sort()
      ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
    });

    it('returns just this platform when targetAllPlatforms = false', async () => {
      const { config } = await setup({ targetAllPlatforms: false });
      const platforms = config.getTargetPlatforms();
      expect(platforms).to.be.an('array');
      expect(platforms).to.have.length(1);
      expect(platforms[0]).to.be(config.getPlatformForThisOs());
    });
  });

  describe('#getNodePlatforms()', () => {
    it('returns all platforms', async () => {
      const { config } = await setup();
      expect(
        config
          .getTargetPlatforms()
          .map((p) => p.getNodeArch())
          .sort()
      ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
    });

    it('returns this platform and linux, when targetAllPlatforms = false', async () => {
      const { config } = await setup({ targetAllPlatforms: false });
      const platforms = config.getNodePlatforms();
      expect(platforms).to.be.an('array');
      // linux is always included so node can be bundled for CI/docker use
      if (process.platform !== 'linux') {
        expect(platforms).to.have.length(2);
        expect(platforms[0]).to.be(config.getPlatformForThisOs());
        expect(platforms[1]).to.be(config.getPlatform('linux', 'x64'));
      } else {
        expect(platforms).to.have.length(1);
        expect(platforms[0]).to.be(config.getPlatform('linux', 'x64'));
      }
    });
  });

  describe('#getPlatformForThisOs()', () => {
    it('returns the platform that matches the arch of this machine', async () => {
      const { config } = await setup();
      const currentPlatform = config.getPlatformForThisOs();
      expect(currentPlatform.getName()).to.be(process.platform);
      expect(currentPlatform.getArchitecture()).to.be(process.arch);
    });
  });

  describe('#getBuildVersion()', () => {
    it('returns the version from the build info', async () => {
      const { config, buildInfo } = await setup();
      expect(config.getBuildVersion()).to.be(buildInfo.buildVersion);
    });
  });

  describe('#getBuildNumber()', () => {
    it('returns the number from the build info', async () => {
      const { config, buildInfo } = await setup();
      expect(config.getBuildNumber()).to.be(buildInfo.buildNumber);
    });
  });

  describe('#getBuildSha()', () => {
    it('returns the sha from the build info', async () => {
      const { config, buildInfo } = await setup();
      expect(config.getBuildSha()).to.be(buildInfo.buildSha);
    });
  });

  describe('#resolveFromTarget()', () => {
    it('resolves a relative path, from the target directory', async () => {
      const { config } = await setup();
      expect(config.resolveFromTarget()).to.be(resolve(__dirname, '../../../../../target'));
    });
  });
});

View file

@ -1,237 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { createServer } from 'http';
import { join } from 'path';
import { tmpdir } from 'os';
import { mkdirp, readFileSync } from 'fs-extra';
import del from 'del';
import sinon from 'sinon';
import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test';
import expect from '@kbn/expect';
import Wreck from '@hapi/wreck';
import { ToolingLog } from '@kbn/dev-utils';
import { download } from '../download';
// Create (if necessary) and return a per-CI-process scratch directory for
// download tests. (Removed a stray console.log(dir) debugging leftover.)
const getTempFolder = async () => {
  const dir = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir');
  await mkdirp(dir);
  return dir;
};
// Tests for download(): fetches a URL to a destination file and verifies its
// sha256 checksum, retrying on failure. Uses a local one-shot HTTP server.
describe('src/dev/build/tasks/nodejs/download', () => {
  const sandbox = sinon.createSandbox();

  // scratch paths, recreated before every test and deleted after
  let TMP_DESTINATION;
  let TMP_DIR;
  beforeEach(async () => {
    TMP_DIR = await getTempFolder();
    TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__');
  });
  afterEach(async () => {
    await del(TMP_DIR, { force: true });
  });
  afterEach(() => sandbox.reset());

  const onLogLine = sandbox.stub();
  const log = new ToolingLog({
    level: 'verbose',
    writeTo: {
      write: onLogLine,
    },
  });

  // sha256 of the literal string 'foo'
  const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
  const createSendHandler = (send) => (req, res) => {
    res.statusCode = 200;
    res.end(send);
  };
  const sendErrorHandler = (req, res) => {
    res.statusCode = 500;
    res.end();
  };

  // each request is served by nextHandler, which is cleared after use so that
  // unexpected extra requests fall back to a 500 response
  let server;
  let serverUrl;
  let nextHandler;
  afterEach(() => (nextHandler = null));
  before(async () => {
    server = createServer((req, res) => {
      if (!nextHandler) {
        nextHandler = sendErrorHandler;
      }
      const handler = nextHandler;
      nextHandler = null;
      handler(req, res);
    });
    // resolve once listening on an ephemeral port, reject on server error
    await Promise.race([
      new Promise((resolve, reject) => {
        server.once('error', reject);
      }),
      new Promise((resolve) => {
        server.listen(resolve);
      }),
    ]);
    serverUrl = `http://localhost:${server.address().port}/`;
  });
  after(async () => {
    server.close();
    server = null;
  });

  it('downloads from URL and checks that content matches sha256', async () => {
    nextHandler = createSendHandler('foo');
    await download({
      log,
      url: serverUrl,
      destination: TMP_DESTINATION,
      sha256: FOO_SHA256,
    });
    expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo');
  });

  it('rejects and deletes destination if sha256 does not match', async () => {
    nextHandler = createSendHandler('foo');
    try {
      await download({
        log,
        url: serverUrl,
        destination: TMP_DESTINATION,
        sha256: 'bar',
      });
      throw new Error('Expected download() to reject');
    } catch (error) {
      expect(error)
        .to.have.property('message')
        .contain('does not match the expected sha256 checksum');
    }
    // the partially-downloaded file must have been cleaned up
    try {
      readFileSync(TMP_DESTINATION);
      throw new Error('Expected download to be deleted');
    } catch (error) {
      expect(error).to.have.property('code', 'ENOENT');
    }
  });

  // NOTE: fixed typo in the original label ('reties' -> 'retries')
  describe('retries download retries: number of times', () => {
    it('resolves if retries = 1 and first attempt fails', async () => {
      let reqCount = 0;
      nextHandler = function sequenceHandler(req, res) {
        switch (++reqCount) {
          case 1:
            nextHandler = sequenceHandler;
            return sendErrorHandler(req, res);
          default:
            return createSendHandler('foo')(req, res);
        }
      };
      await download({
        log,
        url: serverUrl,
        destination: TMP_DESTINATION,
        sha256: FOO_SHA256,
        retries: 2,
      });
      expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo');
    });

    it('resolves if first fails, second is bad shasum, but third succeeds', async () => {
      let reqCount = 0;
      nextHandler = function sequenceHandler(req, res) {
        switch (++reqCount) {
          case 1:
            nextHandler = sequenceHandler;
            return sendErrorHandler(req, res);
          case 2:
            nextHandler = sequenceHandler;
            return createSendHandler('bar')(req, res);
          default:
            return createSendHandler('foo')(req, res);
        }
      };
      await download({
        log,
        url: serverUrl,
        destination: TMP_DESTINATION,
        sha256: FOO_SHA256,
        retries: 2,
      });
    });

    it('makes 6 requests if `retries: 5` and all failed', async () => {
      let reqCount = 0;
      nextHandler = function sequenceHandler(req, res) {
        reqCount += 1;
        nextHandler = sequenceHandler;
        sendErrorHandler(req, res);
      };
      try {
        await download({
          log,
          url: serverUrl,
          destination: TMP_DESTINATION,
          sha256: FOO_SHA256,
          retries: 5,
        });
        throw new Error('Expected download() to reject');
      } catch (error) {
        expect(error).to.have.property('message').contain('Request failed with status code 500');
        expect(reqCount).to.be(6);
      }
    });
  });

  describe('sha256 option not supplied', () => {
    // stub Wreck so no real network request can be made
    before(() => {
      sinon.stub(Wreck, 'request');
    });
    after(() => {
      Wreck.request.restore();
    });

    it('refuses to download', async () => {
      try {
        await download({
          log,
          url: 'http://google.com',
          destination: TMP_DESTINATION,
        });
        throw new Error('expected download() to reject');
      } catch (error) {
        expect(error).to.have.property('message').contain('refusing to download');
      }
    });
  });
});

View file

@ -1,58 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import stripAnsi from 'strip-ansi';
import { ToolingLog } from '@kbn/dev-utils';
import { exec } from '../exec';
// Tests for exec(): runs a child process and pipes its output through the log.
describe('dev/build/lib/exec', () => {
  const sandbox = sinon.createSandbox();
  afterEach(() => sandbox.reset());

  // stub that receives every log line with ANSI color codes stripped
  const logWrite = sandbox.stub();
  const log = new ToolingLog({
    level: 'verbose',
    writeTo: {
      write: (rawChunk) => logWrite(stripAnsi(rawChunk)),
    },
  });

  it('executes a command, logs the command, and logs the output', async () => {
    await exec(log, process.execPath, ['-e', 'console.log("hi")']);
    // logs the command before execution
    sinon.assert.calledWithExactly(logWrite, sinon.match(`$ ${process.execPath}`));
    // log output of the process
    sinon.assert.calledWithExactly(logWrite, sinon.match(/debg\s+hi/));
  });

  it('logs using level: option', async () => {
    await exec(log, process.execPath, ['-e', 'console.log("hi")'], { level: 'info' });
    // log output of the process
    sinon.assert.calledWithExactly(logWrite, sinon.match(/info\s+hi/));
  });
});

View file

@ -1,362 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { resolve } from 'path';
import { chmodSync, statSync } from 'fs';
import del from 'del';
import expect from '@kbn/expect';
import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs';
// scratch directory, recreated before each test and removed afterwards
const TMP = resolve(__dirname, '__tmp__');
// static fixtures checked into the repo next to this test
const FIXTURES = resolve(__dirname, 'fixtures');
const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz');
const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz');
const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt');
const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
// file modes behave differently on Windows, so several assertions branch on this
const isWindows = /^win/.test(process.platform);
// Get the lowest three octal digits of a file's mode as a string (e.g. '755' or '644').
function getCommonMode(path) {
  const { mode } = statSync(path);
  return mode.toString(8).slice(-3);
}
// Assert that `error` is the rejection the fs helpers produce when they are
// given a relative (non-absolute) path.
function assertNonAbsoluteError(error) {
  expect(error).to.be.an(Error);
  expect(error.message).to.contain('Please use absolute paths');
}
// Tests for the fs helpers (mkdirp/write/read/getChildPaths/copyAll/
// getFileHash/untar/gunzip). All helpers require absolute paths.
// Fixed typos in several test labels and thrown-error messages
// ('to not by copied' -> 'to not be copied', 'tarbar' -> 'tarball',
// 'passed thrid' -> 'passes third').
describe('dev/build/lib/fs', () => {
  // ensure WORLD_EXECUTABLE is actually executable by all
  before(async () => {
    chmodSync(WORLD_EXECUTABLE, 0o777);
  });

  // clean and recreate TMP directory
  beforeEach(async () => {
    await del(TMP);
    await mkdirp(TMP);
  });

  // cleanup TMP directory
  after(async () => {
    await del(TMP);
  });

  describe('mkdirp()', () => {
    it('rejects if path is not absolute', async () => {
      try {
        await mkdirp('foo/bar');
        throw new Error('Expected mkdirp() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('makes directory and necessary parent directories', async () => {
      const destination = resolve(TMP, 'a/b/c/d/e/f/g');
      expect(await mkdirp(destination)).to.be(undefined);
      expect(statSync(destination).isDirectory()).to.be(true);
    });
  });

  describe('write()', () => {
    it('rejects if path is not absolute', async () => {
      try {
        await write('foo/bar');
        throw new Error('Expected write() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('writes content to a file with existing parent directory', async () => {
      const destination = resolve(TMP, 'a');
      expect(await write(destination, 'bar')).to.be(undefined);
      expect(await read(destination)).to.be('bar');
    });

    it('writes content to a file with missing parents', async () => {
      const destination = resolve(TMP, 'a/b/c/d/e');
      expect(await write(destination, 'bar')).to.be(undefined);
      expect(await read(destination)).to.be('bar');
    });
  });

  describe('read()', () => {
    it('rejects if path is not absolute', async () => {
      try {
        await read('foo/bar');
        throw new Error('Expected read() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('reads file, resolves with result', async () => {
      expect(await read(BAR_TXT_PATH)).to.be('bar\n');
    });
  });

  describe('getChildPaths()', () => {
    it('rejects if path is not absolute', async () => {
      try {
        await getChildPaths('foo/bar');
        throw new Error('Expected getChildPaths() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('resolves with absolute paths to the children of directory', async () => {
      const path = resolve(FIXTURES, 'foo_dir');
      expect((await getChildPaths(path)).sort()).to.eql([
        resolve(FIXTURES, 'foo_dir/.bar'),
        BAR_TXT_PATH,
        resolve(FIXTURES, 'foo_dir/foo'),
      ]);
    });

    it('rejects with ENOENT if path does not exist', async () => {
      try {
        await getChildPaths(resolve(FIXTURES, 'notrealpath'));
        throw new Error('Expected getChildPaths() to reject');
      } catch (error) {
        expect(error).to.have.property('code', 'ENOENT');
      }
    });
  });

  describe('copyAll()', () => {
    it('rejects if source path is not absolute', async () => {
      try {
        await copyAll('foo/bar', __dirname);
        throw new Error('Expected copyAll() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if destination path is not absolute', async () => {
      try {
        await copyAll(__dirname, 'foo/bar');
        throw new Error('Expected copyAll() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if neither path is not absolute', async () => {
      try {
        await copyAll('foo/bar', 'foo/bar');
        throw new Error('Expected copyAll() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => {
      const destination = resolve(TMP, 'a/b/c');
      await copyAll(FIXTURES, destination);
      expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).to.eql([
        resolve(destination, 'foo_dir/bar.txt'),
        resolve(destination, 'foo_dir/foo'),
      ]);
      // Windows does not preserve the executable bit
      expect(getCommonMode(resolve(destination, 'bin/world_executable'))).to.be(
        isWindows ? '666' : '777'
      );
      expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).to.be(
        isWindows ? '666' : '644'
      );
    });

    it('applies select globs if specified, ignores dot files', async () => {
      const destination = resolve(TMP, 'a/b/c/d');
      await copyAll(FIXTURES, destination, {
        select: ['**/*bar*'],
      });
      try {
        statSync(resolve(destination, 'bin/world_executable'));
        throw new Error('expected bin/world_executable to not be copied');
      } catch (error) {
        expect(error).to.have.property('code', 'ENOENT');
      }
      try {
        statSync(resolve(destination, 'foo_dir/.bar'));
        throw new Error('expected foo_dir/.bar to not be copied');
      } catch (error) {
        expect(error).to.have.property('code', 'ENOENT');
      }
      expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
    });

    it('supports select globs and dot option together', async () => {
      const destination = resolve(TMP, 'a/b/c/d');
      await copyAll(FIXTURES, destination, {
        select: ['**/*bar*'],
        dot: true,
      });
      try {
        statSync(resolve(destination, 'bin/world_executable'));
        throw new Error('expected bin/world_executable to not be copied');
      } catch (error) {
        expect(error).to.have.property('code', 'ENOENT');
      }
      expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
      expect(await read(resolve(destination, 'foo_dir/.bar'))).to.be('dotfile\n');
    });

    it('supports atime and mtime', async () => {
      const destination = resolve(TMP, 'a/b/c/d/e');
      const time = new Date(1425298511000);
      await copyAll(FIXTURES, destination, {
        time,
      });
      const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt'));
      const fooDir = statSync(resolve(destination, 'foo_dir'));
      // precision is platform specific
      const oneDay = 86400000;
      expect(Math.abs(barTxt.atimeMs - time.getTime())).to.be.below(oneDay);
      expect(Math.abs(fooDir.atimeMs - time.getTime())).to.be.below(oneDay);
      expect(Math.abs(barTxt.mtimeMs - time.getTime())).to.be.below(oneDay);
    });
  });

  describe('getFileHash()', () => {
    it('rejects if path is not absolute', async () => {
      try {
        await getFileHash('foo/bar');
        throw new Error('Expected getFileHash() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('resolves with the sha1 hash of a file', async () => {
      expect(await getFileHash(BAR_TXT_PATH, 'sha1')).to.be(
        'e242ed3bffccdf271b7fbaf34ed72d089537b42f'
      );
    });
    it('resolves with the sha256 hash of a file', async () => {
      expect(await getFileHash(BAR_TXT_PATH, 'sha256')).to.be(
        '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
      );
    });
    it('resolves with the md5 hash of a file', async () => {
      expect(await getFileHash(BAR_TXT_PATH, 'md5')).to.be('c157a79031e1c40f85931829bc5fc552');
    });
  });

  describe('untar()', () => {
    it('rejects if source path is not absolute', async () => {
      try {
        await untar('foo/bar', '**/*', __dirname);
        throw new Error('Expected untar() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if destination path is not absolute', async () => {
      try {
        await untar(__dirname, '**/*', 'foo/bar');
        throw new Error('Expected untar() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if neither path is not absolute', async () => {
      try {
        await untar('foo/bar', '**/*', 'foo/bar');
        throw new Error('Expected untar() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('extracts tarball from source into destination, creating destination if necessary', async () => {
      const destination = resolve(TMP, 'a/b/c/d/e/f');
      await untar(FOO_TAR_PATH, destination);
      expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n');
      expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).to.be('foo\n');
    });

    it('passes third argument to Extract class, overriding path with destination', async () => {
      const destination = resolve(TMP, 'a/b/c');
      await untar(FOO_TAR_PATH, destination, {
        path: '/dev/null',
        strip: 1,
      });
      expect(await read(resolve(destination, 'bar.txt'))).to.be('bar\n');
      expect(await read(resolve(destination, 'foo/foo.txt'))).to.be('foo\n');
    });
  });

  describe('gunzip()', () => {
    it('rejects if source path is not absolute', async () => {
      try {
        await gunzip('foo/bar', '**/*', __dirname);
        throw new Error('Expected gunzip() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if destination path is not absolute', async () => {
      try {
        await gunzip(__dirname, '**/*', 'foo/bar');
        throw new Error('Expected gunzip() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('rejects if neither path is not absolute', async () => {
      try {
        await gunzip('foo/bar', '**/*', 'foo/bar');
        throw new Error('Expected gunzip() to reject');
      } catch (error) {
        assertNonAbsoluteError(error);
      }
    });

    it('extracts gzip from source into destination, creating destination if necessary', async () => {
      const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt');
      await gunzip(FOO_GZIP_PATH, destination);
      expect(await read(resolve(destination))).to.be('foo\n');
    });
  });
});

View file

@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from '@kbn/expect';
import { createPlatform } from '../platform';
// Tests for createPlatform(name, arch, buildName) accessors.
describe('src/dev/build/lib/platform', () => {
  describe('getName()', () => {
    it('returns the name argument', () => {
      const platform = createPlatform('foo');
      expect(platform.getName()).to.be('foo');
    });
  });

  describe('getNodeArch()', () => {
    it('returns the node arch for the passed name', () => {
      const platform = createPlatform('win32', 'x64');
      expect(platform.getNodeArch()).to.be('win32-x64');
    });
  });

  describe('getBuildName()', () => {
    it('returns the build name for the passed name', () => {
      const platform = createPlatform('linux', 'arm64', 'linux-aarch64');
      expect(platform.getBuildName()).to.be('linux-aarch64');
    });
  });

  describe('isWindows()', () => {
    it('returns true if name is win32', () => {
      const win = createPlatform('win32', 'x64');
      const linux = createPlatform('linux', 'x64');
      const mac = createPlatform('darwin', 'x64');
      expect(win.isWindows()).to.be(true);
      expect(linux.isWindows()).to.be(false);
      expect(mac.isWindows()).to.be(false);
    });
  });

  describe('isLinux()', () => {
    it('returns true if name is linux', () => {
      const win = createPlatform('win32', 'x64');
      const linux = createPlatform('linux', 'x64');
      const mac = createPlatform('darwin', 'x64');
      expect(win.isLinux()).to.be(false);
      expect(linux.isLinux()).to.be(true);
      expect(mac.isLinux()).to.be(false);
    });
  });

  describe('isMac()', () => {
    it('returns true if name is darwin', () => {
      const win = createPlatform('win32', 'x64');
      const linux = createPlatform('linux', 'x64');
      const mac = createPlatform('darwin', 'x64');
      expect(win.isMac()).to.be(false);
      expect(linux.isMac()).to.be(false);
      expect(mac.isMac()).to.be(true);
    });
  });
});

View file

@ -1,184 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import expect from '@kbn/expect';
import { ToolingLog } from '@kbn/dev-utils';
import { createRunner } from '../runner';
import { isErrorLogged, markErrorLogged } from '../errors';
// Tests for createRunner(): dispatches "global" tasks once and "local" tasks
// once per enabled build (oss / default). The sinon matchers below are shared
// across the nested describes, so this suite is order/state sensitive.
describe('dev/build/lib/runner', () => {
  const sandbox = sinon.createSandbox();
  const config = {};
  const onLogLine = sandbox.stub();
  const log = new ToolingLog({
    level: 'verbose',
    writeTo: {
      write: onLogLine,
    },
  });
  // structural matcher for any build object passed to a local task
  const buildMatcher = sinon.match({
    isOss: sinon.match.func,
    resolvePath: sinon.match.func,
    resolvePathForPlatform: sinon.match.func,
    getPlatformArchivePath: sinon.match.func,
    getName: sinon.match.func,
    getLogTag: sinon.match.func,
  });
  // refinements distinguishing the oss build from the default build
  const ossBuildMatcher = buildMatcher.and(sinon.match((b) => b.isOss(), 'is oss build'));
  const defaultBuildMatcher = buildMatcher.and(sinon.match((b) => !b.isOss(), 'is not oss build'));
  afterEach(() => sandbox.reset());
  describe('defaults', () => {
    const run = createRunner({
      config,
      log,
    });
    it('returns a promise', () => {
      expect(run({ run: sinon.stub() })).to.be.a(Promise);
    });
    it('runs global task once, passing config and log', async () => {
      const runTask = sinon.stub();
      await run({ global: true, run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
    });
    // with neither buildOssDist nor buildDefaultDist set, no builds exist
    it('does not call local tasks', async () => {
      const runTask = sinon.stub();
      await run({ run: runTask });
      sinon.assert.notCalled(runTask);
    });
  });
  describe('buildOssDist = true, buildDefaultDist = true', () => {
    const run = createRunner({
      config,
      log,
      buildOssDist: true,
      buildDefaultDist: true,
    });
    it('runs global task once, passing config and log', async () => {
      const runTask = sinon.stub();
      await run({ global: true, run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
    });
    it('runs local tasks twice, passing config log and both builds', async () => {
      const runTask = sinon.stub();
      await run({ run: runTask });
      sinon.assert.calledTwice(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
      sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
    });
  });
  describe('just default dist', () => {
    const run = createRunner({
      config,
      log,
      buildDefaultDist: true,
    });
    it('runs global task once, passing config and log', async () => {
      const runTask = sinon.stub();
      await run({ global: true, run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
    });
    it('runs local tasks once, passing config log and default build', async () => {
      const runTask = sinon.stub();
      await run({ run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher);
    });
  });
  describe('just oss dist', () => {
    const run = createRunner({
      config,
      log,
      buildOssDist: true,
    });
    it('runs global task once, passing config and log', async () => {
      const runTask = sinon.stub();
      await run({ global: true, run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array);
    });
    it('runs local tasks once, passing config log and oss build', async () => {
      const runTask = sinon.stub();
      await run({ run: runTask });
      sinon.assert.calledOnce(runTask);
      sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher);
    });
  });
  describe('task rejects', () => {
    const run = createRunner({
      config,
      log,
      buildOssDist: true,
    });
    it('rejects, logs error, and marks error logged', async () => {
      try {
        await run({
          async run() {
            throw new Error('FOO');
          },
        });
        throw new Error('expected run() to reject');
      } catch (error) {
        expect(error).to.have.property('message').be('FOO');
        sinon.assert.calledWith(onLogLine, sinon.match(/FOO/));
        expect(isErrorLogged(error)).to.be(true);
      }
    });
    // errors already marked as logged must not be logged a second time
    it('just rethrows errors that have already been logged', async () => {
      try {
        await run({
          async run() {
            throw markErrorLogged(new Error('FOO'));
          },
        });
        throw new Error('expected run() to reject');
      } catch (error) {
        expect(error).to.have.property('message').be('FOO');
        sinon.assert.neverCalledWith(onLogLine, sinon.match(/FOO/));
        expect(isErrorLogged(error)).to.be(true);
      }
    });
  });
});

View file

@ -1,70 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from '@kbn/expect';
import pkg from '../../../../../package.json';
import { getVersionInfo } from '../version_info';
// Tests for getVersionInfo(): derives buildVersion/buildSha/buildNumber from
// package.json plus the isRelease and versionQualifier flags.
describe('dev/build/lib/version_info', () => {
  describe('isRelease = true', () => {
    it('returns unchanged package.version, build sha, and build number', async () => {
      const info = await getVersionInfo({ isRelease: true, pkg });

      expect(info).to.have.property('buildVersion', pkg.version);
      expect(info)
        .to.have.property('buildSha')
        .match(/^[0-9a-f]{40}$/);
      expect(info).to.have.property('buildNumber').a('number').greaterThan(1000);
    });
  });

  describe('isRelease = false', () => {
    it('returns snapshot version, build sha, and build number', async () => {
      const info = await getVersionInfo({ isRelease: false, pkg });

      // non-release builds get a -SNAPSHOT suffix
      expect(info)
        .to.have.property('buildVersion')
        .contain(pkg.version)
        .match(/-SNAPSHOT$/);
      expect(info)
        .to.have.property('buildSha')
        .match(/^[0-9a-f]{40}$/);
      expect(info).to.have.property('buildNumber').a('number').greaterThan(1000);
    });
  });

  describe('versionQualifier', () => {
    it('appends a version qualifier', async () => {
      const info = await getVersionInfo({ isRelease: true, versionQualifier: 'beta55', pkg });

      expect(info)
        .to.have.property('buildVersion')
        .be(pkg.version + '-beta55');
    });
  });
});

View file

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import chalk from 'chalk';
/**
 * Create a build descriptor for either the OSS or the default distribution.
 * The returned object exposes path helpers that are all rooted at the repo
 * via the passed `config`.
 */
export function createBuild({ config, oss }) {
  // artifact name and the colored tag used to prefix this build's log lines
  const name = oss ? 'kibana-oss' : 'kibana';
  const logTag = oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`;

  class Build {
    isOss() {
      return !!oss;
    }

    // resolve a path inside build/<name>/
    resolvePath(...args) {
      return config.resolveFromRepo('build', name, ...args);
    }

    // resolve a path inside the platform-specific build directory
    resolvePathForPlatform(platform, ...args) {
      const flavor = oss ? 'oss' : 'default';
      const dirName = `kibana-${config.getBuildVersion()}-${platform.getBuildName()}`;
      return config.resolveFromRepo('build', flavor, dirName, ...args);
    }

    // path of the archive produced for the given platform (zip on Windows)
    getPlatformArchivePath(platform) {
      const ext = platform.isWindows() ? 'zip' : 'tar.gz';
      return config.resolveFromRepo(
        'target',
        `${name}-${config.getBuildVersion()}-${platform.getBuildName()}.${ext}`
      );
    }

    getName() {
      return name;
    }

    getLogTag() {
      return logTag;
    }
  }

  return new Build();
}

View file

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
import { Config } from './config';
import { Build } from './build';
// replace absolute repo paths with '<absolute path>' in snapshots
expect.addSnapshotSerializer(createAbsolutePathSerializer());

// minimal Config fixture; see the Config constructor for parameter meanings
const config = new Config(
  true,
  {
    version: '8.0.0',
    engines: {
      node: '*',
    },
    workspaces: {
      packages: [],
    },
  },
  '1.2.3',
  REPO_ROOT,
  {
    buildNumber: 1234,
    buildSha: 'abcd1234',
    buildVersion: '8.0.0',
  },
  true
);

const linuxPlatform = config.getPlatform('linux', 'x64');
const linuxArmPlatform = config.getPlatform('linux', 'arm64');
const windowsPlatform = config.getPlatform('win32', 'x64');

// NOTE(review): no jest mocks are created in the visible portion of this
// file — confirm clearAllMocks() is actually needed here
beforeEach(() => {
  jest.clearAllMocks();
});

const ossBuild = new Build(config, true);
const defaultBuild = new Build(config, false);
describe('#isOss()', () => {
it('returns true for oss', () => {
expect(ossBuild.isOss()).toBe(true);
});
it('returns false for default build', () => {
expect(defaultBuild.isOss()).toBe(false);
});
});
describe('#getName()', () => {
it('returns kibana for default build', () => {
expect(defaultBuild.getName()).toBe('kibana');
});
it('returns kibana-oss for oss', () => {
expect(ossBuild.getName()).toBe('kibana-oss');
});
});
describe('#getLogTag()', () => {
it('returns string with build name in it', () => {
expect(defaultBuild.getLogTag()).toContain(defaultBuild.getName());
expect(ossBuild.getLogTag()).toContain(ossBuild.getName());
});
});
describe('#resolvePath()', () => {
it('uses passed config to resolve a path relative to the repo', () => {
expect(ossBuild.resolvePath('bar')).toMatchInlineSnapshot(
`<absolute path>/build/kibana-oss/bar`
);
});
it('passes all arguments to config.resolveFromRepo()', () => {
expect(defaultBuild.resolvePath('bar', 'baz', 'box')).toMatchInlineSnapshot(
`<absolute path>/build/kibana/bar/baz/box`
);
});
});
describe('#resolvePathForPlatform()', () => {
it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => {
expect(ossBuild.resolvePathForPlatform(linuxPlatform, 'foo', 'bar')).toMatchInlineSnapshot(
`<absolute path>/build/oss/kibana-8.0.0-linux-x86_64/foo/bar`
);
});
});
describe('#getPlatformArchivePath()', () => {
it('creates correct path for different platforms', () => {
expect(ossBuild.getPlatformArchivePath(linuxPlatform)).toMatchInlineSnapshot(
`<absolute path>/target/kibana-oss-8.0.0-linux-x86_64.tar.gz`
);
expect(ossBuild.getPlatformArchivePath(linuxArmPlatform)).toMatchInlineSnapshot(
`<absolute path>/target/kibana-oss-8.0.0-linux-aarch64.tar.gz`
);
expect(ossBuild.getPlatformArchivePath(windowsPlatform)).toMatchInlineSnapshot(
`<absolute path>/target/kibana-oss-8.0.0-windows-x86_64.zip`
);
});
});

View file

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import chalk from 'chalk';
import { Config } from './config';
import { Platform } from './platform';
/**
 * Represents one of the two distributables produced by the build pipeline:
 * the OSS build ("kibana-oss") or the default build ("kibana").
 */
export class Build {
  private readonly name: string;
  private readonly logTag: string;

  constructor(private config: Config, private oss: boolean) {
    // both the directory/archive name and the colored log prefix are
    // determined entirely by which flavor this instance represents
    this.name = oss ? 'kibana-oss' : 'kibana';
    this.logTag = oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`;
  }

  isOss() {
    return Boolean(this.oss);
  }

  /** Resolve a path inside this build's output directory (e.g. build/kibana-oss) */
  resolvePath(...args: string[]) {
    return this.config.resolveFromRepo('build', this.name, ...args);
  }

  /** Resolve a path inside the versioned, platform-specific build directory */
  resolvePathForPlatform(platform: Platform, ...args: string[]) {
    const flavor = this.oss ? 'oss' : 'default';
    const versionedDir = `kibana-${this.config.getBuildVersion()}-${platform.getBuildName()}`;
    return this.config.resolveFromRepo('build', flavor, versionedDir, ...args);
  }

  /** Path of the distributable archive for `platform`: zip on Windows, tar.gz elsewhere */
  getPlatformArchivePath(platform: Platform) {
    const extension = platform.isWindows() ? 'zip' : 'tar.gz';
    const archiveName = `${this.name}-${this.config.getBuildVersion()}-${platform.getBuildName()}.${extension}`;
    return this.config.resolveFromRepo('target', archiveName);
  }

  getName() {
    return this.name;
  }

  getLogTag() {
    return this.logTag;
  }
}

View file

@ -1,168 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { dirname, resolve, relative } from 'path';
import os from 'os';
import { getVersionInfo } from './version_info';
import { createPlatform } from './platform';
/**
 * Build the Config object used throughout the build pipeline: loads the repo's
 * package.json, declares the supported target platforms, and resolves the
 * version info for this build.
 * @param {Object} options
 * @param {Boolean} options.isRelease
 * @param {Boolean} options.targetAllPlatforms
 * @param {String} [options.versionQualifier]
 * @return {Promise<Config>}
 */
export async function getConfig({ isRelease, targetAllPlatforms, versionQualifier }) {
  const pkgPath = resolve(__dirname, '../../../../package.json');
  const pkg = require(pkgPath); // eslint-disable-line import/no-dynamic-require
  const repoRoot = dirname(pkgPath);
  const nodeVersion = pkg.engines.node;

  // every platform the build system knows how to produce artifacts for
  const platforms = [
    createPlatform('linux', 'x64', 'linux-x86_64'),
    createPlatform('linux', 'arm64', 'linux-aarch64'),
    createPlatform('darwin', 'x64', 'darwin-x86_64'),
    createPlatform('win32', 'x64', 'windows-x86_64'),
  ];

  const versionInfo = await getVersionInfo({
    isRelease,
    versionQualifier,
    pkg,
  });

  // the anonymous class closes over the values computed above; a single
  // instance is returned to the caller
  return new (class Config {
    /**
     * Get Kibana's parsed package.json file
     * @return {Object}
     */
    getKibanaPkg() {
      return pkg;
    }

    /**
     * Is this a release build?
     * @return {Boolean}
     */
    isRelease() {
      return isRelease;
    }

    /**
     * Get the node version required by Kibana
     * @return {String}
     */
    getNodeVersion() {
      return nodeVersion;
    }

    /**
     * Convert an absolute path to a relative path, based from the repo
     * @param {String} absolutePath
     * @return {String}
     */
    getRepoRelativePath(absolutePath) {
      return relative(repoRoot, absolutePath);
    }

    /**
     * Resolve a set of relative paths based from the directory of the Kibana repo
     * @param {...String} ...subPaths
     * @return {String}
     */
    resolveFromRepo(...subPaths) {
      return resolve(repoRoot, ...subPaths);
    }

    /**
     * Return the list of Platforms we are targeting, if --this-platform flag is
     * specified only the platform for this OS will be returned
     * @return {Array<Platform>}
     */
    getTargetPlatforms() {
      if (targetAllPlatforms) {
        return platforms;
      }

      return [this.getPlatformForThisOs()];
    }

    /**
     * Return the list of Platforms we need/have node downloads for. We always
     * include the linux platform even if we aren't targeting linux so we can
     * reliably get the LICENSE file, which isn't included in the windows version
     * @return {Array<Platform>}
     */
    getNodePlatforms() {
      if (targetAllPlatforms) {
        return platforms;
      }

      if (process.platform === 'linux') {
        return [this.getPlatform('linux', 'x64')];
      }

      return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')];
    }

    /**
     * Find the platform with the given name and architecture, throwing if it
     * is not one of the platforms defined above
     * @param {String} name
     * @param {String} arch
     * @return {Platform}
     */
    getPlatform(name, arch) {
      const selected = platforms.find((p) => {
        return name === p.getName() && arch === p.getArchitecture();
      });

      if (!selected) {
        throw new Error(`Unable to find platform (${name}) with architecture (${arch})`);
      }

      return selected;
    }

    /**
     * Get the platform object representing the OS on this machine
     * @return {Platform}
     */
    getPlatformForThisOs() {
      return this.getPlatform(os.platform(), os.arch());
    }

    /**
     * Get the version to use for this build
     * @return {String}
     */
    getBuildVersion() {
      return versionInfo.buildVersion;
    }

    /**
     * Get the build number of this build
     * @return {Number}
     */
    getBuildNumber() {
      return versionInfo.buildNumber;
    }

    /**
     * Get the git sha for this build
     * @return {String}
     */
    getBuildSha() {
      return versionInfo.buildSha;
    }

    /**
     * Resolve a set of paths based from the target directory for this build.
     * @param {...String} ...subPaths
     * @return {String}
     */
    resolveFromTarget(...subPaths) {
      return resolve(repoRoot, 'target', ...subPaths);
    }
  })();
}

View file

@ -0,0 +1,201 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { resolve } from 'path';
import { createAbsolutePathSerializer, REPO_ROOT } from '@kbn/dev-utils';
import pkg from '../../../../package.json';
import { Config } from './config';
// Mock the version lookup so build info is deterministic in tests.
jest.mock('./version_info', () => ({
  getVersionInfo: () => ({
    buildSha: 'abc1234',
    buildVersion: '8.0.0',
    buildNumber: 1234,
  }),
}));

// Pull the mocked values back out so assertions can reference them directly.
const versionInfo = jest.requireMock('./version_info').getVersionInfo();

// Render absolute repo paths as "<absolute path>" in inline snapshots.
expect.addSnapshotSerializer(createAbsolutePathSerializer());

// Create a Config through the real factory; only targetAllPlatforms varies.
const setup = async ({ targetAllPlatforms = true }: { targetAllPlatforms?: boolean } = {}) => {
  return await Config.create({
    isRelease: true,
    targetAllPlatforms,
  });
};

describe('#getKibanaPkg()', () => {
  it('returns the parsed package.json from the Kibana repo', async () => {
    const config = await setup();
    expect(config.getKibanaPkg()).toEqual(pkg);
  });
});

describe('#getNodeVersion()', () => {
  it('returns the node version from the kibana package.json', async () => {
    const config = await setup();
    expect(config.getNodeVersion()).toEqual(pkg.engines.node);
  });
});

describe('#getRepoRelativePath()', () => {
  it('converts an absolute path to relative path, from the root of the repo', async () => {
    const config = await setup();
    expect(config.getRepoRelativePath(__dirname)).toMatchInlineSnapshot(`"src/dev/build/lib"`);
  });
});

describe('#resolveFromRepo()', () => {
  it('resolves a relative path', async () => {
    const config = await setup();
    expect(config.resolveFromRepo('src/dev/build')).toMatchInlineSnapshot(
      `<absolute path>/src/dev/build`
    );
  });

  it('resolves a series of relative paths', async () => {
    const config = await setup();
    expect(config.resolveFromRepo('src', 'dev', 'build')).toMatchInlineSnapshot(
      `<absolute path>/src/dev/build`
    );
  });
});

describe('#getPlatform()', () => {
  it('throws error when platform does not exist', async () => {
    const config = await setup();
    expect(() => {
      config.getPlatform(
        // @ts-expect-error invalid platform name
        'foo',
        'x64'
      );
    }).toThrowErrorMatchingInlineSnapshot(
      `"Unable to find platform (foo) with architecture (x64)"`
    );
  });

  it('throws error when architecture does not exist', async () => {
    const config = await setup();
    expect(() => {
      config.getPlatform(
        'linux',
        // @ts-expect-error invalid platform arch
        'foo'
      );
    }).toThrowErrorMatchingInlineSnapshot(
      `"Unable to find platform (linux) with architecture (foo)"`
    );
  });
});

describe('#getTargetPlatforms()', () => {
  it('returns an array of all platform objects', async () => {
    const config = await setup();
    expect(
      config
        .getTargetPlatforms()
        .map((p) => p.getNodeArch())
        .sort()
    ).toMatchInlineSnapshot(`
      Array [
        "darwin-x64",
        "linux-arm64",
        "linux-x64",
        "win32-x64",
      ]
    `);
  });

  it('returns just this platform when targetAllPlatforms = false', async () => {
    const config = await setup({
      targetAllPlatforms: false,
    });
    expect(config.getTargetPlatforms()).toEqual([config.getPlatformForThisOs()]);
  });
});

describe('#getNodePlatforms()', () => {
  it('returns all platforms', async () => {
    const config = await setup();
    expect(
      config
        .getTargetPlatforms()
        .map((p) => p.getNodeArch())
        .sort()
    ).toEqual(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
  });

  it('returns this platform and linux, when targetAllPlatforms = false', async () => {
    const config = await setup({
      targetAllPlatforms: false,
    });
    const platforms = config.getNodePlatforms();
    expect(platforms).toBeInstanceOf(Array);
    // linux x64 is always included so the LICENSE file can be sourced from it
    if (process.platform !== 'linux') {
      expect(platforms).toHaveLength(2);
      expect(platforms[0]).toBe(config.getPlatformForThisOs());
      expect(platforms[1]).toBe(config.getPlatform('linux', 'x64'));
    } else {
      expect(platforms).toHaveLength(1);
      expect(platforms[0]).toBe(config.getPlatform('linux', 'x64'));
    }
  });
});

describe('#getPlatformForThisOs()', () => {
  it('returns the platform that matches the arch of this machine', async () => {
    const config = await setup();
    const currentPlatform = config.getPlatformForThisOs();
    expect(currentPlatform.getName()).toBe(process.platform);
    expect(currentPlatform.getArchitecture()).toBe(process.arch);
  });
});

describe('#getBuildVersion()', () => {
  it('returns the version from the build info', async () => {
    const config = await setup();
    expect(config.getBuildVersion()).toBe(versionInfo.buildVersion);
  });
});

describe('#getBuildNumber()', () => {
  it('returns the number from the build info', async () => {
    const config = await setup();
    expect(config.getBuildNumber()).toBe(versionInfo.buildNumber);
  });
});

describe('#getBuildSha()', () => {
  it('returns the sha from the build info', async () => {
    const config = await setup();
    expect(config.getBuildSha()).toBe(versionInfo.buildSha);
  });
});

describe('#resolveFromTarget()', () => {
  it('resolves a relative path, from the target directory', async () => {
    const config = await setup();
    expect(config.resolveFromTarget()).toBe(resolve(REPO_ROOT, 'target'));
  });
});

173
src/dev/build/lib/config.ts Normal file
View file

@ -0,0 +1,173 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { dirname, resolve, relative } from 'path';
import os from 'os';
import loadJsonFile from 'load-json-file';
import { getVersionInfo, VersionInfo } from './version_info';
import { PlatformName, PlatformArchitecture, ALL_PLATFORMS } from './platform';
interface Options {
isRelease: boolean;
targetAllPlatforms: boolean;
versionQualifier?: string;
}
interface Package {
version: string;
engines: { node: string };
workspaces: {
packages: string[];
};
[key: string]: unknown;
}
export class Config {
static async create({ isRelease, targetAllPlatforms, versionQualifier }: Options) {
const pkgPath = resolve(__dirname, '../../../../package.json');
const pkg: Package = loadJsonFile.sync(pkgPath);
return new Config(
targetAllPlatforms,
pkg,
pkg.engines.node,
dirname(pkgPath),
await getVersionInfo({
isRelease,
versionQualifier,
pkg,
}),
isRelease
);
}
constructor(
private readonly targetAllPlatforms: boolean,
private readonly pkg: Package,
private readonly nodeVersion: string,
private readonly repoRoot: string,
private readonly versionInfo: VersionInfo,
public readonly isRelease: boolean
) {}
/**
* Get Kibana's parsed package.json file
*/
getKibanaPkg() {
return this.pkg;
}
/**
* Get the node version required by Kibana
*/
getNodeVersion() {
return this.nodeVersion;
}
/**
* Convert an absolute path to a relative path, based from the repo
*/
getRepoRelativePath(absolutePath: string) {
return relative(this.repoRoot, absolutePath);
}
/**
* Resolve a set of relative paths based from the directory of the Kibana repo
*/
resolveFromRepo(...subPaths: string[]) {
return resolve(this.repoRoot, ...subPaths);
}
/**
* Return the list of Platforms we are targeting, if --this-platform flag is
* specified only the platform for this OS will be returned
*/
getTargetPlatforms() {
if (this.targetAllPlatforms) {
return ALL_PLATFORMS;
}
return [this.getPlatformForThisOs()];
}
/**
* Return the list of Platforms we need/have node downloads for. We always
* include the linux platform even if we aren't targeting linux so we can
* reliably get the LICENSE file, which isn't included in the windows version
*/
getNodePlatforms() {
if (this.targetAllPlatforms) {
return ALL_PLATFORMS;
}
if (process.platform === 'linux') {
return [this.getPlatform('linux', 'x64')];
}
return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')];
}
getPlatform(name: PlatformName, arch: PlatformArchitecture) {
const selected = ALL_PLATFORMS.find((p) => {
return name === p.getName() && arch === p.getArchitecture();
});
if (!selected) {
throw new Error(`Unable to find platform (${name}) with architecture (${arch})`);
}
return selected;
}
/**
* Get the platform object representing the OS on this machine
*/
getPlatformForThisOs() {
return this.getPlatform(os.platform() as PlatformName, os.arch() as PlatformArchitecture);
}
/**
* Get the version to use for this build
*/
getBuildVersion() {
return this.versionInfo.buildVersion;
}
/**
* Get the build number of this build
*/
getBuildNumber() {
return this.versionInfo.buildNumber;
}
/**
* Get the git sha for this build
*/
getBuildSha() {
return this.versionInfo.buildSha;
}
/**
* Resolve a set of paths based from the target directory for this build.
*/
resolveFromTarget(...subPaths: string[]) {
return resolve(this.repoRoot, 'target', ...subPaths);
}
}

View file

@ -23,10 +23,15 @@ import { dirname } from 'path';
import chalk from 'chalk';
import { createHash } from 'crypto';
import Axios from 'axios';
import { ToolingLog } from '@kbn/dev-utils';
// https://github.com/axios/axios/tree/ffea03453f77a8176c51554d5f6c3c6829294649/lib/adapters
// @ts-expect-error untyped internal module used to prevent axios from using xhr adapter in tests
import AxiosHttpAdapter from 'axios/lib/adapters/http';
import { mkdirp } from './fs';
function tryUnlink(path) {
function tryUnlink(path: string) {
try {
unlinkSync(path);
} catch (error) {
@ -36,7 +41,14 @@ function tryUnlink(path) {
}
}
export async function download(options) {
interface DownloadOptions {
log: ToolingLog;
url: string;
destination: string;
sha256: string;
retries?: number;
}
export async function download(options: DownloadOptions): Promise<void> {
const { log, url, destination, sha256, retries = 0 } = options;
if (!sha256) {
@ -52,8 +64,9 @@ export async function download(options) {
log.debug(`Attempting download of ${url}`, chalk.dim(sha256));
const response = await Axios.request({
url: url,
url,
responseType: 'stream',
adapter: AxiosHttpAdapter,
});
if (response.status !== 200) {
@ -62,7 +75,7 @@ export async function download(options) {
const hash = createHash('sha256');
await new Promise((resolve, reject) => {
response.data.on('data', (chunk) => {
response.data.on('data', (chunk: Buffer) => {
hash.update(chunk);
writeSync(fileHandle, chunk);
});

View file

@ -17,28 +17,26 @@
* under the License.
*/
import expect from '@kbn/expect';
import { isErrorLogged, markErrorLogged } from '../errors';
import { isErrorLogged, markErrorLogged } from './errors';
describe('dev/build/lib/errors', () => {
describe('isErrorLogged()/markErrorLogged()', () => {
it('returns true if error has been passed to markErrorLogged()', () => {
const error = new Error();
expect(isErrorLogged(error)).to.be(false);
expect(isErrorLogged(error)).toBe(false);
markErrorLogged(error);
expect(isErrorLogged(error)).to.be(true);
expect(isErrorLogged(error)).toBe(true);
});
describe('isErrorLogged()', () => {
it('handles any value type', () => {
expect(isErrorLogged(null)).to.be(false);
expect(isErrorLogged(undefined)).to.be(false);
expect(isErrorLogged(1)).to.be(false);
expect(isErrorLogged([])).to.be(false);
expect(isErrorLogged({})).to.be(false);
expect(isErrorLogged(/foo/)).to.be(false);
expect(isErrorLogged(new Date())).to.be(false);
expect(isErrorLogged(null)).toBe(false);
expect(isErrorLogged(undefined)).toBe(false);
expect(isErrorLogged(1)).toBe(false);
expect(isErrorLogged([])).toBe(false);
expect(isErrorLogged({})).toBe(false);
expect(isErrorLogged(/foo/)).toBe(false);
expect(isErrorLogged(new Date())).toBe(false);
});
});
});

View file

@ -17,13 +17,13 @@
* under the License.
*/
const loggedErrors = new WeakSet();
const loggedErrors = new WeakSet<any>();
export function markErrorLogged(error) {
export function markErrorLogged<T = any>(error: T): T {
loggedErrors.add(error);
return error;
}
export function isErrorLogged(error) {
export function isErrorLogged(error: any) {
return loggedErrors.has(error);
}

View file

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import {
ToolingLog,
ToolingLogCollectingWriter,
createStripAnsiSerializer,
createRecursiveSerializer,
} from '@kbn/dev-utils';
import { exec } from './exec';
// Collect log output in memory so tests can assert on the exact messages.
const testWriter = new ToolingLogCollectingWriter();
const log = new ToolingLog();
log.setWriters([testWriter]);

// Strip ANSI color codes from snapshot values.
expect.addSnapshotSerializer(createStripAnsiSerializer());

// Replace the node binary's directory with "<nodedir>" so snapshots do not
// depend on where node is installed on the machine running the tests.
expect.addSnapshotSerializer(
  createRecursiveSerializer(
    (v) => v.includes(process.execPath),
    (v) => v.split(Path.dirname(process.execPath)).join('<nodedir>')
  )
);

beforeEach(() => {
  // drop messages captured by previous tests
  testWriter.messages.length = 0;
});

it('executes a command, logs the command, and logs the output', async () => {
  await exec(log, process.execPath, ['-e', 'console.log("hi")']);
  expect(testWriter.messages).toMatchInlineSnapshot(`
    Array [
      " debg $ <nodedir>/node -e console.log(\\"hi\\")",
      " debg hi",
    ]
  `);
});

it('logs using level: option', async () => {
  await exec(log, process.execPath, ['-e', 'console.log("hi")'], {
    level: 'info',
  });
  expect(testWriter.messages).toMatchInlineSnapshot(`
    Array [
      " info $ <nodedir>/node -e console.log(\\"hi\\")",
      " info hi",
    ]
  `);
});

View file

@ -19,12 +19,23 @@
import execa from 'execa';
import chalk from 'chalk';
import { ToolingLog, LogLevel } from '@kbn/dev-utils';
import { watchStdioForLine } from '../../../legacy/utils';
import { watchStdioForLine } from './watch_stdio_for_line';
export async function exec(log, cmd, args, options = {}) {
const { level = 'debug', cwd, env, exitAfter } = options;
interface Options {
level?: Exclude<LogLevel, 'silent' | 'error'>;
cwd?: string;
env?: Record<string, string>;
exitAfter?: RegExp;
}
export async function exec(
log: ToolingLog,
cmd: string,
args: string[],
{ level = 'debug', cwd, env, exitAfter }: Options = {}
) {
log[level](chalk.dim('$'), cmd, ...args);
const proc = execa(cmd, args, {

View file

@ -17,28 +17,31 @@
* under the License.
*/
import archiver from 'archiver';
import fs from 'fs';
import { createHash } from 'crypto';
import { pipeline, Writable } from 'stream';
import { resolve, dirname, isAbsolute, sep } from 'path';
import { createGunzip } from 'zlib';
import { inspect } from 'util';
import { inspect, promisify } from 'util';
import archiver from 'archiver';
import vfs from 'vinyl-fs';
import { promisify } from 'bluebird';
import File from 'vinyl';
import del from 'del';
import deleteEmpty from 'delete-empty';
import { createPromiseFromStreams, createMapStream } from '../../../legacy/utils';
import tar from 'tar';
import tar, { ExtractOptions } from 'tar';
import { ToolingLog } from '@kbn/dev-utils';
const pipelineAsync = promisify(pipeline);
const mkdirAsync = promisify(fs.mkdir);
const writeFileAsync = promisify(fs.writeFile);
const readFileAsync = promisify(fs.readFile);
const readdirAsync = promisify(fs.readdir);
const utimesAsync = promisify(fs.utimes);
const copyFileAsync = promisify(fs.copyFile);
const statAsync = promisify(fs.stat);
export function assertAbsolute(path) {
export function assertAbsolute(path: string) {
if (!isAbsolute(path)) {
throw new TypeError(
'Please use absolute paths to keep things explicit. You probably want to use `build.resolvePath()` or `config.resolveFromRepo()`.'
@ -46,7 +49,7 @@ export function assertAbsolute(path) {
}
}
export function isFileAccessible(path) {
export function isFileAccessible(path: string) {
assertAbsolute(path);
try {
@ -57,35 +60,35 @@ export function isFileAccessible(path) {
}
}
function longInspect(value) {
function longInspect(value: any) {
return inspect(value, {
maxArrayLength: Infinity,
});
}
export async function mkdirp(path) {
export async function mkdirp(path: string) {
assertAbsolute(path);
await mkdirAsync(path, { recursive: true });
}
export async function write(path, contents) {
export async function write(path: string, contents: string) {
assertAbsolute(path);
await mkdirp(dirname(path));
await writeFileAsync(path, contents);
}
export async function read(path) {
export async function read(path: string) {
assertAbsolute(path);
return await readFileAsync(path, 'utf8');
}
export async function getChildPaths(path) {
export async function getChildPaths(path: string) {
assertAbsolute(path);
const childNames = await readdirAsync(path);
return childNames.map((name) => resolve(path, name));
}
export async function deleteAll(patterns, log) {
export async function deleteAll(patterns: string[], log: ToolingLog) {
if (!Array.isArray(patterns)) {
throw new TypeError('Expected patterns to be an array');
}
@ -108,7 +111,11 @@ export async function deleteAll(patterns, log) {
}
}
export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
export async function deleteEmptyFolders(
log: ToolingLog,
rootFolderPath: string,
foldersToKeep: string[]
) {
if (typeof rootFolderPath !== 'string') {
throw new TypeError('Expected root folder to be a string path');
}
@ -121,7 +128,11 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
// Delete empty is used to gather all the empty folders and
// then we use del to actually delete them
const emptyFoldersList = await deleteEmpty(rootFolderPath, { dryRun: true });
const emptyFoldersList = await deleteEmpty(rootFolderPath, {
// @ts-expect-error DT package has incorrect types https://github.com/jonschlinkert/delete-empty/blob/6ae34547663e6845c3c98b184c606fa90ef79c0a/index.js#L160
dryRun: true,
});
const foldersToDelete = emptyFoldersList.filter((folderToDelete) => {
return !foldersToKeep.some((folderToKeep) => folderToDelete.includes(folderToKeep));
});
@ -133,85 +144,153 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) {
log.verbose('Deleted:', longInspect(deletedEmptyFolders));
}
export async function copyAll(sourceDir, destination, options = {}) {
const { select = ['**/*'], dot = false, time } = options;
interface CopyOptions {
  clone?: boolean;
}

/**
 * Copy a single file from `source` to `destination`, creating the destination
 * directory if needed. With `clone: true` a copy-on-write clone is requested
 * via fs.constants.COPYFILE_FICLONE.
 */
export async function copy(source: string, destination: string, options: CopyOptions = {}) {
  assertAbsolute(source);
  assertAbsolute(destination);

  // stat the source first so a missing file fails before we touch the
  // destination directory tree
  await statAsync(source);
  await mkdirp(dirname(destination));

  const flags = options.clone ? fs.constants.COPYFILE_FICLONE : 0;
  return await copyFileAsync(source, destination, flags);
}
interface CopyAllOptions {
select?: string[];
dot?: boolean;
time?: string | number | Date;
}
export async function copyAll(
sourceDir: string,
destination: string,
options: CopyAllOptions = {}
) {
const { select = ['**/*'], dot = false, time = Date.now() } = options;
assertAbsolute(sourceDir);
assertAbsolute(destination);
await createPromiseFromStreams([
await pipelineAsync(
vfs.src(select, {
buffer: false,
cwd: sourceDir,
base: sourceDir,
dot,
}),
vfs.dest(destination),
]);
vfs.dest(destination)
);
// we must update access and modified file times after the file copy
// has completed, otherwise the copy action can effect modify times.
if (Boolean(time)) {
await createPromiseFromStreams([
await pipelineAsync(
vfs.src(select, {
buffer: false,
cwd: destination,
base: destination,
dot,
}),
createMapStream((file) => utimesAsync(file.path, time, time)),
]);
new Writable({
objectMode: true,
write(file: File, _, cb) {
utimesAsync(file.path, time, time).then(() => cb(), cb);
},
})
);
}
}
export async function getFileHash(path, algo) {
export async function getFileHash(path: string, algo: string) {
assertAbsolute(path);
const hash = createHash(algo);
const readStream = fs.createReadStream(path);
await new Promise((resolve, reject) => {
await new Promise((res, rej) => {
readStream
.on('data', (chunk) => hash.update(chunk))
.on('error', reject)
.on('end', resolve);
.on('error', rej)
.on('end', res);
});
return hash.digest('hex');
}
export async function untar(source, destination, extractOptions = {}) {
export async function untar(
source: string,
destination: string,
extractOptions: ExtractOptions = {}
) {
assertAbsolute(source);
assertAbsolute(destination);
await mkdirAsync(destination, { recursive: true });
await createPromiseFromStreams([
await pipelineAsync(
fs.createReadStream(source),
createGunzip(),
tar.extract({
...extractOptions,
cwd: destination,
}),
]);
})
);
}
export async function gunzip(source, destination) {
export async function gunzip(source: string, destination: string) {
assertAbsolute(source);
assertAbsolute(destination);
await mkdirAsync(dirname(destination), { recursive: true });
await createPromiseFromStreams([
await pipelineAsync(
fs.createReadStream(source),
createGunzip(),
fs.createWriteStream(destination),
]);
fs.createWriteStream(destination)
);
}
export async function compress(type, options = {}, source, destination) {
interface CompressTarOptions {
  createRootDirectory: boolean;
  source: string;
  destination: string;
  archiverOptions?: archiver.TarOptions & archiver.CoreOptions;
}

/**
 * Create a tar archive at `destination` containing the contents of `source`.
 * When `createRootDirectory` is true the entries are nested under a directory
 * named after the final path segment of `source`.
 */
export async function compressTar({
  source,
  destination,
  archiverOptions,
  createRootDirectory,
}: CompressTarOptions) {
  const archive = archiver('tar', archiverOptions);
  archive.pipe(fs.createWriteStream(destination));

  // passing `false` tells archiver to place entries at the archive root
  const rootDirectory = createRootDirectory ? source.split(sep).slice(-1)[0] : false;
  return archive.directory(source, rootDirectory).finalize();
}
interface CompressZipOptions {
createRootDirectory: boolean;
source: string;
destination: string;
archiverOptions?: archiver.ZipOptions & archiver.CoreOptions;
}
export async function compressZip({
source,
destination,
archiverOptions,
createRootDirectory,
}: CompressZipOptions) {
const output = fs.createWriteStream(destination);
const archive = archiver('zip', archiverOptions);
const name = createRootDirectory ? source.split(sep).slice(-1)[0] : false;
archive.pipe(output);

View file

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { getConfig } from './config';
export { createRunner } from './runner';
export { isErrorLogged } from './errors';
export { exec } from './exec';
export {
read,
write,
mkdirp,
copyAll,
getFileHash,
untar,
gunzip,
deleteAll,
deleteEmptyFolders,
compress,
isFileAccessible,
} from './fs';
export { download } from './download';
export { scanDelete } from './scan_delete';
export { scanCopy } from './scan_copy';

View file

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Barrel file: re-exports the public API of the build lib so consumers can
// import every helper from a single path.
export * from './config';
export * from './build';
export * from './runner';
export * from './errors';
export * from './exec';
export * from './fs';
export * from './download';
export * from './scan_delete';
export * from './scan_copy';
export * from './platform';
export * from './scan';

View file

@ -0,0 +1,226 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { createServer, IncomingMessage, ServerResponse } from 'http';
import { join } from 'path';
import { tmpdir } from 'os';
import { readFileSync } from 'fs';
import del from 'del';
import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test';
import { ToolingLog } from '@kbn/dev-utils';
import { mkdirp } from '../fs';
import { download } from '../download';
const TMP_DIR = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir');
const TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__');
beforeEach(async () => {
await del(TMP_DIR, { force: true });
await mkdirp(TMP_DIR);
jest.clearAllMocks();
});
afterEach(async () => {
await del(TMP_DIR, { force: true });
});
const onLogLine = jest.fn();
const log = new ToolingLog({
level: 'verbose',
writeTo: {
write: onLogLine,
},
});
// shape of the minimal request handlers used to script the test server
type Handler = (req: IncomingMessage, res: ServerResponse) => void;

// sha256 checksum of the literal string 'foo'
const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';

// creates a handler that responds 200 with `send` as the body
// (narrowed from `any` to what res.end() accepts and callers pass)
const createSendHandler = (send: string | Buffer): Handler => (req, res) => {
  res.statusCode = 200;
  res.end(send);
};

// handler that always fails with a 500 and an empty body
const sendErrorHandler: Handler = (req, res) => {
  res.statusCode = 500;
  res.end();
};
let serverUrl: string;
let nextHandler: Handler | null = null;
const server = createServer((req, res) => {
if (!nextHandler) {
nextHandler = sendErrorHandler;
}
const handler = nextHandler;
nextHandler = null;
handler(req, res);
});
afterEach(() => (nextHandler = null));
beforeAll(async () => {
await Promise.race([
new Promise((_, reject) => {
server.once('error', reject);
}),
new Promise((resolve) => {
server.listen(resolve);
}),
]);
// address is only a string when listening to a UNIX socket, and undefined when we haven't called listen() yet
const address = server.address() as { port: number };
serverUrl = `http://localhost:${address.port}/`;
});
afterAll(async () => {
server.close();
});
it('downloads from URL and checks that content matches sha256', async () => {
nextHandler = createSendHandler('foo');
await download({
log,
url: serverUrl,
destination: TMP_DESTINATION,
sha256: FOO_SHA256,
});
expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo');
});
it('rejects and deletes destination if sha256 does not match', async () => {
nextHandler = createSendHandler('foo');
try {
await download({
log,
url: serverUrl,
destination: TMP_DESTINATION,
sha256: 'bar',
});
throw new Error('Expected download() to reject');
} catch (error) {
expect(error).toHaveProperty(
'message',
expect.stringContaining('does not match the expected sha256 checksum')
);
}
try {
readFileSync(TMP_DESTINATION);
throw new Error('Expected download to be deleted');
} catch (error) {
expect(error).toHaveProperty('code', 'ENOENT');
}
});
// verifies the `retries: number` option re-issues failed requests
// (fixed typo: "reties" -> "retries" in the describe title)
describe('retries download retries: number of times', () => {
  it('resolves if retries = 1 and first attempt fails', async () => {
    let reqCount = 0;
    nextHandler = function sequenceHandler(req, res) {
      switch (++reqCount) {
        case 1:
          // re-arm ourselves so the retry is also routed here
          nextHandler = sequenceHandler;
          return sendErrorHandler(req, res);
        default:
          return createSendHandler('foo')(req, res);
      }
    };

    await download({
      log,
      url: serverUrl,
      destination: TMP_DESTINATION,
      sha256: FOO_SHA256,
      retries: 2,
    });

    expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo');
  });

  it('resolves if first fails, second is bad shasum, but third succeeds', async () => {
    let reqCount = 0;
    nextHandler = function sequenceHandler(req, res) {
      switch (++reqCount) {
        case 1:
          nextHandler = sequenceHandler;
          return sendErrorHandler(req, res);
        case 2:
          // wrong content, so the sha256 check fails and triggers a retry
          nextHandler = sequenceHandler;
          return createSendHandler('bar')(req, res);
        default:
          return createSendHandler('foo')(req, res);
      }
    };

    await download({
      log,
      url: serverUrl,
      destination: TMP_DESTINATION,
      sha256: FOO_SHA256,
      retries: 2,
    });
  });

  it('makes 6 requests if `retries: 5` and all failed', async () => {
    let reqCount = 0;
    nextHandler = function sequenceHandler(req, res) {
      reqCount += 1;
      nextHandler = sequenceHandler;
      sendErrorHandler(req, res);
    };

    try {
      await download({
        log,
        url: serverUrl,
        destination: TMP_DESTINATION,
        sha256: FOO_SHA256,
        retries: 5,
      });
      throw new Error('Expected download() to reject');
    } catch (error) {
      expect(error).toHaveProperty(
        'message',
        expect.stringContaining('Request failed with status code 500')
      );
      // initial attempt + 5 retries
      expect(reqCount).toBe(6);
    }
  });
});
describe('sha256 option not supplied', () => {
it('refuses to download', async () => {
try {
// @ts-expect-error missing sha256 param is intentional
await download({
log,
url: 'http://google.com',
destination: TMP_DESTINATION,
});
throw new Error('expected download() to reject');
} catch (error) {
expect(error).toHaveProperty('message', expect.stringContaining('refusing to download'));
}
});
});

View file

@ -0,0 +1,358 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { resolve } from 'path';
import { chmodSync, statSync } from 'fs';
import del from 'del';
import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs';
const TMP = resolve(__dirname, '../__tmp__');
const FIXTURES = resolve(__dirname, '../__fixtures__');
const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz');
const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz');
const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt');
const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
const isWindows = /^win/.test(process.platform);
// get the mode of a file as a three-digit octal string, e.g. '777' or '644'
function getCommonMode(path: string) {
  return statSync(path).mode.toString(8).slice(-3);
}

// asserts that `error` is the rejection produced when a relative path is
// passed to one of the fs helpers under test
function assertNonAbsoluteError(error: any) {
  expect(error).toBeInstanceOf(Error);
  expect(error.message).toContain('Please use absolute paths');
}
// ensure WORLD_EXECUTABLE is actually executable by all
beforeAll(async () => {
chmodSync(WORLD_EXECUTABLE, 0o777);
});
// clean and recreate TMP directory
beforeEach(async () => {
await del(TMP);
await mkdirp(TMP);
});
// cleanup TMP directory
afterAll(async () => {
await del(TMP);
});
describe('mkdirp()', () => {
it('rejects if path is not absolute', async () => {
try {
await mkdirp('foo/bar');
throw new Error('Expected mkdirp() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('makes directory and necessary parent directories', async () => {
const destination = resolve(TMP, 'a/b/c/d/e/f/g');
expect(await mkdirp(destination)).toBe(undefined);
expect(statSync(destination).isDirectory()).toBe(true);
});
});
describe('write()', () => {
it('rejects if path is not absolute', async () => {
try {
// @ts-expect-error missing content intentional
await write('foo/bar');
throw new Error('Expected write() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('writes content to a file with existing parent directory', async () => {
const destination = resolve(TMP, 'a');
expect(await write(destination, 'bar')).toBe(undefined);
expect(await read(destination)).toBe('bar');
});
it('writes content to a file with missing parents', async () => {
const destination = resolve(TMP, 'a/b/c/d/e');
expect(await write(destination, 'bar')).toBe(undefined);
expect(await read(destination)).toBe('bar');
});
});
describe('read()', () => {
it('rejects if path is not absolute', async () => {
try {
await read('foo/bar');
throw new Error('Expected read() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('reads file, resolves with result', async () => {
expect(await read(BAR_TXT_PATH)).toBe('bar\n');
});
});
describe('getChildPaths()', () => {
it('rejects if path is not absolute', async () => {
try {
await getChildPaths('foo/bar');
throw new Error('Expected getChildPaths() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('resolves with absolute paths to the children of directory', async () => {
const path = resolve(FIXTURES, 'foo_dir');
expect((await getChildPaths(path)).sort()).toEqual([
resolve(FIXTURES, 'foo_dir/.bar'),
BAR_TXT_PATH,
resolve(FIXTURES, 'foo_dir/foo'),
]);
});
it('rejects with ENOENT if path does not exist', async () => {
try {
await getChildPaths(resolve(FIXTURES, 'notrealpath'));
throw new Error('Expected getChildPaths() to reject');
} catch (error) {
expect(error).toHaveProperty('code', 'ENOENT');
}
});
});
describe('copyAll()', () => {
it('rejects if source path is not absolute', async () => {
try {
await copyAll('foo/bar', __dirname);
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if destination path is not absolute', async () => {
try {
await copyAll(__dirname, 'foo/bar');
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if neither path is not absolute', async () => {
try {
await copyAll('foo/bar', 'foo/bar');
throw new Error('Expected copyAll() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => {
const destination = resolve(TMP, 'a/b/c');
await copyAll(FIXTURES, destination);
expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).toEqual([
resolve(destination, 'foo_dir/bar.txt'),
resolve(destination, 'foo_dir/foo'),
]);
expect(getCommonMode(resolve(destination, 'bin/world_executable'))).toBe(
isWindows ? '666' : '777'
);
expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).toBe(isWindows ? '666' : '644');
});
it('applies select globs if specified, ignores dot files', async () => {
const destination = resolve(TMP, 'a/b/c/d');
await copyAll(FIXTURES, destination, {
select: ['**/*bar*'],
});
try {
statSync(resolve(destination, 'bin/world_executable'));
throw new Error('expected bin/world_executable to not by copied');
} catch (error) {
expect(error).toHaveProperty('code', 'ENOENT');
}
try {
statSync(resolve(destination, 'foo_dir/.bar'));
throw new Error('expected foo_dir/.bar to not by copied');
} catch (error) {
expect(error).toHaveProperty('code', 'ENOENT');
}
expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
});
it('supports select globs and dot option together', async () => {
const destination = resolve(TMP, 'a/b/c/d');
await copyAll(FIXTURES, destination, {
select: ['**/*bar*'],
dot: true,
});
try {
statSync(resolve(destination, 'bin/world_executable'));
throw new Error('expected bin/world_executable to not by copied');
} catch (error) {
expect(error).toHaveProperty('code', 'ENOENT');
}
expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
expect(await read(resolve(destination, 'foo_dir/.bar'))).toBe('dotfile\n');
});
it('supports atime and mtime', async () => {
const destination = resolve(TMP, 'a/b/c/d/e');
const time = new Date(1425298511000);
await copyAll(FIXTURES, destination, {
time,
});
const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt'));
const fooDir = statSync(resolve(destination, 'foo_dir'));
// precision is platform specific
const oneDay = 86400000;
expect(Math.abs(barTxt.atimeMs - time.getTime())).toBeLessThan(oneDay);
expect(Math.abs(fooDir.atimeMs - time.getTime())).toBeLessThan(oneDay);
expect(Math.abs(barTxt.mtimeMs - time.getTime())).toBeLessThan(oneDay);
});
});
describe('getFileHash()', () => {
it('rejects if path is not absolute', async () => {
try {
await getFileHash('foo/bar', 'some content');
throw new Error('Expected getFileHash() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('resolves with the sha1 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'sha1')).toBe(
'e242ed3bffccdf271b7fbaf34ed72d089537b42f'
);
});
it('resolves with the sha256 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'sha256')).toBe(
'7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
);
});
it('resolves with the md5 hash of a file', async () => {
expect(await getFileHash(BAR_TXT_PATH, 'md5')).toBe('c157a79031e1c40f85931829bc5fc552');
});
});
// coverage for the untar() helper
// (fixed typos in test titles: "tarbar" -> "tarball", "passed thrid" -> "passes third")
describe('untar()', () => {
  it('rejects if source path is not absolute', async () => {
    try {
      await untar('foo/bar', '**/*');
      throw new Error('Expected untar() to reject');
    } catch (error) {
      assertNonAbsoluteError(error);
    }
  });

  it('rejects if destination path is not absolute', async () => {
    try {
      await untar(__dirname, '**/*');
      throw new Error('Expected untar() to reject');
    } catch (error) {
      assertNonAbsoluteError(error);
    }
  });

  it('rejects if neither path is not absolute', async () => {
    try {
      await untar('foo/bar', '**/*');
      throw new Error('Expected untar() to reject');
    } catch (error) {
      assertNonAbsoluteError(error);
    }
  });

  it('extracts tarball from source into destination, creating destination if necessary', async () => {
    const destination = resolve(TMP, 'a/b/c/d/e/f');
    await untar(FOO_TAR_PATH, destination);
    expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n');
    expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).toBe('foo\n');
  });

  it('passes third argument to Extract class, overriding path with destination', async () => {
    const destination = resolve(TMP, 'a/b/c');
    await untar(FOO_TAR_PATH, destination, {
      path: '/dev/null',
      strip: 1,
    });
    expect(await read(resolve(destination, 'bar.txt'))).toBe('bar\n');
    expect(await read(resolve(destination, 'foo/foo.txt'))).toBe('foo\n');
  });
});
describe('gunzip()', () => {
it('rejects if source path is not absolute', async () => {
try {
await gunzip('foo/bar', '**/*');
throw new Error('Expected gunzip() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if destination path is not absolute', async () => {
try {
await gunzip(__dirname, '**/*');
throw new Error('Expected gunzip() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('rejects if neither path is not absolute', async () => {
try {
await gunzip('foo/bar', '**/*');
throw new Error('Expected gunzip() to reject');
} catch (error) {
assertNonAbsoluteError(error);
}
});
it('extracts gzip from source into destination, creating destination if necessary', async () => {
const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt');
await gunzip(FOO_GZIP_PATH, destination);
expect(await read(resolve(destination))).toBe('foo\n');
});
});

View file

@ -22,14 +22,13 @@ import { resolve } from 'path';
import del from 'del';
// @ts-ignore
import { getChildPaths, mkdirp, write } from './fs';
import { scanCopy } from './scan_copy';
import { getChildPaths } from '../fs';
import { scanCopy } from '../scan_copy';
const IS_WINDOWS = process.platform === 'win32';
const FIXTURES = resolve(__dirname, '__tests__/fixtures');
const FIXTURES = resolve(__dirname, '../__fixtures__');
const TMP = resolve(__dirname, '../__tmp__');
const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable');
const TMP = resolve(__dirname, '__tests__/__tmp__');
const getCommonMode = (path: string) => statSync(path).mode.toString(8).slice(-3);

View file

@ -0,0 +1,52 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import execa from 'execa';
import { watchStdioForLine } from '../watch_stdio_for_line';
const onLogLine = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
});
it('calls logFn with log lines', async () => {
const proc = execa(process.execPath, ['-e', 'console.log("hi")']);
await watchStdioForLine(proc, onLogLine);
expect(onLogLine.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"hi",
],
]
`);
});
it('send the proc SIGKILL if it logs a line matching exitAfter regexp', async function () {
const proc = execa(process.execPath, [require.resolve('../__fixtures__/log_on_sigint')]);
await watchStdioForLine(proc, onLogLine, /listening for SIGINT/);
expect(onLogLine.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"listening for SIGINT",
],
]
`);
});

View file

@ -1,50 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Build a platform descriptor exposing accessors for the platform name,
 * architecture, node arch string, and build name used by the build tasks.
 */
export function createPlatform(name, architecture, buildName) {
  // a locally-defined class capturing the arguments via closure
  class Platform {
    getName() {
      return name;
    }
    getArchitecture() {
      return architecture;
    }
    getBuildName() {
      return buildName;
    }
    getNodeArch() {
      // node identifies platforms as `${platform}-${arch}`
      return `${name}-${architecture}`;
    }
    isWindows() {
      return name === 'win32';
    }
    isMac() {
      return name === 'darwin';
    }
    isLinux() {
      return name === 'linux';
    }
  }

  return new Platform();
}

View file

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Platform } from './platform';
// unit coverage for the Platform class accessors and os predicates
describe('getName()', () => {
  it('returns the name argument', () => {
    expect(new Platform('win32', 'x64', 'foo').getName()).toBe('win32');
  });
});

describe('getNodeArch()', () => {
  it('returns the node arch for the passed name', () => {
    expect(new Platform('win32', 'x64', 'foo').getNodeArch()).toBe('win32-x64');
  });
});

describe('getBuildName()', () => {
  it('returns the build name for the passed name', () => {
    expect(new Platform('linux', 'arm64', 'linux-aarch64').getBuildName()).toBe('linux-aarch64');
  });
});

describe('isWindows()', () => {
  it('returns true if name is win32', () => {
    expect(new Platform('win32', 'x64', 'foo').isWindows()).toBe(true);
    expect(new Platform('linux', 'x64', 'foo').isWindows()).toBe(false);
    expect(new Platform('darwin', 'x64', 'foo').isWindows()).toBe(false);
  });
});

describe('isLinux()', () => {
  it('returns true if name is linux', () => {
    expect(new Platform('win32', 'x64', 'foo').isLinux()).toBe(false);
    expect(new Platform('linux', 'x64', 'foo').isLinux()).toBe(true);
    expect(new Platform('darwin', 'x64', 'foo').isLinux()).toBe(false);
  });
});

describe('isMac()', () => {
  it('returns true if name is darwin', () => {
    expect(new Platform('win32', 'x64', 'foo').isMac()).toBe(false);
    expect(new Platform('linux', 'x64', 'foo').isMac()).toBe(false);
    expect(new Platform('darwin', 'x64', 'foo').isMac()).toBe(true);
  });
});

View file

@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export type PlatformName = 'win32' | 'darwin' | 'linux';
export type PlatformArchitecture = 'x64' | 'arm64';

/**
 * Describes one target platform for a distributable: the node-style
 * platform name, the CPU architecture, and the name used for the
 * build artifact.
 */
export class Platform {
  constructor(
    private readonly name: PlatformName,
    private readonly architecture: PlatformArchitecture,
    private readonly buildName: string
  ) {}

  /** node-style platform name, e.g. 'linux' */
  getName() {
    return this.name;
  }

  /** CPU architecture, e.g. 'x64' */
  getArchitecture() {
    return this.architecture;
  }

  /** name used for the build artifact, e.g. 'linux-aarch64' */
  getBuildName() {
    return this.buildName;
  }

  /** `${platform}-${arch}` string as node identifies platforms */
  getNodeArch() {
    return `${this.name}-${this.architecture}`;
  }

  isWindows() {
    return this.name === 'win32';
  }

  isMac() {
    return this.name === 'darwin';
  }

  isLinux() {
    return this.name === 'linux';
  }
}

/** every platform we produce distributables for */
export const ALL_PLATFORMS = [
  new Platform('linux', 'x64', 'linux-x86_64'),
  new Platform('linux', 'arm64', 'linux-aarch64'),
  new Platform('darwin', 'x64', 'darwin-x86_64'),
  new Platform('win32', 'x64', 'windows-x86_64'),
];

View file

@ -0,0 +1,248 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ToolingLog,
ToolingLogCollectingWriter,
createStripAnsiSerializer,
createRecursiveSerializer,
} from '@kbn/dev-utils';
import { Config } from './config';
import { createRunner } from './runner';
import { Build } from './build';
import { isErrorLogged, markErrorLogged } from './errors';
jest.mock('./version_info');
const testWriter = new ToolingLogCollectingWriter();
const log = new ToolingLog();
log.setWriters([testWriter]);
expect.addSnapshotSerializer(createStripAnsiSerializer());
const STACK_TRACE = /(\│\s+)at .+ \(.+\)$/;
const isStackTrace = (x: any) => typeof x === 'string' && STACK_TRACE.test(x);
expect.addSnapshotSerializer(
createRecursiveSerializer(
(v) => Array.isArray(v) && v.some(isStackTrace),
(v) => {
const start = v.findIndex(isStackTrace);
v[start] = v[start].replace(STACK_TRACE, '$1<stacktrace>');
while (isStackTrace(v[start + 1])) v.splice(start + 1, 1);
return v;
}
)
);
beforeEach(() => {
testWriter.messages.length = 0;
jest.clearAllMocks();
});
const setup = async (opts: { buildDefaultDist: boolean; buildOssDist: boolean }) => {
const config = await Config.create({
isRelease: true,
targetAllPlatforms: true,
versionQualifier: '-SNAPSHOT',
});
const run = createRunner({
config,
log,
...opts,
});
return { config, run };
};
describe('buildOssDist = true, buildDefaultDist = true', () => {
it('runs global task once, passing config and log', async () => {
const { config, run } = await setup({
buildDefaultDist: true,
buildOssDist: true,
});
const mock = jest.fn();
await run({
global: true,
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build), expect.any(Build)]);
});
it('calls local tasks twice, passing each build', async () => {
const { config, run } = await setup({
buildDefaultDist: true,
buildOssDist: true,
});
const mock = jest.fn();
await run({
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(2);
expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
});
});
describe('just default dist', () => {
it('runs global task once, passing config and log', async () => {
const { config, run } = await setup({
buildDefaultDist: true,
buildOssDist: false,
});
const mock = jest.fn();
await run({
global: true,
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]);
});
it('calls local tasks once, passing the default build', async () => {
const { config, run } = await setup({
buildDefaultDist: true,
buildOssDist: false,
});
const mock = jest.fn();
await run({
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
const [args] = mock.mock.calls;
const [, , build] = args;
if (build.isOss()) {
throw new Error('expected build to be the default dist, not the oss dist');
}
});
});
describe('just oss dist', () => {
it('runs global task once, passing config and log', async () => {
const { config, run } = await setup({
buildDefaultDist: false,
buildOssDist: true,
});
const mock = jest.fn();
await run({
global: true,
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]);
});
it('calls local tasks once, passing the oss build', async () => {
const { config, run } = await setup({
buildDefaultDist: false,
buildOssDist: true,
});
const mock = jest.fn();
await run({
description: 'foo',
run: mock,
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build));
const [args] = mock.mock.calls;
const [, , build] = args;
if (!build.isOss()) {
throw new Error('expected build to be the oss dist, not the default dist');
}
});
});
describe('task rejection', () => {
it('rejects, logs error, and marks error logged', async () => {
const { run } = await setup({
buildDefaultDist: true,
buildOssDist: false,
});
const error = new Error('FOO');
expect(isErrorLogged(error)).toBe(false);
const promise = run({
description: 'foo',
async run() {
throw error;
},
});
await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`);
expect(testWriter.messages).toMatchInlineSnapshot(`
Array [
" info [ kibana ] foo",
" │ERROR failure 0 sec",
" │ERROR Error: FOO",
" │ <stacktrace>",
"",
]
`);
expect(isErrorLogged(error)).toBe(true);
});
it('just rethrows errors that have already been logged', async () => {
const { run } = await setup({
buildDefaultDist: true,
buildOssDist: false,
});
const error = markErrorLogged(new Error('FOO'));
const promise = run({
description: 'foo',
async run() {
throw error;
},
});
await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`);
expect(testWriter.messages).toMatchInlineSnapshot(`
Array [
" info [ kibana ] foo",
"",
]
`);
});
});

View file

@ -18,13 +18,33 @@
*/
import chalk from 'chalk';
import { ToolingLog } from '@kbn/dev-utils';
import { isErrorLogged, markErrorLogged } from './errors';
import { Build } from './build';
import { Config } from './config';
import { createBuild } from './build';
interface Options {
config: Config;
log: ToolingLog;
buildOssDist: boolean;
buildDefaultDist: boolean;
}
export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
async function execTask(desc, task, ...args) {
export interface GlobalTask {
global: true;
description: string;
run(config: Config, log: ToolingLog, builds: Build[]): Promise<void>;
}
export interface Task {
global?: false;
description: string;
run(config: Config, log: ToolingLog, build: Build): Promise<void>;
}
export function createRunner({ config, log, buildOssDist, buildDefaultDist }: Options) {
async function execTask(desc: string, task: Task | GlobalTask, lastArg: any) {
log.info(desc);
log.indent(4);
@ -37,11 +57,11 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
};
try {
await task.run(config, log, ...args);
await task.run(config, log, lastArg);
log.success(chalk.green('✓'), time());
} catch (error) {
if (!isErrorLogged(error)) {
log.error('failure', time());
log.error(`failure ${time()}`);
log.error(error);
markErrorLogged(error);
}
@ -53,22 +73,12 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
}
}
const builds = [];
const builds: Build[] = [];
if (buildDefaultDist) {
builds.push(
createBuild({
config,
oss: false,
})
);
builds.push(new Build(config, false));
}
if (buildOssDist) {
builds.push(
createBuild({
config,
oss: true,
})
);
builds.push(new Build(config, true));
}
/**
@ -76,11 +86,8 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) {
* `config`: an object with methods for determining top-level config values, see `./config.js`
* `log`: an instance of the `ToolingLog`, see `../../tooling_log/tooling_log.js`
* `builds?`: If task does is not defined as `global: true` then it is called for each build and passed each one here.
*
* @param {Task} task
* @return {Promise<undefined>}
*/
return async function run(task) {
return async function run(task: Task | GlobalTask) {
if (task.global) {
await execTask(chalk`{dim [ global ]} ${task.description}`, task, builds);
} else {

View file

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import pkg from '../../../../package.json';
import { getVersionInfo } from './version_info';
describe('isRelease = true', () => {
it('returns unchanged package.version, build sha, and build number', async () => {
const versionInfo = await getVersionInfo({
isRelease: true,
pkg,
});
expect(versionInfo).toHaveProperty('buildVersion', pkg.version);
expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/));
expect(versionInfo).toHaveProperty('buildNumber');
expect(versionInfo.buildNumber).toBeGreaterThan(1000);
});
});
describe('isRelease = false', () => {
it('returns snapshot version, build sha, and build number', async () => {
const versionInfo = await getVersionInfo({
isRelease: false,
pkg,
});
expect(versionInfo).toHaveProperty('buildVersion', expect.stringContaining(pkg.version));
expect(versionInfo).toHaveProperty('buildVersion', expect.stringMatching(/-SNAPSHOT$/));
expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/));
expect(versionInfo).toHaveProperty('buildNumber');
expect(versionInfo.buildNumber).toBeGreaterThan(1000);
});
});
describe('versionQualifier', () => {
it('appends a version qualifier', async () => {
const versionInfo = await getVersionInfo({
isRelease: true,
versionQualifier: 'beta55',
pkg,
});
expect(versionInfo).toHaveProperty('buildVersion', pkg.version + '-beta55');
});
});

View file

@ -34,7 +34,19 @@ async function getBuildNumber() {
return parseFloat(wc.stdout.trim());
}
export async function getVersionInfo({ isRelease, versionQualifier, pkg }) {
interface Options {
isRelease: boolean;
versionQualifier?: string;
pkg: {
version: string;
};
}
type ResolvedType<T extends Promise<any>> = T extends Promise<infer X> ? X : never;
export type VersionInfo = ResolvedType<ReturnType<typeof getVersionInfo>>;
export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Options) {
const buildVersion = pkg.version.concat(
versionQualifier ? `-${versionQualifier}` : '',
isRelease ? '' : '-SNAPSHOT'

View file

@ -18,8 +18,13 @@
*/
import { Transform } from 'stream';
import { ExecaChildProcess } from 'execa';
import { createPromiseFromStreams, createSplitStream, createMapStream } from './streams';
import {
createPromiseFromStreams,
createSplitStream,
createMapStream,
} from '../../../legacy/utils/streams';
// creates a stream that skips empty lines unless they are followed by
// another line, preventing the empty lines produced by splitStream
@ -27,7 +32,7 @@ function skipLastEmptyLineStream() {
let skippedEmptyLine = false;
return new Transform({
objectMode: true,
transform(line, enc, cb) {
transform(line, _, cb) {
if (skippedEmptyLine) {
this.push('');
skippedEmptyLine = false;
@ -37,14 +42,18 @@ function skipLastEmptyLineStream() {
skippedEmptyLine = true;
return cb();
} else {
return cb(null, line);
return cb(undefined, line);
}
},
});
}
export async function watchStdioForLine(proc, logFn, exitAfter) {
function onLogLine(line) {
export async function watchStdioForLine(
proc: ExecaChildProcess,
logFn: (line: string) => void,
exitAfter?: RegExp
) {
function onLogLine(line: string) {
logFn(line);
if (exitAfter && exitAfter.test(line)) {

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { copyAll } from '../../lib';
import { copyAll, Task } from '../../lib';
export const CopyBinScriptsTask = {
export const CopyBinScripts: Task = {
description: 'Copying bin scripts into platform-generic build directory',
async run(config, log, build) {

View file

@ -17,4 +17,4 @@
* under the License.
*/
export { CopyBinScriptsTask } from './copy_bin_scripts_task';
export * from './copy_bin_scripts_task';

View file

@ -25,9 +25,11 @@ import {
reportOptimizerStats,
} from '@kbn/optimizer';
export const BuildKibanaPlatformPluginsTask = {
import { Task } from '../lib';
export const BuildKibanaPlatformPlugins: Task = {
description: 'Building distributable versions of Kibana platform plugins',
async run(_, log, build) {
async run(config, log, build) {
const optimizerConfig = OptimizerConfig.create({
repoRoot: build.resolvePath(),
cache: false,

View file

@ -18,7 +18,8 @@
*/
import { buildProductionProjects } from '@kbn/pm';
import { mkdirp } from '../lib';
import { mkdirp, Task } from '../lib';
/**
* High-level overview of how we enable shared packages in production:
@ -66,8 +67,7 @@ import { mkdirp } from '../lib';
* in some way by Kibana itself in production, as it won't otherwise be
* included in the production build.
*/
export const BuildPackagesTask = {
export const BuildPackages: Task = {
description: 'Building distributable versions of packages',
async run(config, log, build) {
await mkdirp(config.resolveFromRepo('target'));

View file

@ -19,9 +19,9 @@
import minimatch from 'minimatch';
import { deleteAll, deleteEmptyFolders, scanDelete } from '../lib';
import { deleteAll, deleteEmptyFolders, scanDelete, Task, GlobalTask } from '../lib';
export const CleanTask = {
export const Clean: GlobalTask = {
global: true,
description: 'Cleaning artifacts from previous builds',
@ -37,7 +37,7 @@ export const CleanTask = {
},
};
export const CleanPackagesTask = {
export const CleanPackages: Task = {
description: 'Cleaning source for packages that are now installed in node_modules',
async run(config, log, build) {
@ -45,7 +45,7 @@ export const CleanPackagesTask = {
},
};
export const CleanTypescriptTask = {
export const CleanTypescript: Task = {
description: 'Cleaning typescript source files that have been transpiled to JS',
async run(config, log, build) {
@ -59,11 +59,11 @@ export const CleanTypescriptTask = {
},
};
export const CleanExtraFilesFromModulesTask = {
export const CleanExtraFilesFromModules: Task = {
description: 'Cleaning tests, examples, docs, etc. from node_modules',
async run(config, log, build) {
const makeRegexps = (patterns) =>
const makeRegexps = (patterns: string[]) =>
patterns.map((pattern) => minimatch.makeRe(pattern, { nocase: true }));
const regularExpressions = makeRegexps([
@ -181,7 +181,7 @@ export const CleanExtraFilesFromModulesTask = {
},
};
export const CleanExtraBinScriptsTask = {
export const CleanExtraBinScripts: Task = {
description: 'Cleaning extra bin/* scripts from platform-specific builds',
async run(config, log, build) {
@ -201,7 +201,7 @@ export const CleanExtraBinScriptsTask = {
},
};
export const CleanEmptyFoldersTask = {
export const CleanEmptyFolders: Task = {
description: 'Cleaning all empty folders recursively',
async run(config, log, build) {

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { copyAll } from '../lib';
import { copyAll, Task } from '../lib';
export const CopySourceTask = {
export const CopySource: Task = {
description: 'Copying source into platform-generic build directory',
async run(config, log, build) {

View file

@ -17,10 +17,10 @@
* under the License.
*/
import { scanCopy } from '../lib';
import { scanCopy, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
export const CreateArchivesSourcesTask = {
export const CreateArchivesSources: Task = {
description: 'Creating platform-specific archive source directories',
async run(config, log, build) {
await Promise.all(

View file

@ -23,11 +23,11 @@ import { promisify } from 'util';
import { CiStatsReporter } from '@kbn/dev-utils';
import { mkdirp, compress } from '../lib';
import { mkdirp, compressTar, compressZip, Task } from '../lib';
const asyncStat = promisify(Fs.stat);
export const CreateArchivesTask = {
export const CreateArchives: Task = {
description: 'Creating the archives for each platform',
async run(config, log, build) {
@ -49,19 +49,16 @@ export const CreateArchivesTask = {
path: destination,
});
await compress(
'zip',
{
archiverOptions: {
zlib: {
level: 9,
},
},
createRootDirectory: true,
},
await compressZip({
source,
destination
);
destination,
archiverOptions: {
zlib: {
level: 9,
},
},
createRootDirectory: true,
});
break;
case '.gz':
@ -70,20 +67,17 @@ export const CreateArchivesTask = {
path: destination,
});
await compress(
'tar',
{
archiverOptions: {
gzip: true,
gzipOptions: {
level: 9,
},
},
createRootDirectory: true,
},
await compressTar({
source,
destination
);
destination,
archiverOptions: {
gzip: true,
gzipOptions: {
level: 9,
},
},
createRootDirectory: true,
});
break;
default:

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { mkdirp, write } from '../lib';
import { mkdirp, write, Task } from '../lib';
export const CreateEmptyDirsAndFilesTask = {
export const CreateEmptyDirsAndFiles: Task = {
description: 'Creating some empty directories and files to prevent file-permission issues',
async run(config, log, build) {

View file

@ -19,9 +19,9 @@
import { copyWorkspacePackages } from '@kbn/pm';
import { read, write } from '../lib';
import { read, write, Task } from '../lib';
export const CreatePackageJsonTask = {
export const CreatePackageJson: Task = {
description: 'Creating build-ready version of package.json',
async run(config, log, build) {
@ -38,7 +38,7 @@ export const CreatePackageJsonTask = {
number: config.getBuildNumber(),
sha: config.getBuildSha(),
distributable: true,
release: config.isRelease(),
release: config.isRelease,
},
repository: pkg.repository,
engines: {
@ -59,7 +59,7 @@ export const CreatePackageJsonTask = {
},
};
export const RemovePackageJsonDepsTask = {
export const RemovePackageJsonDeps: Task = {
description: 'Removing dependencies from package.json',
async run(config, log, build) {
@ -74,7 +74,7 @@ export const RemovePackageJsonDepsTask = {
},
};
export const RemoveWorkspacesTask = {
export const RemoveWorkspaces: Task = {
description: 'Remove workspace artifacts',
async run(config, log, build) {

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { write, read } from '../lib';
import { write, read, Task } from '../lib';
export const CreateReadmeTask = {
export const CreateReadme: Task = {
description: 'Creating README.md file',
async run(config, log, build) {

View file

@ -27,7 +27,6 @@ export * from './create_archives_task';
export * from './create_empty_dirs_and_files_task';
export * from './create_package_json_task';
export * from './create_readme_task';
export * from './install_chromium';
export * from './install_dependencies_task';
export * from './license_file_task';
export * from './nodejs';
@ -41,3 +40,6 @@ export * from './transpile_scss_task';
export * from './uuid_verification_task';
export * from './verify_env_task';
export * from './write_sha_sums_task';
// @ts-expect-error this module can't be TS because it ends up pulling x-pack into Kibana
export { InstallChromium } from './install_chromium';

View file

@ -17,11 +17,12 @@
* under the License.
*/
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install';
import { first } from 'rxjs/operators';
export const InstallChromiumTask = {
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install';
export const InstallChromium = {
description: 'Installing Chromium',
async run(config, log, build) {
@ -32,6 +33,7 @@ export const InstallChromiumTask = {
log.info(`Installing Chromium for ${platform.getName()}-${platform.getArchitecture()}`);
const { binaryPath$ } = installBrowser(
// TODO: https://github.com/elastic/kibana/issues/72496
log,
build.resolvePathForPlatform(platform, 'x-pack/plugins/reporting/chromium'),
platform.getName(),

View file

@ -19,7 +19,9 @@
import { Project } from '@kbn/pm';
export const InstallDependenciesTask = {
import { Task } from '../lib';
export const InstallDependencies: Task = {
description: 'Installing node_modules, including production builds of packages',
async run(config, log, build) {

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { write, read } from '../lib';
import { write, read, Task } from '../lib';
export const UpdateLicenseFileTask = {
export const UpdateLicenseFile: Task = {
description: 'Updating LICENSE.txt file',
async run(config, log, build) {

View file

@ -1,97 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import expect from '@kbn/expect';
import * as NodeShasumsNS from '../node_shasums';
import * as NodeDownloadInfoNS from '../node_download_info';
import * as DownloadNS from '../../../lib/download'; // sinon can't stub '../../../lib' properly
import { DownloadNodeBuildsTask } from '../download_node_builds_task';
describe('src/dev/build/tasks/nodejs/download_node_builds_task', () => {
const sandbox = sinon.createSandbox();
afterEach(() => {
sandbox.restore();
});
function setup({ failOnUrl } = {}) {
const platforms = [{ getName: () => 'foo' }, { getName: () => 'bar' }];
const log = {};
const config = {
getNodePlatforms: () => platforms,
getNodeVersion: () => 'nodeVersion',
};
sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => {
return {
url: `${platform.getName()}:url`,
downloadPath: `${platform.getName()}:downloadPath`,
downloadName: `${platform.getName()}:downloadName`,
};
});
sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns({
'foo:downloadName': 'foo:sha256',
'bar:downloadName': 'bar:sha256',
});
sandbox.stub(DownloadNS, 'download').callsFake(({ url }) => {
if (url === failOnUrl) {
throw new Error('Download failed for reasons');
}
});
return { log, config };
}
it('downloads node builds for each platform', async () => {
const { log, config } = setup();
await DownloadNodeBuildsTask.run(config, log);
sinon.assert.calledTwice(DownloadNS.download);
sinon.assert.calledWithExactly(DownloadNS.download, {
log,
url: 'foo:url',
sha256: 'foo:sha256',
destination: 'foo:downloadPath',
retries: 3,
});
sinon.assert.calledWithExactly(DownloadNS.download, {
log,
url: 'bar:url',
sha256: 'bar:sha256',
destination: 'bar:downloadPath',
retries: 3,
});
});
it('rejects if any download fails', async () => {
const { config, log } = setup({ failOnUrl: 'foo:url' });
try {
await DownloadNodeBuildsTask.run(config, log);
throw new Error('Expected DownloadNodeBuildsTask to reject');
} catch (error) {
expect(error).to.have.property('message').be('Download failed for reasons');
}
});
});

View file

@ -1,93 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import { resolve } from 'path';
import * as NodeDownloadInfoNS from '../node_download_info';
import * as FsNS from '../../../lib/fs';
import { ExtractNodeBuildsTask } from '../extract_node_builds_task';
describe('src/dev/build/tasks/node_extract_node_builds_task', () => {
const sandbox = sinon.createSandbox();
afterEach(() => {
sandbox.restore();
});
it('copies downloadPath to extractDir/node.exe for windows platform', async () => {
sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({
downloadPath: 'downloadPath',
extractDir: 'extractDir',
});
sandbox.stub(ExtractNodeBuildsTask, 'copyWindows');
sandbox.stub(FsNS, 'untar');
const platform = {
isWindows: () => true,
};
const config = {
getNodePlatforms: () => [platform],
};
await ExtractNodeBuildsTask.run(config);
sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo);
sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform);
sinon.assert.calledOnce(ExtractNodeBuildsTask.copyWindows);
sinon.assert.calledWithExactly(
ExtractNodeBuildsTask.copyWindows,
'downloadPath',
resolve('extractDir/node.exe')
);
sinon.assert.notCalled(FsNS.untar);
});
it('untars downloadPath to extractDir, stripping the top level of the archive, for non-windows platforms', async () => {
sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({
downloadPath: 'downloadPath',
extractDir: 'extractDir',
});
sandbox.stub(ExtractNodeBuildsTask, 'copyWindows');
sandbox.stub(FsNS, 'untar');
const platform = {
isWindows: () => false,
};
const config = {
getNodePlatforms: () => [platform],
};
await ExtractNodeBuildsTask.run(config);
sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo);
sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform);
sinon.assert.notCalled(ExtractNodeBuildsTask.copyWindows);
sinon.assert.calledOnce(FsNS.untar);
sinon.assert.calledWithExactly(FsNS.untar, 'downloadPath', 'extractDir', {
strip: 1,
});
});
});

View file

@ -1,106 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import expect from '@kbn/expect';
import * as NodeShasumsNS from '../node_shasums';
import * as NodeDownloadInfoNS from '../node_download_info';
import * as FsNS from '../../../lib/fs';
import { VerifyExistingNodeBuildsTask } from '../verify_existing_node_builds_task';
describe('src/dev/build/tasks/nodejs/verify_existing_node_builds_task', () => {
const sandbox = sinon.createSandbox();
afterEach(() => {
sandbox.restore();
});
function setup({ nodeShasums } = {}) {
const platforms = [
{ getName: () => 'foo', getNodeArch: () => 'foo:nodeArch' },
{ getName: () => 'bar', getNodeArch: () => 'bar:nodeArch' },
];
const log = { success: sinon.stub() };
const config = {
getNodePlatforms: () => platforms,
getNodeVersion: () => 'nodeVersion',
};
sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => {
return {
url: `${platform.getName()}:url`,
downloadPath: `${platform.getName()}:downloadPath`,
downloadName: `${platform.getName()}:downloadName`,
};
});
sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns(
nodeShasums || {
'foo:downloadName': 'foo:sha256',
'bar:downloadName': 'bar:sha256',
}
);
sandbox.stub(FsNS, 'getFileHash').callsFake((path) => {
switch (path) {
case 'foo:downloadPath':
return 'foo:sha256';
case 'bar:downloadPath':
return 'bar:sha256';
}
});
return { log, config, platforms };
}
it('downloads node builds for each platform', async () => {
const { log, config, platforms } = setup();
await VerifyExistingNodeBuildsTask.run(config, log);
sinon.assert.calledOnce(NodeShasumsNS.getNodeShasums);
sinon.assert.calledTwice(NodeDownloadInfoNS.getNodeDownloadInfo);
sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[0]);
sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[1]);
sinon.assert.calledTwice(FsNS.getFileHash);
sinon.assert.calledWithExactly(FsNS.getFileHash, 'foo:downloadPath', 'sha256');
sinon.assert.calledWithExactly(FsNS.getFileHash, 'bar:downloadPath', 'sha256');
});
it('rejects if any download has an incorrect sha256', async () => {
const { config, log } = setup({
nodeShasums: {
'foo:downloadName': 'foo:sha256',
'bar:downloadName': 'bar:invalid',
},
});
try {
await VerifyExistingNodeBuildsTask.run(config, log);
throw new Error('Expected VerifyExistingNodeBuildsTask to reject');
} catch (error) {
expect(error)
.to.have.property('message')
.be('Download at bar:downloadPath does not match expected checksum bar:sha256');
}
});
});

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { deleteAll } from '../../lib';
import { deleteAll, Task } from '../../lib';
export const CleanNodeBuildsTask = {
export const CleanNodeBuilds: Task = {
description: 'Cleaning npm from node',
async run(config, log, build) {

View file

@ -0,0 +1,136 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ToolingLog,
ToolingLogCollectingWriter,
createAnyInstanceSerializer,
} from '@kbn/dev-utils';
import { Config, Platform } from '../../lib';
import { DownloadNodeBuilds } from './download_node_builds_task';
// import * as NodeShasumsNS from '../node_shasums';
// import * as NodeDownloadInfoNS from '../node_download_info';
// import * as DownloadNS from '../../../lib/download';
// import { DownloadNodeBuilds } from '../download_node_builds_task';
jest.mock('./node_shasums');
jest.mock('./node_download_info');
jest.mock('../../lib/download');
expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog));
const { getNodeDownloadInfo } = jest.requireMock('./node_download_info');
const { getNodeShasums } = jest.requireMock('./node_shasums');
const { download } = jest.requireMock('../../lib/download');
const log = new ToolingLog();
const testWriter = new ToolingLogCollectingWriter();
log.setWriters([testWriter]);
beforeEach(() => {
testWriter.messages.length = 0;
jest.clearAllMocks();
});
async function setup({ failOnUrl }: { failOnUrl?: string } = {}) {
const config = await Config.create({
isRelease: true,
targetAllPlatforms: true,
});
getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => {
return {
url: `${platform.getName()}:url`,
downloadPath: `${platform.getName()}:downloadPath`,
downloadName: `${platform.getName()}:downloadName`,
};
});
getNodeShasums.mockReturnValue({
'linux:downloadName': 'linux:sha256',
'darwin:downloadName': 'darwin:sha256',
'win32:downloadName': 'win32:sha256',
});
download.mockImplementation(({ url }: any) => {
if (url === failOnUrl) {
throw new Error('Download failed for reasons');
}
});
return { config };
}
it('downloads node builds for each platform', async () => {
const { config } = await setup();
await DownloadNodeBuilds.run(config, log, []);
expect(download.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
Object {
"destination": "linux:downloadPath",
"log": <ToolingLog>,
"retries": 3,
"sha256": "linux:sha256",
"url": "linux:url",
},
],
Array [
Object {
"destination": "linux:downloadPath",
"log": <ToolingLog>,
"retries": 3,
"sha256": "linux:sha256",
"url": "linux:url",
},
],
Array [
Object {
"destination": "darwin:downloadPath",
"log": <ToolingLog>,
"retries": 3,
"sha256": "darwin:sha256",
"url": "darwin:url",
},
],
Array [
Object {
"destination": "win32:downloadPath",
"log": <ToolingLog>,
"retries": 3,
"sha256": "win32:sha256",
"url": "win32:url",
},
],
]
`);
expect(testWriter.messages).toMatchInlineSnapshot(`Array []`);
});
it('rejects if any download fails', async () => {
const { config } = await setup({ failOnUrl: 'linux:url' });
await expect(DownloadNodeBuilds.run(config, log, [])).rejects.toMatchInlineSnapshot(
`[Error: Download failed for reasons]`
);
expect(testWriter.messages).toMatchInlineSnapshot(`Array []`);
});

View file

@ -17,11 +17,11 @@
* under the License.
*/
import { download } from '../../lib';
import { download, GlobalTask } from '../../lib';
import { getNodeShasums } from './node_shasums';
import { getNodeDownloadInfo } from './node_download_info';
export const DownloadNodeBuildsTask = {
export const DownloadNodeBuilds: GlobalTask = {
global: true,
description: 'Downloading node.js builds for all platforms',
async run(config, log) {

View file

@ -0,0 +1,108 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ToolingLog,
ToolingLogCollectingWriter,
createAbsolutePathSerializer,
} from '@kbn/dev-utils';
import { Config } from '../../lib';
import { ExtractNodeBuilds } from './extract_node_builds_task';
jest.mock('../../lib/fs');
const Fs = jest.requireMock('../../lib/fs');
const log = new ToolingLog();
const testWriter = new ToolingLogCollectingWriter();
log.setWriters([testWriter]);
expect.addSnapshotSerializer(createAbsolutePathSerializer());
async function setup() {
const config = await Config.create({
isRelease: true,
targetAllPlatforms: true,
});
return { config };
}
beforeEach(() => {
testWriter.messages.length = 0;
jest.clearAllMocks();
});
it('runs expected fs operations', async () => {
const { config } = await setup();
await ExtractNodeBuilds.run(config, log, []);
const usedMethods = Object.fromEntries(
Object.entries(Fs)
.filter((entry): entry is [string, jest.Mock] => {
const [, mock] = entry;
if (typeof mock !== 'function') {
return false;
}
return (mock as jest.Mock).mock.calls.length > 0;
})
.map(([name, mock]) => [name, mock.mock.calls])
);
expect(usedMethods).toMatchInlineSnapshot(`
Object {
"copy": Array [
Array [
<absolute path>/.node_binaries/10.21.0/node.exe,
<absolute path>/.node_binaries/10.21.0/win32-x64/node.exe,
Object {
"clone": true,
},
],
],
"untar": Array [
Array [
<absolute path>/.node_binaries/10.21.0/node-v10.21.0-linux-x64.tar.gz,
<absolute path>/.node_binaries/10.21.0/linux-x64,
Object {
"strip": 1,
},
],
Array [
<absolute path>/.node_binaries/10.21.0/node-v10.21.0-linux-arm64.tar.gz,
<absolute path>/.node_binaries/10.21.0/linux-arm64,
Object {
"strip": 1,
},
],
Array [
<absolute path>/.node_binaries/10.21.0/node-v10.21.0-darwin-x64.tar.gz,
<absolute path>/.node_binaries/10.21.0/darwin-x64,
Object {
"strip": 1,
},
],
],
}
`);
});

View file

@ -17,39 +17,27 @@
* under the License.
*/
import { dirname, resolve } from 'path';
import fs from 'fs';
import { promisify } from 'util';
import Path from 'path';
import { untar, mkdirp } from '../../lib';
import { untar, GlobalTask, copy } from '../../lib';
import { getNodeDownloadInfo } from './node_download_info';
const statAsync = promisify(fs.stat);
const copyFileAsync = promisify(fs.copyFile);
export const ExtractNodeBuildsTask = {
export const ExtractNodeBuilds: GlobalTask = {
global: true,
description: 'Extracting node.js builds for all platforms',
async run(config) {
await Promise.all(
config.getNodePlatforms().map(async (platform) => {
const { downloadPath, extractDir } = getNodeDownloadInfo(config, platform);
// windows executable is not extractable, it's just an .exe file
if (platform.isWindows()) {
const destination = resolve(extractDir, 'node.exe');
return this.copyWindows(downloadPath, destination);
// windows executable is not extractable, it's just an .exe file
await copy(downloadPath, Path.resolve(extractDir, 'node.exe'), {
clone: true,
});
} else {
await untar(downloadPath, extractDir, { strip: 1 });
}
// all other downloads are tarballs
return untar(downloadPath, extractDir, { strip: 1 });
})
);
},
async copyWindows(source, destination) {
// ensure source exists before creating destination directory
await statAsync(source);
await mkdirp(dirname(destination));
// for performance reasons, do a copy-on-write by using the fs.constants.COPYFILE_FICLONE flag
return await copyFileAsync(source, destination, fs.constants.COPYFILE_FICLONE);
},
};

View file

@ -1,25 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Barrel module: named re-exports of the node.js build tasks and helpers so
// consumers can import everything from this directory's entry point.
export { getNodeDownloadInfo } from './node_download_info';
export { DownloadNodeBuildsTask } from './download_node_builds_task';
export { ExtractNodeBuildsTask } from './extract_node_builds_task';
export { VerifyExistingNodeBuildsTask } from './verify_existing_node_builds_task';
export { CleanNodeBuildsTask } from './clean_node_builds_task';

View file

@ -0,0 +1,24 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Barrel module: re-export every node.js build task module wholesale so
// consumers can import tasks and helpers from this directory's entry point.
export * from './node_download_info';
export * from './download_node_builds_task';
export * from './extract_node_builds_task';
export * from './verify_existing_node_builds_task';
export * from './clean_node_builds_task';

View file

@ -19,7 +19,9 @@
import { basename } from 'path';
export function getNodeDownloadInfo(config, platform) {
import { Config, Platform } from '../../lib';
export function getNodeDownloadInfo(config: Config, platform: Platform) {
const version = config.getNodeVersion();
const arch = platform.getNodeArch();

View file

@ -0,0 +1,225 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
ToolingLog,
ToolingLogCollectingWriter,
createAnyInstanceSerializer,
} from '@kbn/dev-utils';
import { Config, Platform } from '../../lib';
import { VerifyExistingNodeBuilds } from './verify_existing_node_builds_task';
// Replace the shasum / download-info / fs helpers with jest auto-mocks so the
// task under test never touches the network or the real filesystem.
jest.mock('./node_shasums');
jest.mock('./node_download_info');
jest.mock('../../lib/fs');
// Grab the auto-mocked functions so tests below can stub their behavior.
const { getNodeShasums } = jest.requireMock('./node_shasums');
const { getNodeDownloadInfo } = jest.requireMock('./node_download_info');
const { getFileHash } = jest.requireMock('../../lib/fs');
// Capture log output in memory instead of printing it.
const log = new ToolingLog();
const testWriter = new ToolingLogCollectingWriter();
log.setWriters([testWriter]);
// Render any Config instance in snapshots as "<Config>" rather than its full contents.
expect.addSnapshotSerializer(createAnyInstanceSerializer(Config));
/**
 * Builds a release Config targeting every platform and wires up the mocked
 * helpers. Every download reports a "valid shasum" hash unless an entry in
 * `actualShaSums` overrides the value returned for that download path.
 */
async function setup(actualShaSums?: Record<string, string>) {
  const config = await Config.create({ isRelease: true, targetAllPlatforms: true });

  // mirrors the "<name>:<arch>:<suffix>" keys the fake download info uses
  const fakeKey = (platform: Platform, suffix: string) =>
    `${platform.getName()}:${platform.getNodeArch()}:${suffix}`;

  const shasums: Record<string, string> = {};
  for (const platform of config.getTargetPlatforms()) {
    shasums[fakeKey(platform, 'downloadName')] = 'valid shasum';
  }
  getNodeShasums.mockReturnValue(shasums);

  getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => ({
    downloadPath: fakeKey(platform, 'downloadPath'),
    downloadName: fakeKey(platform, 'downloadName'),
  }));

  // fall back to the matching hash when no override is configured
  getFileHash.mockImplementation(
    (downloadPath: string) => actualShaSums?.[downloadPath] || 'valid shasum'
  );

  return { config };
}
// Reset all mock state and drop any log lines captured by the previous test.
beforeEach(() => {
  jest.clearAllMocks();
  testWriter.messages.splice(0, testWriter.messages.length);
});
// Happy path: every platform's downloaded build hashes to the published
// shasum, so the task resolves. The inline snapshots pin down exactly which
// helpers were called, in what order, and with what arguments.
it('checks shasums for each downloaded node build', async () => {
const { config } = await setup();
await VerifyExistingNodeBuilds.run(config, log, []);
// shasums are fetched exactly once, for the configured node version
expect(getNodeShasums).toMatchInlineSnapshot(`
[MockFunction] {
"calls": Array [
Array [
"10.21.0",
],
],
"results": Array [
Object {
"type": "return",
"value": Object {
"darwin:darwin-x64:downloadName": "valid shasum",
"linux:linux-arm64:downloadName": "valid shasum",
"linux:linux-x64:downloadName": "valid shasum",
"win32:win32-x64:downloadName": "valid shasum",
},
},
],
}
`);
// download info is resolved once per target platform (Config serialized as <Config>)
expect(getNodeDownloadInfo).toMatchInlineSnapshot(`
[MockFunction] {
"calls": Array [
Array [
<Config>,
Platform {
"architecture": "x64",
"buildName": "linux-x86_64",
"name": "linux",
},
],
Array [
<Config>,
Platform {
"architecture": "arm64",
"buildName": "linux-aarch64",
"name": "linux",
},
],
Array [
<Config>,
Platform {
"architecture": "x64",
"buildName": "darwin-x86_64",
"name": "darwin",
},
],
Array [
<Config>,
Platform {
"architecture": "x64",
"buildName": "windows-x86_64",
"name": "win32",
},
],
],
"results": Array [
Object {
"type": "return",
"value": Object {
"downloadName": "linux:linux-x64:downloadName",
"downloadPath": "linux:linux-x64:downloadPath",
},
},
Object {
"type": "return",
"value": Object {
"downloadName": "linux:linux-arm64:downloadName",
"downloadPath": "linux:linux-arm64:downloadPath",
},
},
Object {
"type": "return",
"value": Object {
"downloadName": "darwin:darwin-x64:downloadName",
"downloadPath": "darwin:darwin-x64:downloadPath",
},
},
Object {
"type": "return",
"value": Object {
"downloadName": "win32:win32-x64:downloadName",
"downloadPath": "win32:win32-x64:downloadPath",
},
},
],
}
`);
// each download's file hash is computed with sha256
expect(getFileHash).toMatchInlineSnapshot(`
[MockFunction] {
"calls": Array [
Array [
"linux:linux-x64:downloadPath",
"sha256",
],
Array [
"linux:linux-arm64:downloadPath",
"sha256",
],
Array [
"darwin:darwin-x64:downloadPath",
"sha256",
],
Array [
"win32:win32-x64:downloadPath",
"sha256",
],
],
"results": Array [
Object {
"type": "return",
"value": "valid shasum",
},
Object {
"type": "return",
"value": "valid shasum",
},
Object {
"type": "return",
"value": "valid shasum",
},
Object {
"type": "return",
"value": "valid shasum",
},
],
}
`);
});
// Failure path: a single mismatched hash makes the task reject with an error
// naming the offending download path and the unexpected checksum value.
it('rejects if any download has an incorrect sha256', async () => {
const { config } = await setup({
'linux:linux-arm64:downloadPath': 'invalid shasum',
});
await expect(
VerifyExistingNodeBuilds.run(config, log, [])
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Download at linux:linux-arm64:downloadPath does not match expected checksum invalid shasum"`
);
});

View file

@ -17,11 +17,11 @@
* under the License.
*/
import { getFileHash } from '../../lib';
import { getFileHash, GlobalTask } from '../../lib';
import { getNodeDownloadInfo } from './node_download_info';
import { getNodeShasums } from './node_shasums';
export const VerifyExistingNodeBuildsTask = {
export const VerifyExistingNodeBuilds: GlobalTask = {
global: true,
description: 'Verifying previously downloaded node.js build for all platforms',
async run(config, log) {

View file

@ -20,11 +20,11 @@
import { getInstalledPackages } from '../../npm';
import { LICENSE_OVERRIDES } from '../../license_checker';
import { write } from '../lib';
import { write, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
import { generateNoticeFromSource, generateBuildNoticeText } from '../../notice';
export const CreateNoticeFileTask = {
export const CreateNoticeFile: Task = {
description: 'Generating NOTICE.txt file',
async run(config, log, build) {
@ -40,7 +40,7 @@ export const CreateNoticeFileTask = {
log.info('Discovering installed packages');
const packages = await getInstalledPackages({
directory: build.resolvePath(),
dev: false,
includeDev: false,
licenseOverrides: LICENSE_OVERRIDES,
});

View file

@ -17,10 +17,10 @@
* under the License.
*/
import { deleteAll, copyAll, exec } from '../lib';
import { deleteAll, copyAll, exec, Task } from '../lib';
import { getNodeDownloadInfo } from './nodejs';
export const OptimizeBuildTask = {
export const OptimizeBuild: Task = {
description: 'Running optimizer',
async run(config, log, build) {

View file

@ -17,10 +17,11 @@
* under the License.
*/
import { Task } from '../../lib';
import { runFpm } from './run_fpm';
import { runDockerGenerator, runDockerGeneratorForUBI } from './docker_generator';
export const CreateDebPackageTask = {
export const CreateDebPackage: Task = {
description: 'Creating deb package',
async run(config, log, build) {
@ -33,7 +34,7 @@ export const CreateDebPackageTask = {
},
};
export const CreateRpmPackageTask = {
export const CreateRpmPackage: Task = {
description: 'Creating rpm package',
async run(config, log, build) {
@ -41,7 +42,7 @@ export const CreateRpmPackageTask = {
},
};
export const CreateDockerPackageTask = {
export const CreateDockerPackage: Task = {
description: 'Creating docker package',
async run(config, log, build) {
@ -50,7 +51,7 @@ export const CreateDockerPackageTask = {
},
};
export const CreateDockerUbiPackageTask = {
export const CreateDockerUbiPackage: Task = {
description: 'Creating docker ubi package',
async run(config, log, build) {

View file

@ -18,7 +18,7 @@
*/
import { resolve } from 'path';
import { compress, copyAll, mkdirp, write } from '../../../lib';
import { compressTar, copyAll, mkdirp, write } from '../../../lib';
import { dockerfileTemplate } from './templates';
export async function bundleDockerFiles(config, log, build, scope) {
@ -50,8 +50,7 @@ export async function bundleDockerFiles(config, log, build, scope) {
// Compress dockerfiles dir created inside
// docker build dir as output it as a target
// on targets folder
await compress(
'tar',
await compressTar(
{
archiverOptions: {
gzip: true,

View file

@ -17,9 +17,5 @@
* under the License.
*/
export {
CreateRpmPackageTask,
CreateDebPackageTask,
CreateDockerPackageTask,
CreateDockerUbiPackageTask,
} from './create_os_package_tasks';
// @ts-expect-error not ts yet
export { runDockerGenerator, runDockerGeneratorForUBI } from './run';

View file

@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './create_os_package_tasks';

View file

@ -19,15 +19,23 @@
import { resolve } from 'path';
import { exec } from '../../lib';
import { ToolingLog } from '@kbn/dev-utils';
export async function runFpm(config, log, build, type, pkgSpecificFlags) {
import { exec, Config, Build } from '../../lib';
export async function runFpm(
config: Config,
log: ToolingLog,
build: Build,
type: 'rpm' | 'deb',
pkgSpecificFlags: string[]
) {
const linux = config.getPlatform('linux', 'x64');
const version = config.getBuildVersion();
const resolveWithTrailingSlash = (...paths) => `${resolve(...paths)}/`;
const resolveWithTrailingSlash = (...paths: string[]) => `${resolve(...paths)}/`;
const fromBuild = (...paths) => build.resolvePathForPlatform(linux, ...paths);
const fromBuild = (...paths: string[]) => build.resolvePathForPlatform(linux, ...paths);
const pickLicense = () => {
if (build.isOss()) {

View file

@ -16,14 +16,30 @@
* specific language governing permissions and limitations
* under the License.
*/
import fs from 'fs';
import path from 'path';
import util from 'util';
import { deleteAll, download, gunzip, untar } from '../lib';
import { ToolingLog } from '@kbn/dev-utils';
import { deleteAll, download, gunzip, untar, Task, Config, Build, Platform, read } from '../lib';
const DOWNLOAD_DIRECTORY = '.native_modules';
const packages = [
interface Package {
name: string;
version: string;
destinationPath: string;
extractMethod: string;
archives: Record<
string,
{
url: string;
sha256: string;
}
>;
}
const packages: Package[] = [
{
name: 're2',
version: '1.15.4',
@ -46,16 +62,22 @@ const packages = [
},
];
async function getInstalledVersion(config, packageName) {
async function getInstalledVersion(config: Config, packageName: string) {
const packageJSONPath = config.resolveFromRepo(
path.join('node_modules', packageName, 'package.json')
);
const buffer = await util.promisify(fs.readFile)(packageJSONPath);
const packageJSON = JSON.parse(buffer);
const json = await read(packageJSONPath);
const packageJSON = JSON.parse(json);
return packageJSON.version;
}
async function patchModule(config, log, build, platform, pkg) {
async function patchModule(
config: Config,
log: ToolingLog,
build: Build,
platform: Platform,
pkg: Package
) {
const installedVersion = await getInstalledVersion(config, pkg.name);
if (installedVersion !== pkg.version) {
throw new Error(
@ -89,7 +111,7 @@ async function patchModule(config, log, build, platform, pkg) {
}
}
export const PatchNativeModulesTask = {
export const PatchNativeModules: Task = {
description: 'Patching platform-specific native modules',
async run(config, log, build) {
for (const pkg of packages) {

View file

@ -21,9 +21,9 @@ import { relative } from 'path';
import { tap, filter, map, toArray } from 'rxjs/operators';
import { scan$ } from '../lib/scan';
import { scan$, Task } from '../lib';
export const PathLengthTask = {
export const PathLength: Task = {
description: 'Checking Windows for paths > 200 characters',
async run(config, log, build) {

View file

@ -17,15 +17,21 @@
* under the License.
*/
import { pipeline } from 'stream';
import { promisify } from 'util';
// @ts-expect-error @types/gulp-babel is outdated and doesn't work for gulp-babel v8
import gulpBabel from 'gulp-babel';
import vfs from 'vinyl-fs';
import { createPromiseFromStreams } from '../../../legacy/utils';
import { Task, Build } from '../lib';
const transpileWithBabel = async (srcGlobs, build, presets) => {
const asyncPipeline = promisify(pipeline);
const transpileWithBabel = async (srcGlobs: string[], build: Build, presets: string[]) => {
const buildRoot = build.resolvePath();
await createPromiseFromStreams([
await asyncPipeline(
vfs.src(
srcGlobs.concat([
'!**/*.d.ts',
@ -44,11 +50,11 @@ const transpileWithBabel = async (srcGlobs, build, presets) => {
presets,
}),
vfs.dest(buildRoot),
]);
vfs.dest(buildRoot)
);
};
export const TranspileBabelTask = {
export const TranspileBabel: Task = {
description: 'Transpiling sources with babel',
async run(config, log, build) {

View file

@ -17,9 +17,12 @@
* under the License.
*/
import { Task } from '../lib';
// @ts-expect-error buildSass isn't TS yet
import { buildSass } from '../../sass';
export const TranspileScssTask = {
export const TranspileScss: Task = {
description: 'Transpiling SCSS to CSS',
async run(config, log, build) {
await buildSass({

View file

@ -17,9 +17,9 @@
* under the License.
*/
import { read } from '../lib';
import { read, Task } from '../lib';
export const UuidVerificationTask = {
export const UuidVerification: Task = {
description: 'Verify that no UUID file is baked into the build',
async run(config, log, build) {

View file

@ -17,7 +17,9 @@
* under the License.
*/
export const VerifyEnvTask = {
import { GlobalTask } from '../lib';
export const VerifyEnv: GlobalTask = {
global: true,
description: 'Verifying environment meets requirements',

View file

@ -19,9 +19,9 @@
import globby from 'globby';
import { getFileHash, write } from '../lib';
import { getFileHash, write, GlobalTask } from '../lib';
export const WriteShaSumsTask = {
export const WriteShaSums: GlobalTask = {
global: true,
description: 'Writing sha1sums of archives and packages in target directory',

View file

@ -1,55 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import execa from 'execa';
import stripAnsi from 'strip-ansi';
import sinon from 'sinon';
import { watchStdioForLine } from '../watch_stdio_for_line';
// Mocha suite for watchStdioForLine: spawns real child processes and asserts
// that their stdio lines are forwarded to the log callback.
describe('src/legacy/utils/watch_stdio_for_line', function () {
const sandbox = sinon.sandbox.create();
afterEach(() => sandbox.reset());
const onLogLine = sandbox.stub();
// strip ANSI escape codes so assertions match on plain text
const logFn = (line) => onLogLine(stripAnsi(line));
it('calls logFn with log lines', async () => {
const proc = execa(process.execPath, ['-e', 'console.log("hi")']);
await watchStdioForLine(proc, logFn);
// log output of the process
sinon.assert.calledWithExactly(onLogLine, sinon.match(/hi/));
});
it('send the proc SIGKILL if it logs a line matching exitAfter regexp', async function () {
// fixture proc will exit after 10 seconds if sigint not received, but the test won't fail
// unless we see the log line `SIGINT not received`, so we let the test take up to 30 seconds
// for potentially huge delays here and there
this.timeout(30000);
const proc = execa(process.execPath, [require.resolve('./fixtures/log_on_sigint')]);
await watchStdioForLine(proc, logFn, /listening for SIGINT/);
sinon.assert.calledWithExactly(onLogLine, sinon.match(/listening for SIGINT/));
// presumably the fixture logs this only if SIGINT never arrived — verify against fixture
sinon.assert.neverCalledWith(onLogLine, sinon.match(/SIGINT not received/));
});
});

View file

@ -21,7 +21,6 @@ export { BinderBase } from './binder';
export { BinderFor } from './binder_for';
export { deepCloneWithBuffers } from './deep_clone_with_buffers';
export { unset } from './unset';
export { watchStdioForLine } from './watch_stdio_for_line';
export { IS_KIBANA_DISTRIBUTABLE } from './artifact_type';
export { IS_KIBANA_RELEASE } from './artifact_type';

Some files were not shown because too many files have changed in this diff Show more