[kbn/pm] add caching to bootstrap (#53622)

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Spencer 2020-01-03 09:35:38 -07:00 committed by GitHub
parent d4b1966d4f
commit 7b4278dc49
29 changed files with 52174 additions and 17362 deletions

@@ -15,6 +15,7 @@
"execa": "^3.2.0",
"exit-hook": "^2.2.0",
"getopts": "^2.2.5",
"load-json-file": "^6.2.0",
"moment": "^2.24.0",
"rxjs": "^6.5.3",
"tree-kill": "^1.2.1",

@@ -1,22 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { dirname } from 'path';
export const REPO_ROOT = dirname(require.resolve('../../../package.json'));

@@ -27,6 +27,6 @@ export {
export { createAbsolutePathSerializer } from './serializers';
export { CA_CERT_PATH, ES_KEY_PATH, ES_CERT_PATH } from './certs';
export { run, createFailError, createFlagError, combineErrors, isFailError, Flags } from './run';
export { REPO_ROOT } from './constants';
export { REPO_ROOT } from './repo_root';
export { KbnClient } from './kbn_client';
export * from './axios';

@@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import Fs from 'fs';
import loadJsonFile from 'load-json-file';
const isKibanaDir = (dir: string) => {
try {
const path = Path.resolve(dir, 'package.json');
const json = loadJsonFile.sync(path);
if (json && typeof json === 'object' && 'name' in json && json.name === 'kibana') {
return true;
}
} catch (error) {
if (error && error.code === 'ENOENT') {
return false;
}
throw error;
}
};
// search for the kibana directory; since this file gets moved around it might
// not be where we expect, but it should always be a relatively close parent
// of this directory
const startDir = Fs.realpathSync(__dirname);
const { root: rootDir } = Path.parse(startDir);
let cursor = startDir;
while (true) {
if (isKibanaDir(cursor)) {
break;
}
const parent = Path.dirname(cursor);
if (parent === rootDir) {
throw new Error(`unable to find kibana directory from ${startDir}`);
}
cursor = parent;
}
export const REPO_ROOT = cursor;

File diff suppressed because one or more lines are too long

@@ -34,6 +34,8 @@
"@types/tempy": "^0.2.0",
"@types/wrap-ansi": "^2.0.15",
"@types/write-pkg": "^3.1.0",
"@kbn/dev-utils": "1.0.0",
"@yarnpkg/lockfile": "^1.1.0",
"babel-loader": "^8.0.6",
"chalk": "^2.4.2",
"cmd-shim": "^2.1.0",
@@ -48,6 +50,7 @@
"indent-string": "^3.2.0",
"lodash.clonedeepwith": "^4.5.0",
"log-symbols": "^2.2.0",
"multimatch": "^4.0.0",
"ncp": "^2.0.0",
"ora": "^1.4.0",
"prettier": "^1.19.1",

@@ -47,6 +47,7 @@ function help() {
-i, --include Include only specified projects. If left unspecified, it defaults to including all projects.
--oss Do not include the x-pack when running command.
--skip-kibana-plugins Filter all plugins in ./plugins and ../kibana-extra when running command.
--no-cache Disable the bootstrap cache
`);
}
@@ -65,7 +66,10 @@ export async function run(argv: string[]) {
h: 'help',
i: 'include',
},
boolean: ['prefer-offline', 'frozen-lockfile'],
default: {
cache: true,
},
boolean: ['prefer-offline', 'frozen-lockfile', 'cache'],
});
const args = options._;
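
For clarity, the new `--no-cache` flag relies on getopts' handling of `--no-` prefixed options together with the `cache: true` default above. A minimal sketch of the expected parsing behaviour (the argv values here are hypothetical, not part of the commit):

import getopts from 'getopts';

// mirrors the options object passed to getopts() in run() above
const parsed = getopts(['bootstrap', '--no-cache'], {
  default: {
    cache: true,
  },
  boolean: ['prefer-offline', 'frozen-lockfile', 'cache'],
});

// getopts treats `--no-cache` as a negation of the boolean `cache` option,
// so parsed.cache is false here; without the flag it stays true
console.log(parsed.cache); // false
console.log(parsed._); // ['bootstrap']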

@@ -31,6 +31,7 @@ Array [
},
"scripts": Object {},
"targetLocation": "<repoRoot>/packages/kbn-pm/src/commands/target",
"version": "1.0.0",
},
"bar" => Project {
"allDependencies": Object {},
@@ -52,6 +53,7 @@ Array [
"kbn:bootstrap": "node ./bar.js",
},
"targetLocation": "<repoRoot>/packages/kbn-pm/src/commands/packages/bar/target",
"version": "1.0.0",
},
},
Map {
@@ -76,6 +78,7 @@ Array [
"kbn:bootstrap": "node ./bar.js",
},
"targetLocation": "<repoRoot>/packages/kbn-pm/src/commands/packages/bar/target",
"version": "1.0.0",
},
],
"bar" => Array [],
@@ -109,6 +112,7 @@ Array [
"kbn:bootstrap": "node ./bar.js",
},
"targetLocation": "<repoRoot>/packages/kbn-pm/src/commands/packages/bar/target",
"version": "1.0.0",
},
],
]

@@ -29,6 +29,7 @@ import { Project } from '../utils/project';
import { buildProjectGraph } from '../utils/projects';
import { installInDir, runScriptInPackageStreaming, yarnWorkspacesInfo } from '../utils/scripts';
import { BootstrapCommand } from './bootstrap';
import { Kibana } from '../utils/kibana';
const mockInstallInDir = installInDir as jest.Mock;
const mockRunScriptInPackageStreaming = runScriptInPackageStreaming as jest.Mock;
@@ -107,6 +108,7 @@ test('handles dependencies of dependencies', async () => {
['bar', bar],
['baz', baz],
]);
const kbn = new Kibana(projects);
const projectGraph = buildProjectGraph(projects);
const logMock = jest.spyOn(console, 'log').mockImplementation(noop);
@@ -115,6 +117,7 @@ test('handles dependencies of dependencies', async () => {
extraArgs: [],
options: {},
rootPath: '',
kbn,
});
expect(mockInstallInDir.mock.calls).toMatchSnapshot('install in dir');
@@ -142,6 +145,7 @@ test('does not run installer if no deps in package', async () => {
['kibana', kibana],
['bar', bar],
]);
const kbn = new Kibana(projects);
const projectGraph = buildProjectGraph(projects);
const logMock = jest.spyOn(console, 'log').mockImplementation(noop);
@@ -150,6 +154,7 @@ test('does not run installer if no deps in package', async () => {
extraArgs: [],
options: {},
rootPath: '',
kbn,
});
expect(mockInstallInDir.mock.calls).toMatchSnapshot('install in dir');
@@ -167,6 +172,7 @@ test('handles "frozen-lockfile"', async () => {
});
const projects = new Map([['kibana', kibana]]);
const kbn = new Kibana(projects);
const projectGraph = buildProjectGraph(projects);
jest.spyOn(console, 'log').mockImplementation(noop);
@@ -177,6 +183,7 @@ test('handles "frozen-lockfile"', async () => {
'frozen-lockfile': true,
},
rootPath: '',
kbn,
});
expect(mockInstallInDir.mock.calls).toMatchSnapshot('install in dir');
@@ -205,6 +212,7 @@ test('calls "kbn:bootstrap" scripts and links executables after installing deps'
['kibana', kibana],
['bar', bar],
]);
const kbn = new Kibana(projects);
const projectGraph = buildProjectGraph(projects);
jest.spyOn(console, 'log').mockImplementation(noop);
@@ -213,6 +221,7 @@ test('calls "kbn:bootstrap" scripts and links executables after installing deps'
extraArgs: [],
options: {},
rootPath: '',
kbn,
});
expect(mockLinkProjectExecutables.mock.calls).toMatchSnapshot('link bins');

@@ -24,12 +24,14 @@ import { log } from '../utils/log';
import { parallelizeBatches } from '../utils/parallelize';
import { topologicallyBatchProjects } from '../utils/projects';
import { ICommand } from './';
import { getAllChecksums } from '../utils/project_checksums';
import { BootstrapCacheFile } from '../utils/bootstrap_cache_file';
export const BootstrapCommand: ICommand = {
description: 'Install dependencies and crosslink projects',
name: 'bootstrap',
async run(projects, projectGraph, { options }) {
async run(projects, projectGraph, { options, kbn }) {
const batchedProjectsByWorkspace = topologicallyBatchProjects(projects, projectGraph, {
batchByWorkspace: true,
});
@@ -65,9 +67,18 @@ export const BootstrapCommand: ICommand = {
* have to, as it will slow down the bootstrapping process.
*/
log.write(chalk.bold('\nLinking executables completed, running `kbn:bootstrap` scripts\n'));
await parallelizeBatches(batchedProjects, async pkg => {
if (pkg.hasScript('kbn:bootstrap')) {
await pkg.runScriptStreaming('kbn:bootstrap');
const checksums = options.cache ? await getAllChecksums(kbn, log) : false;
await parallelizeBatches(batchedProjects, async project => {
if (project.hasScript('kbn:bootstrap')) {
const cacheFile = new BootstrapCacheFile(kbn, project, checksums);
if (cacheFile.isValid()) {
log.success(`[${project.name}] cache up to date`);
} else {
cacheFile.delete();
await project.runScriptStreaming('kbn:bootstrap');
cacheFile.write();
}
}
});

@@ -23,6 +23,7 @@ export interface ICommandConfig {
extraArgs: string[];
options: { [key: string]: any };
rootPath: string;
kbn: Kibana;
}
export interface ICommand {
@@ -36,6 +37,7 @@ import { BootstrapCommand } from './bootstrap';
import { CleanCommand } from './clean';
import { RunCommand } from './run';
import { WatchCommand } from './watch';
import { Kibana } from '../utils/kibana';
export const commands: { [key: string]: ICommand } = {
bootstrap: BootstrapCommand,

@@ -19,18 +19,16 @@
import { resolve } from 'path';
export interface IProjectPathOptions {
'skip-kibana-plugins'?: boolean;
oss?: boolean;
interface Options {
rootPath: string;
skipKibanaPlugins?: boolean;
ossOnly?: boolean;
}
/**
* Returns all the paths where plugins are located
*/
export function getProjectPaths(rootPath: string, options: IProjectPathOptions = {}) {
const skipKibanaPlugins = Boolean(options['skip-kibana-plugins']);
const ossOnly = Boolean(options.oss);
export function getProjectPaths({ rootPath, ossOnly, skipKibanaPlugins }: Options) {
const projectPaths = [rootPath, resolve(rootPath, 'packages/*')];
// This is needed in order to install the dependencies for the declared

@@ -66,7 +66,7 @@ export async function buildProductionProjects({
* is supplied, we omit projects with build.oss in their package.json set to false.
*/
async function getProductionProjects(rootPath: string, onlyOSS?: boolean) {
const projectPaths = getProjectPaths(rootPath, {});
const projectPaths = getProjectPaths({ rootPath });
const projects = await getProjects(rootPath, projectPaths);
const projectsSubset = [projects.get('kibana')!];

@@ -38,7 +38,7 @@ function getExpectedProjectsAndGraph(runMock: any) {
}
let command: ICommand;
let config: ICommandConfig;
let config: Omit<ICommandConfig, 'kbn'>;
beforeEach(() => {
command = {
description: 'test description',

@@ -22,13 +22,13 @@ import indentString from 'indent-string';
import wrapAnsi from 'wrap-ansi';
import { ICommand, ICommandConfig } from './commands';
import { getProjectPaths, IProjectPathOptions } from './config';
import { CliError } from './utils/errors';
import { log } from './utils/log';
import { buildProjectGraph, getProjects } from './utils/projects';
import { buildProjectGraph } from './utils/projects';
import { renderProjectsTree } from './utils/projects_tree';
import { Kibana } from './utils/kibana';
export async function runCommand(command: ICommand, config: ICommandConfig) {
export async function runCommand(command: ICommand, config: Omit<ICommandConfig, 'kbn'>) {
try {
log.write(
chalk.bold(
@@ -36,9 +36,10 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
)
);
const projectPaths = getProjectPaths(config.rootPath, config.options as IProjectPathOptions);
const projects = await getProjects(config.rootPath, projectPaths, {
const kbn = await Kibana.loadFrom(config.rootPath);
const projects = kbn.getFilteredProjects({
skipKibanaPlugins: Boolean(config.options['skip-kibana-plugins']),
ossOnly: Boolean(config.options.oss),
exclude: toArray(config.options.exclude),
include: toArray(config.options.include),
});
@@ -57,7 +58,10 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
log.write(chalk.bold(`Found [${chalk.green(projects.size.toString())}] projects:\n`));
log.write(renderProjectsTree(config.rootPath, projects));
await command.run(projects, projectGraph, config);
await command.run(projects, projectGraph, {
...config,
kbn,
});
} catch (e) {
log.write(chalk.bold.red(`\n[${command.name}] failed:\n`));

@@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Fs from 'fs';
import Path from 'path';
import { ChecksumMap } from './project_checksums';
import { Project } from '../utils/project';
import { Kibana } from '../utils/kibana';
export class BootstrapCacheFile {
private readonly path: string;
private readonly expectedValue: string | undefined;
constructor(kbn: Kibana, project: Project, checksums: ChecksumMap | false) {
this.path = Path.resolve(project.targetLocation, '.bootstrap-cache');
if (!checksums) {
return;
}
const projectAndDepCacheKeys = Array.from(kbn.getProjectAndDeps(project.name).values())
// sort deps by name so that the key is stable
.sort((a, b) => a.name.localeCompare(b.name))
// get the cacheKey for each project, return undefined if the cache key couldn't be determined
.map(p => {
const cacheKey = checksums.get(p.name);
if (cacheKey) {
return `${p.name}:${cacheKey}`;
}
});
// if any of the relevant cache keys are undefined then the projectCacheKey must be too
this.expectedValue = projectAndDepCacheKeys.some(k => !k)
? undefined
: [
`# this is only human readable for debugging, please don't try to parse this`,
...projectAndDepCacheKeys,
].join('\n');
}
isValid() {
if (!this.expectedValue) {
return false;
}
try {
return Fs.readFileSync(this.path, 'utf8') === this.expectedValue;
} catch (error) {
if (error.code === 'ENOENT') {
return false;
}
throw error;
}
}
delete() {
try {
Fs.unlinkSync(this.path);
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
}
}
}
write() {
if (!this.expectedValue) {
return;
}
Fs.mkdirSync(Path.dirname(this.path), { recursive: true });
Fs.writeFileSync(this.path, this.expectedValue);
}
}
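
For reference, the expectedValue assembled above is what ends up in each project's target/.bootstrap-cache file: the debugging header followed by one `name:cacheKey` line per project in the dependency set, sorted by name. A hypothetical example of the file contents for a project like @kbn/pm (the checksums are illustrative only):

# this is only human readable for debugging, please don't try to parse this
@kbn/dev-utils:1f9d2c43b7a8e0f5d6c4b3a29187e6f5d4c3b2a1
@kbn/pm:8a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9b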

@@ -0,0 +1,124 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import multimatch from 'multimatch';
import { ProjectMap, getProjects, includeTransitiveProjects } from './projects';
import { Project } from './project';
import { getProjectPaths } from '../config';
/**
* Helper class for dealing with a set of projects as children of
* the Kibana project. The kbn/pm is currently implemented to be
* more generic, where everything is an operation of generic projects,
* but that leads to exceptions where we need the kibana project and
* do things like `project.get('kibana')!`.
*
* Using this helper we can restructure the generic list of projects
* as a Kibana object which encapsulates all the projects in the
* workspace and knows about the root Kibana project.
*/
export class Kibana {
static async loadFrom(rootPath: string) {
return new Kibana(await getProjects(rootPath, getProjectPaths({ rootPath })));
}
private readonly kibanaProject: Project;
constructor(private readonly allWorkspaceProjects: ProjectMap) {
const kibanaProject = allWorkspaceProjects.get('kibana');
if (!kibanaProject) {
throw new TypeError(
'Unable to create Kibana object without all projects, including the Kibana project.'
);
}
this.kibanaProject = kibanaProject;
}
/** make an absolute path by resolving subPath relative to the kibana repo */
getAbsolute(...subPath: string[]) {
return Path.resolve(this.kibanaProject.path, ...subPath);
}
/** convert an absolute path to a relative path, relative to the kibana repo */
getRelative(absolute: string) {
return Path.relative(this.kibanaProject.path, absolute);
}
/** get a copy of the map of all projects in the kibana workspace */
getAllProjects() {
return new Map(this.allWorkspaceProjects);
}
/** determine if a project with the given name exists */
hasProject(name: string) {
return this.allWorkspaceProjects.has(name);
}
/** get a specific project, throws if the name is not known (use hasProject() first) */
getProject(name: string) {
const project = this.allWorkspaceProjects.get(name);
if (!project) {
throw new Error(`No package with name "${name}" in the workspace`);
}
return project;
}
/** get a project and all of the projects it depends on in a ProjectMap */
getProjectAndDeps(name: string) {
const project = this.getProject(name);
return includeTransitiveProjects([project], this.allWorkspaceProjects);
}
/** filter the projects to just those matching certain paths/include/exclude tags */
getFilteredProjects(options: {
skipKibanaPlugins: boolean;
ossOnly: boolean;
exclude: string[];
include: string[];
}) {
const allProjects = this.getAllProjects();
const filteredProjects: ProjectMap = new Map();
const pkgJsonPaths = Array.from(allProjects.values()).map(p => p.packageJsonLocation);
const filteredPkgJsonGlobs = getProjectPaths({
...options,
rootPath: this.kibanaProject.path,
}).map(g => Path.resolve(g, 'package.json'));
const matchingPkgJsonPaths = multimatch(pkgJsonPaths, filteredPkgJsonGlobs);
for (const project of allProjects.values()) {
const pathMatches = matchingPkgJsonPaths.includes(project.packageJsonLocation);
const notExcluded = !options.exclude.includes(project.name);
const isIncluded = !options.include.length || options.include.includes(project.name);
if (pathMatches && notExcluded && isIncluded) {
filteredProjects.set(project.name, project);
}
}
return filteredProjects;
}
}
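
A short usage sketch for this helper, assuming it is driven from a command the way run.ts does above (the 'foo' project name is hypothetical):

import { Kibana } from './kibana';

export async function example(rootPath: string) {
  // load every project in the workspace, rooted at the kibana project
  const kbn = await Kibana.loadFrom(rootPath);

  // narrow the workspace the same way the CLI filter options do
  const projects = kbn.getFilteredProjects({
    skipKibanaPlugins: false,
    ossOnly: false,
    exclude: [],
    include: [],
  });

  // resolve a project plus everything it depends on inside the workspace
  if (kbn.hasProject('foo')) {
    const withDeps = kbn.getProjectAndDeps('foo');
    console.log(Array.from(withDeps.keys()));
  }

  return projects;
}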

@@ -17,7 +17,18 @@
* under the License.
*/
export const log = {
import { ToolingLog, ToolingLogCollectingWriter } from '@kbn/dev-utils';
class Log extends ToolingLog {
testWriter?: ToolingLogCollectingWriter;
constructor() {
super({
level: 'info',
writeTo: process.stdout,
});
}
/**
* Log something to the console. Ideally we would use a real logger in
* kbn-pm, but that's a pretty big change for now.
@@ -26,5 +37,7 @@ export const log = {
write(...args: any[]) {
// eslint-disable-next-line no-console
console.log(...args);
},
};
}
}
export const log = new Log();

@@ -19,7 +19,7 @@
import chalk from 'chalk';
import fs from 'fs';
import { relative, resolve as resolvePath } from 'path';
import Path from 'path';
import { inspect } from 'util';
import { CliError } from './errors';
@@ -54,15 +54,27 @@ export class Project {
return new Project(pkgJson, path);
}
/** parsed package.json */
public readonly json: IPackageJson;
/** absolute path to the package.json file in the project */
public readonly packageJsonLocation: string;
/** absolute path to the node_modules in the project (might not actually exist) */
public readonly nodeModulesLocation: string;
/** absolute path to the target directory in the project (might not actually exist) */
public readonly targetLocation: string;
/** absolute path to the directory containing the project */
public readonly path: string;
/** the version of the project */
public readonly version: string;
/** merged set of dependencies of the project, [name => version range] */
public readonly allDependencies: IPackageDependencies;
/** regular dependencies of the project, [name => version range] */
public readonly productionDependencies: IPackageDependencies;
/** development dependencies of the project, [name => version range] */
public readonly devDependencies: IPackageDependencies;
/** scripts defined in the package.json file for the project [name => body] */
public readonly scripts: IPackageScripts;
public isWorkspaceRoot = false;
public isWorkspaceProject = false;
@@ -70,10 +82,11 @@
this.json = Object.freeze(packageJson);
this.path = projectPath;
this.packageJsonLocation = resolvePath(this.path, 'package.json');
this.nodeModulesLocation = resolvePath(this.path, 'node_modules');
this.targetLocation = resolvePath(this.path, 'target');
this.packageJsonLocation = Path.resolve(this.path, 'package.json');
this.nodeModulesLocation = Path.resolve(this.path, 'node_modules');
this.targetLocation = Path.resolve(this.path, 'target');
this.version = this.json.version;
this.productionDependencies = this.json.dependencies || {};
this.devDependencies = this.json.devDependencies || {};
this.allDependencies = {
@@ -96,7 +109,7 @@
if (dependentProjectIsInWorkspace) {
expectedVersionInPackageJson = project.json.version;
} else {
const relativePathToProject = normalizePath(relative(this.path, project.path));
const relativePathToProject = normalizePath(Path.relative(this.path, project.path));
expectedVersionInPackageJson = `link:${relativePathToProject}`;
}
@@ -134,7 +147,7 @@
* instead of everything located in the project directory.
*/
public getIntermediateBuildDirectory() {
return resolvePath(this.path, this.getBuildConfig().intermediateBuildDirectory || '.');
return Path.resolve(this.path, this.getBuildConfig().intermediateBuildDirectory || '.');
}
public getCleanConfig(): CleanConfig {
@@ -154,14 +167,14 @@
if (typeof raw === 'string') {
return {
[this.name]: resolvePath(this.path, raw),
[this.name]: Path.resolve(this.path, raw),
};
}
if (typeof raw === 'object') {
const binsConfig: { [k: string]: string } = {};
for (const binName of Object.keys(raw)) {
binsConfig[binName] = resolvePath(this.path, raw[binName]);
binsConfig[binName] = Path.resolve(this.path, raw[binName]);
}
return binsConfig;
}
@@ -221,7 +234,7 @@
unusedWorkspaces.forEach(name => {
const { dependencies, devDependencies } = this.json;
const nodeModulesPath = resolvePath(this.nodeModulesLocation, name);
const nodeModulesPath = Path.resolve(this.nodeModulesLocation, name);
const isDependency = dependencies && dependencies.hasOwnProperty(name);
const isDevDependency = devDependencies && devDependencies.hasOwnProperty(name);

@@ -0,0 +1,257 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Fs from 'fs';
import Crypto from 'crypto';
import { promisify } from 'util';
import execa from 'execa';
import { ToolingLog } from '@kbn/dev-utils';
import { readYarnLock, YarnLock } from './yarn_lock';
import { ProjectMap } from '../utils/projects';
import { Project } from '../utils/project';
import { Kibana } from '../utils/kibana';
export type ChecksumMap = Map<string, string | undefined>;
/** map of [repo relative path to changed file, type of change] */
type Changes = Map<string, 'modified' | 'deleted' | 'invalid'>;
const statAsync = promisify(Fs.stat);
const projectBySpecificitySorter = (a: Project, b: Project) => b.path.length - a.path.length;
/** Get the changed files for a set of projects */
async function getChangesForProjects(projects: ProjectMap, kbn: Kibana, log: ToolingLog) {
log.verbose('getting changed files');
const { stdout } = await execa(
'git',
['ls-files', '-dmt', '--', ...Array.from(projects.values()).map(p => p.path)],
{
cwd: kbn.getAbsolute(),
}
);
const output = stdout.trim();
const unassignedChanges: Changes = new Map();
if (output) {
for (const line of output.split('\n')) {
const [tag, ...pathParts] = line.trim().split(' ');
const path = pathParts.join(' ');
switch (tag) {
case 'M':
case 'C':
// for some reason ls-files returns deleted files as both deleted
// and modified, so make sure not to overwrite changes already
// tracked as "deleted"
if (unassignedChanges.get(path) !== 'deleted') {
unassignedChanges.set(path, 'modified');
}
break;
case 'R':
unassignedChanges.set(path, 'deleted');
break;
case 'H':
case 'S':
case 'K':
case '?':
default:
log.warning(`unexpected modification status "${tag}" for ${path}, please report this!`);
unassignedChanges.set(path, 'invalid');
break;
}
}
}
const sortedRelevantProjects = Array.from(projects.values()).sort(projectBySpecificitySorter);
const changesByProject = new Map<Project, Changes>();
for (const project of sortedRelevantProjects) {
const ownChanges: Changes = new Map();
const prefix = kbn.getRelative(project.path);
for (const [path, type] of unassignedChanges) {
if (path.startsWith(prefix)) {
ownChanges.set(path, type);
unassignedChanges.delete(path);
}
}
log.verbose(`[${project.name}] found ${ownChanges.size} changes`);
changesByProject.set(project, ownChanges);
}
if (unassignedChanges.size) {
throw new Error(
`unable to assign all change paths to a project: ${JSON.stringify(
Array.from(unassignedChanges.entries())
)}`
);
}
return changesByProject;
}
/** Get the latest commit sha for a project */
async function getLatestSha(project: Project, kbn: Kibana) {
const { stdout } = await execa(
'git',
['log', '-n', '1', '--pretty=format:%H', '--', project.path],
{
cwd: kbn.getAbsolute(),
}
);
return stdout.trim() || undefined;
}
/**
* Get a list of the absolute dependencies of this project, as resolved
* in the yarn.lock file. Does not include other projects in the workspace
* or their dependencies.
*/
function resolveDepsForProject(project: Project, yarnLock: YarnLock, kbn: Kibana, log: ToolingLog) {
/** map of [name@range, name@resolved] */
const resolved = new Map<string, string>();
const queue: Array<[string, string]> = Object.entries(project.allDependencies);
while (queue.length) {
const [name, versionRange] = queue.shift()!;
const req = `${name}@${versionRange}`;
if (resolved.has(req)) {
continue;
}
if (!kbn.hasProject(name)) {
const pkg = yarnLock[req];
if (!pkg) {
log.warning(
'yarn.lock file is out of date, please run `yarn kbn bootstrap` to re-enable caching'
);
return;
}
const res = `${name}@${pkg.version}`;
resolved.set(req, res);
const allDepsEntries = [
...Object.entries(pkg.dependencies || {}),
...Object.entries(pkg.optionalDependencies || {}),
];
for (const [childName, childVersionRange] of allDepsEntries) {
queue.push([childName, childVersionRange]);
}
}
}
return Array.from(resolved.values()).sort((a, b) => a.localeCompare(b));
}
/**
* Get the checksum for a specific project in the workspace
*/
async function getChecksum(
project: Project,
changes: Changes,
yarnLock: YarnLock,
kbn: Kibana,
log: ToolingLog
) {
const sha = await getLatestSha(project, kbn);
if (sha) {
log.verbose(`[${project.name}] local sha:`, sha);
}
if (Array.from(changes.values()).includes('invalid')) {
log.warning(`[${project.name}] unable to determine local changes, caching disabled`);
return;
}
const changesSummary = await Promise.all(
Array.from(changes)
.sort((a, b) => a[0].localeCompare(b[0]))
.map(async ([path, type]) => {
if (type === 'deleted') {
return `${path}:deleted`;
}
const stats = await statAsync(kbn.getAbsolute(path));
log.verbose(`[${project.name}] modified time ${stats.mtimeMs} for ${path}`);
return `${path}:${stats.mtimeMs}`;
})
);
const deps = await resolveDepsForProject(project, yarnLock, kbn, log);
if (!deps) {
return;
}
log.verbose(`[${project.name}] resolved %d deps`, deps.length);
const checksum = JSON.stringify(
{
sha,
changes: changesSummary,
deps,
},
null,
2
);
if (process.env.BOOTSTRAP_CACHE_DEBUG_CHECKSUM) {
return checksum;
}
const hash = Crypto.createHash('sha1');
hash.update(checksum);
return hash.digest('hex');
}
/**
* Calculate checksums for all projects in the workspace based on
* - last git commit to project directory
* - un-committed changes
* - resolved dependencies from yarn.lock referenced by project package.json
*/
export async function getAllChecksums(kbn: Kibana, log: ToolingLog) {
const projects = kbn.getAllProjects();
const changesByProject = await getChangesForProjects(projects, kbn, log);
const yarnLock = await readYarnLock(kbn);
/** map of [project.name, cacheKey] */
const cacheKeys: ChecksumMap = new Map();
await Promise.all(
Array.from(projects.values()).map(async project => {
cacheKeys.set(
project.name,
await getChecksum(project, changesByProject.get(project)!, yarnLock, kbn, log)
);
})
);
return cacheKeys;
}
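
Put together, the intermediate document hashed for a single project (and returned verbatim when the BOOTSTRAP_CACHE_DEBUG_CHECKSUM environment variable is set) combines the three inputs listed above. A hypothetical example with illustrative sha, path, and mtime values; the deps entries reuse versions from the yarn.lock changes in this commit:

{
  "sha": "0a1b2c3d4e5f60718293a4b5c6d7e8f901234567",
  "changes": [
    "packages/kbn-pm/src/index.ts:1578068138000"
  ],
  "deps": [
    "load-json-file@6.2.0",
    "strip-bom@4.0.0"
  ]
}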

@@ -80,7 +80,7 @@ describe('#getProjects', () => {
});
test('includes additional projects in package.json', async () => {
const projectPaths = getProjectPaths(rootPath, {});
const projectPaths = getProjectPaths({ rootPath });
const projects = await getProjects(rootPath, projectPaths);
const expectedProjects = [
@@ -100,7 +100,7 @@
describe('with exclude/include filters', () => {
let projectPaths: string[];
beforeEach(() => {
projectPaths = getProjectPaths(rootPath, {});
projectPaths = getProjectPaths({ rootPath });
});
test('excludes projects specified in `exclude` filter', async () => {

@@ -27,6 +27,7 @@ import { workspacePackagePaths } from './workspaces';
const glob = promisify(globSync);
/** a Map of project names to Project instances */
export type ProjectMap = Map<string, Project>;
export type ProjectGraph = Map<string, Project[]>;
export interface IProjectsOptions {
@@ -198,7 +199,7 @@
allProjects: ProjectMap,
{ onlyProductionDependencies = false } = {}
) {
const dependentProjects: ProjectMap = new Map();
const projectsWithDependents: ProjectMap = new Map();
// the current list of packages we are expanding using breadth-first-search
const toProcess = [...subsetOfProjects];
@@ -216,8 +217,8 @@
}
});
dependentProjects.set(project.name, project);
projectsWithDependents.set(project.name, project);
}
return dependentProjects;
return projectsWithDependents;
}

@@ -43,7 +43,7 @@ test('handles projects outside root folder', async () => {
});
test('handles projects within projects outside root folder', async () => {
const projectPaths = getProjectPaths(rootPath, {});
const projectPaths = getProjectPaths({ rootPath });
const projects = await getProjects(rootPath, projectPaths);
const tree = await renderProjectsTree(rootPath, projects);

@@ -56,7 +56,7 @@ export async function workspacePackagePaths(rootPath: string): Promise<string[]>
}
export async function copyWorkspacePackages(rootPath: string): Promise<void> {
const projectPaths = getProjectPaths(rootPath, {});
const projectPaths = getProjectPaths({ rootPath });
const projects = await getProjects(rootPath, projectPaths);
for (const project of projects.values()) {

@@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// @ts-ignore published types are worthless
import { parse as parseLockfile } from '@yarnpkg/lockfile';
import { readFile } from '../utils/fs';
import { Kibana } from '../utils/kibana';
export interface YarnLock {
/** a simple map of name@versionRange keys to metadata about a package */
[key: string]: {
/** resolved version installed for this package */
version: string;
/** resolved URL for this package */
resolved: string;
/** yarn calculated integrity value for this package */
integrity: string;
dependencies?: {
/** name => versionRange dependencies listed in package's manifest */
[key: string]: string;
};
optionalDependencies?: {
/** name => versionRange dependencies listed in package's manifest */
[key: string]: string;
};
};
}
export async function readYarnLock(kbn: Kibana): Promise<YarnLock> {
try {
const contents = await readFile(kbn.getAbsolute('yarn.lock'), 'utf8');
const yarnLock = parseLockfile(contents);
if (yarnLock.type === 'success') {
return yarnLock.object;
}
throw new Error('unable to read yarn.lock file, please run `yarn kbn bootstrap`');
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
}
}
return {};
}
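
As a concrete illustration, the load-json-file@^6.2.0 entry added to yarn.lock later in this commit would parse into roughly the following YarnLock record:

const example: YarnLock = {
  'load-json-file@^6.2.0': {
    version: '6.2.0',
    resolved:
      'https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1',
    integrity:
      'sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==',
    dependencies: {
      'graceful-fs': '^4.1.15',
      'parse-json': '^5.0.0',
      'strip-bom': '^4.0.0',
      'type-fest': '^0.6.0',
    },
  },
};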

@@ -1,11 +1,10 @@
{
"extends": "../../tsconfig.json",
"exclude": [
"dist"
],
"include": [
"./src/**/*.ts",
"./dist/*.d.ts",
],
"exclude": [],
"compilerOptions": {
"types": [
"jest",

@@ -36,7 +36,7 @@ expect.addSnapshotSerializer({
jest.mock('fs', () => {
const realFs = jest.requireActual('fs');
return {
readFile: realFs.read,
...realFs,
writeFile: (...args: any[]) => {
setTimeout(args[args.length - 1], 0);
},

@@ -31,7 +31,6 @@
"@kbn/es": "1.0.0",
"@kbn/expect": "1.0.0",
"@kbn/plugin-helpers": "9.0.2",
"@kbn/pm": "1.0.0",
"@kbn/test": "1.0.0",
"@kbn/utility-types": "1.0.0",
"@mattapperson/slapshot": "1.4.0",

@@ -4710,6 +4710,11 @@
resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d"
integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==
"@yarnpkg/lockfile@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31"
integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==
JSONStream@1.3.5:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
@@ -18361,6 +18366,16 @@ load-json-file@^4.0.0:
pify "^3.0.0"
strip-bom "^3.0.0"
load-json-file@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1"
integrity sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==
dependencies:
graceful-fs "^4.1.15"
parse-json "^5.0.0"
strip-bom "^4.0.0"
type-fest "^0.6.0"
load-source-map@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/load-source-map/-/load-source-map-1.0.0.tgz#318f49905ce8a709dfb7cc3f16f3efe3bcf1dd05"
@@ -26894,6 +26909,11 @@ strip-bom@^3.0.0:
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
strip-bom@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878"
integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==
strip-dirs@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/strip-dirs/-/strip-dirs-2.1.0.tgz#4987736264fc344cf20f6c34aca9d13d1d4ed6c5"