Implement recursive plugin discovery (#68811) (#70286)

* implements recursive scanning in plugin discovery system

* update optimizer to find plugins in sub-directories

* update renovate

* update optimizer IT snapshot

* refactor processPluginSearchPaths$ and add test for inaccessible manifest

* add symlink test

* add maxDepth to the optimizer

* adapt mockFs definitions

* remove `flat` usage
# Conflicts:
#	renovate.json5
Pierre Gayvallet 2020-06-30 09:33:43 +02:00 committed by GitHub
parent fd47408c4b
commit 91bf50b5d4
11 changed files with 471 additions and 253 deletions


@@ -356,6 +356,7 @@
"@types/markdown-it": "^0.0.7",
"@types/minimatch": "^2.0.29",
"@types/mocha": "^7.0.2",
"@types/mock-fs": "^4.10.0",
"@types/moment-timezone": "^0.5.12",
"@types/mustache": "^0.8.31",
"@types/node": ">=10.17.17 <10.20.0",
@@ -468,6 +469,7 @@
"listr": "^0.14.1",
"load-grunt-config": "^3.0.1",
"mocha": "^7.1.1",
"mock-fs": "^4.12.0",
"mock-http-server": "1.3.0",
"ms-chromium-edge-driver": "^0.2.3",
"multistream": "^2.1.1",


@@ -43,18 +43,18 @@ OptimizerConfig {
"id": "bar",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/baz,
"extraPublicDirs": Array [],
"id": "baz",
"isUiPlugin": false,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/foo,
"extraPublicDirs": Array [],
"id": "foo",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/nested/baz,
"extraPublicDirs": Array [],
"id": "baz",
"isUiPlugin": false,
},
],
"profileWebpack": false,
"repoRoot": <absolute path>/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo,


@@ -41,18 +41,18 @@ it('parses kibana.json files of plugins found in pluginDirs', () => {
"id": "bar",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/baz,
"extraPublicDirs": Array [],
"id": "baz",
"isUiPlugin": false,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/foo,
"extraPublicDirs": Array [],
"id": "foo",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/nested/baz,
"extraPublicDirs": Array [],
"id": "baz",
"isUiPlugin": false,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/test_plugins/test_baz,
"extraPublicDirs": Array [],


@@ -37,7 +37,7 @@ export function findKibanaPlatformPlugins(scanDirs: string[], paths: string[]) {
.sync(
Array.from(
new Set([
...scanDirs.map((dir) => `${dir}/*/kibana.json`),
...scanDirs.map(nestedScanDirPaths).reduce((dirs, current) => [...dirs, ...current], []),
...paths.map((path) => `${path}/kibana.json`),
])
),
@@ -51,6 +51,17 @@ export function findKibanaPlatformPlugins(scanDirs: string[], paths: string[]) {
);
}
function nestedScanDirPaths(dir: string): string[] {
// down to 5 levels max
return [
`${dir}/*/kibana.json`,
`${dir}/*/*/kibana.json`,
`${dir}/*/*/*/kibana.json`,
`${dir}/*/*/*/*/kibana.json`,
`${dir}/*/*/*/*/*/kibana.json`,
];
}
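For illustration only, the same hand-written expansion could be generated with a small helper; this is a hypothetical sketch (the helper name and loop are not part of this change — only the resulting glob patterns, which the code above hard-codes):

// Hypothetical equivalent of the hand-written list above: builds
// `<dir>/*/kibana.json` through `<dir>/*/*/*/*/*/kibana.json`.
function nestedScanDirPatterns(dir: string, maxDepth = 5): string[] {
  const patterns: string[] = [];
  for (let depth = 1; depth <= maxDepth; depth++) {
    patterns.push(`${dir}/${Array(depth).fill('*').join('/')}/kibana.json`);
  }
  return patterns;
}

// nestedScanDirPatterns('plugins')[0] === 'plugins/*/kibana.json'
// nestedScanDirPatterns('plugins')[4] === 'plugins/*/*/*/*/*/kibana.json'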
function readKibanaPlatformPlugin(manifestPath: string): KibanaPlatformPlugin {
if (!Path.isAbsolute(manifestPath)) {
throw new TypeError('expected new platform manifest path to be absolute');


@@ -17,14 +17,5 @@
* under the License.
*/
export const mockReaddir = jest.fn();
export const mockReadFile = jest.fn();
export const mockStat = jest.fn();
jest.mock('fs', () => ({
readdir: mockReaddir,
readFile: mockReadFile,
stat: mockStat,
}));
export const mockPackage = new Proxy({ raw: {} as any }, { get: (obj, prop) => obj.raw[prop] });
jest.mock('../../../../../package.json', () => mockPackage);


@@ -17,251 +17,384 @@
* under the License.
*/
import { mockPackage, mockReaddir, mockReadFile, mockStat } from './plugins_discovery.test.mocks';
import { rawConfigServiceMock } from '../../config/raw_config_service.mock';
import { mockPackage } from './plugins_discovery.test.mocks';
import mockFs from 'mock-fs';
import { loggingSystemMock } from '../../logging/logging_system.mock';
import { resolve } from 'path';
import { first, map, toArray } from 'rxjs/operators';
import { resolve } from 'path';
import { ConfigService, Env } from '../../config';
import { getEnvOptions } from '../../config/__mocks__/env';
import { PluginWrapper } from '../plugin';
import { PluginsConfig, PluginsConfigType, config } from '../plugins_config';
import { discover } from './plugins_discovery';
import { rawConfigServiceMock } from '../../config/raw_config_service.mock';
import { CoreContext } from '../../core_context';
const TEST_PLUGIN_SEARCH_PATHS = {
nonEmptySrcPlugins: resolve(process.cwd(), 'src', 'plugins'),
emptyPlugins: resolve(process.cwd(), 'plugins'),
nonExistentKibanaExtra: resolve(process.cwd(), '..', 'kibana-extra'),
const KIBANA_ROOT = process.cwd();
const Plugins = {
invalid: () => ({
'kibana.json': 'not-json',
}),
incomplete: () => ({
'kibana.json': JSON.stringify({ version: '1' }),
}),
incompatible: () => ({
'kibana.json': JSON.stringify({ id: 'plugin', version: '1' }),
}),
missingManifest: () => ({}),
inaccessibleManifest: () => ({
'kibana.json': mockFs.file({
mode: 0, // 0000,
content: JSON.stringify({ id: 'plugin', version: '1' }),
}),
}),
valid: (id: string) => ({
'kibana.json': JSON.stringify({
id,
configPath: ['plugins', id],
version: '1',
kibanaVersion: '1.2.3',
requiredPlugins: [],
optionalPlugins: [],
server: true,
}),
}),
};
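As a rough sketch of how mock-fs consumes these fixtures (the paths below are illustrative, not from this change): nested plain objects become directories, string values become file contents, and `mockFs.file()` lets a fixture set attributes such as `mode`:

import mockFs from 'mock-fs';

// Illustrative only: mount a valid plugin and an unreadable manifest in the in-memory fs.
mockFs(
  {
    '/kibana/src/plugins/my_plugin': Plugins.valid('myPlugin'),
    '/kibana/src/plugins/locked_plugin': Plugins.inaccessibleManifest(),
  },
  { createCwd: false }
);

// ... exercise the discovery code against the mocked filesystem ...

mockFs.restore(); // always restore the real fs afterwards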
const TEST_EXTRA_PLUGIN_PATH = resolve(process.cwd(), 'my-extra-plugin');
const logger = loggingSystemMock.create();
const packageMock = {
branch: 'master',
version: '1.2.3',
build: {
distributable: true,
number: 1,
sha: '',
},
};
beforeEach(() => {
mockReaddir.mockImplementation((path, cb) => {
if (path === TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins) {
cb(null, [
'1',
'2-no-manifest',
'3',
'4-incomplete-manifest',
'5-invalid-manifest',
'6',
'7-non-dir',
'8-incompatible-manifest',
'9-inaccessible-dir',
]);
} else if (path === TEST_PLUGIN_SEARCH_PATHS.nonExistentKibanaExtra) {
cb(new Error('ENOENT'));
} else {
cb(null, []);
}
const manifestPath = (...pluginPath: string[]) =>
resolve(KIBANA_ROOT, 'src', 'plugins', ...pluginPath, 'kibana.json');
describe('plugins discovery system', () => {
let logger: ReturnType<typeof loggingSystemMock.create>;
let env: Env;
let configService: ConfigService;
let pluginConfig: PluginsConfigType;
let coreContext: CoreContext;
beforeEach(async () => {
logger = loggingSystemMock.create();
mockPackage.raw = packageMock;
env = Env.createDefault(
getEnvOptions({
cliArgs: { envName: 'development' },
})
);
configService = new ConfigService(
rawConfigServiceMock.create({ rawConfig: { plugins: { paths: [] } } }),
env,
logger
);
await configService.setSchema(config.path, config.schema);
coreContext = {
coreId: Symbol(),
configService,
env,
logger,
};
pluginConfig = await configService
.atPath<PluginsConfigType>('plugins')
.pipe(first())
.toPromise();
// jest relies on the filesystem to get sourcemaps when using console.log
// which breaks with the mocked FS, see https://github.com/tschaub/mock-fs/issues/234
// hijacking logging to process.stdout as a workaround for this suite.
jest.spyOn(console, 'log').mockImplementation((...args) => {
process.stdout.write(args + '\n');
});
});
mockStat.mockImplementation((path, cb) => {
if (path.includes('9-inaccessible-dir')) {
cb(new Error(`ENOENT (disappeared between "readdir" and "stat").`));
} else {
cb(null, { isDirectory: () => !path.includes('non-dir') });
}
afterEach(() => {
mockFs.restore();
// restore the console.log behavior
jest.restoreAllMocks();
});
mockReadFile.mockImplementation((path, cb) => {
if (path.includes('no-manifest')) {
cb(new Error('ENOENT'));
} else if (path.includes('invalid-manifest')) {
cb(null, Buffer.from('not-json'));
} else if (path.includes('incomplete-manifest')) {
cb(null, Buffer.from(JSON.stringify({ version: '1' })));
} else if (path.includes('incompatible-manifest')) {
cb(null, Buffer.from(JSON.stringify({ id: 'plugin', version: '1' })));
} else {
cb(
null,
Buffer.from(
JSON.stringify({
id: 'plugin',
configPath: ['core', 'config'],
version: '1',
kibanaVersion: '1.2.3',
requiredPlugins: ['a', 'b'],
optionalPlugins: ['c', 'd'],
server: true,
})
)
);
}
});
});
it('discovers plugins in the search locations', async () => {
const { plugin$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
afterEach(() => {
jest.clearAllMocks();
});
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/plugin_a`]: Plugins.valid('pluginA'),
[`${KIBANA_ROOT}/plugins/plugin_b`]: Plugins.valid('pluginB'),
[`${KIBANA_ROOT}/x-pack/plugins/plugin_c`]: Plugins.valid('pluginC'),
},
{ createCwd: false }
);
test('properly iterates through plugin search locations', async () => {
mockPackage.raw = {
branch: 'master',
version: '1.2.3',
build: {
distributable: true,
number: 1,
sha: '',
},
};
const plugins = await plugin$.pipe(toArray()).toPromise();
const pluginNames = plugins.map((plugin) => plugin.name);
const env = Env.createDefault(
getEnvOptions({
cliArgs: { envName: 'development' },
})
);
const configService = new ConfigService(
rawConfigServiceMock.create({ rawConfig: { plugins: { paths: [TEST_EXTRA_PLUGIN_PATH] } } }),
env,
logger
);
await configService.setSchema(config.path, config.schema);
const rawConfig = await configService
.atPath<PluginsConfigType>('plugins')
.pipe(first())
.toPromise();
const { plugin$, error$ } = discover(new PluginsConfig(rawConfig, env), {
coreId: Symbol(),
configService,
env,
logger,
expect(pluginNames).toHaveLength(3);
expect(pluginNames).toEqual(expect.arrayContaining(['pluginA', 'pluginB', 'pluginC']));
});
const plugins = await plugin$.pipe(toArray()).toPromise();
expect(plugins).toHaveLength(4);
it('returns errors when the manifest is invalid or incompatible', async () => {
const { plugin$, error$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
for (const path of [
resolve(TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins, '1'),
resolve(TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins, '3'),
resolve(TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins, '6'),
TEST_EXTRA_PLUGIN_PATH,
]) {
const discoveredPlugin = plugins.find((plugin) => plugin.path === path)!;
expect(discoveredPlugin).toBeInstanceOf(PluginWrapper);
expect(discoveredPlugin.configPath).toEqual(['core', 'config']);
expect(discoveredPlugin.requiredPlugins).toEqual(['a', 'b']);
expect(discoveredPlugin.optionalPlugins).toEqual(['c', 'd']);
}
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/plugin_a`]: Plugins.invalid(),
[`${KIBANA_ROOT}/src/plugins/plugin_b`]: Plugins.incomplete(),
[`${KIBANA_ROOT}/src/plugins/plugin_c`]: Plugins.incompatible(),
[`${KIBANA_ROOT}/src/plugins/plugin_ad`]: Plugins.missingManifest(),
},
{ createCwd: false }
);
await expect(
error$
const plugins = await plugin$.pipe(toArray()).toPromise();
expect(plugins).toHaveLength(0);
const errors = await error$
.pipe(
map((error) => error.toString()),
toArray()
)
.toPromise()
).resolves.toEqual([
`Error: ENOENT (disappeared between "readdir" and "stat"). (invalid-plugin-path, ${resolve(
TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins,
'9-inaccessible-dir'
)})`,
`Error: ENOENT (invalid-search-path, ${TEST_PLUGIN_SEARCH_PATHS.nonExistentKibanaExtra})`,
`Error: ENOENT (missing-manifest, ${resolve(
TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins,
'2-no-manifest',
'kibana.json'
)})`,
`Error: Plugin manifest must contain an "id" property. (invalid-manifest, ${resolve(
TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins,
'4-incomplete-manifest',
'kibana.json'
)})`,
`Error: Unexpected token o in JSON at position 1 (invalid-manifest, ${resolve(
TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins,
'5-invalid-manifest',
'kibana.json'
)})`,
`Error: Plugin "plugin" is only compatible with Kibana version "1", but used Kibana version is "1.2.3". (incompatible-version, ${resolve(
TEST_PLUGIN_SEARCH_PATHS.nonEmptySrcPlugins,
'8-incompatible-manifest',
'kibana.json'
)})`,
]);
});
.toPromise();
test('logs a warning about --plugin-path when used in development', async () => {
mockPackage.raw = {
branch: 'master',
version: '1.2.3',
build: {
distributable: true,
number: 1,
sha: '',
},
};
const env = Env.createDefault(
getEnvOptions({
cliArgs: { dev: false, envName: 'development' },
})
);
const configService = new ConfigService(
rawConfigServiceMock.create({ rawConfig: { plugins: { paths: [TEST_EXTRA_PLUGIN_PATH] } } }),
env,
logger
);
await configService.setSchema(config.path, config.schema);
const rawConfig = await configService
.atPath<PluginsConfigType>('plugins')
.pipe(first())
.toPromise();
discover(new PluginsConfig(rawConfig, env), {
coreId: Symbol(),
configService,
env,
logger,
expect(errors).toEqual(
expect.arrayContaining([
`Error: Unexpected token o in JSON at position 1 (invalid-manifest, ${manifestPath(
'plugin_a'
)})`,
`Error: Plugin manifest must contain an "id" property. (invalid-manifest, ${manifestPath(
'plugin_b'
)})`,
`Error: Plugin "plugin" is only compatible with Kibana version "1", but used Kibana version is "1.2.3". (incompatible-version, ${manifestPath(
'plugin_c'
)})`,
])
);
});
expect(loggingSystemMock.collect(logger).warn).toEqual([
[
`Explicit plugin paths [${TEST_EXTRA_PLUGIN_PATH}] should only be used in development. Relative imports may not work properly in production.`,
],
]);
});
it('returns errors when the plugin search path is not accessible', async () => {
const { plugin$, error$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
test('does not log a warning about --plugin-path when used in production', async () => {
mockPackage.raw = {
branch: 'master',
version: '1.2.3',
build: {
distributable: true,
number: 1,
sha: '',
},
};
mockFs(
{
[`${KIBANA_ROOT}/src/plugins`]: mockFs.directory({
mode: 0, // 0000
items: {
plugin_a: Plugins.valid('pluginA'),
},
}),
},
{ createCwd: false }
);
const env = Env.createDefault(
getEnvOptions({
cliArgs: { dev: false, envName: 'production' },
})
);
const configService = new ConfigService(
rawConfigServiceMock.create({ rawConfig: { plugins: { paths: [TEST_EXTRA_PLUGIN_PATH] } } }),
env,
logger
);
await configService.setSchema(config.path, config.schema);
const plugins = await plugin$.pipe(toArray()).toPromise();
expect(plugins).toHaveLength(0);
const rawConfig = await configService
.atPath<PluginsConfigType>('plugins')
.pipe(first())
.toPromise();
const errors = await error$
.pipe(
map((error) => error.toString()),
toArray()
)
.toPromise();
discover(new PluginsConfig(rawConfig, env), {
coreId: Symbol(),
configService,
env,
logger,
const srcPluginsPath = resolve(KIBANA_ROOT, 'src', 'plugins');
const xpackPluginsPath = resolve(KIBANA_ROOT, 'x-pack', 'plugins');
expect(errors).toEqual(
expect.arrayContaining([
`Error: EACCES, permission denied '${srcPluginsPath}' (invalid-search-path, ${srcPluginsPath})`,
`Error: ENOENT, no such file or directory '${xpackPluginsPath}' (invalid-search-path, ${xpackPluginsPath})`,
])
);
});
expect(loggingSystemMock.collect(logger).warn).toEqual([]);
it('returns an error when the manifest file is not accessible', async () => {
const { plugin$, error$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/plugin_a`]: {
...Plugins.inaccessibleManifest(),
nested_plugin: Plugins.valid('nestedPlugin'),
},
},
{ createCwd: false }
);
const plugins = await plugin$.pipe(toArray()).toPromise();
expect(plugins).toHaveLength(0);
const errors = await error$
.pipe(
map((error) => error.toString()),
toArray()
)
.toPromise();
const errorPath = manifestPath('plugin_a');
expect(errors).toEqual(
expect.arrayContaining([
`Error: EACCES, permission denied '${errorPath}' (missing-manifest, ${errorPath})`,
])
);
});
it('discovers plugins in nested directories', async () => {
const { plugin$, error$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/plugin_a`]: Plugins.valid('pluginA'),
[`${KIBANA_ROOT}/src/plugins/sub1/plugin_b`]: Plugins.valid('pluginB'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/plugin_c`]: Plugins.valid('pluginC'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/plugin_d`]: Plugins.incomplete(),
},
{ createCwd: false }
);
const plugins = await plugin$.pipe(toArray()).toPromise();
const pluginNames = plugins.map((plugin) => plugin.name);
expect(pluginNames).toHaveLength(3);
expect(pluginNames).toEqual(expect.arrayContaining(['pluginA', 'pluginB', 'pluginC']));
const errors = await error$
.pipe(
map((error) => error.toString()),
toArray()
)
.toPromise();
expect(errors).toEqual(
expect.arrayContaining([
`Error: Plugin manifest must contain an "id" property. (invalid-manifest, ${manifestPath(
'sub1',
'sub2',
'plugin_d'
)})`,
])
);
});
it('does not discover plugins nested inside another plugin', async () => {
const { plugin$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/plugin_a`]: {
...Plugins.valid('pluginA'),
nested_plugin: Plugins.valid('nestedPlugin'),
},
},
{ createCwd: false }
);
const plugins = await plugin$.pipe(toArray()).toPromise();
const pluginNames = plugins.map((plugin) => plugin.name);
expect(pluginNames).toEqual(['pluginA']);
});
it('stops scanning when reaching `maxDepth`', async () => {
const { plugin$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
mockFs(
{
[`${KIBANA_ROOT}/src/plugins/sub1/plugin`]: Plugins.valid('plugin1'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/plugin`]: Plugins.valid('plugin2'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/sub3/plugin`]: Plugins.valid('plugin3'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/sub3/sub4/plugin`]: Plugins.valid('plugin4'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/sub3/sub4/sub5/plugin`]: Plugins.valid('plugin5'),
[`${KIBANA_ROOT}/src/plugins/sub1/sub2/sub3/sub4/sub5/sub6/plugin`]: Plugins.valid(
'plugin6'
),
},
{ createCwd: false }
);
const plugins = await plugin$.pipe(toArray()).toPromise();
const pluginNames = plugins.map((plugin) => plugin.name);
expect(pluginNames).toHaveLength(5);
expect(pluginNames).toEqual(
expect.arrayContaining(['plugin1', 'plugin2', 'plugin3', 'plugin4', 'plugin5'])
);
});
it('works with symlinks', async () => {
const { plugin$ } = discover(new PluginsConfig(pluginConfig, env), coreContext);
const pluginFolder = resolve(KIBANA_ROOT, '..', 'ext-plugins');
mockFs(
{
[`${KIBANA_ROOT}/plugins`]: mockFs.symlink({
path: '../ext-plugins',
}),
[pluginFolder]: {
plugin_a: Plugins.valid('pluginA'),
plugin_b: Plugins.valid('pluginB'),
},
},
{ createCwd: false }
);
const plugins = await plugin$.pipe(toArray()).toPromise();
const pluginNames = plugins.map((plugin) => plugin.name);
expect(pluginNames).toHaveLength(2);
expect(pluginNames).toEqual(expect.arrayContaining(['pluginA', 'pluginB']));
});
it('logs a warning about --plugin-path when used in development', async () => {
const extraPluginTestPath = resolve(process.cwd(), 'my-extra-plugin');
env = Env.createDefault(
getEnvOptions({
cliArgs: { dev: false, envName: 'development' },
})
);
discover(new PluginsConfig({ ...pluginConfig, paths: [extraPluginTestPath] }, env), {
coreId: Symbol(),
configService,
env,
logger,
});
expect(loggingSystemMock.collect(logger).warn).toEqual([
[
`Explicit plugin paths [${extraPluginTestPath}] should only be used in development. Relative imports may not work properly in production.`,
],
]);
});
test('does not log a warning about --plugin-path when used in production', async () => {
const extraPluginTestPath = resolve(process.cwd(), 'my-extra-plugin');
env = Env.createDefault(
getEnvOptions({
cliArgs: { dev: false, envName: 'production' },
})
);
discover(new PluginsConfig({ ...pluginConfig, paths: [extraPluginTestPath] }, env), {
coreId: Symbol(),
configService,
env,
logger,
});
expect(loggingSystemMock.collect(logger).warn).toEqual([]);
});
});


@@ -19,7 +19,7 @@
import { readdir, stat } from 'fs';
import { resolve } from 'path';
import { bindNodeCallback, from, merge } from 'rxjs';
import { bindNodeCallback, from, merge, Observable } from 'rxjs';
import { catchError, filter, map, mergeMap, shareReplay } from 'rxjs/operators';
import { CoreContext } from '../../core_context';
import { Logger } from '../../logging';
@@ -32,6 +32,13 @@ import { parseManifest } from './plugin_manifest_parser';
const fsReadDir$ = bindNodeCallback<string, string[]>(readdir);
const fsStat$ = bindNodeCallback(stat);
const maxScanDepth = 5;
interface PluginSearchPathEntry {
dir: string;
depth: number;
}
/**
* Tries to discover all possible plugins based on the provided plugin config.
* Discovery result consists of two separate streams, the one (`plugin$`) is
@@ -75,34 +82,96 @@ export function discover(config: PluginsConfig, coreContext: CoreContext) {
}
/**
* Iterates over every plugin search path and returns a merged stream of all
* sub-directories. If directory cannot be read or it's impossible to get stat
* Recursively iterates over every plugin search path and returns a merged stream of all
* sub-directories containing a manifest file. If a directory cannot be read, or stat fails
* for any of the nested entries, an error is added to the stream instead.
*
* @param pluginDirs List of the top-level directories to process.
* @param log Plugin discovery logger instance.
*/
function processPluginSearchPaths$(pluginDirs: readonly string[], log: Logger) {
return from(pluginDirs).pipe(
mergeMap((dir) => {
log.debug(`Scanning "${dir}" for plugin sub-directories...`);
function processPluginSearchPaths$(
pluginDirs: readonly string[],
log: Logger
): Observable<string | PluginDiscoveryError> {
function recursiveScanFolder(
ent: PluginSearchPathEntry
): Observable<string | PluginDiscoveryError> {
return from([ent]).pipe(
mergeMap((entry) => {
return findManifestInFolder(entry.dir, () => {
if (entry.depth > maxScanDepth) {
return [];
}
return mapSubdirectories(entry.dir, (subDir) =>
recursiveScanFolder({ dir: subDir, depth: entry.depth + 1 })
);
});
})
);
}
return fsReadDir$(dir).pipe(
mergeMap((subDirs: string[]) => subDirs.map((subDir) => resolve(dir, subDir))),
mergeMap((path) =>
fsStat$(path).pipe(
// Filter out non-directory entries from target directories, it's expected that
// these directories may contain files (e.g. `README.md` or `package.json`).
// We shouldn't silently ignore the entries we couldn't get stat for though.
mergeMap((pathStat) => (pathStat.isDirectory() ? [path] : [])),
catchError((err) => [PluginDiscoveryError.invalidPluginPath(path, err)])
)
),
catchError((err) => [PluginDiscoveryError.invalidSearchPath(dir, err)])
return from(pluginDirs.map((dir) => ({ dir, depth: 0 }))).pipe(
mergeMap((entry) => {
log.debug(`Scanning "${entry.dir}" for plugin sub-directories...`);
return fsReadDir$(entry.dir).pipe(
mergeMap(() => recursiveScanFolder(entry)),
catchError((err) => [PluginDiscoveryError.invalidSearchPath(entry.dir, err)])
);
})
);
}
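A minimal sketch of consuming this merged stream from within the module (`processPluginSearchPaths$` is not exported; `log` is assumed to be a `Logger` instance, and the partitioning below is illustrative rather than the actual `discover` implementation):

import { filter } from 'rxjs/operators';

// Illustrative only: split plugin directories from discovery errors emitted on the same stream.
const results$ = processPluginSearchPaths$(['/kibana/src/plugins', '/kibana/plugins'], log);

const pluginDirs$ = results$.pipe(
  filter((entry): entry is string => typeof entry === 'string')
);
const searchErrors$ = results$.pipe(
  filter((entry): entry is PluginDiscoveryError => typeof entry !== 'string')
);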
/**
* Attempts to read the manifest file in the specified directory, or calls `notFound` and returns its result if no
* manifest is found. For any manifest file that cannot be read, a PluginDiscoveryError is added instead.
* @param dir
* @param notFound
*/
function findManifestInFolder(
dir: string,
notFound: () => never[] | Observable<string | PluginDiscoveryError>
): string[] | Observable<string | PluginDiscoveryError> {
return fsStat$(resolve(dir, 'kibana.json')).pipe(
mergeMap((stats) => {
// `kibana.json` exists in given directory, we got a plugin
if (stats.isFile()) {
return [dir];
}
return [];
}),
catchError((manifestStatError) => {
// Did not find a manifest; recursively process sub-directories until we reach max depth.
if (manifestStatError.code !== 'ENOENT') {
return [PluginDiscoveryError.invalidPluginPath(dir, manifestStatError)];
}
return notFound();
})
);
}
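In the recursion above, the `maxScanDepth` cut-off amounts to passing a `notFound` callback that stops descending; a hedged sketch with a hypothetical path:

// Illustrative only: emits the directory when it contains a kibana.json,
// otherwise emits nothing instead of recursing further.
const atMaxDepth$ = findManifestInFolder('/kibana/src/plugins/sub1/sub2/sub3/sub4/sub5', () => []);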
/**
* Finds all sub-directories in `dir` and executes `mapFunc` for each one. For any directory that cannot be read,
* a PluginDiscoveryError is added.
* @param dir
* @param mapFunc
*/
function mapSubdirectories(
dir: string,
mapFunc: (subDir: string) => Observable<string | PluginDiscoveryError>
): Observable<string | PluginDiscoveryError> {
return fsReadDir$(dir).pipe(
mergeMap((subDirs: string[]) => subDirs.map((subDir) => resolve(dir, subDir))),
mergeMap((subDir) =>
fsStat$(subDir).pipe(
mergeMap((pathStat) => (pathStat.isDirectory() ? mapFunc(subDir) : [])),
catchError((subDirStatError) => [
PluginDiscoveryError.invalidPluginPath(subDir, subDirStatError),
])
)
)
);
}
/**
* Tries to load and parse the plugin manifest file located at the provided plugin
* directory path and produces an error result if it fails to do so or plugin manifest


@@ -5500,6 +5500,13 @@
resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-7.0.2.tgz#b17f16cf933597e10d6d78eae3251e692ce8b0ce"
integrity sha512-ZvO2tAcjmMi8V/5Z3JsyofMe3hasRcaw88cto5etSVMwVQfeivGAlEYmaQgceUSVYFofVjT+ioHsATjdWcFt1w==
"@types/mock-fs@^4.10.0":
version "4.10.0"
resolved "https://registry.yarnpkg.com/@types/mock-fs/-/mock-fs-4.10.0.tgz#460061b186993d76856f669d5317cda8a007c24b"
integrity sha512-FQ5alSzmHMmliqcL36JqIA4Yyn9jyJKvRSGV3mvPh108VFatX7naJDzSG4fnFQNZFq9dIx0Dzoe6ddflMB2Xkg==
dependencies:
"@types/node" "*"
"@types/moment-timezone@^0.5.12":
version "0.5.12"
resolved "https://registry.yarnpkg.com/@types/moment-timezone/-/moment-timezone-0.5.12.tgz#0fb680c03db194fe8ff4551eaeb1eec8d3d80e9f"
@@ -22095,6 +22102,11 @@ mochawesome@^4.1.0:
strip-ansi "^5.0.0"
uuid "^3.3.2"
mock-fs@^4.12.0:
version "4.12.0"
resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-4.12.0.tgz#a5d50b12d2d75e5bec9dac3b67ffe3c41d31ade4"
integrity sha512-/P/HtrlvBxY4o/PzXY9cCNBrdylDNxg7gnrv2sMNxj+UJ2m8jSpl0/A6fuJeNAWr99ZvGWH8XCbE0vmnM5KupQ==
mock-http-server@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/mock-http-server/-/mock-http-server-1.3.0.tgz#d2c2ffe65f77d3a4da8302c91d3bf687e5b51519"