[7.x] [kbn/es-archiver] move to a package (#72318) (#72871)

Co-authored-by: spalger <spalger@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Spencer 2020-07-22 11:29:11 -07:00 committed by GitHub
parent 3d8b3d2cad
commit e98ba6c705
57 changed files with 334 additions and 252 deletions

@@ -295,6 +295,7 @@
"@elastic/makelogs": "^6.0.0",
"@kbn/dev-utils": "1.0.0",
"@kbn/es": "1.0.0",
"@kbn/es-archiver": "1.0.0",
"@kbn/eslint-import-resolver-kibana": "2.0.0",
"@kbn/eslint-plugin-eslint": "1.0.0",
"@kbn/expect": "1.0.0",

@@ -61,9 +61,7 @@ it('extends the context using extendContext()', async () => {
expect(context.flags).toMatchInlineSnapshot(`
Object {
"_": Array [
"foo",
],
"_": Array [],
"debug": false,
"help": false,
"quiet": false,

@@ -91,6 +91,13 @@ export class RunWithCommands<T> {
const commandFlagOptions = mergeFlagOptions(this.options.globalFlags, command.flags);
const commandFlags = getFlags(process.argv.slice(2), commandFlagOptions);
// strip command name plus "help" if we're actually executing the fake "help" command
if (isHelpCommand) {
commandFlags._.splice(0, 2);
} else {
commandFlags._.splice(0, 1);
}
const commandHelp = getCommandLevelHelp({
usage: this.options.usage,
globalFlagHelp: this.options.globalFlags?.help,
@@ -115,7 +122,7 @@ export class RunWithCommands<T> {
log,
flags: commandFlags,
procRunner,
addCleanupTask: cleanup.add,
addCleanupTask: cleanup.add.bind(cleanup),
};
const extendedContext = {
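
The `addCleanupTask: cleanup.add.bind(cleanup)` change above fixes an unbound-method bug: the method reference is handed out through the command context and later called as a plain function, so without the bind `this` no longer points at the cleanup registry. A minimal sketch of the pitfall, assuming an illustrative `CleanupTasks` class rather than the actual Kibana implementation:

class CleanupTasks {
  private tasks: Array<() => void> = [];

  add(task: () => void) {
    // relies on `this` being the CleanupTasks instance
    this.tasks.push(task);
  }

  runAll() {
    for (const task of this.tasks.splice(0)) task();
  }
}

const cleanup = new CleanupTasks();

// Unbound: when invoked later, `this` is no longer the CleanupTasks instance,
// so `this.tasks` is undefined and the call throws.
const broken = { addCleanupTask: cleanup.add };

// Bound: `this` stays pinned to `cleanup`, so tasks are registered correctly.
const fixed = { addCleanupTask: cleanup.add.bind(cleanup) };
fixed.addCleanupTask(() => console.log('cleaning up'));
cleanup.runAll(); // -> "cleaning up"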

@@ -0,0 +1,17 @@
{
"name": "@kbn/es-archiver",
"version": "1.0.0",
"license": "Apache-2.0",
"main": "target/index.js",
"scripts": {
"kbn:bootstrap": "tsc",
"kbn:watch": "tsc --watch"
},
"dependencies": {
"@kbn/dev-utils": "1.0.0",
"elasticsearch": "^16.7.0"
},
"devDependencies": {
"@types/elasticsearch": "^5.0.33"
}
}
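
This manifest turns es-archiver into a standalone Kibana package: `kbn:bootstrap` runs `tsc` so that `"main": "target/index.js"` resolves, and the only runtime dependencies are `@kbn/dev-utils` and the legacy `elasticsearch` client. A rough sketch of how a consumer might wire up the exported `EsArchiver` class, mirroring the construction in the new `cli.ts` below (the URLs and data directory are placeholders, not values from this PR):

import { Client } from 'elasticsearch';
import { ToolingLog } from '@kbn/dev-utils';
import { EsArchiver } from '@kbn/es-archiver';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
const client = new Client({ host: 'http://localhost:9200' });

const esArchiver = new EsArchiver({
  log,
  client,
  dataDir: 'test/functional/es_archives', // placeholder directory
  kibanaUrl: 'http://localhost:5601',
});

export async function loadFixtures() {
  // load an archive previously written with `node scripts/es_archiver save ...`
  await esArchiver.load('my_test_data', { useCreate: false });
}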

@@ -24,7 +24,7 @@ import { promisify } from 'util';
import globby from 'globby';
import { ToolingLog } from '@kbn/dev-utils';
import { createPromiseFromStreams } from '../../legacy/utils';
import { createPromiseFromStreams } from '../lib/streams';
const unlinkAsync = promisify(Fs.unlink);

@@ -23,7 +23,7 @@ import { Readable } from 'stream';
import { ToolingLog, KbnClient } from '@kbn/dev-utils';
import { Client } from 'elasticsearch';
import { createPromiseFromStreams, concatStreamProviders } from '../../legacy/utils';
import { createPromiseFromStreams, concatStreamProviders } from '../lib/streams';
import {
isGzip,

@@ -23,7 +23,7 @@ import { Readable, Writable } from 'stream';
import { fromNode } from 'bluebird';
import { ToolingLog } from '@kbn/dev-utils';
import { createPromiseFromStreams } from '../../legacy/utils';
import { createPromiseFromStreams } from '../lib/streams';
import {
prioritizeMappings,
readDirectory,

@@ -23,7 +23,7 @@ import { Readable, Writable } from 'stream';
import { Client } from 'elasticsearch';
import { ToolingLog } from '@kbn/dev-utils';
import { createListStream, createPromiseFromStreams } from '../../legacy/utils';
import { createListStream, createPromiseFromStreams } from '../lib/streams';
import {
createStats,
createGenerateIndexRecordsStream,

@@ -23,7 +23,7 @@ import { Readable, Writable } from 'stream';
import { Client } from 'elasticsearch';
import { ToolingLog, KbnClient } from '@kbn/dev-utils';
import { createPromiseFromStreams } from '../../legacy/utils';
import { createPromiseFromStreams } from '../lib/streams';
import {
isGzip,
createStats,

@@ -0,0 +1,244 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** ***********************************************************
*
* Run `node scripts/es_archiver --help` for usage information
*
*************************************************************/
import Path from 'path';
import Url from 'url';
import readline from 'readline';
import { RunWithCommands, createFlagError } from '@kbn/dev-utils';
import { readConfigFile } from '@kbn/test';
import legacyElasticsearch from 'elasticsearch';
import { EsArchiver } from './es_archiver';
const resolveConfigPath = (v: string) => Path.resolve(process.cwd(), v);
const defaultConfigPath = resolveConfigPath('test/functional/config.js');
export function runCli() {
new RunWithCommands({
description: 'CLI to manage archiving/restoring data in elasticsearch',
globalFlags: {
string: ['es-url', 'kibana-url', 'dir', 'config'],
help: `
--config path to an FTR config file that sets --es-url, --kibana-url, and --dir
default: ${defaultConfigPath}
--es-url url for Elasticsearch, prefer the --config flag
--kibana-url url for Kibana, prefer the --config flag
--dir where archives are stored, prefer the --config flag
`,
},
async extendContext({ log, flags, addCleanupTask }) {
const configPath = flags.config || defaultConfigPath;
if (typeof configPath !== 'string') {
throw createFlagError('--config must be a string');
}
const config = await readConfigFile(log, Path.resolve(configPath));
let esUrl = flags['es-url'];
if (esUrl && typeof esUrl !== 'string') {
throw createFlagError('--es-url must be a string');
}
if (!esUrl && config) {
esUrl = Url.format(config.get('servers.elasticsearch'));
}
if (!esUrl) {
throw createFlagError('--es-url or --config must be defined');
}
let kibanaUrl = flags['kibana-url'];
if (kibanaUrl && typeof kibanaUrl !== 'string') {
throw createFlagError('--kibana-url must be a string');
}
if (!kibanaUrl && config) {
kibanaUrl = Url.format(config.get('servers.kibana'));
}
if (!kibanaUrl) {
throw createFlagError('--kibana-url or --config must be defined');
}
let dir = flags.dir;
if (dir && typeof dir !== 'string') {
throw createFlagError('--dir must be a string');
}
if (!dir && config) {
dir = Path.resolve(config.get('esArchiver.directory'));
}
if (!dir) {
throw createFlagError('--dir or --config must be defined');
}
const client = new legacyElasticsearch.Client({
host: esUrl,
log: flags.verbose ? 'trace' : [],
});
addCleanupTask(() => client.close());
const esArchiver = new EsArchiver({
log,
client,
dataDir: dir,
kibanaUrl,
});
return {
esArchiver,
};
},
})
.command({
name: 'save',
usage: 'save [name] [...indices]',
description: `
archive the [indices ...] into the --dir with [name]
Example:
Save all [logstash-*] indices from http://localhost:9200 to [snapshots/my_test_data] directory
WARNING: If the [my_test_data] snapshot exists it will be deleted!
$ node scripts/es_archiver save my_test_data logstash-* --dir snapshots
`,
flags: {
boolean: ['raw'],
help: `
--raw don't gzip the archives
`,
},
async run({ flags, esArchiver }) {
const [name, ...indices] = flags._;
if (!name) {
throw createFlagError('missing [name] argument');
}
if (!indices.length) {
throw createFlagError('missing [...indices] arguments');
}
const raw = flags.raw;
if (typeof raw !== 'boolean') {
throw createFlagError('--raw does not take a value');
}
await esArchiver.save(name, indices, { raw });
},
})
.command({
name: 'load',
usage: 'load [name]',
description: `
load the archive in --dir with [name]
Example:
Load the [my_test_data] snapshot from the archive directory and elasticsearch instance defined
in the [test/functional/config.js] config file
WARNING: If the indices exist already they will be deleted!
$ node scripts/es_archiver load my_test_data --config test/functional/config.js
`,
flags: {
boolean: ['use-create'],
help: `
--use-create use create instead of index for loading documents
`,
},
async run({ flags, esArchiver }) {
const [name] = flags._;
if (!name) {
throw createFlagError('missing [name] argument');
}
if (flags._.length > 1) {
throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
}
const useCreate = flags['use-create'];
if (typeof useCreate !== 'boolean') {
throw createFlagError('--use-create does not take a value');
}
await esArchiver.load(name, { useCreate });
},
})
.command({
name: 'unload',
usage: 'unload [name]',
description: 'remove indices created by the archive in --dir with [name]',
async run({ flags, esArchiver }) {
const [name] = flags._;
if (!name) {
throw createFlagError('missing [name] argument');
}
if (flags._.length > 1) {
throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
}
await esArchiver.unload(name);
},
})
.command({
name: 'edit',
usage: 'edit [prefix]',
description:
'extract the archives under the prefix, wait for edits to be completed, and then recompress the archives',
async run({ flags, esArchiver }) {
const [prefix] = flags._;
if (!prefix) {
throw createFlagError('missing [prefix] argument');
}
if (flags._.length > 1) {
throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
}
await esArchiver.edit(prefix, async () => {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
await new Promise((resolveInput) => {
rl.question(`Press enter when you're done`, () => {
rl.close();
resolveInput();
});
});
});
},
})
.command({
name: 'empty-kibana-index',
description:
'[internal] Delete any Kibana indices, and initialize the Kibana index as Kibana would do on startup.',
async run({ esArchiver }) {
await esArchiver.emptyKibanaIndex();
},
})
.command({
name: 'rebuild-all',
description: '[internal] read and write all archives in --dir to remove any inconsistencies',
async run({ esArchiver }) {
await esArchiver.rebuildAll();
},
})
.execute();
}
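
`runCli()` replaces the commander-based `src/es_archiver/cli.ts` that is deleted later in this diff; the repo-level entry point stays where it was and simply delegates to the package, as the `scripts/es_archiver.js` hunk further down shows. A sketch of that delegation, written here as TypeScript (the actual script is plain JS using `require`):

import { runCli } from '@kbn/es-archiver';

// argv parsing, help output, and command dispatch are all handled inside the package
runCli();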

@@ -18,3 +18,4 @@
*/
export { EsArchiver } from './es_archiver';
export * from './cli';

@@ -22,11 +22,7 @@ import { createGunzip } from 'zlib';
import expect from '@kbn/expect';
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
} from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams, createConcatStream } from '../../streams';
import { createFormatArchiveStreams } from '../format';

@@ -22,11 +22,7 @@ import { createGzip } from 'zlib';
import expect from '@kbn/expect';
import {
createConcatStream,
createListStream,
createPromiseFromStreams,
} from '../../../../legacy/utils';
import { createConcatStream, createListStream, createPromiseFromStreams } from '../../streams';
import { createParseArchiveStreams } from '../parse';
@@ -109,7 +105,7 @@ describe('esArchiver createParseArchiveStreams', () => {
Buffer.from('{"a": 2}\n\n'),
]),
...createParseArchiveStreams({ gzip: false }),
createConcatStream(),
createConcatStream([]),
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {
@@ -172,7 +168,7 @@ describe('esArchiver createParseArchiveStreams', () => {
await createPromiseFromStreams([
createListStream([Buffer.from('{"a": 1}')]),
...createParseArchiveStreams({ gzip: true }),
createConcatStream(),
createConcatStream([]),
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {

@@ -21,7 +21,7 @@ import { createGzip, Z_BEST_COMPRESSION } from 'zlib';
import { PassThrough } from 'stream';
import stringify from 'json-stable-stringify';
import { createMapStream, createIntersperseStream } from '../../../legacy/utils';
import { createMapStream, createIntersperseStream } from '../streams';
import { RECORD_SEPARATOR } from './constants';
export function createFormatArchiveStreams({ gzip = false }: { gzip?: boolean } = {}) {

@@ -19,8 +19,12 @@
import { createGunzip } from 'zlib';
import { PassThrough } from 'stream';
import { createFilterStream } from '../../../legacy/utils/streams/filter_stream';
import { createSplitStream, createReplaceStream, createMapStream } from '../../../legacy/utils';
import {
createFilterStream,
createSplitStream,
createReplaceStream,
createMapStream,
} from '../streams';
import { RECORD_SEPARATOR } from './constants';

@@ -21,11 +21,7 @@ import sinon from 'sinon';
import expect from '@kbn/expect';
import { delay } from 'bluebird';
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
} from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams, createConcatStream } from '../../streams';
import { createGenerateDocRecordsStream } from '../generate_doc_records_stream';
import { Progress } from '../../progress';

@@ -20,7 +20,7 @@
import expect from '@kbn/expect';
import { delay } from 'bluebird';
import { createListStream, createPromiseFromStreams } from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams } from '../../streams';
import { Progress } from '../../progress';
import { createIndexDocRecordsStream } from '../index_doc_records_stream';

@@ -21,11 +21,7 @@ import expect from '@kbn/expect';
import sinon from 'sinon';
import Chance from 'chance';
import {
createPromiseFromStreams,
createConcatStream,
createListStream,
} from '../../../../legacy/utils';
import { createPromiseFromStreams, createConcatStream, createListStream } from '../../streams';
import { createCreateIndexStream } from '../create_index_stream';

@@ -19,7 +19,7 @@
import sinon from 'sinon';
import { createListStream, createPromiseFromStreams } from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams } from '../../streams';
import { createDeleteIndexStream } from '../delete_index_stream';

@@ -20,11 +20,7 @@
import sinon from 'sinon';
import expect from '@kbn/expect';
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
} from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams, createConcatStream } from '../../streams';
import { createStubClient, createStubStats } from './stubs';

@@ -75,7 +75,7 @@ export async function migrateKibanaIndex({
},
} as any);
return await kbnClient.savedObjects.migrate();
await kbnClient.savedObjects.migrate();
}
/**

@@ -20,11 +20,7 @@
import Chance from 'chance';
import expect from '@kbn/expect';
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
} from '../../../../legacy/utils';
import { createListStream, createPromiseFromStreams, createConcatStream } from '../../streams';
import { createFilterRecordsStream } from '../filter_records_stream';

@@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from '../../../../src/legacy/utils/streams';
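
Rather than copying the legacy stream helpers into the package, this one-line module re-exports them from the Kibana source tree, and every former `../../legacy/utils` import in the moved files now points at it (the one-line import rewrites earlier in this diff). A small sketch of the helpers as they are used in this PR's tests; the relative import path and the literal values are illustrative:

import { Readable, Writable } from 'stream';
import { createListStream, createConcatStream, createPromiseFromStreams } from './streams';

export async function example() {
  // Emit three records, then concatenate them into the initial [] accumulator.
  const records = await createPromiseFromStreams([
    createListStream(['a', 'b', 'c']),
    createConcatStream([]),
  ] as [Readable, ...Writable[]]);

  return records; // ['a', 'b', 'c']
}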

@@ -0,0 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./target",
"declaration": true,
"sourceMap": true,
"target": "ES2019"
},
"include": [
"src/**/*"
]
}

@@ -0,0 +1 @@
../../yarn.lock

@@ -18,6 +18,6 @@
*/
export { FunctionalTestRunner } from './functional_test_runner';
export { readConfigFile } from './lib';
export { readConfigFile, Config } from './lib';
export { runFtrCli } from './cli';
export * from './lib/docker_servers';

@@ -18,4 +18,4 @@
*/
require('../src/setup_node_env');
require('../src/es_archiver/cli');
require('@kbn/es-archiver').runCli();

@@ -37,7 +37,6 @@ export const CopySourceTask = {
'!src/legacy/core_plugins/console/public/tests/**',
'!src/cli/cluster/**',
'!src/cli/repl/**',
'!src/es_archiver/**',
'!src/functional_test_runner/**',
'!src/dev/**',
'typings/**',

@@ -1,183 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** ***********************************************************
*
* Run `node scripts/es_archiver --help` for usage information
*
*************************************************************/
import { resolve } from 'path';
import { readFileSync } from 'fs';
import { format as formatUrl } from 'url';
import readline from 'readline';
import { Command } from 'commander';
import * as legacyElasticsearch from 'elasticsearch';
import { ToolingLog } from '@kbn/dev-utils';
import { readConfigFile } from '@kbn/test';
import { EsArchiver } from './es_archiver';
const cmd = new Command('node scripts/es_archiver');
const resolveConfigPath = (v: string) => resolve(process.cwd(), v);
const defaultConfigPath = resolveConfigPath('test/functional/config.js');
cmd
.description(`CLI to manage archiving/restoring data in elasticsearch`)
.option('--es-url [url]', 'url for elasticsearch')
.option(
'--kibana-url [url]',
'url for kibana (only necessary if using "load" or "unload" methods)'
)
.option(`--dir [path]`, 'where archives are stored')
.option('--verbose', 'turn on verbose logging')
.option(
'--config [path]',
'path to a functional test config file to use for default values',
resolveConfigPath,
defaultConfigPath
)
.on('--help', () => {
// eslint-disable-next-line no-console
console.log(readFileSync(resolve(__dirname, './cli_help.txt'), 'utf8'));
});
cmd
.option('--raw', `don't gzip the archive`)
.command('save <name> <indices...>')
.description('archive the <indices ...> into the --dir with <name>')
.action((name, indices) => execute((archiver, { raw }) => archiver.save(name, indices, { raw })));
cmd
.option('--use-create', 'use create instead of index for loading documents')
.command('load <name>')
.description('load the archive in --dir with <name>')
.action((name) => execute((archiver, { useCreate }) => archiver.load(name, { useCreate })));
cmd
.command('unload <name>')
.description('remove indices created by the archive in --dir with <name>')
.action((name) => execute((archiver) => archiver.unload(name)));
cmd
.command('empty-kibana-index')
.description(
'[internal] Delete any Kibana indices, and initialize the Kibana index as Kibana would do on startup.'
)
.action(() => execute((archiver) => archiver.emptyKibanaIndex()));
cmd
.command('edit [prefix]')
.description(
'extract the archives under the prefix, wait for edits to be completed, and then recompress the archives'
)
.action((prefix) =>
execute((archiver) =>
archiver.edit(prefix, async () => {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
await new Promise((resolveInput) => {
rl.question(`Press enter when you're done`, () => {
rl.close();
resolveInput();
});
});
})
)
);
cmd
.command('rebuild-all')
.description('[internal] read and write all archives in --dir to remove any inconsistencies')
.action(() => execute((archiver) => archiver.rebuildAll()));
cmd.parse(process.argv);
const missingCommand = cmd.args.every((a) => !((a as any) instanceof Command));
if (missingCommand) {
execute();
}
async function execute(fn?: (esArchiver: EsArchiver, command: Command) => void): Promise<void> {
try {
const log = new ToolingLog({
level: cmd.verbose ? 'debug' : 'info',
writeTo: process.stdout,
});
if (cmd.config) {
// load default values from the specified config file
const config = await readConfigFile(log, resolve(cmd.config));
if (!cmd.esUrl) cmd.esUrl = formatUrl(config.get('servers.elasticsearch'));
if (!cmd.kibanaUrl) cmd.kibanaUrl = formatUrl(config.get('servers.kibana'));
if (!cmd.dir) cmd.dir = config.get('esArchiver.directory');
}
// log and count all validation errors
let errorCount = 0;
const error = (msg: string) => {
errorCount++;
log.error(msg);
};
if (!fn) {
error(`Unknown command "${cmd.args[0]}"`);
}
if (!cmd.esUrl) {
error('You must specify either --es-url or --config flags');
}
if (!cmd.dir) {
error('You must specify either --dir or --config flags');
}
// if there was a validation error display the help
if (errorCount) {
cmd.help();
return;
}
// run!
const client = new legacyElasticsearch.Client({
host: cmd.esUrl,
log: cmd.verbose ? 'trace' : [],
});
try {
const esArchiver = new EsArchiver({
log,
client,
dataDir: resolve(cmd.dir),
kibanaUrl: cmd.kibanaUrl,
});
await fn!(esArchiver, cmd);
} finally {
await client.close();
}
} catch (err) {
// eslint-disable-next-line no-console
console.log('FATAL ERROR', err.stack);
}
}

@@ -1,15 +0,0 @@
Examples:
Dump an index to disk:
Save all `logstash-*` indices from http://localhost:9200 to `snapshots/my_test_data` directory
WARNING: If the `my_test_data` snapshot exists it will be deleted!
$ node scripts/es_archiver save my_test_data logstash-* --dir snapshots
Load an index from disk
Load the `my_test_data` snapshot from the archive directory and elasticsearch instance defined
in the `test/functional/config.js` config file
WARNING: If the indices exist already they will be deleted!
$ node scripts/es_archiver load my_test_data --config test/functional/config.js

@@ -18,9 +18,9 @@
*/
import { format as formatUrl } from 'url';
import { EsArchiver } from '@kbn/es-archiver';
import { FtrProviderContext } from '../ftr_provider_context';
import { EsArchiver } from '../../../src/es_archiver';
// @ts-ignore not TS yet
import * as KibanaServer from './kibana_server';

@@ -5,7 +5,7 @@
*/
import expect from '@kbn/expect';
import { EsArchiver } from 'src/es_archiver';
import { EsArchiver } from '@kbn/es-archiver';
import { AppSearchService, IEngine } from '../../../../services/app_search_service';
import { Browser } from '../../../../../../../test/functional/services/common';
import { FtrProviderContext } from '../../../../ftr_provider_context';

@@ -6,7 +6,7 @@
import expect from '@kbn/expect';
import { SuperTest } from 'supertest';
import { EsArchiver } from 'src/es_archiver';
import { EsArchiver } from '@kbn/es-archiver';
import { DEFAULT_SPACE_ID } from '../../../../plugins/spaces/common/constants';
import { CopyResponse } from '../../../../plugins/spaces/server/lib/copy_to_spaces';
import { getUrlPrefix } from '../lib/space_test_utils';

@@ -6,7 +6,7 @@
import expect from '@kbn/expect';
import { SuperTest } from 'supertest';
import { EsArchiver } from 'src/es_archiver';
import { EsArchiver } from '@kbn/es-archiver';
import { SavedObject } from 'src/core/server';
import { DEFAULT_SPACE_ID } from '../../../../plugins/spaces/common/constants';
import { CopyResponse } from '../../../../plugins/spaces/server/lib/copy_to_spaces';