diff --git a/package-lock.json b/package-lock.json index a36b7bec9..fda7e2655 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7115,8 +7115,7 @@ "nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" }, "node-abi": { "version": "2.30.1", diff --git a/package.json b/package.json index 713f79451..93cd0fd22 100644 --- a/package.json +++ b/package.json @@ -91,6 +91,7 @@ "jose": "^4.3.6", "lexicographic-integer": "^1.1.0", "multiformats": "^9.4.8", + "nexpect": "^0.6.0", "node-forge": "^0.10.0", "pako": "^1.0.11", "prompts": "^2.4.1", @@ -106,6 +107,7 @@ "@types/cross-spawn": "^6.0.2", "@types/google-protobuf": "^3.7.4", "@types/jest": "^26.0.20", + "@types/level": "^6.0.0", "@types/nexpect": "^0.4.31", "@types/node": "^14.14.35", "@types/node-forge": "^0.9.7", diff --git a/src/bin/secrets/env.ts b/src/bin/secrets/env.ts new file mode 100644 index 000000000..81ea34e03 --- /dev/null +++ b/src/bin/secrets/env.ts @@ -0,0 +1,147 @@ +import path from 'path'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { utils as clientUtils } from '../../client'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; +import PolykeyClient from '../../PolykeyClient'; +import * as utils from '../../utils'; +import * as binUtils from '../utils'; +import * as CLIErrors from '../errors'; +import * as grpcErrors from '../../grpc/errors'; + +const env = binUtils.createCommand('env', { + description: 'Runs a modified environment with injected secrets', + nodePath: true, + verbose: true, + format: true, +}); +// env.option( +// '--command ', +// 'In the environment of the derivation, run the shell command cmd in an interactive shell (Use --run to use a non-interactive shell instead)', +// ); +// env.option( +// '--run ', +// 'In the environment of the derivation, run the shell command cmd in a non-interactive shell, meaning (among other things) that if you hit Ctrl-C while the command is running, the shell exits (Use --command to use an interactive shell instead)', +// ); +env.option( + '-e, --export', + 'Export all variables', +); +env.arguments( + "Secrets to inject into env, of the format ':[=]', you can also control what the environment variable will be called using '[]' (defaults to upper, snake case of the original secret name)", +); +env.action(async (options, command) => { + const clientConfig = {}; + clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ + new StreamHandler(), + ]); + if (options.verbose) { + clientConfig['logger'].setLevel(LogLevel.DEBUG); + } + clientConfig['nodePath'] = options.nodePath + ? 
options.nodePath + : utils.getDefaultNodePath(); + + let shellCommand; + const client = await PolykeyClient.createPolykeyClient(clientConfig); + const directoryMessage = new secretsPB.Directory(); + const vaultMessage = new vaultsPB.Vault(); + directoryMessage.setVault(vaultMessage); + const secretPathList: string[] = Array.from(command.args.values()); + if(!binUtils.pathRegex.test(secretPathList[secretPathList.length - 1])) { + shellCommand = secretPathList.pop(); + } + const data: string[] = []; + + try { + const secretPathList: string[] = Array.from(command.args.values()); + + if(!binUtils.pathRegex.test(secretPathList[secretPathList.length - 1])) { + shellCommand = secretPathList.pop(); + } + + if (secretPathList.length < 1) { + throw new CLIErrors.ErrorSecretsUndefined(); + } + + const secretEnv = { ...process.env }; + + await client.start({}); + const grpcClient = client.grpcClient; + + let output = ''; + + for (const secretPath of secretPathList) { + if (secretPath.includes(':')) { + if (options.export) { + output = 'export '; + } + + const [, vaultName, secretExpression, variableName] = secretPath.match( + binUtils.pathRegex, + )!; + + vaultMessage.setNameOrId(vaultName); + directoryMessage.setSecretDirectory(secretExpression) + + const secretGenerator = grpcClient.vaultsSecretsEnv(directoryMessage); + const { p, resolveP } = utils.promise(); + secretGenerator.stream.on('metadata', async (meta) => { + await clientUtils.refreshSession(meta, client.session); + resolveP(null); + }); + + + for await (const secret of secretGenerator) { + const secretName = secret.getSecretName(); + const secretContent = Buffer.from(secret.getSecretContent()); + const secretVariableName = variableName !== '' ? variableName: path.basename(secretName.toUpperCase().replace('-', '_')); + secretEnv[secretVariableName] = secretContent.toString(); + data.push(output + `${secretVariableName}=${secretContent.toString()}`); + } + output = ''; + } else if (secretPath === '-e' || secretPath === '--export') { + output = 'export '; + } else { + throw new CLIErrors.ErrorSecretPathFormat(); + } + } + + if(shellCommand) { + binUtils.spawnShell(shellCommand, secretEnv, options.format); + } else { + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 
'json' : 'list', + data: data, + }), + ); + } + + } catch (err) { + if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { + process.stderr.write(`${err.message}\n`); + } + if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { + process.stderr.write(`${err.message}\n`); + } else { + process.stderr.write( + binUtils.outputFormatter({ + type: 'error', + description: err.description, + message: err.message, + }), + ); + throw err; + } + } finally { + await client.stop(); + options.nodePath = undefined; + options.verbose = undefined; + options.format = undefined; + options.command = undefined; + options.run = undefined; + } +}); + +export default env; diff --git a/src/bin/utils.ts b/src/bin/utils.ts index 86a0df17b..f645bfdeb 100644 --- a/src/bin/utils.ts +++ b/src/bin/utils.ts @@ -4,6 +4,10 @@ import os from 'os'; import process from 'process'; import { LogLevel } from '@matrixai/logger'; import prompts from 'prompts'; +import { spawn } from 'cross-spawn'; +import commander from 'commander'; +import Logger, { LogLevel } from '@matrixai/logger'; + import * as grpc from '@grpc/grpc-js'; import * as clientUtils from '../client/utils'; import * as clientErrors from '../client/errors'; @@ -153,11 +157,101 @@ async function retryAuth( } } } +// Async function requestPassword(keyManager: KeyManager, attempts: number = 3) { +// let i = 0; +// let correct = false; +// while (i < attempts) { +// const response = await prompts({ +// type: 'text', +// name: 'password', +// message: 'Please enter your password', +// }); +// try { +// clientUtils.checkPassword(response.password, keyManager); +// correct = true; +// } catch (err) { +// if (err instanceof clientErrors.ErrorPassword) { +// if (attempts == 2) { +// throw new clientErrors.ErrorPassword(); +// } +// i++; +// } +// } +// if (correct) { +// break; +// } +// } +// return; +// } +function spawnShell(command: string, environmentVariables: POJO, format: string): void { + // This code is what this function should look like after the kexec package is added + // try { + // kexec(command, { + // stdio: 'inherit', + // env: environmentVariables, + // shell: true, + // }); + // } catch (err) { + // if ( + // err.code !== "MODULE_NOT_FOUND" && + // err.code !== "UNDECLARED_DEPENDENCY" + // ) { + // throw err; + // } + + // const shell = spawn(command, { + // stdio: 'inherit', + // env: environmentVariables, + // shell: true, + // }); + // shell.on("exit", (code: number, signal: NodeJS.Signals) => { + // process.on("exit", () => { + // if (signal) { + // process.kill(process.pid, signal); + // } else { + // process.exitCode = code; + // } + // }); + // }); + // process.on("SIGINT", () => { + // shell.kill("SIGINT") + // }); + // shell.on('close', (code) => { + // if (code != 0) { + // process.stdout.write( + // outputFormatter({ + // type: format === 'json' ? 'json' : 'list', + // data: [`Terminated with ${code}`], + // }), + // ); + // } + // }); + // } + const shell = spawn(command, { + stdio: 'inherit', + env: environmentVariables, + shell: true, + }); + + shell.on('close', (code) => { + if (code != 0) { + process.stdout.write( + outputFormatter({ + type: format === 'json' ? 
'json' : 'list', + data: [`Terminated with ${code}`], + }), + ); + } + }); } export { getDefaultNodePath, verboseToLogLevel, + createCommand, + promisifyGrpc, + spawnShell, OutputObject, outputFormatter, requestPassword, diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index 9beec1423..a05853554 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -293,7 +293,15 @@ class GRPCClientClient extends GRPCClient { )(...args); } - @ready(new clientErrors.ErrorClientClientDestroyed()) + @ready(new grpcErrors.ErrorGRPCClientNotStarted()) + public vaultsSecretsEnv(...args) { + return grpcUtils.promisifyReadableStreamCall( + this.client, + this.client.vaultsSecretsEnv, + )(...args); + } + + @ready(new grpcErrors.ErrorGRPCClientNotStarted()) public keysKeyPairRoot(...args) { return grpcUtils.promisifyUnaryCall( this.client, diff --git a/src/client/rpcVaults.ts b/src/client/rpcVaults.ts index f85cb9a37..14059c69e 100644 --- a/src/client/rpcVaults.ts +++ b/src/client/rpcVaults.ts @@ -639,6 +639,73 @@ const createVaultRPC = ({ await genWritable.throw(err); } }, + secretsEnv: async ( + call: grpc.ServerWritableStream< + secretsPB.Directory, + secretsPB.Directory + >, + ): Promise<void> => { + const genWritable = grpcUtils.generatorWritable(call); + try { + await sessionManager.verifyToken(utils.getToken(call.metadata)); + const responseMeta = utils.createMetaTokenResponse( + await sessionManager.generateToken(), + ); + call.sendMetadata(responseMeta); + // Getting the vault. + const directoryMessage = call.request; + const vaultMessage = directoryMessage.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + const vaultId = await utils.parseVaultInput(vaultMessage, vaultManager); + const pattern = directoryMessage.getSecretDirectory(); + const res = await vaultManager.glob(vaultId, pattern); + const dirMessage = new secretsPB.Directory(); + for (const file of res) { + dirMessage.setSecretDirectory(file); + await genWritable.next(dirMessage); + } + await genWritable.next(null); + } catch (err) { + await genWritable.throw(err); + } + }, + vaultsSecretsEnv: async ( + call: grpc.ServerWritableStream< + secretsPB.Directory, + secretsPB.Secret + >, + ): Promise<void> => { + const genWritable = grpcUtils.generatorWritable(call); + + try { + await sessionManager.verifyToken(utils.getToken(call.metadata)); + const responseMeta = utils.createMetaTokenResponse( + await sessionManager.generateToken(), + ); + call.sendMetadata(responseMeta); + const directoryMessage = call.request; + const vaultMessage = directoryMessage.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + const pattern = directoryMessage.getSecretDirectory(); + const id = await utils.parseVaultInput(vaultMessage, vaultManager); + const vault = await vaultManager.openVault(id); + const secretList = await vaultManager.glob(id, pattern); + let secretMessage: secretsPB.Secret; + for (const secretName of secretList) { + const secretContent = await vaultOps.getSecret(vault, secretName); + secretMessage = new secretsPB.Secret(); + secretMessage.setSecretName(secretName); + secretMessage.setSecretContent(secretContent); + await genWritable.next(secretMessage); + } + await genWritable.next(null); + } catch (err) { + await genWritable.throw(err); + } + }, }; }; diff --git a/src/client/utils.ts b/src/client/utils.ts index 1ed23ea1b..a5cfbc6b0 100644 --- a/src/client/utils.ts +++ b/src/client/utils.ts @@ -146,3 +146,75 
@@ export { }; export type { Authenticate }; +const isHidden = /(^|[\\\/])\.[^\\\/\.]/g; +let CACHE = {}; + +async function walk(filesystem: typeof fs, output: string[], prefix: string, lexer, opts, dirname='', level=0) { + const readdir = utils.promisify(filesystem.readdir).bind(filesystem); + const lstat = utils.promisify(filesystem.lstat).bind(filesystem); + + const rgx = lexer.segments[level]; + const dir = path.resolve(opts.cwd, prefix, dirname); + const files = await readdir(dir); + const { dot, filesOnly } = opts; + + let i=0, len=files.length, file; + let fullpath, relpath, stats, isMatch; + + for (; i < len; i++) { + fullpath = path.join(dir, file=files[i]); + relpath = dirname ? path.join(dirname, file) : file; + // if (!dot && isHidden.test(relpath)) continue; + isMatch = lexer.regex.test(relpath); + + if ((stats=CACHE[relpath]) === void 0) { + CACHE[relpath] = stats = await lstat(fullpath); + } + + if (!stats.isDirectory()) { + isMatch && output.push(path.relative(opts.cwd, fullpath)); + continue; + } + + if (rgx && !rgx.test(file)) continue; + !filesOnly && isMatch && output.push(path.join(prefix, relpath)); + + await walk(filesystem, output, prefix, lexer, opts, relpath, rgx && rgx.toString() !== lexer.globstar && level + 1); + } +} + +async function glob(filesystem: typeof fs, str: string, opts={ cwd: '.', absolute: true, filesOnly: false, flush: true }) { + const stat = utils.promisify(filesystem.stat).bind(filesystem); + + if (!str) return []; + + let glob = globalyzer(str); + + opts.cwd = opts.cwd || '.'; + + if (!glob.isGlob) { + try { + let resolved = path.resolve(opts.cwd, str); + let dirent = await stat(resolved); + if (opts.filesOnly && !dirent.isFile()) return []; + + return opts.absolute ? [resolved] : [str]; + } catch (err) { + if (err.code != 'ENOENT') throw err; + + return []; + } + } + + if (opts.flush) CACHE = {}; + + let matches = []; + const res = globrex(glob.glob, { filepath:true, globstar:true, extended:true }); + const globPath = res.path; + + await walk(filesystem, matches, glob.base, globPath, opts, '.', 0); + + return opts.absolute ? matches.map(x => path.resolve(opts.cwd, x)) : matches; +} + +export { parseVaultInput, getToken, refreshSession, createMetaTokenResponse }; diff --git a/src/git/GitRequest.ts b/src/git/GitRequest.ts deleted file mode 100644 index 14f304d66..000000000 --- a/src/git/GitRequest.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Responsible for converting HTTP messages from isomorphic-git into requests and sending them to a specific node. - */ - -class GitRequest { - private requestInfo: ( - vaultNameOrId: string, - ) => AsyncIterableIterator; - private requestPack: ( - vaultNameOrId: string, - body: any, - ) => AsyncIterableIterator; - private requestVaultNames: () => Promise; - - constructor( - requestInfo: (vaultNameOrId: string) => AsyncIterableIterator, - requestPack: ( - vaultNameOrId: string, - body: Buffer, - ) => AsyncIterableIterator, - requestVaultNames: () => Promise, - ) { - this.requestInfo = requestInfo; - this.requestPack = requestPack; - this.requestVaultNames = requestVaultNames; - } - - /** - * The custom http request method to feed into isomorphic-git's [custom http object](https://isomorphic-git.org/docs/en/http) - * In the future this will need to be changed in order to handle the receive-pack command from isomorphic-git. 
This will be - * in the url passed into the request function and is needed for push functionality - */ - public async request({ - url, - method = 'GET', - headers = {}, - body = Buffer.from(''), - }) { - const u = new URL(url); - - // Parse request - if (method === 'GET') { - const match = u.pathname.match(/\/(.+)\/info\/refs$/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - const infoResponse = this.requestInfo(vaultNameOrId); - - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const match = u.pathname.match(/\/(.+)\/git-(.+)/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - - const packResponse = this.requestPack(vaultNameOrId, body[0]); - - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - } - - public async scanVaults() { - return await this.requestVaultNames(); - } -} - -export default GitRequest; diff --git a/src/git/index.ts b/src/git/index.ts index dae0d1ba1..006019213 100644 --- a/src/git/index.ts +++ b/src/git/index.ts @@ -1,4 +1,3 @@ -export { default as GitRequest } from './GitRequest'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index 3e5a38052..0fe24f3f6 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -59,6 +59,7 @@ interface IClientServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } +interface IClientServiceService_IVaultsSecretsEnv extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsSecretsEnv"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IIdentitiesAuthenticate extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/IdentitiesAuthenticate"; requestStream: false; @@ -685,6 +695,7 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation vaultsPermissions: grpc.handleServerStreamingCall; vaultsVersion: grpc.handleUnaryCall; vaultsLog: grpc.handleServerStreamingCall; + vaultsSecretsEnv: grpc.handleServerStreamingCall; identitiesAuthenticate: grpc.handleServerStreamingCall; identitiesTokenPut: grpc.handleUnaryCall; identitiesTokenGet: grpc.handleUnaryCall; @@ -824,6 +835,8 @@ export interface IClientServiceClient { vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsSecretsEnv(request: polykey_v1_secrets_secrets_pb.Directory, options?: Partial): grpc.ClientReadableStream; + vaultsSecretsEnv(request: polykey_v1_secrets_secrets_pb.Directory, metadata?: grpc.Metadata, 
options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesTokenPut(request: polykey_v1_identities_identities_pb.TokenSpecific, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; @@ -1005,6 +1018,8 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsSecretsEnv(request: polykey_v1_secrets_secrets_pb.Directory, options?: Partial): grpc.ClientReadableStream; + public vaultsSecretsEnv(request: polykey_v1_secrets_secrets_pb.Directory, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesTokenPut(request: polykey_v1_identities_identities_pb.TokenSpecific, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 103b234a6..9f9522562 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -923,6 +923,17 @@ vaultsList: { responseSerialize: serialize_polykey_v1_vaults_LogEntry, responseDeserialize: deserialize_polykey_v1_vaults_LogEntry, }, + vaultsSecretsEnv: { + path: '/polykey.v1.ClientService/VaultsSecretsEnv', + requestStream: false, + responseStream: true, + requestType: polykey_v1_secrets_secrets_pb.Directory, + responseType: polykey_v1_secrets_secrets_pb.Secret, + requestSerialize: serialize_polykey_v1_secrets_Directory, + requestDeserialize: deserialize_polykey_v1_secrets_Directory, + responseSerialize: serialize_polykey_v1_secrets_Secret, + responseDeserialize: deserialize_polykey_v1_secrets_Secret, + }, // Identities identitiesAuthenticate: { path: '/polykey.v1.ClientService/IdentitiesAuthenticate', diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index 03dffd7b3..fd8d3e50d 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -64,6 +64,7 @@ service ClientService { rpc VaultsPermissions(polykey.v1.vaults.PermGet) returns (stream polykey.v1.vaults.Permission); rpc VaultsVersion(polykey.v1.vaults.Version) returns (polykey.v1.vaults.VersionResult); rpc VaultsLog(polykey.v1.vaults.Log) returns (stream polykey.v1.vaults.LogEntry); + rpc VaultsSecretsEnv(polykey.v1.secrets.Directory) returns(stream polykey.v1.secrets.Secret); // 
Identities rpc IdentitiesAuthenticate(polykey.v1.identities.Provider) returns (stream polykey.v1.identities.Provider); diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 6a900c11c..e7372481b 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -349,6 +349,13 @@ class VaultInternal { ); } + @ready(new vaultsErrors.ErrorVaultDestroyed()) + public async glob( + pattern: string, + ): Promise<string[]> { + return await vaultsUtils.glob(this.efsVault, pattern, {}); + } + @ready(new vaultsErrors.ErrorVaultDestroyed()) public async applySchema() {} } diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 0c86446ec..2c8df280f 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -676,7 +676,16 @@ class VaultManager { return vaultId; } - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @ready(new vaultsErrors.ErrorVaultManagerDestroyed()) + public async glob( + vaultId: VaultId, + pattern: string, + ): Promise<string[]> { + const vault = await this.getVault(vaultId); + return await vault.glob(pattern); + } + + @ready(new vaultsErrors.ErrorVaultManagerDestroyed()) public async *handleInfoRequest( vaultId: VaultId, ): AsyncGenerator { diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index b3fb797a4..a5db356dc 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -1,4 +1,4 @@ -import type { EncryptedFS } from 'encryptedfs'; +import type { EncryptedFS, Stat } from 'encryptedfs'; import type { VaultId, VaultKey, @@ -8,11 +8,16 @@ import type { VaultIdPretty, } from './types'; import type { FileSystem } from '../types'; +import type { FileSystemWritable } from './types'; import type { NodeId } from '../nodes/types'; import type { GRPCClientAgent } from '../agent'; import path from 'path'; import { IdRandom } from '@matrixai/id'; import * as grpc from '@grpc/grpc-js'; import * as vaultsErrors from './errors'; import { GitRequest } from '../git'; @@ -125,122 +130,390 @@ async function* readdirRecursivelyEFS2( } } -/** - * Searches a list of vaults for the given vault Id and associated name - * @throws If the vault Id does not exist - */ -function searchVaultName(vaultList: VaultList, vaultId: VaultId): VaultName { - let vaultName: VaultName | undefined; - - // Search each element in the list of vaults - for (const elem in vaultList) { - // List is of form \t - const value = vaultList[elem].split('\t'); - if (value[1] === vaultId) { - vaultName = value[0]; - break; +const isHidden = /(^|[\\\/])\.[^\\\/\.]/g; +let CACHE = {}; + +async function walk(filesystem: FileSystemGlob, output: string[], prefix: string, lexer, opts, dirname='', level=0) { + const rgx = lexer.segments[level]; + const dir = path.join(prefix, dirname); + const files = await filesystem.promises.readdir(dir, { encoding: 'utf8' }) as string[]; + const { dot, filesOnly } = opts; + + let i=0, len=files.length, file; + let fullpath, relpath, stats, isMatch; + + for (; i < len; i++) { + fullpath = path.join(dir, file=files[i]); + relpath = dirname ? 
path.join(dirname, file) : file; + if (!dot && isHidden.test(relpath)) continue; + isMatch = lexer.regex.test(relpath); + + if ((stats=CACHE[relpath]) === void 0) { + CACHE[relpath] = stats = await filesystem.promises.lstat(fullpath); } + if (!stats.isDirectory()) { + isMatch && output.push(path.relative(opts.cwd, fullpath)); + continue; + } + + if (rgx && !rgx.test(file)) continue; + !filesOnly && isMatch && output.push(path.join(prefix, relpath)); + + await walk(filesystem, output, prefix, lexer, opts, relpath, rgx && rgx.toString() !== lexer.globstar && level + 1); } - if (vaultName == null) { - throw new vaultsErrors.ErrorRemoteVaultUndefined( - `${vaultId} does not exist on connected node`, - ); - } - return vaultName; } -/** - * Creates a GitRequest object from the desired node connection. - * @param client GRPC connection to desired node - * @param nodeId - */ -async function constructGitHandler( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const gitRequest = new GitRequest( - ((vaultNameOrId: string) => requestInfo(vaultNameOrId, client)).bind(this), - ((vaultNameOrId: string, body: Buffer) => - requestPack(vaultNameOrId, body, client)).bind(this), - (() => requestVaultNames(client, nodeId)).bind(this), - ); - return gitRequest; +interface FileSystemGlob { + promises: { + stat: typeof EncryptedFS.prototype.stat; + lstat: typeof EncryptedFS.prototype.lstat; + readdir: typeof EncryptedFS.prototype.readdir; + }; } -/** - * Requests remote info from the connected node for the named vault. - * @param vaultId ID of the desired vault - * @param client A connection object to the node - * @returns Async Generator of Uint8Arrays representing the Info Response - */ -async function* requestInfo( - vaultNameOrId: string, - client: GRPCClientAgent, -): AsyncGenerator { - const request = new vaultsPB.Vault(); - request.setNameOrId(vaultNameOrId); - const response = client.vaultsGitInfoGet(request); - for await (const resp of response) { - yield resp.getChunk_asU8(); +async function glob(filesystem: FileSystemGlob, str: string, { cwd = '.', filesOnly = true, flush = true, dot = true }) { + if (!str) return []; + + let glob = globalyzer(str); + + if (!glob.isGlob) { + try { + let dirent = await filesystem.promises.stat(str); + if (filesOnly && !dirent.isFile()) return []; + return [str]; + } catch (err) { + if (err.code != 'ENOENT') throw err; + return []; + } } + + if (flush) CACHE = {}; + + let matches: string[] = []; + const res = globrex(glob.glob, { filepath:true, globstar:true, extended:true }); + const globPath = res.path; + + await walk(filesystem, matches, glob.base, globPath, { cwd, filesOnly, flush, dot }, '.', 0); + return matches; } -/** - * Requests a pack from the connected node for the named vault - * @param vaultId ID of vault - * @param body contains the pack request - * @param client A connection object to the node - * @returns AsyncGenerator of Uint8Arrays representing the Pack Response - */ -async function* requestPack( - vaultNameOrId: string, - body: Buffer, - client: GRPCClientAgent, -): AsyncGenerator { - const responseBuffers: Array = []; - - const meta = new grpc.Metadata(); - // FIXME make it a VaultIdReadable - meta.set('vaultNameOrId', vaultNameOrId); - - const stream = client.vaultsGitPackGet(meta); - const write = promisify(stream.write).bind(stream); - - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body); - write(chunk); - stream.end(); - - yield await new 
Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); +const CHARS = { '{': '}', '(': ')', '[': ']'}; +const STRICT = /\\(.)|(^!|\*|[\].+)]\?|\[[^\\\]]+\]|\{[^\\}]+\}|\(\?[:!=][^\\)]+\)|\([^|]+\|[^\\)]+\)|(\\).|([@?!+*]\(.*\)))/; +const RELAXED = /\\(.)|(^!|[*?{}()[\]]|\(\?)/; + +function isglob(str: string, { strict = true } = {}): boolean { + if (str === '') return false; + let match, rgx = strict ? STRICT : RELAXED; + while ((match = rgx.exec(str))) { + if (match[2]) return true; + let idx = match.index + match[0].length; + + // if an open bracket/brace/paren is escaped, + // set the index to the next closing character + let open = match[1]; + let close = open ? CHARS[open] : null; + if (open && close) { + let n = str.indexOf(close, idx); + if (n !== -1) idx = n + 1; + } + + str = str.slice(idx); + } + return false; } -/** - * Requests the vault names from the connected node. - * @param client A connection object to the node - * @param nodeId - */ -async function requestVaultNames( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const request = new nodesPB.Node(); - request.setNodeId(nodeId); - const vaultList = client.vaultsScan(request); - const data: string[] = []; - for await (const vault of vaultList) { - const vaultMessage = vault.getNameOrId(); - data.push(vaultMessage); +function parent(str: string, { strict = false } = {}): string { + str = path.normalize(str).replace(/\/|\\/, '/'); + // special case for strings ending in enclosure containing path separator + if (/[\{\[].*[\/]*.*[\}\]]$/.test(str)) str += '/'; + + // preserves full path in case of trailing path separator + str += 'a'; + + do {str = path.dirname(str)} + while (isglob(str, {strict}) || /(^|[^\\])([\{\[]|\([^\)]+$)/.test(str)); + // remove escape chars and return result + return str.replace(/\\([\*\?\|\[\]\(\)\{\}])/g, '$1'); +}; + +function globalyzer(pattern: string, opts = {}) { + let base = parent(pattern, opts); + let isGlob = isglob(pattern, opts); + let glob; + if (base != '.') { + glob = pattern.substr(base.length); + if (glob.startsWith('/')) glob = glob.substr(1); + } else { + glob = pattern; } - return data; + if (!isGlob) { + base = path.dirname(pattern); + glob = base !== '.' ? pattern.substr(base.length) : pattern; + } + if (glob.startsWith('./')) glob = glob.substr(2); + if (glob.startsWith('/')) glob = glob.substr(1); + + return { base, glob, isGlob }; +} + +const isWin = process.platform === 'win32'; +const SEP = isWin ? `\\\\+` : `\\/`; +const SEP_ESC = isWin ? `\\\\` : `/`; +const GLOBSTAR = `((?:[^/]*(?:/|$))*)`; +const WILDCARD = `([^/]*)`; +const GLOBSTAR_SEGMENT = `((?:[^${SEP_ESC}]*(?:${SEP_ESC}|$))*)`; +const WILDCARD_SEGMENT = `([^${SEP_ESC}]*)`; + +function globrex(glob: string, { extended = false, globstar = false, strict = false, filepath = false, flags = ''} = {}) { + let regex = ''; + let segment = ''; + let path: {regex: RegExp | string, segments: RegExp[], globstar?: RegExp } = { regex: '', segments: [] }; + + // If we are doing extended matching, this boolean is true when we are inside + // a group (eg {*.html,*.js}), and false otherwise. + let inGroup = false; + let inRange = false; + + // extglob stack. Keep track of scope + const ext: string[] = []; + + // Helper function to build string and segments + function add(str: string, { split, last, only }: { split?: boolean, last?: boolean, only?: string} = {}) { + if (only !== 'path') regex += str; + if (filepath && only !== 'regex') { + path.regex += (str === '\\/' ? 
SEP : str); + if (split) { + if (last) segment += str; + if (segment !== '') { + if (!flags.includes('g')) segment = `^${segment}$`; // change it 'includes' + path.segments.push(new RegExp(segment, flags)); + } + segment = ''; + } else { + segment += str; + } + } + } + + let c, n; + for (let i = 0; i < glob.length; i++) { + c = glob[i]; + n = glob[i + 1]; + + if (['\\', '$', '^', '.', '='].includes(c)) { + add(`\\${c}`); + continue; + } + + if (c === '/') { + add(`\\${c}`, {split: true}); + if (n === '/' && !strict) regex += '?'; + continue; + } + + if (c === '(') { + if (ext.length) { + add(c); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === ')') { + if (ext.length) { + add(c); + let type = ext.pop(); + if (type === '@') { + add('{1}'); + } else if (type === '!') { + add('([^\/]*)'); + } else { + add(type!); + } + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '|') { + if (ext.length) { + add(c); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '+') { + if (n === '(' && extended) { + ext.push(c); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '@' && extended) { + if (n === '(') { + ext.push(c); + continue; + } + } + + if (c === '!') { + if (extended) { + if (inRange) { + add('^'); + continue + } + if (n === '(') { + ext.push(c); + add('(?!'); + i++; + continue; + } + add(`\\${c}`); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '?') { + if (extended) { + if (n === '(') { + ext.push(c); + } else { + add('.'); + } + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '[') { + if (inRange && n === ':') { + i++; // skip [ + let value = ''; + while(glob[++i] !== ':') value += glob[i]; + if (value === 'alnum') add('(\\w|\\d)'); + else if (value === 'space') add('\\s'); + else if (value === 'digit') add('\\d'); + i++; // skip last ] + continue; + } + if (extended) { + inRange = true; + add(c); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === ']') { + if (extended) { + inRange = false; + add(c); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '{') { + if (extended) { + inGroup = true; + add('('); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '}') { + if (extended) { + inGroup = false; + add(')'); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === ',') { + if (inGroup) { + add('|'); + continue; + } + add(`\\${c}`); + continue; + } + + if (c === '*') { + if (n === '(' && extended) { + ext.push(c); + continue; + } + // Move over all consecutive "*"'s. 
+ // Also store the previous and next characters + let prevChar = glob[i - 1]; + let starCount = 1; + while (glob[i + 1] === '*') { + starCount++; + i++; + } + let nextChar = glob[i + 1]; + if (!globstar) { + // globstar is disabled, so treat any number of "*" as one + add('.*'); + } else { + // globstar is enabled, so determine if this is a globstar segment + let isGlobstar = + starCount > 1 && // multiple "*"'s + (prevChar === '/' || prevChar === undefined) && // from the start of the segment + (nextChar === '/' || nextChar === undefined); // to the end of the segment + if (isGlobstar) { + // it's a globstar, so match zero or more path segments + add(GLOBSTAR, {only:'regex'}); + add(GLOBSTAR_SEGMENT, {only:'path', last:true, split:true}); + i++; // move over the "/" + } else { + // it's not a globstar, so only match one path segment + add(WILDCARD, {only:'regex'}); + add(WILDCARD_SEGMENT, {only:'path'}); + } + } + continue; + } + + add(c); + } + + + // When regexp 'g' flag is specified don't + // constrain the regular expression with ^ & $ + if (!flags.includes('g')) { + regex = `^${regex}$`; + segment = `^${segment}$`; + if (filepath) path.regex = `^${path.regex}$`; + } + let result; + result = { regex: new RegExp(regex, flags) }; + + // Push the last segment + if (filepath) { + path.segments.push(new RegExp(segment, flags)); + path.regex = new RegExp(path.regex, flags); + path.globstar = new RegExp(!flags.includes('g') ? `^${GLOBSTAR_SEGMENT}$` : GLOBSTAR_SEGMENT, flags); + result.path = path; + } + + return result; } export { @@ -254,6 +527,5 @@ export { readdirRecursively, readdirRecursivelyEFS, readdirRecursivelyEFS2, - constructGitHandler, - searchVaultName, + glob, }; diff --git a/tests/bin/secret.test.ts b/tests/bin/secret.test.ts index 94db5dd1e..86aa8fced 100644 --- a/tests/bin/secret.test.ts +++ b/tests/bin/secret.test.ts @@ -2,6 +2,7 @@ import type { VaultName } from '@/vaults/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; +import nexpect from 'nexpect'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyAgent } from '@'; import { vaultOps } from '@/vaults'; @@ -55,6 +56,69 @@ describe('CLI secrets', () => { }); }); + describe('commandSecretEnv', () => { + test('should wrap globbed secrets', async () => { + const vaultName = 'Vault0' as VaultName; + const vault = await polykeyAgent.vaults.createVault(vaultName); + + await vaultOps.mkdir(vault, 'dir1/dir2/dir3', { recursive: true }); + await vaultOps.addSecret(vault, 'dir1/dir2/TEST VAR 1', 'test-1'); + await vaultOps.addSecret(vault, 'dir1/dir2/TEST_VAR_2', 'test-2'); + await vaultOps.addSecret(vault, 'TEST_VAR_3', 'test-3'); + await vaultOps.addSecret(vault, 'dir1/dir2/dir3/TEST_VAR_4', 'test-4'); + + let message = 'TEST VAR 1=test-1\nTEST_VAR_2=test-2\n'; + + command = [ + 'secrets', + 'env', + '-np', + dataDir, + 'Vault0:dir1/dir2/*', + ]; + + let result = await utils.pkWithStdio(command); + expect(result.code).toBe(0); + expect(result.stdout).toContain(message); + + nexpect.spawn('echo', ['$TEST VAR 1']).expect(''); + + command = [ + 'secrets', + 'env', + '-np', + dataDir, + '-e', + 'Vault0:**/*', + ]; + + message = 'export TEST_VAR_3=test-3\nexport TEST VAR 1=test-1\nexport TEST_VAR_2=test-2\nexport TEST_VAR_4=test-4\n'; + + result = await utils.pkWithStdio(command); + expect(result.code).toBe(0); + expect(result.stdout).toContain(message); + nexpect.spawn('echo', ['$TEST_VAR_3']).expect('test-3'); + nexpect.spawn('echo', ['$TEST VAR 1']).expect('test-1'); + 
nexpect.spawn('echo', ['$TEST_VAR_2']).expect('test-2'); + nexpect.spawn('echo', ['$TEST_VAR_4']).expect('test-4'); + }); + test('can export secrets to a bash subshell', async (done) => { + const vaultName = 'Vault000' as VaultName; + const vault = await polykeyAgent.vaults.createVault(vaultName); + + await vaultOps.mkdir(vault, 'dir1/dir2/dir3', { recursive: true }); + await vaultOps.addSecret(vault, 'dir1/dir2/TEST_VAR_2', 'test-2'); + + nexpect.spawn('npm', ['run', 'polykey', '--', 'secrets', 'env', '-np', dataDir, '-e', 'Vault000:**/*', 'bash']) + .sendline('echo $TEST_VAR_2') + .sendline('exit') + .run(function (_, stdout) { + expect(stdout).toContain('test-2'); + done(); + }); + }); + }); + describe('commandCreateSecret', () => { test('should create secrets', async () => { const vaultName = 'Vault1' as VaultName; @@ -250,4 +314,101 @@ describe('CLI secrets', () => { ]); }); }); + + describe('commandNewDirSecret', () => { + test('should inject secrets into a sub-shell', async () => { + const vaultName = 'Vault9' as VaultName; + const vault = await polykeyAgent.vaults.createVault(vaultName); + + const vaultName2 = 'Vault10' as VaultName; + const vault2 = await polykeyAgent.vaults.createVault(vaultName2); + + await vaultOps.addSecret(vault, 'TEST_VARIABLE_1', Buffer.from('test-1')); + await vaultOps.addSecret(vault, 'TEST_VARIABLE_2', Buffer.from('test-2')); + await vaultOps.addSecret(vault2, 'TEST_VARIABLE_3', Buffer.from('test-3')); + + // const secretPath = path.join(dataDir, 'secret'); + // await fs.promises.writeFile(secretPath, 'this is a secret'); + + const result = await utils.pk([ + 'secrets', + 'env', + '-np', + dataDir, + '--password-file', + passwordFile, + 'Vault1:TEST_VARIABLE_1', + 'Vault2:TEST_VARIABLE_3', + 'Vault1:TEST_VARIABLE_2', + // 'bash', + ]); + expect(result).toBe(0); + + // let list = await vault.listSecrets(); + // expect(list.sort()).toStrictEqual(['MySecret']); + // expect(await vault.getSecret('MySecret')).toStrictEqual( + // Buffer.from('this is a secret'), + // ); + + // await polykeyAgent.sessionManager.stopSession(); + // const result2 = await utils.pk([ + // 'secrets', + // 'create', + // '-np', + // dataDir, + // '-sp', + // 'Vault1:MySecret', + // '-fp', + // secretPath, + // ]); + // expect(result2).toBe(passwordExitCode); + + // list = await vault.listSecrets(); + // expect(list.sort()).toStrictEqual(['MySecret']); + }); + test('should export secrets', async () => { + const stdoutSpy = jest.spyOn(process.stdout, 'write'); + + const vaultName = 'Vault11' as VaultName; + const vault = await polykeyAgent.vaults.createVault(vaultName); + + const vaultName2 = 'Vault12' as VaultName; + const vault2 = await polykeyAgent.vaults.createVault(vaultName2); + + await vaultOps.addSecret(vault, 'TEST_VARIABLE_1', Buffer.from('test-1')); + await vaultOps.addSecret(vault, 'TEST_VARIABLE_2', Buffer.from('test-2')); + await vaultOps.addSecret(vault2, 'TEST_VARIABLE_3', Buffer.from('test-3')); + + const message = 'export TEST_VAR_1=test-1\nTEST_VAR_3=test-3\nexport TEST_VAR_4=test-2\n'; + const message2 = 'export TEST_VAR_1=test-1\nexport TEST_VAR_3=test-3\nexport TEST_VAR_4=test-2\n'; + + const result = await utils.pk([ + 'secrets', + 'env', + '--', + '-e', + 'Vault1:TEST_VAR_1', + 'Vault2:TEST_VAR_3', + '-e', + 'Vault1:TEST_VAR_2=TEST_VAR_4', + ]); + expect(result).toBe(0); + expect(stdoutSpy).toHaveBeenLastCalledWith(message); + + const result2 = await utils.pk([ + 'secrets', + 'env', + '-np', + dataDir, + '--password-file', + passwordFile, + '-e', + 
'Vault1:TEST_VAR_1', + 'Vault2:TEST_VAR_3', + 'Vault1:TEST_VAR_2=TEST_VAR_4', + ]); + expect(result2).toBe(0); + expect(stdoutSpy).toHaveBeenLastCalledWith(message2); + }); + }); }); diff --git a/tests/client/utils.test.ts b/tests/client/utils.test.ts new file mode 100644 index 000000000..8cdc68e8a --- /dev/null +++ b/tests/client/utils.test.ts @@ -0,0 +1,41 @@ +import fs from 'fs'; +import os from 'os'; +import path from 'path'; + +import * as utils from '@/client/utils'; + +describe('client/utils globbing correctly returns', () => { + let dataDir: string; + let opts; + + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + opts = { cwd: dataDir, absolute: false, filesOnly: true, flush: false } + await fs.promises.mkdir(path.join(dataDir, 'dir1/dir2/dir3'), { recursive: true }); + await fs.promises.mkdir(path.join(dataDir, 'dir4')); + await fs.promises.writeFile(path.join(dataDir, 'file1'), 'f1'); + await fs.promises.writeFile(path.join(dataDir, 'dir1/file2'), 'f2'); + await fs.promises.writeFile(path.join(dataDir, 'dir1/dir2/file3'), 'f3'); + await fs.promises.writeFile(path.join(dataDir, 'dir1/dir2/dir3/file4'), 'f4'); + await fs.promises.writeFile(path.join(dataDir, 'dir1/dir2/dir3/file5'), 'f5'); + await fs.promises.writeFile(path.join(dataDir, 'dir1/dir2/dir3/.file6'), 'f6'); + await fs.promises.writeFile(path.join(dataDir, 'dir4/file7'), 'f7'); + await fs.promises.writeFile(path.join(dataDir, 'dir4/.file8'), 'f8'); + }); + test('** lists all files in the cwd', async () => { + const msg = [ + 'file1', + 'dir1/file2', + 'dir1/dir2/file3', + 'dir1/dir2/dir3/file4', + 'dir1/dir2/dir3/file5', + 'dir1/dir2/dir3/.file6', + 'dir4/file7', + 'dir4/.file8', + ] + const res = await utils.glob(fs, '**/**/**', opts); + expect(res.sort()).toStrictEqual(msg.sort()); + }); +}); diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index f557a9681..2ce43ada2 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -199,6 +199,67 @@ describe('VaultInternal', () => { await vault.readWorkingDirectory(); }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); }); + test('globbing files and directories', async () => { + await vault.commit(async (efs) => { + await efs.writeFile('secret1', 'secret-content'); + await efs.writeFile('secret2', 'secret-content'); + await efs.writeFile('secret3', 'secret-content'); + await efs.writeFile('secret4', 'secret-content'); + await efs.writeFile('secret5', 'secret-content'); + await efs.mkdir('dir1/dir2', { recursive: true }); + await efs.writeFile('dir1/secret6', 'secret-content'); + await efs.writeFile('dir1/secret7', 'secret-content'); + await efs.writeFile('dir1/dir2/secret8', 'secret-content'); + await efs.writeFile('dir1/dir2/secret9', 'secret-content'); + await efs.writeFile('dir1/dir2/secret10', 'secret-content'); + await efs.writeFile('dir1/dir2/secret11', 'secret-content'); + }); + let list = await vault.glob('*'); + expect(list.sort()).toEqual([ + 'secret1', + 'secret2', + 'secret3', + 'secret4', + 'secret5' + ].sort()); + list = await vault.glob('dir1/*'); + expect(list.sort()).toEqual([ + 'dir1/secret6', + 'dir1/secret7' + ].sort()); + list = await vault.glob('dir1/dir2/*'); + expect(list.sort()).toEqual([ + 'dir1/dir2/secret8', + 'dir1/dir2/secret9', + 'dir1/dir2/secret10', + 'dir1/dir2/secret11' + ].sort()); + list = await vault.glob('**/*'); + expect(list.sort()).toEqual([ + 'dir1/dir2/secret10', + 'dir1/dir2/secret11', + 
'dir1/dir2/secret8', + 'dir1/dir2/secret9', + 'dir1/secret6', + 'dir1/secret7', + 'secret1', + 'secret2', + 'secret3', + 'secret4', + 'secret5' + ].sort()); + list = await vault.glob('dir1/**/*'); + expect(list.sort()).toEqual([ + 'dir1/dir2', + 'dir1/dir2/secret10', + 'dir1/dir2/secret11', + 'dir1/dir2/secret8', + 'dir1/dir2/secret9', + 'dir1/secret6', + 'dir1/secret7', + ].sort()); + }); + test('creating state on disk', async () => { expect(await fs.promises.readdir(dataDir)).toContain('db'); });