feat: multi-stage-output/new csv flags #1110

Open · wants to merge 22 commits into main

Changes from 13 commits
4 changes: 4 additions & 0 deletions command-snapshot.json
@@ -41,10 +41,12 @@
"flags": [
"api-version",
"async",
"column-delimiter",
"file",
"flags-dir",
"hard-delete",
"json",
"line-ending",
"loglevel",
"sobject",
"target-org",
@@ -309,10 +311,12 @@
"flags": [
"api-version",
"async",
"column-delimiter",
"external-id",
"file",
"flags-dir",
"json",
"line-ending",
"loglevel",
"sobject",
"target-org",
23 changes: 0 additions & 23 deletions messages/bulk.operation.command.md

This file was deleted.

30 changes: 29 additions & 1 deletion messages/bulkIngest.md
@@ -37,6 +37,34 @@ Job has been aborted.
- Get the job results by running: "sf data bulk results -o %s --job-id %s".
- View the job in the org: "sf org open -o %s --path '/lightning/setup/AsyncApiJobStatus/page?address=%2F%s'".

# error.hardDeletePermission

You must have the "Bulk API Hard Delete" system permission to use the --hard-delete flag. This permission is disabled by default and can be enabled only by a system administrator.

# flags.column-delimiter.summary

-Column delimiter used in the CSV file. Default is COMMA.
+Column delimiter used in the CSV file.

# flags.line-ending.summary

Line ending used in the CSV file. Default value on Windows is `CRLF`; on macOS and Linux it's `LF`.

# flags.sobject.summary

API name of the Salesforce object, either standard or custom, that you want to update or delete records from.

# flags.csvfile.summary

CSV file that contains the IDs of the records to update or delete.

# flags.wait.summary

Number of minutes to wait for the command to complete before displaying the results.

# flags.async.summary

Run the command asynchronously.

# flags.verbose.summary

Print verbose output of failed records if result is available.
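
The column-delimiter and line-ending summaries above document shared CSV-format flags. As a rough, non-authoritative sketch only (not code from this PR; the flag declarations, package import, and allowed values are assumptions based on the Bulk API 2.0 CSV options), shared flag definitions along these lines could back those messages:

```ts
// Hypothetical sketch of shared CSV-format flags; not the PR's actual implementation.
// Assumes oclif-style string flags and the Bulk API 2.0 delimiter/line-ending values.
import { Flags } from '@salesforce/sf-plugins-core';

export const csvFormatFlags = {
  'column-delimiter': Flags.string({
    summary: 'Column delimiter used in the CSV file.',
    options: ['BACKQUOTE', 'CARET', 'COMMA', 'PIPE', 'SEMICOLON', 'TAB'],
    default: 'COMMA',
  }),
  'line-ending': Flags.string({
    summary: 'Line ending used in the CSV file.',
    options: ['CRLF', 'LF'],
    // Mirrors the documented default: CRLF on Windows, LF on macOS and Linux.
    default: process.platform === 'win32' ? 'CRLF' : 'LF',
  }),
};
```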
4 changes: 0 additions & 4 deletions messages/data.update.bulk.md
@@ -38,10 +38,6 @@ CSV file that contains the Salesforce object records you want to update.

API name of the Salesforce object, either standard or custom, which you are updating.

# flags.line-ending.summary

Line ending used in the CSV file. Default value on Windows is `CRLF`; on macOS and Linux it's `LF`.

# flags.column-delimiter.summary

Column delimiter used in the CSV file. Default is COMMA.
86 changes: 25 additions & 61 deletions src/bulkDataRequestCache.ts
@@ -7,7 +7,7 @@

import { TTLConfig, Global, Logger, Messages, Org } from '@salesforce/core';
import { Duration } from '@salesforce/kit';
-import type { ResumeBulkExportOptions, ResumeBulkImportOptions, ResumeOptions } from './types.js';
+import type { ResumeBulkExportOptions, ResumeBulkImportOptions } from './types.js';
import { ColumnDelimiterKeys } from './bulkUtils.js';

Messages.importMessagesDirectoryFromMetaUrl(import.meta.url);
@@ -64,61 +64,33 @@ export abstract class BulkDataRequestCache extends TTLConfig<TTLConfig.Options,
Logger.childFromRoot('DataRequestCache').debug(`bulk cache saved for ${bulkRequestId}`);
}

-public async resolveResumeOptionsFromCache(
-bulkJobId: string | undefined,
-useMostRecent: boolean,
-org: Org | undefined,
-apiVersion: string | undefined
-): Promise<ResumeOptions> {
-if (!useMostRecent && !bulkJobId) {
-throw messages.createError('bulkRequestIdRequiredWhenNotUsingMostRecent');
-}
-const resumeOptions = {
-operation: 'query',
-query: '',
-pollingOptions: { pollTimeout: 0, pollInterval: 0 },
-} satisfies Pick<ResumeOptions['options'], 'operation' | 'query' | 'pollingOptions'>;
-
-if (useMostRecent) {
+public async resolveResumeOptionsFromCache(jobIdOrMostRecent: string | boolean): Promise<ResumeBulkImportOptions> {
+if (typeof jobIdOrMostRecent === 'boolean') {
const key = this.getLatestKey();
-if (key) {
-// key definitely exists because it came from the cache
-const entry = this.get(key);
-
-return {
-jobInfo: { id: entry.jobId },
-options: {
-...resumeOptions,
-connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(apiVersion),
-},
-};
-}
-}
-if (bulkJobId) {
-const entry = this.get(bulkJobId);
-if (entry) {
-return {
-jobInfo: { id: entry.jobId },
-options: {
-...resumeOptions,
-connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(apiVersion),
-},
-};
-} else if (org) {
-return {
-jobInfo: { id: bulkJobId },
-options: {
-...resumeOptions,
-connection: org.getConnection(apiVersion),
-},
-};
-} else {
-throw messages.createError('cannotCreateResumeOptionsWithoutAnOrg');
+if (!key) {
+throw messages.createError('error.missingCacheEntryError');
}
-} else if (useMostRecent) {
-throw messages.createError('error.missingCacheEntryError');
+// key definitely exists because it came from the cache
+const entry = this.get(key);
+
+return {
+jobInfo: { id: entry.jobId },
+options: {
+connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(),
+},
+};
} else {
-throw messages.createError('bulkRequestIdRequiredWhenNotUsingMostRecent');
+const entry = this.get(jobIdOrMostRecent);
+if (!entry) {
+throw messages.createError('error.bulkRequestIdNotFound', [jobIdOrMostRecent]);
+}
+
+return {
+jobInfo: { id: entry.jobId },
+options: {
+connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(),
+},
+};
Member Author commented:
Simplified resolveResumeOptionsFromCache so that it matches the data import/export cache resolvers.
The previous implementation had a few unused properties and one (undocumented?) behavior: passing a job ID that didn't exist in the cache wouldn't cause an error; the resolver would return it as if it had been found in the local cache.
Added code for the specific bulk commands that use this behavior, to avoid breaking changes.

}
}
}
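
To make the new contract concrete, here is a hedged usage sketch of the simplified resolver described in the comment above. The concrete cache subclass, import path, and job ID are illustrative assumptions, not code from this diff:

```ts
// Illustrative only: assumes a concrete subclass such as BulkDeleteRequestCache
// extending BulkDataRequestCache, and a placeholder job ID.
import { BulkDeleteRequestCache } from './bulkDataRequestCache.js';

async function resumeExamples(): Promise<void> {
  const cache = await BulkDeleteRequestCache.create();

  // --use-most-recent: pass `true`; throws error.missingCacheEntryError if the cache is empty.
  const mostRecent = await cache.resolveResumeOptionsFromCache(true);

  // --job-id: pass the ID; an ID missing from the cache now throws error.bulkRequestIdNotFound
  // instead of being passed through as if it had been cached.
  const byId = await cache.resolveResumeOptionsFromCache('750XXXXXXXXXXXXXXX');

  // Both resolve to ResumeBulkImportOptions: the cached job ID plus an org connection.
  console.log(mostRecent.jobInfo.id, byId.jobInfo.id);
}
```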
@@ -377,12 +349,6 @@ export class BulkExportRequestCache extends TTLConfig<TTLConfig.Options, BulkExp
jobIdOrMostRecent: string | boolean,
apiVersion: string | undefined
): Promise<ResumeBulkExportOptions> {
-const resumeOptionsOptions = {
-operation: 'query',
-query: '',
-pollingOptions: { pollTimeout: 0, pollInterval: 0 },
-} satisfies Pick<ResumeOptions['options'], 'operation' | 'query' | 'pollingOptions'>;
-
if (typeof jobIdOrMostRecent === 'boolean') {
const key = this.getLatestKey();
if (!key) {
@@ -399,7 +365,6 @@
columnDelimiter: entry.outputInfo.columnDelimiter,
},
options: {
-...resumeOptionsOptions,
connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(apiVersion),
},
};
@@ -413,7 +378,6 @@
jobInfo: { id: entry.jobId },
outputInfo: entry.outputInfo,
options: {
-...resumeOptionsOptions,
connection: (await Org.create({ aliasOrUsername: entry.username })).getConnection(apiVersion),
},
};