From bcbb3543f0405d17a2bc75f4a5d0176262f6a460 Mon Sep 17 00:00:00 2001 From: Willie Ruemmele Date: Thu, 14 Aug 2025 11:15:59 -0600 Subject: [PATCH 1/4] fix: remove deprecated async flags --- command-snapshot.json | 13 ++++--------- messages/data.export.bulk.md | 8 ++------ messages/data.export.resume.md | 2 +- messages/data.import.bulk.md | 6 +----- messages/data.import.resume.md | 2 +- messages/data.update.bulk.md | 6 +----- messages/data.update.resume.md | 2 +- src/bulkIngest.ts | 9 +-------- src/commands/data/delete/bulk.ts | 2 +- src/commands/data/export/bulk.ts | 8 +------- src/commands/data/import/bulk.ts | 9 +-------- src/commands/data/update/bulk.ts | 7 +------ src/commands/data/upsert/bulk.ts | 2 +- test/commands/data/bulk/results.nut.ts | 7 +------ test/commands/data/export/resume.nut.ts | 4 ++-- test/commands/data/import/resume.nut.ts | 4 ++-- test/commands/data/update/resume.nut.ts | 2 +- 17 files changed, 23 insertions(+), 70 deletions(-) diff --git a/command-snapshot.json b/command-snapshot.json index fa59e45e..de919e69 100644 --- a/command-snapshot.json +++ b/command-snapshot.json @@ -37,10 +37,9 @@ "alias": [], "command": "data:delete:bulk", "flagAliases": ["apiversion", "csvfile", "sobjecttype", "targetusername", "u"], - "flagChars": ["a", "f", "o", "s", "w"], + "flagChars": ["f", "o", "s", "w"], "flags": [ "api-version", - "async", "file", "flags-dir", "hard-delete", @@ -88,7 +87,6 @@ "flags": [ "all-rows", "api-version", - "async", "column-delimiter", "flags-dir", "json", @@ -141,10 +139,9 @@ "alias": [], "command": "data:import:bulk", "flagAliases": [], - "flagChars": ["a", "f", "o", "s", "w"], + "flagChars": ["f", "o", "s", "w"], "flags": [ "api-version", - "async", "column-delimiter", "file", "flags-dir", @@ -213,10 +210,9 @@ "alias": [], "command": "data:update:bulk", "flagAliases": [], - "flagChars": ["a", "f", "o", "s", "w"], + "flagChars": ["f", "o", "s", "w"], "flags": [ "api-version", - "async", "column-delimiter", "file", "flags-dir", 
@@ -260,10 +256,9 @@ "alias": [], "command": "data:upsert:bulk", "flagAliases": ["apiversion", "csvfile", "externalid", "sobjecttype", "targetusername", "u"], - "flagChars": ["a", "f", "i", "o", "s", "w"], + "flagChars": ["f", "i", "o", "s", "w"], "flags": [ "api-version", - "async", "column-delimiter", "external-id", "file", diff --git a/messages/data.export.bulk.md b/messages/data.export.bulk.md index 9d6fb241..a9c8199a 100644 --- a/messages/data.export.bulk.md +++ b/messages/data.export.bulk.md @@ -6,9 +6,9 @@ Bulk export records from an org into a file using a SOQL query. Uses Bulk API 2. You can use this command to export millions of records from an org, either to migrate data or to back it up. -Use a SOQL query to specify the fields of a standard or custom object that you want to export. Specify the SOQL query either at the command line with the --query flag or read it from a file with the --query-file flag; you can't specify both flags. The --output-file flag is required, which means you can only write the records to a file, in either CSV or JSON format. +Use a SOQL query to specify the fields of a standard or custom object that you want to export. Specify the SOQL query either at the command line with the --query flag or read it from a file with the --query-file flag; you can't specify both flags. The --output-file flag is required, which means you can only write the records to a file, in either CSV or JSON format. -Bulk exports can take a while, depending on how many records are returned by the SOQL query. If the command times out, or you specified the --async flag, the command displays the job ID. To see the status and get the results of the job, run "sf data export resume" and pass the job ID to the --job-id flag. +Bulk exports can take a while, depending on how many records are returned by the SOQL query. If the command times out, the command displays the job ID. 
To see the status and get the results of the job, run "sf data export resume" and pass the job ID to the --job-id flag. IMPORTANT: This command uses Bulk API 2.0, which limits the type of SOQL queries you can run. For example, you can't use aggregate functions such as count(). For the complete list of limitations, see the "SOQL Considerations" section in the "Bulk API 2.0 and Bulk API Developer Guide" (https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/queries.htm). @@ -22,10 +22,6 @@ IMPORTANT: This command uses Bulk API 2.0, which limits the type of SOQL queries <%= config.bin %> <%= command.id %> --query "SELECT Id, Name, Account.Name FROM Contact" --output-file export-accounts.json --result-format json --wait 10 --all-rows -- Export asynchronously; the command immediately returns a job ID that you then pass to the "sf data export resume" command: - - <%= config.bin %> <%= command.id %> --query "SELECT Id, Name, Account.Name FROM Contact" --output-file export-accounts.json --result-format json --async - # flags.wait.summary Time to wait for the command to finish, in minutes. diff --git a/messages/data.export.resume.md b/messages/data.export.resume.md index fe84f7df..4a83efd9 100644 --- a/messages/data.export.resume.md +++ b/messages/data.export.resume.md @@ -4,7 +4,7 @@ Resume a bulk export job that you previously started. Uses Bulk API 2.0. # description -When the original "data export bulk" command either times out or is run with the --async flag, it displays a job ID. To see the status and get the results of the bulk export, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk export job. +When the original "data export bulk" command either times out, it displays a job ID. To see the status and get the results of the bulk export, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk export job. 
Using either `--job-id` or `--use-most-recent` will properly resolve to the correct org where the bulk job was started based on the cached data by "data export bulk". diff --git a/messages/data.import.bulk.md b/messages/data.import.bulk.md index 1156f259..30411042 100644 --- a/messages/data.import.bulk.md +++ b/messages/data.import.bulk.md @@ -8,7 +8,7 @@ You can use this command to import millions of records into the object from a fi All the records in the CSV file must be for the same Salesforce object. Specify the object with the `--sobject` flag. -Bulk imports can take a while, depending on how many records are in the CSV file. If the command times out, or you specified the --async flag, the command displays the job ID. To see the status and get the results of the job, run "sf data import resume" and pass the job ID to the --job-id flag. +Bulk imports can take a while, depending on how many records are in the CSV file. If the command times out, the command displays the job ID. To see the status and get the results of the job, run "sf data import resume" and pass the job ID to the --job-id flag. For information and examples about how to prepare your CSV files, see "Prepare Data to Ingest" in the "Bulk API 2.0 and Bulk API Developer Guide" (https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/datafiles_prepare_data.htm). @@ -18,10 +18,6 @@ For information and examples about how to prepare your CSV files, see "Prepare D <%= config.bin %> <%= command.id %> --file accounts.csv --sobject Account --wait 10 --target-org my-scratch -- Import asynchronously and use the default org; the command immediately returns a job ID that you then pass to the "sf data import resume" command: - - <%= config.bin %> <%= command.id %> --file accounts.csv --sobject Account --async - # flags.async.summary Don't wait for the command to complete. 
diff --git a/messages/data.import.resume.md b/messages/data.import.resume.md index 5c10f2b4..067f1262 100644 --- a/messages/data.import.resume.md +++ b/messages/data.import.resume.md @@ -4,7 +4,7 @@ Resume a bulk import job that you previously started. Uses Bulk API 2.0. # description -When the original "sf data import bulk" command either times out or is run with the --async flag, it displays a job ID. To see the status and get the results of the bulk import, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk import job. +When the original "sf data import bulk" command either times out, it displays a job ID. To see the status and get the results of the bulk import, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk import job. # examples diff --git a/messages/data.update.bulk.md b/messages/data.update.bulk.md index 9df4773d..22841e10 100644 --- a/messages/data.update.bulk.md +++ b/messages/data.update.bulk.md @@ -8,7 +8,7 @@ You can use this command to update millions of Salesforce object records based o All the records in the CSV file must be for the same Salesforce object. Specify the object with the `--sobject` flag. The first column of every line in the CSV file must be an ID of the record you want to update. The CSV file can contain only existing records; if a record in the file doesn't currently exist in the Salesforce object, the command fails. Consider using "sf data upsert bulk" if you also want to insert new records. -Bulk updates can take a while, depending on how many records are in the CSV file. If the command times out, or you specified the --async flag, the command displays the job ID. To see the status and get the results of the job, run "sf data update resume" and pass the job ID to the --job-id flag. +Bulk updates can take a while, depending on how many records are in the CSV file. 
If the command times out, the command displays the job ID. To see the status and get the results of the job, run "sf data update resume" and pass the job ID to the --job-id flag. For information and examples about how to prepare your CSV files, see "Prepare Data to Ingest" in the "Bulk API 2.0 and Bulk API Developer Guide" (https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/datafiles_prepare_data.htm). @@ -18,10 +18,6 @@ For information and examples about how to prepare your CSV files, see "Prepare D <%= config.bin %> <%= command.id %> --file accounts.csv --sobject Account --wait 10 --target-org my-scratch -- Update asynchronously and use the default org; the command immediately returns a job ID that you then pass to the "sf data update resume" command: - - <%= config.bin %> <%= command.id %> --file accounts.csv --sobject Account --async - # flags.async.summary Don't wait for the command to complete. diff --git a/messages/data.update.resume.md b/messages/data.update.resume.md index 3e446106..b08ef880 100644 --- a/messages/data.update.resume.md +++ b/messages/data.update.resume.md @@ -4,7 +4,7 @@ Resume a bulk update job that you previously started. Uses Bulk API 2.0. # description -When the original "sf data update bulk" command either times out or is run with the --async flag, it displays a job ID. To see the status and get the results of the bulk update, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk update job. +When the original "sf data update bulk" command times out, it displays a job ID. To see the status and get the results of the bulk update, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk update job. Using either `--job-id` or `--use-most-recent` will properly resolve to the correct org where the bulk job was started based on the cached data by "data update bulk". 
diff --git a/src/bulkIngest.ts b/src/bulkIngest.ts index 84f1dc85..a1de01b4 100644 --- a/src/bulkIngest.ts +++ b/src/bulkIngest.ts @@ -344,7 +344,7 @@ export const lineEndingFlag = Flags.option({ })(); /** - * Use only for commands that maintain sfdx compatibility. + * Use only for commands that maintain sfdx compatibility. * * @deprecated */ @@ -371,13 +371,6 @@ export const baseUpsertDeleteFlags = { summary: messages.getMessage('flags.wait.summary'), min: 0, defaultValue: 0, - exclusive: ['async'], - }), - async: Flags.boolean({ - char: 'a', - summary: messages.getMessage('flags.async.summary'), - exclusive: ['wait'], - deprecated: true, - }), }; diff --git a/src/commands/data/delete/bulk.ts b/src/commands/data/delete/bulk.ts index 6900c1d3..cf449921 100644 --- a/src/commands/data/delete/bulk.ts +++ b/src/commands/data/delete/bulk.ts @@ -42,7 +42,7 @@ export default class Delete extends SfCommand { columnDelimiter: undefined, conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkDeleteRequestCache.create(), - async: flags.async, + async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/export/bulk.ts b/src/commands/data/export/bulk.ts index a92e0d88..7569349f 100644 --- a/src/commands/data/export/bulk.ts +++ b/src/commands/data/export/bulk.ts @@ -38,12 +38,6 @@ export default class DataExportBulk extends SfCommand { char: 'w', helpValue: '', unit: 'minutes', - exclusive: ['async'], - }), - async: Flags.boolean({ - summary: messages.getMessage('flags.async.summary'), - exclusive: ['wait'], - deprecated: true, }), query: Flags.string({ summary: messages.getMessage('flags.query.summary'), @@ -108,7 +102,7 @@ export default class DataExportBulk extends SfCommand { const conn = flags['target-org'].getConnection(flags['api-version']); - const timeout = flags.async ? Duration.minutes(0) : flags.wait ?? Duration.minutes(0); + const timeout = flags.wait ?? 
Duration.minutes(0); // `flags['query-file']` will be present if `flags.query` isn't. oclif's `exclusive` isn't quite that clever const soqlQuery = flags.query ?? fs.readFileSync(flags['query-file'] as string, 'utf8'); diff --git a/src/commands/data/import/bulk.ts b/src/commands/data/import/bulk.ts index 719a24ec..b33c5e05 100644 --- a/src/commands/data/import/bulk.ts +++ b/src/commands/data/import/bulk.ts @@ -26,12 +26,6 @@ export default class DataImportBulk extends SfCommand { public static readonly examples = messages.getMessages('examples'); public static readonly flags = { - async: Flags.boolean({ - summary: messages.getMessage('flags.async.summary'), - char: 'a', - exclusive: ['wait'], - deprecated: true, - }), file: Flags.file({ summary: messages.getMessage('flags.file.summary'), char: 'f', @@ -48,7 +42,6 @@ export default class DataImportBulk extends SfCommand { summary: messages.getMessage('flags.wait.summary'), char: 'w', unit: 'minutes', - exclusive: ['async'], }), 'target-org': Flags.requiredOrg(), 'line-ending': Flags.option({ @@ -71,7 +64,7 @@ export default class DataImportBulk extends SfCommand { columnDelimiter: flags['column-delimiter'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkImportRequestCache.create(), - async: flags.async, + async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/update/bulk.ts b/src/commands/data/update/bulk.ts index b2c5434e..15ef3758 100644 --- a/src/commands/data/update/bulk.ts +++ b/src/commands/data/update/bulk.ts @@ -26,11 +26,6 @@ export default class DataUpdateBulk extends SfCommand { public static readonly examples = messages.getMessages('examples'); public static readonly flags = { - async: Flags.boolean({ - summary: messages.getMessage('flags.async.summary'), - char: 'a', - deprecated: true, - }), wait: Flags.duration({ summary: messages.getMessage('flags.wait.summary'), char: 'w', @@ -65,7 +60,7 @@ export default 
class DataUpdateBulk extends SfCommand { columnDelimiter: flags['column-delimiter'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkUpdateRequestCache.create(), - async: flags.async, + async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/upsert/bulk.ts b/src/commands/data/upsert/bulk.ts index 377d9a4c..74ca4eb1 100644 --- a/src/commands/data/upsert/bulk.ts +++ b/src/commands/data/upsert/bulk.ts @@ -46,7 +46,7 @@ export default class Upsert extends SfCommand { externalId: flags['external-id'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkUpsertRequestCache.create(), - async: flags.async, + async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/test/commands/data/bulk/results.nut.ts b/test/commands/data/bulk/results.nut.ts index 4ab52128..8abd5273 100644 --- a/test/commands/data/bulk/results.nut.ts +++ b/test/commands/data/bulk/results.nut.ts @@ -10,7 +10,6 @@ import { EOL } from 'node:os'; import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; import { expect } from 'chai'; import { ensureString } from '@salesforce/ts-types'; -import { Duration, sleep } from '@salesforce/kit'; import { validateCsv } from '../../../testUtil.js'; import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; import { DataBulkResultsResult } from '../../../../src/commands/data/bulk/results.js'; @@ -63,17 +62,13 @@ describe('data bulk results NUTs', () => { const csvFile = await generateAccountsCsv(session.project.dir, 5000); const bulkImportAsync = execCmd( - `data import bulk --file ${csvFile} --sobject account --async --json`, + `data import bulk --file ${csvFile} --sobject account --wait 3 --json`, { ensureExitCode: 0 } ).jsonOutput?.result as DataImportBulkResult; expect(bulkImportAsync.jobId).not.to.be.undefined; expect(bulkImportAsync.jobId).to.be.length(18); - // wait 2 
minutes for the async bulk import above to finish. - // we can't use `data import resume` because we expect record failures to happen. - await sleep(Duration.minutes(2)); - const results = execCmd(`data bulk results --job-id ${bulkImportAsync.jobId} --json`, { ensureExitCode: 0, }).jsonOutput?.result as DataBulkResultsResult; diff --git a/test/commands/data/export/resume.nut.ts b/test/commands/data/export/resume.nut.ts index 72ddd6c6..c0b299da 100644 --- a/test/commands/data/export/resume.nut.ts +++ b/test/commands/data/export/resume.nut.ts @@ -47,7 +47,7 @@ describe('data export resume NUTs', () => { it('should resume export in csv format', async () => { const outputFile = 'export-accounts.csv'; - const command = `data export bulk -q "${soqlQuery}" --output-file ${outputFile} --async --json`; + const command = `data export bulk -q "${soqlQuery}" --output-file ${outputFile} --json`; const exportAsyncResult = execCmd(command, { ensureExitCode: 0 }).jsonOutput?.result; @@ -71,7 +71,7 @@ describe('data export resume NUTs', () => { it('should resume export in json format', async () => { const outputFile = 'export-accounts.json'; - const command = `data export bulk -q "${soqlQuery}" --output-file ${outputFile} --async --result-format json --json`; + const command = `data export bulk -q "${soqlQuery}" --output-file ${outputFile} --result-format json --json`; const exportAsyncResult = execCmd(command, { ensureExitCode: 0 }).jsonOutput?.result; diff --git a/test/commands/data/import/resume.nut.ts b/test/commands/data/import/resume.nut.ts index 1098ac1e..9855e4a7 100644 --- a/test/commands/data/import/resume.nut.ts +++ b/test/commands/data/import/resume.nut.ts @@ -35,7 +35,7 @@ describe('data import resume NUTs', () => { const csvFile = await generateAccountsCsv(session.dir); const importAsyncRes = execCmd( - `data import bulk --file ${csvFile} --sobject account --async --json`, + `data import bulk --file ${csvFile} --sobject account --json`, { ensureExitCode: 0 } 
).jsonOutput?.result; @@ -59,7 +59,7 @@ describe('data import resume NUTs', () => { it('should resume bulk import via--use-most-recent', async () => { const csvFile = await generateAccountsCsv(session.dir); - const command = `data import bulk --file ${csvFile} --sobject account --async --json`; + const command = `data import bulk --file ${csvFile} --sobject account --json`; const exportAsyncResult = execCmd(command, { ensureExitCode: 0 }).jsonOutput?.result; diff --git a/test/commands/data/update/resume.nut.ts b/test/commands/data/update/resume.nut.ts index 1acd927c..3263191c 100644 --- a/test/commands/data/update/resume.nut.ts +++ b/test/commands/data/update/resume.nut.ts @@ -60,7 +60,7 @@ describe('data update resume NUTs', () => { ); const dataUpdateAsyncRes = execCmd( - `data update bulk --file ${updatedCsv} --sobject account --async --json`, + `data update bulk --file ${updatedCsv} --sobject account --json`, { ensureExitCode: 0 } ).jsonOutput?.result; From 74c36be13b36756acd7794cc553561cafc07e4da Mon Sep 17 00:00:00 2001 From: Willie Ruemmele Date: Thu, 14 Aug 2025 11:54:31 -0600 Subject: [PATCH 2/4] Update messages/data.import.resume.md Co-authored-by: Juliet Shackell <63259011+jshackell-sfdc@users.noreply.github.com> --- messages/data.import.resume.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/messages/data.import.resume.md b/messages/data.import.resume.md index 067f1262..a6c334c0 100644 --- a/messages/data.import.resume.md +++ b/messages/data.import.resume.md @@ -4,7 +4,7 @@ Resume a bulk import job that you previously started. Uses Bulk API 2.0. # description -When the original "sf data import bulk" command either times out, it displays a job ID. To see the status and get the results of the bulk import, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk import job. +When the original "sf data import bulk" command times out, it displays a job ID. 
To see the status and get the results of the bulk import, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk import job. # examples From dc5ba9ecd08791afc103c43eae80e0e5a0025042 Mon Sep 17 00:00:00 2001 From: Willie Ruemmele Date: Thu, 14 Aug 2025 11:54:36 -0600 Subject: [PATCH 3/4] Update messages/data.export.resume.md Co-authored-by: Juliet Shackell <63259011+jshackell-sfdc@users.noreply.github.com> --- messages/data.export.resume.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/messages/data.export.resume.md b/messages/data.export.resume.md index 4a83efd9..915bd9d5 100644 --- a/messages/data.export.resume.md +++ b/messages/data.export.resume.md @@ -4,7 +4,7 @@ Resume a bulk export job that you previously started. Uses Bulk API 2.0. # description -When the original "data export bulk" command either times out, it displays a job ID. To see the status and get the results of the bulk export, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk export job. +When the original "data export bulk" command times out, it displays a job ID. To see the status and get the results of the bulk export, run this command by either passing it the job ID or using the --use-most-recent flag to specify the most recent bulk export job. Using either `--job-id` or `--use-most-recent` will properly resolve to the correct org where the bulk job was started based on the cached data by "data export bulk". 
From 5dfc351e38d6f640a42a1f8da651f745eb1fcaf0 Mon Sep 17 00:00:00 2001 From: Willie Ruemmele Date: Mon, 18 Aug 2025 09:18:57 -0600 Subject: [PATCH 4/4] chore: remove async param --- src/bulkIngest.ts | 4 +--- src/commands/data/delete/bulk.ts | 1 - src/commands/data/import/bulk.ts | 1 - src/commands/data/update/bulk.ts | 1 - src/commands/data/upsert/bulk.ts | 1 - 5 files changed, 1 insertion(+), 7 deletions(-) diff --git a/src/bulkIngest.ts b/src/bulkIngest.ts index a1de01b4..d65b1519 100644 --- a/src/bulkIngest.ts +++ b/src/bulkIngest.ts @@ -38,7 +38,6 @@ type ResumeCommandIDs = 'data import resume' | 'data update resume' | 'data upse * * It will create the specified bulk ingest job, set up the oclif/MSO stages and return the job info. * */ -// eslint-disable-next-line complexity export async function bulkIngest(opts: { resumeCmdId: ResumeCommandIDs; stageTitle: string; @@ -49,7 +48,6 @@ export async function bulkIngest(opts: { externalId?: JobInfoV2['externalIdFieldName']; conn: Connection; cache: BulkUpdateRequestCache | BulkImportRequestCache | BulkUpsertRequestCache; - async: boolean; wait: Duration; file: string; jsonEnabled: boolean; @@ -63,7 +61,7 @@ export async function bulkIngest(opts: { throw new SfError('External ID is only required for `sf data upsert bulk`.'); } - const timeout = opts.async ? Duration.minutes(0) : opts.wait ?? Duration.minutes(0); + const timeout = opts.wait ?? Duration.minutes(0); const async = timeout.milliseconds === 0; // CSV file for `delete/HardDelete` operations only have 1 column (ID), we set it to `COMMA` if not specified but any delimiter works. 
diff --git a/src/commands/data/delete/bulk.ts b/src/commands/data/delete/bulk.ts index cf449921..dffcca30 100644 --- a/src/commands/data/delete/bulk.ts +++ b/src/commands/data/delete/bulk.ts @@ -42,7 +42,6 @@ export default class Delete extends SfCommand { columnDelimiter: undefined, conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkDeleteRequestCache.create(), - async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/import/bulk.ts b/src/commands/data/import/bulk.ts index b33c5e05..c80c7011 100644 --- a/src/commands/data/import/bulk.ts +++ b/src/commands/data/import/bulk.ts @@ -64,7 +64,6 @@ export default class DataImportBulk extends SfCommand { columnDelimiter: flags['column-delimiter'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkImportRequestCache.create(), - async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/update/bulk.ts b/src/commands/data/update/bulk.ts index 15ef3758..794ae411 100644 --- a/src/commands/data/update/bulk.ts +++ b/src/commands/data/update/bulk.ts @@ -60,7 +60,6 @@ export default class DataUpdateBulk extends SfCommand { columnDelimiter: flags['column-delimiter'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkUpdateRequestCache.create(), - async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(), diff --git a/src/commands/data/upsert/bulk.ts b/src/commands/data/upsert/bulk.ts index 74ca4eb1..04b1226e 100644 --- a/src/commands/data/upsert/bulk.ts +++ b/src/commands/data/upsert/bulk.ts @@ -46,7 +46,6 @@ export default class Upsert extends SfCommand { externalId: flags['external-id'], conn: flags['target-org'].getConnection(flags['api-version']), cache: await BulkUpsertRequestCache.create(), - async: false, wait: flags.wait, file: flags.file, jsonEnabled: this.jsonEnabled(),