Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ jobs:
- 'yarn test:nuts:bulk:export'
- 'yarn test:nuts:bulk:import'
- 'yarn test:nuts:bulk:update'
- 'yarn test:nuts:bulk:results'
- 'yarn test:nuts:data:bulk-upsert-delete'
- 'yarn test:nuts:data:create'
- 'yarn test:nuts:data:query'
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@
"test:nuts:bulk:export": "nyc mocha \"./test/commands/data/export/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:bulk:import": "nyc mocha \"./test/commands/data/import/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:bulk:update": "nyc mocha \"./test/commands/data/update/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:bulk:results": "nyc mocha \"./test/commands/data/bulk/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:data:bulk-upsert-delete": "nyc mocha \"./test/commands/data/dataBulk.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:data:create": "nyc mocha \"./test/commands/data/create/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
"test:nuts:data:query": "nyc mocha \"./test/commands/data/query/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20",
Expand Down
18 changes: 16 additions & 2 deletions src/bulkIngest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -135,11 +135,18 @@ export async function bulkIngest(opts: {
if (jobInfo.numberRecordsFailed) {
stages.error();

throw messages.createError(
const err = messages.createError(
'error.failedRecordDetails',
[jobInfo.numberRecordsFailed],
[conn.getUsername(), job.id, conn.getUsername(), job.id]
);

err.setData({
jobId: jobInfo.id,
state: jobInfo.state,
});

throw err;
}

stages.stop();
Expand Down Expand Up @@ -242,11 +249,18 @@ export async function bulkIngestResume(opts: {
};
}

throw messages.createError(
const err = messages.createError(
'error.failedRecordDetails',
[jobInfo.numberRecordsFailed],
[conn.getUsername(), job.id, conn.getUsername(), job.id]
);

err.setData({
jobId: jobInfo.id,
state: jobInfo.state,
});

throw err;
}

stages.stop();
Expand Down
23 changes: 15 additions & 8 deletions test/commands/data/bulk/results.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import path from 'node:path';
import { EOL } from 'node:os';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { ensureString } from '@salesforce/ts-types';
import { AnyJson, ensureString } from '@salesforce/ts-types';
import { validateCsv } from '../../../testUtil.js';
import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js';
import { DataBulkResultsResult } from '../../../../src/commands/data/bulk/results.js';
Expand Down Expand Up @@ -61,15 +61,22 @@ describe('data bulk results NUTs', () => {
it('should get success/failure results from a bulk import', async () => {
const csvFile = await generateAccountsCsv(session.project.dir, 5000);

const bulkImportAsync = execCmd<DataImportBulkResult>(
`data import bulk --file ${csvFile} --sobject account --wait 3 --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataImportBulkResult;
type bulkImportErrResponse = AnyJson & {
data: {
jobId: string;
state: string;
};
};

// the CSV will have 5000 invalid rows, so the command is expected to fail; we only need the job ID from the response to fetch results
const bulkImportErr = execCmd(`data import bulk --file ${csvFile} --sobject account --wait 5 --json`, {
ensureExitCode: 1,
}).jsonOutput as unknown as bulkImportErrResponse;

expect(bulkImportAsync.jobId).not.to.be.undefined;
expect(bulkImportAsync.jobId).to.be.length(18);
expect(bulkImportErr.data.jobId).not.to.be.undefined;
expect(bulkImportErr.data.jobId).to.be.length(18);

const results = execCmd<DataBulkResultsResult>(`data bulk results --job-id ${bulkImportAsync.jobId} --json`, {
const results = execCmd<DataBulkResultsResult>(`data bulk results --job-id ${bulkImportErr.data.jobId} --json`, {
ensureExitCode: 0,
}).jsonOutput?.result as DataBulkResultsResult;

Expand Down
Loading