From 41682796a7af0f90cda5c8741a6f45550ce59f0e Mon Sep 17 00:00:00 2001 From: Cristian Dominguez Date: Mon, 18 Aug 2025 17:16:56 -0300 Subject: [PATCH 1/2] chore: run bulk result NUTs --- .github/workflows/test.yml | 1 + package.json | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7489f6b1..19d55bac 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -25,6 +25,7 @@ jobs: - 'yarn test:nuts:bulk:export' - 'yarn test:nuts:bulk:import' - 'yarn test:nuts:bulk:update' + - 'yarn test:nuts:bulk:results' - 'yarn test:nuts:data:bulk-upsert-delete' - 'yarn test:nuts:data:create' - 'yarn test:nuts:data:query' diff --git a/package.json b/package.json index 2728d782..23fd9aec 100644 --- a/package.json +++ b/package.json @@ -109,6 +109,7 @@ "test:nuts:bulk:export": "nyc mocha \"./test/commands/data/export/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", "test:nuts:bulk:import": "nyc mocha \"./test/commands/data/import/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", "test:nuts:bulk:update": "nyc mocha \"./test/commands/data/update/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", + "test:nuts:bulk:results": "nyc mocha \"./test/commands/data/bulk/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", "test:nuts:data:bulk-upsert-delete": "nyc mocha \"./test/commands/data/dataBulk.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", "test:nuts:data:create": "nyc mocha \"./test/commands/data/create/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", "test:nuts:data:query": "nyc mocha \"./test/commands/data/query/*.nut.ts\" --slow 4500 --timeout 600000 --parallel --jobs 20", From ea6b407eb161f7bd5c7ce905a9a899912c59897f Mon Sep 17 00:00:00 2001 From: Cristian Dominguez Date: Mon, 18 Aug 2025 17:19:36 -0300 Subject: [PATCH 2/2] test: fix bulk result NUTs --- src/bulkIngest.ts | 18 ++++++++++++++++-- 
test/commands/data/bulk/results.nut.ts | 23 +++++++++++++++-------- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/src/bulkIngest.ts b/src/bulkIngest.ts index d65b1519..d4c9a3e1 100644 --- a/src/bulkIngest.ts +++ b/src/bulkIngest.ts @@ -135,11 +135,18 @@ export async function bulkIngest(opts: { if (jobInfo.numberRecordsFailed) { stages.error(); - throw messages.createError( + const err = messages.createError( 'error.failedRecordDetails', [jobInfo.numberRecordsFailed], [conn.getUsername(), job.id, conn.getUsername(), job.id] ); + + err.setData({ + jobId: jobInfo.id, + state: jobInfo.state, + }); + + throw err; } stages.stop(); @@ -242,11 +249,18 @@ export async function bulkIngestResume(opts: { }; } - throw messages.createError( + const err = messages.createError( 'error.failedRecordDetails', [jobInfo.numberRecordsFailed], [conn.getUsername(), job.id, conn.getUsername(), job.id] ); + + err.setData({ + jobId: jobInfo.id, + state: jobInfo.state, + }); + + throw err; } stages.stop(); diff --git a/test/commands/data/bulk/results.nut.ts b/test/commands/data/bulk/results.nut.ts index 8abd5273..0c2673cd 100644 --- a/test/commands/data/bulk/results.nut.ts +++ b/test/commands/data/bulk/results.nut.ts @@ -9,7 +9,7 @@ import path from 'node:path'; import { EOL } from 'node:os'; import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; import { expect } from 'chai'; -import { ensureString } from '@salesforce/ts-types'; +import { AnyJson, ensureString } from '@salesforce/ts-types'; import { validateCsv } from '../../../testUtil.js'; import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; import { DataBulkResultsResult } from '../../../../src/commands/data/bulk/results.js'; @@ -61,15 +61,22 @@ describe('data bulk results NUTs', () => { it('should get success/failure results from a bulk import', async () => { const csvFile = await generateAccountsCsv(session.project.dir, 5000); - const bulkImportAsync = execCmd( - `data 
import bulk --file ${csvFile} --sobject account --wait 3 --json`, - { ensureExitCode: 0 } - ).jsonOutput?.result as DataImportBulkResult; + type bulkImportErrResponse = AnyJson & { + data: { + jobId: string; + state: string; + }; + }; + + // the CSV will have 5000 wrong rows, so the command is expected to fail; we just need the job ID from the response to get results + const bulkImportErr = execCmd(`data import bulk --file ${csvFile} --sobject account --wait 5 --json`, { + ensureExitCode: 1, + }).jsonOutput as unknown as bulkImportErrResponse; - expect(bulkImportAsync.jobId).not.to.be.undefined; - expect(bulkImportAsync.jobId).to.be.length(18); + expect(bulkImportErr.data.jobId).not.to.be.undefined; + expect(bulkImportErr.data.jobId).to.be.length(18); - const results = execCmd(`data bulk results --job-id ${bulkImportErr.data.jobId} --json`, { + const results = execCmd(`data bulk results --job-id ${bulkImportErr.data.jobId} --json`, { ensureExitCode: 0, }).jsonOutput?.result as DataBulkResultsResult;