Commit 0cd1d4a

feat(node): Add ESM support for postgres.js instrumentation (#17961)
Rewrite the `postgresjs` instrumentation with a new architecture:

- Added ESM support via `replaceExports`
- Moved to main export wrapping instead of internal module patching
  - Previously, we were patching the `connection.js` and `query.js` internal modules
  - New approach: we wrap the main postgres module export to intercept sql instance creation
  - Connection context is now stored directly on sql instances using `CONNECTION_CONTEXT_SYMBOL`
- `Query.prototype` fallback (CJS only)
  - Patches `Query.prototype.handle` as a fallback for pre-existing sql instances
  - Uses a `QUERY_FROM_INSTRUMENTED_SQL` marker to prevent duplicate spans

Also:

- Improved SQL sanitization
- The `port` attribute is now stored as a number, per OTel semantic conventions
- Added fallback regex extraction of the operation name when `command` isn't available
1 parent 861ed3f commit 0cd1d4a
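
To make the wrapping approach above concrete, here is a minimal, hypothetical sketch in TypeScript. Only the general ideas come from the commit message (wrapping the main export, stashing connection context on the returned `sql` instance under a `CONNECTION_CONTEXT_SYMBOL`, storing `port` as a number, and falling back to a regex when no `command` is available for the operation name); the helper names, option parsing, and types below are assumptions for illustration, not the instrumentation code from this commit.

```ts
// Minimal sketch only -- helper names and option parsing are assumptions, not this commit's code.
const CONNECTION_CONTEXT_SYMBOL = Symbol('sentry.postgresjs.connectionContext');

interface ConnectionContext {
  database?: string;
  host?: string;
  // Stored as a number, per OTel semantic conventions.
  port?: number;
}

// Fallback: derive the operation name from the sanitized query text when `command` is unavailable.
function extractOperationName(sanitizedQuery: string): string | undefined {
  const match = /^\s*([a-zA-Z]+)/.exec(sanitizedQuery);
  return match?.[1]?.toUpperCase();
}

// Wrap the module's main export so every `sql` instance created afterwards carries its connection context.
function wrapPostgresModule<T extends (...args: any[]) => any>(originalPostgres: T): T {
  return function wrappedPostgres(this: unknown, ...args: any[]) {
    const sql = originalPostgres.apply(this, args);

    // Assumed option handling: the real instrumentation also parses `postgres://...` URLs.
    const options = (typeof args[0] === 'object' && args[0] !== null ? args[0] : {}) as Record<string, unknown>;
    const context: ConnectionContext = {
      database: options.database as string | undefined,
      host: options.host as string | undefined,
      port: options.port !== undefined ? Number(options.port) : undefined,
    };

    // Store the context directly on the instance so later query interception can read it.
    (sql as Record<symbol, unknown>)[CONNECTION_CONTEXT_SYMBOL] = context;
    return sql;
  } as T;
}
```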

File tree: 17 files changed, +2271 −210 lines

.size-limit.js

Lines changed: 1 addition & 1 deletion
@@ -240,7 +240,7 @@ module.exports = [
     import: createImport('init'),
     ignore: [...builtinModules, ...nodePrefixedBuiltinModules],
     gzip: true,
-    limit: '161 KB',
+    limit: '162 KB',
   },
   {
     name: '@sentry/node - without tracing',
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
const Sentry = require('@sentry/node');
const { loggingTransport } = require('@sentry-internal/node-integration-tests');

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
  integrations: [
    Sentry.postgresJsIntegration({
      requestHook: (span, sanitizedSqlQuery, connectionContext) => {
        // Add custom attributes to demonstrate requestHook functionality
        span.setAttribute('custom.requestHook', 'called');

        // Set context information as extras for test validation
        Sentry.setExtra('requestHookCalled', {
          sanitizedQuery: sanitizedSqlQuery,
          database: connectionContext?.ATTR_DB_NAMESPACE,
          host: connectionContext?.ATTR_SERVER_ADDRESS,
          port: connectionContext?.ATTR_SERVER_PORT,
        });
      },
    }),
  ],
});
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
  integrations: [
    Sentry.postgresJsIntegration({
      requestHook: (span, sanitizedSqlQuery, connectionContext) => {
        // Add custom attributes to demonstrate requestHook functionality
        span.setAttribute('custom.requestHook', 'called');

        // Set context information as extras for test validation
        Sentry.setExtra('requestHookCalled', {
          sanitizedQuery: sanitizedSqlQuery,
          database: connectionContext?.ATTR_DB_NAMESPACE,
          host: connectionContext?.ATTR_SERVER_ADDRESS,
          port: connectionContext?.ATTR_SERVER_PORT,
        });
      },
    }),
  ],
});
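
For reference, the `requestHook` shape exercised by the two instrumentation files above can be sketched roughly as follows. This typing is inferred from the tests, not the SDK's published option types; the connection-context property names are simply the keys the tests read.

```ts
import type { Span } from '@sentry/node';

// Inferred from the scenarios above; not the SDK's exported types.
interface PostgresJsConnectionContext {
  ATTR_DB_NAMESPACE?: string; // database name
  ATTR_SERVER_ADDRESS?: string; // host
  ATTR_SERVER_PORT?: number; // port (stored as a number per the commit message)
}

type PostgresJsRequestHook = (
  span: Span,
  sanitizedSqlQuery: string,
  connectionContext?: PostgresJsConnectionContext,
) => void;
```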
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
const Sentry = require('@sentry/node');
const { loggingTransport } = require('@sentry-internal/node-integration-tests');

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
});
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
});
Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
const Sentry = require('@sentry/node');
const postgres = require('postgres');

// Stop the process from exiting before the transaction is sent
setInterval(() => {}, 1000);

const sql = postgres({ port: 5444, user: 'test', password: 'test', database: 'test_db' });

async function run() {
  await Sentry.startSpan(
    {
      name: 'Test Transaction',
      op: 'transaction',
    },
    async () => {
      try {
        await sql`
          CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id"));
        `;

        await sql`
          INSERT INTO "User" ("email", "name") VALUES ('Foo', 'bar@baz.com');
        `;

        await sql`
          SELECT * FROM "User" WHERE "email" = 'bar@baz.com';
        `;

        await sql`
          DROP TABLE "User";
        `;
      } finally {
        await sql.end();
      }
    },
  );
}

run();
Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
import * as Sentry from '@sentry/node';
import postgres from 'postgres';

// Stop the process from exiting before the transaction is sent
setInterval(() => {}, 1000);

const sql = postgres({ port: 5444, user: 'test', password: 'test', database: 'test_db' });

async function run() {
  await Sentry.startSpan(
    {
      name: 'Test Transaction',
      op: 'transaction',
    },
    async () => {
      try {
        await sql`
          CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id"));
        `;

        await sql`
          INSERT INTO "User" ("email", "name") VALUES ('Foo', 'bar@baz.com');
        `;

        await sql`
          SELECT * FROM "User" WHERE "email" = 'bar@baz.com';
        `;

        await sql`
          DROP TABLE "User";
        `;
      } finally {
        await sql.end();
      }
    },
  );
}

run();
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
const { loggingTransport } = require('@sentry-internal/node-integration-tests');
const Sentry = require('@sentry/node');

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
});

// Import postgres AFTER Sentry.init() so instrumentation is set up
const postgres = require('postgres');

// Stop the process from exiting before the transaction is sent
setInterval(() => {}, 1000);

// Test with plain object options
const sql = postgres({ port: 5444, user: 'test', password: 'test', database: 'test_db' });

async function run() {
  await Sentry.startSpan(
    {
      name: 'Test Transaction',
      op: 'transaction',
    },
    async () => {
      try {
        // Test sql.unsafe() - this was not being instrumented before the fix
        await sql.unsafe('CREATE TABLE "User" ("id" SERIAL NOT NULL, "email" TEXT NOT NULL, PRIMARY KEY ("id"))');

        await sql.unsafe('INSERT INTO "User" ("email") VALUES ($1)', ['test@example.com']);

        await sql.unsafe('SELECT * FROM "User" WHERE "email" = $1', ['test@example.com']);

        await sql.unsafe('DROP TABLE "User"');

        // This will be captured as an error as the table no longer exists
        await sql.unsafe('SELECT * FROM "User"');
      } finally {
        await sql.end();
      }
    },
  );
}

run();
Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
import * as Sentry from '@sentry/node';
import postgres from 'postgres';

// Stop the process from exiting before the transaction is sent
setInterval(() => {}, 1000);

// Test with plain object options
const sql = postgres({ port: 5444, user: 'test', password: 'test', database: 'test_db' });

async function run() {
  await Sentry.startSpan(
    {
      name: 'Test Transaction',
      op: 'transaction',
    },
    async () => {
      try {
        // Test sql.unsafe() - this was not being instrumented before the fix
        await sql.unsafe('CREATE TABLE "User" ("id" SERIAL NOT NULL, "email" TEXT NOT NULL, PRIMARY KEY ("id"))');

        await sql.unsafe('INSERT INTO "User" ("email") VALUES ($1)', ['test@example.com']);

        await sql.unsafe('SELECT * FROM "User" WHERE "email" = $1', ['test@example.com']);

        await sql.unsafe('DROP TABLE "User"');

        // This will be captured as an error as the table no longer exists
        await sql.unsafe('SELECT * FROM "User"');
      } finally {
        await sql.end();
      }
    },
  );
}

run();
Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
const { loggingTransport } = require('@sentry-internal/node-integration-tests');
const Sentry = require('@sentry/node');

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  transport: loggingTransport,
});

// Import postgres AFTER Sentry.init() so instrumentation is set up
const postgres = require('postgres');

// Stop the process from exiting before the transaction is sent
setInterval(() => {}, 1000);

// Test URL-based initialization - this is the common pattern that was causing the regression
const sql = postgres('postgres://test:test@localhost:5444/test_db');

async function run() {
  await Sentry.startSpan(
    {
      name: 'Test Transaction',
      op: 'transaction',
    },
    async () => {
      try {
        await sql`
          CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id"));
        `;

        await sql`
          INSERT INTO "User" ("email", "name") VALUES ('Foo', 'bar@baz.com');
        `;

        await sql`
          UPDATE "User" SET "name" = 'Foo' WHERE "email" = 'bar@baz.com';
        `;

        await sql`
          SELECT * FROM "User" WHERE "email" = 'bar@baz.com';
        `;

        await sql`SELECT * from generate_series(1,1000) as x `.cursor(10, async rows => {
          await Promise.all(rows);
        });

        await sql`
          DROP TABLE "User";
        `;

        // This will be captured as an error as the table no longer exists
        await sql`
          SELECT * FROM "User" WHERE "email" = 'foo@baz.com';
        `;
      } finally {
        await sql.end();
      }
    },
  );
}

run();
