): Promise, options: Get
return {
id: provider.providerId,
...provider.providerInfo,
- pdp: decodePDPCapabilities(
+ pdp: await decodePDPCapabilities(
+ provider.providerId,
+ client.chain.id,
capabilitiesListToObject(provider.product.capabilityKeys, provider.productCapabilityValues)
),
}
diff --git a/packages/synapse-core/test/cert.test.ts b/packages/synapse-core/test/cert.test.ts
new file mode 100644
index 000000000..52b982cd9
--- /dev/null
+++ b/packages/synapse-core/test/cert.test.ts
@@ -0,0 +1,138 @@
+import assert from 'assert'
+
+import type { Account, Chain, Client, Hex, Transport } from 'viem'
+import { createWalletClient, http } from 'viem'
+import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'
+import { calibration } from '../src/chains.ts'
+import { decodeEndorsement, decodeEndorsements, encodeEndorsements, signEndorsement } from '../src/utils/cert.ts'
+
+describe('Endorsement Certificates', () => {
+ let client: Client<Transport, Chain, Account>
+ beforeEach(async () => {
+ client = createWalletClient({
+ account: privateKeyToAccount(generatePrivateKey()),
+ transport: http(),
+ chain: calibration,
+ })
+ })
+
+ it('should decode from the signed encoding the same account that signed', async () => {
+ const providerId = 10n
+ const notAfter = 0xffffffffffffffffn
+ const encoded = await signEndorsement(client, {
+ notAfter,
+ providerId,
+ })
+ assert.equal(encoded.length, 164)
+
+ const { address, endorsement } = await decodeEndorsement(providerId, client.chain.id, encoded)
+ assert.equal(address, client.account.address)
+ assert.equal(endorsement.notAfter, notAfter)
+
+ const [keys, values] = encodeEndorsements({
+ [address ?? '']: endorsement,
+ })
+ assert.equal(keys.length, values.length)
+ assert.equal(keys.length, 1)
+ assert.equal(values.length, 1)
+ assert.equal(values[0], encoded)
+ })
+
+ it('should decode multiple valid endorsements', async () => {
+ const providerId = 15n
+ const notAfter = BigInt(Math.floor(Date.now() / 1000) + 3600) // 1 hour from now
+
+ // Create multiple clients
+ const client2 = createWalletClient({
+ account: privateKeyToAccount(generatePrivateKey()),
+ transport: http(),
+ chain: calibration,
+ })
+ const client3 = createWalletClient({
+ account: privateKeyToAccount(generatePrivateKey()),
+ transport: http(),
+ chain: calibration,
+ })
+
+ // Sign endorsements from different accounts
+ const encoded1 = await signEndorsement(client, { notAfter, providerId })
+ const encoded2 = await signEndorsement(client2, { notAfter, providerId })
+ const encoded3 = await signEndorsement(client3, { notAfter, providerId })
+
+ const capabilities = {
+ endorsement0: encoded1,
+ endorsement1: encoded2,
+ endorsement2: encoded3,
+ }
+
+ const result = await decodeEndorsements(providerId, client.chain.id, capabilities)
+
+ // Should have 3 valid endorsements
+ assert.equal(Object.keys(result).length, 3)
+
+ // Verify all addresses are present and correct
+ assert.ok(result[client.account.address])
+ assert.ok(result[client2.account.address])
+ assert.ok(result[client3.account.address])
+
+ // Verify endorsement data
+ assert.equal(result[client.account.address].notAfter, notAfter)
+ assert.equal(result[client2.account.address].notAfter, notAfter)
+ assert.equal(result[client3.account.address].notAfter, notAfter)
+ })
+
+ it('should handle mixed valid and invalid endorsements', async () => {
+ const providerId = 20n
+ const notAfter = BigInt(Math.floor(Date.now() / 1000) + 3600)
+
+ // Create valid endorsement
+ const validEncoded = await signEndorsement(client, { notAfter, providerId })
+
+ const capabilities: Record<string, Hex> = {
+ blabla: '0xdeadbeef',
+ endorsement0: validEncoded,
+ endorsement1: '0x1234' as Hex, // Invalid - too short
+ endorsement2: `0x${'a'.repeat(162)}` as Hex, // Invalid - wrong format
+ endorsement3: `0x${'0'.repeat(162)}` as Hex, // Invalid - all zeros
+ }
+
+ const result = await decodeEndorsements(providerId, client.chain.id, capabilities)
+
+ // Should only have the valid endorsement
+ assert.equal(Object.keys(result).length, 1)
+ assert.ok(result[client.account.address])
+ assert.equal(result[client.account.address].notAfter, notAfter)
+ })
+
+ it('should filter out expired endorsements', async () => {
+ const providerId = 25n
+ const futureTime = BigInt(Math.floor(Date.now() / 1000) + 3600) // 1 hour from now
+ const pastTime = BigInt(Math.floor(Date.now() / 1000) - 3600) // 1 hour ago
+
+ // Create endorsements with different expiry times
+ const validEncoded = await signEndorsement(client, { notAfter: futureTime, providerId })
+ const expiredEncoded = await signEndorsement(client, { notAfter: pastTime, providerId })
+
+ const capabilities = {
+ endorsement0: validEncoded,
+ endorsement1: expiredEncoded,
+ }
+
+ const result = await decodeEndorsements(providerId, client.chain.id, capabilities)
+
+ // Should only have the non-expired endorsement
+ assert.equal(Object.keys(result).length, 1)
+ assert.ok(result[client.account.address])
+ assert.equal(result[client.account.address].notAfter, futureTime)
+ })
+
+ it('should handle empty capabilities', async () => {
+ const providerId = 30n
+ const capabilities = {}
+
+ const result = await decodeEndorsements(providerId, client.chain.id, capabilities)
+
+ // Should return empty object
+ assert.deepEqual(result, {})
+ })
+})
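The tests above double as a reference for the endorsement round trip. A minimal sketch of the same flow outside the test harness, reusing the test file's relative imports (an application would import from the package's public entry points instead):

```ts
import { createWalletClient, http } from 'viem'
import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'
import { calibration } from '../src/chains.ts'
import { decodeEndorsements, signEndorsement } from '../src/utils/cert.ts'

// Sign an endorsement for provider 10, valid for one hour.
const client = createWalletClient({
  account: privateKeyToAccount(generatePrivateKey()),
  transport: http(),
  chain: calibration,
})
const encoded = await signEndorsement(client, {
  providerId: 10n,
  notAfter: BigInt(Math.floor(Date.now() / 1000) + 3600),
})

// Endorsements travel as `endorsementN` capability values; decoding recovers
// the signer address and drops anything invalid or expired.
const endorsements = await decodeEndorsements(10n, client.chain.id, { endorsement0: encoded })
console.log(endorsements[client.account.address]?.notAfter)
```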
diff --git a/packages/synapse-core/test/mocks/mockServiceWorker.js b/packages/synapse-core/test/mocks/mockServiceWorker.js
new file mode 100644
index 000000000..558540fa5
--- /dev/null
+++ b/packages/synapse-core/test/mocks/mockServiceWorker.js
@@ -0,0 +1,349 @@
+/* eslint-disable */
+/* tslint:disable */
+
+/**
+ * Mock Service Worker.
+ * @see https://github.com/mswjs/msw
+ * - Please do NOT modify this file.
+ */
+
+const PACKAGE_VERSION = '2.12.4'
+const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82'
+const IS_MOCKED_RESPONSE = Symbol('isMockedResponse')
+const activeClientIds = new Set()
+
+addEventListener('install', function () {
+ self.skipWaiting()
+})
+
+addEventListener('activate', function (event) {
+ event.waitUntil(self.clients.claim())
+})
+
+addEventListener('message', async function (event) {
+ const clientId = Reflect.get(event.source || {}, 'id')
+
+ if (!clientId || !self.clients) {
+ return
+ }
+
+ const client = await self.clients.get(clientId)
+
+ if (!client) {
+ return
+ }
+
+ const allClients = await self.clients.matchAll({
+ type: 'window',
+ })
+
+ switch (event.data) {
+ case 'KEEPALIVE_REQUEST': {
+ sendToClient(client, {
+ type: 'KEEPALIVE_RESPONSE',
+ })
+ break
+ }
+
+ case 'INTEGRITY_CHECK_REQUEST': {
+ sendToClient(client, {
+ type: 'INTEGRITY_CHECK_RESPONSE',
+ payload: {
+ packageVersion: PACKAGE_VERSION,
+ checksum: INTEGRITY_CHECKSUM,
+ },
+ })
+ break
+ }
+
+ case 'MOCK_ACTIVATE': {
+ activeClientIds.add(clientId)
+
+ sendToClient(client, {
+ type: 'MOCKING_ENABLED',
+ payload: {
+ client: {
+ id: client.id,
+ frameType: client.frameType,
+ },
+ },
+ })
+ break
+ }
+
+ case 'CLIENT_CLOSED': {
+ activeClientIds.delete(clientId)
+
+ const remainingClients = allClients.filter((client) => {
+ return client.id !== clientId
+ })
+
+ // Unregister itself when there are no more clients
+ if (remainingClients.length === 0) {
+ self.registration.unregister()
+ }
+
+ break
+ }
+ }
+})
+
+addEventListener('fetch', function (event) {
+ const requestInterceptedAt = Date.now()
+
+ // Bypass navigation requests.
+ if (event.request.mode === 'navigate') {
+ return
+ }
+
+ // Opening the DevTools triggers the "only-if-cached" request
+ // that cannot be handled by the worker. Bypass such requests.
+ if (
+ event.request.cache === 'only-if-cached' &&
+ event.request.mode !== 'same-origin'
+ ) {
+ return
+ }
+
+ // Bypass all requests when there are no active clients.
+ // Prevents the self-unregistered worked from handling requests
+ // after it's been terminated (still remains active until the next reload).
+ if (activeClientIds.size === 0) {
+ return
+ }
+
+ const requestId = crypto.randomUUID()
+ event.respondWith(handleRequest(event, requestId, requestInterceptedAt))
+})
+
+/**
+ * @param {FetchEvent} event
+ * @param {string} requestId
+ * @param {number} requestInterceptedAt
+ */
+async function handleRequest(event, requestId, requestInterceptedAt) {
+ const client = await resolveMainClient(event)
+ const requestCloneForEvents = event.request.clone()
+ const response = await getResponse(
+ event,
+ client,
+ requestId,
+ requestInterceptedAt,
+ )
+
+ // Send back the response clone for the "response:*" life-cycle events.
+ // Ensure MSW is active and ready to handle the message, otherwise
+ // this message will pend indefinitely.
+ if (client && activeClientIds.has(client.id)) {
+ const serializedRequest = await serializeRequest(requestCloneForEvents)
+
+ // Clone the response so both the client and the library could consume it.
+ const responseClone = response.clone()
+
+ sendToClient(
+ client,
+ {
+ type: 'RESPONSE',
+ payload: {
+ isMockedResponse: IS_MOCKED_RESPONSE in response,
+ request: {
+ id: requestId,
+ ...serializedRequest,
+ },
+ response: {
+ type: responseClone.type,
+ status: responseClone.status,
+ statusText: responseClone.statusText,
+ headers: Object.fromEntries(responseClone.headers.entries()),
+ body: responseClone.body,
+ },
+ },
+ },
+ responseClone.body ? [serializedRequest.body, responseClone.body] : [],
+ )
+ }
+
+ return response
+}
+
+/**
+ * Resolve the main client for the given event.
+ * Client that issues a request doesn't necessarily equal the client
+ * that registered the worker. It's with the latter the worker should
+ * communicate with during the response resolving phase.
+ * @param {FetchEvent} event
+ * @returns {Promise<Client | undefined>}
+ */
+async function resolveMainClient(event) {
+ const client = await self.clients.get(event.clientId)
+
+ if (activeClientIds.has(event.clientId)) {
+ return client
+ }
+
+ if (client?.frameType === 'top-level') {
+ return client
+ }
+
+ const allClients = await self.clients.matchAll({
+ type: 'window',
+ })
+
+ return allClients
+ .filter((client) => {
+ // Get only those clients that are currently visible.
+ return client.visibilityState === 'visible'
+ })
+ .find((client) => {
+ // Find the client ID that's recorded in the
+ // set of clients that have registered the worker.
+ return activeClientIds.has(client.id)
+ })
+}
+
+/**
+ * @param {FetchEvent} event
+ * @param {Client | undefined} client
+ * @param {string} requestId
+ * @param {number} requestInterceptedAt
+ * @returns {Promise<Response>}
+ */
+async function getResponse(event, client, requestId, requestInterceptedAt) {
+ // Clone the request because it might've been already used
+ // (i.e. its body has been read and sent to the client).
+ const requestClone = event.request.clone()
+
+ function passthrough() {
+ // Cast the request headers to a new Headers instance
+ // so the headers can be manipulated with.
+ const headers = new Headers(requestClone.headers)
+
+ // Remove the "accept" header value that marked this request as passthrough.
+ // This prevents request alteration and also keeps it compliant with the
+ // user-defined CORS policies.
+ const acceptHeader = headers.get('accept')
+ if (acceptHeader) {
+ const values = acceptHeader.split(',').map((value) => value.trim())
+ const filteredValues = values.filter(
+ (value) => value !== 'msw/passthrough',
+ )
+
+ if (filteredValues.length > 0) {
+ headers.set('accept', filteredValues.join(', '))
+ } else {
+ headers.delete('accept')
+ }
+ }
+
+ return fetch(requestClone, { headers })
+ }
+
+ // Bypass mocking when the client is not active.
+ if (!client) {
+ return passthrough()
+ }
+
+ // Bypass initial page load requests (i.e. static assets).
+ // The absence of the immediate/parent client in the map of the active clients
+ // means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet
+ // and is not ready to handle requests.
+ if (!activeClientIds.has(client.id)) {
+ return passthrough()
+ }
+
+ // Notify the client that a request has been intercepted.
+ const serializedRequest = await serializeRequest(event.request)
+ const clientMessage = await sendToClient(
+ client,
+ {
+ type: 'REQUEST',
+ payload: {
+ id: requestId,
+ interceptedAt: requestInterceptedAt,
+ ...serializedRequest,
+ },
+ },
+ [serializedRequest.body],
+ )
+
+ switch (clientMessage.type) {
+ case 'MOCK_RESPONSE': {
+ return respondWithMock(clientMessage.data)
+ }
+
+ case 'PASSTHROUGH': {
+ return passthrough()
+ }
+ }
+
+ return passthrough()
+}
+
+/**
+ * @param {Client} client
+ * @param {any} message
+ * @param {Array<Transferable>} transferrables
+ * @returns {Promise<any>}
+ */
+function sendToClient(client, message, transferrables = []) {
+ return new Promise((resolve, reject) => {
+ const channel = new MessageChannel()
+
+ channel.port1.onmessage = (event) => {
+ if (event.data && event.data.error) {
+ return reject(event.data.error)
+ }
+
+ resolve(event.data)
+ }
+
+ client.postMessage(message, [
+ channel.port2,
+ ...transferrables.filter(Boolean),
+ ])
+ })
+}
+
+/**
+ * @param {Response} response
+ * @returns {Response}
+ */
+function respondWithMock(response) {
+ // Setting response status code to 0 is a no-op.
+ // However, when responding with a "Response.error()", the produced Response
+ // instance will have status code set to 0. Since it's not possible to create
+ // a Response instance with status code 0, handle that use-case separately.
+ if (response.status === 0) {
+ return Response.error()
+ }
+
+ const mockedResponse = new Response(response.body, response)
+
+ Reflect.defineProperty(mockedResponse, IS_MOCKED_RESPONSE, {
+ value: true,
+ enumerable: true,
+ })
+
+ return mockedResponse
+}
+
+/**
+ * @param {Request} request
+ */
+async function serializeRequest(request) {
+ return {
+ url: request.url,
+ mode: request.mode,
+ method: request.method,
+ headers: Object.fromEntries(request.headers.entries()),
+ cache: request.cache,
+ credentials: request.credentials,
+ destination: request.destination,
+ integrity: request.integrity,
+ redirect: request.redirect,
+ referrer: request.referrer,
+ referrerPolicy: request.referrerPolicy,
+ body: await request.arrayBuffer(),
+ keepalive: request.keepalive,
+ }
+}
diff --git a/packages/synapse-core/tools/endorse-sp.js b/packages/synapse-core/tools/endorse-sp.js
new file mode 100644
index 000000000..200728ee9
--- /dev/null
+++ b/packages/synapse-core/tools/endorse-sp.js
@@ -0,0 +1,122 @@
+import EthModule from '@ledgerhq/hw-app-eth'
+import TransportNodeHidModule from '@ledgerhq/hw-transport-node-hid'
+import { createWalletClient, http } from 'viem'
+import { privateKeyToAccount, toAccount } from 'viem/accounts'
+
+const TransportNodeHid = TransportNodeHidModule.default || TransportNodeHidModule
+const Eth = EthModule.default || EthModule
+
+import { getChain } from '../src/chains.ts'
+import { signEndorsement } from '../src/utils/cert.ts'
+
+function printUsageAndExit() {
+ console.error('Usage: PRIVATE_KEY=0x... node tools/endorse-sp.js providerId...')
+ console.error(' or: USE_LEDGER=true node tools/endorse-sp.js providerId...')
+ process.exit(1)
+}
+
+const PRIVATE_KEY = process.env.PRIVATE_KEY
+const USE_LEDGER = process.env.USE_LEDGER === 'true'
+const LEDGER_PATH = process.env.LEDGER_PATH || "m/44'/60'/0'/0/0"
+const ETH_RPC_URL = process.env.ETH_RPC_URL || 'https://api.calibration.node.glif.io/rpc/v1'
+const EXPIRY = process.env.EXPIRY != null ? BigInt(process.env.EXPIRY) : BigInt(Math.floor(Date.now() / 1000)) + 10368000n
+
+if (!PRIVATE_KEY && !USE_LEDGER) {
+ console.error('ERROR: Either PRIVATE_KEY or USE_LEDGER=true is required')
+ printUsageAndExit()
+}
+
+let CHAIN_ID = process.env.CHAIN_ID
+
+// TODO also support providerAddress and serviceURL
+const providerIds = process.argv.slice(2)
+if (providerIds.length === 0) {
+ console.error('ERROR: must specify at least one providerId')
+ printUsageAndExit()
+}
+
+async function createLedgerAccount() {
+ const transport = await TransportNodeHid.open('')
+ const eth = new Eth(transport)
+
+ const { address } = await eth.getAddress(LEDGER_PATH)
+
+ const account = toAccount({
+ address,
+ async signMessage({ message }) {
+ const messageHex = typeof message === 'string' ? Buffer.from(message).toString('hex') : message.slice(2)
+ const result = await eth.signPersonalMessage(LEDGER_PATH, messageHex)
+ return `0x${result.r}${result.s}${(result.v - 27).toString(16).padStart(2, '0')}`
+ },
+ async signTransaction(_transaction) {
+ throw new Error('signTransaction not needed for this script')
+ },
+ async signTypedData(typedData) {
+ const result = await eth.signEIP712Message(LEDGER_PATH, typedData)
+ return `0x${result.r}${result.s}${(result.v - 27).toString(16).padStart(2, '0')}`
+ },
+ })
+
+ return { account, close: () => transport.close() }
+}
+
+async function main() {
+ if (CHAIN_ID == null) {
+ console.log('fetching eth_chainId from', ETH_RPC_URL)
+ const response = await fetch(ETH_RPC_URL, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ id: 1,
+ method: 'eth_chainId',
+ params: [],
+ }),
+ })
+ const result = await response.json()
+ CHAIN_ID = result.result
+ }
+ console.log('ChainId:', Number(CHAIN_ID))
+
+ let account
+ let closeLedgerTransport = null
+ if (USE_LEDGER) {
+ console.log('🔐 Using Ledger hardware wallet')
+ console.log('📍 Path:', LEDGER_PATH, '(Ethereum standard)')
+ console.log('⚠️ Connect Ledger, unlock, and open the Ethereum app')
+ console.log('⚠️ Enable "Blind signing" in Ethereum app settings')
+ const ledgerResult = await createLedgerAccount()
+ account = ledgerResult.account
+ closeLedgerTransport = ledgerResult.close
+ console.log('✅ Connected, address:', account.address)
+ } else {
+ account = privateKeyToAccount(PRIVATE_KEY)
+ }
+
+ try {
+ const client = createWalletClient({
+ account,
+ transport: http(ETH_RPC_URL),
+ chain: getChain(Number(CHAIN_ID)),
+ })
+
+ console.log('Expiry:', new Date(Number(EXPIRY) * 1000).toLocaleString())
+
+ for (const providerId of providerIds) {
+ if (USE_LEDGER) console.log('\n⏳ Confirm on Ledger for provider:', providerId)
+ const encoded = await signEndorsement(client, {
+ providerId: BigInt(providerId),
+ notAfter: EXPIRY,
+ })
+ console.log('Provider:', providerId)
+ console.log('Endorsement:', encoded)
+ }
+ } finally {
+ if (closeLedgerTransport != null) {
+ await closeLedgerTransport()
+ }
+ }
+}
+
+main().catch(console.error)
diff --git a/packages/synapse-core/tsconfig.json b/packages/synapse-core/tsconfig.json
index cbefaf82e..9c0e737d9 100644
--- a/packages/synapse-core/tsconfig.json
+++ b/packages/synapse-core/tsconfig.json
@@ -14,6 +14,7 @@
"src/erc20.ts",
"src/usdfc.ts",
"src/abis/index.ts",
+ "src/auction/index.ts",
"src/pay/index.ts",
"src/warm-storage/index.ts",
"src/typed-data/index.ts",
diff --git a/packages/synapse-react/CHANGELOG.md b/packages/synapse-react/CHANGELOG.md
index a769f228f..bb3bb6716 100644
--- a/packages/synapse-react/CHANGELOG.md
+++ b/packages/synapse-react/CHANGELOG.md
@@ -1,5 +1,14 @@
# Changelog
+## [0.1.4](https://github.com/FilOzone/synapse-sdk/compare/synapse-react-v0.1.3...synapse-react-v0.1.4) (2025-12-04)
+
+
+### Chores
+
+* **deps-dev:** bump @biomejs/biome from 2.3.5 to 2.3.6 ([#448](https://github.com/FilOzone/synapse-sdk/issues/448)) ([ebcab4e](https://github.com/FilOzone/synapse-sdk/commit/ebcab4ea166aa69c35d988ff2356b3f5972af351))
+* **deps-dev:** bump @biomejs/biome from 2.3.6 to 2.3.7 ([#459](https://github.com/FilOzone/synapse-sdk/issues/459)) ([d3c65a8](https://github.com/FilOzone/synapse-sdk/commit/d3c65a806e4819bbc560f5a7087f79eec31417a5))
+* **deps-dev:** bump @biomejs/biome from 2.3.7 to 2.3.8 ([#476](https://github.com/FilOzone/synapse-sdk/issues/476)) ([d95f812](https://github.com/FilOzone/synapse-sdk/commit/d95f812d7752a9b1dcb46219a4857eb99b54ebf0))
+
## [0.1.3](https://github.com/FilOzone/synapse-sdk/compare/synapse-react-v0.1.2...synapse-react-v0.1.3) (2025-11-17)
diff --git a/packages/synapse-react/package.json b/packages/synapse-react/package.json
index 71b70db9c..90217a523 100644
--- a/packages/synapse-react/package.json
+++ b/packages/synapse-react/package.json
@@ -1,6 +1,6 @@
{
"name": "@filoz/synapse-react",
- "version": "0.1.3",
+ "version": "0.1.4",
"description": "React hooks for interacting with Filecoin Onchain Cloud smart contracts",
"repository": {
"type": "git",
@@ -29,6 +29,17 @@
".": {
"types": "./dist/src/index.d.ts",
"default": "./dist/src/index.js"
+ },
+ "./filsnap": {
+ "types": "./dist/src/filsnap.d.ts",
+ "default": "./dist/src/filsnap.js"
+ }
+ },
+ "typesVersions": {
+ "*": {
+ "filsnap": [
+ "./dist/src/filsnap"
+ ]
}
},
"files": [
@@ -73,10 +84,10 @@
"@hugomrdias/filsnap-adapter": "^3.3.8"
},
"devDependencies": {
- "@biomejs/biome": "2.3.7",
- "@types/node": "^24.9.1",
+ "@biomejs/biome": "catalog:",
+ "@types/node": "catalog:",
"type-fest": "^5.1.0",
- "typescript": "5.9.3"
+ "typescript": "catalog:"
},
"publishConfig": {
"access": "public"
@@ -88,7 +99,8 @@
},
"peerDependencies": {
"@tanstack/react-query": "5.x",
+ "@wagmi/core": "3.x",
"viem": "2.x",
- "wagmi": "2.x"
+ "wagmi": "3.x"
}
}
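The new `./filsnap` subpath export (with a `typesVersions` fallback for TypeScript configurations that ignore the `exports` map) pairs with the `src/index.ts` change below, which stops re-exporting the hook from the package root. A minimal consumption sketch:

```ts
// The hook is now imported from its own entry point rather than the package root.
import { useFilsnap } from '@filoz/synapse-react/filsnap'

function FilsnapInstaller() {
  // On wallet connect, installs the filsnap snap when the connector exposes a
  // MetaMask provider (see src/filsnap.ts in this diff).
  useFilsnap({ version: '*' })
  return null
}
```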
diff --git a/packages/synapse-react/src/filsnap.ts b/packages/synapse-react/src/filsnap.ts
index 0a48f4074..833c4e66b 100644
--- a/packages/synapse-react/src/filsnap.ts
+++ b/packages/synapse-react/src/filsnap.ts
@@ -7,6 +7,10 @@ const SNAP_ID = 'npm:filsnap' //'local:http://localhost:8080'
export const useFilsnap = ({ version, force }: { version?: string; force?: boolean } = {}) => {
useAccountEffect({
onConnect: async (data) => {
+ if (!data.connector || !('getProvider' in data.connector)) {
+ return
+ }
+
const provider = (await data.connector.getProvider()) as EIP1193Provider
if (provider.isMetaMask) {
await getOrInstallSnap(provider, SNAP_ID, version ?? '*', force)
diff --git a/packages/synapse-react/src/index.ts b/packages/synapse-react/src/index.ts
index bd69ca8f5..d1116683a 100644
--- a/packages/synapse-react/src/index.ts
+++ b/packages/synapse-react/src/index.ts
@@ -11,7 +11,6 @@
export * from './calibration.ts'
export * from './erc20.ts'
-export * from './filsnap.ts'
export * from './payments/index.ts'
export * from './usdfc.ts'
export * from './warm-storage/index.ts'
diff --git a/packages/synapse-react/tsconfig.json b/packages/synapse-react/tsconfig.json
index 572e92242..e688e6fbe 100644
--- a/packages/synapse-react/tsconfig.json
+++ b/packages/synapse-react/tsconfig.json
@@ -7,7 +7,7 @@
"exclude": ["node_modules", "dist"],
"references": [
{
- "path": "../synapse-core"
+ "path": "../synapse-core/tsconfig.json"
}
],
"typedocOptions": {
diff --git a/packages/synapse-sdk/CHANGELOG.md b/packages/synapse-sdk/CHANGELOG.md
index 844fb5879..a59e150b7 100644
--- a/packages/synapse-sdk/CHANGELOG.md
+++ b/packages/synapse-sdk/CHANGELOG.md
@@ -1,8 +1,19 @@
-## [0.28.0](https://github.com/FilOzone/synapse-sdk/compare/v0.27.0...v0.28.0) (2025-09-19)
+# Changelog
-### Features
+## [0.36.1](https://github.com/FilOzone/synapse-sdk/compare/synapse-sdk-v0.36.0...synapse-sdk-v0.36.1) (2025-12-04)
+
+
+### Bug Fixes
+
+* add getScheduledRemovals method ([#464](https://github.com/FilOzone/synapse-sdk/issues/464)) ([05e6b92](https://github.com/FilOzone/synapse-sdk/commit/05e6b92bb62ff4a4da48b4fa35398a66da23b049))
+
+
+### Chores
-* add terminateDataSet functionality ([#230](https://github.com/FilOzone/synapse-sdk/issues/230)) ([ffaacac](https://github.com/FilOzone/synapse-sdk/commit/ffaacac507b4882abfc33d3de72fe9fa98843cd2))
+* **deps-dev:** bump @biomejs/biome from 2.3.5 to 2.3.6 ([#448](https://github.com/FilOzone/synapse-sdk/issues/448)) ([ebcab4e](https://github.com/FilOzone/synapse-sdk/commit/ebcab4ea166aa69c35d988ff2356b3f5972af351))
+* **deps-dev:** bump @biomejs/biome from 2.3.6 to 2.3.7 ([#459](https://github.com/FilOzone/synapse-sdk/issues/459)) ([d3c65a8](https://github.com/FilOzone/synapse-sdk/commit/d3c65a806e4819bbc560f5a7087f79eec31417a5))
+* **deps-dev:** bump @biomejs/biome from 2.3.7 to 2.3.8 ([#476](https://github.com/FilOzone/synapse-sdk/issues/476)) ([d95f812](https://github.com/FilOzone/synapse-sdk/commit/d95f812d7752a9b1dcb46219a4857eb99b54ebf0))
+* export StorageManagerUploadOptions ([#462](https://github.com/FilOzone/synapse-sdk/issues/462)) ([c5897f2](https://github.com/FilOzone/synapse-sdk/commit/c5897f21ac11b41f03e1552a09a8d34d6c42666b))
## [0.36.0](https://github.com/FilOzone/synapse-sdk/compare/synapse-sdk-v0.35.3...synapse-sdk-v0.36.0) (2025-11-17)
diff --git a/packages/synapse-sdk/package.json b/packages/synapse-sdk/package.json
index 27b3cb35f..e6ffa6c01 100644
--- a/packages/synapse-sdk/package.json
+++ b/packages/synapse-sdk/package.json
@@ -1,6 +1,6 @@
{
"name": "@filoz/synapse-sdk",
- "version": "0.36.0",
+ "version": "0.36.1",
"description": "JavaScript SDK for Filecoin Onchain Cloud",
"repository": {
"type": "git",
@@ -27,25 +27,72 @@
"import": "./dist/src/index.js",
"types": "./dist/src/index.d.ts"
},
+ "./payments": {
+ "import": "./dist/src/payments/index.js",
+ "types": "./dist/src/payments/index.d.ts"
+ },
"./pdp": {
"import": "./dist/src/pdp/index.js",
"types": "./dist/src/pdp/index.d.ts"
},
- "./payments": {
- "import": "./dist/src/payments/index.js",
- "types": "./dist/src/payments/index.d.ts"
+ "./session": {
+ "import": "./dist/src/session/index.js",
+ "types": "./dist/src/session/index.d.ts"
},
- "./warm-storage": {
- "import": "./dist/src/warm-storage/index.js",
- "types": "./dist/src/warm-storage/index.d.ts"
+ "./storage": {
+ "import": "./dist/src/storage/index.js",
+ "types": "./dist/src/storage/index.d.ts"
},
"./subgraph": {
"import": "./dist/src/subgraph/index.js",
"types": "./dist/src/subgraph/index.d.ts"
},
+ "./telemetry": {
+ "import": "./dist/src/telemetry/index.js",
+ "types": "./dist/src/telemetry/index.d.ts"
+ },
+ "./warm-storage": {
+ "import": "./dist/src/warm-storage/index.js",
+ "types": "./dist/src/warm-storage/index.d.ts"
+ },
"./sp-registry": {
"import": "./dist/src/sp-registry/index.js",
"types": "./dist/src/sp-registry/index.d.ts"
+ },
+ "./filbeam": {
+ "import": "./dist/src/filbeam/index.js",
+ "types": "./dist/src/filbeam/index.d.ts"
+ }
+ },
+ "typesVersions": {
+ "*": {
+ "payments": [
+ "./dist/src/payments"
+ ],
+ "pdp": [
+ "./dist/src/pdp"
+ ],
+ "session": [
+ "./dist/src/session"
+ ],
+ "storage": [
+ "./dist/src/storage"
+ ],
+ "subgraph": [
+ "./dist/src/subgraph"
+ ],
+ "telemetry": [
+ "./dist/src/telemetry"
+ ],
+ "warm-storage": [
+ "./dist/src/warm-storage"
+ ],
+ "sp-registry": [
+ "./dist/src/sp-registry"
+ ],
+ "filbeam": [
+ "./dist/src/filbeam"
+ ]
}
},
"scripts": {
@@ -56,7 +103,8 @@
"test:node": "wireit",
"test:browser": "wireit",
"clean": "rm -rf dist",
- "prepublishOnly": "pnpm run clean && pnpm run build"
+ "prepublishOnly": "pnpm run clean && pnpm run build",
+ "update:msw": "pnpx msw init src/test/mocks/ --save"
},
"wireit": {
"build": {
@@ -118,30 +166,30 @@
"dependencies": {
"@filoz/synapse-core": "workspace:^",
"@web3-storage/data-segment": "^5.3.0",
- "ethers": "^6.15.0",
"multiformats": "^13.4.1",
- "ox": "^0.9.12",
- "viem": "^2.38.3"
+ "ox": "catalog:",
+ "viem": "catalog:"
},
"optionalDependencies": {
"@sentry/browser": "^10.21.0",
"@sentry/node": "^10.21.0"
},
"devDependencies": {
- "@biomejs/biome": "2.3.7",
+ "@biomejs/biome": "catalog:",
"@types/chai": "^5.2.3",
- "@types/mocha": "^10.0.10",
- "@types/node": "^24.9.1",
+ "@types/mocha": "catalog:",
+ "@types/node": "catalog:",
"@wagmi/cli": "^2.7.0",
- "abitype": "^1.1.1",
+ "abitype": "catalog:",
"chai": "^6.2.0",
- "iso-web": "^1.4.3",
- "mocha": "^11.7.4",
- "msw": "~2.10.5",
+ "ethers": "catalog:",
+ "iso-web": "^2.1.0",
+ "mocha": "catalog:",
+ "msw": "catalog:",
"p-defer": "^4.0.1",
"playwright-test": "^14.1.12",
"type-fest": "^5.1.0",
- "typescript": "5.9.3"
+ "typescript": "catalog:"
},
"publishConfig": {
"access": "public"
@@ -153,5 +201,8 @@
},
"browser": {
"@sentry/node": "@sentry/browser"
+ },
+ "peerDependencies": {
+ "ethers": "6.x"
}
}
diff --git a/packages/synapse-sdk/src/filbeam/index.ts b/packages/synapse-sdk/src/filbeam/index.ts
new file mode 100644
index 000000000..f4794d54a
--- /dev/null
+++ b/packages/synapse-sdk/src/filbeam/index.ts
@@ -0,0 +1,75 @@
+/**
+ * FilBeam Service
+ *
+ * Client for the FilBeam stats API.
+ *
+ * ## Overview
+ *
+ * FilBeam enables retrieval incentives for Filecoin PDP (Proof of Data Possession)
+ * service providers by acting as a trusted intermediary that measures traffic
+ * between clients and storage providers.
+ *
+ * ## Architecture
+ *
+ * FilBeam operates as a caching layer between clients and storage providers,
+ * enabling efficient retrieval of content-addressable data stored on Filecoin PDP.
+ *
+ * ```
+ * Client <--> FilBeam (cache + metering) <--> Storage Provider
+ * ```
+ *
+ * ## Billing Model
+ *
+ * Both cache hits and cache misses generate billable egress events. This transforms
+ * Filecoin from passive archival storage into an active "serve many" data delivery
+ * infrastructure, where service providers are compensated for serving retrievals.
+ *
+ * @module FilBeam
+ *
+ * @example Basic Usage
+ * ```typescript
+ * import { FilBeamService } from '@filoz/synapse-sdk/filbeam'
+ *
+ * // Create service for mainnet
+ * const service = new FilBeamService('mainnet')
+ *
+ * // Get remaining data set statistics
+ * const stats = await service.getDataSetStats('dataset-id')
+ * console.log('Remaining CDN Egress:', stats.cdnEgressQuota)
+ * console.log('Remaining Cache Miss:', stats.cacheMissEgressQuota)
+ * ```
+ *
+ * @example Integration with Synapse SDK
+ * ```typescript
+ * import { Synapse } from '@filoz/synapse-sdk'
+ *
+ * // Initialize Synapse
+ * const synapse = await Synapse.create({
+ * privateKey: process.env.PRIVATE_KEY,
+ * rpcURL: 'https://api.node.glif.io/rpc/v1'
+ * })
+ *
+ * // Access FilBeam service through Synapse
+ * const stats = await synapse.filbeam.getDataSetStats('my-dataset')
+ *
+ * // Monitor remaining quotas over time
+ * setInterval(async () => {
+ * const currentStats = await synapse.filbeam.getDataSetStats('my-dataset')
+ * console.log('Remaining quotas:', currentStats)
+ *
+ * // Alert if running low
+ * const TiB = BigInt(1024 ** 4)
+ * const remainingTiB = Number((currentStats.cdnEgressQuota + currentStats.cacheMissEgressQuota) / TiB)
+ * if (remainingTiB < 1) {
+ * console.warn('Low quota warning: Less than 1 TiB remaining')
+ * }
+ * }, 60000) // Check every minute
+ * ```
+ *
+ * @see {@link https://docs.filbeam.com | FilBeam Documentation} - Official FilBeam documentation
+ * @see {@link https://meridian.space/blog/introducing-pay-per-byte-a-new-era-for-filecoin-retrieval | Pay Per Byte Blog Post} - Introduction to the pay-per-byte pricing model
+ * @see {@link DataSetStats} for the structure of returned statistics
+ * @see {@link FilBeamService} for the main service class
+ */
+
+export { type DataSetStats, FilBeamService } from './service.ts'
diff --git a/packages/synapse-sdk/src/filbeam/service.ts b/packages/synapse-sdk/src/filbeam/service.ts
new file mode 100644
index 000000000..d40aa81cd
--- /dev/null
+++ b/packages/synapse-sdk/src/filbeam/service.ts
@@ -0,0 +1,168 @@
+/**
+ * @module FilBeamService
+ * @description FilBeam service integration for Filecoin's pay-per-byte infrastructure.
+ *
+ * This module provides integration with FilBeam's services, including querying egress quotas
+ * and managing pay-per-byte data delivery metrics.
+ *
+ * @see {@link https://docs.filbeam.com | FilBeam Documentation} - Official FilBeam documentation
+ */
+
+import type { FilecoinNetworkType } from '../types.ts'
+import { createError } from '../utils/errors.ts'
+
+/**
+ * Data set statistics from FilBeam.
+ *
+ * These quotas represent the remaining pay-per-byte allocation available for data retrieval
+ * through FilBeam's trusted measurement layer. The values decrease as data is served and
+ * represent how many bytes can still be retrieved before needing to add more credits.
+ *
+ * @interface DataSetStats
+ * @property {bigint} cdnEgressQuota - The remaining CDN egress quota for cache hits (data served directly from FilBeam's cache) in bytes
+ * @property {bigint} cacheMissEgressQuota - The remaining egress quota for cache misses (data retrieved from storage providers) in bytes
+ */
+export interface DataSetStats {
+ cdnEgressQuota: bigint
+ cacheMissEgressQuota: bigint
+}
+
+/**
+ * Service for interacting with FilBeam infrastructure and APIs.
+ *
+ * @example
+ * ```typescript
+ * // Create service with network detection
+ * const synapse = await Synapse.create({ privateKey, rpcURL })
+ * const stats = await synapse.filbeam.getDataSetStats(12345)
+ *
+ * // Monitor remaining pay-per-byte quotas
+ * const service = new FilBeamService('mainnet')
+ * const stats = await service.getDataSetStats(12345)
+ * console.log('Remaining CDN Egress (cache hits):', stats.cdnEgressQuota)
+ * console.log('Remaining Cache Miss Egress:', stats.cacheMissEgressQuota)
+ * ```
+ *
+ * @remarks
+ * All quota values are returned as BigInt for precision when handling large byte values.
+ *
+ * @see {@link https://docs.filbeam.com | FilBeam Documentation} for detailed API specifications and usage guides
+ */
+export class FilBeamService {
+ private readonly _network: FilecoinNetworkType
+ private readonly _fetch: typeof fetch
+
+ constructor(network: FilecoinNetworkType, fetchImpl: typeof fetch = globalThis.fetch) {
+ this._validateNetworkType(network)
+ this._network = network
+ this._fetch = fetchImpl
+ }
+
+ private _validateNetworkType(network: FilecoinNetworkType) {
+ if (network === 'mainnet' || network === 'calibration') return
+
+ throw createError(
+ 'FilBeamService',
+ 'validateNetworkType',
+ 'Unsupported network type: Only Filecoin mainnet and calibration networks are supported.'
+ )
+ }
+
+ /**
+ * Get the base stats URL for the current network
+ */
+ private _getStatsBaseUrl(): string {
+ return this._network === 'mainnet' ? 'https://stats.filbeam.io' : 'https://calibration.stats.filbeam.io'
+ }
+
+ /**
+ * Validates the response from FilBeam stats API
+ */
+ private _validateStatsResponse(data: unknown): { cdnEgressQuota: string; cacheMissEgressQuota: string } {
+ if (typeof data !== 'object' || data === null) {
+ throw createError('FilBeamService', 'validateStatsResponse', 'Response is not an object')
+ }
+
+ const response = data as Record<string, unknown>
+
+ if (typeof response.cdnEgressQuota !== 'string') {
+ throw createError('FilBeamService', 'validateStatsResponse', 'cdnEgressQuota must be a string')
+ }
+
+ if (typeof response.cacheMissEgressQuota !== 'string') {
+ throw createError('FilBeamService', 'validateStatsResponse', 'cacheMissEgressQuota must be a string')
+ }
+
+ return {
+ cdnEgressQuota: response.cdnEgressQuota,
+ cacheMissEgressQuota: response.cacheMissEgressQuota,
+ }
+ }
+
+ /**
+ * Retrieves remaining pay-per-byte statistics for a specific data set from FilBeam.
+ *
+ * Fetches the remaining CDN and cache miss egress quotas for a data set. These quotas
+ * track how many bytes can still be retrieved through FilBeam's trusted measurement layer
+ * before needing to add more credits:
+ *
+ * - **CDN Egress Quota**: Remaining bytes that can be served from FilBeam's cache (fast, direct delivery)
+ * - **Cache Miss Egress Quota**: Remaining bytes that can be retrieved from storage providers (triggers caching)
+ *
+ * Both types of egress are billed based on volume. Query current pricing via
+ * {@link WarmStorageService.getServicePrice} or see https://docs.filbeam.com for rates.
+ *
+ * @param dataSetId - The unique identifier of the data set to query
+ * @returns A promise that resolves to the data set statistics with remaining quotas as BigInt values
+ *
+ * @throws {Error} Throws an error if:
+ * - The data set is not found (404)
+ * - The API returns an invalid response format
+ * - Network or other HTTP errors occur
+ *
+ * @example
+ * ```typescript
+ * try {
+ * const stats = await service.getDataSetStats('my-dataset-123')
+ *
+ * // Display remaining quotas
+ * console.log(`Remaining CDN Egress: ${stats.cdnEgressQuota} bytes`)
+ * console.log(`Remaining Cache Miss: ${stats.cacheMissEgressQuota} bytes`)
+ * } catch (error) {
+ * console.error('Failed to get stats:', error.message)
+ * }
+ * ```
+ */
+ async getDataSetStats(dataSetId: string | number): Promise<DataSetStats> {
+ const baseUrl = this._getStatsBaseUrl()
+ const url = `${baseUrl}/data-set/${dataSetId}`
+
+ const response = await this._fetch(url, {
+ method: 'GET',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ })
+
+ if (response.status === 404) {
+ throw createError('FilBeamService', 'getDataSetStats', `Data set not found: ${dataSetId}`)
+ }
+
+ if (response.status !== 200) {
+ const errorText = await response.text().catch(() => 'Unknown error')
+ throw createError(
+ 'FilBeamService',
+ 'getDataSetStats',
+ `HTTP ${response.status} ${response.statusText}: ${errorText}`
+ )
+ }
+
+ const data = await response.json()
+ const validated = this._validateStatsResponse(data)
+
+ return {
+ cdnEgressQuota: BigInt(validated.cdnEgressQuota),
+ cacheMissEgressQuota: BigInt(validated.cacheMissEgressQuota),
+ }
+ }
+}
diff --git a/packages/synapse-sdk/src/index.ts b/packages/synapse-sdk/src/index.ts
index ccf62dba0..2b4314693 100644
--- a/packages/synapse-sdk/src/index.ts
+++ b/packages/synapse-sdk/src/index.ts
@@ -1,26 +1,14 @@
/**
- * Synapse SDK - Main entry point
+ * **Synapse SDK - Main entry point**
+ *
+ * @module Synapse
*
* @example
- * ```ts
- * import { Synapse } from '@filoz/synapse-sdk'
+ * ```ts twoslash
+ * import { Synapse, RPC_URLS } from '@filoz/synapse-sdk'
* ```
- *
- * @packageDocumentation
- * @module Synapse
- */
-
-/**
- * Synapse SDK main entry point
*/
-export * from './payments/index.ts'
-export * from './pdp/index.ts'
-export * from './session/index.ts'
-export * from './storage/index.ts'
-export * from './subgraph/index.ts'
export { Synapse } from './synapse.ts'
-export * from './telemetry/index.ts'
export * from './types.ts'
-export * from './utils/index.ts'
-export * from './warm-storage/index.ts'
+export * from './utils/constants.ts'
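With the wildcard re-exports removed, the root entry point now exposes only `Synapse`, the shared types, and the constants; the other modules are reached through the subpath exports declared in `package.json` earlier in this diff. For example:

```ts
// Root entry point: core surface only.
import { RPC_URLS, Synapse } from '@filoz/synapse-sdk'

// Everything else moves behind its subpath export.
import { PaymentsService } from '@filoz/synapse-sdk/payments'
import { PDPServer } from '@filoz/synapse-sdk/pdp'
import { FilBeamService } from '@filoz/synapse-sdk/filbeam'
```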
diff --git a/packages/synapse-sdk/src/payments/index.ts b/packages/synapse-sdk/src/payments/index.ts
index 4f6f9a57c..cbe4aba6e 100644
--- a/packages/synapse-sdk/src/payments/index.ts
+++ b/packages/synapse-sdk/src/payments/index.ts
@@ -1,9 +1,10 @@
/**
- * Exports the PaymentsService and DepositOptions types
+ * Payments Service
*
* @module Payments
+ *
* @example
- * ```ts
+ * ```ts twoslash
* import { PaymentsService } from '@filoz/synapse-sdk/payments'
* ```
*/
diff --git a/packages/synapse-sdk/src/payments/service.ts b/packages/synapse-sdk/src/payments/service.ts
index 12f82545a..58ece0699 100644
--- a/packages/synapse-sdk/src/payments/service.ts
+++ b/packages/synapse-sdk/src/payments/service.ts
@@ -1,8 +1,3 @@
-/**
- * PaymentsService - Consolidated interface for all Payments contract operations
- * along with some additional token related utilities.
- */
-
import { ethers } from 'ethers'
import type { RailInfo, SettlementResult, TokenAmount, TokenIdentifier } from '../types.ts'
import {
@@ -13,7 +8,6 @@ import {
EIP2612_PERMIT_TYPES,
getCurrentEpoch,
getFilecoinNetworkType,
- SETTLEMENT_FEE,
TIMING_CONSTANTS,
TOKENS,
} from '../utils/index.ts'
@@ -34,6 +28,9 @@ export interface DepositOptions {
onDepositStarting?: () => void
}
+/**
+ * PaymentsService - Filecoin Pay client for managing deposits, approvals, and payment rails
+ */
export class PaymentsService {
private readonly _provider: ethers.Provider
private readonly _signer: ethers.Signer
@@ -861,7 +858,7 @@ export class PaymentsService {
/**
* Settle a payment rail up to a specific epoch (sends a transaction)
- * Note: This method automatically includes the required network fee (FIL) for burning
+ *
* @param railId - The rail ID to settle
* @param untilEpoch - The epoch to settle up to (must be <= current epoch; defaults to current).
* Can be used for partial settlements to a past epoch.
@@ -881,9 +878,7 @@ export class PaymentsService {
const paymentsContract = this._getPaymentsContract()
// Only set explicit nonce if NonceManager is disabled
- const txOptions: any = {
- value: SETTLEMENT_FEE, // Include the settlement fee (NETWORK_FEE in contract) as msg.value
- }
+ const txOptions: any = {}
if (this._disableNonceManager) {
const currentNonce = await this._provider.getTransactionCount(signerAddress, 'pending')
txOptions.nonce = currentNonce
@@ -904,7 +899,7 @@ export class PaymentsService {
/**
* Get the expected settlement amounts for a rail (read-only simulation)
- * Note: The actual settlement will require a network fee (FIL) to be sent with the transaction
+ *
* @param railId - The rail ID to check
* @param untilEpoch - The epoch to settle up to (must be <= current epoch; defaults to current).
* Can be used to preview partial settlements to a past epoch.
@@ -921,7 +916,6 @@ export class PaymentsService {
try {
// Use staticCall to simulate the transaction and get the return values
- // Include the settlement fee (NETWORK_FEE in contract) in the simulation
const result = await paymentsContract.settleRail.staticCall(railIdBigint, untilEpochBigint)
return {
@@ -1026,7 +1020,7 @@ export class PaymentsService {
* Automatically settle a rail, detecting whether it's terminated or active
* This method checks the rail status and calls the appropriate settlement method:
* - For terminated rails: calls settleTerminatedRail()
- * - For active rails: calls settle() with optional untilEpoch (requires settlement fee)
+ * - For active rails: calls settle() with optional untilEpoch
*
* @param railId - The rail ID to settle
* @param untilEpoch - The epoch to settle up to (must be <= current epoch for active rails; ignored for terminated rails)
diff --git a/packages/synapse-sdk/src/pdp/index.ts b/packages/synapse-sdk/src/pdp/index.ts
index b1c82db7e..445c31c5f 100644
--- a/packages/synapse-sdk/src/pdp/index.ts
+++ b/packages/synapse-sdk/src/pdp/index.ts
@@ -1,7 +1,6 @@
/**
- * Exports the PDP components
+ * PDP components
*
- * @packageDocumentation
* @module PDP
* @example
* ```ts
diff --git a/packages/synapse-sdk/src/pdp/verifier.ts b/packages/synapse-sdk/src/pdp/verifier.ts
index e2ab60dd8..db98c968c 100644
--- a/packages/synapse-sdk/src/pdp/verifier.ts
+++ b/packages/synapse-sdk/src/pdp/verifier.ts
@@ -165,6 +165,16 @@ export class PDPVerifier {
}
}
+ /**
+ * Get pieces scheduled for removal from a data set
+ * @param dataSetId - The PDPVerifier data set ID
+ * @returns Array of piece IDs scheduled for removal
+ */
+ async getScheduledRemovals(dataSetId: number): Promise<number[]> {
+ const result = await this._contract.getScheduledRemovals(dataSetId)
+ return result.map((pieceId: bigint) => Number(pieceId))
+ }
+
/**
* Get the PDPVerifier contract address for the current network
*/
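A short usage sketch for the new method. How the `PDPVerifier` instance is constructed is not shown in this diff, so construction is left abstract, and the `@filoz/synapse-sdk/pdp` import path assumes the class is re-exported from the PDP entry point:

```ts
import type { PDPVerifier } from '@filoz/synapse-sdk/pdp'

declare const verifier: PDPVerifier // construction elided

// Piece IDs queued for removal from data set 123, converted from the
// contract's bigint values to numbers by getScheduledRemovals.
const pieceIds: number[] = await verifier.getScheduledRemovals(123)
console.log('Scheduled removals:', pieceIds)
```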
diff --git a/packages/synapse-sdk/src/session/index.ts b/packages/synapse-sdk/src/session/index.ts
index 560e0e8c1..df8337c05 100644
--- a/packages/synapse-sdk/src/session/index.ts
+++ b/packages/synapse-sdk/src/session/index.ts
@@ -1 +1,11 @@
+/**
+ * Session components
+ *
+ * @module Session
+ * @example
+ * ```ts
+ * import { SessionKey } from '@filoz/synapse-sdk/session'
+ * ```
+ */
+
export * from './key.ts'
diff --git a/packages/synapse-sdk/src/sp-registry/index.ts b/packages/synapse-sdk/src/sp-registry/index.ts
index 9bef08df0..96c2a14dc 100644
--- a/packages/synapse-sdk/src/sp-registry/index.ts
+++ b/packages/synapse-sdk/src/sp-registry/index.ts
@@ -1,6 +1,11 @@
/**
- * ServiceProviderRegistry module
- * @module sp-registry
+ * Service Provider Registry components
+ *
+ * @module SPRegistry
+ * @example
+ * ```ts
+ * import { SPRegistryService } from '@filoz/synapse-sdk/sp-registry'
+ * ```
*/
export { SPRegistryService } from './service.ts'
diff --git a/packages/synapse-sdk/src/sp-registry/service.ts b/packages/synapse-sdk/src/sp-registry/service.ts
index d1481e361..c29d416dc 100644
--- a/packages/synapse-sdk/src/sp-registry/service.ts
+++ b/packages/synapse-sdk/src/sp-registry/service.ts
@@ -37,14 +37,16 @@ import type {
export class SPRegistryService {
private readonly _provider: ethers.Provider
+ private readonly _chainId: number | bigint
private readonly _registryAddress: string
private _registryContract: ethers.Contract | null = null
/**
* Constructor for SPRegistryService
*/
- constructor(provider: ethers.Provider, registryAddress: string) {
+ constructor(provider: ethers.Provider, chainId: number | bigint, registryAddress: string) {
this._provider = provider
+ this._chainId = chainId
this._registryAddress = registryAddress
}
@@ -52,7 +54,8 @@ export class SPRegistryService {
* Create a new SPRegistryService instance
*/
static async create(provider: ethers.Provider, registryAddress: string): Promise<SPRegistryService> {
- return new SPRegistryService(provider, registryAddress)
+ const network = await provider.getNetwork()
+ return new SPRegistryService(provider, network.chainId, registryAddress)
}
/**
@@ -401,7 +404,7 @@ export class SPRegistryService {
const capabilities = capabilitiesListToObject(result.product.capabilityKeys, result.productCapabilityValues)
return {
- offering: decodePDPCapabilities(capabilities),
+ offering: await decodePDPCapabilities(BigInt(providerId), this._chainId, capabilities),
capabilities,
isActive: result.product.isActive,
}
@@ -464,7 +467,7 @@ export class SPRegistryService {
const results = await multicall.aggregate3.staticCall(calls)
// Process results
- return this._processMulticallResults(providerIds, results, iface)
+ return await this._processMulticallResults(providerIds, results, iface)
}
/**
@@ -491,7 +494,11 @@ export class SPRegistryService {
/**
* Process Multicall3 results into ProviderInfo array
*/
- private _processMulticallResults(providerIds: number[], results: any[], iface: ethers.Interface): ProviderInfo[] {
+ private async _processMulticallResults(
+ providerIds: number[],
+ results: any[],
+ iface: ethers.Interface
+ ): Promise<ProviderInfo[]> {
const providers: ProviderInfo[] = []
for (let i = 0; i < providerIds.length; i++) {
@@ -512,7 +519,7 @@ export class SPRegistryService {
type: 'PDP',
isActive: product.isActive,
capabilities,
- data: decodePDPCapabilities(capabilities),
+ data: await decodePDPCapabilities(BigInt(providerIds[i]), this._chainId, capabilities),
},
])
if (providerInfo.serviceProvider === ethers.ZeroAddress) {
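Since the constructor now takes the chain ID as its second argument (a breaking change for anyone constructing the service directly), the static `create` helper is the convenient path: it resolves the chain ID from the provider first. A minimal sketch; the RPC URL and registry address are placeholders:

```ts
import { ethers } from 'ethers'
import { SPRegistryService } from '@filoz/synapse-sdk/sp-registry'

const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
const registryAddress = '0x...' // placeholder

// Preferred: create() looks up the chain ID from the provider.
const registry = await SPRegistryService.create(provider, registryAddress)

// Equivalent direct construction with an explicit chain ID.
const { chainId } = await provider.getNetwork()
const sameRegistry = new SPRegistryService(provider, chainId, registryAddress)
```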
diff --git a/packages/synapse-sdk/src/storage/context.ts b/packages/synapse-sdk/src/storage/context.ts
index 015f02943..67254f8e6 100644
--- a/packages/synapse-sdk/src/storage/context.ts
+++ b/packages/synapse-sdk/src/storage/context.ts
@@ -26,7 +26,7 @@ import { asPieceCID } from '@filoz/synapse-core/piece'
import * as SP from '@filoz/synapse-core/sp'
import { randIndex, randU256 } from '@filoz/synapse-core/utils'
import type { ethers } from 'ethers'
-import type { Hex } from 'viem'
+import type { Address, Hex } from 'viem'
import type { PaymentsService } from '../payments/index.ts'
import { PDPAuthHelper, PDPServer } from '../pdp/index.ts'
import { PDPVerifier } from '../pdp/verifier.ts'
@@ -35,10 +35,11 @@ import type { ProviderInfo } from '../sp-registry/types.ts'
import type { Synapse } from '../synapse.ts'
import type {
CreateContextsOptions,
+ DataSetInfo,
DownloadOptions,
- EnhancedDataSetInfo,
MetadataEntry,
PieceCID,
+ PieceRecord,
PieceStatus,
PreflightInfo,
ProviderSelectionResult,
@@ -60,6 +61,7 @@ import { combineMetadata, metadataMatches, objectToEntries, validatePieceMetadat
import type { WarmStorageService } from '../warm-storage/index.ts'
const NO_REMAINING_PROVIDERS_ERROR_MESSAGE = 'No approved service providers available'
+const PRIME_ENDORSEMENTS: Address[] = ['0x2127C3a31F54B81B5E9AD1e29C36c420d3D6ecC5']
export class StorageContext {
private readonly _synapse: Synapse
@@ -71,6 +73,7 @@ export class StorageContext {
private readonly _signer: ethers.Signer
private readonly _uploadBatchSize: number
private _dataSetId: number | undefined
+ private _clientDataSetId: bigint | undefined
private readonly _dataSetMetadata: Record<string, string>
// AddPieces batching state
@@ -113,6 +116,23 @@ export class StorageContext {
return this._dataSetId
}
+ /**
+ * Get the client data set nonce ("clientDataSetId"), either from cache or by fetching from the chain
+ * @returns The client data set nonce
+ * @throws Error if the data set ID is not set
+ */
+ private async getClientDataSetId(): Promise<bigint> {
+ if (this._clientDataSetId !== undefined) {
+ return this._clientDataSetId
+ }
+ if (this.dataSetId == null) {
+ throw createError('StorageContext', 'getClientDataSetId', 'Data set not found')
+ }
+ const dataSetInfo = await this._warmStorageService.getDataSet(this.dataSetId)
+ this._clientDataSetId = dataSetInfo.clientDataSetId
+ return this._clientDataSetId
+ }
+
/**
* Validate data size against minimum and maximum limits
* @param sizeBytes - Size of data in bytes
@@ -191,7 +211,7 @@ export class StorageContext {
const resolutions: ProviderSelectionResult[] = []
const clientAddress = await synapse.getClient().getAddress()
const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
- const spRegistry = new SPRegistryService(synapse.getProvider(), registryAddress)
+ const spRegistry = new SPRegistryService(synapse.getProvider(), synapse.getChainId(), registryAddress)
if (options.dataSetIds) {
const selections = []
for (const dataSetId of new Set(options.dataSetIds)) {
@@ -245,6 +265,7 @@ export class StorageContext {
warmStorageService,
spRegistry,
excludeProviderIds,
+ resolutions.length === 0 ? PRIME_ENDORSEMENTS : [],
options.forceCreateDataSets ?? false,
options.withIpni ?? false,
options.dev ?? false
@@ -278,7 +299,7 @@ export class StorageContext {
): Promise<StorageContext> {
// Create SPRegistryService
const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
- const spRegistry = new SPRegistryService(synapse.getProvider(), registryAddress)
+ const spRegistry = new SPRegistryService(synapse.getProvider(), synapse.getChainId(), registryAddress)
// Resolve provider and data set based on options
const resolution = await StorageContext.resolveProviderAndDataSet(synapse, warmStorageService, spRegistry, options)
@@ -375,6 +396,7 @@ export class StorageContext {
warmStorageService,
spRegistry,
options.excludeProviderIds ?? [],
+ PRIME_ENDORSEMENTS,
options.forceCreateDataSet ?? false,
options.withIpni ?? false,
options.dev ?? false
@@ -388,50 +410,45 @@ export class StorageContext {
dataSetId: number,
warmStorageService: WarmStorageService,
spRegistry: SPRegistryService,
- signerAddress: string,
+ clientAddress: string,
options: StorageServiceOptions
): Promise<ProviderSelectionResult> {
- // Fetch data sets to find the specific one
- const dataSets = await warmStorageService.getClientDataSetsWithDetails(signerAddress)
- const dataSet = dataSets.find((ds) => ds.pdpVerifierDataSetId === dataSetId)
+ const [dataSetInfo, dataSetMetadata] = await Promise.all([
+ warmStorageService.getDataSet(dataSetId).then(async (dataSetInfo) => {
+ await StorageContext.validateDataSetConsistency(dataSetInfo, options, spRegistry)
+ return dataSetInfo
+ }),
+ warmStorageService.getDataSetMetadata(dataSetId),
+ warmStorageService.validateDataSet(dataSetId),
+ ])
- if (dataSet == null || !dataSet.isLive || !dataSet.isManaged) {
+ if (dataSetInfo.payer.toLowerCase() !== clientAddress.toLowerCase()) {
throw createError(
'StorageContext',
'resolveByDataSetId',
- `Data set ${dataSetId} not found, not owned by ${signerAddress}, ` +
- 'or not managed by the current WarmStorage contract'
+ `Data set ${dataSetId} is not owned by ${clientAddress} (owned by ${dataSetInfo.payer})`
)
}
- // Validate consistency with other parameters if provided
- if (options.providerId != null || options.providerAddress != null) {
- await StorageContext.validateDataSetConsistency(dataSet, options, spRegistry)
- }
-
- // Look up provider by ID from the data set
- const provider = await spRegistry.getProvider(dataSet.providerId)
+ const provider = await spRegistry.getProvider(dataSetInfo.providerId)
if (provider == null) {
throw createError(
'StorageContext',
'resolveByDataSetId',
- `Provider ID ${dataSet.providerId} for data set ${dataSetId} not found in registry`
+ `Provider ID ${dataSetInfo.providerId} for data set ${dataSetId} not found in registry`
)
}
- // Validate CDN settings match if specified
- if (options.withCDN != null && dataSet.withCDN !== options.withCDN) {
+ const withCDN = dataSetInfo.cdnRailId > 0 && METADATA_KEYS.WITH_CDN in dataSetMetadata
+ if (options.withCDN != null && withCDN !== options.withCDN) {
throw createError(
'StorageContext',
'resolveByDataSetId',
- `Data set ${dataSetId} has CDN ${dataSet.withCDN ? 'enabled' : 'disabled'}, ` +
+ `Data set ${dataSetId} has CDN ${withCDN ? 'enabled' : 'disabled'}, ` +
`but requested ${options.withCDN ? 'enabled' : 'disabled'}`
)
}
- // Backfill data set metadata from chain
- const dataSetMetadata = await warmStorageService.getDataSetMetadata(dataSetId)
-
return {
provider,
dataSetId,
@@ -444,7 +461,7 @@ export class StorageContext {
* Validate data set consistency with provided options
*/
private static async validateDataSetConsistency(
- dataSet: EnhancedDataSetInfo,
+ dataSet: DataSetInfo,
options: StorageServiceOptions,
spRegistry: SPRegistryService
): Promise<void> {
@@ -454,8 +471,7 @@ export class StorageContext {
throw createError(
'StorageContext',
'validateDataSetConsistency',
- `Data set ${dataSet.pdpVerifierDataSetId} belongs to provider ID ${dataSet.providerId}, ` +
- `but provider ID ${options.providerId} was requested`
+ `Data set belongs to provider ID ${dataSet.providerId}, but provider ID ${options.providerId} was requested`
)
}
}
@@ -471,8 +487,7 @@ export class StorageContext {
throw createError(
'StorageContext',
'validateDataSetConsistency',
- `Data set ${dataSet.pdpVerifierDataSetId} belongs to provider ${actualProvider?.serviceProvider ?? 'unknown'}, ` +
- `but provider ${options.providerAddress} was requested`
+ `Data set belongs to provider ${actualProvider?.serviceProvider ?? 'unknown'}, but provider ${options.providerAddress} was requested`
)
}
}
@@ -482,7 +497,7 @@ export class StorageContext {
* Resolve using a specific provider ID
*/
private static async resolveByProviderId(
- signerAddress: string,
+ clientAddress: string,
providerId: number,
requestedMetadata: Record<string, string>,
warmStorageService: WarmStorageService,
@@ -492,7 +507,7 @@ export class StorageContext {
// Fetch provider (always) and dataSets (only if not forcing) in parallel
const [provider, dataSets] = await Promise.all([
spRegistry.getProvider(providerId),
- forceCreateDataSet ? Promise.resolve(null) : warmStorageService.getClientDataSetsWithDetails(signerAddress),
+ forceCreateDataSet ? Promise.resolve(null) : warmStorageService.getClientDataSetsWithDetails(clientAddress),
])
if (provider == null) {
@@ -557,7 +572,7 @@ export class StorageContext {
providerAddress: string,
warmStorageService: WarmStorageService,
spRegistry: SPRegistryService,
- signerAddress: string,
+ clientAddress: string,
requestedMetadata: Record<string, string>,
forceCreateDataSet?: boolean
): Promise<ProviderSelectionResult> {
@@ -573,7 +588,7 @@ export class StorageContext {
// Use the providerId resolution logic
return await StorageContext.resolveByProviderId(
- signerAddress,
+ clientAddress,
provider.id,
requestedMetadata,
warmStorageService,
@@ -587,11 +602,12 @@ export class StorageContext {
* Prioritizes existing data sets and provider health
*/
private static async smartSelectProvider(
- signerAddress: string,
+ clientAddress: string,
requestedMetadata: Record<string, string>,
warmStorageService: WarmStorageService,
spRegistry: SPRegistryService,
excludeProviderIds: number[],
+ preferEndorsements: Address[],
forceCreateDataSet: boolean,
withIpni: boolean,
dev: boolean
@@ -601,7 +617,7 @@ export class StorageContext {
// 2. If no existing data sets, find a healthy provider
// Get client's data sets
- const dataSets = await warmStorageService.getClientDataSetsWithDetails(signerAddress)
+ const dataSets = await warmStorageService.getClientDataSetsWithDetails(clientAddress)
const skipProviderIds = new Set(excludeProviderIds)
// Filter for managed data sets with matching metadata
@@ -653,9 +669,9 @@ export class StorageContext {
}
}
- try {
- const selectedProvider = await StorageContext.selectProviderWithPing(generateProviders())
+ const selectedProvider = await StorageContext.selectProviderWithPing(generateProviders())
+ if (selectedProvider != null) {
// Find the first matching data set ID for this provider
// Match by provider ID (stable identifier in the registry)
const matchingDataSet = sorted.find((ps) => ps.providerId === selectedProvider.id)
@@ -677,9 +693,6 @@ export class StorageContext {
dataSetMetadata,
}
}
- } catch (_error) {
- console.warn('All providers from existing data sets failed health check. Falling back to all providers.')
- // Fall through to select from all approved providers below
}
}
@@ -698,8 +711,37 @@ export class StorageContext {
throw createError('StorageContext', 'smartSelectProvider', NO_REMAINING_PROVIDERS_ERROR_MESSAGE)
}
- // Random selection from all providers
- const provider = await StorageContext.selectRandomProvider(allProviders)
+ let provider: ProviderInfo | null
+ if (preferEndorsements.length > 0) {
+ // Split providers according to whether they carry any of the preferred endorsements
+ const [otherProviders, endorsedProviders] = allProviders.reduce<[ProviderInfo[], ProviderInfo[]]>(
+ (results: [ProviderInfo[], ProviderInfo[]], provider: ProviderInfo) => {
+ results[
+ preferEndorsements.some(
+ (endorsement: Address) => endorsement in (provider.products.PDP?.data.endorsements ?? {})
+ )
+ ? 1
+ : 0
+ ].push(provider)
+ return results
+ },
+ [[], []]
+ )
+ provider =
+ (await StorageContext.selectRandomProvider(endorsedProviders)) ||
+ (await StorageContext.selectRandomProvider(otherProviders))
+ } else {
+ // Random selection from all providers
+ provider = await StorageContext.selectRandomProvider(allProviders)
+ }
+
+ if (provider == null) {
+ throw createError(
+ 'StorageContext',
+ 'smartSelectProvider',
+ `All ${allProviders.length} providers failed health check. Storage may be temporarily unavailable.`
+ )
+ }
return {
provider,
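
For reviewers, the reduce above is a two-way partition: providers whose PDP product carries at least one of the preferred endorsement addresses are tried first, the rest only as a fallback. A minimal standalone sketch of that behaviour (the `ProviderInfo` shape is simplified and `partitionByEndorsement` is an illustrative helper, not part of the SDK):

```ts
type Address = `0x${string}`

interface ProviderInfo {
  id: number
  products: { PDP?: { data: { endorsements?: Record<Address, unknown> } } }
}

// Two-way partition mirroring the reduce in smartSelectProvider:
// providers carrying at least one preferred endorsement go into `endorsed`.
function partitionByEndorsement(
  providers: ProviderInfo[],
  preferEndorsements: Address[]
): [other: ProviderInfo[], endorsed: ProviderInfo[]] {
  const other: ProviderInfo[] = []
  const endorsed: ProviderInfo[] = []
  for (const provider of providers) {
    const endorsements = provider.products.PDP?.data.endorsements ?? {}
    if (preferEndorsements.some((address) => address in endorsements)) {
      endorsed.push(provider)
    } else {
      other.push(provider)
    }
  }
  return [other, endorsed]
}
```

Selection then falls back from the endorsed pool to the remaining providers, which is what the `||` chain over `selectRandomProvider` in the hunk above expresses.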
@@ -716,9 +758,9 @@ export class StorageContext {
* @param dev - Include dev providers
* @returns Selected provider
*/
- private static async selectRandomProvider(providers: ProviderInfo[]): Promise<ProviderInfo> {
+ private static async selectRandomProvider(providers: ProviderInfo[]): Promise<ProviderInfo | null> {
if (providers.length === 0) {
- throw createError('StorageContext', 'selectRandomProvider', 'No providers available')
+ return null
}
// Create async generator that yields providers in random order
@@ -742,12 +784,9 @@ export class StorageContext {
* @returns The first provider that responds
* @throws If all providers fail
*/
- private static async selectProviderWithPing(providers: AsyncIterable<ProviderInfo>): Promise<ProviderInfo> {
- let providerCount = 0
-
+ private static async selectProviderWithPing(providers: AsyncIterable<ProviderInfo>): Promise<ProviderInfo | null> {
// Try providers in order until we find one that responds to ping
for await (const provider of providers) {
- providerCount++
try {
// Create a temporary PDPServer for this specific provider's endpoint
if (!provider.products.PDP?.data.serviceURL) {
@@ -766,16 +805,7 @@ export class StorageContext {
}
}
- // All providers failed ping test
- if (providerCount === 0) {
- throw createError('StorageContext', 'selectProviderWithPing', 'No providers available to select from')
- }
-
- throw createError(
- 'StorageContext',
- 'selectProviderWithPing',
- `All ${providerCount} providers failed health check. Storage may be temporarily unavailable.`
- )
+ return null
}
/**
@@ -981,22 +1011,24 @@ export class StorageContext {
const pieceCids: PieceCID[] = batch.map((item) => item.pieceCid)
const metadataArray: MetadataEntry[][] = batch.map((item) => item.metadata ?? [])
const confirmedPieceIds: number[] = []
+ const addedPieceRecords = pieceCids.map((pieceCid) => ({ pieceCid }))
if (this.dataSetId) {
- const [, dataSetInfo] = await Promise.all([
+ const [, clientDataSetId] = await Promise.all([
this._warmStorageService.validateDataSet(this.dataSetId),
- this._warmStorageService.getDataSet(this.dataSetId),
+ this.getClientDataSetId(),
])
// Add pieces to the data set
const addPiecesResult = await this._pdpServer.addPieces(
this.dataSetId, // PDPVerifier data set ID
- dataSetInfo.clientDataSetId, // Client's dataset ID
+ clientDataSetId, // Client's dataset nonce
pieceCids,
metadataArray
)
// Notify callbacks with transaction
batch.forEach((item) => {
+ item.callbacks?.onPiecesAdded?.(addPiecesResult.txHash as Hex, addedPieceRecords)
item.callbacks?.onPieceAdded?.(addPiecesResult.txHash as Hex)
})
const addPiecesResponse = await SP.pollForAddPiecesStatus(addPiecesResult)
@@ -1004,7 +1036,12 @@ export class StorageContext {
// Handle transaction tracking if available
confirmedPieceIds.push(...(addPiecesResponse.confirmedPieceIds ?? []))
+ const confirmedPieceRecords: PieceRecord[] = confirmedPieceIds.map((pieceId, index) => ({
+ pieceId,
+ pieceCid: pieceCids[index],
+ }))
batch.forEach((item) => {
+ item.callbacks?.onPiecesConfirmed?.(this.dataSetId as number, confirmedPieceRecords)
item.callbacks?.onPieceConfirmed?.(confirmedPieceIds)
})
} else {
@@ -1031,6 +1068,7 @@ export class StorageContext {
}
)
batch.forEach((item) => {
+ item.callbacks?.onPiecesAdded?.(createAndAddPiecesResult.txHash as Hex, addedPieceRecords)
item.callbacks?.onPieceAdded?.(createAndAddPiecesResult.txHash as Hex)
})
const confirmedDataset = await SP.pollForDataSetCreationStatus(createAndAddPiecesResult)
@@ -1045,7 +1083,12 @@ export class StorageContext {
confirmedPieceIds.push(...(confirmedPieces.confirmedPieceIds ?? []))
+ const confirmedPieceRecords: PieceRecord[] = confirmedPieceIds.map((pieceId, index) => ({
+ pieceId,
+ pieceCid: pieceCids[index],
+ }))
batch.forEach((item) => {
+ item.callbacks?.onPiecesConfirmed?.(this.dataSetId as number, confirmedPieceRecords)
item.callbacks?.onPieceConfirmed?.(confirmedPieceIds)
})
}
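
The new plural callbacks pass the transaction hash plus the affected pieces, alongside the existing singular callbacks. A hedged sketch of a consumer (the record shape is inferred from the fields built above and may differ from the SDK's exported `PieceRecord` type; `Hex` is assumed to come from viem, matching the cast used above):

```ts
import type { Hex } from 'viem' // assumed source of the Hex type

// Inferred shape: added records carry only pieceCid; confirmed records also carry pieceId.
interface PieceRecordSketch {
  pieceId?: number
  pieceCid: { toString(): string }
}

// Passed e.g. via the callbacks option of an upload call.
const callbacks = {
  onPiecesAdded: (txHash: Hex, pieces: PieceRecordSketch[]) => {
    console.log(`addPieces tx ${txHash} submitted for ${pieces.length} piece(s)`)
  },
  onPiecesConfirmed: (dataSetId: number, pieces: PieceRecordSketch[]) => {
    for (const piece of pieces) {
      console.log(`data set ${dataSetId}: piece ${piece.pieceId} (${piece.pieceCid.toString()}) confirmed`)
    }
  },
}
```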
@@ -1124,6 +1167,25 @@ export class StorageContext {
return pieces
}
+ /**
+ * Get pieces scheduled for removal from this data set
+ * @returns Array of piece IDs scheduled for removal
+ */
+ async getScheduledRemovals(): Promise<number[]> {
+ if (this._dataSetId == null) {
+ return []
+ }
+
+ const pdpVerifierAddress = this._warmStorageService.getPDPVerifierAddress()
+ const pdpVerifier = new PDPVerifier(this._synapse.getProvider(), pdpVerifierAddress)
+
+ try {
+ return await pdpVerifier.getScheduledRemovals(this._dataSetId)
+ } catch (error) {
+ throw createError('StorageContext', 'getScheduledRemovals', 'Failed to get scheduled removals', error)
+ }
+ }
+
/**
* Get all active pieces for this data set as an async generator.
* This provides lazy evaluation and better memory efficiency for large data sets.
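
`getScheduledRemovals` is a thin read helper over the PDPVerifier bound to this data set; it returns an empty array when the context has no data set yet. A minimal usage sketch, assuming `context` is an existing `StorageContext`:

```ts
// Piece IDs already queued for removal on this data set.
const scheduled = await context.getScheduledRemovals()
if (scheduled.length > 0) {
  console.log(`Pieces scheduled for removal: ${scheduled.join(', ')}`)
}
```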
@@ -1132,10 +1194,7 @@ export class StorageContext {
* @param options.signal - Optional AbortSignal to cancel the operation
* @yields Object with pieceCid and pieceId - the piece ID is needed for certain operations like deletion
*/
- async *getPieces(options?: {
- batchSize?: number
- signal?: AbortSignal
- }): AsyncGenerator<{ pieceCid: PieceCID; pieceId: number }> {
+ async *getPieces(options?: { batchSize?: number; signal?: AbortSignal }): AsyncGenerator<PieceRecord> {
if (this._dataSetId == null) {
return
}
@@ -1197,9 +1256,9 @@ export class StorageContext {
throw createError('StorageContext', 'deletePiece', 'Data set not found')
}
const pieceId = typeof piece === 'number' ? piece : await this._getPieceIdByCID(piece)
- const dataSetInfo = await this._warmStorageService.getDataSet(this.dataSetId)
+ const clientDataSetId = await this.getClientDataSetId()
- return this._pdpServer.deletePiece(this.dataSetId, dataSetInfo.clientDataSetId, pieceId)
+ return this._pdpServer.deletePiece(this.dataSetId, clientDataSetId, pieceId)
}
/**
diff --git a/packages/synapse-sdk/src/storage/index.ts b/packages/synapse-sdk/src/storage/index.ts
index f00b68926..25876e83e 100644
--- a/packages/synapse-sdk/src/storage/index.ts
+++ b/packages/synapse-sdk/src/storage/index.ts
@@ -1,14 +1,12 @@
/**
- * Exports the Storage components
+ * Storage components
*
- * @packageDocumentation
* @module Storage
* @example
* ```ts
- * import { StorageContext, StorageManager, StorageService } from '@filoz/synapse-sdk/storage'
+ * import { StorageContext, StorageManager } from '@filoz/synapse-sdk/storage'
* ```
*/
export { StorageContext } from './context.ts'
export { StorageManager } from './manager.ts'
-export { StorageService } from './service.ts'
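
Since the `StorageService` re-export is gone (the compatibility wrapper `service.ts` is deleted just below), downstream code that still imports the old name needs a one-line migration. A hedged sketch:

```ts
// Before (no longer exported):
//   import { StorageService } from '@filoz/synapse-sdk/storage'

// After:
import { StorageContext } from '@filoz/synapse-sdk/storage'

export type { StorageContext }
```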
diff --git a/packages/synapse-sdk/src/storage/manager.ts b/packages/synapse-sdk/src/storage/manager.ts
index d905c680c..b1fe24235 100644
--- a/packages/synapse-sdk/src/storage/manager.ts
+++ b/packages/synapse-sdk/src/storage/manager.ts
@@ -68,10 +68,8 @@ type CombinedCallbacks = StorageContextCallbacks & UploadCallbacks
* 1. With explicit context: `{ context, callbacks?, metadata? }` - routes to context.upload()
* 2. Auto-create context: `{ providerId?, dataSetId?, withCDN?, callbacks?, metadata? }` - creates/reuses context
* 3. Use default context: `{ callbacks?, metadata? }` - uses cached default context
- *
- * @internal This type is intentionally not exported as it's specific to StorageManager
*/
-interface StorageManagerUploadOptions extends StorageServiceOptions {
+export interface StorageManagerUploadOptions extends StorageServiceOptions {
// Multiple storage providers: if provided, all other context options are invalid
contexts?: StorageContext[]
@@ -516,7 +514,7 @@ export class StorageManager {
// Create SPRegistryService to get providers
const registryAddress = this._warmStorageService.getServiceProviderRegistryAddress()
- const spRegistry = new SPRegistryService(this._synapse.getProvider(), registryAddress)
+ const spRegistry = new SPRegistryService(this._synapse.getProvider(), this._synapse.getChainId(), registryAddress)
// Fetch all data in parallel for performance
const [pricingData, approvedIds, allowances] = await Promise.all([
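
Note that `SPRegistryService` construction now takes the chain id between the provider and the registry address. A hedged sketch of a caller-side construction mirroring the manager code above (import paths are assumptions based on the package's per-module export pattern):

```ts
import type { Synapse } from '@filoz/synapse-sdk'
import { SPRegistryService } from '@filoz/synapse-sdk/sp-registry' // path assumed
import type { WarmStorageService } from '@filoz/synapse-sdk/warm-storage' // path assumed

// Provider, chain id, then registry address, matching the updated call sites.
function createSpRegistry(synapse: Synapse, warmStorage: WarmStorageService): SPRegistryService {
  const registryAddress = warmStorage.getServiceProviderRegistryAddress()
  return new SPRegistryService(synapse.getProvider(), synapse.getChainId(), registryAddress)
}
```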
diff --git a/packages/synapse-sdk/src/storage/service.ts b/packages/synapse-sdk/src/storage/service.ts
deleted file mode 100644
index ed4d2b2dc..000000000
--- a/packages/synapse-sdk/src/storage/service.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-/**
- * Compatibility wrapper for backwards compatibility
- * @deprecated StorageService has been renamed to StorageContext.
- * Import StorageContext from './context.ts' instead.
- */
-
-export { StorageContext as StorageService } from './context.ts'
diff --git a/packages/synapse-sdk/src/subgraph/index.ts b/packages/synapse-sdk/src/subgraph/index.ts
index f078d26c0..285effa93 100644
--- a/packages/synapse-sdk/src/subgraph/index.ts
+++ b/packages/synapse-sdk/src/subgraph/index.ts
@@ -1,7 +1,6 @@
/**
- * Exports the Subgraph components
+ * Subgraph components
*
- * @packageDocumentation
* @module Subgraph
* @example
* ```ts
diff --git a/packages/synapse-sdk/src/synapse.ts b/packages/synapse-sdk/src/synapse.ts
index cbf6099c9..dbdd21d87 100644
--- a/packages/synapse-sdk/src/synapse.ts
+++ b/packages/synapse-sdk/src/synapse.ts
@@ -1,13 +1,10 @@
-/**
- * Main Synapse class for interacting with Filecoin storage and other on-chain services
- */
-
import { ethers } from 'ethers'
+import { FilBeamService } from './filbeam/index.ts'
import { PaymentsService } from './payments/index.ts'
import { ChainRetriever, FilBeamRetriever, SubgraphRetriever } from './retriever/index.ts'
import { SessionKey } from './session/key.ts'
import { SPRegistryService } from './sp-registry/index.ts'
-import type { StorageService } from './storage/index.ts'
+import type { StorageContext } from './storage/index.ts'
import { StorageManager } from './storage/manager.ts'
import { SubgraphService } from './subgraph/service.ts'
import type { TelemetryService } from './telemetry/service.ts'
@@ -25,6 +22,9 @@ import type {
import { CHAIN_IDS, CONTRACT_ADDRESSES, getFilecoinNetworkType } from './utils/index.ts'
import { WarmStorageService } from './warm-storage/index.ts'
+/**
+ * Class for interacting with Filecoin storage and other on-chain services
+ */
export class Synapse {
private readonly _signer: ethers.Signer
private readonly _network: FilecoinNetworkType
@@ -35,6 +35,7 @@ export class Synapse {
private readonly _warmStorageService: WarmStorageService
private readonly _pieceRetriever: PieceRetriever
private readonly _storageManager: StorageManager
+ private readonly _filbeamService: FilBeamService
private _session: SessionKey | null = null
/**
@@ -142,7 +143,11 @@ export class Synapse {
// Create SPRegistryService for use in retrievers
const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
- const spRegistry = new SPRegistryService(provider, registryAddress)
+ const spRegistry = new SPRegistryService(
+ provider,
+ network === 'mainnet' ? CHAIN_IDS.mainnet : CHAIN_IDS.calibration,
+ registryAddress
+ )
// Initialize piece retriever (use provided or create default)
let pieceRetriever: PieceRetriever
@@ -166,6 +171,9 @@ export class Synapse {
pieceRetriever = new FilBeamRetriever(baseRetriever, network)
}
+ // Create FilBeamService
+ const filbeamService = new FilBeamService(network)
+
// Create and initialize the global TelemetryService.
// If telemetry is disabled, this will do nothing.
await initGlobalTelemetry(options.telemetry || {}, { filecoinNetwork: network })
@@ -179,6 +187,7 @@ export class Synapse {
warmStorageAddress,
warmStorageService,
pieceRetriever,
+ filbeamService,
options.dev === false,
options.withIpni
)
@@ -194,6 +203,7 @@ export class Synapse {
warmStorageAddress: string,
warmStorageService: WarmStorageService,
pieceRetriever: PieceRetriever,
+ filbeamService: FilBeamService,
dev: boolean,
withIpni?: boolean
) {
@@ -205,6 +215,7 @@ export class Synapse {
this._warmStorageService = warmStorageService
this._pieceRetriever = pieceRetriever
this._warmStorageAddress = warmStorageAddress
+ this._filbeamService = filbeamService
this._session = null
// Initialize StorageManager
@@ -358,6 +369,15 @@ export class Synapse {
return this._storageManager
}
+ /**
+ * Gets the FilBeam service instance
+ *
+ * @returns The FilBeam service for interacting with FilBeam infrastructure
+ */
+ get filbeam(): FilBeamService {
+ return this._filbeamService
+ }
+
/**
* Create a storage service instance.
*
@@ -384,7 +404,7 @@ export class Synapse {
* })
* ```
*/
- async createStorage(options: StorageServiceOptions = {}): Promise<StorageService> {
+ async createStorage(options: StorageServiceOptions = {}): Promise<StorageContext> {
// Use StorageManager to create context
return await this._storageManager.createContext(options)
}
@@ -436,7 +456,7 @@ export class Synapse {
// Create SPRegistryService
const registryAddress = this._warmStorageService.getServiceProviderRegistryAddress()
- const spRegistry = new SPRegistryService(this._provider, registryAddress)
+ const spRegistry = new SPRegistryService(this._provider, this.getChainId(), registryAddress)
let providerInfo: ProviderInfo | null
if (typeof providerAddress === 'string') {
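
The new `filbeam` getter makes the stats service reachable straight from a `Synapse` instance. A hedged usage sketch based on the `getDataSetStats` behaviour exercised in the new test file below (quota fields come back as bigints; `synapse` and the data set id are assumptions for illustration):

```ts
const dataSetId = 123 // hypothetical data set id
const stats = await synapse.filbeam.getDataSetStats(dataSetId)
console.log(`CDN egress quota remaining: ${stats.cdnEgressQuota}`)
console.log(`Cache-miss egress quota remaining: ${stats.cacheMissEgressQuota}`)
```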
diff --git a/packages/synapse-sdk/src/telemetry/index.ts b/packages/synapse-sdk/src/telemetry/index.ts
index d83c6be17..c9864857c 100644
--- a/packages/synapse-sdk/src/telemetry/index.ts
+++ b/packages/synapse-sdk/src/telemetry/index.ts
@@ -1,8 +1,14 @@
/**
- * Telemetry module exports
+ * Telemetry components
*
* Provides types for configuring telemetry and working with debug dumps.
* The TelemetryService is accessed via synapse.telemetry getter.
+ *
+ * @module Telemetry
+ * @example
+ * ```ts
+ * import { getGlobalTelemetry, initGlobalTelemetry } from '@filoz/synapse-sdk/telemetry'
+ * ```
*/
export { type DebugDump, type TelemetryConfig, TelemetryService } from './service.ts'
diff --git a/packages/synapse-sdk/src/test/filbeam-service.test.ts b/packages/synapse-sdk/src/test/filbeam-service.test.ts
new file mode 100644
index 000000000..c1dbd4af3
--- /dev/null
+++ b/packages/synapse-sdk/src/test/filbeam-service.test.ts
@@ -0,0 +1,187 @@
+import { expect } from 'chai'
+import { FilBeamService } from '../filbeam/service.ts'
+import type { FilecoinNetworkType } from '../types.ts'
+
+describe('FilBeamService', () => {
+ describe('network type validation', () => {
+ it('should throw error if network type not mainnet or calibration', () => {
+ try {
+ // @ts-expect-error
+ new FilBeamService('base-sepolia')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('Unsupported network type')
+ }
+ })
+ })
+
+ describe('URL construction', () => {
+ it('should use mainnet URL for mainnet network', () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {} as Response
+ }
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ const baseUrl = (service as any)._getStatsBaseUrl()
+ expect(baseUrl).to.equal('https://stats.filbeam.io')
+ })
+
+ it('should use calibration URL for calibration network', () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {} as Response
+ }
+ const service = new FilBeamService('calibration' as FilecoinNetworkType, mockFetch)
+
+ const baseUrl = (service as any)._getStatsBaseUrl()
+ expect(baseUrl).to.equal('https://calibration.stats.filbeam.io')
+ })
+ })
+
+ describe('getDataSetStats', () => {
+ it('should successfully fetch and parse remaining stats for mainnet', async () => {
+ const mockResponse = {
+ cdnEgressQuota: '217902493044',
+ cacheMissEgressQuota: '94243853808',
+ }
+
+ const mockFetch = async (input: string | URL | Request): Promise<Response> => {
+ expect(input).to.equal('https://stats.filbeam.io/data-set/test-dataset-id')
+ return {
+ status: 200,
+ statusText: 'OK',
+ json: async () => mockResponse,
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+ const result = await service.getDataSetStats('test-dataset-id')
+
+ expect(result).to.deep.equal({
+ cdnEgressQuota: BigInt('217902493044'),
+ cacheMissEgressQuota: BigInt('94243853808'),
+ })
+ })
+
+ it('should successfully fetch and parse remaining stats for calibration', async () => {
+ const mockResponse = {
+ cdnEgressQuota: '100000000000',
+ cacheMissEgressQuota: '50000000000',
+ }
+
+ const mockFetch = async (input: string | URL | Request): Promise<Response> => {
+ expect(input).to.equal('https://calibration.stats.filbeam.io/data-set/123')
+ return {
+ status: 200,
+ statusText: 'OK',
+ json: async () => mockResponse,
+ } as Response
+ }
+
+ const service = new FilBeamService('calibration' as FilecoinNetworkType, mockFetch)
+ const result = await service.getDataSetStats(123)
+
+ expect(result).to.deep.equal({
+ cdnEgressQuota: BigInt('100000000000'),
+ cacheMissEgressQuota: BigInt('50000000000'),
+ })
+ })
+
+ it('should handle 404 errors gracefully', async () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {
+ status: 404,
+ statusText: 'Not Found',
+ text: async () => 'Data set not found',
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ try {
+ await service.getDataSetStats('non-existent')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('Data set not found: non-existent')
+ }
+ })
+
+ it('should handle other HTTP errors', async () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {
+ status: 500,
+ statusText: 'Internal Server Error',
+ text: async () => 'Server error occurred',
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ try {
+ await service.getDataSetStats('test-dataset')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('HTTP 500 Internal Server Error')
+ }
+ })
+
+ it('should validate response is an object', async () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {
+ status: 200,
+ statusText: 'OK',
+ json: async () => null,
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ try {
+ await service.getDataSetStats('test-dataset')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('Response is not an object')
+ }
+ })
+
+ it('should validate cdnEgressQuota is present', async () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {
+ status: 200,
+ statusText: 'OK',
+ json: async () => ({
+ cacheMissEgressQuota: '12345',
+ }),
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ try {
+ await service.getDataSetStats('test-dataset')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('cdnEgressQuota must be a string')
+ }
+ })
+
+ it('should validate cacheMissEgressQuota is present', async () => {
+ const mockFetch = async (): Promise<Response> => {
+ return {
+ status: 200,
+ statusText: 'OK',
+ json: async () => ({
+ cdnEgressQuota: '12345',
+ }),
+ } as Response
+ }
+
+ const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+ try {
+ await service.getDataSetStats('test-dataset')
+ expect.fail('Should have thrown an error')
+ } catch (error: any) {
+ expect(error.message).to.include('cacheMissEgressQuota must be a string')
+ }
+ })
+ })
+})
diff --git a/packages/synapse-sdk/src/test/metadata-selection.test.ts b/packages/synapse-sdk/src/test/metadata-selection.test.ts
index e1ebdc4c4..2ba5a5ff3 100644
--- a/packages/synapse-sdk/src/test/metadata-selection.test.ts
+++ b/packages/synapse-sdk/src/test/metadata-selection.test.ts
@@ -1,11 +1,12 @@
/* globals describe it before after */
+
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { METADATA_KEYS } from '../utils/constants.ts'
import { metadataMatches, withCDNToMetadata } from '../utils/metadata.ts'
import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts'
describe('Metadata-based Data Set Selection', () => {
describe('Metadata Utilities', () => {
@@ -114,8 +115,8 @@ describe('Metadata-based Data Set Selection', () => {
let warmStorageService: WarmStorageService
before(async () => {
- server = setup([])
- await server.start({ quiet: true })
+ server = setup()
+ await server.start()
})
after(() => {
@@ -127,9 +128,9 @@ describe('Metadata-based Data Set Selection', () => {
// Create custom preset that returns different metadata for different data sets
const customPreset: any = {
- ...presets.basic,
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n, 3n]],
// Provide base dataset info per dataset id
getDataSet: (args: any) => {
@@ -140,9 +141,9 @@ describe('Metadata-based Data Set Selection', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.serviceProvider1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 0n,
pdpEndEpoch: 0n,
@@ -158,9 +159,9 @@ describe('Metadata-based Data Set Selection', () => {
pdpRailId: 2n,
cacheMissRailId: 0n,
cdnRailId: 100n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.serviceProvider1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -176,9 +177,9 @@ describe('Metadata-based Data Set Selection', () => {
pdpRailId: 3n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.serviceProvider2,
- serviceProvider: ADDRESSES.serviceProvider2,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider2,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider2,
commissionBps: 100n,
clientDataSetId: 2n,
pdpEndEpoch: 0n,
@@ -224,7 +225,7 @@ describe('Metadata-based Data Set Selection', () => {
},
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getNextPieceId: (args: any) => {
const [dataSetId] = args
if (dataSetId === 1n) return [5n] as const // Has pieces
@@ -235,14 +236,14 @@ describe('Metadata-based Data Set Selection', () => {
},
}
- server.use(JSONRPC(customPreset))
+ server.use(Mocks.JSONRPC(customPreset))
const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
})
it('should fetch metadata for each data set', async () => {
- const dataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1)
+ const dataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
assert.equal(dataSets.length, 3)
@@ -263,7 +264,7 @@ describe('Metadata-based Data Set Selection', () => {
})
it('should prefer data sets with matching metadata', async () => {
- const dataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1)
+ const dataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
// Filter for data sets with withIPFSIndexing
const withIndexing = dataSets.filter((ds) =>
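
All of the test-suite edits above follow the same migration: the local `./mocks/jsonrpc` and `./mocks/pdp` helpers are replaced by the shared `@filoz/synapse-core/mocks` entry point, and `setup()`/`server.start()` drop their arguments. A condensed sketch of the resulting pattern:

```ts
import * as Mocks from '@filoz/synapse-core/mocks'
import { setup } from 'iso-web/msw'

const server = setup()

before(async () => {
  await server.start()
})

beforeEach(() => {
  server.resetHandlers()
  // Addresses, private keys, presets and the JSONRPC handler all come from the shared Mocks module.
  server.use(Mocks.JSONRPC(Mocks.presets.basic))
})
```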
diff --git a/packages/synapse-sdk/src/test/metadata.test.ts b/packages/synapse-sdk/src/test/metadata.test.ts
index 7b29583c2..12de389c7 100644
--- a/packages/synapse-sdk/src/test/metadata.test.ts
+++ b/packages/synapse-sdk/src/test/metadata.test.ts
@@ -1,5 +1,6 @@
/* globals describe it before after beforeEach */
+import * as Mocks from '@filoz/synapse-core/mocks'
import { asPieceCID } from '@filoz/synapse-core/piece'
import { assert } from 'chai'
import { ethers } from 'ethers'
@@ -8,15 +9,9 @@ import { PDPAuthHelper } from '../pdp/auth.ts'
import { PDPServer } from '../pdp/server.ts'
import type { MetadataEntry } from '../types.ts'
import { METADATA_KEYS } from '../utils/constants.ts'
-import {
- addPiecesWithMetadataCapture,
- createDataSetWithMetadataCapture,
- type MetadataCapture,
- type PieceMetadataCapture,
-} from './mocks/pdp/handlers.ts'
// Mock server for testing
-const server = setup([])
+const server = setup()
describe('Metadata Support', () => {
const TEST_PRIVATE_KEY = '0x0101010101010101010101010101010101010101010101010101010101010101'
@@ -28,7 +23,7 @@ describe('Metadata Support', () => {
let pdpServer: PDPServer
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -52,10 +47,10 @@ describe('Metadata Support', () => {
]
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
- let capturedMetadata: MetadataCapture | null = null
+ let capturedMetadata: Mocks.pdp.MetadataCapture | null = null
server.use(
- createDataSetWithMetadataCapture(
+ Mocks.pdp.createDataSetWithMetadataCapture(
mockTxHash,
(metadata) => {
capturedMetadata = metadata
@@ -90,10 +85,10 @@ describe('Metadata Support', () => {
const dataSetId = 123
const mockTxHash = '0x1234567890abcdef'
- let capturedPieceMetadata: PieceMetadataCapture | null = null
+ let capturedPieceMetadata: Mocks.pdp.PieceMetadataCapture | null = null
server.use(
- addPiecesWithMetadataCapture(
+ Mocks.pdp.addPiecesWithMetadataCapture(
dataSetId,
mockTxHash,
(metadata) => {
@@ -138,10 +133,10 @@ describe('Metadata Support', () => {
describe('Backward Compatibility', () => {
it('should convert withCDN boolean to metadata', async () => {
const mockTxHash = '0xabcdef1234567890'
- let capturedMetadata: MetadataCapture | null = null
+ let capturedMetadata: Mocks.pdp.MetadataCapture | null = null
server.use(
- createDataSetWithMetadataCapture(
+ Mocks.pdp.createDataSetWithMetadataCapture(
mockTxHash,
(metadata) => {
capturedMetadata = metadata
diff --git a/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js b/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js
index 723b0714c..558540fa5 100644
--- a/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js
+++ b/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js
@@ -7,8 +7,8 @@
* - Please do NOT modify this file.
*/
-const PACKAGE_VERSION = '2.10.5'
-const INTEGRITY_CHECKSUM = 'f5825c521429caf22a4dd13b66e243af'
+const PACKAGE_VERSION = '2.12.4'
+const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82'
const IS_MOCKED_RESPONSE = Symbol('isMockedResponse')
const activeClientIds = new Set()
@@ -71,11 +71,6 @@ addEventListener('message', async function (event) {
break
}
- case 'MOCK_DEACTIVATE': {
- activeClientIds.delete(clientId)
- break
- }
-
case 'CLIENT_CLOSED': {
activeClientIds.delete(clientId)
@@ -94,6 +89,8 @@ addEventListener('message', async function (event) {
})
addEventListener('fetch', function (event) {
+ const requestInterceptedAt = Date.now()
+
// Bypass navigation requests.
if (event.request.mode === 'navigate') {
return
@@ -110,23 +107,29 @@ addEventListener('fetch', function (event) {
// Bypass all requests when there are no active clients.
// Prevents the self-unregistered worked from handling requests
- // after it's been deleted (still remains active until the next reload).
+ // after it's been terminated (still remains active until the next reload).
if (activeClientIds.size === 0) {
return
}
const requestId = crypto.randomUUID()
- event.respondWith(handleRequest(event, requestId))
+ event.respondWith(handleRequest(event, requestId, requestInterceptedAt))
})
/**
* @param {FetchEvent} event
* @param {string} requestId
+ * @param {number} requestInterceptedAt
*/
-async function handleRequest(event, requestId) {
+async function handleRequest(event, requestId, requestInterceptedAt) {
const client = await resolveMainClient(event)
const requestCloneForEvents = event.request.clone()
- const response = await getResponse(event, client, requestId)
+ const response = await getResponse(
+ event,
+ client,
+ requestId,
+ requestInterceptedAt,
+ )
// Send back the response clone for the "response:*" life-cycle events.
// Ensure MSW is active and ready to handle the message, otherwise
@@ -202,9 +205,10 @@ async function resolveMainClient(event) {
* @param {FetchEvent} event
* @param {Client | undefined} client
* @param {string} requestId
+ * @param {number} requestInterceptedAt
* @returns {Promise<Response>}
*/
-async function getResponse(event, client, requestId) {
+async function getResponse(event, client, requestId, requestInterceptedAt) {
// Clone the request because it might've been already used
// (i.e. its body has been read and sent to the client).
const requestClone = event.request.clone()
@@ -255,6 +259,7 @@ async function getResponse(event, client, requestId) {
type: 'REQUEST',
payload: {
id: requestId,
+ interceptedAt: requestInterceptedAt,
...serializedRequest,
},
},
diff --git a/packages/synapse-sdk/src/test/payments.test.ts b/packages/synapse-sdk/src/test/payments.test.ts
index 5931edc0e..9936a73b4 100644
--- a/packages/synapse-sdk/src/test/payments.test.ts
+++ b/packages/synapse-sdk/src/test/payments.test.ts
@@ -4,25 +4,25 @@
* Tests for PaymentsService class
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { PaymentsService } from '../payments/index.ts'
import { TIME_CONSTANTS, TOKENS } from '../utils/index.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
describe('PaymentsService', () => {
let provider: ethers.Provider
let signer: ethers.Signer
let payments: PaymentsService
- const paymentsAddress = ADDRESSES.calibration.payments
- const usdfcAddress = ADDRESSES.calibration.usdfcToken
+ const paymentsAddress = Mocks.ADDRESSES.calibration.payments
+ const usdfcAddress = Mocks.ADDRESSES.calibration.usdfcToken
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -32,7 +32,7 @@ describe('PaymentsService', () => {
beforeEach(() => {
server.resetHandlers()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
payments = new PaymentsService(provider, signer, paymentsAddress, usdfcAddress, false)
})
@@ -49,19 +49,19 @@ describe('PaymentsService', () => {
describe('walletBalance', () => {
it('should return FIL balance when no token specified', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const balance = await payments.walletBalance()
assert.equal(balance.toString(), ethers.parseEther('100').toString())
})
it('should return FIL balance when FIL token specified', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const balance = await payments.walletBalance(TOKENS.FIL)
assert.equal(balance.toString(), ethers.parseEther('100').toString())
})
it('should return USDFC balance when USDFC specified', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const balance = await payments.walletBalance(TOKENS.USDFC)
assert.equal(balance.toString(), ethers.parseUnits('1000', 18).toString())
})
@@ -78,7 +78,7 @@ describe('PaymentsService', () => {
describe('balance', () => {
it('should return USDFC balance from payments contract', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const balance = await payments.balance()
// Should return available funds (500 USDFC - 0 locked = 500)
assert.equal(balance.toString(), ethers.parseUnits('500', 18).toString())
@@ -107,13 +107,13 @@ describe('PaymentsService', () => {
describe('Token operations', () => {
it('should check allowance for USDFC', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const allowance = await payments.allowance(paymentsAddress)
assert.equal(allowance.toString(), '0')
})
it('should approve token spending', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const amount = ethers.parseUnits('100', 18)
const tx = await payments.approve(paymentsAddress, amount)
assert.exists(tx)
@@ -144,7 +144,7 @@ describe('PaymentsService', () => {
const serviceAddress = '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4'
it('should approve service as operator', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const rateAllowance = ethers.parseUnits('10', 18) // 10 USDFC per epoch
const lockupAllowance = ethers.parseUnits('1000', 18) // 1000 USDFC lockup
@@ -160,7 +160,7 @@ describe('PaymentsService', () => {
})
it('should revoke service operator approval', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const tx = await payments.revokeService(serviceAddress)
assert.exists(tx)
assert.exists(tx.hash)
@@ -168,7 +168,7 @@ describe('PaymentsService', () => {
})
it('should check service approval status', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const approval = await payments.serviceApproval(serviceAddress)
assert.exists(approval)
assert.exists(approval.isApproved)
@@ -206,8 +206,8 @@ describe('PaymentsService', () => {
describe('Error handling', () => {
it('should throw errors from payment operations', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_sendRawTransaction: () => {
throw new Error('Transaction failed')
},
@@ -228,7 +228,7 @@ describe('PaymentsService', () => {
describe('Deposit and Withdraw', () => {
it('should deposit USDFC tokens', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const depositAmount = ethers.parseUnits('100', 18)
const tx = await payments.deposit(depositAmount)
assert.exists(tx)
@@ -240,7 +240,7 @@ describe('PaymentsService', () => {
})
it('should deposit with permit', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const depositAmount = ethers.parseUnits('10', 18)
const tx = await payments.depositWithPermit(depositAmount)
assert.exists(tx)
@@ -248,7 +248,7 @@ describe('PaymentsService', () => {
})
it('should deposit with permit and approve operator', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const depositAmount = ethers.parseUnits('10', 18)
const operator = '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4'
const rateAllowance = ethers.parseUnits('5', 18)
@@ -267,7 +267,7 @@ describe('PaymentsService', () => {
})
it('should withdraw USDFC tokens', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const withdrawAmount = ethers.parseUnits('50', 18)
const tx = await payments.withdraw(withdrawAmount)
assert.exists(tx)
@@ -315,7 +315,7 @@ describe('PaymentsService', () => {
})
it('should handle deposit callbacks', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const depositAmount = ethers.parseUnits('100', 18)
let allowanceChecked = false
let approvalSent = false
@@ -353,7 +353,7 @@ describe('PaymentsService', () => {
describe('Rail Settlement Features', () => {
describe('getRailsAsPayer', () => {
it('should return rails where wallet is payer', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const rails = await payments.getRailsAsPayer()
assert.isArray(rails)
assert.equal(rails.length, 2)
@@ -374,7 +374,7 @@ describe('PaymentsService', () => {
describe('getRailsAsPayee', () => {
it('should return rails where wallet is payee', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const rails = await payments.getRailsAsPayee()
assert.isArray(rails)
assert.equal(rails.length, 1)
@@ -393,21 +393,9 @@ describe('PaymentsService', () => {
})
})
- describe('SETTLEMENT_FEE constant', () => {
- it('should have correct settlement fee value', () => {
- // Import the constant
- const { SETTLEMENT_FEE } = require('../utils/constants.ts')
-
- assert.exists(SETTLEMENT_FEE)
- assert.typeOf(SETTLEMENT_FEE, 'bigint')
- // Settlement fee should be 0.0013 FIL (1300000000000000 attoFIL)
- assert.equal(SETTLEMENT_FEE, 1300000000000000n)
- })
- })
-
describe('settle', () => {
it('should settle a rail up to current epoch', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
const tx = await payments.settle(railId)
@@ -417,13 +405,10 @@ describe('PaymentsService', () => {
assert.exists(tx.from)
assert.exists(tx.to)
assert.exists(tx.data)
- // Check that the transaction includes the network fee as value
- assert.exists(tx.value)
- assert.isTrue(tx.value > 0n)
})
it('should settle a rail up to specific epoch', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
const untilEpoch = 999999
const tx = await payments.settle(railId, untilEpoch)
@@ -434,7 +419,7 @@ describe('PaymentsService', () => {
})
it('should accept bigint rail ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123n
const tx = await payments.settle(railId)
@@ -446,7 +431,7 @@ describe('PaymentsService', () => {
describe('getSettlementAmounts', () => {
it('should get settlement amounts for a rail', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
const result = await payments.getSettlementAmounts(railId)
@@ -468,7 +453,7 @@ describe('PaymentsService', () => {
describe('settleTerminatedRail', () => {
it('should settle a terminated rail', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 456
const tx = await payments.settleTerminatedRail(railId)
@@ -481,7 +466,7 @@ describe('PaymentsService', () => {
})
it('should accept bigint rail ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 456n
const tx = await payments.settleTerminatedRail(railId)
@@ -493,7 +478,7 @@ describe('PaymentsService', () => {
describe('getRail', () => {
it('should get detailed rail information', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
const rail = await payments.getRail(railId)
@@ -512,7 +497,7 @@ describe('PaymentsService', () => {
assert.exists(rail.serviceFeeRecipient)
// Check values from mock
- assert.equal(rail.from.toLowerCase(), ADDRESSES.client1.toLowerCase())
+ assert.equal(rail.from.toLowerCase(), Mocks.ADDRESSES.client1.toLowerCase())
assert.equal(rail.to.toLowerCase(), '0xaabbccddaabbccddaabbccddaabbccddaabbccdd')
assert.equal(rail.operator, '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4')
assert.equal(rail.paymentRate.toString(), ethers.parseUnits('1', 18).toString())
@@ -523,7 +508,7 @@ describe('PaymentsService', () => {
})
it('should accept bigint rail ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123n
const rail = await payments.getRail(railId)
@@ -535,7 +520,7 @@ describe('PaymentsService', () => {
describe('settleAuto', () => {
it('should settle active rail using regular settle', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
// This rail has endEpoch = 0 (active)
const tx = await payments.settleAuto(railId)
@@ -543,25 +528,22 @@ describe('PaymentsService', () => {
assert.exists(tx)
assert.exists(tx.hash)
assert.typeOf(tx.hash, 'string')
- // Check that the transaction includes the settlement fee as value
- assert.exists(tx.value)
- assert.isTrue(tx.value > 0n)
})
it('should settle terminated rail using settleTerminatedRail', async () => {
const railId = 456
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
getRail: (args) => {
const [railIdArg] = args
if (railIdArg === 456n) {
return [
{
- token: ADDRESSES.calibration.usdfcToken,
- from: ADDRESSES.client1,
+ token: Mocks.ADDRESSES.calibration.usdfcToken,
+ from: Mocks.ADDRESSES.client1,
to: '0xaabbccddaabbccddaabbccddaabbccddaabbccdd',
operator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
validator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
@@ -575,7 +557,7 @@ describe('PaymentsService', () => {
},
]
}
- return presets.basic.payments.getRail?.(args) ?? presets.basic.payments.getRail(args)
+ return Mocks.presets.basic.payments.getRail?.(args) ?? Mocks.presets.basic.payments.getRail(args)
},
},
})
@@ -591,7 +573,7 @@ describe('PaymentsService', () => {
})
it('should pass untilEpoch parameter to settle for active rails', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123
const untilEpoch = 999999
const tx = await payments.settleAuto(railId, untilEpoch)
@@ -599,12 +581,10 @@ describe('PaymentsService', () => {
assert.exists(tx)
assert.exists(tx.hash)
assert.typeOf(tx.hash, 'string')
- assert.exists(tx.value)
- assert.isTrue(tx.value > 0n)
})
it('should accept bigint rail ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const railId = 123n
const tx = await payments.settleAuto(railId)
@@ -616,17 +596,17 @@ describe('PaymentsService', () => {
it('should ignore untilEpoch for terminated rails', async () => {
const railId = 456
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
getRail: (args) => {
const [railIdArg] = args
if (railIdArg === 456n) {
return [
{
- token: ADDRESSES.calibration.usdfcToken,
- from: ADDRESSES.client1,
+ token: Mocks.ADDRESSES.calibration.usdfcToken,
+ from: Mocks.ADDRESSES.client1,
to: '0xaabbccddaabbccddaabbccddaabbccddaabbccdd',
operator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
validator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
@@ -640,7 +620,7 @@ describe('PaymentsService', () => {
},
]
}
- return presets.basic.payments.getRail?.(args) ?? presets.basic.payments.getRail(args)
+ return Mocks.presets.basic.payments.getRail?.(args) ?? Mocks.presets.basic.payments.getRail(args)
},
},
})
@@ -660,7 +640,7 @@ describe('PaymentsService', () => {
describe('Enhanced Payment Features', () => {
describe('accountInfo', () => {
it('should return detailed account information with correct fields', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const info = await payments.accountInfo()
assert.exists(info.funds)
@@ -677,11 +657,11 @@ describe('PaymentsService', () => {
it('should calculate available funds correctly with time-based lockup', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: '0xf4240', // 1000000 in hex - matches lockupLastSettledAt calculation
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
accounts: (_args) => {
// args should be [token, owner]
return [
@@ -705,7 +685,7 @@ describe('PaymentsService', () => {
})
it('should use accountInfo in balance() method', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const balance = await payments.balance()
const info = await payments.accountInfo()
diff --git a/packages/synapse-sdk/src/test/pdp-server.test.ts b/packages/synapse-sdk/src/test/pdp-server.test.ts
index 85e681abe..c014ec1e8 100644
--- a/packages/synapse-sdk/src/test/pdp-server.test.ts
+++ b/packages/synapse-sdk/src/test/pdp-server.test.ts
@@ -15,6 +15,7 @@ import {
LocationHeaderError,
PostPieceError,
} from '@filoz/synapse-core/errors'
+import * as Mocks from '@filoz/synapse-core/mocks'
import { asPieceCID, calculate as calculatePieceCID } from '@filoz/synapse-core/piece'
import * as SP from '@filoz/synapse-core/sp'
import { assert } from 'chai'
@@ -23,16 +24,9 @@ import { setup } from 'iso-web/msw'
import { HttpResponse, http } from 'msw'
import { PDPAuthHelper, PDPServer } from '../pdp/index.ts'
import type { PDPAddPiecesInput } from '../pdp/server.ts'
-import {
- createAndAddPiecesHandler,
- finalizePieceUploadHandler,
- findPieceHandler,
- postPieceUploadsHandler,
- uploadPieceStreamingHandler,
-} from './mocks/pdp/handlers.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
describe('PDPServer', () => {
let pdpServer: PDPServer
@@ -45,7 +39,7 @@ describe('PDPServer', () => {
const TEST_CHAIN_ID = 31337
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -285,7 +279,7 @@ InvalidSignature(address expected, address actual)
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
const validPieceCid = ['bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy']
- server.use(createAndAddPiecesHandler(mockTxHash))
+ server.use(Mocks.pdp.createAndAddPiecesHandler(mockTxHash))
const result = await pdpServer.createAndAddPieces(
0n,
@@ -641,7 +635,7 @@ Database error`
it('should find a piece successfully', async () => {
const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
- server.use(findPieceHandler(mockPieceCid, true))
+ server.use(Mocks.pdp.findPieceHandler(mockPieceCid, true))
const result = await pdpServer.findPiece(mockPieceCid)
assert.strictEqual(result.pieceCid.toString(), mockPieceCid)
@@ -651,7 +645,7 @@ Database error`
SP.setTimeout(100)
const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
- server.use(findPieceHandler(mockPieceCid, false))
+ server.use(Mocks.pdp.findPieceHandler(mockPieceCid, false))
try {
await pdpServer.findPiece(mockPieceCid)
@@ -705,6 +699,27 @@ Database error`
)
}
})
+
+ it('should retry on 202 status and eventually succeed', async () => {
+ SP.setTimeout(10000) // Set shorter timeout for test
+ const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
+ let attemptCount = 0
+
+ server.use(
+ http.get('http://pdp.local/pdp/piece', async () => {
+ attemptCount++
+ // Return 202 for first 2 attempts, then 200
+ if (attemptCount < 3) {
+ return HttpResponse.json({ message: 'Processing' }, { status: 202 })
+ }
+ return HttpResponse.json({ pieceCid: mockPieceCid }, { status: 200 })
+ })
+ )
+
+ const result = await pdpServer.findPiece(mockPieceCid)
+ assert.strictEqual(result.pieceCid.toString(), mockPieceCid)
+ assert.isAtLeast(attemptCount, 3, 'Should have retried at least 3 times')
+ })
})
describe('getPieceStatus', () => {
@@ -880,9 +895,9 @@ Database error`
assert.isNotNull(mockPieceCid)
server.use(
- postPieceUploadsHandler(mockUuid),
- uploadPieceStreamingHandler(mockUuid),
- finalizePieceUploadHandler(mockUuid)
+ Mocks.pdp.postPieceUploadsHandler(mockUuid),
+ Mocks.pdp.uploadPieceStreamingHandler(mockUuid),
+ Mocks.pdp.finalizePieceUploadHandler(mockUuid)
)
await pdpServer.uploadPiece(testData)
@@ -897,8 +912,8 @@ Database error`
let finalizedWithPieceCid: string | null = null
server.use(
- postPieceUploadsHandler(mockUuid),
- uploadPieceStreamingHandler(mockUuid),
+ Mocks.pdp.postPieceUploadsHandler(mockUuid),
+ Mocks.pdp.uploadPieceStreamingHandler(mockUuid),
http.post<{ uuid: string }, { pieceCid: string }>(
'http://pdp.local/pdp/piece/uploads/:uuid',
async ({ request }) => {
diff --git a/packages/synapse-sdk/src/test/pdp-verifier.test.ts b/packages/synapse-sdk/src/test/pdp-verifier.test.ts
index ec37691f7..be58c0a0b 100644
--- a/packages/synapse-sdk/src/test/pdp-verifier.test.ts
+++ b/packages/synapse-sdk/src/test/pdp-verifier.test.ts
@@ -4,22 +4,22 @@
* Tests for PDPVerifier class
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import { calculate } from '@filoz/synapse-core/piece'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { PDPVerifier } from '../pdp/index.ts'
-import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts'
-const server = setup([])
+const server = setup()
describe('PDPVerifier', () => {
let provider: ethers.Provider
let pdpVerifier: PDPVerifier
- const testAddress = ADDRESSES.calibration.pdpVerifier
+ const testAddress = Mocks.ADDRESSES.calibration.pdpVerifier
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -28,7 +28,7 @@ describe('PDPVerifier', () => {
beforeEach(() => {
server.resetHandlers()
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
pdpVerifier = new PDPVerifier(provider, testAddress)
})
@@ -59,10 +59,10 @@ describe('PDPVerifier', () => {
describe('getNextPieceId', () => {
it('should get next piece ID', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getNextPieceId: () => [5n],
},
})
@@ -76,7 +76,7 @@ describe('PDPVerifier', () => {
describe('getDataSetListener', () => {
it('should get data set listener', async () => {
const listener = await pdpVerifier.getDataSetListener(123)
- assert.equal(listener.toLowerCase(), ADDRESSES.calibration.warmStorage.toLowerCase())
+ assert.equal(listener.toLowerCase(), Mocks.ADDRESSES.calibration.warmStorage.toLowerCase())
})
})
@@ -86,10 +86,10 @@ describe('PDPVerifier', () => {
const proposedStorageProvider = '0xabcdef1234567890123456789012345678901234'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getDataSetStorageProvider: () => [storageProvider, proposedStorageProvider],
},
})
@@ -104,10 +104,10 @@ describe('PDPVerifier', () => {
describe('getDataSetLeafCount', () => {
it('should get data set leaf count', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getDataSetLeafCount: () => [10n],
},
})
@@ -184,10 +184,10 @@ describe('PDPVerifier', () => {
const pieceCidHex = ethers.hexlify(pieceCid.bytes)
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => [[{ data: pieceCidHex as `0x${string}` }], [1n], false],
},
})
@@ -207,4 +207,41 @@ describe('PDPVerifier', () => {
assert.equal(address, testAddress)
})
})
+
+ describe('getScheduledRemovals', () => {
+ it('should get scheduled removals for a data set', async () => {
+ server.use(
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ pdpVerifier: {
+ ...Mocks.presets.basic.pdpVerifier,
+ getScheduledRemovals: () => [[1n, 2n, 5n]],
+ },
+ })
+ )
+
+ const scheduledRemovals = await pdpVerifier.getScheduledRemovals(123)
+ assert.isArray(scheduledRemovals)
+ assert.equal(scheduledRemovals.length, 3)
+ assert.equal(scheduledRemovals[0], 1)
+ assert.equal(scheduledRemovals[1], 2)
+ assert.equal(scheduledRemovals[2], 5)
+ })
+
+ it('should return empty array when no removals scheduled', async () => {
+ server.use(
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ pdpVerifier: {
+ ...Mocks.presets.basic.pdpVerifier,
+ getScheduledRemovals: () => [[]],
+ },
+ })
+ )
+
+ const scheduledRemovals = await pdpVerifier.getScheduledRemovals(123)
+ assert.isArray(scheduledRemovals)
+ assert.equal(scheduledRemovals.length, 0)
+ })
+ })
})
diff --git a/packages/synapse-sdk/src/test/retriever-chain.test.ts b/packages/synapse-sdk/src/test/retriever-chain.test.ts
index d30db16a1..7e5a694dc 100644
--- a/packages/synapse-sdk/src/test/retriever-chain.test.ts
+++ b/packages/synapse-sdk/src/test/retriever-chain.test.ts
@@ -1,3 +1,4 @@
+import * as Mocks from '@filoz/synapse-core/mocks'
import { asPieceCID } from '@filoz/synapse-core/piece'
import { assert } from 'chai'
import { ethers } from 'ethers'
@@ -7,11 +8,9 @@ import { ChainRetriever } from '../retriever/chain.ts'
import { SPRegistryService } from '../sp-registry/index.ts'
import type { PieceCID, PieceRetriever } from '../types.ts'
import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
// Mock server for testing
-const server = setup([])
+const server = setup()
// Create a mock PieceCID for testing
const mockPieceCID = asPieceCID('bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace') as PieceCID
@@ -33,7 +32,7 @@ describe('ChainRetriever', () => {
let spRegistry: SPRegistryService
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -43,10 +42,10 @@ describe('ChainRetriever', () => {
beforeEach(async () => {
server.resetHandlers()
// Set up basic JSON-RPC handler before creating services
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- warmStorage = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
- spRegistry = await SPRegistryService.create(provider, ADDRESSES.calibration.spRegistry)
+ warmStorage = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
+ spRegistry = await SPRegistryService.create(provider, Mocks.ADDRESSES.calibration.spRegistry)
})
describe('fetchPiece with specific provider', () => {
@@ -55,9 +54,9 @@ describe('ChainRetriever', () => {
let downloadCalled = false
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
}),
http.get('https://provider1.example.com/pdp/piece', async ({ request }) => {
findPieceCalled = true
@@ -72,8 +71,8 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
- providerAddress: ADDRESSES.serviceProvider1,
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
+ providerAddress: Mocks.ADDRESSES.serviceProvider1,
})
assert.isTrue(findPieceCalled, 'Should call findPiece')
@@ -84,16 +83,16 @@ describe('ChainRetriever', () => {
it('should fall back to child retriever when specific provider is not approved', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: () => [
{
providerId: 0n,
info: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
name: '',
description: '',
isActive: false,
@@ -105,7 +104,7 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
providerAddress: '0xNotApproved',
})
assert.equal(response.status, 200)
@@ -114,16 +113,16 @@ describe('ChainRetriever', () => {
it('should throw when specific provider is not approved and no child retriever', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: () => [
{
providerId: 0n,
info: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
name: '',
description: '',
isActive: false,
@@ -137,7 +136,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
providerAddress: '0xNotApproved',
})
assert.fail('Should have thrown')
@@ -150,11 +149,11 @@ describe('ChainRetriever', () => {
describe('fetchPiece with multiple providers', () => {
it('should wait for successful provider even if others fail first', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -164,9 +163,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -182,9 +181,9 @@ describe('ChainRetriever', () => {
pdpRailId: 2n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider2,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider2,
commissionBps: 100n,
clientDataSetId: 2n,
pdpEndEpoch: 0n,
@@ -194,7 +193,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
}),
@@ -214,7 +213,7 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
// Should get response from provider 2 even though provider 1 failed first
assert.equal(response.status, 200)
@@ -226,9 +225,9 @@ describe('ChainRetriever', () => {
let provider2Called = false
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
http.get('https://provider1.example.com/pdp/piece', async ({ request }) => {
provider1Called = true
@@ -255,7 +254,7 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.isTrue(provider1Called || provider2Called, 'At least one provider should be called')
assert.equal(response.status, 200)
@@ -265,11 +264,11 @@ describe('ChainRetriever', () => {
it('should fall back to child retriever when all providers fail', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -279,9 +278,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -291,7 +290,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
}),
@@ -304,7 +303,7 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.equal(response.status, 200)
assert.equal(await response.text(), 'data from child')
@@ -312,11 +311,11 @@ describe('ChainRetriever', () => {
it('should throw when all providers fail and no child retriever', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -326,9 +325,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -338,7 +337,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
}),
@@ -352,7 +351,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'All provider retrieval attempts failed')
@@ -361,27 +360,27 @@ describe('ChainRetriever', () => {
it('should handle child retriever when no data sets exist', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[]],
},
})
)
const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.equal(response.status, 200)
assert.equal(await response.text(), 'data from child')
})
it('should throw when no data sets and no child retriever', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[]],
},
})
@@ -390,7 +389,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'No active data sets with data found')
@@ -401,10 +400,10 @@ describe('ChainRetriever', () => {
describe('fetchPiece error handling', () => {
it('should throw error when provider discovery fails', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => {
throw new Error('Database connection failed')
},
@@ -415,7 +414,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'Database connection failed')
@@ -424,11 +423,11 @@ describe('ChainRetriever', () => {
it('should handle provider with no PDP product', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.providerNoPDP]), // No PDP product
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.providerNoPDP]), // No PDP product
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -438,9 +437,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -450,7 +449,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
})
@@ -459,7 +458,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'Failed to retrieve piece')
@@ -468,11 +467,11 @@ describe('ChainRetriever', () => {
it('should handle mixed success and failure from multiple providers', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -482,9 +481,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -500,9 +499,9 @@ describe('ChainRetriever', () => {
pdpRailId: 2n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider2,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider2,
commissionBps: 100n,
clientDataSetId: 2n,
pdpEndEpoch: 0n,
@@ -512,7 +511,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
}),
@@ -530,7 +529,7 @@ describe('ChainRetriever', () => {
)
const retriever = new ChainRetriever(warmStorage, spRegistry)
- const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.equal(response.status, 200)
assert.equal(await response.text(), 'success from provider2')
@@ -538,10 +537,10 @@ describe('ChainRetriever', () => {
it('should handle providers with no valid data sets', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -551,9 +550,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -563,16 +562,16 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: (args) => {
const [dataSetId] = args
return [dataSetId !== 1n] // Data set 1 not live
},
- getDataSetListener: () => [ADDRESSES.calibration.warmStorage],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
getNextPieceId: (args) => {
const [dataSetId] = args
return [dataSetId === 2n ? 0n : 1n] // Data set 2 has no pieces
@@ -584,7 +583,7 @@ describe('ChainRetriever', () => {
const retriever = new ChainRetriever(warmStorage, spRegistry)
try {
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'No active data sets with data found')
@@ -597,11 +596,11 @@ describe('ChainRetriever', () => {
let signalPassed = false
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -611,9 +610,9 @@ describe('ChainRetriever', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -623,7 +622,7 @@ describe('ChainRetriever', () => {
},
]
}
- return presets.basic.warmStorageView.getDataSet(args)
+ return Mocks.presets.basic.warmStorageView.getDataSet(args)
},
},
}),
@@ -645,7 +644,7 @@ describe('ChainRetriever', () => {
const controller = new AbortController()
const retriever = new ChainRetriever(warmStorage, spRegistry)
- await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, { signal: controller.signal })
+ await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, { signal: controller.signal })
assert.isTrue(signalPassed, 'AbortSignal should be passed to fetch')
})
diff --git a/packages/synapse-sdk/src/test/sp-registry-service.test.ts b/packages/synapse-sdk/src/test/sp-registry-service.test.ts
index 39e031cbe..c88f08973 100644
--- a/packages/synapse-sdk/src/test/sp-registry-service.test.ts
+++ b/packages/synapse-sdk/src/test/sp-registry-service.test.ts
@@ -1,15 +1,15 @@
/* globals describe it beforeEach */
+
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { SPRegistryService } from '../sp-registry/service.ts'
import { PRODUCTS } from '../sp-registry/types.ts'
import { SIZE_CONSTANTS } from '../utils/constants.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
describe('SPRegistryService', () => {
let provider: ethers.Provider
@@ -17,7 +17,7 @@ describe('SPRegistryService', () => {
let service: SPRegistryService
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -27,25 +27,25 @@ describe('SPRegistryService', () => {
beforeEach(() => {
server.resetHandlers()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
- service = new SPRegistryService(provider, ADDRESSES.calibration.spRegistry)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
+ service = new SPRegistryService(provider, 314159, Mocks.ADDRESSES.calibration.spRegistry)
})
describe('Constructor', () => {
it('should create instance with provider and address', () => {
- server.use(JSONRPC(presets.basic))
- const instance = new SPRegistryService(provider, ADDRESSES.calibration.spRegistry)
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
+ const instance = new SPRegistryService(provider, 314159, Mocks.ADDRESSES.calibration.spRegistry)
assert.exists(instance)
})
})
describe('Provider Read Operations', () => {
it('should get provider by ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const provider = await service.getProvider(1)
assert.exists(provider)
assert.equal(provider?.id, 1)
- assert.equal(provider?.serviceProvider, ADDRESSES.serviceProvider1)
+ assert.equal(provider?.serviceProvider, Mocks.ADDRESSES.serviceProvider1)
assert.equal(provider?.name, 'Test Provider')
assert.equal(provider?.description, 'Test Provider')
assert.isTrue(provider?.active)
@@ -53,16 +53,16 @@ describe('SPRegistryService', () => {
it('should return null for non-existent provider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProvider: () => [
{
providerId: 0n,
info: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
isActive: false,
name: '',
description: '',
@@ -77,25 +77,25 @@ describe('SPRegistryService', () => {
})
it('should get provider by address', async () => {
- server.use(JSONRPC(presets.basic))
- const provider = await service.getProviderByAddress(ADDRESSES.serviceProvider1)
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
+ const provider = await service.getProviderByAddress(Mocks.ADDRESSES.serviceProvider1)
assert.exists(provider)
assert.equal(provider.id, 1)
- assert.equal(provider.serviceProvider, ADDRESSES.serviceProvider1)
+ assert.equal(provider.serviceProvider, Mocks.ADDRESSES.serviceProvider1)
})
it('should return null for unregistered address', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: () => [
{
providerId: 0n,
info: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
isActive: false,
name: '',
description: '',
@@ -105,40 +105,40 @@ describe('SPRegistryService', () => {
},
})
)
- const provider = await service.getProviderByAddress(ADDRESSES.zero)
+ const provider = await service.getProviderByAddress(Mocks.ADDRESSES.zero)
assert.isNull(provider)
})
it('should get provider ID by address', async () => {
- server.use(JSONRPC(presets.basic))
- const id = await service.getProviderIdByAddress(ADDRESSES.serviceProvider1)
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
+ const id = await service.getProviderIdByAddress(Mocks.ADDRESSES.serviceProvider1)
assert.equal(id, 1)
})
it('should return 0 for unregistered address', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderIdByAddress: () => [0n],
},
})
)
- const id = await service.getProviderIdByAddress(ADDRESSES.zero)
+ const id = await service.getProviderIdByAddress(Mocks.ADDRESSES.zero)
assert.equal(id, 0)
})
it('should check if provider is active', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const isActive = await service.isProviderActive(1)
assert.isTrue(isActive)
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
isProviderActive: () => [false],
},
})
@@ -148,25 +148,25 @@ describe('SPRegistryService', () => {
})
it('should check if address is registered provider', async () => {
- server.use(JSONRPC(presets.basic))
- const isRegistered = await service.isRegisteredProvider(ADDRESSES.serviceProvider1)
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
+ const isRegistered = await service.isRegisteredProvider(Mocks.ADDRESSES.serviceProvider1)
assert.isTrue(isRegistered)
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
isRegisteredProvider: () => [false],
},
})
)
- const isNotRegistered = await service.isRegisteredProvider(ADDRESSES.zero)
+ const isNotRegistered = await service.isRegisteredProvider(Mocks.ADDRESSES.zero)
assert.isFalse(isNotRegistered)
})
it('should get provider count', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const count = await service.getProviderCount()
assert.equal(count, 2)
})
@@ -174,7 +174,7 @@ describe('SPRegistryService', () => {
describe('Provider Write Operations', () => {
it('should register new provider', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const tx = await service.registerProvider(signer, {
payee: await signer.getAddress(),
name: 'New Provider',
@@ -196,14 +196,14 @@ describe('SPRegistryService', () => {
})
it('should update provider info', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const tx = await service.updateProviderInfo(signer, 'Updated Name', 'Updated Description')
assert.exists(tx)
assert.exists(tx.hash)
})
it('should remove provider', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const tx = await service.removeProvider(signer)
assert.exists(tx)
assert.exists(tx.hash)
@@ -212,7 +212,7 @@ describe('SPRegistryService', () => {
describe('Product Operations', () => {
it('should get provider products', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const provider = await service.getProvider(1)
assert.exists(provider)
assert.exists(provider?.products)
@@ -225,7 +225,7 @@ describe('SPRegistryService', () => {
})
it('should decode PDP product data', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const provider = await service.getProvider(1)
const product = provider?.products.PDP
@@ -243,7 +243,7 @@ describe('SPRegistryService', () => {
})
it('should add new product', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const pdpData = {
serviceURL: 'https://new.example.com',
minPieceSizeInBytes: SIZE_CONSTANTS.KiB,
@@ -262,7 +262,7 @@ describe('SPRegistryService', () => {
})
it('should update existing product', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const pdpData = {
serviceURL: 'https://updated.example.com',
minPieceSizeInBytes: SIZE_CONSTANTS.KiB * 2n,
@@ -281,7 +281,7 @@ describe('SPRegistryService', () => {
})
it('should remove product', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const tx = await service.removeProduct(signer, PRODUCTS.PDP)
assert.exists(tx)
assert.exists(tx.hash)
@@ -290,7 +290,7 @@ describe('SPRegistryService', () => {
describe('Batch Operations', () => {
it('should get multiple providers in batch', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const providers = await service.getProviders([1, 2, 3])
assert.isArray(providers)
assert.equal(providers.length, 2) // Only IDs 1 and 2 exist in our mock
@@ -301,7 +301,7 @@ describe('SPRegistryService', () => {
})
it('should handle empty provider ID list', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const providers = await service.getProviders([])
assert.isArray(providers)
assert.equal(providers.length, 0)
@@ -310,7 +310,7 @@ describe('SPRegistryService', () => {
describe('Provider Info Conversion', () => {
it('should extract serviceURL from first PDP product', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const provider = await service.getProvider(1)
assert.exists(provider)
assert.equal(provider?.products.PDP?.data.serviceURL, 'https://pdp.example.com')
@@ -318,9 +318,9 @@ describe('SPRegistryService', () => {
it('should handle provider without PDP products', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.providerNoPDP]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.providerNoPDP]),
})
)
@@ -333,10 +333,10 @@ describe('SPRegistryService', () => {
describe('Error Handling', () => {
it('should handle contract call failures gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProvider: () => {
throw new Error('Contract call failed')
},
@@ -354,17 +354,17 @@ describe('SPRegistryService', () => {
it('should handle invalid product data', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
debug: true,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderWithProduct: () => [
{
providerId: 1n,
providerInfo: {
- serviceProvider: ADDRESSES.serviceProvider1,
- payee: ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
+ payee: Mocks.ADDRESSES.payee1,
name: 'Test Provider',
description: 'Test Provider',
isActive: true,
diff --git a/packages/synapse-sdk/src/test/storage-upload.test.ts b/packages/synapse-sdk/src/test/storage-upload.test.ts
index 826269dbf..6cf4c8651 100644
--- a/packages/synapse-sdk/src/test/storage-upload.test.ts
+++ b/packages/synapse-sdk/src/test/storage-upload.test.ts
@@ -4,25 +4,25 @@
* Basic tests for Synapse class
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import type { AddPiecesSuccess } from '@filoz/synapse-core/sp'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { HttpResponse, http } from 'msw'
+import type { Hex } from 'viem'
import { Synapse } from '../synapse.ts'
+import type { PieceCID, PieceRecord } from '../types.ts'
import { SIZE_CONSTANTS } from '../utils/constants.ts'
-import { JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts'
-import { findAnyPieceHandler, streamingUploadHandlers } from './mocks/pdp/handlers.ts'
-import { PING } from './mocks/ping.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
describe('Storage Upload', () => {
let signer: ethers.Signer
let provider: ethers.Provider
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -31,11 +31,11 @@ describe('Storage Upload', () => {
beforeEach(() => {
server.resetHandlers()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
})
it('should enforce 127 byte minimum size limit', async () => {
- server.use(JSONRPC({ ...presets.basic, debug: false }), PING({ debug: false }))
+ server.use(Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), Mocks.PING({ debug: false }))
const synapse = await Synapse.create({ signer })
const context = await synapse.storage.createContext()
@@ -59,10 +59,10 @@ describe('Storage Upload', () => {
let addPiecesCount = 0
let uploadCompleteCount = 0
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -133,10 +133,10 @@ describe('Storage Upload', () => {
}
const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456'
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -205,10 +205,10 @@ describe('Storage Upload', () => {
baseUrl: 'https://pdp.example.com',
}
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -299,10 +299,10 @@ describe('Storage Upload', () => {
baseUrl: 'https://pdp.example.com',
}
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -352,10 +352,10 @@ describe('Storage Upload', () => {
}
const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456'
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -403,10 +403,10 @@ describe('Storage Upload', () => {
baseUrl: 'https://pdp.example.com',
}
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -451,16 +451,19 @@ describe('Storage Upload', () => {
it('should handle new server with transaction tracking', async () => {
let pieceAddedCallbackFired = false
let pieceConfirmedCallbackFired = false
+ let piecesAddedArgs: { transaction?: Hex; pieces?: Array<{ pieceCid: PieceCID }> } | null = null
+ let piecesConfirmedArgs: { dataSetId?: number; pieces?: PieceRecord[] } | null = null
let uploadCompleteCallbackFired = false
+ let resolvedDataSetId: number | undefined
const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456'
const pdpOptions = {
baseUrl: 'https://pdp.example.com',
}
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
@@ -470,11 +473,12 @@ describe('Storage Upload', () => {
})
}),
http.get<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces/added/:txHash`, ({ params }) => {
+ resolvedDataSetId = parseInt(params.id, 10)
return HttpResponse.json(
{
addMessageOk: true,
confirmedPieceIds: [0],
- dataSetId: parseInt(params.id, 10),
+ dataSetId: resolvedDataSetId,
pieceCount: 1,
piecesAdded: true,
txHash,
@@ -493,7 +497,13 @@ describe('Storage Upload', () => {
})
const expectedSize = SIZE_CONSTANTS.MIN_UPLOAD_SIZE
- await context.upload(new Uint8Array(expectedSize).fill(1), {
+ const uploadResult = await context.upload(new Uint8Array(expectedSize).fill(1), {
+ onPiecesAdded(transaction: Hex | undefined, pieces: Array<{ pieceCid: PieceCID }> | undefined) {
+ piecesAddedArgs = { transaction, pieces }
+ },
+ onPiecesConfirmed(dataSetId: number, pieces: PieceRecord[]) {
+ piecesConfirmedArgs = { dataSetId, pieces }
+ },
onPieceAdded() {
pieceAddedCallbackFired = true
},
@@ -508,6 +518,21 @@ describe('Storage Upload', () => {
assert.isTrue(pieceAddedCallbackFired, 'pieceAddedCallback should have been called')
assert.isTrue(pieceConfirmedCallbackFired, 'pieceConfirmedCallback should have been called')
assert.isTrue(uploadCompleteCallbackFired, 'uploadCompleteCallback should have been called')
+ assert.isNotNull(piecesAddedArgs, 'onPiecesAdded args should be captured')
+ assert.isNotNull(piecesConfirmedArgs, 'onPiecesConfirmed args should be captured')
+ if (piecesAddedArgs == null || piecesConfirmedArgs == null) {
+ throw new Error('Callbacks should have been called')
+ }
+ const addedArgs: { transaction?: Hex; pieces?: Array<{ pieceCid: PieceCID }> } = piecesAddedArgs
+ const confirmedArgs: { dataSetId?: number; pieces?: PieceRecord[] } = piecesConfirmedArgs
+ assert.strictEqual(addedArgs.transaction, txHash, 'onPiecesAdded should receive transaction hash')
+ assert.strictEqual(
+ addedArgs.pieces?.[0].pieceCid.toString(),
+ uploadResult.pieceCid.toString(),
+ 'onPiecesAdded should provide matching pieceCid'
+ )
+ assert.strictEqual(confirmedArgs.dataSetId, resolvedDataSetId, 'onPiecesConfirmed should provide the dataset id')
+ assert.strictEqual(confirmedArgs.pieces?.[0].pieceId, 0, 'onPiecesConfirmed should include piece IDs')
})
it('should handle ArrayBuffer input', async () => {
@@ -516,10 +541,10 @@ describe('Storage Upload', () => {
}
const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456'
server.use(
- JSONRPC({ ...presets.basic, debug: false }),
- PING(),
- ...streamingUploadHandlers(pdpOptions),
- findAnyPieceHandler(true, pdpOptions),
+ Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+ Mocks.PING(),
+ ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+ Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
return new HttpResponse(null, {
status: 201,
diff --git a/packages/synapse-sdk/src/test/storage.test.ts b/packages/synapse-sdk/src/test/storage.test.ts
index 6726fe278..2580953c2 100644
--- a/packages/synapse-sdk/src/test/storage.test.ts
+++ b/packages/synapse-sdk/src/test/storage.test.ts
@@ -1,3 +1,4 @@
+import * as Mocks from '@filoz/synapse-core/mocks'
import * as Piece from '@filoz/synapse-core/piece'
import { calculate, calculate as calculatePieceCID } from '@filoz/synapse-core/piece'
import * as SP from '@filoz/synapse-core/sp'
@@ -11,21 +12,9 @@ import { StorageContext } from '../storage/context.ts'
import { Synapse } from '../synapse.ts'
import { SIZE_CONSTANTS } from '../utils/constants.ts'
import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
-import {
- createAndAddPiecesHandler,
- finalizePieceUploadHandler,
- findPieceHandler,
- postPieceHandler,
- postPieceUploadsHandler,
- uploadPieceHandler,
- uploadPieceStreamingHandler,
-} from './mocks/pdp/handlers.ts'
-import { PING } from './mocks/ping.ts'
// MSW server for JSONRPC mocking
-const server = setup([])
+const server = setup()
function cidBytesToContractHex(bytes: Uint8Array): `0x${string}` {
return ethers.hexlify(bytes) as `0x${string}`
@@ -42,7 +31,7 @@ describe('StorageService', () => {
before(async () => {
// Set timeout to 100ms for testing
SP.setTimeout(100)
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -52,73 +41,73 @@ describe('StorageService', () => {
beforeEach(async () => {
server.resetHandlers()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
})
describe('create() factory method', () => {
it('should select a random provider when no providerId specified', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
// Should have selected one of the providers
assert.isTrue(
- service.serviceProvider === PROVIDERS.provider1.providerInfo.serviceProvider ||
- service.serviceProvider === PROVIDERS.provider2.providerInfo.serviceProvider
+ service.serviceProvider === Mocks.PROVIDERS.provider1.providerInfo.serviceProvider ||
+ service.serviceProvider === Mocks.PROVIDERS.provider2.providerInfo.serviceProvider
)
})
it('should select a random provider but filter to allow only IPNI providers', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.providerIPNI]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.providerIPNI]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.providerIPNI.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.providerIPNI.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Create storage service without specifying providerId
const service = await StorageContext.create(synapse, warmStorageService, {
withIpni: true,
})
// Should have selected one of the providers
- assert.isTrue(service.serviceProvider === PROVIDERS.providerIPNI.providerInfo.serviceProvider)
+ assert.isTrue(service.serviceProvider === Mocks.PROVIDERS.providerIPNI.providerInfo.serviceProvider)
})
it.skip('should never select a dev provider by default', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Create storage service without specifying providerId
// dev defaults to false, so dev providers should be filtered out
@@ -127,29 +116,29 @@ describe('StorageService', () => {
})
// Should have selected provider2 (non-dev), never provider1 (dev)
- assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+ assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
assert.notEqual(
service.serviceProvider,
- PROVIDERS.provider1.providerInfo.serviceProvider,
+ Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
'Should not select dev provider'
)
})
it.skip('should include dev providers when dev option is true', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Create storage service with dev: true
const service = await StorageContext.create(synapse, warmStorageService, {
@@ -158,26 +147,26 @@ describe('StorageService', () => {
// Should be able to select from either provider, including the dev one
assert.isTrue(
- service.serviceProvider === PROVIDERS.provider1.providerInfo.serviceProvider ||
- service.serviceProvider === PROVIDERS.provider2.providerInfo.serviceProvider
+ service.serviceProvider === Mocks.PROVIDERS.provider1.providerInfo.serviceProvider ||
+ service.serviceProvider === Mocks.PROVIDERS.provider2.providerInfo.serviceProvider
)
})
it.skip('should filter providers with serviceStatus=dev when dev option is false', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Create storage service with dev: false (default)
const service = await StorageContext.create(synapse, warmStorageService, {
@@ -187,71 +176,71 @@ describe('StorageService', () => {
// Should only select the production provider, not the dev one
assert.equal(
service.serviceProvider.toLowerCase(),
- PROVIDERS.provider2.providerInfo.serviceProvider.toLowerCase(),
+ Mocks.PROVIDERS.provider2.providerInfo.serviceProvider.toLowerCase(),
'Should select production provider, not dev provider'
)
assert.notEqual(
service.serviceProvider.toLowerCase(),
- PROVIDERS.provider1.providerInfo.serviceProvider.toLowerCase(),
+ Mocks.PROVIDERS.provider1.providerInfo.serviceProvider.toLowerCase(),
'Should NOT select dev provider'
)
})
it('should use specific provider when providerId specified', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Create storage service with specific providerId
const service = await StorageContext.create(synapse, warmStorageService, {
- providerId: Number(PROVIDERS.provider1.providerId),
+ providerId: Number(Mocks.PROVIDERS.provider1.providerId),
})
- assert.equal(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+ assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
})
it('should skip existing datasets and leave dataSetId undefined with providerId when forceCreateDataSet is true', async () => {
let fetchedDataSets = false
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
fetchedDataSets = true
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
- providerId: Number(PROVIDERS.provider1.providerId),
+ providerId: Number(Mocks.PROVIDERS.provider1.providerId),
forceCreateDataSet: true,
})
assert.equal(
context.serviceProvider,
- PROVIDERS.provider1.providerInfo.serviceProvider,
+ Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
'Should select the requested provider'
)
assert.equal(context.dataSetId, undefined, 'Should not have a data set id when forceCreateDataSet is true')
@@ -260,30 +249,30 @@ describe('StorageService', () => {
it('should skip existing datasets and leave dataSetId undefined with providerAddress when forceCreateDataSet is true', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
- providerAddress: PROVIDERS.provider1.providerInfo.serviceProvider,
+ providerAddress: Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
forceCreateDataSet: true,
})
assert.equal(
context.serviceProvider,
- PROVIDERS.provider1.providerInfo.serviceProvider,
+ Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
'Should select the requested provider'
)
assert.equal(context.dataSetId, undefined, 'Should not have a data set id when forceCreateDataSet is true')
@@ -291,40 +280,40 @@ describe('StorageService', () => {
it('should reuse existing data set with providerId when forceCreateDataSet is not set', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
- providerId: Number(PROVIDERS.provider1.providerId),
+ providerId: Number(Mocks.PROVIDERS.provider1.providerId),
})
// Should have reused existing data set (not created new one)
- assert.equal(context.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+ assert.equal(context.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
assert.equal(context.dataSetId, 1, 'Should reuse the existing data set id when forceCreateDataSet is not set')
})
it('should throw when no approved providers available', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getApprovedProviders() {
return [[]]
},
@@ -332,7 +321,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService)
@@ -344,25 +333,25 @@ describe('StorageService', () => {
it('should throw when specified provider not found', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
providerId: 999,
@@ -375,28 +364,28 @@ describe('StorageService', () => {
it('should select existing data set when available', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
- providerId: Number(PROVIDERS.provider1.providerId),
+ providerId: Number(Mocks.PROVIDERS.provider1.providerId),
})
// Should use existing data set
@@ -411,10 +400,10 @@ describe('StorageService', () => {
it('should prefer data sets with existing pieces', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getNextPieceId: (args) => {
const [dataSetId] = args
if (dataSetId === 2n) {
@@ -425,7 +414,7 @@ describe('StorageService', () => {
},
},
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: (args) => {
@@ -438,12 +427,12 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
} else {
@@ -454,24 +443,24 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 2n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 2n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
}
},
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
providerId: 1,
@@ -485,28 +474,28 @@ describe('StorageService', () => {
let providerCallbackFired = false
let dataSetCallbackFired = false
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
await StorageContext.create(synapse, warmStorageService, {
- providerId: Number(PROVIDERS.provider1.providerId),
+ providerId: Number(Mocks.PROVIDERS.provider1.providerId),
callbacks: {
onProviderSelected: (provider) => {
- assert.equal(provider.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+ assert.equal(provider.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
providerCallbackFired = true
},
onDataSetResolved: (info) => {
@@ -523,10 +512,10 @@ describe('StorageService', () => {
it('should select by explicit dataSetId', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: (args) => {
@@ -539,12 +528,12 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
} else {
@@ -555,74 +544,74 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 2n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 2n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
}
},
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 2,
})
assert.equal(service.dataSetId, 2)
- assert.equal(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+ assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
})
it('should select by providerAddress', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
- providerAddress: PROVIDERS.provider2.providerInfo.serviceProvider,
+ providerAddress: Mocks.PROVIDERS.provider2.providerInfo.serviceProvider,
})
- assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+ assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
})
it('should throw when dataSetId not found', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
@@ -630,31 +619,31 @@ describe('StorageService', () => {
})
assert.fail('Should have thrown error')
} catch (error: any) {
- assert.include(error.message, 'Data set 999 not found')
+ assert.include(error.message, 'Data set 999 does not exist')
}
})
it('should throw when dataSetId conflicts with providerId', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getAllDataSetMetadata() {
return [[], []]
},
},
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
@@ -670,13 +659,13 @@ describe('StorageService', () => {
it('should throw when providerAddress not approved', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
providerAddress: '0x6666666666666666666666666666666666666666',
@@ -689,10 +678,10 @@ describe('StorageService', () => {
it('should filter by CDN setting in smart selection', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n, 2n]],
getAllDataSetMetadata: (args) => {
const [dataSetId] = args
@@ -714,12 +703,12 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
} else {
@@ -730,22 +719,22 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 2n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 2n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
}
},
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Test with CDN = false
const serviceNoCDN = await StorageContext.create(synapse, warmStorageService, {
@@ -762,28 +751,28 @@ describe('StorageService', () => {
it.skip('should handle data sets not managed by current WarmStorage', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
// Should create new data set since existing one is not managed
const service = await StorageContext.create(synapse, warmStorageService, {})
// Should have selected a provider but no existing data set
assert.exists(service.serviceProvider)
- assert.notEqual(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+ assert.notEqual(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
})
it('should throw when data set belongs to non-approved provider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: () => {
@@ -794,21 +783,21 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 3n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
},
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
@@ -823,13 +812,14 @@ describe('StorageService', () => {
it('should handle data set not live', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
dataSetLive: () => [false],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
},
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: () => {
@@ -840,37 +830,37 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
},
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
assert.fail('Should have thrown error')
} catch (error: any) {
- assert.include(error.message, 'Data set 1 not found')
+ assert.include(error.message, 'Data set 1 does not exist or is not live')
}
})
it('should handle conflict between dataSetId and providerAddress', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: () => {
@@ -881,21 +871,21 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider1,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
},
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService, {
@@ -925,10 +915,10 @@ describe('StorageService', () => {
it('should match providers by ID even when payee differs from serviceProvider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[1n]],
getAllDataSetMetadata: () => [[], []],
getDataSet: () => {
@@ -939,45 +929,45 @@ describe('StorageService', () => {
clientDataSetId: 0n,
commissionBps: 100n,
dataSetId: 1n,
- payee: ADDRESSES.serviceProvider2,
- payer: ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider2,
+ payer: Mocks.ADDRESSES.client1,
pdpEndEpoch: 0n,
pdpRailId: 1n,
providerId: 1n,
- serviceProvider: ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
},
]
},
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {})
// Should successfully match by provider ID despite different payee
assert.equal(service.dataSetId, 1)
assert.equal(service.provider.id, 1)
- assert.equal(service.provider.serviceProvider, ADDRESSES.serviceProvider1)
+ assert.equal(service.provider.serviceProvider, Mocks.ADDRESSES.serviceProvider1)
})
})
describe('preflightUpload', () => {
it('should calculate costs without CDN', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
operatorApprovals: () => [true, 2207579500n, 220757940000000n, 220757n, 220757n, 86400n],
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: false,
})
@@ -992,17 +982,17 @@ describe('StorageService', () => {
it('should calculate costs with CDN', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
operatorApprovals: () => [true, 2207579500n, 220757940000000n, 220757n, 220757n, 86400n],
},
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: true,
})
@@ -1018,13 +1008,13 @@ describe('StorageService', () => {
it('should handle insufficient allowances', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: true,
})
@@ -1037,13 +1027,13 @@ describe('StorageService', () => {
it('should enforce minimum size limit in preflightUpload', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: true,
})
@@ -1060,13 +1050,13 @@ describe('StorageService', () => {
it('should enforce maximum size limit in preflightUpload', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: true,
})
@@ -1088,22 +1078,22 @@ describe('StorageService', () => {
const testData = new Uint8Array(127).fill(42) // 127 bytes to meet minimum
const testPieceCID = calculate(testData).toString()
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
- http.get(`https://${ADDRESSES.client1}.calibration.filbeam.io/:cid`, async () => {
+ Mocks.PING(),
+ http.get(`https://${Mocks.ADDRESSES.client1}.calibration.filbeam.io/:cid`, async () => {
return HttpResponse.text('Not Found', {
status: 404,
})
}),
- findPieceHandler(testPieceCID, true, pdpOptions),
+ Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
http.get('https://pdp.example.com/piece/:pieceCid', async () => {
return HttpResponse.arrayBuffer(testData.buffer)
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
withCDN: true,
})
@@ -1117,17 +1107,17 @@ describe('StorageService', () => {
const testPieceCID = calculate(testData).toString()
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
- findPieceHandler(testPieceCID, true, pdpOptions),
+ Mocks.PING(),
+ Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
http.get('https://pdp.example.com/piece/:pieceCid', async () => {
return HttpResponse.error()
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
try {
@@ -1143,17 +1133,17 @@ describe('StorageService', () => {
const testPieceCID = calculate(testData).toString()
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
- findPieceHandler(testPieceCID, true, pdpOptions),
+ Mocks.PING(),
+ Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
http.get('https://pdp.example.com/piece/:pieceCid', async () => {
return HttpResponse.arrayBuffer(testData.buffer)
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
// Test with and without empty options object
@@ -1168,16 +1158,22 @@ describe('StorageService', () => {
describe('upload', () => {
it('should handle errors in batch processing gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
+ Mocks.PING(),
        http.post<Record<string, string>, { pieceCid: string }>('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.error()
- })
+ }),
+          http.post<Record<string, string>, { pieceCid: string }>(
+ 'https://pdp.example.com/pdp/piece/uploads',
+ async () => {
+ return HttpResponse.error()
+ }
+ )
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
// Create 3 uploads
@@ -1209,13 +1205,13 @@ describe('StorageService', () => {
it('should enforce 1 GiB size limit', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
// Create minimal data but mock length to simulate oversized data
@@ -1237,16 +1233,16 @@ describe('StorageService', () => {
it.skip('should fail if new server verification fails', async () => {
const testData = new Uint8Array(127).fill(42) // 127 bytes to meet minimum
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
+ Mocks.PING(),
        http.post<Record<string, string>, { pieceCid: string }>('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.error()
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
try {
@@ -1266,10 +1262,10 @@ describe('StorageService', () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
+ Mocks.PING(),
http.post('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.text('Created', {
status: 201,
@@ -1278,11 +1274,11 @@ describe('StorageService', () => {
},
})
}),
- uploadPieceHandler(mockUuid, pdpOptions),
+ Mocks.pdp.uploadPieceHandler(mockUuid, pdpOptions),
http.get('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.json({ pieceCid: testPieceCID })
}),
- createAndAddPiecesHandler(mockTxHash, pdpOptions),
+ Mocks.pdp.createAndAddPiecesHandler(mockTxHash, pdpOptions),
http.get('https://pdp.example.com/pdp/data-sets/created/:tx', async () => {
return HttpResponse.json(
{
@@ -1315,7 +1311,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
try {
@@ -1334,10 +1330,10 @@ describe('StorageService', () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
+ Mocks.PING(),
http.post('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.text('Created', {
status: 201,
@@ -1346,11 +1342,11 @@ describe('StorageService', () => {
},
})
}),
- uploadPieceHandler(mockUuid, pdpOptions),
+ Mocks.pdp.uploadPieceHandler(mockUuid, pdpOptions),
http.get('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.json({ pieceCid: testPieceCID })
}),
- createAndAddPiecesHandler(mockTxHash, pdpOptions),
+ Mocks.pdp.createAndAddPiecesHandler(mockTxHash, pdpOptions),
http.get('https://pdp.example.com/pdp/data-sets/created/:tx', async () => {
return HttpResponse.json(
{
@@ -1383,7 +1379,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
try {
@@ -1399,17 +1395,17 @@ describe('StorageService', () => {
const testPieceCID = Piece.calculate(testData).toString()
const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
- postPieceHandler(testPieceCID, mockUuid, pdpOptions),
+ Mocks.PING(),
+ Mocks.pdp.postPieceHandler(testPieceCID, mockUuid, pdpOptions),
http.put('https://pdp.example.com/pdp/piece/upload/:uuid', async () => {
return HttpResponse.error()
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
try {
@@ -1425,20 +1421,20 @@ describe('StorageService', () => {
const testPieceCID = Piece.calculate(testData).toString()
const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
- }),
- PING(),
- postPieceUploadsHandler(mockUuid, pdpOptions),
- uploadPieceStreamingHandler(mockUuid, pdpOptions),
- finalizePieceUploadHandler(mockUuid, undefined, pdpOptions),
- findPieceHandler(testPieceCID, true, pdpOptions),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ }),
+ Mocks.PING(),
+ Mocks.pdp.postPieceUploadsHandler(mockUuid, pdpOptions),
+ Mocks.pdp.uploadPieceStreamingHandler(mockUuid, pdpOptions),
+ Mocks.pdp.finalizePieceUploadHandler(mockUuid, undefined, pdpOptions),
+ Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
http.post('https://pdp.example.com/pdp/data-sets/:id/pieces', () => {
return HttpResponse.error()
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1456,41 +1452,41 @@ describe('StorageService', () => {
describe('selectRandomProvider with ping validation', () => {
it('should select first provider that responds to ping', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- http.get(`${PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
+ http.get(`${Mocks.PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
return HttpResponse.error()
}),
- PING({
- baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
// Should have selected the second provider (first one failed ping)
- assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+ assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
})
// Test removed: selectRandomProvider no longer supports exclusion functionality
it('should throw error when all providers fail ping', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
}),
- http.get(`${PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
+ http.get(`${Mocks.PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
return HttpResponse.error()
}),
- http.get(`${PROVIDERS.provider2.products[0].offering.serviceURL}/pdp/ping`, async () => {
+ http.get(`${Mocks.PROVIDERS.provider2.products[0].offering.serviceURL}/pdp/ping`, async () => {
return HttpResponse.error()
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
try {
await StorageContext.create(synapse, warmStorageService)
@@ -1506,16 +1502,16 @@ describe('StorageService', () => {
describe('getProviderInfo', () => {
it('should return provider info through WarmStorageService', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService)
const providerInfo = await service.getProviderInfo()
@@ -1539,6 +1535,8 @@ describe('StorageService', () => {
minProvingPeriodInEpochs: '0x1e',
location: '0x75732d65617374',
paymentTokenAddress: '0xb3042734b608a1b16e9e86b374a3f3e389b4cdf0',
+ endorsement0:
+ '0x00000000ffffffff00000000ffffffff1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b',
},
data: {
serviceURL: 'https://provider1.example.com',
@@ -1550,6 +1548,14 @@ describe('StorageService', () => {
minProvingPeriodInEpochs: 30n,
location: 'us-east',
paymentTokenAddress: '0xb3042734b608a1b16e9e86b374a3f3e389b4cdf0',
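+              // Expected decoded form of the endorsement0 capability listed above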
+ endorsements: {
+ '0x50724807600e804Fe842439860D5b62baa26aFff': {
+ nonce: 4294967295n,
+ notAfter: 4294967295n,
+ signature:
+ '0x1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b',
+ },
+ },
},
},
},
@@ -1583,20 +1589,20 @@ describe('StorageService', () => {
return { data: cidBytesToContractHex(cid.bytes) }
})
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => [piecesData, [101n, 102n], false],
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1611,20 +1617,20 @@ describe('StorageService', () => {
it('should handle empty data set pieces', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => [[], [], false],
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1638,20 +1644,20 @@ describe('StorageService', () => {
it('should handle invalid CID in response', async () => {
const invalidCidBytes = cidBytesToContractHex(ethers.toUtf8Bytes('invalid-cid-format'))
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => [[{ data: invalidCidBytes }], [101n], false],
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1668,22 +1674,22 @@ describe('StorageService', () => {
it('should handle PDP server errors', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => {
throw new Error('Data set not found: 999')
},
},
}),
- PING({
- baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+ Mocks.PING({
+ baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1702,10 +1708,10 @@ describe('StorageService', () => {
const mockPieceCID = 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace'
it('should return exists=false when piece not found on provider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1720,7 +1726,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1735,11 +1741,11 @@ describe('StorageService', () => {
it('should return piece status with proof timing when piece exists', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(4000n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1757,7 +1763,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1774,11 +1780,11 @@ describe('StorageService', () => {
it('should detect when in challenge window', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(5030n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1791,10 +1797,10 @@ describe('StorageService', () => {
nextChallengeEpoch: 5000,
})
}),
- findPieceHandler(mockPieceCID, true, pdpOptions)
+ Mocks.pdp.findPieceHandler(mockPieceCID, true, pdpOptions)
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1808,11 +1814,11 @@ describe('StorageService', () => {
it('should detect when proof is overdue', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(5100n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1830,7 +1836,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1843,11 +1849,11 @@ describe('StorageService', () => {
it('should handle data set with nextChallengeEpoch=0', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(5100n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1865,7 +1871,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1880,11 +1886,11 @@ describe('StorageService', () => {
it('should handle trailing slash in retrieval URL', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(5100n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1902,7 +1908,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1919,13 +1925,13 @@ describe('StorageService', () => {
it('should handle invalid PieceCID', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
}),
- PING()
+ Mocks.PING()
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1940,11 +1946,11 @@ describe('StorageService', () => {
it('should calculate hours until challenge window', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(4880n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.json({
id: 1,
@@ -1962,7 +1968,7 @@ describe('StorageService', () => {
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -1976,18 +1982,18 @@ describe('StorageService', () => {
it('should handle data set data fetch failure gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_blockNumber: numberToHex(4880n),
}),
- PING(),
+ Mocks.PING(),
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => {
return HttpResponse.error()
}),
- findPieceHandler(mockPieceCID, true, pdpOptions)
+ Mocks.pdp.findPieceHandler(mockPieceCID, true, pdpOptions)
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const service = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -2003,6 +2009,46 @@ describe('StorageService', () => {
})
})
+ describe('getScheduledRemovals', () => {
+ it('should return scheduled removals for the data set', async () => {
+ server.use(
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ pdpVerifier: {
+ ...Mocks.presets.basic.pdpVerifier,
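+              // Contract mock returns piece IDs as bigints; the context is expected to surface them as plain numbers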
+ getScheduledRemovals: () => [[1n, 2n, 5n]],
+ },
+ })
+ )
+
+ const synapse = await Synapse.create({ signer })
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
+ const context = await StorageContext.create(synapse, warmStorageService, {
+ dataSetId: 1,
+ })
+
+ const scheduledRemovals = await context.getScheduledRemovals()
+
+ assert.deepEqual(scheduledRemovals, [1, 2, 5])
+ })
+
+ it('should return an empty array when no data set is configured', async () => {
+ server.use(Mocks.JSONRPC({ ...Mocks.presets.basic }), Mocks.PING())
+
+ const synapse = await Synapse.create({ signer })
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
+ const context = await StorageContext.create(synapse, warmStorageService, {
+ dataSetId: 1,
+ })
+
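+      // Clear the internal data set reference to simulate a context with no data set configured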
+ ;(context as any)._dataSetId = undefined
+
+ const scheduledRemovals = await context.getScheduledRemovals()
+
+ assert.deepEqual(scheduledRemovals, [])
+ })
+ })
+
describe('getPieces', () => {
it('should get all active pieces with pagination', async () => {
// Use actual valid PieceCIDs from test data
@@ -2012,11 +2058,11 @@ describe('StorageService', () => {
// Mock getActivePieces to return paginated results
server.use(
- PING(),
- JSONRPC({
- ...presets.basic,
+ Mocks.PING(),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: (args) => {
const offset = Number(args[1])
@@ -2039,7 +2085,7 @@ describe('StorageService', () => {
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -2064,17 +2110,17 @@ describe('StorageService', () => {
it('should handle empty results', async () => {
// Mock getActivePieces to return no pieces
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => [[], [], false],
},
})
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -2089,10 +2135,10 @@ describe('StorageService', () => {
it('should handle AbortSignal in getPieces', async () => {
const controller = new AbortController()
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -2117,10 +2163,10 @@ describe('StorageService', () => {
// Mock getActivePieces to return paginated results
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: (args) => {
const offset = Number(args[1])
@@ -2139,7 +2185,7 @@ describe('StorageService', () => {
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
@@ -2165,10 +2211,10 @@ describe('StorageService', () => {
// Mock getActivePieces to return a result that triggers pagination
let callCount = 0
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
getActivePieces: () => {
callCount++
// Only return data on first call, then abort
@@ -2183,7 +2229,7 @@ describe('StorageService', () => {
)
const synapse = await Synapse.create({ signer })
- const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
const context = await StorageContext.create(synapse, warmStorageService, {
dataSetId: 1,
})
diff --git a/packages/synapse-sdk/src/test/synapse.test.ts b/packages/synapse-sdk/src/test/synapse.test.ts
index 4e801125c..fdf5f8f33 100644
--- a/packages/synapse-sdk/src/test/synapse.test.ts
+++ b/packages/synapse-sdk/src/test/synapse.test.ts
@@ -4,6 +4,7 @@
* Basic tests for Synapse class
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import * as Piece from '@filoz/synapse-core/piece'
import { assert } from 'chai'
import { ethers } from 'ethers'
@@ -13,32 +14,22 @@ import pDefer from 'p-defer'
import { type Address, bytesToHex, type Hex, isAddressEqual, numberToBytes, parseUnits, stringToHex } from 'viem'
import { PaymentsService } from '../payments/index.ts'
import { PDP_PERMISSIONS } from '../session/key.ts'
+import { SPRegistryService } from '../sp-registry/service.ts'
import type { StorageContext } from '../storage/context.ts'
import { Synapse } from '../synapse.ts'
import { SIZE_CONSTANTS } from '../utils/constants.ts'
import { makeDataSetCreatedLog } from './mocks/events.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
-import {
- createAndAddPiecesHandler,
- dataSetCreationStatusHandler,
- finalizePieceUploadHandler,
- findPieceHandler,
- type PDPMockOptions,
- pieceAdditionStatusHandler,
- postPieceUploadsHandler,
- uploadPieceStreamingHandler,
-} from './mocks/pdp/handlers.ts'
-import { PING } from './mocks/ping.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
+
+const providerIds = [Number(Mocks.PROVIDERS.provider1.providerId), Number(Mocks.PROVIDERS.provider2.providerId)]
describe('Synapse', () => {
let signer: ethers.Signer
let provider: ethers.Provider
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -47,12 +38,12 @@ describe('Synapse', () => {
beforeEach(() => {
server.resetHandlers()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
})
describe('Instantiation', () => {
it('should create instance with signer', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
assert.exists(synapse)
assert.exists(synapse.payments)
@@ -60,7 +51,7 @@ describe('Synapse', () => {
})
it('should create instance with provider', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ provider })
assert.exists(synapse)
assert.exists(synapse.payments)
@@ -68,7 +59,7 @@ describe('Synapse', () => {
})
it('should create instance with private key', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const privateKey = '0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
const rpcURL = 'https://api.calibration.node.glif.io/rpc/v1'
const synapse = await Synapse.create({ privateKey, rpcURL })
@@ -78,14 +69,14 @@ describe('Synapse', () => {
})
it('should apply NonceManager by default', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
assert.exists(synapse)
// We can't directly check if NonceManager is applied, but we can verify the instance is created
})
it('should allow disabling NonceManager', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({
signer,
disableNonceManager: true,
@@ -144,8 +135,8 @@ describe('Synapse', () => {
// Create mock provider with unsupported chain ID
// const unsupportedProvider = createMockProvider(999999)
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_chainId: '999999',
})
)
@@ -160,8 +151,8 @@ describe('Synapse', () => {
it('should accept calibration network', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_chainId: '314159',
})
)
@@ -172,7 +163,7 @@ describe('Synapse', () => {
describe('StorageManager access', () => {
it('should provide access to StorageManager via synapse.storage', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
// Should be able to access storage manager
@@ -188,7 +179,7 @@ describe('Synapse', () => {
})
it('should create storage manager with CDN settings', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({
signer,
withCDN: true,
@@ -201,7 +192,7 @@ describe('Synapse', () => {
})
it('should return same storage manager instance', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
const storage1 = synapse.storage
@@ -217,7 +208,7 @@ describe('Synapse', () => {
const FAKE_TX_HASH = '0x3816d82cb7a6f5cde23f4d63c0763050d13c6b6dc659d0a7e6eba80b0ec76a18'
const FAKE_TX = {
hash: FAKE_TX_HASH,
- from: ADDRESSES.serviceProvider1,
+ from: Mocks.ADDRESSES.serviceProvider1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -238,20 +229,20 @@ describe('Synapse', () => {
logs: [makeDataSetCreatedLog(DATA_SET_ID, 1)],
}
beforeEach(() => {
- const pdpOptions: PDPMockOptions = {
+ const pdpOptions: Mocks.PingMockOptions = {
baseUrl: 'https://pdp.example.com',
}
- server.use(PING(pdpOptions))
+ server.use(Mocks.PING(pdpOptions))
})
it('should storage.createContext with session key', async () => {
const signerAddress = await signer.getAddress()
- const sessionKeySigner = new ethers.Wallet(PRIVATE_KEYS.key2)
+ const sessionKeySigner = new ethers.Wallet(Mocks.PRIVATE_KEYS.key2)
const sessionKeyAddress = await sessionKeySigner.getAddress()
const EXPIRY = BigInt(1757618883)
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
sessionKeyRegistry: {
authorizationExpiry: (args) => {
const client = args[0]
@@ -264,11 +255,11 @@ describe('Synapse', () => {
},
},
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
operatorApprovals: ([token, client, operator]) => {
- assert.equal(token, ADDRESSES.calibration.usdfcToken)
+ assert.equal(token, Mocks.ADDRESSES.calibration.usdfcToken)
assert.equal(client, signerAddress)
- assert.equal(operator, ADDRESSES.calibration.warmStorage)
+ assert.equal(operator, Mocks.ADDRESSES.calibration.warmStorage)
return [
true, // isApproved
BigInt(127001 * 635000000), // rateAllowance
@@ -280,7 +271,7 @@ describe('Synapse', () => {
},
accounts: ([token, user]) => {
assert.equal(user, signerAddress)
- assert.equal(token, ADDRESSES.calibration.usdfcToken)
+ assert.equal(token, Mocks.ADDRESSES.calibration.usdfcToken)
return [BigInt(127001 * 635000000), BigInt(0), BigInt(0), BigInt(0)]
},
},
@@ -319,7 +310,7 @@ describe('Synapse', () => {
describe('Payment access', () => {
it('should provide read-only access to payments', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
// Should be able to access payments
@@ -342,17 +333,17 @@ describe('Synapse', () => {
describe('getProviderInfo', () => {
it('should get provider info for valid approved provider', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ provider })
- const providerInfo = await synapse.getProviderInfo(ADDRESSES.serviceProvider1)
+ const providerInfo = await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1)
- assert.ok(isAddressEqual(providerInfo.serviceProvider as Address, ADDRESSES.serviceProvider1))
+ assert.ok(isAddressEqual(providerInfo.serviceProvider as Address, Mocks.ADDRESSES.serviceProvider1))
assert.equal(providerInfo.products.PDP?.data.serviceURL, 'https://pdp.example.com')
})
it('should throw for invalid provider address', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
try {
@@ -365,10 +356,10 @@ describe('Synapse', () => {
it('should throw for non-found provider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: () => [
{
providerId: 0n,
@@ -387,7 +378,7 @@ describe('Synapse', () => {
try {
const synapse = await Synapse.create({ signer })
- await synapse.getProviderInfo(ADDRESSES.serviceProvider1)
+ await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'not found in registry')
@@ -396,16 +387,16 @@ describe('Synapse', () => {
it('should throw when provider not found', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: () => [
{
providerId: 0n,
info: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
name: '',
description: '',
isActive: false,
@@ -418,7 +409,7 @@ describe('Synapse', () => {
try {
const synapse = await Synapse.create({ signer })
- await synapse.getProviderInfo(ADDRESSES.serviceProvider1)
+ await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1)
assert.fail('Should have thrown')
} catch (error: any) {
assert.include(error.message, 'not found')
@@ -428,7 +419,7 @@ describe('Synapse', () => {
describe('download', () => {
it('should validate PieceCID input', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const synapse = await Synapse.create({ signer })
try {
@@ -444,7 +435,7 @@ describe('Synapse', () => {
// Create test data that matches the expected PieceCID
const testData = new TextEncoder().encode('test data')
server.use(
- JSONRPC(presets.basic),
+ Mocks.JSONRPC(Mocks.presets.basic),
http.get('https://pdp.example.com/pdp/piece', async ({ request }) => {
const url = new URL(request.url)
const pieceCid = url.searchParams.get('pieceCid')
@@ -475,7 +466,7 @@ describe('Synapse', () => {
const deferred = pDefer<{ cid: string; wallet: string }>()
const testData = new TextEncoder().encode('test data')
server.use(
- JSONRPC({ ...presets.basic }),
+ Mocks.JSONRPC({ ...Mocks.presets.basic }),
http.get<{ cid: string; wallet: string }>(`https://:wallet.calibration.filbeam.io/:cid`, async ({ params }) => {
deferred.resolve(params)
return HttpResponse.arrayBuffer(testData.buffer)
@@ -506,7 +497,7 @@ describe('Synapse', () => {
const { cid, wallet } = result
assert.equal(cid, testPieceCid)
- assert.ok(isAddressEqual(wallet as Address, ADDRESSES.client1))
+ assert.ok(isAddressEqual(wallet as Address, Mocks.ADDRESSES.client1))
// Test without explicit withCDN (should use instance default)
const data = await synapse.download(testPieceCid)
@@ -519,13 +510,13 @@ describe('Synapse', () => {
const testData = new TextEncoder().encode('test data')
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderByAddress: (data) => {
providerAddressReceived = data[0]
- return presets.basic.serviceRegistry.getProviderByAddress(data)
+ return Mocks.presets.basic.serviceRegistry.getProviderByAddress(data)
},
},
}),
@@ -554,7 +545,7 @@ describe('Synapse', () => {
it('should handle download errors', async () => {
server.use(
- JSONRPC(presets.basic),
+ Mocks.JSONRPC(Mocks.presets.basic),
http.get('https://pdp.example.com/pdp/piece', async () => {
return HttpResponse.error()
})
@@ -580,7 +571,7 @@ describe('Synapse', () => {
describe('getStorageInfo', () => {
it('should return comprehensive storage information', async () => {
- server.use(JSONRPC({ ...presets.basic }))
+ server.use(Mocks.JSONRPC({ ...Mocks.presets.basic }))
const synapse = await Synapse.create({ signer })
const storageInfo = await synapse.getStorageInfo()
@@ -596,8 +587,8 @@ describe('Synapse', () => {
// Check providers
assert.equal(storageInfo.providers.length, 2)
- assert.equal(storageInfo.providers[0].serviceProvider, ADDRESSES.serviceProvider1)
- assert.equal(storageInfo.providers[1].serviceProvider, ADDRESSES.serviceProvider2)
+ assert.equal(storageInfo.providers[0].serviceProvider, Mocks.ADDRESSES.serviceProvider1)
+ assert.equal(storageInfo.providers[1].serviceProvider, Mocks.ADDRESSES.serviceProvider2)
// Check service parameters
assert.equal(storageInfo.serviceParameters.network, 'calibration')
@@ -610,15 +601,15 @@ describe('Synapse', () => {
// Check allowances (including operator approval flag)
assert.exists(storageInfo.allowances)
assert.equal(storageInfo.allowances?.isApproved, true)
- assert.equal(storageInfo.allowances?.service, ADDRESSES.calibration.warmStorage)
+ assert.equal(storageInfo.allowances?.service, Mocks.ADDRESSES.calibration.warmStorage)
assert.equal(storageInfo.allowances?.rateAllowance, 1000000n)
assert.equal(storageInfo.allowances?.lockupAllowance, 10000000n)
})
it('should handle missing allowances gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
operatorApprovals: () => [false, 0n, 0n, 0n, 0n, 0n],
},
@@ -634,7 +625,7 @@ describe('Synapse', () => {
assert.exists(storageInfo.serviceParameters)
assert.deepEqual(storageInfo.allowances, {
isApproved: false,
- service: ADDRESSES.calibration.warmStorage,
+ service: Mocks.ADDRESSES.calibration.warmStorage,
rateAllowance: 0n,
lockupAllowance: 0n,
rateUsed: 0n,
@@ -644,10 +635,10 @@ describe('Synapse', () => {
it('should filter out zero address providers', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
serviceRegistry: {
- ...presets.basic.serviceRegistry,
+ ...Mocks.presets.basic.serviceRegistry,
getProviderWithProduct: (data) => {
const [providerId] = data
if (providerId === 1n) {
@@ -655,8 +646,8 @@ describe('Synapse', () => {
{
providerId,
providerInfo: {
- serviceProvider: ADDRESSES.serviceProvider1,
- payee: ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
+ payee: Mocks.ADDRESSES.payee1,
isActive: true,
name: 'Test Provider',
description: 'Test Provider',
@@ -681,7 +672,7 @@ describe('Synapse', () => {
bytesToHex(numberToBytes(1000000n)),
bytesToHex(numberToBytes(2880n)),
stringToHex('US'),
- ADDRESSES.calibration.usdfcToken,
+ Mocks.ADDRESSES.calibration.usdfcToken,
],
},
]
@@ -690,8 +681,8 @@ describe('Synapse', () => {
{
providerId: 0n,
providerInfo: {
- serviceProvider: ADDRESSES.zero,
- payee: ADDRESSES.zero,
+ serviceProvider: Mocks.ADDRESSES.zero,
+ payee: Mocks.ADDRESSES.zero,
isActive: false,
name: '',
description: '',
@@ -715,15 +706,15 @@ describe('Synapse', () => {
// Should filter out zero address provider
assert.equal(storageInfo.providers.length, 1)
- assert.equal(storageInfo.providers[0].serviceProvider, ADDRESSES.serviceProvider1)
+ assert.equal(storageInfo.providers[0].serviceProvider, Mocks.ADDRESSES.serviceProvider1)
})
it('should handle contract call failures', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorage: {
- ...presets.basic.warmStorage,
+ ...Mocks.presets.basic.warmStorage,
getServicePrice: () => {
throw new Error('RPC error')
},
@@ -746,15 +737,15 @@ describe('Synapse', () => {
beforeEach(async () => {
server.use(
- JSONRPC({
- ...presets.basic,
- serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
})
)
synapse = await Synapse.create({ signer })
- for (const { products } of [PROVIDERS.provider1, PROVIDERS.provider2]) {
+ for (const { products } of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) {
server.use(
- PING({
+ Mocks.PING({
baseUrl: products[0].offering.serviceURL,
})
)
@@ -763,11 +754,11 @@ describe('Synapse', () => {
it('selects specified providerIds', async () => {
const contexts = await synapse.storage.createContexts({
- providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider2.providerId].map(Number),
+ providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider2.providerId].map(Number),
})
assert.equal(contexts.length, 2)
- assert.equal(BigInt(contexts[0].provider.id), PROVIDERS.provider1.providerId)
- assert.equal(BigInt(contexts[1].provider.id), PROVIDERS.provider2.providerId)
+ assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId)
+ assert.equal(BigInt(contexts[1].provider.id), Mocks.PROVIDERS.provider2.providerId)
// should create new data sets
assert.equal((contexts[0] as any)._dataSetId, undefined)
assert.equal((contexts[1] as any)._dataSetId, undefined)
@@ -779,12 +770,12 @@ describe('Synapse', () => {
withCDN: '',
}
const contexts = await synapse.storage.createContexts({
- providerIds: [PROVIDERS.provider1.providerId].map(Number),
+ providerIds: [Mocks.PROVIDERS.provider1.providerId].map(Number),
metadata,
count: 1,
})
assert.equal(contexts.length, 1)
- assert.equal(BigInt(contexts[0].provider.id), PROVIDERS.provider1.providerId)
+ assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId)
// should use existing data set
assert.equal((contexts[0] as any)._dataSetId, 1n)
})
@@ -794,13 +785,13 @@ describe('Synapse', () => {
withCDN: '',
}
const contexts = await synapse.storage.createContexts({
- providerIds: [PROVIDERS.provider1.providerId].map(Number),
+ providerIds: [Mocks.PROVIDERS.provider1.providerId].map(Number),
metadata,
count: 1,
forceCreateDataSets: true,
})
assert.equal(contexts.length, 1)
- assert.equal(BigInt(contexts[0].provider.id), PROVIDERS.provider1.providerId)
+ assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId)
// should create new data set
assert.equal((contexts[0] as any)._dataSetId, undefined)
})
@@ -827,19 +818,26 @@ describe('Synapse', () => {
})
it('fails when provided an invalid data set id', async () => {
- for (const dataSetId of [0, 2]) {
- try {
- await synapse.storage.createContexts({
- count: 1,
- dataSetIds: [dataSetId],
- })
- assert.fail('Expected createContexts to fail for invalid specified data set id')
- } catch (error: any) {
- assert.equal(
- error?.message,
- `StorageContext resolveByDataSetId failed: Data set ${dataSetId} not found, not owned by ${ADDRESSES.client1}, or not managed by the current WarmStorage contract`
- )
- }
+ // Test dataSetId 0: should fail with "does not exist" (pdpRailId is 0)
+ try {
+ await synapse.storage.createContexts({
+ count: 1,
+ dataSetIds: [0],
+ })
+ assert.fail('Expected createContexts to fail for data set id 0')
+ } catch (error: any) {
+ assert.include(error?.message, 'Data set 0 does not exist')
+ }
+
+ // Test dataSetId 2: should fail (not in mock data, so pdpRailId will be 0)
+ try {
+ await synapse.storage.createContexts({
+ count: 1,
+ dataSetIds: [2],
+ })
+ assert.fail('Expected createContexts to fail for data set id 2')
+ } catch (error: any) {
+ assert.include(error?.message, 'Data set 2 does not exist')
}
})
@@ -867,7 +865,7 @@ describe('Synapse', () => {
}
const contexts = await synapse.storage.createContexts({
count: 2,
- providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider1.providerId].map(Number),
+ providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider1.providerId].map(Number),
metadata,
})
assert.equal(contexts.length, 2)
@@ -883,7 +881,7 @@ describe('Synapse', () => {
const contexts = await synapse.storage.createContexts({
count: 2,
dataSetIds: [1, 1],
- providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider1.providerId].map(Number),
+ providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider1.providerId].map(Number),
metadata,
})
assert.equal(contexts.length, 2)
@@ -945,6 +943,74 @@ describe('Synapse', () => {
assert.isTrue(defaultContexts === contexts)
})
+ providerIds.forEach((endorsedProviderId, index) => {
+ describe(`when endorsing providers[${index}]`, () => {
+ const getPDPService = SPRegistryService.prototype.getPDPService
+ const getProviders = SPRegistryService.prototype.getProviders
+ beforeEach(async () => {
+ // give the endorsed provider a mock endorsement; all other providers keep none
+ const mockEndorsements = {
+ '0x2127C3a31F54B81B5E9AD1e29C36c420d3D6ecC5': {
+ notAfter: 0xffffffffffffffffn,
+ nonce: 0xffffffffffffffffn,
+ signature:
+ '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff',
+ },
+ } as const
+ SPRegistryService.prototype.getPDPService = async function (this: SPRegistryService, providerId) {
+ const service = await getPDPService.call(this, providerId)
+ if (service == null) {
+ return service
+ }
+ if (providerId !== endorsedProviderId) {
+ return service
+ }
+ service.offering.endorsements = mockEndorsements
+ return service
+ }
+ SPRegistryService.prototype.getProviders = async function (this: SPRegistryService, providerIds) {
+ const providers = await getProviders.call(this, providerIds)
+ for (const provider of providers) {
+ if (provider.id === endorsedProviderId && provider.products.PDP !== undefined) {
+ provider.products.PDP.data.endorsements = mockEndorsements
+ }
+ }
+ return providers
+ }
+ })
+
+ afterEach(async () => {
+ SPRegistryService.prototype.getProviders = getProviders
+ SPRegistryService.prototype.getPDPService = getPDPService
+ })
+
+ for (const count of [1, 2]) {
+ it(`prefers to select the endorsed context when selecting ${count} providers`, async () => {
+ const counts: Record<number, number> = {}
+ for (const providerId of providerIds) {
+ counts[providerId] = 0
+ }
+ for (let i = 0; i < 5; i++) {
+ const contexts = await synapse.storage.createContexts({
+ count,
+ forceCreateDataSets: true, // This prevents the defaultContexts caching
+ })
+ assert.equal(contexts.length, count)
+ assert.equal((contexts[0] as any)._dataSetId, undefined)
+ counts[contexts[0].provider.id]++
+ if (count > 1) {
+ assert.notEqual(contexts[0].provider.id, contexts[1].provider.id)
+ assert.equal((contexts[1] as any)._dataSetId, undefined)
+ }
+ }
+ for (const providerId of providerIds) {
+ assert.equal(counts[providerId], providerId === endorsedProviderId ? 5 : 0)
+ }
+ })
+ }
+ })
+ })
+
it('can attempt to create numerous contexts, returning fewer', async () => {
const contexts = await synapse.storage.createContexts({
count: 100,
@@ -961,12 +1027,12 @@ describe('Synapse', () => {
contexts = await synapse.storage.createContexts({
providerIds: [1, 2],
})
- for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) {
- const pdpOptions: PDPMockOptions = {
+ for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) {
+ const pdpOptions: Mocks.pdp.PDPMockOptions = {
baseUrl: provider.products[0].offering.serviceURL,
}
server.use(
- dataSetCreationStatusHandler(
+ Mocks.pdp.dataSetCreationStatusHandler(
FAKE_TX_HASH,
{
ok: true,
@@ -987,17 +1053,17 @@ describe('Synapse', () => {
const pieceCid = Piece.calculate(data)
const mockUUID = '12345678-90ab-cdef-1234-567890abcdef'
const found = true
- for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) {
+ for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) {
const pdpOptions = {
baseUrl: provider.products[0].offering.serviceURL,
}
- server.use(postPieceUploadsHandler(mockUUID, pdpOptions))
- server.use(uploadPieceStreamingHandler(mockUUID, pdpOptions))
- server.use(finalizePieceUploadHandler(mockUUID, pieceCid.toString(), pdpOptions))
- server.use(findPieceHandler(pieceCid.toString(), found, pdpOptions))
- server.use(createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions))
+ server.use(Mocks.pdp.postPieceUploadsHandler(mockUUID, pdpOptions))
+ server.use(Mocks.pdp.uploadPieceStreamingHandler(mockUUID, pdpOptions))
+ server.use(Mocks.pdp.finalizePieceUploadHandler(mockUUID, pieceCid.toString(), pdpOptions))
+ server.use(Mocks.pdp.findPieceHandler(pieceCid.toString(), found, pdpOptions))
+ server.use(Mocks.pdp.createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions))
server.use(
- pieceAdditionStatusHandler(
+ Mocks.pdp.pieceAdditionStatusHandler(
DATA_SET_ID,
FAKE_TX_HASH,
{
@@ -1024,23 +1090,23 @@ describe('Synapse', () => {
const mockUUID = '12345678-90ab-cdef-1234-567890abcdef'
const found = true
const wrongCid = 'wrongCid'
- for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) {
+ for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) {
const pdpOptions = {
baseUrl: provider.products[0].offering.serviceURL,
}
- server.use(postPieceUploadsHandler(mockUUID, pdpOptions))
- server.use(uploadPieceStreamingHandler(mockUUID, pdpOptions))
+ server.use(Mocks.pdp.postPieceUploadsHandler(mockUUID, pdpOptions))
+ server.use(Mocks.pdp.uploadPieceStreamingHandler(mockUUID, pdpOptions))
server.use(
- finalizePieceUploadHandler(
+ Mocks.pdp.finalizePieceUploadHandler(
mockUUID,
- provider === PROVIDERS.provider1 ? pieceCid.toString() : wrongCid,
+ provider === Mocks.PROVIDERS.provider1 ? pieceCid.toString() : wrongCid,
pdpOptions
)
)
- server.use(findPieceHandler(pieceCid.toString(), found, pdpOptions))
- server.use(createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions))
+ server.use(Mocks.pdp.findPieceHandler(pieceCid.toString(), found, pdpOptions))
+ server.use(Mocks.pdp.createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions))
server.use(
- pieceAdditionStatusHandler(
+ Mocks.pdp.pieceAdditionStatusHandler(
DATA_SET_ID,
FAKE_TX_HASH,
{
@@ -1060,7 +1126,7 @@ describe('Synapse', () => {
await synapse.storage.upload(data, { contexts })
assert.fail('Expected upload to fail when one provider returns wrong pieceCid')
} catch (error: any) {
- assert.include(error.message, wrongCid)
+ assert.include(error.message, 'Failed to create upload session')
}
})
})
diff --git a/packages/synapse-sdk/src/test/telemetry.test.ts b/packages/synapse-sdk/src/test/telemetry.test.ts
index a298dbfdd..8e1c317bc 100644
--- a/packages/synapse-sdk/src/test/telemetry.test.ts
+++ b/packages/synapse-sdk/src/test/telemetry.test.ts
@@ -6,6 +6,7 @@
* and that the telemetry system doesn't "crash" Synapse when enabled.
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
@@ -13,10 +14,9 @@ import { HttpResponse, http } from 'msw'
import { Synapse } from '../synapse.ts'
import { removeGlobalTelemetry } from '../telemetry/singleton.ts'
import { sanitizeUrlForSpan } from '../telemetry/utils.ts'
-import { JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts'
// Mock server for testing
-const server = setup([])
+const server = setup()
interface SentryRequest {
request: Request
@@ -52,17 +52,17 @@ describe('Telemetry', () => {
let signer: ethers.Signer
beforeEach(async () => {
- await server.start({ quiet: true })
- server.use(JSONRPC(presets.basic))
+ await server.start()
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
mockSentryRequests()
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
})
afterEach(async () => {
try {
- await synapse?.getProvider()?.destroy()
+ synapse?.getProvider()?.destroy()
} catch {
// ignore destroy errors
}
diff --git a/packages/synapse-sdk/src/test/test-utils.ts b/packages/synapse-sdk/src/test/test-utils.ts
index 351335dbe..bdd2bd0ac 100644
--- a/packages/synapse-sdk/src/test/test-utils.ts
+++ b/packages/synapse-sdk/src/test/test-utils.ts
@@ -1,6 +1,6 @@
+import * as Mocks from '@filoz/synapse-core/mocks'
import type { ProviderInfo } from '../sp-registry/types.ts'
import { SIZE_CONSTANTS } from '../utils/constants.ts'
-import { ADDRESSES } from './mocks/jsonrpc/index.ts'
/**
* Create a mock ProviderInfo object for testing
@@ -8,8 +8,8 @@ import { ADDRESSES } from './mocks/jsonrpc/index.ts'
function createMockProviderInfo(overrides?: Partial<ProviderInfo>): ProviderInfo {
const defaults: ProviderInfo = {
id: 1,
- serviceProvider: ADDRESSES.client1,
- payee: ADDRESSES.client1, // Usually same as serviceProvider for tests
+ serviceProvider: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.client1, // Usually same as serviceProvider for tests
name: 'Test Provider',
description: 'A test storage provider',
active: true,
@@ -67,7 +67,7 @@ export function createSimpleProvider(props: {
serviceURL: string
}): ProviderInfo {
return createMockProviderInfo({
- serviceProvider: props.serviceProvider ?? props.address ?? ADDRESSES.client1,
+ serviceProvider: props.serviceProvider ?? props.address ?? Mocks.ADDRESSES.client1,
products: {
PDP: {
type: 'PDP',
diff --git a/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts b/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts
index 26f7c9edd..3a6aeb4d1 100644
--- a/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts
+++ b/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts
@@ -1,18 +1,19 @@
/* globals describe it before after beforeEach */
+
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
import { METADATA_KEYS } from '../utils/constants.ts'
import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts'
describe('WarmStorageService Metadata', () => {
let server: any
let warmStorageService: WarmStorageService
before(async () => {
- server = setup([])
- await server.start({ quiet: true })
+ server = setup()
+ await server.start()
})
after(() => {
@@ -21,10 +22,10 @@ describe('WarmStorageService Metadata', () => {
beforeEach(async () => {
server.resetHandlers()
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
})
describe('Data Set Metadata', () => {
diff --git a/packages/synapse-sdk/src/test/warm-storage-service.test.ts b/packages/synapse-sdk/src/test/warm-storage-service.test.ts
index 87ebd2dee..7eda0f81a 100644
--- a/packages/synapse-sdk/src/test/warm-storage-service.test.ts
+++ b/packages/synapse-sdk/src/test/warm-storage-service.test.ts
@@ -4,6 +4,7 @@
* Tests for WarmStorageService class
*/
+import * as Mocks from '@filoz/synapse-core/mocks'
import { assert } from 'chai'
import { ethers } from 'ethers'
import { setup } from 'iso-web/msw'
@@ -12,10 +13,9 @@ import { PaymentsService } from '../payments/index.ts'
import { CONTRACT_ADDRESSES, SIZE_CONSTANTS, TIME_CONSTANTS } from '../utils/constants.ts'
import { WarmStorageService } from '../warm-storage/index.ts'
import { makeDataSetCreatedLog } from './mocks/events.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts'
// mock server for testing
-const server = setup([])
+const server = setup()
describe('WarmStorageService', () => {
let provider: ethers.Provider
@@ -24,11 +24,11 @@ describe('WarmStorageService', () => {
// Helper to create WarmStorageService with factory pattern
const createWarmStorageService = async () => {
- return await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+ return await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
}
before(async () => {
- await server.start({ quiet: true })
+ await server.start()
})
after(() => {
@@ -37,12 +37,12 @@ describe('WarmStorageService', () => {
beforeEach(() => {
provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
- signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+ signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
paymentsService = new PaymentsService(
provider,
signer,
- ADDRESSES.calibration.payments,
- ADDRESSES.calibration.usdfcToken,
+ Mocks.ADDRESSES.calibration.payments,
+ Mocks.ADDRESSES.calibration.usdfcToken,
false
)
server.resetHandlers()
@@ -50,7 +50,7 @@ describe('WarmStorageService', () => {
describe('Instantiation', () => {
it('should create instance with required parameters', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const warmStorageService = await createWarmStorageService()
assert.exists(warmStorageService)
assert.isFunction(warmStorageService.getClientDataSets)
@@ -59,7 +59,7 @@ describe('WarmStorageService', () => {
describe('getDataSet', () => {
it('should return a single data set by ID', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const warmStorageService = await createWarmStorageService()
const dataSetId = 1
@@ -68,9 +68,9 @@ describe('WarmStorageService', () => {
assert.equal(result?.pdpRailId, 1)
assert.equal(result?.cacheMissRailId, 0)
assert.equal(result?.cdnRailId, 0)
- assert.equal(result?.payer, ADDRESSES.client1)
- assert.equal(result?.payee, ADDRESSES.serviceProvider1)
- assert.equal(result?.serviceProvider, ADDRESSES.serviceProvider1)
+ assert.equal(result?.payer, Mocks.ADDRESSES.client1)
+ assert.equal(result?.payee, Mocks.ADDRESSES.serviceProvider1)
+ assert.equal(result?.serviceProvider, Mocks.ADDRESSES.serviceProvider1)
assert.equal(result?.commissionBps, 100)
assert.equal(result?.clientDataSetId, 0n)
assert.equal(result?.pdpEndEpoch, 0)
@@ -79,7 +79,7 @@ describe('WarmStorageService', () => {
})
it('should throw for non-existent data set', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const warmStorageService = await createWarmStorageService()
const dataSetId = 999
@@ -93,8 +93,8 @@ describe('WarmStorageService', () => {
it('should handle contract revert gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
// @ts-expect-error - we want to test the error case
getDataSet: () => {
@@ -118,23 +118,23 @@ describe('WarmStorageService', () => {
describe('getClientDataSets', () => {
it('should return empty array when client has no data sets', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
getClientDataSets: () => [[]],
},
})
)
const warmStorageService = await createWarmStorageService()
- const dataSets = await warmStorageService.getClientDataSets(ADDRESSES.client1)
+ const dataSets = await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1)
assert.isArray(dataSets)
assert.lengthOf(dataSets, 0)
})
it('should return data sets for a client', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
getClientDataSets: () => [
[
@@ -142,9 +142,9 @@ describe('WarmStorageService', () => {
pdpRailId: 1n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.serviceProvider1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 0n,
pdpEndEpoch: 0n,
@@ -156,9 +156,9 @@ describe('WarmStorageService', () => {
pdpRailId: 2n,
cacheMissRailId: 0n,
cdnRailId: 100n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.serviceProvider1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.serviceProvider1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 200n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -173,23 +173,23 @@ describe('WarmStorageService', () => {
)
const warmStorageService = await createWarmStorageService()
- const dataSets = await warmStorageService.getClientDataSets(ADDRESSES.client1)
+ const dataSets = await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1)
assert.isArray(dataSets)
assert.lengthOf(dataSets, 2)
// Check first data set
assert.equal(dataSets[0].pdpRailId, 1)
- assert.equal(dataSets[0].payer, ADDRESSES.client1)
- assert.equal(dataSets[0].payee, ADDRESSES.serviceProvider1)
+ assert.equal(dataSets[0].payer, Mocks.ADDRESSES.client1)
+ assert.equal(dataSets[0].payee, Mocks.ADDRESSES.serviceProvider1)
assert.equal(dataSets[0].commissionBps, 100)
assert.equal(dataSets[0].clientDataSetId, 0n)
assert.equal(dataSets[0].cdnRailId, 0)
// Check second data set
assert.equal(dataSets[1].pdpRailId, 2)
- assert.equal(dataSets[1].payer, ADDRESSES.client1)
- assert.equal(dataSets[1].payee, ADDRESSES.serviceProvider1)
+ assert.equal(dataSets[1].payer, Mocks.ADDRESSES.client1)
+ assert.equal(dataSets[1].payee, Mocks.ADDRESSES.serviceProvider1)
assert.equal(dataSets[1].commissionBps, 200)
assert.equal(dataSets[1].clientDataSetId, 1n)
assert.isAbove(dataSets[1].cdnRailId, 0)
@@ -198,8 +198,8 @@ describe('WarmStorageService', () => {
it('should handle contract call errors gracefully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
// @ts-expect-error - we want to test the error case
getClientDataSets: () => null,
@@ -209,7 +209,7 @@ describe('WarmStorageService', () => {
const warmStorageService = await createWarmStorageService()
try {
- await warmStorageService.getClientDataSets(ADDRESSES.client1)
+ await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1)
assert.fail('Should have thrown error')
} catch (error: any) {
assert.include(error.message, 'Failed to get client data sets')
@@ -220,19 +220,19 @@ describe('WarmStorageService', () => {
describe('getClientDataSetsWithDetails', () => {
it('should enhance data sets with PDPVerifier details', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[242n]],
getDataSet: () => [
{
pdpRailId: 48n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 0n,
pdpEndEpoch: 0n,
@@ -242,15 +242,15 @@ describe('WarmStorageService', () => {
],
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
getNextPieceId: () => [2n],
- getDataSetListener: () => [ADDRESSES.calibration.warmStorage],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
},
})
)
const warmStorageService = await createWarmStorageService()
- const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1)
+ const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
assert.lengthOf(detailedDataSets, 1)
assert.equal(detailedDataSets[0].pdpRailId, 48)
@@ -263,10 +263,10 @@ describe('WarmStorageService', () => {
it('should filter unmanaged data sets when onlyManaged is true', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[242n, 243n]],
getDataSet: (args) => {
const [dataSetId] = args
@@ -276,9 +276,9 @@ describe('WarmStorageService', () => {
pdpRailId: 48n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 0n,
pdpEndEpoch: 0n,
@@ -292,9 +292,9 @@ describe('WarmStorageService', () => {
pdpRailId: 49n,
cacheMissRailId: 0n,
cdnRailId: 0n,
- payer: ADDRESSES.client1,
- payee: ADDRESSES.payee1,
- serviceProvider: ADDRESSES.serviceProvider1,
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
commissionBps: 100n,
clientDataSetId: 1n,
pdpEndEpoch: 0n,
@@ -306,13 +306,13 @@ describe('WarmStorageService', () => {
},
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
getNextPieceId: () => [1n],
getDataSetListener: (args) => {
const [dataSetId] = args
if (dataSetId === 242n) {
- return [ADDRESSES.calibration.warmStorage] // Managed by us
+ return [Mocks.ADDRESSES.calibration.warmStorage] // Managed by us
}
return ['0x1234567890123456789012345678901234567890' as `0x${string}`] // Different address
},
@@ -322,22 +322,108 @@ describe('WarmStorageService', () => {
const warmStorageService = await createWarmStorageService()
// Get all data sets
- const allDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1, false)
+ const allDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1, false)
assert.lengthOf(allDataSets, 2)
// Get only managed data sets
- const managedDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1, true)
+ const managedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1, true)
assert.lengthOf(managedDataSets, 1)
assert.equal(managedDataSets[0].pdpRailId, 48)
assert.isTrue(managedDataSets[0].isManaged)
})
+ it('should set withCDN true when cdnRailId > 0 and withCDN metadata key present', async () => {
+ server.use(
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ warmStorageView: {
+ ...Mocks.presets.basic.warmStorageView,
+ clientDataSets: () => [[242n]],
+ getDataSet: () => [
+ {
+ pdpRailId: 48n,
+ cacheMissRailId: 50n,
+ cdnRailId: 51n, // CDN rail exists
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
+ commissionBps: 100n,
+ clientDataSetId: 0n,
+ pdpEndEpoch: 0n,
+ providerId: 1n,
+ dataSetId: 242n,
+ },
+ ],
+ getAllDataSetMetadata: () => [
+ ['withCDN'], // withCDN key present
+ [''],
+ ],
+ },
+ pdpVerifier: {
+ ...Mocks.presets.basic.pdpVerifier,
+ dataSetLive: () => [true],
+ getNextPieceId: () => [2n],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
+ },
+ })
+ )
+ const warmStorageService = await createWarmStorageService()
+ const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
+
+ assert.lengthOf(detailedDataSets, 1)
+ assert.equal(detailedDataSets[0].cdnRailId, 51)
+ assert.isTrue(detailedDataSets[0].withCDN)
+ })
+
+ it('should set withCDN false when cdnRailId > 0 but withCDN metadata key missing (terminated)', async () => {
+ server.use(
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
+ warmStorageView: {
+ ...Mocks.presets.basic.warmStorageView,
+ clientDataSets: () => [[242n]],
+ getDataSet: () => [
+ {
+ pdpRailId: 48n,
+ cacheMissRailId: 50n,
+ cdnRailId: 51n, // CDN rail still exists
+ payer: Mocks.ADDRESSES.client1,
+ payee: Mocks.ADDRESSES.payee1,
+ serviceProvider: Mocks.ADDRESSES.serviceProvider1,
+ commissionBps: 100n,
+ clientDataSetId: 0n,
+ pdpEndEpoch: 0n,
+ providerId: 1n,
+ dataSetId: 242n,
+ },
+ ],
+ getAllDataSetMetadata: () => [
+ [], // No metadata keys - CDN was terminated
+ [],
+ ],
+ },
+ pdpVerifier: {
+ ...Mocks.presets.basic.pdpVerifier,
+ dataSetLive: () => [true],
+ getNextPieceId: () => [2n],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
+ },
+ })
+ )
+ const warmStorageService = await createWarmStorageService()
+ const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
+
+ assert.lengthOf(detailedDataSets, 1)
+ assert.equal(detailedDataSets[0].cdnRailId, 51)
+ assert.isFalse(detailedDataSets[0].withCDN) // CDN terminated, metadata cleared
+ })
+
it('should throw error when contract calls fail', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
clientDataSets: () => [[242n]],
getDataSet: () => {
throw new Error('Contract call failed')
@@ -348,7 +434,7 @@ describe('WarmStorageService', () => {
const warmStorageService = await createWarmStorageService()
try {
- await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1)
+ await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1)
assert.fail('Should have thrown error')
} catch (error: any) {
assert.include(error.message, 'Failed to get details for data set')
@@ -360,12 +446,12 @@ describe('WarmStorageService', () => {
describe('validateDataSet', () => {
it('should validate dataset successfully', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
- getDataSetListener: () => [ADDRESSES.calibration.warmStorage],
+ getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
},
})
)
@@ -378,10 +464,10 @@ describe('WarmStorageService', () => {
it('should throw error if data set is not managed by this WarmStorage', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
getDataSetListener: () => ['0x1234567890123456789012345678901234567890' as Address], // Different address
},
@@ -403,14 +489,14 @@ describe('WarmStorageService', () => {
it('should verify successful data set creation', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
return {
hash: mockTxHash,
- from: ADDRESSES.client1,
+ from: Mocks.ADDRESSES.client1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -435,7 +521,7 @@ describe('WarmStorageService', () => {
}
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
},
})
@@ -455,8 +541,8 @@ describe('WarmStorageService', () => {
it('should handle transaction not mined yet', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
@@ -478,10 +564,10 @@ describe('WarmStorageService', () => {
describe('Service Provider ID Operations', () => {
it('should get list of approved provider IDs', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getApprovedProviders: () => [[1n, 4n, 7n]],
},
})
@@ -496,10 +582,10 @@ describe('WarmStorageService', () => {
it('should return empty array when no providers are approved', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getApprovedProviders: () => [[]],
},
})
@@ -511,10 +597,10 @@ describe('WarmStorageService', () => {
it('should check if a provider ID is approved', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
isProviderApproved: () => [true],
},
})
@@ -526,10 +612,10 @@ describe('WarmStorageService', () => {
it('should check if a provider ID is not approved', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
isProviderApproved: () => [false],
},
})
@@ -542,10 +628,10 @@ describe('WarmStorageService', () => {
it('should get owner address', async () => {
const ownerAddress = '0xabcdef1234567890123456789012345678901234'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorage: {
- ...presets.basic.warmStorage,
+ ...Mocks.presets.basic.warmStorage,
owner: () => [ownerAddress as `0x${string}`],
},
})
@@ -558,10 +644,10 @@ describe('WarmStorageService', () => {
it('should check if signer is owner', async () => {
const signerAddress = '0x1234567890123456789012345678901234567890'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorage: {
- ...presets.basic.warmStorage,
+ ...Mocks.presets.basic.warmStorage,
owner: () => [signerAddress as `0x${string}`],
},
})
@@ -579,10 +665,10 @@ describe('WarmStorageService', () => {
const signerAddress = '0x1234567890123456789012345678901234567890'
const ownerAddress = '0xabcdef1234567890123456789012345678901234'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorage: {
- ...presets.basic.warmStorage,
+ ...Mocks.presets.basic.warmStorage,
owner: () => [ownerAddress as `0x${string}`],
},
})
@@ -597,17 +683,17 @@ describe('WarmStorageService', () => {
})
it('should get service provider registry address', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const warmStorageService = await createWarmStorageService()
const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
// The mock returns this default address for spRegistry
- assert.equal(registryAddress, ADDRESSES.calibration.spRegistry)
+ assert.equal(registryAddress, Mocks.ADDRESSES.calibration.spRegistry)
})
it('should add approved provider (mock transaction)', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -617,7 +703,7 @@ describe('WarmStorageService', () => {
})
it('should terminate dataset (mock tx)', async () => {
- server.use(JSONRPC(presets.basic))
+ server.use(Mocks.JSONRPC(Mocks.presets.basic))
const warmStorageService = await createWarmStorageService()
const tx = await warmStorageService.terminateDataSet(signer, 4)
@@ -626,10 +712,10 @@ describe('WarmStorageService', () => {
it('should remove approved provider with correct index', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getApprovedProviders: () => [[1n, 4n, 7n]],
},
})
@@ -642,10 +728,10 @@ describe('WarmStorageService', () => {
it('should throw when removing non-existent provider', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getApprovedProviders: () => [[1n, 4n, 7n]],
},
})
@@ -664,8 +750,8 @@ describe('WarmStorageService', () => {
describe('calculateStorageCost', () => {
it('should calculate storage costs correctly for 1 GiB', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -693,8 +779,8 @@ describe('WarmStorageService', () => {
it('should scale costs linearly with size', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -718,10 +804,10 @@ describe('WarmStorageService', () => {
it('should fetch pricing from WarmStorage contract', async () => {
let getServicePriceCalled = false
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorage: {
- ...presets.basic.warmStorage,
+ ...Mocks.presets.basic.warmStorage,
getServicePrice: () => {
getServicePriceCalled = true
return [
@@ -747,8 +833,8 @@ describe('WarmStorageService', () => {
describe('checkAllowanceForStorage', () => {
it('should check allowances for storage operations', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -786,10 +872,10 @@ describe('WarmStorageService', () => {
it('should return sufficient when allowances are adequate', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
payments: {
- ...presets.basic.payments,
+ ...Mocks.presets.basic.payments,
operatorApprovals: () => [true, parseUnits('100', 18), parseUnits('10000', 18), 0n, 0n, 0n],
},
})
@@ -817,8 +903,8 @@ describe('WarmStorageService', () => {
it('should include depositAmountNeeded in response', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -843,8 +929,8 @@ describe('WarmStorageService', () => {
it('should use custom lockup days when provided', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -877,8 +963,8 @@ describe('WarmStorageService', () => {
describe('prepareStorageUpload', () => {
it('should prepare storage upload with required actions', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -901,7 +987,7 @@ describe('WarmStorageService', () => {
availableFunds: parseUnits('10000', 18),
}),
approveService: async (serviceAddress: string, rateAllowance: bigint, lockupAllowance: bigint) => {
- assert.strictEqual(serviceAddress, ADDRESSES.calibration.warmStorage)
+ assert.strictEqual(serviceAddress, Mocks.ADDRESSES.calibration.warmStorage)
assert.isTrue(rateAllowance > 0n)
assert.isTrue(lockupAllowance > 0n)
approveServiceCalled = true
@@ -939,8 +1025,8 @@ describe('WarmStorageService', () => {
it('should include deposit action when balance insufficient', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -996,8 +1082,8 @@ describe('WarmStorageService', () => {
it('should return no actions when everything is ready', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const warmStorageService = await createWarmStorageService()
@@ -1037,14 +1123,14 @@ describe('WarmStorageService', () => {
it('should combine PDP server and chain verification status', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
return {
hash: mockTxHash,
- from: ADDRESSES.client1,
+ from: Mocks.ADDRESSES.client1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -1069,7 +1155,7 @@ describe('WarmStorageService', () => {
}
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
},
})
@@ -1118,14 +1204,14 @@ describe('WarmStorageService', () => {
it('should handle PDP server failure gracefully', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
return {
hash: mockTxHash,
- from: ADDRESSES.client1,
+ from: Mocks.ADDRESSES.client1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -1147,7 +1233,7 @@ describe('WarmStorageService', () => {
}
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
},
})
@@ -1181,14 +1267,14 @@ describe('WarmStorageService', () => {
it('should NOT mark as complete when server has not caught up yet', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
return {
hash: mockTxHash,
- from: ADDRESSES.client1,
+ from: Mocks.ADDRESSES.client1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -1210,7 +1296,7 @@ describe('WarmStorageService', () => {
}
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
},
})
@@ -1272,14 +1358,14 @@ describe('WarmStorageService', () => {
}
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionByHash: (params) => {
const hash = params[0]
assert.equal(hash, mockTxHash)
return {
hash: mockTxHash,
- from: ADDRESSES.client1,
+ from: Mocks.ADDRESSES.client1,
gas: '0x5208',
value: '0x0',
nonce: '0x444',
@@ -1306,7 +1392,7 @@ describe('WarmStorageService', () => {
}
},
pdpVerifier: {
- ...presets.basic.pdpVerifier,
+ ...Mocks.presets.basic.pdpVerifier,
dataSetLive: () => [true],
},
})
@@ -1328,8 +1414,8 @@ describe('WarmStorageService', () => {
it('should timeout if data set takes too long', async () => {
const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
eth_getTransactionReceipt: () => null,
})
)
@@ -1367,10 +1453,10 @@ describe('WarmStorageService', () => {
describe('getMaxProvingPeriod() and getChallengeWindow()', () => {
it('should return max proving period from WarmStorage contract', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getMaxProvingPeriod: () => [BigInt(2880)],
},
})
@@ -1382,10 +1468,10 @@ describe('WarmStorageService', () => {
it('should return challenge window from WarmStorage contract', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
challengeWindow: () => [BigInt(60)],
},
})
@@ -1397,10 +1483,10 @@ describe('WarmStorageService', () => {
it('should handle contract call failures', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
warmStorageView: {
- ...presets.basic.warmStorageView,
+ ...Mocks.presets.basic.warmStorageView,
getMaxProvingPeriod: () => {
throw new Error('Contract call failed')
},
@@ -1421,8 +1507,8 @@ describe('WarmStorageService', () => {
describe('CDN Operations', () => {
it('should top up CDN payment rails (mock transaction)', async () => {
server.use(
- JSONRPC({
- ...presets.basic,
+ Mocks.JSONRPC({
+ ...Mocks.presets.basic,
})
)
const dataSetId = 49
diff --git a/packages/synapse-sdk/src/types.ts b/packages/synapse-sdk/src/types.ts
index 359166c6f..bf21ec9c4 100644
--- a/packages/synapse-sdk/src/types.ts
+++ b/packages/synapse-sdk/src/types.ts
@@ -237,7 +237,7 @@ export interface EnhancedDataSetInfo extends DataSetInfo {
isLive: boolean
/** Whether this data set is managed by the current Warm Storage contract */
isManaged: boolean
- /** Whether the data set is using CDN (derived from cdnRailId > 0) */
+ /** Whether the data set is using CDN (cdnRailId > 0 and withCDN metadata key present) */
withCDN: boolean
/** Metadata associated with this data set (key-value pairs) */
metadata: Record<string, string>
@@ -396,25 +396,32 @@ export interface PreflightInfo {
// that combines context creation + upload in one call)
// ============================================================================
-/**
- * Callbacks for tracking upload progress
- *
- * These callbacks provide visibility into the upload process stages:
- * 1. Upload completion (piece uploaded to provider)
- * 2. Piece addition (transaction submitted to chain)
- * 3. Confirmation (transaction confirmed on-chain)
- */
export interface UploadCallbacks {
/** Called periodically during upload with bytes uploaded so far */
onProgress?: (bytesUploaded: number) => void
/** Called when upload to service provider completes */
onUploadComplete?: (pieceCid: PieceCID) => void
- /** Called when the service provider has added the piece and submitted the transaction to the chain */
+ /** Called when the service provider has added the piece(s) and submitted the transaction to the chain */
+ onPiecesAdded?: (transaction?: Hex, pieces?: { pieceCid: PieceCID }[]) => void
+ /** @deprecated Use onPiecesAdded instead */
onPieceAdded?: (transaction?: Hex) => void
- /** Called when the service provider agrees that the piece addition is confirmed on-chain */
+ /** Called when the service provider agrees that the piece addition(s) are confirmed on-chain */
+ onPiecesConfirmed?: (dataSetId: number, pieces: PieceRecord[]) => void
+ /** @deprecated Use onPiecesConfirmed instead */
onPieceConfirmed?: (pieceIds: number[]) => void
}
+/**
+ * Canonical representation of a piece within a data set.
+ *
+ * This is used when reporting confirmed pieces and when iterating over pieces
+ * in a data set.
+ */
+export interface PieceRecord {
+ pieceId: number
+ pieceCid: PieceCID
+}
+
/**
* Options for uploading individual pieces to an existing storage context
*
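The callback changes above move `UploadCallbacks` from single-piece to batch-aware notifications, with `PieceRecord` as the canonical confirmed-piece shape. A minimal usage sketch, assuming `PieceCID`, `PieceRecord`, and `UploadCallbacks` are re-exported from the package root (only `types.ts` is shown in this diff):

```ts
import type { Hex } from 'viem'
import type { PieceCID, PieceRecord, UploadCallbacks } from '@filoz/synapse-sdk'

// Wire the new batch-aware callbacks; the deprecated single-piece variants can
// stay in place for older integrations until they are removed.
const callbacks: UploadCallbacks = {
  onProgress: (bytesUploaded) => {
    console.log(`uploaded ${bytesUploaded} bytes so far`)
  },
  onUploadComplete: (pieceCid: PieceCID) => {
    console.log(`provider received piece ${pieceCid}`)
  },
  // Fires once per add-pieces transaction, with every piece that transaction covers.
  onPiecesAdded: (transaction?: Hex, pieces?: { pieceCid: PieceCID }[]) => {
    console.log(`tx ${transaction} adds ${pieces?.length ?? 0} piece(s)`)
  },
  // Confirmed pieces arrive as PieceRecord objects tied to their data set.
  onPiecesConfirmed: (dataSetId: number, pieces: PieceRecord[]) => {
    for (const { pieceId, pieceCid } of pieces) {
      console.log(`data set ${dataSetId}: piece ${pieceId} confirmed as ${pieceCid}`)
    }
  },
}
// Pass `callbacks` to the SDK's upload entry point (not shown in this diff).
```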
diff --git a/packages/synapse-sdk/src/utils/constants.ts b/packages/synapse-sdk/src/utils/constants.ts
index cf9fc4463..d9e4c46cd 100644
--- a/packages/synapse-sdk/src/utils/constants.ts
+++ b/packages/synapse-sdk/src/utils/constants.ts
@@ -302,16 +302,6 @@ export const TIMING_CONSTANTS = {
PIECE_ADDITION_POLL_INTERVAL_MS: 1000, // 1 second
} as const
-/**
- * Settlement fee required for rail settlement operations
- * This is the NETWORK_FEE constant in the Payments contract that gets burned to the Filecoin network
- * Value: 0.0013 FIL (1300000000000000 attoFIL)
- *
- * IMPORTANT: This value must be kept in sync with the Payments contract's NETWORK_FEE constant.
- * If the contract is upgraded with a different fee, this constant must be updated accordingly.
- */
-export const SETTLEMENT_FEE = 1300000000000000n // 0.0013 FIL in attoFIL
-
/**
* Recommended RPC endpoints for Filecoin networks
*/
diff --git a/packages/synapse-sdk/src/warm-storage/index.ts b/packages/synapse-sdk/src/warm-storage/index.ts
index de9c4b653..efc0779bc 100644
--- a/packages/synapse-sdk/src/warm-storage/index.ts
+++ b/packages/synapse-sdk/src/warm-storage/index.ts
@@ -1,7 +1,6 @@
/**
- * Exports the Warm Storage components
+ * Warm Storage Service
*
- * @packageDocumentation
* @module WarmStorage
* @example
* ```ts
diff --git a/packages/synapse-sdk/src/warm-storage/service.ts b/packages/synapse-sdk/src/warm-storage/service.ts
index 2885da7b6..5dcbad7ca 100644
--- a/packages/synapse-sdk/src/warm-storage/service.ts
+++ b/packages/synapse-sdk/src/warm-storage/service.ts
@@ -31,7 +31,13 @@ import type { PaymentsService } from '../payments/service.ts'
import type { DataSetCreationStatusResponse, PDPServer } from '../pdp/server.ts'
import { PDPVerifier } from '../pdp/verifier.ts'
import type { DataSetInfo, EnhancedDataSetInfo } from '../types.ts'
-import { CONTRACT_ADDRESSES, SIZE_CONSTANTS, TIME_CONSTANTS, TIMING_CONSTANTS } from '../utils/constants.ts'
+import {
+ CONTRACT_ADDRESSES,
+ METADATA_KEYS,
+ SIZE_CONSTANTS,
+ TIME_CONSTANTS,
+ TIMING_CONSTANTS,
+} from '../utils/constants.ts'
import { CONTRACT_ABIS, createError, getFilecoinNetworkType, TOKENS } from '../utils/index.ts'
/**
@@ -378,7 +384,7 @@ export class WarmStorageService {
currentPieceCount: Number(nextPieceId),
isLive,
isManaged,
- withCDN: base.cdnRailId > 0,
+ withCDN: base.cdnRailId > 0 && METADATA_KEYS.WITH_CDN in metadata,
metadata,
}
} catch (error) {
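The one-line change above tightens the `withCDN` derivation: a CDN rail alone is no longer enough. As a plain predicate it reads like the sketch below; this is an assumed restatement, with `'withCDN'` standing in for the value of `METADATA_KEYS.WITH_CDN`:

```ts
// Assumed restatement of the new check; both conditions must hold.
function deriveWithCDN(cdnRailId: number, metadata: Record<string, string>): boolean {
  const WITH_CDN_KEY = 'withCDN' // assumed value of METADATA_KEYS.WITH_CDN
  return cdnRailId > 0 && WITH_CDN_KEY in metadata
}

deriveWithCDN(7, {}) // false: a CDN rail exists, but the data set never opted in via metadata
deriveWithCDN(7, { withCDN: '' }) // true: rail exists and the metadata key is present
```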
diff --git a/packages/synapse-sdk/tsconfig.json b/packages/synapse-sdk/tsconfig.json
index 816f03090..076163a5e 100644
--- a/packages/synapse-sdk/tsconfig.json
+++ b/packages/synapse-sdk/tsconfig.json
@@ -7,17 +7,22 @@
"exclude": ["node_modules", "dist"],
"references": [
{
- "path": "../synapse-core"
+ "path": "../synapse-core/tsconfig.json"
}
],
"typedocOptions": {
"entryPointStrategy": "resolve",
"entryPoints": [
"src/index.ts",
- "src/piece/index.ts",
- "src/pdp/index.ts",
"src/payments/index.ts",
- "src/storage/index.ts"
+ "src/pdp/index.ts",
+ "src/session/index.ts",
+ "src/storage/index.ts",
+ "src/subgraph/index.ts",
+ "src/telemetry/index.ts",
+ "src/warm-storage/index.ts",
+ "src/sp-registry/index.ts",
+ "src/filbeam/index.ts"
]
}
}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index e0cce2b36..2a42b1adb 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -5,8 +5,40 @@ packages:
- docs
- utils
+catalog:
+ '@biomejs/biome': 2.3.8
+ '@types/mocha': ^10.0.10
+ '@types/node': ^24.9.1
+ '@types/react': ^19.2.7
+ '@types/react-dom': ^19.2.3
+ abitype: ^1.2.0
+ ethers: ~6.16.0
+ mocha: ^11.7.4
+ msw: 2.12.4
+ ox: ^0.11.1
+ typescript: 5.9.3
+ viem: ^2.41.2
+ wagmi: ^3.0.2
+
+minimumReleaseAge: 2880
+
+minimumReleaseAgeExclude:
+ - iso-ledger
+ - '@hugomrdias/docs'
+
onlyBuiltDependencies:
- esbuild
- msw
- sharp
- workerd
+
+trustPolicy: no-downgrade
+
+trustPolicyExclude:
+ - vite@6.4.1
+ - chokidar@4.0.3
+ - semver@6.3.1
+ - undici-types@6.19.8
+ - langium@3.3.1
+ - '@reduxjs/toolkit@2.8.2'
+ - reselect@5.1.1
diff --git a/utils/example-storage-e2e.js b/utils/example-storage-e2e.js
index c18768c85..e53f3157a 100644
--- a/utils/example-storage-e2e.js
+++ b/utils/example-storage-e2e.js
@@ -20,14 +20,12 @@
import { ethers } from 'ethers'
import fsPromises from 'fs/promises'
+import { SIZE_CONSTANTS, Synapse, TIME_CONSTANTS } from '../packages/synapse-sdk/src/index.ts'
import {
ADD_PIECES_TYPEHASH,
CREATE_DATA_SET_TYPEHASH,
PDP_PERMISSION_NAMES,
- SIZE_CONSTANTS,
- Synapse,
- TIME_CONSTANTS,
-} from '../packages/synapse-sdk/src/index.ts'
+} from '../packages/synapse-sdk/src/session/index.ts'
// Configuration from environment
const PRIVATE_KEY = process.env.PRIVATE_KEY
@@ -82,10 +80,10 @@ async function main() {
if (!stat.isFile()) {
throw new Error(`Path is not a file: ${filePath}`)
}
- if (stat.size > SIZE_CONSTANTS.MAX_FILE_SIZE_BYTES) {
+ if (stat.size > SIZE_CONSTANTS.MAX_UPLOAD_SIZE) {
throw new Error(
`File exceeds maximum size of ${formatBytes(
- SIZE_CONSTANTS.MAX_FILE_SIZE_BYTES
+ SIZE_CONSTANTS.MAX_UPLOAD_SIZE
)}: ${filePath} (${formatBytes(stat.size)})`
)
}
diff --git a/utils/package.json b/utils/package.json
index 909d8538f..07bfbc689 100644
--- a/utils/package.json
+++ b/utils/package.json
@@ -8,6 +8,6 @@
},
"dependencies": {
"@filoz/synapse-sdk": "workspace:*",
- "ethers": "^6.15.0"
+ "ethers": "^6.16.0"
}
}
diff --git a/utils/settle-dataset-rails.js b/utils/settle-dataset-rails.js
index 0bf8c2879..cc6734b96 100644
--- a/utils/settle-dataset-rails.js
+++ b/utils/settle-dataset-rails.js
@@ -10,7 +10,7 @@
*/
import { ethers } from 'ethers'
-import { SETTLEMENT_FEE, Synapse } from '../packages/synapse-sdk/src/index.ts'
+import { Synapse } from '../packages/synapse-sdk/src/index.ts'
import { getCurrentEpoch } from '../packages/synapse-sdk/src/utils/index.ts'
import { WarmStorageService } from '../packages/synapse-sdk/src/warm-storage/index.ts'
@@ -147,7 +147,6 @@ async function main() {
}
console.log(`Checking settlement amounts for ${railsToSettle.length} rail(s)...`)
- console.log(`${DIM}Settlement fee: ${ethers.formatEther(SETTLEMENT_FEE)} FIL per transaction${RESET}`)
console.log('')
let totalSettled = 0n
@@ -231,7 +230,6 @@ async function main() {
// Check if it's the InsufficientNativeTokenForBurn error
if (error.message.includes('InsufficientNativeTokenForBurn')) {
console.log(` ${YELLOW}Insufficient FIL for network fee${RESET}`)
- console.log(` Required: ${ethers.formatEther(SETTLEMENT_FEE)} FIL`)
}
console.log('')