diff --git a/.github/.markdownlint-cli2.jsonc b/.github/.markdownlint-cli2.jsonc index 16d23e501..2375fd774 100644 --- a/.github/.markdownlint-cli2.jsonc +++ b/.github/.markdownlint-cli2.jsonc @@ -17,6 +17,6 @@ "**/CHANGELOG.md", "**/dist/**/*.{md,mdx}", "**/node_modules/**/*.{md,mdx}", - "docs/src/content/docs/api/**/*.{md,mdx}" + "docs/src/content/docs/reference/**/*.{md,mdx}" ] } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..3cfec3bad --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,7 @@ +* @hugomrdias @rvagg + +/docs/src/content/cookbooks @FilOzone/FIL-B +/docs/src/content/core-concepts @FilOzone/FIL-B +/docs/src/content/introduction @FilOzone/FIL-B +/docs/src/content/resources @FilOzone/FIL-B +/docs/src/content/developer-guides @FilOzone/FIL-B @hugomrdias @rvagg \ No newline at end of file diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 5dfbffa81..9d8eecdcc 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -68,6 +68,16 @@ If using an AI tool, you are welcome to load AGENTS.md into your context to teac pnpm run generate-abi # in the synapse-core package ``` +### Wireit + +This repo uses [wireit](https://github.com/google/wireit) to run scripts, enable incremental builds, set up dependencies between workspaces, and cache results. + +To run a script without caching, prepend `WIREIT_CACHE=none` to the command, like `WIREIT_CACHE=none pnpm run build`. + +To run a script in watch mode, append `--watch`. + +To pass extra arguments to a script, use `pnpm run {script} {npm args} {wireit args} -- {script args}`. + ### Codespaces This repo has a dev container configuration to enable one click setup of a development environment using Codespaces. @@ -126,21 +136,23 @@ Rather than continuously releasing what's landed to our default branch, release- These Release PRs are kept up-to-date as additional work is merged. When we're ready to tag a release, we simply merge the release PR. -When the release PR is merged the release job is triggered to create a new tag, a new github release and run other package specific jobs. +When the release PR is merged, the release job is triggered to create a new tag, a new GitHub release, and run other package-specific jobs. ### How to merge the Release PRs? + Overview: -* Release PRs are created individually for each package in the mono repo. -* The merge order matters. We start with `synapse-core`, then `synapse-sdk`, then `synapse-react`. -* Only merge ONE release PR at a time and wait for `release-please` CI to finish before merging another. -* Dependent packages like `synapse-core` and `synapse-sdk` will have to resolve conflicts before merging because the `main` branch will have an updated `synapse-core` version. Conflicts should be handled by "accepting incoming changes" and then manually again updating the version for the package that is about to be released (e.g., `synapse-sdk`, `synapse-react`). This effectively updates the dependencies in the "Release PR" branch. ([Example merge commit](https://github.com/FilOzone/synapse-sdk/pull/381/commits/ad13bfc9aa16d9abb41c2028d738a60774b54e21).) + +- Release PRs are created individually for each package in the mono repo. +- The merge order matters. We start with `synapse-core`, then `synapse-sdk`, then `synapse-react`. +- Only merge ONE release PR at a time and wait for `release-please` CI to finish before merging another.
+- Dependent packages like `synapse-sdk` and `synapse-react` will have to resolve conflicts before merging because the `main` branch will have an updated `synapse-core` version. Conflicts should be handled by "accepting incoming changes" and then manually updating the version again for the package that is about to be released (e.g., `synapse-sdk`, `synapse-react`). This effectively updates the dependencies in the "Release PR" branch. ([Example merge commit](https://github.com/FilOzone/synapse-sdk/pull/381/commits/ad13bfc9aa16d9abb41c2028d738a60774b54e21).) Below are the specific steps to take. They use the example of releasing `synapse-core=0.1.1`, `synapse-sdk=0.35.2`, and `synapse-react=0.1.1`. | # | Package | Step | Example | -|---|---------|------|---------| +| --- | --------- | ------ | --------- | | 1 | synapse-core | Find the `synapse-core` PR | [example](https://github.com/FilOzone/synapse-sdk/pull/382) | -| 2 | synapse-core | Squash and merge the PR |[example](https://github.com/FilOzone/synapse-sdk/commit/76853b64b2d5f1c9e42baf9ab6dc746d46aca5d5) | +| 2 | synapse-core | Squash and merge the PR | [example](https://github.com/FilOzone/synapse-sdk/commit/76853b64b2d5f1c9e42baf9ab6dc746d46aca5d5) | | 3 | synapse-core | Ensure the `release-please` workflow completes | [example](https://github.com/FilOzone/synapse-sdk/actions/runs/19044395310) | | 4 | synapse-sdk | Find the `synapse-sdk` PR | [example](https://github.com/FilOzone/synapse-sdk/pull/380) | | 5 | synapse-sdk | Resolve conflicts by accepting incoming changes and then resetting the `synapse-sdk` version | [example](https://github.com/FilOzone/synapse-sdk/pull/380/commits/ca1b61b8c87e306609cd4b3c6216bfc8f8a40348) | @@ -148,7 +160,7 @@ Below are the specific steps to take. They use the example of releasing `synaps | 7 | synapse-sdk | Ensure the `release-please` workflow completes | [example](https://github.com/FilOzone/synapse-sdk/actions/runs/19044573289) | | 8 | synapse-react | Find the `synapse-react` PR | [example](https://github.com/FilOzone/synapse-sdk/pull/381) | | 9 | synapse-react | Resolve conflicts by accepting incoming changes and then resetting the `synapse-sdk` version | [example](https://github.com/FilOzone/synapse-sdk/pull/381/commits/ad13bfc9aa16d9abb41c2028d738a60774b54e21) | -| 10 | synapse-react | Squash and merge the PR |[example](https://github.com/FilOzone/synapse-sdk/commit/4b381d8a6e023315652a83c6782f18ac554dba2e) | +| 10 | synapse-react | Squash and merge the PR | [example](https://github.com/FilOzone/synapse-sdk/commit/4b381d8a6e023315652a83c6782f18ac554dba2e) | | 11 | synapse-react | Ensure the `release-please` workflow completes | [example](https://github.com/FilOzone/synapse-sdk/actions/runs/19044833170) | ### How should I write my commits?
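For quick reference, the wireit commands described in the CONTRIBUTING.md hunk above, collected into one sketch. `build` is the script used as the example there; any other workspace script works the same way, and `--some-script-flag` is a placeholder for whatever the underlying script accepts:

```sh
# Run a script without wireit's cache
WIREIT_CACHE=none pnpm run build

# Run a script in watch mode (wireit re-runs it when inputs change)
pnpm run build --watch

# Pass extra arguments through to the underlying script:
# pnpm run {script} {npm args} {wireit args} -- {script args}
pnpm run build -- --some-script-flag
```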
diff --git a/.github/release-please-manifest.json b/.github/release-please-manifest.json index 94d2a37bd..6a9e09220 100644 --- a/.github/release-please-manifest.json +++ b/.github/release-please-manifest.json @@ -1,5 +1,5 @@ { - "packages/synapse-sdk": "0.36.0", - "packages/synapse-core": "0.1.3", - "packages/synapse-react": "0.1.3" + "packages/synapse-sdk": "0.36.1", + "packages/synapse-core": "0.1.4", + "packages/synapse-react": "0.1.4" } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9f7e456a5..b9c122a58 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,20 +6,25 @@ on: paths: - "packages/**" - .github/workflows/ci.yml + - package.json + - pnpm-lock.yaml + - pnpm-workspace.yaml pull_request: paths: - "packages/**" - .github/workflows/ci.yml + - package.json + - pnpm-lock.yaml + - pnpm-workspace.yaml jobs: lint: runs-on: ubuntu-latest steps: - name: Checkout Repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup pnpm uses: pnpm/action-setup@v4 - with: - version: latest + - name: Use Node.js lts/* uses: actions/setup-node@v6 with: @@ -41,7 +46,8 @@ jobs: run: pnpm run lint - name: Build run: pnpm run build - + - name: Build docs + run: pnpm -r --filter docs run build test-synapse-sdk: needs: lint strategy: @@ -52,11 +58,9 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout Repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup pnpm uses: pnpm/action-setup@v4 - with: - version: latest - name: Use Node.js ${{ matrix.node }} uses: actions/setup-node@v6 with: @@ -76,11 +80,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup pnpm uses: pnpm/action-setup@v4 - with: - version: latest - name: Use Node.js lts/* uses: actions/setup-node@v6 with: diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index f8c785aea..a8544d2d9 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -26,7 +26,7 @@ jobs: actions: read # Required for Claude to read CI results on PRs steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: fetch-depth: 1 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index f7c1d08e0..da020a7fd 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,10 +18,8 @@ jobs: runs-on: ubuntu-latest if: github.ref_name == 'master' steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: pnpm/action-setup@v4 - with: - version: latest - uses: actions/setup-node@v6 with: node-version: lts/* diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 78d5179e4..8332af32f 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -34,10 +34,8 @@ jobs: contents: read id-token: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: pnpm/action-setup@v4 - with: - version: latest - uses: actions/setup-node@v6 with: node-version: lts/* @@ -46,8 +44,6 @@ jobs: - run: pnpm run lint - run: pnpm run build - run: pnpm -r --filter './packages/**' publish --access=public - env: - NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} docs: needs: release if: | diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 41bba06f6..27b668163 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,3 +1,3 @@ { - "recommendations": ["biomejs.biome", "yoavbls.pretty-ts-errors"] + 
"recommendations": ["biomejs.biome", "yoavbls.pretty-ts-errors", "hideoo.starlight-links"] } diff --git a/.vscode/settings.json b/.vscode/settings.json index ea44620c3..c2aab1145 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -11,12 +11,13 @@ "typescript.enablePromptUseWorkspaceTsdk": true, "javascript.preferences.importModuleSpecifierEnding": "js", "typescript.preferences.importModuleSpecifierEnding": "js", - "markdownlint.configFile": "./.github/.markdownlint-cli2.jsonc", + "markdownlint.configFile": ".github/.markdownlint-cli2.jsonc", "search.exclude": { "**/dist/**": true, "pnpm-lock.yaml": true }, "files.associations": { + "*.mdx": "markdown", ".vscode/*.json": "jsonc", "*.css": "tailwindcss", "*.svg": "html", @@ -31,5 +32,7 @@ }, "[mdx]": { "editor.defaultFormatter": "DavidAnson.vscode-markdownlint" - } + }, + "markdown.validate.enabled": false, + "starlightLinks.configDirectories": ["./docs"] } diff --git a/apps/synapse-playground/biome.json b/apps/synapse-playground/biome.json index f1f63fc2f..914f83175 100644 --- a/apps/synapse-playground/biome.json +++ b/apps/synapse-playground/biome.json @@ -1,6 +1,6 @@ { "root": false, - "$schema": "https://biomejs.dev/schemas/2.3.5/schema.json", + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", "extends": "//", "files": { "includes": ["src/**/*.{js,jsx,ts,tsx}", "!src/components/ui/*.tsx", "vite.config.ts", "!src/style.css"] diff --git a/apps/synapse-playground/package.json b/apps/synapse-playground/package.json index 0f319344e..b65973171 100644 --- a/apps/synapse-playground/package.json +++ b/apps/synapse-playground/package.json @@ -65,37 +65,39 @@ "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-tooltip": "^1.2.8", "@tailwindcss/vite": "^4.1.14", - "@tanstack/query-async-storage-persister": "^5.90.7", - "@tanstack/query-sync-storage-persister": "^5.90.7", - "@tanstack/react-query": "^5.90.2", - "@tanstack/react-query-persist-client": "^5.90.7", + "@tanstack/query-async-storage-persister": "^5.90.13", + "@tanstack/query-sync-storage-persister": "^5.90.13", + "@tanstack/react-query": "^5.90.11", + "@tanstack/react-query-persist-client": "^5.90.13", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", - "lucide-react": "^0.553.0", + "iso-ledger": "^0.1.7", + "lucide-react": "^0.561.0", "nanostores": "^1.0.1", "next-themes": "^0.4.6", "p-retry": "^7.1.0", - "react": "19.2.0", - "react-dom": "19.2.0", + "react": "19.2.3", + "react-dom": "19.2.3", "react-dropzone": "^14.3.8", - "react-hook-form": "^7.64.0", + "react-hook-form": "^7.66.1", "sonner": "^2.0.7", "tailwind-merge": "^3.3.1", "tailwindcss": "^4.1.14", - "viem": "^2.38.3", - "wagmi": "^2.18.1", - "zod": "^4.1.12" + "viem": "catalog:", + "wagmi": "catalog:", + "zod": "^4.1.13" }, "devDependencies": { - "@biomejs/biome": "^2.3.5", - "@types/react": "19.2.4", - "@types/react-dom": "19.2.3", + "@biomejs/biome": "catalog:", + "@types/react": "catalog:", + "@types/react-dom": "catalog:", "@vitejs/plugin-react": "^5.0.4", + "buffer": "^6.0.3", "globals": "^16.3.0", "tw-animate-css": "^1.4.0", - "typescript": "^5.9.3", - "vite": "^7.1.10", - "vite-bundle-analyzer": "^1.2.1", - "wrangler": "^4.44.0" + "typescript": "catalog:", + "vite": "^7.2.6", + "vite-plugin-node-polyfills": "^0.24.0", + "wrangler": "^4.51.0" } } diff --git a/apps/synapse-playground/src/components/connect-wallet.tsx b/apps/synapse-playground/src/components/connect-wallet.tsx index 30c16a542..50d12872f 100644 --- a/apps/synapse-playground/src/components/connect-wallet.tsx +++ 
b/apps/synapse-playground/src/components/connect-wallet.tsx @@ -1,5 +1,5 @@ import { useEffect, useState } from 'react' -import { useAccount } from 'wagmi' +import { useConnection } from 'wagmi' import { Button } from '@/components/ui/button.tsx' import { Dialog, @@ -13,7 +13,7 @@ import { WalletOptions } from './wallet-options.tsx' export function ConnectWallet() { const [open, setOpen] = useState(false) - const { isConnected } = useAccount() + const { isConnected } = useConnection() useEffect(() => { if (isConnected) { diff --git a/apps/synapse-playground/src/components/network-selector.tsx b/apps/synapse-playground/src/components/network-selector.tsx index c74bb710c..e1cb476f7 100644 --- a/apps/synapse-playground/src/components/network-selector.tsx +++ b/apps/synapse-playground/src/components/network-selector.tsx @@ -1,13 +1,13 @@ import { useStore } from '@nanostores/react' import { useEffect } from 'react' -import { useAccount, useSwitchChain } from 'wagmi' +import { useConnection, useSwitchChain } from 'wagmi' import { filecoin, filecoinCalibration } from 'wagmi/chains' import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select.tsx' import { store } from '@/lib/store.ts' export function NetworkSelector() { const { network } = useStore(store, { keys: ['network'] }) - const { chainId } = useAccount() + const { chainId } = useConnection() const { switchChain } = useSwitchChain() // update the network in the store when the chainId changes diff --git a/apps/synapse-playground/src/components/payments-account.tsx b/apps/synapse-playground/src/components/payments-account.tsx index cc708faa1..94324f17b 100644 --- a/apps/synapse-playground/src/components/payments-account.tsx +++ b/apps/synapse-playground/src/components/payments-account.tsx @@ -3,7 +3,7 @@ import { useAccountInfo, useApproveAllowance, useDeposit, useWithdraw } from '@f import { useState } from 'react' import { useForm } from 'react-hook-form' import { parseEther } from 'viem' -import { useAccount } from 'wagmi' +import { useConnection } from 'wagmi' import { z } from 'zod/v4' import * as Icons from '@/components/icons.tsx' import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card.tsx' @@ -25,7 +25,7 @@ import { Form, FormControl, FormDescription, FormField, FormItem, FormLabel, For import { Input } from './ui/input.tsx' export function PaymentsAccount() { - const { address } = useAccount() + const { address } = useConnection() const { data: paymentsBalance } = useAccountInfo({ address, }) diff --git a/apps/synapse-playground/src/components/wallet-menu.tsx b/apps/synapse-playground/src/components/wallet-menu.tsx index 8a9c2bd54..317db3e7f 100644 --- a/apps/synapse-playground/src/components/wallet-menu.tsx +++ b/apps/synapse-playground/src/components/wallet-menu.tsx @@ -1,8 +1,9 @@ import { formatBalance } from '@filoz/synapse-core/utils' import { useAddUsdfc, useERC20Balance, useFundWallet } from '@filoz/synapse-react' +import { isLedgerConnector } from 'iso-ledger/ledger-connector' import { ArrowUpRight, Copy, Wallet } from 'lucide-react' import { toast } from 'sonner' -import { useAccount, useBalance, useDisconnect } from 'wagmi' +import { useBalance, useConnection, useDisconnect } from 'wagmi' import * as Icons from '@/components/icons.tsx' import { Button } from '@/components/ui/button.tsx' import { @@ -18,9 +19,10 @@ import { import { useCopyToClipboard } from '@/hooks/use-clipboard.ts' import { toastError, truncateMiddle } from 
'@/lib/utils.ts' import { ExplorerLink } from './explorer-link.tsx' +import { LedgerChangeAccountDialog } from './wallet-menu/ledger-change-account.tsx' export function WalletMenu() { - const { address } = useAccount() + const { address, connector } = useConnection() const { disconnect } = useDisconnect() const [_, copyToClipboard] = useCopyToClipboard() const { data: balance } = useBalance({ @@ -86,6 +88,7 @@ export function WalletMenu() { + {isLedgerConnector(connector) && } Tools diff --git a/apps/synapse-playground/src/components/wallet-menu/ledger-change-account.tsx b/apps/synapse-playground/src/components/wallet-menu/ledger-change-account.tsx new file mode 100644 index 000000000..4d978c1d7 --- /dev/null +++ b/apps/synapse-playground/src/components/wallet-menu/ledger-change-account.tsx @@ -0,0 +1,126 @@ +import { isLedgerConnector } from 'iso-ledger/ledger-connector' +import { useState } from 'react' +import { useForm } from 'react-hook-form' +import { useConnection } from 'wagmi' +import { z } from 'zod/v4' +import { Button } from '@/components/ui/button.tsx' +import { + Dialog, + DialogClose, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from '@/components/ui/dialog.tsx' +import { + Form, + FormControl, + FormDescription, + FormField, + FormItem, + FormLabel, + FormMessage, +} from '@/components/ui/form.tsx' +import { Input } from '@/components/ui/input.tsx' +import { DropdownMenuItem } from '../ui/dropdown-menu.tsx' + +const changeAccountFormSchema = z.object({ + accountIndex: z.number().min(0), + addressIndex: z.number().min(0), +}) +export function LedgerChangeAccountDialog() { + const { connector } = useConnection() + const [open, setOpen] = useState(false) + const form = useForm>({ + defaultValues: { + accountIndex: 0, + addressIndex: 0, + }, + }) + + function onSubmit(values: z.infer) { + if (isLedgerConnector(connector)) { + connector.changeAccount({ accountIndex: values.accountIndex, addressIndex: values.addressIndex }) + setOpen(false) + } + } + + return ( + + + e.preventDefault()}>Change Account + + +
+ + + Change Account + Change the account to use with the Ledger device. + + +
+
+ ( + + Account Index + + + + Ledger Live + + + )} + rules={{ + required: 'Account Index is required', + validate: (value) => { + if (value < 0) { + return 'Account Index must be equal or greater than 0' + } + return true + }, + }} + /> + ( + + Address Index + + + + MetaMask and Trezor (BIP44) + + + )} + rules={{ + required: 'Address Index is required', + validate: (value) => { + if (value < 0) { + return 'Address Index must be equal or greater than 0' + } + return true + }, + }} + /> +
+
+ + + + + + +
+ +
+
+ ) +} diff --git a/apps/synapse-playground/src/components/wallet-options.tsx b/apps/synapse-playground/src/components/wallet-options.tsx index 6b3e5ce9c..c2ea205cb 100644 --- a/apps/synapse-playground/src/components/wallet-options.tsx +++ b/apps/synapse-playground/src/components/wallet-options.tsx @@ -1,14 +1,22 @@ -import { useFilsnap } from '@filoz/synapse-react' +import { useFilsnap } from '@filoz/synapse-react/filsnap' import { useStore } from '@nanostores/react' import { useEffect, useState } from 'react' -import { type Connector, useConnect } from 'wagmi' +import { type Connector, useConnect, useConnectors } from 'wagmi' import { filecoin, filecoinCalibration } from 'wagmi/chains' import { store } from '@/lib/store.ts' +import { toastError } from '@/lib/utils.ts' import { Button } from './ui/button.tsx' export function WalletOptions() { - const { connectors, connect } = useConnect() + const connectors = useConnectors() const { network } = useStore(store, { keys: ['network'] }) + const { connect, isPending } = useConnect({ + mutation: { + onError: (error) => { + toastError(error, 'connect-wallet', 'Connecting wallet failed') + }, + }, + }) useFilsnap({ // force: true, }) @@ -20,6 +28,7 @@ export function WalletOptions() { return ( { connect({ @@ -32,7 +41,15 @@ export function WalletOptions() { }) } -function WalletOption({ connector, onClick }: { connector: Connector; onClick: () => void }) { +function WalletOption({ + connector, + onClick, + isPending, +}: { + connector: Connector + onClick: () => void + isPending: boolean +}) { const [ready, setReady] = useState(false) useEffect(() => { @@ -43,7 +60,7 @@ function WalletOption({ connector, onClick }: { connector: Connector; onClick: ( }, [connector]) return ( - ) diff --git a/apps/synapse-playground/src/main.tsx b/apps/synapse-playground/src/main.tsx index 60b955f4f..ab688285e 100644 --- a/apps/synapse-playground/src/main.tsx +++ b/apps/synapse-playground/src/main.tsx @@ -2,14 +2,13 @@ import { calibration, mainnet } from '@filoz/synapse-core/chains' import { createSyncStoragePersister } from '@tanstack/query-sync-storage-persister' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' import { persistQueryClient } from '@tanstack/react-query-persist-client' +import { ledger } from 'iso-ledger/ledger-connector' import { StrictMode } from 'react' import { createRoot } from 'react-dom/client' import { createConfig, deserialize, http, serialize, WagmiProvider } from 'wagmi' - -import { injected, walletConnect } from 'wagmi/connectors' +import { injected } from 'wagmi/connectors' import { App } from './app.tsx' import { ThemeProvider } from './components/theme-provider.tsx' - import './style.css' const queryClient = new QueryClient({ @@ -40,27 +39,30 @@ persistQueryClient({ // storage: window.localStorage, // }) -const baseUrl = globalThis.location.origin -const iconUrl = `${baseUrl}/filecoin-logo.svg` +// const baseUrl = globalThis.location.origin +// const iconUrl = `${baseUrl}/filecoin-logo.svg` export const config = createConfig({ chains: [mainnet, calibration], connectors: [ injected(), - walletConnect({ - projectId: '5dc22b5e6ac40238a76062d77107ab29', - metadata: { - name: 'Synapse Playground', - description: 'Synapse Playground', - url: baseUrl, - icons: [iconUrl], - }, + ledger({ + forceBlindSigning: true, }), + // walletConnect({ + // projectId: '5dc22b5e6ac40238a76062d77107ab29', + // metadata: { + // name: 'Synapse Playground', + // description: 'Synapse Playground', + // url: baseUrl, + // icons: 
[iconUrl], + // }, + // }), ], transports: { [mainnet.id]: http(), [calibration.id]: http(undefined, { - batch: true, + batch: false, }), }, batch: { @@ -79,7 +81,7 @@ createRoot(document.getElementById('root')!).render( - + diff --git a/apps/synapse-playground/tsconfig.json b/apps/synapse-playground/tsconfig.json index e9893bae7..1474dc289 100644 --- a/apps/synapse-playground/tsconfig.json +++ b/apps/synapse-playground/tsconfig.json @@ -31,5 +31,8 @@ } }, "include": ["src"], - "references": [{ "path": "../../packages/synapse-core" }, { "path": "../../packages/synapse-react" }] + "references": [ + { "path": "../../packages/synapse-core/tsconfig.json" }, + { "path": "../../packages/synapse-react/tsconfig.json" } + ] } diff --git a/apps/synapse-playground/vite.config.ts b/apps/synapse-playground/vite.config.ts index 68fae9881..87cc06aa9 100644 --- a/apps/synapse-playground/vite.config.ts +++ b/apps/synapse-playground/vite.config.ts @@ -2,16 +2,19 @@ import tailwindcss from '@tailwindcss/vite' import react from '@vitejs/plugin-react' import path from 'path' import { defineConfig } from 'vite' -import { analyzer } from 'vite-bundle-analyzer' +import { nodePolyfills } from 'vite-plugin-node-polyfills' // https://vitejs.dev/config/ export default defineConfig({ plugins: [ + nodePolyfills({ + include: ['buffer'], + globals: { + Buffer: true, + }, + }), react(), tailwindcss(), - analyzer({ - enabled: false, - }), ], resolve: { dedupe: ['react', 'react-dom', 'wagmi'], diff --git a/docs/.gitignore b/docs/.gitignore index d2172bca7..4d4fbb795 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -19,7 +19,7 @@ pnpm-debug.log* # macOS-specific files .DS_Store -src/content/docs/api +src/content/docs/reference .wrangler/ # Node.js v25 localStorage file diff --git a/docs/README.md b/docs/README.md index 207b5855f..614fa0189 100644 --- a/docs/README.md +++ b/docs/README.md @@ -27,8 +27,8 @@ Static assets, like favicons, can be placed in the `public/` directory. 
All commands are run from the root of the project, from a terminal: -| Command | Action | -| :------------------------ | :----------------------------------------------- | +| Command | Action | +| :------------------------ | :----------------------------------------------- | | `pnpm install` | Installs dependencies | | `pnpm run dev` | Starts local dev server at `localhost:4321` | | `pnpm run build` | Build your production site to `./dist/` | diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs index bafc3e76a..691fa1de2 100644 --- a/docs/astro.config.mjs +++ b/docs/astro.config.mjs @@ -1,10 +1,13 @@ import starlight from '@astrojs/starlight' +import { llmsPlugin } from '@hugomrdias/docs/starlight-llms' import { docsPlugin } from '@hugomrdias/docs/starlight-typedoc' import { defineConfig } from 'astro/config' import mermaid from 'astro-mermaid' import ecTwoSlash from 'expressive-code-twoslash' -import starlightLlmsTxt from 'starlight-llms-txt' -import starlightPageActions from 'starlight-page-actions' +import rehypeExternalLinks from 'rehype-external-links' +import starlightAutoSidebar from 'starlight-auto-sidebar' +import starlightChangelogs, { makeChangelogsSidebarLinks } from 'starlight-changelogs' +import starlightLinksValidator from 'starlight-links-validator' import viteTsconfigPaths from 'vite-tsconfig-paths' const site = 'https://docs.filecoin.cloud' @@ -16,6 +19,16 @@ export default defineConfig({ vite: { plugins: [viteTsconfigPaths()], }, + markdown: { + rehypePlugins: [ + [ + rehypeExternalLinks, + { + target: '_blank', + }, + ], + ], + }, integrations: [ mermaid({ theme: 'forest', @@ -68,6 +81,21 @@ export default defineConfig({ content: new URL('og2.jpg?v=1', site).href, }, }, + { + tag: 'script', + attrs: { + src: 'https://plausible.io/js/pa-rtmx1Y7w1rQg3O30eJD9U.js', + defer: true, + async: true, + }, + }, + { + tag: 'script', + content: ` + window.plausible=window.plausible||function(){(plausible.q=plausible.q||[]).push(arguments)},plausible.init=plausible.init||function(i){plausible.o=i||{}}; + plausible.init(); + `, + }, ], social: [ { @@ -114,9 +142,32 @@ export default defineConfig({ autogenerate: { directory: 'resources' }, }, { - label: 'API', + label: 'Changelogs', + collapsed: true, + items: [ + ...makeChangelogsSidebarLinks([ + { + type: 'all', + base: 'changelog-sdk', + label: '@filoz/synapse-sdk', + }, + { + type: 'all', + base: 'changelog-core', + label: '@filoz/synapse-core', + }, + { + type: 'all', + base: 'changelog-react', + label: '@filoz/synapse-react', + }, + ]), + ], + }, + { + label: 'Reference', collapsed: true, - autogenerate: { directory: 'api' }, + autogenerate: { directory: 'reference' }, }, ], expressiveCode: { @@ -133,6 +184,7 @@ export default defineConfig({ }, plugins: [ docsPlugin({ + outputDirectory: 'reference', pagination: true, typeDocOptions: { githubPages: true, @@ -146,8 +198,10 @@ export default defineConfig({ plugin: ['typedoc-plugin-mdn-links'], }, }), - starlightLlmsTxt(), - starlightPageActions(), + llmsPlugin(), + starlightAutoSidebar(), + starlightChangelogs(), + starlightLinksValidator(), ], }), ], diff --git a/docs/package.json b/docs/package.json index 6280dd668..99a363508 100644 --- a/docs/package.json +++ b/docs/package.json @@ -10,25 +10,27 @@ "astro": "astro" }, "dependencies": { - "@filoz/synapse-sdk": "0.35.3", - "ethers": "^6.15.0", - "starlight-page-actions": "^0.2.0" + "@filoz/synapse-sdk": "workspace:*", + "ethers": "catalog:", + "starlight-auto-sidebar": "^0.1.3", + "starlight-changelogs": "^0.2.3", + 
"starlight-links-validator": "^0.19.2" }, "devDependencies": { - "@astrojs/starlight": "^0.36.1", - "@hugomrdias/docs": "^0.1.5", - "@types/react": "^19.2.4", - "@types/react-dom": "^19.2.3", - "astro": "^5.14.7", + "@astrojs/starlight": "^0.37.0", + "@hugomrdias/docs": "^0.1.10", + "@types/react": "catalog:", + "@types/react-dom": "catalog:", + "astro": "^5.16.3", "astro-mermaid": "^1.1.0", "expressive-code-twoslash": "^0.5.3", - "mermaid": "^11.12.0", + "mermaid": "^11.12.2", + "rehype-external-links": "^3.0.0", "sharp": "^0.34.4", - "starlight-llms-txt": "^0.6.0", - "typedoc": "^0.28.14", + "typedoc": "^0.28.15", "typedoc-plugin-mdn-links": "^5.0.10", "typedoc-plugin-missing-exports": "^4.1.2", "vite-tsconfig-paths": "^5.1.4", - "wrangler": "^4.44.0" + "wrangler": "^4.51.0" } } diff --git a/docs/src/content.config.ts b/docs/src/content.config.ts index 9cfdab095..4e6c14741 100644 --- a/docs/src/content.config.ts +++ b/docs/src/content.config.ts @@ -1,7 +1,42 @@ import { defineCollection } from 'astro:content' import { docsLoader } from '@astrojs/starlight/loaders' import { docsSchema } from '@astrojs/starlight/schema' +import { autoSidebarLoader } from 'starlight-auto-sidebar/loader' +import { autoSidebarSchema } from 'starlight-auto-sidebar/schema' +import { changelogsLoader } from 'starlight-changelogs/loader' export const collections = { docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }), + autoSidebar: defineCollection({ + loader: autoSidebarLoader(), + schema: autoSidebarSchema(), + }), + changelogs: defineCollection({ + loader: changelogsLoader([ + { + provider: 'changeset', + base: 'changelog-sdk', + changelog: '../packages/synapse-sdk/CHANGELOG.md', + process: ({ title }) => { + return title.split(' ')[0] + }, + }, + { + provider: 'changeset', + base: 'changelog-core', + changelog: '../packages/synapse-core/CHANGELOG.md', + process: ({ title }) => { + return title.split(' ')[0] + }, + }, + { + provider: 'changeset', + base: 'changelog-react', + changelog: '../packages/synapse-react/CHANGELOG.md', + process: ({ title }) => { + return title.split(' ')[0] + }, + }, + ]), + }), } diff --git a/docs/src/content/docs/cookbooks/filecoin-pay.mdx b/docs/src/content/docs/cookbooks/filecoin-pay.mdx index 8a08b76a5..9bc7a5d69 100644 --- a/docs/src/content/docs/cookbooks/filecoin-pay.mdx +++ b/docs/src/content/docs/cookbooks/filecoin-pay.mdx @@ -1,12 +1,12 @@ --- -title: Filecoin Pay Cookbook +title: Filecoin Pay description: Core concepts for building automated payment systems with native token streaming. sidebar: order: 0 --- :::tip[Looking for Technical Details?] -This guide focuses on **conceptual understanding** and **use cases**. For technical implementation details, smart contract functions, and integration patterns, see the [**Filecoin Pay Technical Overview**](/core-concepts/filecoin-pay-overview). +This guide focuses on **conceptual understanding** and **use cases**. For technical implementation details, smart contract functions, and integration patterns, see the [**Filecoin Pay Technical Overview**](/core-concepts/filecoin-pay-overview/). ::: ## What is Filecoin Pay? @@ -209,8 +209,8 @@ Filecoin Pay power comes from combining payment rails with custom validators tha Ready to dive deeper? 
-- [Filecoin Pay Technical Overview](/core-concepts/filecoin-pay-overview) - Technical architecture and implementation details -- [System Architecture](/core-concepts/architecture) - How Filecoin Pay integrates with the broader system -- [Developer Guides](/developer-guides/) - Start building applications with Filecoin Pay +- [Filecoin Pay Technical Overview](/core-concepts/filecoin-pay-overview/) - Technical architecture and implementation details +- [System Architecture](/core-concepts/architecture/) - How Filecoin Pay integrates with the broader system +- [Developer Guides](/developer-guides/synapse/) - Start building applications with Filecoin Pay Filecoin Pay represents a fundamental shift in how Web3 infrastructure gets paid - from manual, trust-based payments to automated, verifiable, programmable payment rails. The patterns above are just the beginning of what's possible. diff --git a/docs/src/content/docs/core-concepts/architecture.mdx b/docs/src/content/docs/core-concepts/architecture.mdx index 6903b3be8..da154445a 100644 --- a/docs/src/content/docs/core-concepts/architecture.mdx +++ b/docs/src/content/docs/core-concepts/architecture.mdx @@ -15,18 +15,18 @@ Filecoin Onchain Cloud models both data operations and economic interactions as Together, these entities define how information, proofs, and payments move through the system — forming the foundation of Filecoin’s verifiable cloud. -| Concept | Purpose | -|---------|---------| +| Concept | Purpose | +| --------- | --------- | | **Piece** | Smallest data unit stored in the Filecoin network. | -| **Data Set** | Logical collection of stored data pieces with a shared lifecycle.| -| **Proof Record** | Verifiable claim proving continued possession of data. | -| **Payment Rail** | Economic channel defining how and when payments are released. | +| **Data Set** | Logical collection of stored data pieces with a shared lifecycle. | +| **Proof Record** | Verifiable claim proving continued possession of data. | +| **Payment Rail** | Economic channel defining how and when payments are released. | -#### **Piece** +### Piece A Piece is the smallest unit of content stored in the Filecoin network. Each piece is identified by a **Content Identifier (CID)** and represents a verifiable reference to raw data, independent of its physical location. -#### **Data Set** +### Data Set A data set represents a logical grouping of one or more Pieces of content which will be stored and proved by PDP service providers. @@ -38,7 +38,7 @@ Each data set becomes a persistent, onchain-tracked object — tied to: - its service provider, - and its proof schedule under PDP. -#### **Proof Record** +### Proof Record A Proof Record captures the cryptographic evidence that a PDP Service Provider still holds the data in a data set. Proofs are generated periodically using **Proof of Data Possession (PDP)** — a lightweight cryptographic protocol optimized for verifiable warm storage. @@ -50,7 +50,7 @@ Each Proof Record: These records create a **public, immutable audit trail** of data set availability, enabling anyone to verify that data remains online without re-downloading it. Proof Records also act as **payment triggers**, signaling Filecoin Pay to settle balances once proofs succeed. -#### **Payment Rail** +### Payment Rail A Payment Rail defines the financial lifecycle of the Filecoin Onchain Cloud service agreement. 
It connects a **client** (who requests a service) with a **provider** (who delivers it), and governs how tokens move between them based on onchain verifiable conditions. @@ -81,7 +81,7 @@ graph TB This modularity allows each layer to evolve independently while maintaining interoperability through onchain standards. | Layer | Function | Components | -|-------|------------|----------| +| ------- | ------------ | ---------- | | **Verification Layer** | Guarantees the correctness and transparency of all storage operations. | **Proof of Data Possession (PDP)** | | **Settlement Layer** | Handles programmable payments, rails settlement. | **Filecoin Pay** | | **Storage & Retrieval Service Layer** | Executes fast, retrievable, verifiable data operations. | **Filecoin Warm Storage Service (FWSS)**, **Filecoin Beam**. | @@ -111,13 +111,13 @@ PDP is a cryptographic protocol that allows storage providers to prove they poss - Small proofs for large data - Continuous verification (not one-time) -[Learn more about PDP →](/core-concepts/pdp-overview) +[Learn more about PDP →](/core-concepts/pdp-overview/) ### Payment Rails and Lockup Payment rails are **automated payment channels** that stream tokens from clients to providers at a specified rate (e.g., 100 USDFC/epoch). The lockup mechanism ensures providers get paid even if clients abandon their accounts by reserving funds for a guaranteed period. -[Learn more about Filecoin Pay →](/core-concepts/filecoin-pay-overview) +[Learn more about Filecoin Pay →](/core-concepts/filecoin-pay-overview/) ### Filecoin Warm Storage Service @@ -125,10 +125,10 @@ Filecoin Warm Storage Service is the **business logic layer** that combines PDP Service providers run Curio nodes that store data, submit proofs, and provide HTTP APIs for upload/download. -[Learn more about Filecoin Warm Storage Service →](/core-concepts/fwss-overview) +[Learn more about Filecoin Warm Storage Service →](/core-concepts/fwss-overview/) ## Next Steps Now that you understand the architecture, you can start building on Filecoin Onchain Cloud. -- [**Developer Guides**](/developer-guides) - Build with the SDK +- [**Developer Guides**](/developer-guides/synapse/) - Build with the SDK diff --git a/docs/src/content/docs/core-concepts/filecoin-pay-overview.mdx b/docs/src/content/docs/core-concepts/filecoin-pay-overview.mdx index 9cd96f758..cc1f01f1b 100644 --- a/docs/src/content/docs/core-concepts/filecoin-pay-overview.mdx +++ b/docs/src/content/docs/core-concepts/filecoin-pay-overview.mdx @@ -9,7 +9,6 @@ As the economic core of the Filecoin Onchain Cloud (FOC), **Filecoin Pay** embed With Filecoin Pay, service economics become as **transparent and composable** as the data services they underpin, enabling the Filecoin network to operate not just as a storage market but as a fully programmable cloud economy. - ## What is Filecoin Pay? In the Filecoin Onchain Cloud architecture, services (storage, retrieval) must be paired with economic incentives. Without a robust, verifiable settlement layer, the system risks: @@ -18,7 +17,6 @@ In the Filecoin Onchain Cloud architecture, services (storage, retrieval) must b - Clients paying upfront without verifiable service delivery. - Opaque billing and trust-based relationships that undermine decentralisation. - Filecoin Pay addresses these risks by tying **service delivery** (e.g., proofs of data possession, retrieval success) to **on-chain payment flows** that lets services run automated, controllable payment “rails” between a payer and a payee. 
It supports: - **Token-agnostic billing**: support for FIL, stablecoins, ERC-20 tokens. @@ -29,6 +27,7 @@ At its core, Filecoin Pay makes future payment guarantees explicit via lockups and enforces them in the contract’s accounting, so participants can reason clearly about risk, solvency, and termination behavior. ## How Filecoin Pay works + The Filecoin Pay smart contract enables ERC20 token payment flows through "rails" - enables automatic, verifiable, programmable settlement between users and service providers on the Filecoin Onchain Cloud. At its core, Filecoin Pay works by: @@ -41,39 +40,47 @@ At its core, Filecoin Pay works by: ### Core Building Blocks #### **Account** + Represents the client’s funds allocated for a service. + - Holds balances in **FIL or ERC-20 tokens** - Controlled by the user, not the provider - Can be topped up, withdrawn, and managed through smart contracts -#### **Payment Rail**: -A **automatic settlement channel** between a user and a provider for a specific service or data set. Payer ↔ Payee pairs can have multiple payment rails between them but they can also reuse the same rail across multiple deals. +#### **Payment Rail** + +An **automatic settlement channel** between a user and a provider for a specific service or data set. Payer ↔ Payee pairs can have multiple payment rails between them, but they can also reuse the same rail across multiple deals. Each rail defines: + - `token`: FIL, stablecoin, ERC-20 - `payer` & `payee`: clients & service providers - `operator` & `validator` - `paymentRate`: tokens per epoch - `lockupPeriod`: epochs of guaranteed cover after termination - `lockupFixed`: pool for one-time payments - `endEpoch`: Final epoch up to which the rail can be settled #### **Operator** + **Trusted smart contract** or entity that manages rails on behalf of payers with caps. A payer must explicitly approve an operator and grant it specific allowances, which act as a budget for how much the operator can spend or lock up on the payer's behalf. -The operator sometimes referred to as the "**service contract**", such as Filecoin Warm Storage Service, and File Beam CND service, etc. +The operator is sometimes referred to as the "**service contract**"; examples include the Filecoin Warm Storage Service and the File Beam CDN service. **Capabilities** of an approved operator: create rails, modify rail terms (rate, lockup), settle payments, terminate rails, execute one-time payments #### **Validator** + A validator is an optional contract that acts as a trusted arbitrator for a rail. Its primary role is to validate payments during settlement, but it also plays a crucial part in the rail's lifecycle, especially during termination. **Capabilities**: + - **Validate settlements**: Adjust payment amounts during settlement - **Veto terminations**: Block rail termination attempts - **Decide final payout**: Determine total payout after termination ### Lockup Mechanism + Each payment rail can be configured to require the payer to lock funds to guarantee future payments. The lockup mechanism ensures providers get paid even if clients abandon their accounts. It's a **safety guarantee**, not a pre-payment. This lockup is composed of two distinct components: @@ -92,17 +99,18 @@ This lockup is composed of two distinct components: - Reduced as payments are made ## Payment Flow -The life circle of a payment rail. +The life cycle of a payment rail.
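+
+Before walking through the lifecycle, a minimal TypeScript sketch of the rail shape, using the field names listed under Payment Rail above (the types and comments are illustrative assumptions, not the contract ABI):
+
+```ts
+// Illustrative only: field names mirror the list above; types are assumptions.
+interface PaymentRail {
+  token: string // FIL, stablecoin, or ERC-20 token address
+  payer: string // the client paying for the service
+  payee: string // the service provider being paid
+  operator: string // approved service contract that manages the rail
+  validator?: string // optional arbitrator contract
+  paymentRate: bigint // tokens streamed per epoch
+  lockupPeriod: bigint // epochs of guaranteed cover after termination
+  lockupFixed: bigint // pool for one-time payments
+  endEpoch: bigint // final epoch up to which the rail can be settled
+}
+```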
1) **Funding and approvals** - + Deposit ERC-20 or FIL into the payer’s internal account 2) **Rail creation and configuration** - Operator `createRail` & Configure lockups via `modifyRailLockup` - Start/adjust streaming via `modifyRailPayment` -3) **Settlement** +3) **Settlement** - Active rails: can only settle up to payer’s `lockupLastSettledAt` - Terminated rails: can settle up to `endEpoch` using the streaming lockup @@ -112,14 +120,13 @@ The life circle of a payment rail. - After settlement reaches `endEpoch`, the rail is finalized and zeroed; operator usage is released. 5) **Escape hatch** - + If a validator malfunctions, the payer can pay in full for the guaranteed window and unlock funds. **Normal Operation** (before termination): Payments flow from general account funds, lockup remains untouched. **Safety Hatch** (after termination): Provider can settle up to `endEpoch` using locked funds, even if client account is empty. - ```mermaid stateDiagram-v2 [*] --> Active: Rail created, funds locked @@ -132,7 +139,7 @@ stateDiagram-v2 **Example**: -``` +```txt Rail: 1 USDFC/epoch, 2,880 epoch lockup period Terminated at epoch 10,000 Provider can settle through epoch 12,880 using the 2,880 USDFC lockup diff --git a/docs/src/content/docs/core-concepts/fwss-overview.mdx b/docs/src/content/docs/core-concepts/fwss-overview.mdx index 82f5093a1..18876200c 100644 --- a/docs/src/content/docs/core-concepts/fwss-overview.mdx +++ b/docs/src/content/docs/core-concepts/fwss-overview.mdx @@ -24,7 +24,7 @@ This service transforms Filecoin from an archival storage network into a data se Together, FWSS enables builders to depend on Filecoin not only for “store and forget,” but for “store and serve” in a verifiable, programmable manner. -#### **Core Responsibilities** +### Core Responsibilities WarmStorage manages the complete storage marketplace: @@ -72,7 +72,7 @@ interface PDPListener { } ``` -##### _`dataSetCreated()`_ +#### _`dataSetCreated()`_ When a new data set is created in `PDPVerifier`, this function is called to set up payment infrastructure and validate the client operation. @@ -83,7 +83,7 @@ When a new data set is created in `PDPVerifier`, this function is called to set 3. **Metadata storage**: Save data set metadata (category, project, etc.) 4. **Payment rail creation**: Establish automated payment channel -##### _`piecesAdded()`_ +#### _`piecesAdded()`_ When pieces are added to an existing data set, this function is called to store piece metadata and update payment rail configuration if needed. @@ -92,11 +92,11 @@ When pieces are added to an existing data set, this function is called to store - Payment rail creation deferred to first piece upload - Enables data sets without immediate storage -##### _`possessionProven()`_ +#### _`possessionProven()`_ When a storage provider successfully completes a proof verification, this function is called to record the successful proof completion. -##### _`nextProvingPeriod()`_ +#### _`nextProvingPeriod()`_ When a proof window expires without a valid proof (indicating a fault), this function is called to handle the failure and apply penalties. @@ -146,7 +146,7 @@ price: `14 USDFC per TiB of egress used` :::note **FWSS Lockup Model:** -FWSS uses a simple fixed lockup: you always prepay for 30 days of active service. +FWSS uses a simple fixed lockup: you always prepay for 30 days of active service. If your balance drops below this threshold, you risk default—at which point the service provider may begin removing your data. 
There is no variable lockup or complex collateral calculation: just keep your balance above 30 days to avoid disruption. ::: @@ -156,7 +156,7 @@ If your balance drops below this threshold, you risk default—at which point th FWSS enables the Filecoin Onchain Cloud to behave not just as a passive archive, but as a live data service: - Storage providers are continuously verified via PDP -- Clients gain access to retrievable, service‐ready data +- Clients gain access to retrievable, service-ready data - Payments are automated via onchain rails linked to proof events -By linking upload, proof, retrieval, and payment, FWSS delivers cloud-style storage with blockchain‐grade guarantees. +By linking upload, proof, retrieval, and payment, FWSS delivers cloud-style storage with blockchain-grade guarantees. diff --git a/docs/src/content/docs/core-concepts/pdp-overview.mdx b/docs/src/content/docs/core-concepts/pdp-overview.mdx index 3e8f4d748..2fd9cf436 100644 --- a/docs/src/content/docs/core-concepts/pdp-overview.mdx +++ b/docs/src/content/docs/core-concepts/pdp-overview.mdx @@ -64,7 +64,7 @@ sequenceDiagram end ``` -#### **Step-by-step Summary:** +### Step-by-step Summary 1. **Data Upload**: The Client uploads file in a data set to a PDP-enabled Service Provider which will stores the file, and add piece in the data set with the PDP contract onchain. diff --git a/docs/src/content/docs/developer-guides/components.mdx b/docs/src/content/docs/developer-guides/components.mdx index 8bd8779da..5414ed12d 100644 --- a/docs/src/content/docs/developer-guides/components.mdx +++ b/docs/src/content/docs/developer-guides/components.mdx @@ -71,7 +71,7 @@ The SDK is organized into three layers, each serving a specific purpose: **Purpose**: Main SDK entry point with simple, high-level API -**API Reference**: [Synapse API Reference](/api/filoz/synapse-sdk/synapse/classes/synapse/) +**API Reference**: [Synapse API Reference](/reference/filoz/synapse-sdk/synapse/classes/synapse/) **Synapse Interface**: @@ -84,7 +84,8 @@ import { FilecoinNetworkType, } from "@filoz/synapse-sdk"; import { ethers } from "ethers"; -import type { PaymentsService, StorageManager } from "@filoz/synapse-sdk"; +import type { PaymentsService } from "@filoz/synapse-sdk/payments"; +import type { StorageManager } from "@filoz/synapse-sdk/storage"; // ---cut--- interface SynapseAPI { // Create a new Synapse instance @@ -119,9 +120,9 @@ The PaymentsService provides direct access to the Filecoin Pay contract, enablin This is your primary interface for all payment-related operations in the SDK. -**API Reference**: [PaymentsService API Reference](/api/filoz/synapse-sdk/synapse/classes/paymentsservice/) +**API Reference**: [PaymentsService API Reference](/reference/filoz/synapse-sdk/payments/classes/paymentsservice/) -Check out the [Payment Operations](/developer-guides/payments) guide for more details. +Check out the [Payment Operations](/developer-guides/payments/payment-operations/) guide for more details. **Payments Service Interface**: @@ -130,10 +131,9 @@ import { RailInfo, TokenIdentifier, TokenAmount, - DepositOptions, - PaymentsService, SettlementResult, } from "@filoz/synapse-sdk"; +import type { PaymentsService, DepositOptions } from "@filoz/synapse-sdk/payments"; import { ethers } from "ethers"; import { TOKENS } from "@filoz/synapse-sdk"; interface AccountInfo { @@ -218,9 +218,9 @@ interface PaymentsServiceAPI { **Purpose**: High-level, auto-managed storage operations - upload and download data to and from the Filecoin Onchain Cloud. 
-**API Reference**: [StorageManager API Reference](/api/filoz/synapse-sdk/synapse/classes/storagemanager/) +**API Reference**: [StorageManager API Reference](/reference/filoz/synapse-sdk/storage/classes/storagemanager/) -Check out the [Storage Operations](/developer-guides/storage) guide for more details. +Check out the [Storage Operations](/developer-guides/storage/storage-operations/) guide for more details. **Storage Manager Interface**: @@ -232,8 +232,8 @@ import { EnhancedDataSetInfo, StorageServiceOptions, PreflightInfo, - StorageContext, } from "@filoz/synapse-sdk"; +import type { StorageContext } from "@filoz/synapse-sdk/storage"; import { ethers } from "ethers"; interface StorageManagerUploadOptions {} @@ -268,15 +268,15 @@ interface StorageManagerAPI { **Purpose**: Provider-specific storage operations - upload and download data to and from the Filecoin Onchain Cloud. -**API Reference**: [StorageContext API Reference](/api/filoz/synapse-sdk/synapse/classes/storagecontext/) +**API Reference**: [StorageContext API Reference](/reference/filoz/synapse-sdk/storage/classes/storagecontext/) -Check out the [Storage Context](/developer-guides/storage/storage-context) guide for more details. +Check out the [Storage Context](/developer-guides/storage/storage-context/) guide for more details. ### WarmStorageService **Purpose**: SDK client for storage coordination and pricing - storage pricing and cost calculations, data set management and queries, metadata operations (data sets and pieces), service provider approval management, contract address discovery, data set creation verification. -**API Reference**: [WarmStorageService API Reference](/api/filoz/synapse-sdk/synapse/classes/warmstorageservice/) +**API Reference**: [WarmStorageService API Reference](/reference/filoz/synapse-sdk/warmstorage/classes/warmstorageservice/) **WarmStorageService Interface**: @@ -284,115 +284,115 @@ Check out the [Storage Context](/developer-guides/storage/storage-context) guide import { DataSetInfo, EnhancedDataSetInfo, - WarmStorageService, -} from "@filoz/synapse-sdk"; -import { ethers } from "ethers"; -import type { PaymentsService } from "@filoz/synapse-sdk"; +} from "@filoz/synapse-sdk" +import { WarmStorageService } from '@filoz/synapse-sdk/warm-storage' +import { ethers } from "ethers" +import type { PaymentsService } from "@filoz/synapse-sdk/payments" type ServicePriceInfo = { - pricePerTiBPerMonthNoCDN: bigint; - pricePerTiBCdnEgress: bigint; - pricePerTiBCacheMissEgress: bigint; - tokenAddress: string; - epochsPerMonth: bigint; - minimumPricePerMonth: bigint; -}; + pricePerTiBPerMonthNoCDN: bigint + pricePerTiBCdnEgress: bigint + pricePerTiBCacheMissEgress: bigint + tokenAddress: string + epochsPerMonth: bigint + minimumPricePerMonth: bigint +} type DataSetCreationVerification = { - transactionMined: boolean; - transactionSuccess: boolean; - dataSetId?: number; - dataSetLive: boolean; - blockNumber?: number; - gasUsed?: bigint; - error?: string; -}; + transactionMined: boolean + transactionSuccess: boolean + dataSetId?: number + dataSetLive: boolean + blockNumber?: number + gasUsed?: bigint + error?: string +} type CheckAllowanceForStorageResult = { - rateAllowanceNeeded: bigint; - lockupAllowanceNeeded: bigint; - currentRateAllowance: bigint; - currentLockupAllowance: bigint; - currentRateUsed: bigint; - currentLockupUsed: bigint; - sufficient: boolean; - message?: string; + rateAllowanceNeeded: bigint + lockupAllowanceNeeded: bigint + currentRateAllowance: bigint + currentLockupAllowance: bigint + 
currentRateUsed: bigint + currentLockupUsed: bigint + sufficient: boolean + message?: string costs: { - perEpoch: bigint; - perDay: bigint; - perMonth: bigint; - }; - depositAmountNeeded: bigint; -}; -type Transaction = Promise; + perEpoch: bigint + perDay: bigint + perMonth: bigint + } + depositAmountNeeded: bigint +} +type Transaction = Promise type CalculateStorageCostResult = { - perEpoch: bigint; - perDay: bigint; - perMonth: bigint; + perEpoch: bigint + perDay: bigint + perMonth: bigint withCDN: { - perEpoch: bigint; - perDay: bigint; - perMonth: bigint; - }; -}; + perEpoch: bigint + perDay: bigint + perMonth: bigint + } +} // ---cut--- interface WarmStorageServiceAPI { // Factory Method create( provider: ethers.Provider, warmStorageAddress: string - ): Promise; + ): Promise // Data Set Queries - getDataSet(dataSetId: number): Promise; - getClientDataSets(clientAddress: string): Promise; + getDataSet(dataSetId: number): Promise + getClientDataSets(clientAddress: string): Promise getClientDataSetsWithDetails( client: string, onlyManaged?: boolean - ): Promise; + ): Promise // Metadata Operations - getDataSetMetadata(dataSetId: number): Promise>; + getDataSetMetadata(dataSetId: number): Promise> getDataSetMetadataByKey( dataSetId: number, key: string - ): Promise; + ): Promise getPieceMetadata( dataSetId: number, pieceId: number - ): Promise>; + ): Promise> getPieceMetadataByKey( dataSetId: number, pieceId: number, key: string - ): Promise; + ): Promise // Pricing & Cost Calculations - getServicePrice(): Promise; + getServicePrice(): Promise calculateStorageCost( sizeInBytes: number - ): Promise; + ): Promise checkAllowanceForStorage( sizeInBytes: number, withCDN: boolean, paymentsService: PaymentsService, lockupDays?: number - ): Promise; + ): Promise // Data Set Management - terminateDataSet(signer: ethers.Signer, dataSetId: number): Transaction; + terminateDataSet(signer: ethers.Signer, dataSetId: number): Transaction topUpCDNPaymentRails( signer: ethers.Signer, dataSetId: number, cdnAmountToAdd: bigint, cacheMissAmountToAdd: bigint - ): Transaction; + ): Transaction - getApprovedProviderIds(): Promise; - isProviderIdApproved(providerId: number): Promise; + getApprovedProviderIds(): Promise + isProviderIdApproved(providerId: number): Promise // Proving Period - getMaxProvingPeriod(): Promise; - getChallengeWindow(): Promise; + getMaxProvingPeriod(): Promise + getChallengeWindow(): Promise } ``` @@ -402,12 +402,12 @@ interface WarmStorageServiceAPI { **Purpose**: Client for PDPVerifier contract - get dataset and piece status, create data sets and add pieces. -**API Reference**: [PDPVerifier API Reference](/api/filoz/synapse-sdk/synapse/classes/pdpverifier/) +**API Reference**: [PDPVerifier API Reference](/reference/filoz/synapse-sdk/pdp/classes/pdpverifier/) **PDPVerifier Example**: ```ts twoslash -import { PDPVerifier } from "@filoz/synapse-sdk"; +import { PDPVerifier } from "@filoz/synapse-sdk/pdp"; import { ethers } from "ethers"; const provider = null as unknown as ethers.Provider; const pdpVerifierAddress = null as unknown as string; @@ -438,16 +438,16 @@ const extractedId = await pdpVerifier.extractDataSetIdFromReceipt(transactionRec This component is used internally by the SDK but exposed for developers who need explicit control over signature generation and authentication flows. 
::: -**API Reference**: [PDPAuthHelper API Reference](/api/filoz/synapse-sdk/synapse/classes/pdpauthhelper/) +**API Reference**: [PDPAuthHelper API Reference](/reference/filoz/synapse-sdk/pdp/classes/pdpauthhelper/) **PDPAuthHelper Example**: ```ts twoslash import { - PDPAuthHelper, type MetadataEntry, type PieceCID, } from "@filoz/synapse-sdk"; +import { PDPAuthHelper } from "@filoz/synapse-sdk/pdp"; import { ethers } from "ethers"; const warmStorageAddress = null as unknown as string; const signer = null as unknown as ethers.Signer; @@ -498,18 +498,17 @@ const deleteDataSetSig = await authHelper.signDeleteDataSet(clientDataSetId); This component is used internally by the SDK but exposed for developers who need explicit control over provider communication and low-level storage operations. ::: -**API Reference**: [PDPServer API Reference](/api/filoz/synapse-sdk/synapse/classes/pdpserver/) +**API Reference**: [PDPServer API Reference](/reference/filoz/synapse-sdk/pdp/classes/pdpserver/) **PDPServer Example**: ```ts twoslash import { - PDPServer, - PDPAuthHelper, PieceCID, MetadataEntry, DataSetData, } from "@filoz/synapse-sdk"; +import { PDPServer, PDPAuthHelper } from "@filoz/synapse-sdk/pdp"; import { ethers } from "ethers"; const warmStorageAddress = null as unknown as string; const signer = null as unknown as ethers.Signer; @@ -518,7 +517,7 @@ const clientDataSetId = null as unknown as bigint; const payee = null as unknown as string; const dataSetId = null as unknown as number; const pieceDataArray = null as unknown as PieceCID[] | string[]; -const data = null as unknown as Uint8Array | ArrayBuffer; +const data = null as unknown as Uint8Array | AsyncIterable | ReadableStream; const datasetMetadata = null as unknown as MetadataEntry[]; const piecesMetadata = null as unknown as MetadataEntry[][]; const payer = null as unknown as string; @@ -575,53 +574,7 @@ const dataSet = await pdpServer.getDataSet(dataSetId); console.log(`Dataset ${dataSet.id}: ${dataSet.pieces.length} pieces`); ``` -### PieceCID Utilities - -**Purpose**: Calculate PieceCIDs, convert between formats, and get the size of a specific piece. - -**API Reference**: [PieceCID Utilities API Reference](/api/filoz/synapse-sdk/piece/readme) - -**PieceCID Utilities Example**: - -```typescript -import { - calculate, - asPieceCID, - asLegacyPieceCID, - createPieceCIDStream, - getSizeFromPieceCID, -} from "@filoz/synapse-sdk/piece"; - -// Calculate PieceCID from data -const data = new Uint8Array([1, 2, 3, 4]); -const pieceCid = calculate(data); -console.log(pieceCid.toString()); // bafkzcib... 
- -// Validate and convert PieceCID string -const converted = asPieceCID( - "bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy" -); -if (converted !== null) { - console.log("Valid PieceCID:", converted.toString()); -} - -// Extract size from PieceCID -const size = getSizeFromPieceCID(pieceCid); -console.log(`Piece size: ${size} bytes`); - -// Stream-based PieceCID calculation (Web Streams API compatible) -const { stream, getPieceCID } = createPieceCIDStream(); -// Pipe data through stream, then call getPieceCID() for result - -// Convert to LegacyPieceCID for compatibility with external Filecoin services -const legacyPieceCid = asLegacyPieceCID(convertedPieceCid); -if (legacyPieceCid !== null) { - console.log("Valid LegacyPieceCID:", legacyPieceCid.toString()); - // Valid LegacyPieceCID: baga6ea4seaqdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy -} -``` - -### Complete Data Flow +## Complete Data Flow This sequence diagram shows the complete lifecycle of a file upload operation, from initialization through verification. Each step represents an actual blockchain transaction or API call. @@ -678,18 +631,18 @@ sequenceDiagram Choose your learning path based on your immediate needs: -#### Ready to Build? +### Ready to Build? Jump straight to code with the [**Getting Started Guide →**](/getting-started/) -- [**Storage Operations →**](/developer-guides/storage) - Upload and download your first file -- [**Storage Context →**](/developer-guides/storage/storage-context) - Advanced storage operations and batch uploads -- [**Payment Operations →**](/developer-guides/payments) - Fund your account and manage payments -- [**Rails & Settlement →**](/developer-guides/payments/rails-settlement) - Payment mechanics and settlement strategies +- [**Storage Operations →**](/developer-guides/storage/storage-operations/) - Upload and download your first file +- [**Storage Context →**](/developer-guides/storage/storage-context/) - Advanced storage operations and batch uploads +- [**Payment Operations →**](/developer-guides/payments/payment-operations/) - Fund your account and manage payments +- [**Rails & Settlement →**](/developer-guides/payments/rails-settlement/) - Payment mechanics and settlement strategies -#### Want to Learn More? +### Want to Learn More? - [**Architecture →**](/core-concepts/architecture/) - Understanding how all components work together -- [**PDP Overview →**](/core-concepts/pdp-overview) - Proof verification and data integrity -- [**Filecoin Pay →**](/core-concepts/filecoin-pay-overview) - Payment rails and lockup mechanisms -- [**Warm Storage Service →**](/core-concepts/fwss-overview) - Storage coordination and pricing model +- [**PDP Overview →**](/core-concepts/pdp-overview/) - Proof verification and data integrity +- [**Filecoin Pay →**](/core-concepts/filecoin-pay-overview/) - Payment rails and lockup mechanisms +- [**Warm Storage Service →**](/core-concepts/fwss-overview/) - Storage coordination and pricing model diff --git a/docs/src/content/docs/guides/migration-guide.mdx b/docs/src/content/docs/developer-guides/migration-guide.md similarity index 88% rename from docs/src/content/docs/guides/migration-guide.mdx rename to docs/src/content/docs/developer-guides/migration-guide.md index a4997a0d4..1fd7d40e3 100644 --- a/docs/src/content/docs/guides/migration-guide.mdx +++ b/docs/src/content/docs/developer-guides/migration-guide.md @@ -1,10 +1,43 @@ --- -title: Migration guide +title: Migration Guide description: Learn how to migrate to newer versions of the SDK. 
sidebar:
  order: 0
---
+If you are coming from an earlier version of any of the Synapse packages, you will need to update the APIs listed below.
+
+---
+
+## `@filoz/synapse-sdk` 0.37.0
+
+The main entrypoint `@filoz/synapse-sdk` no longer exports all the other modules. From this version onwards it only exports the `Synapse` class, constants, and types. Check the [reference](/reference/filoz/synapse-sdk/synapse/toc/) for the current exports.
+
+### Action: Change `import` statements
+
+```ts
+// before
+import {
+  PaymentsService,
+  PDPAuthHelper,
+  PDPServer,
+  PDPVerifier,
+  SessionKey,
+  StorageContext,
+  StorageManager,
+  WarmStorageService
+} from '@filoz/synapse-sdk'
+
+// after
+import { PaymentsService } from '@filoz/synapse-sdk/payments'
+import { PDPAuthHelper, PDPServer, PDPVerifier } from '@filoz/synapse-sdk/pdp'
+import { SessionKey } from '@filoz/synapse-sdk/session'
+import { StorageContext, StorageManager } from '@filoz/synapse-sdk/manager'
+import { WarmStorageService } from '@filoz/synapse-sdk/warm-storage'
+```

## 0.24.0+

@@ -37,7 +70,7 @@ import { WarmStorageService } from '@filoz/synapse-sdk/warm-storage'

#### Type Name Changes

| Old Type (< v0.24.0) | New Type (v0.24.0+) |
-|----------------------|---------------------|
+| ---------------------- | --------------------- |
| `ProofSetId` | `DataSetId` |
| `RootData` | `PieceData` |
| `ProofSetInfo` | `DataSetInfo` |

diff --git a/docs/src/content/docs/developer-guides/payments/_meta.yml b/docs/src/content/docs/developer-guides/payments/_meta.yml
new file mode 100644
index 000000000..aa06d6c7b
--- /dev/null
+++ b/docs/src/content/docs/developer-guides/payments/_meta.yml
@@ -0,0 +1,3 @@
+label: Payments
+collapsed: true
+order: 4
\ No newline at end of file

diff --git a/docs/src/content/docs/developer-guides/payments/index.mdx b/docs/src/content/docs/developer-guides/payments/payment-operations.mdx
similarity index 96%
rename from docs/src/content/docs/developer-guides/payments/index.mdx
rename to docs/src/content/docs/developer-guides/payments/payment-operations.mdx
index 4d0ab3a01..fe87fc1a8 100644
--- a/docs/src/content/docs/developer-guides/payments/index.mdx
+++ b/docs/src/content/docs/developer-guides/payments/payment-operations.mdx
@@ -7,11 +7,11 @@ sidebar:

The Synapse SDK uses [**USDFC**](https://app.usdfc.net/) (a Filecoin-native stablecoin) for storage payments. Before uploading files, you must fund your account and approve operators.

-This guide covers the essential operations. For advanced topics (understanding rails, settlement strategies), see [Rails & Settlement](/developer-guides/payments/rails-settlement).
+This guide covers the essential operations. For advanced topics (understanding rails, settlement strategies), see [Rails & Settlement](/developer-guides/payments/rails-settlement/).

Before working with payments, familiarize yourself with these fundamental concepts:

-### Key Concepts
+## Key Concepts

- **USDFC Token**: Stablecoin on Filecoin which is used for all storage payments. The protocol requires USDFC approval before operations.
- **Payment Rails**: Continuous payment streams created automatically when you upload files. Rate is fixed per epoch (see the sketch below).
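To make "fixed per epoch" concrete: Filecoin epochs are 30 seconds long, so a 30-day month contains 86,400 of them (2,880 per day), and a rail's per-epoch rate is simply the monthly price divided by that count. A minimal sketch, assuming USDFC uses 18 decimals and using the $0.06/month minimum data-set price documented in the storage-costs guide as an illustrative input:

```ts
import { ethers } from "ethers";

// Illustrative only: derive a per-epoch rail rate from a 30-day price.
const pricePerMonth = ethers.parseUnits("0.06", 18); // USDFC base units (assumes 18 decimals)
const EPOCHS_PER_MONTH = 86_400n; // 2,880 thirty-second epochs/day × 30 days
const ratePerEpoch = pricePerMonth / EPOCHS_PER_MONTH; // bigint division truncates
console.log(`${ethers.formatUnits(ratePerEpoch, 18)} USDFC per epoch`);
```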
@@ -173,6 +173,6 @@ console.log("Max lockup period:", approval.maxLockupPeriod); ## Next Steps -- [Understanding Rails & Settlement](/developer-guides/payments/rails-settlement) - Deep dive into payment mechanics -- [Storage Costs & Budgeting](/developer-guides/storage/storage-costs) - Plan your storage budget -- **Ready to upload files?** You now have the basics. [Start uploading →](/developer-guides/storage/) +- [Understanding Rails & Settlement](/developer-guides/payments/rails-settlement/) - Deep dive into payment mechanics +- [Storage Costs & Budgeting](/developer-guides/storage/storage-costs/) - Plan your storage budget +- **Ready to upload files?** You now have the basics. [Start uploading →](/developer-guides/storage/storage-operations/) diff --git a/docs/src/content/docs/developer-guides/payments/rails-settlement.mdx b/docs/src/content/docs/developer-guides/payments/rails-settlement.mdx index 00229eb3a..0578de456 100644 --- a/docs/src/content/docs/developer-guides/payments/rails-settlement.mdx +++ b/docs/src/content/docs/developer-guides/payments/rails-settlement.mdx @@ -130,27 +130,6 @@ Settlement is the process of executing the accumulated payments in a rail. Until - **Flexibility**: Allows validators to adjust payments if needed - **Finality**: Makes funds available for withdrawal -### Settlement Fee - -:::warning[FIL Required for Settlement] -**Every settlement operation requires 0.0013 FIL** in your wallet (separate from USDFC deposits). This fee is burned to the Filecoin network and is **NOT** returned. Ensure you have sufficient FIL balance before attempting settlement operations. -::: - -Settlement operations require a network fee that is burned (permanently removed from circulation), effectively paying the Filecoin network for providing the settlement service: - -- **Amount**: 0.0013 FIL (defined as `SETTLEMENT_FEE` constant) -- **Mechanism**: The fee is burned to Filecoin's burn actor, `f099` (also known as address `0xff00000000000000000000000000000000000063`), reducing FIL supply -- **Purpose**: This burn mechanism compensates the network for processing and securing payment settlements -- **Automatic**: The SDK automatically includes this fee when calling settlement methods - -```ts twoslash -import { ethers } from "ethers"; -// ---cut--- -import { SETTLEMENT_FEE } from "@filoz/synapse-sdk"; -console.log(`Settlement fee: ${ethers.formatEther(SETTLEMENT_FEE)} FIL`); -// This fee is burned to the network, not paid to any party -``` - ### Performing Settlement #### Automatic Settlement (Recommended) @@ -374,6 +353,6 @@ try { ## Next Steps -- Learn about [Service Approvals](/developer-guides/payments/) for managing operator permissions -- Explore [Storage Management](/developer-guides/storage/) which creates payment rails automatically -- See [Payment Operations](/developer-guides/payments/) for comprehensive payment management guide +- Learn about [Service Approvals](/developer-guides/payments/payment-operations/#approving-operators) for managing operator permissions +- Explore [Storage Management](/developer-guides/storage/storage-operations/) which creates payment rails automatically +- See [Payment Operations](/developer-guides/payments/payment-operations/) for comprehensive payment management guide diff --git a/docs/src/content/docs/developer-guides/react-integration.mdx b/docs/src/content/docs/developer-guides/react-integration.mdx index 82b51df80..914fda016 100644 --- a/docs/src/content/docs/developer-guides/react-integration.mdx +++ 
b/docs/src/content/docs/developer-guides/react-integration.mdx @@ -116,13 +116,13 @@ const SimpleMetaMaskExample = () => { ## Next Steps -#### Ready to Build? +### Ready to Build? Jump straight to code with the [**Getting Started Guide →**](/getting-started/) -- [**Storage Operations →**](/developer-guides/storage) - Upload and download your first file -- [**Storage Context →**](/developer-guides/storage/storage-context) - Advanced storage operations and batch uploads -- [**Payment Operations →**](/developer-guides/payments) - Fund your account and manage payments -- [**Rails & Settlement →**](/developer-guides/payments/rails-settlement) - Payment mechanics and settlement strategies +- [**Storage Operations →**](/developer-guides/storage/storage-operations/) - Upload and download your first file +- [**Storage Context →**](/developer-guides/storage/storage-context/) - Advanced storage operations and batch uploads +- [**Payment Operations →**](/developer-guides/payments/payment-operations/) - Fund your account and manage payments +- [**Rails & Settlement →**](/developer-guides/payments/rails-settlement/) - Payment mechanics and settlement strategies **Resources**: [Wagmi](https://wagmi.sh) · [Ethers v6](https://docs.ethers.org/v6/) · [React](https://react.dev/learn) · [Filecoin](https://docs.filecoin.io) diff --git a/docs/src/content/docs/developer-guides/storage/_meta.yml b/docs/src/content/docs/developer-guides/storage/_meta.yml new file mode 100644 index 000000000..bfed27597 --- /dev/null +++ b/docs/src/content/docs/developer-guides/storage/_meta.yml @@ -0,0 +1,3 @@ +label: Storage +collapsed: true +order: 5 diff --git a/docs/src/content/docs/developer-guides/storage/storage-context.mdx b/docs/src/content/docs/developer-guides/storage/storage-context.mdx index 6c194b12b..41bfb1e5c 100644 --- a/docs/src/content/docs/developer-guides/storage/storage-context.mdx +++ b/docs/src/content/docs/developer-guides/storage/storage-context.mdx @@ -10,13 +10,13 @@ This guide is for developers who need fine-grained control over storage operatio You'll learn about explicit provider selection, batch uploads, lifecycle management, and download strategies. **Audience**: Experienced developers building production applications -**Prerequisites**: Complete the [Storage Operations Guide](./index) first +**Prerequisites**: Complete the [Storage Operations Guide](/developer-guides/storage/storage-operations/) first **When to use this**: Batch operations, custom callbacks, specific provider requirements, advanced error handling ::: ## Storage Context Overview -A Storage Context represents a connection to a specific storage provider and data set. Unlike the auto-managed approach in the [Storage Operations Guide](./index), contexts give you explicit control over these key capabilities: +A Storage Context represents a connection to a specific storage provider and data set. 
Unlike the auto-managed approach in the [Storage Operations Guide](/developer-guides/storage/storage-operations/), contexts give you explicit control over these key capabilities:

- **Provider Selection**: Choose specific providers for your data
- **Data Set Management**: Create, reuse, and manage data sets explicitly
@@ -24,7 +24,7 @@ A Storage Context represents a connection to a specific storage provider and dat
- **Lifecycle Control**: Terminate data sets and delete pieces when needed
- **Download Strategies**: Choose between SP-agnostic and SP-specific retrieval

-This guide assumes you've already completed the [Storage Operations Guide](./index) and understand the basics of uploading and downloading data.
+This guide assumes you've already completed the [Storage Operations Guide](/developer-guides/storage/storage-operations/) and understand the basics of uploading and downloading data.

### Creating a Storage Context

@@ -137,6 +137,7 @@ The SDK intelligently manages data sets to minimize on-chain transactions. The s

```ts twoslash
import {
  PieceCID,
+  PieceRecord,
  UploadResult,
  ProviderInfo,
  PreflightInfo,
@@ -145,20 +146,18 @@ import {
import { ethers } from "ethers";
type Transaction = Promise<ethers.TransactionResponse>;
type Hex = `0x${string}`;
-/**
- * Callbacks for tracking upload progress
- *
- * These callbacks provide visibility into the upload process stages:
- * 1. Upload completion (piece uploaded to provider)
- * 2. Piece addition (transaction submitted to chain)
- * 3. Confirmation (transaction confirmed on-chain)
- */
export interface UploadCallbacks {
+  /** Called periodically during upload with bytes uploaded so far */
+  onProgress?: (bytesUploaded: number) => void;
  /** Called when upload to service provider completes */
  onUploadComplete?: (pieceCid: PieceCID) => void;
-  /** Called when the service provider has added the piece and submitted the transaction to the chain */
+  /** Called when the service provider has added the piece(s) and submitted the transaction to the chain */
+  onPiecesAdded?: (transaction?: Hex, pieces?: { pieceCid: PieceCID }[]) => void;
+  /** @deprecated Use onPiecesAdded instead */
  onPieceAdded?: (transaction?: Hex) => void;
-  /** Called when the service provider agrees that the piece addition is confirmed on-chain */
+  /** Called when the service provider agrees that the piece addition(s) are confirmed on-chain */
+  onPiecesConfirmed?: (dataSetId: number, pieces: PieceRecord[]) => void;
+  /** @deprecated Use onPiecesConfirmed instead */
  onPieceConfirmed?: (pieceIds: number[]) => void;
}

@@ -166,14 +165,14 @@
 * Options for uploading individual pieces to an existing storage context
 * @param metadata - Custom metadata for this specific piece (key-value pairs)
 * @param onUploadComplete - Called when upload to service provider completes
- * @param onPieceAdded - Called when the service provider has added the piece and submitted the transaction to the chain
- * @param onPieceConfirmed - Called when the service provider agrees that the piece addition is confirmed on-chain
+ * @param onPiecesAdded - Called when the service provider has added the piece(s) and submitted the transaction to the chain
+ * @param onPiecesConfirmed - Called when the service provider agrees that the piece addition(s) are confirmed on-chain and provides the dataSetId
 */
type UploadOptions = {
  metadata?: Record<string, string>;
  onUploadComplete?: (pieceCid: PieceCID) => void;
-  onPieceAdded?: (transaction?: Hex) => void;
-  onPieceConfirmed?: (pieceIds: number[]) => void;
+  onPiecesAdded?:
(transaction?: Hex, pieces?: { pieceCid: PieceCID }[]) => void; + onPiecesConfirmed?: (dataSetId: number, pieces: PieceRecord[]) => void; }; // ---cut--- interface StorageContextAPI { @@ -241,13 +240,23 @@ const { pieceCid, size, pieceId } = await storageContext.upload(data, { `Uploaded PieceCID: ${piece.toV1().toString()} to storage provider!` ); }, - onPieceAdded: (hash) => { + onPiecesAdded: (hash, pieces) => { console.log( `🔄 Waiting for transaction to be confirmed on chain (txHash: ${hash})` ); + console.log( + `Batch includes PieceCIDs: ${ + pieces?.map(({ pieceCid }) => pieceCid.toString()).join(", ") ?? "" + }` + ); }, - onPieceConfirmed: () => { - console.log("Data pieces added to data set successfully"); + onPiecesConfirmed: (dataSetId, pieces) => { + console.log(`Data set ${dataSetId} confirmed with provider`); + console.log( + `Piece ID mapping: ${pieces + .map(({ pieceId, pieceCid }) => `${pieceId}:${pieceCid}`) + .join(", ")}` + ); }, }); @@ -266,7 +275,7 @@ console.log(`Data set last proven: ${status.dataSetLastProven}`); console.log(`Data set next proof due: ${status.dataSetNextProofDue}`); ``` -##### Efficient Batch Uploads +#### Efficient Batch Uploads When uploading multiple files, the SDK automatically batches operations for efficiency. Due to blockchain transaction ordering requirements, uploads are processed sequentially. To maximize efficiency: The SDK batches up to 32 uploads by default (configurable via `uploadBatchSize`). If you have more than 32 files, they'll be processed in multiple batches automatically. @@ -409,18 +418,18 @@ Note: When `withCDN: true` is set, it adds `{ withCDN: '' }` to the data set's m Now that you understand Storage Context and advanced operations: -- **[Calculate Storage Costs →](./storage-costs)** +- **[Calculate Storage Costs →](/developer-guides/storage/storage-costs/)** Plan your budget and fund your storage account. _Use the quick calculator to estimate monthly costs._ -- **[Storage Operations Basics →](./index)** +- **[Storage Operations Basics →](/developer-guides/storage/storage-operations/)** Review fundamental storage concepts and auto-managed operations. _Good for a refresher on the simpler approach._ -- **[Component Architecture →](/developer-guides/components)** +- **[Component Architecture →](/developer-guides/components/)** Understand how StorageContext fits into the SDK design. _Deep dive into the component architecture._ -- **[Payment Management →](/developer-guides/payments/)** +- **[Payment Management →](/developer-guides/payments/payment-operations/)** Manage deposits, approvals, and payment rails. _Required before your first upload._ diff --git a/docs/src/content/docs/developer-guides/storage/storage-costs.mdx b/docs/src/content/docs/developer-guides/storage/storage-costs.mdx index cb142a3cc..4fde526e5 100644 --- a/docs/src/content/docs/developer-guides/storage/storage-costs.mdx +++ b/docs/src/content/docs/developer-guides/storage/storage-costs.mdx @@ -17,11 +17,11 @@ An **epoch** is Filecoin's block time, which is 30 seconds. 
Storage costs are ca ### Pricing Components -| Component | Cost | Notes | +| Component | Cost | Notes | | -------------- | ------------------ | -------------------------------------------------------- | -| **Storage** | $2.50/TiB/month | Minimum $0.06/month per data set (~24.567 GiB threshold) | -| **CDN Egress** | $14/TiB downloaded | 1 USDFC top-up ≈ 71.5 GiB of downloads | -| **CDN Setup** | 1 USDFC (one-time) | Per data set; reusing existing data sets incurs no cost | +| **Storage** | $2.50/TiB/month | Minimum $0.06/month per data set (~24.567 GiB threshold) | +| **CDN Egress** | $14/TiB downloaded | 1 USDFC top-up ≈ 71.5 GiB of downloads | +| **CDN Setup** | 1 USDFC (one-time) | Per data set; reusing existing data sets incurs no cost | :::note[Be aware] Per month means **per 30 days** here, not calendar month like a traditional storage service. @@ -35,7 +35,7 @@ Per month means **per 30 days** here, not calendar month like a traditional stor ### Real-World Cost Examples -**Example 1: NFT Collection (10,000 × 5 KiB ≈ 48.82 MiB)** +#### Example 1: NFT Collection (10,000 × 5 KiB ≈ 48.82 MiB) ```ts twoslash // 48.82 MiB less than 24.567 GiB threshold @@ -52,7 +52,7 @@ const PRICE_FOR_24_MONTHS = PRICE_PER_MONTH * 24; // 1.44 USDFC --- -**Example 2: User Content Platform with CDN** +#### Example 2: User Content Platform with CDN - **Storage:** 1,000 users × 100 MiB ≈ 100,000 MiB - **Traffic:** 1,000 users × 100 MiB/month ≈ 100,000 MiB/month egress @@ -82,11 +82,11 @@ const totalCostPerMonth = storageCostPerMonth + egressCostPerMonth; const totalCostFor24Months = totalCostPerMonth * 24; ``` -| Cost Component | Per Month | 24 Months | +| Cost Component | Per Month | 24 Months | | -------------- | ---------------- | ----------------- | -| Storage | ≈ 0.238 USDFC | ≈ 5.71 USDFC | -| CDN Egress | ≈ 1.334 USDFC | ≈ 32.016 USDFC | -| **Total** | **≈ 1.572 USDFC** | **≈ 37.728 USDFC** | +| Storage | ≈ 0.238 USDFC | ≈ 5.71 USDFC | +| CDN Egress | ≈ 1.334 USDFC | ≈ 32.016 USDFC | +| **Total** | **≈ 1.572 USDFC** | **≈ 37.728 USDFC** | ## Warm Storage Service Approvals @@ -116,37 +116,37 @@ Get pricing information and calculate the cost per epoch for your storage capaci ```ts twoslash import { Synapse, - WarmStorageService, SIZE_CONSTANTS, -} from "@filoz/synapse-sdk"; -const synapse = await Synapse.create({}); +} from "@filoz/synapse-sdk" +import { WarmStorageService } from '@filoz/synapse-sdk/warm-storage' +const synapse = await Synapse.create({}) // ---cut--- // Get pricing structure -const warmStorageAddress = await synapse.getWarmStorageAddress(); +const warmStorageAddress = await synapse.getWarmStorageAddress() const warmStorageService = await WarmStorageService.create( synapse.getProvider(), warmStorageAddress -); +) const { minimumPricePerMonth, epochsPerMonth, pricePerTiBPerMonthNoCDN } = - await warmStorageService.getServicePrice(); + await warmStorageService.getServicePrice() // Calculate base cost per month -const bytesToStore = SIZE_CONSTANTS.GiB; // 1 GiB +const bytesToStore = SIZE_CONSTANTS.GiB // 1 GiB let pricePerMonth = (pricePerTiBPerMonthNoCDN * BigInt(bytesToStore)) / - BigInt(SIZE_CONSTANTS.TiB); + BigInt(SIZE_CONSTANTS.TiB) // Apply minimum pricing if needed if (pricePerMonth < minimumPricePerMonth) { - pricePerMonth = minimumPricePerMonth; + pricePerMonth = minimumPricePerMonth } // Calculate per-epoch cost -const pricePerEpoch = pricePerMonth / epochsPerMonth; +const pricePerEpoch = pricePerMonth / epochsPerMonth -console.log("Monthly cost:", pricePerMonth); 
-console.log("Per-epoch cost:", pricePerEpoch); +console.log("Monthly cost:", pricePerMonth) +console.log("Per-epoch cost:", pricePerEpoch) ``` ### Step 2: Calculate Required Allowances @@ -158,6 +158,7 @@ Allowances are **cumulative** - you must add your new storage requirements to an - **Rate Allowance**: Total per-epoch spending across all datasets - **Lockup Allowance**: Total 30-day buffer across all datasets + ::: ```ts twoslash @@ -266,5 +267,5 @@ console.log("Account funded successfully"); ## Next Steps -- [Storage Operations](/developer-guides/storage/) - Storage concepts and workflows +- [Storage Operations](/developer-guides/storage/storage-operations/) - Storage concepts and workflows - [Storage Context](/developer-guides/storage/storage-context/) - Contexts and data sets diff --git a/docs/src/content/docs/developer-guides/storage/index.mdx b/docs/src/content/docs/developer-guides/storage/storage-operations.mdx similarity index 91% rename from docs/src/content/docs/developer-guides/storage/index.mdx rename to docs/src/content/docs/developer-guides/storage/storage-operations.mdx index ecfd8ac27..628c4dffd 100644 --- a/docs/src/content/docs/developer-guides/storage/index.mdx +++ b/docs/src/content/docs/developer-guides/storage/storage-operations.mdx @@ -16,7 +16,7 @@ You'll learn the fundamentals of uploading data, managing data sets, and retriev This guide explains the core storage concepts and provides examples of how to use the Synapse SDK to store, retrieve, and manage data on Filecoin On-Chain Cloud. -### Key Concepts +## Key Concepts **Data Set**: A logical container of pieces stored with one provider. When a data set is created, a payment rail is established with that provider. All pieces in the data set share this single payment rail and are verified together via PDP proofs. @@ -31,7 +31,7 @@ This guide explains the core storage concepts and provides examples of how to us **Storage Context**: A connection to a specific storage provider and data set. Created explicitly for fine-grained control or automatically by StorageManager. Enables uploads and downloads with the specific storage provider. -### Storage Approaches +## Storage Approaches The SDK offers two ways to work with storage operations: @@ -42,7 +42,7 @@ The SDK offers two ways to work with storage operations: **Recommendation**: Start with auto-managed, then explore explicit control only if needed. -**Example 1: Quick Start (Auto-Managed)** +### Example 1: Quick Start (Auto-Managed) Upload and download data with zero configuration - SDK automatically selects a provider and manages the data set: @@ -59,7 +59,7 @@ console.log("Uploaded:", result.pieceCid); console.log("Downloaded:", downloaded.length, "bytes"); ``` -**Example 2: With Metadata (Auto-Managed)** +### Example 2: With Metadata (Auto-Managed) Add metadata to organize uploads and enable faster data set reuse - SDK will reuse any existing data set matching the metadata: @@ -83,7 +83,7 @@ console.log("Uploaded:", result.pieceCid); :::tip[Need More Control?] The examples above use auto-managed storage where the SDK handles provider selection and data set creation. -**For advanced use cases** like batch uploads, custom callbacks, or explicit provider selection, see the [Storage Context Guide](./storage-context). +**For advanced use cases** like batch uploads, custom callbacks, or explicit provider selection, see the [Storage Context Guide](/developer-guides/storage/storage-context/). 
::: ## Data Set Management @@ -144,7 +144,9 @@ console.log(`Found ${pieces.length} pieces`); Calculate total storage size by summing piece sizes extracted from PieceCIDs: ```ts twoslash -import { Synapse, PDPVerifier } from "@filoz/synapse-sdk"; +import { Synapse } from "@filoz/synapse-sdk"; +import { PDPVerifier } from "@filoz/synapse-sdk/pdp"; + const synapse = await Synapse.create({}); const dataSetId = null as unknown as number; // ---cut--- @@ -163,7 +165,8 @@ console.log(`Data set size: ${sizeInBytes} bytes`); Access custom metadata attached to individual pieces for organization and filtering: ```ts twoslash -import { Synapse, WarmStorageService } from "@filoz/synapse-sdk"; +import { Synapse } from "@filoz/synapse-sdk"; +import { WarmStorageService } from "@filoz/synapse-sdk/warm-storage"; const synapse = await Synapse.create({}); const dataSetId = null as unknown as number; const piece = null as unknown as { pieceCid: string; pieceId: number }; @@ -177,7 +180,7 @@ const metadata = await warmStorage.getPieceMetadata(dataSetId, piece.pieceId); console.log("Piece metadata:", metadata); ``` -### Getting the size of a specific piece +## Getting the size of a specific piece Calculate size of a specific piece by extracting the size from the PieceCID: @@ -192,7 +195,7 @@ const size = getSizeFromPieceCID(pieceCid); console.log(`Piece size: ${size} bytes`); ``` -### Storage Information +## Storage Information Query service-wide pricing, available providers, and network parameters: @@ -213,14 +216,14 @@ console.log("PDP URL:", providerInfo.products.PDP!.data.serviceURL); **Ready to explore more?** Here's your learning path: -- **[Advanced Operations →](./storage-context)** +- **[Advanced Operations →](/developer-guides/storage/storage-context/)** Learn about batch uploads, lifecycle management, and download strategies. _For developers building production applications with specific provider requirements._ -- **[Plan Storage Costs →](./storage-costs)** +- **[Plan Storage Costs →](/developer-guides/storage/storage-costs/)** Calculate your monthly costs and understand funding requirements. _Use the quick calculator to estimate costs in under 5 minutes._ -- **[Payment Management →](/developer-guides/payments/)** +- **[Payment Management →](/developer-guides/payments/payment-operations/)** Manage deposits, approvals, and payment rails. 
_Required before your first upload._

diff --git a/docs/src/content/docs/developer-guides/index.mdx b/docs/src/content/docs/developer-guides/synapse.md
similarity index 80%
rename from docs/src/content/docs/developer-guides/index.mdx
rename to docs/src/content/docs/developer-guides/synapse.md
index 3a239fcf1..c7067bbfd 100644
--- a/docs/src/content/docs/developer-guides/index.mdx
+++ b/docs/src/content/docs/developer-guides/synapse.md
@@ -11,12 +11,11 @@ sidebar:

The SDK integrates with four key components of the Filecoin Onchain Cloud:

-- **PDPVerifier** : Proof verification contract powered by ([PDP](/core-concepts/pdp-overview))
-- **Filecoin Pay** : Payment layer contract powered by ([Filecoin Pay](/core-concepts/filecoin-pay-overview))
-- **Filecoin Warm Storage Service** : Business logic contract powered by ([WarmStorage](/core-concepts/fwss-overview))
+- **PDPVerifier** : Proof verification contract, powered by [PDP](/core-concepts/pdp-overview/)
+- **Filecoin Pay** : Payment layer contract, powered by [Filecoin Pay](/core-concepts/filecoin-pay-overview/)
+- **Filecoin Warm Storage Service** : Business logic contract, powered by [WarmStorage](/core-concepts/fwss-overview/)
- **Service Providers** : The actors that safeguard the data stored in the Filecoin Onchain Cloud, powered by the [Curio Storage software](https://github.com/filecoin-project/curio)
-
:::tip[New to Synapse SDK?]
**Start building in 5 minutes!** Follow the [**Getting Started Guide →**](/getting-started/) to install the SDK, configure your environment, and upload your first file to Filecoin Onchain Cloud.
:::

@@ -35,45 +34,46 @@ import {
  StorageInfo,
  ProviderInfo,
  FilecoinNetworkType,
-} from "@filoz/synapse-sdk";
-import { ethers } from "ethers";
-import type { PaymentsService, StorageManager } from "@filoz/synapse-sdk";
+} from "@filoz/synapse-sdk"
+import { ethers } from "ethers"
+import type { PaymentsService } from "@filoz/synapse-sdk/payments"
+import type { StorageManager } from "@filoz/synapse-sdk/storage"
// ---cut---
interface SynapseAPI {
  // Create a new Synapse instance
-  create(options: SynapseOptions): Promise<Synapse>;
+  create(options: SynapseOptions): Promise<Synapse>

  // Properties
-  payments: PaymentsService;
-  storage: StorageManager;
+  payments: PaymentsService
+  storage: StorageManager

  // Storage Information (pricing, providers, service parameters, allowances)
-  getStorageInfo(): Promise<StorageInfo>;
-  getProviderInfo(providerAddress: string): Promise<ProviderInfo>;
+  getStorageInfo(): Promise<StorageInfo>
+  getProviderInfo(providerAddress: string): Promise<ProviderInfo>

  // Network Information
-  getNetwork(): FilecoinNetworkType;
-  getChainId(): number;
+  getNetwork(): FilecoinNetworkType
+  getChainId(): number

  // Contract Addresses
-  getWarmStorageAddress(): string;
-  getPaymentsAddress(): string;
-  getPDPVerifierAddress(): string;
+  getWarmStorageAddress(): string
+  getPaymentsAddress(): string
+  getPDPVerifierAddress(): string

  // Ethers Helpers
-  getProvider(): ethers.Provider;
-  getSigner(): ethers.Signer;
+  getProvider(): ethers.Provider
+  getSigner(): ethers.Signer
}
```

### Payment Operations

-Fund your account and manage payments for Filecoin storage services. 
+Fund your account and manage payments for Filecoin storage services.
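As a rough sketch of what that funding flow looks like in code (amounts are illustrative; this leans on the `synapse.payments` deposit/approve-service surface described elsewhere in these docs):

```ts
import { Synapse, RPC_URLS } from "@filoz/synapse-sdk";
import { ethers } from "ethers";

const synapse = await Synapse.create({
  privateKey: "YOUR_PRIVATE_KEY", // load from .env in real code
  rpcURL: RPC_URLS.calibration.http,
});

// Deposit USDFC into the payments contract (base units, assuming 18 decimals)
const depositTx = await synapse.payments.deposit(ethers.parseUnits("10", 18));
await depositTx.wait();

// Approve the Warm Storage service as an operator so it can create payment rails
const approveTx = await synapse.payments.approveService(
  synapse.getWarmStorageAddress(),
  ethers.parseUnits("0.0001", 18), // per-epoch rate allowance (illustrative)
  ethers.parseUnits("10", 18), // total lockup allowance (illustrative)
  86_400n // max lockup period in epochs (~30 days)
);
await approveTx.wait();
```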
-**When You Need This**: +#### When You Need This - Required before uploading files (must fund account and approve operators) - To monitor account balance and health - If you're a service provider managing settlements -[View Payment Operations Guide →](/developer-guides/payments) - _Learn the basics in less than 10 minutes_ +[View Payment Operations Guide →](/developer-guides/payments/payment-operations/) - _Learn the basics in less than 10 minutes_ -[View Rails & Settlement Guide →](/developer-guides/payments/rails-settlement) - _Learn the advanced payment concepts_ +[View Rails & Settlement Guide →](/developer-guides/payments/rails-settlement/) - _Learn the advanced payment concepts_ ### Storage Operations @@ -84,18 +84,18 @@ The SDK provides comprehensive storage capabilities through two main approaches: To understand these storage approaches, you'll need to be familiar with several key concepts: -**Core Concepts** +#### Core Concepts - **Storage Contexts**: Manage storage lifecycle and provider connections. - **Data Sets**: Organize related data pieces with shared payment rails. - **PieceCIDs**: Unique content-addressed identifiers for stored data. - **Service Providers**: Infrastructure for decentralized storage with cryptographic proofs. -[View Storage Operations Guide →](/developer-guides/storage) - _Learn the basics in less than 10 minutes_ +[View Storage Operations Guide →](/developer-guides/storage/storage-operations/) - _Learn the basics in less than 10 minutes_ -[View Storage Context Guide →](/developer-guides/storage/storage-context) - _Learn the advanced storage concepts_ +[View Storage Context Guide →](/developer-guides/storage/storage-context/) - _Learn the advanced storage concepts_ -[View Storage Costs Guide →](/developer-guides/storage/storage-costs) - _Learn how to calculate your storage costs_ +[View Storage Costs Guide →](/developer-guides/storage/storage-costs/) - _Learn how to calculate your storage costs_ ## Technical Constraints and Concepts @@ -129,7 +129,7 @@ The storage service enforces the following size limits for uploads: Attempting to upload data outside these limits will result in an error. :::note -These limits are defined in the SDK constants (`SIZE_CONSTANTS.MIN_UPLOAD_SIZE` and `SIZE_CONSTANTS.MAX_UPLOAD_SIZE`). While the underlying Curio PDP service supports files up to 254 MiB, the SDK currently limits uploads to 200 MiB. Future versions will support larger files through chunking and aggregate PieceCIDs. +These limits are defined in the SDK constants (`SIZE_CONSTANTS.MIN_UPLOAD_SIZE` and `SIZE_CONSTANTS.MAX_UPLOAD_SIZE`). While the underlying Curio PDP service supports files up to 254 MiB, the SDK currently limits uploads to 200 MiB. Future versions will support larger files through chunking and aggregate PieceCIDs. See [this issue](https://github.com/FilOzone/synapse-sdk/issues/110) for details. ::: @@ -151,7 +151,7 @@ PieceCID is Filecoin's native content address identifier, a variant of [CID](htt PieceCID is also known as "CommP" or "Piece Commitment" in Filecoin documentation. The SDK exclusively uses PieceCID (v2 format) for all operations—you receive a PieceCID when uploading and use it for downloads. -LegacyPieceCID (v1 format) conversion utilities are provided for interoperability with other Filecoin services that may still use the older format. See [PieceCID Utilities](#piececid-utilities) for conversion functions. 
+LegacyPieceCID (v1 format) conversion utilities are provided for interoperability with other Filecoin services that may still use the older format. **Technical Reference:** See [FRC-0069](https://github.com/filecoin-project/FIPs/blob/master/FRCs/frc-0069.md) for the complete specification of PieceCID ("v2 Piece CID") and its relationship to LegacyPieceCID ("v1 Piece CID"). Most Filecoin tooling currently uses v1, but the ecosystem is transitioning to v2. @@ -171,7 +171,7 @@ try { ## Additional Resources - [Getting Started](/getting-started/) - Installation and setup guide -- [Payment Operations Guide](/developer-guides/payments/) - Complete payment operations reference -- [Storage Operations Guide](/developer-guides/storage/) - Complete storage operations reference -- [API Reference](/api/filoz/synapse-sdk/readme/) - Full API documentation +- [Payment Operations Guide](/developer-guides/payments/payment-operations/) - Complete payment operations reference +- [Storage Operations Guide](/developer-guides/storage/storage-operations/) - Complete storage operations reference +- [API Reference](/reference/filoz/synapse-sdk/toc/) - Full API documentation - [GitHub Repository](https://github.com/FilOzone/synapse-sdk) - Source code and examples diff --git a/docs/src/content/docs/developer-guides/telemetry.md b/docs/src/content/docs/developer-guides/telemetry.md index 51bfac2ff..2adfd2c2a 100644 --- a/docs/src/content/docs/developer-guides/telemetry.md +++ b/docs/src/content/docs/developer-guides/telemetry.md @@ -5,11 +5,12 @@ description: Notes about the telemetry functionality that is within Synapse. To help maintainers validate functionality and iron out problems throughout the whole Filecoin Onchain Cloud stack, starting from the SDK, telemetry is **temporarily enabled by default for the calibration network** in Synapse. We are currently leveraging sentry.io as discussed in [issue #328](https://github.com/FilOzone/synapse-sdk/issues/328). -### How to disable telemetry +## How to disable telemetry There are multiple ways to disable Synapse telemetry: 1) Via Synapse Config: + ```ts const synapse = await Synapse.create({ /* ...existing options... */ @@ -17,11 +18,11 @@ const synapse = await Synapse.create({ }) ``` -2) Set the environment variable `SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse. +1) Set the environment variable `SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse. -3) Set `globalThis.SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse. +2) Set `globalThis.SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse. -### What is being collected and why +## What is being collected and why All HTTP calls are being instrumented (except for static assets like JS, CSS, and images), even HTTP calls that originate from outside of Synapse. This was the quickest way to ensure we captured the information we are after. @@ -30,22 +31,27 @@ The primary information we are attempting to collect is HTTP request paths, resp We also capture general uncaught errors. This could be indicative of issues in Synapse, which we'd want to fix. We are not capturing: + - Personal identifiable information (PII). We explicitly [disable sending default PII to Sentry](https://docs.sentry.io/platforms/javascript/configuration/options/#sendDefaultPii). - Metrics on static asset (e.g., CSS, JS, image) retrieval. (One can verify these claims in [telemetry/service.ts](https://github.com/FilOzone/synapse-sdk/blob/master/packages/synapse-sdk/src/telemetry/service.ts).) 
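For instance, the environment-variable and global-flag opt-outs described in the disable section above might look like this (a sketch; it assumes a Node.js environment for `process.env`, and that either flag is set before `Synapse.create` runs):

```ts
import { Synapse, RPC_URLS } from "@filoz/synapse-sdk";

// Option 2: environment variable (Node.js)
process.env.SYNAPSE_TELEMETRY_DISABLED = "true";

// Option 3: global flag (works in browsers too)
(globalThis as any).SYNAPSE_TELEMETRY_DISABLED = true;

// Telemetry is now off for this instance
const synapse = await Synapse.create({
  privateKey: "YOUR_PRIVATE_KEY",
  rpcURL: RPC_URLS.calibration.http,
});
```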
-### Why is telemetry collecting happening a library like Synapse
+## Why is telemetry collection happening in a library like Synapse
+
Collecting telemetry through Synapse, per [issue #328](https://github.com/FilOzone/synapse-sdk/issues/328), is a short-term, dev-resource-efficient decision. In this season of focusing on stability, the goal is to capture request failures and other client-side errors as broadly and quickly as possible so we have an enumeration of the problems and their impact.

By setting up telemetry at the Synapse layer, we can broadly get telemetry from some of the first consumers by default without requiring extra work from them (e.g., filecoin-pin, filecoin-pin-website, synapse demo websites). This is a short-term measure.

-### How long will Synapse collect telemetry
+## How long will Synapse collect telemetry
+
This broad telemetry at the library/SDK layer will be removed by GA (by end of November 2025). At that point, we'll do one or more of the following:
+
1. Reduce telemetry collection to cover only calls originating from Synapse (not all HTTP calls),
2. Switch the default to opt-in vs. opt-out like it is currently. (Note that currently we only enable telemetry by default for the calibration network. We don't enable it by default for mainnet.)
3. Remove telemetry from Synapse entirely, and instead require applications (e.g., filecoin-pin, filecoin-pin-website) to do their own telemetry collection.

The tracking issue for this cleanup is [issue #363](https://github.com/FilOzone/synapse-sdk/issues/363).

-### How to configure telemetry
+## How to configure telemetry
+
Synapse consumers can pass in any [Sentry options](https://docs.sentry.io/platforms/javascript/configuration/options/) via `Synapse.create({ telemetry: { sentryInitOptions: {...} } })`.

Synapse's default Sentry options are applied in `src/telemetry/service.ts` whenever not explicitly set by the user.

@@ -54,5 +60,6 @@ Any explicit tags to add to all Sentry calls can be added with `Synapse.create({

One also has direct access to the Sentry instance that Synapse is using via `synapse.telemetry.sentry`, at which point any of the [Sentry APIs](https://docs.sentry.io/platforms/javascript/apis/) can be invoked.

-### Who has access to the telemetry data
-Access is restricted to the Synapse maintainers and product/support personnel actively involved in the Filecoin Onchain Cloud who work with Synapse. \ No newline at end of file
+## Who has access to the telemetry data
+
+Access is restricted to the Synapse maintainers and product/support personnel actively involved in the Filecoin Onchain Cloud who work with Synapse.

diff --git a/docs/src/content/docs/getting-started/index.mdx b/docs/src/content/docs/getting-started/index.mdx
index 4f6df42ad..525750193 100644
--- a/docs/src/content/docs/getting-started/index.mdx
+++ b/docs/src/content/docs/getting-started/index.mdx
@@ -14,7 +14,7 @@ The Synapse SDK provides an interface to Filecoin's decentralized services ecosy

The SDK handles all the complexity of blockchain interactions, provider selection, and data management, so you can focus on building your application.

-#### What You'll Learn
+## What You'll Learn

By the end of this guide, you'll understand how to:

@@ -32,7 +32,7 @@ Before installing the Synapse SDK, make sure you have the following:

- A **supported package manager** (npm, yarn, or pnpm).
- Added [Filecoin **Calibration**](https://chainlist.org/chain/314159) into your MetaMask.
-#### Get Test Tokens
+### Get Test Tokens

Before you start building with Synapse SDK, you'll need to request test tokens from faucets to pay transaction fees and storage fees. For the calibration testnet:

@@ -60,7 +60,7 @@ Note: `ethers` v6 is a peer dependency and must be installed separately.

## Quick Start Example

-The [`Synapse`](../api/filoz/synapse-sdk/synapse/classes/synapse/) class provides a complete, easy-to-use interface for interacting with Filecoin storage services.
+The [`Synapse`](/reference/filoz/synapse-sdk/synapse/classes/synapse/) class provides a complete, easy-to-use interface for interacting with Filecoin storage services.

:::note[Keep your private key safe]
Instead of hardcoding `privateKey` in the code, store and access it from `.env` files, and never commit your private key.
:::

@@ -77,6 +77,8 @@ async function main() {
  const synapse = await Synapse.create({
    privateKey: "YOUR_PRIVATE_KEY",
    rpcURL: RPC_URLS.calibration.http,
+    // Uncomment for high-performance, incentive-aligned data retrieval through Filecoin Beam
+    // withCDN: true
  })

  // 2) Fund & approve (single tx)

@@ -146,9 +148,9 @@ const synapse = await Synapse.create({

### 2: Payment Setup

-Before storing data, you need to deposit USDFC tokens in your payments account. The amount you deposit is entirely up to your anticipated storage and retrieval needs. 
+Before storing data, you need to deposit USDFC tokens in your payments account. The amount you deposit depends entirely on your anticipated storage and retrieval needs.

:::note[Pricing]
-To size your deposit, check the up‑to‑date rates in [**Pricing**](../introduction/about/#pricing) and use the [storage costs calculator](/developer-guides/storage/storage-costs/#detailed-calculator-guide) for a precise estimate.
+To size your deposit, check the up‑to‑date rates in [**Pricing**](/introduction/about/#pricing) and use the [storage costs calculator](/developer-guides/storage/storage-costs/#detailed-calculator-guide) for a precise estimate.
:::

```ts twoslash
@@ -252,6 +254,7 @@ const result = await context.upload(data);

// Download from this context
const downloaded = await context.download(result.pieceCid);
```
+
- **providerId**: By specifying `providerId`, you tell Synapse SDK to store (and later retrieve) your data specifically with that provider.
- **withCDN**: Setting `withCDN: true` enables **Filecoin Beam**, the decentralized retrieval and delivery layer, so global users can download data faster, with pay-by-egress billing.

@@ -286,8 +289,8 @@ providers.forEach((provider) => {

Ready to dive deeper?
Check out these comprehensive guides: #### API Reference -For complete API documentation, see the [API Reference](/api/filoz/synapse-sdk/readme/) which covers: +For complete API documentation, see the [API Reference](/reference/filoz/synapse-sdk/toc/) which covers: - All SDK classes and methods - TypeScript interfaces and types diff --git a/docs/src/content/docs/guides/components.mdx b/docs/src/content/docs/guides/components.mdx deleted file mode 100644 index ecc2da946..000000000 --- a/docs/src/content/docs/guides/components.mdx +++ /dev/null @@ -1,347 +0,0 @@ ---- -title: Components -description: Learn how to configure the Synapse SDK for different networks. -sidebar: - order: 2 ---- - -All components can be imported and used independently for advanced use cases. The SDK is organized to match the external service structure: - -### Payments Service - -Direct interface to the Payments contract for token operations and operator approvals. - -```typescript -import { PaymentsService } from '@filoz/synapse-sdk/payments' -import { ethers } from 'ethers' - -const provider = new ethers.JsonRpcProvider(rpcUrl) -const signer = await provider.getSigner() -const paymentsService = new PaymentsService(provider, signer, paymentsAddress, usdfcAddress, false) - -// Deposit USDFC to payments contract -const depositTx = await paymentsService.deposit(amount) // amount in base units -console.log(`Deposit transaction: ${depositTx.hash}`) -await depositTx.wait() // Wait for confirmation - -// Optional: Deposit to a different address -const recipientAddress = '0x1234...' -await paymentsService.deposit(amount, TOKENS.USDFC, { to: recipientAddress }) - -// Check account info -const info = await paymentsService.accountInfo() // Uses USDFC by default -console.log('Available funds:', info.availableFunds) - -// Approve service as operator -const approveTx = await paymentsService.approveService( - serviceAddress, // e.g., Warm Storage contract address - rateAllowance, // per-epoch rate allowance in base units - lockupAllowance, // total lockup allowance in base units - maxLockupPeriod // max lockup period in epochs (e.g., 86400n for 30 days) -) -console.log(`Service approval transaction: ${approveTx.hash}`) -await approveTx.wait() // Wait for confirmation -``` - -### Service Provider Registry - -Query and manage service providers registered in the on-chain registry. - -```typescript -import { SPRegistryService } from '@filoz/synapse-sdk/sp-registry' - -// Create service instance -const spRegistry = new SPRegistryService(provider, registryAddress) - -// Query providers -const allProviders = await spRegistry.getAllActiveProviders() -const provider = await spRegistry.getProvider(providerId) -const providerByAddr = await spRegistry.getProviderByAddress(address) - -// Check registration status -const isRegistered = await spRegistry.isRegisteredProvider(address) -const providerId = await spRegistry.getProviderIdByAddress(address) -const isActive = await spRegistry.isProviderActive(providerId) - -// Provider management (requires signer) -const registrationInfo = { - name: 'My Storage Provider', - description: 'Reliable storage service', - pdpOffering: { - serviceURL: 'https://provider.example.com', - minPieceSizeInBytes: 65n, - maxPieceSizeInBytes: 34091302912n, - storagePricePerTibPerMonth: 5000000000000000000n, - location: '/C=US/ST=CA/L=SF', - // ... 
other PDP fields - }, - capabilities: { hyperCompute: '100x' } -} -await spRegistry.registerProvider(signer, registrationInfo) -await spRegistry.updateProviderInfo(signer, name, description) -await spRegistry.removeProvider(signer) - -// Product management for PDP services -await spRegistry.addPDPProduct(signer, pdpOffering, capabilities) -await spRegistry.updatePDPProduct(signer, pdpOffering, capabilities) -await spRegistry.deactivateProduct(signer, 0) // 0 = ProductType.PDP - -// Query PDP service details -const pdpService = await spRegistry.getPDPService(providerId) -console.log('Service URL:', pdpService.offering.serviceURL) -console.log('Storage Price:', pdpService.offering.storagePricePerTibPerMonth) -``` - -### Warm Storage Service - -Interact with the Warm Storage contract for data set management, service provider operations, and storage cost calculations. - -```typescript -import { WarmStorageService } from '@filoz/synapse-sdk/warm-storage' - -// Create WarmStorageService using factory method -const warmStorageService = await WarmStorageService.create(provider, warmStorageAddress) - -// Storage cost calculations -const costs = await warmStorageService.calculateStorageCost(sizeInBytes) -console.log(`Storage cost: ${costs.perMonth} per month`) - -// Check allowances for storage (returns allowance details and costs) -const check = await warmStorageService.checkAllowanceForStorage( - sizeInBytes, - withCDN, - paymentsService // Pass PaymentsService instance -) -// check.sufficient - boolean indicating if allowances are sufficient -// check.costs - storage costs per epoch/day/month - -// Prepare storage upload -const prep = await warmStorageService.prepareStorageUpload({ - dataSize: sizeInBytes, - withCDN: false -}, paymentsService) - -// Get client data sets with enhanced details -const dataSets = await warmStorageService.getClientDataSetsWithDetails(clientAddress) -for (const ds of dataSets) { - console.log(`Rail ID: ${ds.railId}, PDP Verifier ID: ${ds.pdpVerifierDataSetId}`) - console.log(`Is Live: ${ds.isLive}, Is Managed: ${ds.isManaged}`) - console.log(`Next Piece ID: ${ds.nextPieceId}`) -} - -// Get only data sets managed by this Warm Storage instance -const managedSets = await warmStorageService.getManagedDataSets(clientAddress) - -// Verify data set creation -const verification = await warmStorageService.verifyDataSetCreation(txHash) -if (verification.dataSetLive) { - console.log(`Data set ${verification.dataSetId} is live!`) -} - -// Service provider operations -const isApproved = await warmStorageService.isProviderApproved(providerAddress) -const providers = await warmStorageService.getAllApprovedProviders() - -// Top up CDN payment rails -await wamStorageService.topUpCDNPaymentRails(signer, dataSetId, cdnAmountToAdd, cacheMissAmountToAdd) -``` - -### Subgraph Service - -The SubgraphService provides access to Synapse-compatible subgraphs for provider discovery, data set tracking, and more. 
- -```typescript -// Create subgraph service -const subgraphService = new SubgraphService({ - goldsky: { - projectId: 'PROJECT_ID', - subgraphName: 'SUBGRAPH_NAME', - version: 'latest' - } -}) - -// Direct endpoint configuration -const subgraphService2 = new SubgraphService({ - endpoint: 'https://api.goldsky.com/api/public/project_id/subgraph_name' -}) - -// Example: Query for active providers with custom filtering -const activeProviders = await subgraphService.queryProviders({ - where: { - status: 'Approved' - }, - orderBy: 'totalDataSets', - orderDirection: 'desc', - first: 5 -}) - -// Example: Find providers for a specific PieceCID -const providers = await subgraphService.getApprovedProvidersForPieceCID(pieceCid) -``` - -#### Custom Subgraph Service Implementations - -The SDK supports custom implementations of the `SubgraphRetrievalService` interface, allowing you to provide alternative data sources for provider discovery. This is useful for testing, custom integrations, or cases where you need specialized provider selection logic. - -```typescript -// Example: Implementing a custom SubgraphRetrievalService -class CustomProviderService implements SubgraphRetrievalService { - async getApprovedProvidersForPieceCID(pieceCid) { - // Your custom implementation here - // Could use a different data source, filtering logic, etc. - return [{ - owner: '0x123...', - pdpUrl: 'https://example.com/pdp', - pieceRetrievalUrl: 'https://example.com/retrieval', - registeredAt: Date.now(), - approvedAt: Date.now() - }] - } - - async getProviderByAddress(address) { - // Your custom implementation - // ... - } -} - -// Using the custom service with Synapse -const synapse = await Synapse.create({ - provider, - subgraphService: new CustomProviderService() -}) -``` - -### PDP Components - -The PDP (Proof of Data Possession) system has three main components: - -#### PDP Verifier - -Low-level interface to the PDPVerifier contract for protocol operations. - -```typescript -import { PDPVerifier } from '@filoz/synapse-sdk/pdp' - -// Create PDPVerifier instance -const pdpVerifier = new PDPVerifier(provider, pdpVerifierAddress) - -// Check if data set is live -const isLive = await pdpVerifier.dataSetLive(dataSetId) - -// Get data set details -const nextPieceId = await pdpVerifier.getNextPieceId(dataSetId) -const listener = await pdpVerifier.getDataSetListener(dataSetId) -const leafCount = await pdpVerifier.getDataSetLeafCount(dataSetId) - -// Extract data set ID from transaction receipt -const dataSetId = await pdpVerifier.extractDataSetIdFromReceipt(receipt) -``` - -#### PDP Server - -Consolidated interface for all PDP server (Curio) HTTP operations including data sets, uploads, and downloads. 
- -```typescript -import { PDPServer, PDPAuthHelper } from '@filoz/synapse-sdk/pdp' - -// Create server instance with auth helper -const authHelper = new PDPAuthHelper(warmStorageAddress, signer, chainId) -const pdpServer = new PDPServer(authHelper, 'https://pdp.provider.com', 'https://pdp.provider.com') - -// Create a data set -const { txHash, statusUrl } = await pdpServer.createDataSet( - clientDataSetId, // number - payee, // string (service provider address) - withCDN, // boolean - recordKeeper // string (Warm Storage contract address) -) - -// Check creation status -const status = await pdpServer.getDataSetCreationStatus(txHash) -console.log(`Status: ${status.txStatus}, Data Set ID: ${status.dataSetId}`) - -// Add pieces to data set (returns transaction tracking info) -const addResult = await pdpServer.addPieces( - dataSetId, // number (PDPVerifier data set ID) - clientDataSetId, // bigint - pieceDataArray // Array of PieceCID strings -) -// addResult: { message: string, txHash?: string, statusUrl?: string } - -// Check piece addition status (for new servers with transaction tracking) -if (addResult.txHash) { - const status = await pdpServer.getPieceAdditionStatus(dataSetId, addResult.txHash) - console.log(`Status: ${status.txStatus}, Piece IDs: ${status.confirmedPieceIds}`) -} - -// Upload a piece -const { pieceCid, size } = await pdpServer.uploadPiece(data, 'my-file.dat') - -// Find existing piece -const piece = await pdpServer.findPiece(pieceCid, size) -console.log(`Piece found: ${piece.uuid}`) - -// Download a piece -const data = await pdpServer.downloadPiece(pieceCid) - -// Get data set details -const dataSet = await pdpServer.getDataSet(dataSetId) -console.log(`Data set ${dataSet.id} has ${dataSet.pieces.length} pieces`) -``` - -#### PDP Auth Helper - -Sign EIP-712 typed data for PDP operations. Compatible with MetaMask and other browser wallets. - -```typescript -import { PDPAuthHelper } from '@filoz/synapse-sdk/pdp' - -// Create auth helper -const authHelper = new PDPAuthHelper(warmStorageAddress, signer, chainId) - -// Sign operations -const createDataSetSig = await authHelper.signCreateDataSet( - clientDataSetId, // number - payeeAddress, // string - withCDN // boolean -) - -const addPiecesSig = await authHelper.signAddPieces( - clientDataSetId, // bigint - nonce, // bigint (random value for replay protection) - pieceDataArray, // Array of PieceCID strings - metadata // Optional array of metadata for each piece -) - -// All signatures return { signature, v, r, s, signedData } -``` - -### PieceCID Utilities - -Utilities for calculating PieceCIDs and converting between formats. - -```typescript -import { calculate, asPieceCID, asLegacyPieceCID, createPieceCIDStream } from '@filoz/synapse-sdk/piece' - -// Calculate PieceCID from data -const data = new Uint8Array([1, 2, 3, 4]) -const pieceCid = calculate(data) -console.log(pieceCid.toString()) // bafkzcib... 
- -// Validate and convert PieceCID strings and CIDs -const convertedPieceCid = asPieceCID('bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy') -if (convertedPieceCid !== null) { - console.log('Valid PieceCID:', convertedPieceCid.toString()) -} - -// Stream-based PieceCID calculation; compatible with Web Streams API -const { stream, getPieceCID } = createPieceCIDStream() -// Pipe data through stream, then call getPieceCID() for result - -// Convert to LegacyPieceCID for compatibility with external Filecoin services -const legacyPieceCid = asLegacyPieceCID(convertedPieceCid) -if (legacyPieceCid !== null) { - console.log('Valid LegacyPieceCID:', legacyPieceCid.toString()) - // Valid LegacyPieceCID: baga6ea4seaqdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy -} -``` diff --git a/docs/src/content/docs/guides/network-configuration.mdx b/docs/src/content/docs/guides/network-configuration.mdx deleted file mode 100644 index 8e257a6f3..000000000 --- a/docs/src/content/docs/guides/network-configuration.mdx +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: Network Configuration -description: Learn how to configure the Synapse SDK for different networks. -sidebar: - order: 1 ---- - -### RPC Endpoints - -```typescript -import { RPC_URLS } from '@filoz/synapse-sdk' - -// Mainnet -RPC_URLS.mainnet.websocket // wss://wss.node.glif.io/apigw/lotus/rpc/v1 -RPC_URLS.mainnet.http // https://api.node.glif.io/rpc/v1 - -// Calibration Testnet -RPC_URLS.calibration.websocket // wss://wss.calibration.node.glif.io/apigw/lotus/rpc/v1 -RPC_URLS.calibration.http // https://api.calibration.node.glif.io/rpc/v1 -``` - -### GLIF Authorization - -For higher rate limits with GLIF endpoints: - -```typescript -import { Synapse } from '@filoz/synapse-sdk' - -// Using GLIF authorization with private key -const synapse = await Synapse.create({ - privateKey: '0x...', - rpcURL: 'https://api.node.glif.io/rpc/v1', - authorization: 'Bearer YOUR_GLIF_TOKEN' -}) -``` - -### Connection Management - -#### WebSocket vs HTTP - -The SDK supports both WebSocket and HTTP connections: - -- **WebSocket** (recommended): Better performance for multiple operations, real-time updates, lower latency -- **HTTP**: Simpler setup, stateless, better for single operations - -```typescript -// WebSocket connection (recommended) -const synapse = await Synapse.create({ - privateKey: '0x...', - rpcURL: RPC_URLS.calibration.websocket -}) - -// HTTP connection -const synapse = await Synapse.create({ - privateKey: '0x...', - rpcURL: RPC_URLS.calibration.http -}) -``` - -#### Cleaning Up Connections - -When using WebSocket connections, it's important to properly close them when your application is done: - -```typescript -// Get the provider instance -const provider = synapse.getProvider() - -// Clean up the connection -if (provider && typeof provider.destroy === 'function') { - await provider.destroy() -} -``` - -This is especially important for: - -- **CLI tools**: Ensures the process can exit cleanly -- **Test suites**: Prevents hanging tests and resource leaks -- **Server applications**: Frees resources when shutting down -- **Long-running applications**: Allows graceful reconnection if needed - -#### Connection Best Practices - -1. **Reuse connections**: Create a single Synapse instance and reuse it for multiple operations -2. **Handle disconnections**: Implement reconnection logic for long-running applications -3. 
**Clean up on exit**: Always destroy connections when your application terminates - -```typescript -// Example: Proper cleanup in a Node.js application -process.on('SIGINT', async () => { - console.log('Shutting down...') - const provider = synapse.getProvider() - if (provider && typeof provider.destroy === 'function') { - await provider.destroy() - } - process.exit(0) -}) -``` - -### Network Details - -#### Filecoin Mainnet - -- Chain ID: 314 -- USDFC Contract: `0x80B98d3aa09ffff255c3ba4A241111Ff1262F045` - -#### Filecoin Calibration Testnet - -- Chain ID: 314159 -- USDFC Contract: `0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0` - ---- diff --git a/docs/src/content/docs/guides/rails-settlement.mdx b/docs/src/content/docs/guides/rails-settlement.mdx deleted file mode 100644 index a1570a06c..000000000 --- a/docs/src/content/docs/guides/rails-settlement.mdx +++ /dev/null @@ -1,306 +0,0 @@ ---- -title: Rails & Settlement -description: Guide to payment rails and settlement operations -sidebar: - order: 4 ---- - -Payment rails are the core mechanism for streaming payments between parties in the Synapse ecosystem. They enable continuous, per-epoch payments for services like storage. - -## Understanding Payment Rails - -Payment rails are continuous payment streams between clients and service providers, created automatically when services like storage are initiated. - -### How Rails Work - -Rails ensure reliable payments through a simple lockup mechanism: - -#### 1. The Lockup Requirement - -When you create a data set (storage), the system calculates how much balance you need to maintain: - -- **Formula**: `lockup = paymentRate × lockupPeriod` (e.g., 30 days worth of payments) -- **Example**: Storing 1 GiB costs ~0.0000565 USDFC/epoch, requiring ~1.63 USDFC minimum balance -- **Purpose**: This protects the service provider by ensuring you always have enough for the next payment period - -#### 2. How Your Balance Works - -- You deposit funds into the payments contract (e.g., 100 USDFC) -- The lockup requirement reserves part of this balance (e.g., 1.63 USDFC for 1 GiB storage) -- You can withdraw anything above the lockup requirement -- When you settle, your total balance decreases by the payment amount (lockup requirement stays the same) - -#### 3. 
Normal vs Abnormal Operations - -- **Normal Operation**: You keep settling regularly, lockup stays reserved but unused -- **If you stop settling**: Service continues but unpaid amounts accumulate -- **If balance gets too low**: Rail terminates when you can't cover future payments -- **After termination**: The lockup NOW becomes available to pay the service provider for the period already provided - -### Understanding Your Balance - -Think of your account as having these components: - -- **Total Funds**: All tokens you've deposited into the payments contract -- **Lockup Requirement**: The minimum balance reserved to guarantee future payments -- **Available Balance**: `totalFunds - lockupRequirement` (this is what you can withdraw) - -### When Lockup Gets Used (The Safety Net) - -The lockup finally gets "used" when things go wrong: - -- **Rail terminates** (due to insufficient funds or manual termination) -- **After termination**, the service provider can settle and claim payment from the lockup -- **This ensures** the provider gets paid for services already delivered, even if the client disappears -- **Example**: If you had 10 days of lockup and the rail terminates, the provider can claim up to 10 days of service payments from that locked amount - -For more details on the payment mechanics, see the [Filecoin Pay documentation](https://github.com/FilOzone/filecoin-pay) - -### Rail Components - -Each rail consists of: - -- **Payer**: The account paying for services -- **Payee**: The recipient of payments (service provider) -- **Operator**: The contract managing the rail (e.g., WarmStorage contract) -- **Payment Rate**: Amount paid per epoch -- **Lockup Period**: How many epochs of payments to lock up in advance -- **Commission**: Percentage taken by the operator (in basis points) - -## Working with Rails - -### Viewing Your Rails - -Check rails where you're the payer: - -```typescript -const payerRails = await synapse.payments.getRailsAsPayer() -console.log(`You have ${payerRails.length} outgoing payment rails`) - -for (const rail of payerRails) { - console.log(`Rail ${rail.railId}:`) - console.log(` Active: ${!rail.isTerminated}`) - if (rail.isTerminated) { - console.log(` End epoch: ${rail.endEpoch}`) - } -} -``` - -Check rails where you're receiving payments: - -```typescript -const payeeRails = await synapse.payments.getRailsAsPayee() -console.log(`You have ${payeeRails.length} incoming payment rails`) -``` - -### Getting Rail Details - -For detailed information about a specific rail: - -```typescript -const railInfo = await synapse.payments.getRail(railId) -console.log('Rail details:', { - from: railInfo.from, - to: railInfo.to, - rate: railInfo.paymentRate, - settledUpTo: railInfo.settledUpTo, - isTerminated: railInfo.endEpoch > 0 -}) -``` - -## Settlement Operations - -Settlement is the process of executing the accumulated payments in a rail. Until settled, payments accumulate but aren't transferred. 
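-
-Before turning to why and how settlement happens, here is a small arithmetic sketch of the lockup and available-balance formulas above. The 10-day lockup period is an assumption chosen to match the "10 days of lockup" example; actual rates and periods come from the payments contract.
-
-```typescript
-// Illustrative figures only, based on the 1 GiB example above
-const EPOCHS_PER_DAY = 2880n // Filecoin epochs are 30 seconds
-const paymentRate = 56_500_000_000_000n // ~0.0000565 USDFC/epoch, in 18-decimal base units
-const lockupPeriod = 10n * EPOCHS_PER_DAY // assumed 10-day lockup period
-
-// lockup = paymentRate × lockupPeriod
-const lockupRequirement = paymentRate * lockupPeriod // ~1.63 USDFC, matching the example above
-
-// availableBalance = totalFunds - lockupRequirement
-const totalFunds = ethers.parseUnits('100', 18) // 100 USDFC deposited
-const availableBalance = totalFunds - lockupRequirement // the withdrawable portion
-```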
- -### Why Settlement is Needed - -- **Gas Efficiency**: Batches many epochs of payments into one transaction -- **Flexibility**: Allows validators to adjust payments if needed -- **Finality**: Makes funds available for withdrawal - -### Settlement Fee - -Settlement operations require a network fee that is burned (permanently removed from circulation), effectively paying the Filecoin network for providing the settlement service: - -- **Amount**: 0.0013 FIL (defined as `SETTLEMENT_FEE` constant) -- **Mechanism**: The fee is burned to Filecoin's burn actor, `f099` (also known as address `0xff00000000000000000000000000000000000063`), reducing FIL supply -- **Purpose**: This burn mechanism compensates the network for processing and securing payment settlements -- **Automatic**: The SDK automatically includes this fee when calling settlement methods - -```typescript -import { SETTLEMENT_FEE } from '@filoz/synapse-sdk' -console.log(`Settlement fee: ${ethers.formatEther(SETTLEMENT_FEE)} FIL`) -// This fee is burned to the network, not paid to any party -``` - -### Performing Settlement - -#### Automatic Settlement (Recommended) - -The simplest way to settle a rail is using `settleAuto()`, which automatically detects whether the rail is active or terminated and calls the appropriate method: - -```typescript -// Automatically handles both active and terminated rails -const tx = await synapse.payments.settleAuto(railId) -await tx.wait() -console.log('Rail settled successfully') - -// For active rails, you can specify the epoch to settle up to -const tx = await synapse.payments.settleAuto(railId, 1000) -await tx.wait() -``` - -#### Manual Settlement Methods - -For more control, you can use the specific settlement methods: - -##### Active Rails - -Settle up to the current epoch: - -```typescript -// Settle a specific rail (requires settlement fee) -const tx = await synapse.payments.settle(railId) -await tx.wait() -console.log('Rail settled successfully') -``` - -Settle up to a specific past epoch (partial settlement): - -```typescript -// Settle up to epoch 1000 (must be less than or equal to current epoch) -// Useful for: -// - Partial settlements to manage cash flow -// - Testing settlement calculations -// - Settling up to a specific accounting period -const provider = synapse.getProvider() -const currentEpoch = await provider.getBlockNumber() -const targetEpoch = Math.min(1000, currentEpoch) // Ensure it's not in the future -const tx = await synapse.payments.settle(railId, targetEpoch) -await tx.wait() -``` - -**Important**: The `untilEpoch` parameter: - -- **Must be less than or equal to current epoch** - Cannot settle future epochs that haven't occurred yet -- **Can be in the past** - Allows partial settlement up to a historical epoch -- **Defaults to current epoch** - If omitted, settles all accumulated payments up to now -- The contract will revert with `CannotSettleFutureEpochs` error if you try to settle beyond the current epoch - -##### Terminated Rails - -When a rail is terminated, use the specific method for terminated rails: - -```typescript -// Check if rail is terminated -const railInfo = await synapse.payments.getRail(railId) -if (railInfo.endEpoch > 0) { - console.log(`Rail terminated at epoch ${railInfo.endEpoch}`) - - // Settle the terminated rail - const tx = await synapse.payments.settleTerminatedRail(railId) - await tx.wait() - console.log('Terminated rail settled and closed') -} -``` - -#### Preview Settlement Amounts - -Check settlement amounts before executing: - 
-```typescript -// Preview settlement to current epoch -const amounts = await synapse.payments.getSettlementAmounts(railId) -console.log('Settlement preview:') -console.log(` Total amount: ${ethers.formatUnits(amounts.totalSettledAmount, 18)} USDFC`) -console.log(` Payee receives: ${ethers.formatUnits(amounts.totalNetPayeeAmount, 18)} USDFC`) -console.log(` Operator commission: ${ethers.formatUnits(amounts.totalOperatorCommission, 18)} USDFC`) -console.log(` Settled up to epoch: ${amounts.finalSettledEpoch}`) -console.log(` Note: ${amounts.note}`) - -// Preview partial settlement to a specific past epoch -const targetEpoch = 1000 // Must be less than or equal to current epoch -const partialAmounts = await synapse.payments.getSettlementAmounts(railId, targetEpoch) -console.log(`Partial settlement to epoch ${targetEpoch} would settle: ${ethers.formatUnits(partialAmounts.totalSettledAmount, 18)} USDFC`) -``` - -## Settlement Strategies - -### For Service Providers - -Service providers (payees) should settle regularly to receive accumulated earnings. - -```typescript -// Example: Settle all incoming rails using settleAuto -async function settleAllIncomingRails() { - const rails = await synapse.payments.getRailsAsPayee() - - for (const rail of rails) { - try { - // Check if settlement is worthwhile - const amounts = await synapse.payments.getSettlementAmounts(rail.railId) - - // Only settle if amount exceeds threshold (e.g., $10) - const threshold = ethers.parseUnits('10', 18) // 10 USDFC - if (amounts.totalNetPayeeAmount > threshold) { - // settleAuto handles both active and terminated rails - const tx = await synapse.payments.settleAuto(rail.railId) - await tx.wait() - console.log(`Settled rail ${rail.railId} for ${ethers.formatUnits(amounts.totalNetPayeeAmount, 18)} USDFC`) - } - } catch (error) { - console.error(`Failed to settle rail ${rail.railId}:`, error) - } - } -} -``` - -### For Clients - -Clients (payers) typically don't need to settle unless: - -- They want to update their available balance before withdrawal -- A rail is terminated and needs finalization - -```typescript -// Example: Settle before withdrawal -async function prepareForWithdrawal() { - const rails = await synapse.payments.getRailsAsPayer() - - // Settle all rails to update balance (settleAuto handles both active and terminated) - for (const rail of rails) { - const tx = await synapse.payments.settleAuto(rail.railId) - await tx.wait() - } - - // Now withdrawal will reflect accurate balance - const availableBalance = await synapse.payments.availableBalance() - console.log(`Available for withdrawal: ${ethers.formatUnits(availableBalance, 18)} USDFC`) -} -``` - -## Error Handling - -Common settlement errors and solutions: - -```typescript -try { - await synapse.payments.settle(railId) -} catch (error) { - if (error.message.includes('InsufficientNativeTokenForBurn')) { - console.error('Insufficient FIL for settlement fee (0.0013 FIL required)') - } else if (error.message.includes('NoProgressInSettlement')) { - console.error('Rail already settled to current epoch') - } else if (error.message.includes('RailNotActive')) { - console.error('Rail is not active or already terminated') - } else { - console.error('Settlement failed:', error) - } -} -``` - -## Next Steps - -- Learn about [Service Approvals](/guides/service-approvals) for managing operator permissions -- Explore [Storage Management](/guides/storage) which creates payment rails automatically -- Review the [Migration Guide](/guides/migration-guide) for updates to 
settlement APIs diff --git a/docs/src/content/docs/guides/react.mdx b/docs/src/content/docs/guides/react.mdx deleted file mode 100644 index c6fdabe5d..000000000 --- a/docs/src/content/docs/guides/react.mdx +++ /dev/null @@ -1,30 +0,0 @@ ---- -title: React -description: Learn how to set up and use iso-filecoin-react hooks. -draft: true ---- - -`iso-filecoin-react` provides React hooks and context to easily integrate Filecoin wallet interactions into your React applications. It works seamlessly with the wallet adapters from `iso-filecoin-wallets`. - -## Installation - -First, you need to install the necessary packages. You'll typically need `iso-filecoin-react`, `iso-filecoin` (as a peer dependency), and at least one wallet adapter from `iso-filecoin-wallets`. You might also need `@tanstack/react-query` if you haven't installed it already, as it's a peer dependency for hooks. - -```bash -# Using pnpm (recommended for this monorepo) -pnpm add iso-filecoin-react iso-filecoin iso-filecoin-wallets @tanstack/react-query -``` - -## Playground - -Play with all the hooks in this StackBlitz example: - - - -Check the [source code](https://github.com/hugomrdias/filecoin/tree/main/examples/ledger). - -## Next Steps - -* Explore the API Reference for detailed information on all hooks and types. -* See the guide on [Using Wallet Adapters](./iso-filecoin-wallets/index.md) for more on specific adapters. -* Check out the [examples](https://github.com/hugomrdias/filecoin/tree/main/examples) folder in the repository for practical examples. diff --git a/docs/src/content/docs/guides/storage.mdx b/docs/src/content/docs/guides/storage.mdx deleted file mode 100644 index 6da67be9c..000000000 --- a/docs/src/content/docs/guides/storage.mdx +++ /dev/null @@ -1,429 +0,0 @@ ---- -title: Storage -description: Guides for using the Storage API. -sidebar: - order: 3 ---- - -The Synapse SDK automatically handles all the complexity of storage setup for you - selecting providers, managing data sets, and coordinating with the blockchain. - -### Storage Context Creation - -You have two options: - -1. **Simple mode**: Just use `synapse.storage.upload()` directly - the SDK auto-manages contexts for you. -2. **Explicit mode**: Create contexts with `synapse.storage.createContexts()` for more control. Contexts can be used directly or passed in the options to `synapse.storage.upload()` and `synapse.storage.download()`. - -Behind the scenes, the process may be: - -- **Fast (< 1 second)**: When reusing existing data sets that match your requirements (including all metadata) -- **Slower (2-5 minutes)**: When setting up new blockchain infrastructure (i.e. 
creating a brand new data set)
-
-#### Basic Usage
-
-```typescript
-// Option 1: Auto-managed context (simplest)
-await synapse.storage.upload(data) // Context created/reused automatically
-
-// Option 2: Explicit context creation
-const contexts = await synapse.storage.createContexts()
-await synapse.storage.upload(data, { contexts })
-
-// Option 3: Explicit context upload
-const contexts = await synapse.storage.createContexts()
-await contexts[0].upload(data) // Upload to this specific context
-
-// Option 4: Contexts with metadata requirements
-const contexts = await synapse.storage.createContexts({
-  metadata: {
-    withIPFSIndexing: '',
-    category: 'videos'
-  }
-})
-// This will reuse existing data sets that have both of these metadata entries,
-// or create new ones if none match
-```
-
-#### Metadata Limits
-
-Metadata is subject to the following contract-enforced limits:
-
-**Data Set Metadata:**
-
-- Maximum of 10 key-value pairs per data set
-- Keys: Maximum 32 characters
-- Values: Maximum 128 characters
-
-**Piece Metadata:**
-
-- Maximum of 5 key-value pairs per piece
-- Keys: Maximum 32 characters
-- Values: Maximum 128 characters
-
-These limits are enforced by the blockchain contracts. The SDK will validate metadata before submission and throw descriptive errors if limits are exceeded.
-
-#### Advanced Usage with Callbacks
-
-Monitor the creation process with detailed callbacks:
-
-```typescript
-const contexts = await synapse.storage.createContexts({
-  providerAddresses: ['0x...'], // Optional: specify provider by address
-  withCDN: true, // Optional: enable CDN for faster downloads
-  callbacks: {
-    // Called when a provider is selected
-    onProviderSelected: (provider) => {
-      console.log(`Selected provider: ${provider.owner}`)
-      console.log(`  PDP URL: ${provider.pdpUrl}`)
-    },
-
-    // Called when data set is found or created
-    onDataSetResolved: (info) => {
-      if (info.isExisting) {
-        console.log(`Using existing data set: ${info.dataSetId}`)
-      } else {
-        console.log(`Created new data set: ${info.dataSetId}`)
-      }
-    },
-  }
-})
-```
-
-#### Creation Options
-
-##### createContexts
-
-```typescript
-export interface CreateContextsOptions {
-  count?: number // Number of contexts to create (default: 2)
-  providerIds?: number[] // Specific provider IDs to use
-  providerAddresses?: string[] // Specific provider addresses to use
-  dataSetIds?: number[] // Specific data set IDs to use
-  excludeProviderIds?: number[] // Do not select any of these providers
-  forceCreateDataSets?: boolean // Create new data sets, even if candidates exist
-  withCDN?: boolean // Enable CDN services
-  withIpni?: boolean // Enable IPNI
-  metadata?: Record<string, string> // Metadata requirements for data set selection/creation
-  callbacks?: StorageContextCallbacks // Progress callbacks
-  uploadBatchSize?: number // Max uploads per batch (default: 32, min: 1)
-}
-```
-
-##### createContext
-
-```typescript
-interface StorageServiceOptions {
-  providerId?: number // Specific provider ID to use
-  providerAddress?: string // Specific provider address to use
-  dataSetId?: number // Specific data set ID to use
-  withCDN?: boolean // Enable CDN services (alias for metadata: { withCDN: '' })
-  withIpni?: boolean // Enable IPNI
-  metadata?: Record<string, string> // Metadata requirements for data set selection/creation
-  callbacks?: StorageContextCallbacks // Progress callbacks
-  uploadBatchSize?: number // Max uploads per batch (default: 32, min: 1)
-}
-```
-
-#### Data Set Selection and Matching
-
-The SDK intelligently manages data sets to minimize
on-chain transactions. The selection behavior depends on the parameters you provide: - -**Selection Scenarios**: - -1. **Explicit data set ID**: If you specify `dataSetIds`, those exact data sets are used (must exist and be accessible) -2. **Specific provider**: If you specify `providerIds` or `providerAddresses`, the SDK searches for matching data sets only within those providers' existing data sets -3. **Automatic selection**: Without specific parameters, the SDK searches across all your data sets with any approved provider - -**Exact Metadata Matching**: In scenarios 2 and 3, the SDK will reuse an existing data set only if it has **exactly** the same metadata keys and values as requested. This ensures data sets remain organized according to your specific requirements. - -**Selection Priority**: When multiple data sets match your criteria: - -- Data sets with existing pieces are preferred over empty ones -- Within each group (with pieces vs. empty), the oldest data set (lowest ID) is selected - -**Provider Selection** (when no matching data sets exist): - -- If you specify a provider (via `providerId` or `providerAddress`), that provider is used -- Otherwise, the SDK currently uses random selection from all approved providers -- Before finalizing selection, the SDK verifies the provider is reachable via a ping test -- If a provider fails the ping test, the SDK tries the next candidate - -```typescript -// Scenario 1: Explicit data set (no matching required) -const contexts1 = await synapse.storage.createContexts({ - dataSetIds: [42, 44], -}) -const context1 = await synapse.storage.createContext({ - dataSetId: 42 // Uses data set 42 directly -}) - -// Scenario 2: Provider-specific search -const contexts2 = await synapse.storage.createContexts({ - providerIds: [3, 7], - metadata: { app: 'myapp', env: 'prod' }, -}) -const context2 = await synapse.storage.createContext({ - providerId: 3, - metadata: { app: 'myapp', env: 'prod' }, -}) -// Searches ONLY within provider 3's data sets for exact metadata match - -// Scenario 3: Automatic selection across all providers -const contexts3 = await synapse.storage.createContexts({ - metadata: { app: 'myapp', env: 'prod' } -}) -const context3 = await synapse.storage.createContext({ - metadata: { app: 'myapp', env: 'prod' } -}) -// Searches ALL your data sets across any approved provider - -// Metadata matching examples (exact match required): -// These will use the SAME data set (if it exists) -const contextA = await synapse.storage.createContext({ - metadata: { app: 'myapp', env: 'prod' } -}) -const contextB = await synapse.storage.createContext({ - metadata: { env: 'prod', app: 'myapp' } // Order doesn't matter -}) - -// These will use DIFFERENT data sets -const contextC = await synapse.storage.createContext({ - metadata: { app: 'myapp' } // Missing 'env' key -}) -const contextD = await synapse.storage.createContext({ - metadata: { app: 'myapp', env: 'prod', extra: 'data' } // Has extra key -}) - -// Provider selection when no data sets match: -const newContext = await synapse.storage.createContext({ - metadata: { app: 'newapp', version: 'v1' } -}) -// If no existing data sets have this exact metadata: -// 1. SDK randomly selects from approved providers -// 2. Pings the selected provider to verify availability -// 3. 
Creates a new data set with that provider -``` - -**The `withCDN` Option**: This is a convenience alias for adding `{ withCDN: '' }` to metadata: - -```typescript -// These are equivalent: -const context1 = await synapse.storage.createContext({ withCDN: true }) -const context2 = await synapse.storage.createContext({ - metadata: { withCDN: '' } -}) -``` - -#### Storage Context Properties - -Once created, the storage context provides access to: - -```typescript -// The data set ID being used -console.log(`Data set ID: ${context.dataSetId}`) - -// The service provider address -console.log(`Service provider: ${context.serviceProvider}`) -``` - -#### Storage Context Methods - -##### Preflight Upload - -Check if an upload is possible before attempting it: - -```typescript -const preflight = await context.preflightUpload(dataSize) -console.log('Estimated costs:', preflight.estimatedCost) -console.log('Allowance sufficient:', preflight.allowanceCheck.sufficient) -``` - -##### Upload and Download - -The SDK uses memory-efficient streaming for uploads, enabling uploads up to 1 GiB with minimal memory usage. - -**Streaming Uploads (Recommended)** - -Stream data directly from files or other sources using `AsyncIterable`: - -```typescript -// Node.js: Stream from file system -import fs from 'fs' - -const result = await context.upload(fs.createReadStream('large-file.bin'), { - metadata: { filename: 'large-file.bin' }, - onProgress: (progress) => { - const percent = (progress.uploadedBytes / progress.totalBytes * 100).toFixed(1) - console.log(`Upload progress: ${percent}%`) - }, - onUploadComplete: (pieceCid) => { - console.log(`Upload complete! PieceCID: ${pieceCid}`) - } -}) - -// Browser: Stream from File input -const fileInput = document.querySelector('input[type="file"]') -const file = fileInput.files[0] -const result = await context.upload(file.stream(), { - onProgress: (progress) => { - updateProgressBar(progress.uploadedBytes, progress.totalBytes) - } -}) -``` - -**Simple Uploads** - -For smaller files or when you already have data in memory, pass `Uint8Array` or `ArrayBuffer` directly. The SDK automatically streams it for you: - -```typescript -// Upload from memory (automatically streamed internally) -const fileData = await fs.promises.readFile('file.bin') -const result = await context.upload(fileData, { - metadata: { - snapshotVersion: 'v2.1.0', - generator: 'backup-system' - }, - onProgress: (progress) => { - console.log(`${progress.uploadedBytes} / ${progress.totalBytes} bytes`) - }, - onUploadComplete: (pieceCid) => { - console.log(`Upload complete! 
PieceCID: ${pieceCid}`)
-  },
-  onPieceAdded: (hash) => {
-    console.log(`Piece added, tx: ${hash}`)
-  },
-  onPieceConfirmed: (pieceIds) => {
-    console.log(`Piece IDs: ${pieceIds.join(', ')}`)
-  }
-})
-```
-
-**Downloads**
-
-```typescript
-// Download data from this context's specific provider
-const downloaded = await context.download(result.pieceCid)
-
-// Get the list of piece CIDs in the current data set by querying the provider
-const pieceCids = await context.getDataSetPieces()
-console.log(`Piece CIDs: ${pieceCids.map(cid => cid.toString()).join(', ')}`)
-
-// Check the status of a piece on the service provider
-const status = await context.pieceStatus(result.pieceCid)
-console.log(`Piece exists: ${status.exists}`)
-console.log(`Data set last proven: ${status.dataSetLastProven}`)
-console.log(`Data set next proof due: ${status.dataSetNextProofDue}`)
-```
-
-##### Size Constraints
-
-The storage service enforces the following size limits for uploads:
-
-- **Minimum**: 65 bytes
-- **Maximum**: ~1 GiB (1,065,353,216 bytes, accounting for Filecoin's fr32 expansion)
-
-Attempting to upload data outside these limits will result in an error.
-
-**Streaming Implementation**: The SDK automatically uses memory-efficient streaming for all uploads. Your `Uint8Array` or `ArrayBuffer` data is streamed to the provider with parallel CommP (PieceCID) calculation. For very large files (hundreds of megabytes), be aware of memory pressure in resource-constrained environments like browsers.
-
-***Note: these limits are temporary during this current pre-v1 period and will eventually be extended. You can read more in [this issue thread](https://github.com/FilOzone/synapse-sdk/issues/110)***
-
-##### Efficient Batch Uploads
-
-When uploading multiple files, the SDK automatically batches operations for efficiency. Due to blockchain transaction ordering requirements, uploads are processed sequentially. To maximize efficiency:
-
-```typescript
-// Efficient: Start all uploads without await - they'll be batched automatically
-const uploads = []
-for (const data of dataArray) {
-  uploads.push(context.upload(data)) // No await here
-}
-const results = await Promise.all(uploads)
-
-// Less efficient: Awaiting each upload forces sequential processing
-for (const data of dataArray) {
-  await context.upload(data) // Each waits for the previous to complete
-}
-```
-
-The SDK batches up to 32 uploads by default (configurable via `uploadBatchSize`). If you have more than 32 files, they'll be processed in multiple batches automatically.
-
-##### Removing Data
-
-To delete an entire data set and discontinue payments for the service, call `context.terminate()`.
-This method submits an on-chain transaction to initiate the termination process. Following a defined termination period, payments will cease, and the service provider will be able to delete the data set.
-
-You can also terminate a data set using `synapse.storage.terminateDataSet(dataSetId)` in cases where creating a context is not possible, or where the `dataSetId` is already known and creating a context is unnecessary.
-
-**Important:** Data set termination is irreversible and cannot be canceled once initiated.
-
-Deletion of individual pieces is not supported at this time but is on the roadmap.
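-
-The two termination paths described above, sketched minimally (assuming, as with the other write operations on this page, that each returns a transaction you can wait on):
-
-```typescript
-// Path 1: terminate via an existing storage context
-const tx = await context.terminate()
-await tx.wait()
-
-// Path 2: terminate directly by data set ID, without creating a context
-const tx2 = await synapse.storage.terminateDataSet(dataSetId)
-await tx2.wait()
-```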
- -### Storage Information - -Get comprehensive information about the storage service: - -```typescript -// Get storage service info including pricing and providers -const info = await synapse.getStorageInfo() -console.log('Price per TiB/month:', info.pricing.noCDN.perTiBPerMonth) -console.log('Available providers:', info.providers.length) -console.log('Network:', info.serviceParameters.network) - -// Get details about a specific provider -const providerInfo = await synapse.getProviderInfo('0x...') -console.log('Provider PDP URL:', providerInfo.pdpUrl) -``` - -### Download Options - -The SDK provides flexible download options with clear semantics: - -#### SP-Agnostic Download (from anywhere) - -Download pieces from any available provider using the StorageManager: - -```typescript -// Download from any provider that has the piece -const data = await synapse.storage.download(pieceCid) - -// Download with CDN optimization (if available) -const dataWithCDN = await synapse.storage.download(pieceCid, { withCDN: true }) - -// Prefer a specific provider (falls back to others if unavailable) -const dataFromProvider = await synapse.storage.download(pieceCid, { - providerAddress: '0x...' -}) -``` - -#### Context-Specific Download (from this provider) - -When using a StorageContext, downloads are automatically restricted to that specific provider: - -```typescript -// Downloads from the provider associated with this context -const context = await synapse.storage.createContext({ providerAddress: '0x...' }) -const data = await context.download(pieceCid) - -// The context passes its withCDN setting to the download -const contextWithCDN = await synapse.storage.createContext({ withCDN: true }) -const dataWithCDN = await contextWithCDN.download(pieceCid) // Uses CDN if available -``` - -#### CDN Option Inheritance - -The `withCDN` option (which is an alias for `metadata: { withCDN: '' }`) follows a clear inheritance hierarchy: - -1. **Synapse level**: Default setting for all operations -2. **StorageContext level**: Can override Synapse's default -3. **Method level**: Can override instance settings - -```typescript -// Example of inheritance -const synapse = await Synapse.create({ withCDN: true }) // Global default: CDN enabled -const context = await synapse.storage.createContext({ withCDN: false }) // Context override: CDN disabled -await synapse.storage.download(pieceCid) // Uses Synapse's withCDN: true -await context.download(pieceCid) // Uses context's withCDN: false -await synapse.storage.download(pieceCid, { withCDN: false }) // Method override: CDN disabled -``` - -Note: When `withCDN: true` is set, it adds `{ withCDN: '' }` to the data set's metadata, ensuring CDN-enabled and non-CDN data sets remain separate. diff --git a/docs/src/content/docs/guides/telemetry.md b/docs/src/content/docs/guides/telemetry.md deleted file mode 100644 index 51bfac2ff..000000000 --- a/docs/src/content/docs/guides/telemetry.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Telemetry -description: Notes about the telemetry functionality that is within Synapse. ---- - -To help maintainers validate functionality and iron out problems throughout the whole Filecoin Onchain Cloud stack, starting from the SDK, telemetry is **temporarily enabled by default for the calibration network** in Synapse. We are currently leveraging sentry.io as discussed in [issue #328](https://github.com/FilOzone/synapse-sdk/issues/328). 
-
-### How to disable telemetry
-
-There are multiple ways to disable Synapse telemetry:
-
-1) Via Synapse Config:
-```ts
-const synapse = await Synapse.create({
-  /* ...existing options... */
-  telemetry: { sentryInitOptions: { enabled: false } },
-})
-```
-
-2) Set the environment variable `SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse.
-
-3) Set `globalThis.SYNAPSE_TELEMETRY_DISABLED=true` before instantiating Synapse.
-
-### What is being collected and why
-
-All HTTP calls are instrumented (except for static assets like JS, CSS, and images), even HTTP calls that originate from outside of Synapse. This was the quickest way to ensure we captured the information we are after.
-
-The primary information we are attempting to collect is HTTP request paths, response status codes, and request/response latencies to RPC providers and Service Providers (SPs). Non-200 responses or "slow" responses may indicate issues in Synapse or the backend SP software, or general operational issues with RPC providers or SPs. These are issues we want to be aware of so we can potentially fix or improve them.
-
-We also capture general uncaught errors. These could be indicative of issues in Synapse, which we'd want to fix.
-
-We are not capturing:
-
-- Personally identifiable information (PII). We explicitly [disable sending default PII to Sentry](https://docs.sentry.io/platforms/javascript/configuration/options/#sendDefaultPii).
-- Metrics on static asset (e.g., CSS, JS, image) retrieval.
-
-(One can verify these claims in [telemetry/service.ts](https://github.com/FilOzone/synapse-sdk/blob/master/packages/synapse-sdk/src/telemetry/service.ts).)
-
-### Why is telemetry collection happening in a library like Synapse?
-
-Collecting telemetry through Synapse ([issue #328](https://github.com/FilOzone/synapse-sdk/issues/328)) was a short-term, dev-resource-efficient decision. In this period of focusing on stability, the goal is to capture request failures and other client-side errors as broadly and quickly as possible so we have an enumeration of the problems and their impact. By setting up telemetry at the Synapse layer, we can broadly get telemetry from some of the first consumers by default without requiring extra work from them (e.g., filecoin-pin, filecoin-pin-website, synapse demo websites). This is a short-term measure.
-
-### How long will Synapse collect telemetry?
-
-This broad telemetry at the library/SDK layer will be removed by GA (by end of November 2025). At that point, we'll do one or more of the following:
-
-1. Reduce telemetry collection to cover only calls originating from Synapse (not all HTTP calls).
-2. Switch the default to opt-in rather than the current opt-out. (Note that we currently enable telemetry by default only for the calibration network, not for mainnet.)
-3. Remove telemetry from Synapse entirely, and instead require applications (e.g., filecoin-pin, filecoin-pin-website) to do their own telemetry collection.
-
-The tracking issue for this cleanup is [issue #363](https://github.com/FilOzone/synapse-sdk/issues/363).
-
-### How to configure telemetry
-
-Synapse consumers can pass in any [Sentry options](https://docs.sentry.io/platforms/javascript/configuration/options/) via `Synapse.create({ telemetry: { sentryInitOptions: { ... } } })`.
-
-Synapse's default Sentry options are applied in [src/telemetry/service.ts](https://github.com/FilOzone/synapse-sdk/blob/master/packages/synapse-sdk/src/telemetry/service.ts) whenever not explicitly set by the user.
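-
-For example, a minimal sketch overriding a couple of standard Sentry init options (the values are illustrative, not recommendations):
-
-```ts
-const synapse = await Synapse.create({
-  /* ...existing options... */
-  telemetry: {
-    sentryInitOptions: {
-      tracesSampleRate: 0.1, // illustrative: sample 10% of traces
-      environment: 'staging', // illustrative: tag events with an environment
-    },
-  },
-})
-```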
- -Any explicit tags to add to all Sentry calls can be added with `Synapse.create({telemetry : { sentrySetTags : {...} },})`. - -One also has direct access to the Sentry instance that Synapse is using via `synapse.telemetry.sentry`, at which point any of the [Sentry APIs](https://docs.sentry.io/platforms/javascript/apis/) can be invoked. - -### Who has access to the telemetry data -Access is restricted to the Synapse maintainers and product/support personnel actively involved in the Filecoin Onchain Cloud who work with Synapse. \ No newline at end of file diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx index 0acb96bc9..43963e1fb 100644 --- a/docs/src/content/docs/index.mdx +++ b/docs/src/content/docs/index.mdx @@ -11,7 +11,7 @@ hero: light: ../../assets/globe-better-light.png actions: - text: Get started - link: getting-started/ + link: /getting-started/ icon: right-arrow - text: Star on GitHub link: https://github.com/FilOzone/synapse-sdk @@ -29,20 +29,16 @@ Ready to integrate Filecoin Onchain Cloud into your application? - Learn about the Filecoin Onchain Cloud architecture and how it works. [Learn - More →](/introduction/about/) + Learn about the Filecoin Onchain Cloud architecture and how it works. [Learn More →](/introduction/about/) - Get up and running in minutes with our step-by-step tutorial. [Start - Building →](/getting-started/) + Get up and running in minutes with our step-by-step tutorial. [Start Building →](/getting-started/) - Understand the architecture and key protocols behind FOC. [Learn More - →](/core-concepts/architecture/) + Understand the architecture and key protocols behind FOC. [Learn More →](/core-concepts/architecture/) - Comprehensive guides for storage, payments, and monitoring. [Explore Guides - →](/developer-guides/) + Comprehensive guides for storage, payments, and monitoring. [Explore Guides →](/developer-guides/synapse/) diff --git a/docs/src/content/docs/introduction/about.mdx b/docs/src/content/docs/introduction/about.mdx index 0c99b799c..4eeb375dc 100644 --- a/docs/src/content/docs/introduction/about.mdx +++ b/docs/src/content/docs/introduction/about.mdx @@ -9,7 +9,7 @@ Since its launch, [**Filecoin Network**](https://filecoin.io/) has been the back [**Filecoin Onchain Cloud (FOC)**](https://www.filecoin.cloud/) addresses this next-generation demand by reimagining storage as a **programmable cloud service layer** — where each capability (storage, retrieval, billing, verification) exists as a composable onchain module. These modules can be combined, forked, or extended, giving builders the flexibility to create customized decentralized applications and data-driven services. -Built on the **Filecoin Virtual Machine (FVM)** and powered by a distributed network of verifiable storage providers, Filecoin Onchain Cloud transforms the Filecoin Network from a large-scale cold storage layer into a programmable, service-based data infrastructure. +Built on the **Filecoin Virtual Machine (FVM)** and powered by a distributed network of verifiable storage providers, Filecoin Onchain Cloud transforms the Filecoin Network from a large-scale cold storage layer into a programmable, service-based data infrastructure. Ready to build? 
[Get started with the Synapse SDK →](/getting-started/)
@@ -46,10 +46,12 @@ The Filecoin Onchain Cloud offers foundational components that together create a
 Each of these components works independently yet integrates seamlessly through onchain smart contracts and the [**Synapse SDK**](https://github.com/FilOzone/synapse-sdk), forming a cohesive, modular system. Together, they deliver the core properties of a next-generation decentralized cloud: verifiability, programmability, and composability.
 
 ### Key Products
+
 The **Filecoin Onchain Cloud** is not just an infrastructure layer — it’s a platform for innovation. By exposing verifiable storage, retrieval, and payment capabilities as onchain services, FOC empowers developers to build entirely new products and workflows to expand the functionality of the Onchain Cloud and bring Filecoin closer to becoming a **modular, decentralized cloud** ecosystem.
 
 #### Filecoin Pin
+
 **Filecoin Pin** is a fully decentralized persistence layer for IPFS content using the global network of Filecoin storage providers with cryptographic guarantees. It brings **IPFS's filesystem structure to Filecoin Onchain Cloud** for developers building on IPFS who need trustless, economically-incentivized persistence for their content.
 
 [Learn more about Filecoin Pin →](https://docs.filecoin.io/builder-cookbook/filecoin-pin)
@@ -62,9 +64,9 @@ The **Filecoin Onchain Cloud (FOC)** offers transparent and verifiable pricing f
 All payments are settled automatically through Filecoin Pay, ensuring on-chain accountability and proof-linked billing.
 
 | **Service** | **Pricing** | **Description** |
-|-------|---------|--------------------|
+| ------- | --------- | -------------------- |
 | **Base Storage & Retrieval** | **$2.5/TiB/month/copy** (minimum 2 copies) | Redundant storage ensures durability and continuous PDP verification. Minimum monthly charge of **0.06 USDFC**, covering roughly **24 GiB** of total data (two copies). |
-| **Add-on Retrieval (Filecoin Beam)** | **Up to $0.014/GiB egress** | Charged for outbound data delivered via Filecoin Beam gateways or retrieval providers. |
+| **Add-on Retrieval (Filecoin Beam)** | **Up to $0.014/GiB egress** | Charged for outbound data delivered via Filecoin Beam gateways or retrieval providers. |
 
 :::note[Verifiable Payment]
 All storage and retrieval charges are denominated in **USDFC** (or supported ERC-20 tokens) and settled via **Filecoin Pay**. Every transaction is on-chain, auditable, and linked to verifiable service proofs.
diff --git a/docs/src/content/docs/introduction/why.mdx b/docs/src/content/docs/introduction/why.mdx
index fd7673065..f667a6037 100644
--- a/docs/src/content/docs/introduction/why.mdx
+++ b/docs/src/content/docs/introduction/why.mdx
@@ -18,6 +18,7 @@ Traditional cloud storage solutions present several challenges:
 Traditional cloud services offer performance and usability but lack transparency, auditability, and user ownership. **Filecoin Onchain Cloud** bridges this gap by introducing a **trustless execution layer** for decentralized services. Each service in the Onchain Cloud — from storage to payment — operates as a smart contract, ensuring that every action, transaction, and proof is **verifiable on the Filecoin blockchain**.
 
 FOC is designed to:
+
 - ✅ **Enable hot and verifiable data storage and retrieval** with Proof of Data Possession (PDP) & Filecoin Beam
 - 🧩 **Simplify developer integration** through the Synapse SDK — a TypeScript library for building Filecoin-native dApps
 - 💰 **Power programmable payments** via Filecoin Pay — a token-agnostic payment rail for decentralized services
@@ -27,7 +28,7 @@ FOC is designed to:
 ### Key Differentiators
 
 | Feature | Traditional Cloud | Filecoin Onchain Cloud |
-|---------|------------------|------------------------|
+| --------- | ------------------ | ------------------------ |
 | Data Control | Provider controlled | User owned |
 | Transparency | Limited | Full blockchain verification |
 | Programmability | Provider APIs | Smart contracts |
diff --git a/docs/src/content/docs/resources/additional-resources.md b/docs/src/content/docs/resources/additional-resources.md
new file mode 100644
index 000000000..5c04df2b8
--- /dev/null
+++ b/docs/src/content/docs/resources/additional-resources.md
@@ -0,0 +1,57 @@
+---
+title: Additional Resources
+description: Resources for Filecoin Onchain Cloud.
+sidebar:
+  order: 2
+---
+
+This section contains additional resources for Filecoin Onchain Cloud.
+
+## Getting USDFC Tokens
+
+- Mainnet
+  - Bridge/Swap any token to USDFC using the [Squid Router](https://app.squidrouter.com)
+  - Deposit FIL as collateral to get USDFC using the [USDFC Website](https://app.usdfc.net)
+- Calibration Testnet
+  - Get tUSDFC tokens using the [Calibration Testnet tUSDFC Faucet](https://forest-explorer.chainsafe.dev/faucet/calibnet_usdfc)
+  - Deposit tFIL as collateral to get tUSDFC using the [USDFC Website](https://app.usdfc.net)
+
+## Filecoin Onchain Cloud Explorers
+
+### PDP Explorer
+
+Track proofs submitted to the PDP contract.
+
+- **Mainnet**: <https://pdp.vxb.ai/mainnet>
+- **Calibration Testnet**: <https://pdp.vxb.ai/calibration>
+
+### Filecoin Pay Explorer
+
+Track your payment rails and payments.
+
+- **Mainnet**: <https://pay.filecoin.services/>
+- **Calibration Testnet**: <https://staging-pay.filecoin.services/>
+
+### Storage Providers DealBot
+
+Track storage providers' health and reliability.
+
+- **Mainnet**: <https://dealbot.fwss.io/>
+- **Calibration Testnet**: <https://dealbot-staging.fwss.io>
+
+## Network Information
+
+- Mainnet
+  - **Chain ID**: 314
+  - **RPC URLs List**: [https://chainlist.org/chain/314](https://chainlist.org/chain/314)
+  - **Block Explorers**:
+    - [https://filecoin.blockscout.com](https://filecoin.blockscout.com)
+    - [https://filfox.info](https://filfox.info)
+    - [https://beryx.io](https://beryx.io)
+- Calibration Testnet
+  - **Chain ID**: 314159
+  - **RPC URLs List**: [https://chainlist.org/chain/314159](https://chainlist.org/chain/314159)
+  - **Block Explorers**:
+    - [https://filecoin-testnet.blockscout.com](https://filecoin-testnet.blockscout.com)
+    - [https://calibration.filfox.info](https://calibration.filfox.info)
+    - [https://beryx.io](https://beryx.io)
diff --git a/docs/src/content/docs/resources/additional-resources.mdx b/docs/src/content/docs/resources/additional-resources.mdx
deleted file mode 100644
index c81c0c36c..000000000
--- a/docs/src/content/docs/resources/additional-resources.mdx
+++ /dev/null
@@ -1,67 +0,0 @@
----
-title: Additional Resources
-description: Resources for Filecoin Onchain Cloud.
-sidebar:
-  order: 2
----
-
-This section contains additional resources for Filecoin Onchain Cloud.
-
-## Getting USDFC Tokens
-
-### Filecoin Mainnet
-
-**Bridge/Swap any token to USDFC** using the [Squid Router](https://app.squidrouter.com/)
-
-**Deposit FIL as collateral to get USDFC** using the [USDFC Website](https://app.usdfc.net/#/)
-
-### Calibration Testnet
-
-**Get tUSDFC tokens** using the [Calibration Testnet tUSDFC Faucet](https://forest-explorer.chainsafe.dev/faucet/calibnet_usdfc)
-
-**Deposit tFIL as collateral to get tUSDFC** using the [USDFC Website](https://app.usdfc.net/#/)
-
-## Filecoin Onchain Cloud Explorers
-
-### PDP Explorer
-
-Track proofs submitted to the PDP contract.
-
-- **Filecoin Mainnet**: https://pdp.vxb.ai/mainnet
-- **Calibration Testnet**: https://pdp.vxb.ai/calibration
-
-### Filecoin Pay Explorer
-
-Track your payment rails and payments.
-
-- **Filecoin Mainnet**: https://pay.filecoin.services/
-- **Calibration Testnet**: https://staging-pay.filecoin.services/
-
----
-
-### Storage Providers DealBot
-
-Track storage providers' health and reliability.
-
-- **Filecoin Mainnet**: https://dealbot.fwss.io/
-- **Calibration Testnet**: https://dealbot-staging.fwss.io
-
-## Network Information
-
-### Calibration Testnet
-
-- **Chain ID**: 314159
-- **RPC URLs List**: https://chainlist.org/chain/314159
-- **Block Explorers**:
-  - https://filecoin-testnet.blockscout.com
-  - https://calibration.filfox.info
-  - https://beryx.io/
-
-### Mainnet
-
-- **Chain ID**: 314
-- **RPC URLs List**: https://chainlist.org/chain/314
-- **Block Explorers**:
-  - https://filecoin.blockscout.com
-  - https://filfox.info
-  - https://beryx.io/
diff --git a/docs/src/content/docs/resources/contracts.md b/docs/src/content/docs/resources/contracts.md
new file mode 100644
index 000000000..1f7693b7f
--- /dev/null
+++ b/docs/src/content/docs/resources/contracts.md
@@ -0,0 +1,34 @@
+---
+title: Contract Addresses
+description: Official smart contract addresses for Filecoin Onchain Cloud services
+sidebar:
+  order: 1
+---
+
+This section contains the deployed smart contract addresses for Filecoin Onchain Cloud services.
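+
+As a quick sanity check from code, here is a small illustrative viem sketch that reads the USDFC token symbol on Calibration using the address from the tables below (viem is the client library used by this repo's CLI examples; this snippet is not part of the SDK):
+
+```ts
+import { createPublicClient, erc20Abi, http } from 'viem'
+import { filecoinCalibration } from 'viem/chains'
+
+const client = createPublicClient({ chain: filecoinCalibration, transport: http() })
+
+// USDFC Token on Calibration, from the table below
+const symbol = await client.readContract({
+  address: '0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0',
+  abi: erc20Abi,
+  functionName: 'symbol',
+})
+```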
+ +## Mainnet + +| Contract | Address | Explorer | +| -------- | ------- | --------- | +| Multicall3 | `0xcA11bde05977b3631167028862bE2a173976CA11` | [View](https://filecoin.blockscout.com/address/0xcA11bde05977b3631167028862bE2a173976CA11) | +| Warm Storage Service | `0x8408502033C418E1bbC97cE9ac48E5528F371A9f` | [View](https://filecoin.blockscout.com/address/0x8408502033C418E1bbC97cE9ac48E5528F371A9f) | +| PDPVerifier | `0xBADd0B92C1c71d02E7d520f64c0876538fa2557F` | [View](https://filecoin.blockscout.com/address/0xBADd0B92C1c71d02E7d520f64c0876538fa2557F) | +| Filecoin Pay | `0x23b1e018F08BB982348b15a86ee926eEBf7F4DAa` | [View](https://filecoin.blockscout.com/address/0x23b1e018F08BB982348b15a86ee926eEBf7F4DAa) | +| USDFC Token | `0x80B98d3aa09ffff255c3ba4A241111Ff1262F045` | [View](https://filecoin.blockscout.com/address/0x80B98d3aa09ffff255c3ba4A241111Ff1262F045) | +| Warm Storage Service StateView | `0x9e4e6699d8F67dFc883d6b0A7344Bd56F7E80B46` | [View](https://filecoin.blockscout.com/address/0x9e4e6699d8F67dFc883d6b0A7344Bd56F7E80B46) | +| Service Provider Registry | `0xf55dDbf63F1b55c3F1D4FA7e339a68AB7b64A5eB` | [View](https://filecoin.blockscout.com/address/0xf55dDbf63F1b55c3F1D4FA7e339a68AB7b64A5eB) | +| Session Key Registry | `0x74FD50525A958aF5d484601E252271f9625231aB` | [View](https://filecoin.blockscout.com/address/0x74FD50525A958aF5d484601E252271f9625231aB) | + +## Calibration Testnet + +| Contract | Address | Explorer | +| -------- | ------- | -------- | +| Multicall3 | `0xcA11bde05977b3631167028862bE2a173976CA11` | [View](https://filecoin-testnet.blockscout.com/address/0xcA11bde05977b3631167028862bE2a173976CA11) | +| Warm Storage Service | `0x02925630df557F957f70E112bA06e50965417CA0` | [View](https://filecoin-testnet.blockscout.com/address/0x02925630df557F957f70E112bA06e50965417CA0) | +| PDPVerifier | `0x85e366Cf9DD2c0aE37E963d9556F5f4718d6417C` | [View](https://filecoin-testnet.blockscout.com/address/0x85e366Cf9DD2c0aE37E963d9556F5f4718d6417C) | +| Filecoin Pay | `0x09a0fDc2723fAd1A7b8e3e00eE5DF73841df55a0` | [View](https://filecoin-testnet.blockscout.com/address/0x09a0fDc2723fAd1A7b8e3e00eE5DF73841df55a0) | +| USDFC Token | `0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0` | [View](https://filecoin-testnet.blockscout.com/address/0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0) | +| Warm Storage Service StateView | `0xA5D87b04086B1d591026cCE10255351B5AA4689B` | [View](https://filecoin-testnet.blockscout.com/address/0xA5D87b04086B1d591026cCE10255351B5AA4689B) | +| Service Provider Registry | `0x839e5c9988e4e9977d40708d0094103c0839Ac9D` | [View](https://filecoin-testnet.blockscout.com/address/0x839e5c9988e4e9977d40708d0094103c0839Ac9D) | +| Session Key Registry | `0x518411c2062E119Aaf7A8B12A2eDf9a939347655` | [View](https://filecoin-testnet.blockscout.com/address/0x518411c2062E119Aaf7A8B12A2eDf9a939347655) | diff --git a/docs/src/content/docs/resources/contracts.mdx b/docs/src/content/docs/resources/contracts.mdx deleted file mode 100644 index fdfaa9cef..000000000 --- a/docs/src/content/docs/resources/contracts.mdx +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: Contract Addresses -description: Official smart contract addresses for Filecoin Onchain Cloud services -sidebar: - order: 1 ---- - -This section contains the deployed smart contract addresses for Filecoin Onchain Cloud services. 
-
-### Mainnet
-
-| Contract | Address | Explorer |
-| -------------------- | -------------------------------------------- | -------------------------------------------------------------------------------------------------- |
-| Warm Storage Service | `0x8408502033C418E1bbC97cE9ac48E5528F371A9f` | [View](https://filecoin.blockscout.com/address/0x8408502033C418E1bbC97cE9ac48E5528F371A9f) |
-| PDPVerifier | `0xBADd0B92C1c71d02E7d520f64c0876538fa2557F` | [View](https://filecoin.blockscout.com/address/0xBADd0B92C1c71d02E7d520f64c0876538fa2557F) |
-| Filecoin Pay | `0x23b1e018F08BB982348b15a86ee926eEBf7F4DAa` | [View](https://filecoin.blockscout.com/address/0x23b1e018F08BB982348b15a86ee926eEBf7F4DAa) |
-| USDFC Token | `0x80B98d3aa09ffff255c3ba4A241111Ff1262F045` | [View](https://filecoin.blockscout.com/address/0x80B98d3aa09ffff255c3ba4A241111Ff1262F045) |
-
-### Calibration Testnet
-
-| Contract | Address | Explorer |
-| -------------------- | -------------------------------------------- | -------------------------------------------------------------------------------------------------- |
-| Warm Storage Service | `0x02925630df557F957f70E112bA06e50965417CA0` | [View](https://filecoin-testnet.blockscout.com/address/0x02925630df557F957f70E112bA06e50965417CA0) |
-| PDPVerifier | `0x85e366Cf9DD2c0aE37E963d9556F5f4718d6417C` | [View](https://filecoin-testnet.blockscout.com/address/0x85e366Cf9DD2c0aE37E963d9556F5f4718d6417C) |
-| Filecoin Pay | `0x09a0fDc2723fAd1A7b8e3e00eE5DF73841df55a0` | [View](https://filecoin-testnet.blockscout.com/address/0x09a0fDc2723fAd1A7b8e3e00eE5DF73841df55a0) |
-| USDFC Token | `0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0` | [View](https://filecoin-testnet.blockscout.com/address/0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0) |
diff --git a/docs/tsconfig.json b/docs/tsconfig.json
index 9d1f82df1..6c1ba0a96 100644
--- a/docs/tsconfig.json
+++ b/docs/tsconfig.json
@@ -4,7 +4,7 @@
   "exclude": ["dist"],
   "references": [
     {
-      "path": "../packages/synapse-sdk"
+      "path": "../packages/synapse-sdk/tsconfig.json"
    }
  ]
}
diff --git a/examples/cli/biome.json b/examples/cli/biome.json
index 645b1cb4c..7e2c5ead1 100644
--- a/examples/cli/biome.json
+++ b/examples/cli/biome.json
@@ -1,6 +1,6 @@
 {
   "root": false,
-  "$schema": "https://biomejs.dev/schemas/2.3.5/schema.json",
+  "$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
   "files": {
     "ignoreUnknown": true
   },
diff --git a/examples/cli/package.json b/examples/cli/package.json
index 07a4466cd..7ae7bffa1 100644
--- a/examples/cli/package.json
+++ b/examples/cli/package.json
@@ -4,9 +4,9 @@
   "description": "CLI for Synapse",
   "type": "module",
   "private": true,
-  "main": "index.js",
+  "main": "src/index.ts",
   "scripts": {
-    "lint": "biome check --no-errors-on-unmatched .",
+    "lint": "tsc && biome check --no-errors-on-unmatched .",
     "test": "echo \"Error: no test specified\" && exit 1"
   },
   "keywords": [],
@@ -18,10 +18,10 @@
     "@filoz/synapse-sdk": "workspace:^",
     "cleye": "^2.0.0",
     "conf": "^15.0.2",
-    "viem": "^2.38.4"
+    "viem": "catalog:"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.3.7",
-    "@types/node": "^24.9.1"
+    "@biomejs/biome": "catalog:",
+    "@types/node": "catalog:"
   }
 }
diff --git a/examples/cli/src/client.ts b/examples/cli/src/client.ts
new file mode 100644
index 000000000..11dec1758
--- /dev/null
+++ b/examples/cli/src/client.ts
@@ -0,0 +1,37 @@
+import * as p from '@clack/prompts'
+import { getChain } from '@filoz/synapse-core/chains'
+import { createPublicClient, createWalletClient, type Hex, http } from 'viem'
+import { privateKeyToAccount } from 'viem/accounts'
+import config from './config.ts'
+
+/** Loads the stored private key and builds a viem wallet client for the given chain; exits if the CLI was never initialized. */
+export function privateKeyClient(chainId: number) {
+  const chain = getChain(chainId)
+  const privateKey = config.get('privateKey')
+  if (!privateKey) {
+    p.log.error('Private key not found')
+    p.outro('Please run `synapse init` to initialize the CLI')
+    process.exit(1)
+  }
+  const account = privateKeyToAccount(privateKey as Hex)
+  const client = createWalletClient({
+    account,
+    chain,
+    transport: http(),
+  })
+  return {
+    client,
+    privateKey: privateKey as Hex,
+    rpcURL: chain.rpcUrls.default.http[0],
+  }
+}
+
+/** Builds a read-only viem public client for the given chain. */
+export function publicClient(chainId: number) {
+  const chain = getChain(chainId)
+  const publicClient = createPublicClient({
+    chain,
+    transport: http(),
+  })
+  return publicClient
+}
diff --git a/examples/cli/src/commands/dataset-terminate.ts b/examples/cli/src/commands/dataset-terminate.ts
index 67d40cbb2..cf6b2854d 100644
--- a/examples/cli/src/commands/dataset-terminate.ts
+++ b/examples/cli/src/commands/dataset-terminate.ts
@@ -1,46 +1,30 @@
 import * as p from '@clack/prompts'
-import { calibration } from '@filoz/synapse-core/chains'
 import { getDataSets, terminateDataSet } from '@filoz/synapse-core/warm-storage'
 import { type Command, command } from 'cleye'
-import { createPublicClient, createWalletClient, type Hex, http } from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
 import { waitForTransactionReceipt } from 'viem/actions'
-import config from '../config.ts'
-
-const publicClient = createPublicClient({
-  chain: calibration,
-  transport: http(),
-})
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const datasetTerminate: Command = command(
   {
     name: 'dataset-terminate',
     description: 'Terminate a data set',
     alias: 'dt',
+    flags: {
+      ...globalFlags,
+    },
     help: {
       description: 'Terminate a data set',
     },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
-
-    const account = privateKeyToAccount(privateKey as Hex)
-    const client = createWalletClient({
-      account,
-      chain: calibration,
-      transport: http(),
-    })
+  async (argv) => {
+    const { client } = privateKeyClient(argv.flags.chain)
 
     const spinner = p.spinner()
     spinner.start(`Fetching data sets...`)
 
     try {
-      const dataSets = await getDataSets(publicClient, {
-        address: account.address,
+      const dataSets = await getDataSets(client, {
+        address: client.account.address,
       })
       spinner.stop(`Fetching data sets complete`)
@@ -71,7 +55,7 @@
       })
 
       spinner.message(`Waiting for transaction to be mined...`)
-      await waitForTransactionReceipt(publicClient, {
+      await waitForTransactionReceipt(client, {
         hash: tx,
       })
diff --git a/examples/cli/src/commands/datasets.ts b/examples/cli/src/commands/datasets.ts
index acc2596ac..f7c9ba40e 100644
--- a/examples/cli/src/commands/datasets.ts
+++ b/examples/cli/src/commands/datasets.ts
@@ -1,41 +1,31 @@
 import * as p from '@clack/prompts'
-import { calibration } from '@filoz/synapse-core/chains'
 import { getDataSets } from '@filoz/synapse-core/warm-storage'
 import { type Command, command } from 'cleye'
-import { createPublicClient, type Hex, http } from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
-import config from '../config.ts'
-
-const publicClient = createPublicClient({
-  chain: calibration,
-  transport: http(),
-})
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const datasets: Command = command(
   {
     name: 'datasets',
     description: 'List all data sets',
     alias: 'ds',
+    flags: {
+      ...globalFlags,
+    },
     help: {
       description: 'List all data sets',
       examples: ['synapse datasets', 'synapse datasets --help'],
     },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
+  async (argv) => {
+    const { client } = privateKeyClient(argv.flags.chain)
 
-    const account = privateKeyToAccount(privateKey as Hex)
     const spinner = p.spinner()
     spinner.start('Listing data sets...')
 
     try {
-      const dataSets = await getDataSets(publicClient, {
-        address: account.address,
+      const dataSets = await getDataSets(client, {
+        address: client.account.address,
       })
       spinner.stop('Data sets:')
       dataSets.forEach(async (dataSet) => {
diff --git a/examples/cli/src/commands/deposit.ts b/examples/cli/src/commands/deposit.ts
index ed4273be6..2fe7e130a 100644
--- a/examples/cli/src/commands/deposit.ts
+++ b/examples/cli/src/commands/deposit.ts
@@ -1,45 +1,26 @@
 import * as p from '@clack/prompts'
-import { calibration } from '@filoz/synapse-core/chains'
 import { depositAndApprove } from '@filoz/synapse-core/pay'
 import { type Command, command } from 'cleye'
-import {
-  createPublicClient,
-  createWalletClient,
-  type Hex,
-  http,
-  parseEther,
-} from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
+import { parseEther } from 'viem'
 import { waitForTransactionReceipt } from 'viem/actions'
-import config from '../config.ts'
-
-const publicClient = createPublicClient({
-  chain: calibration,
-  transport: http(),
-})
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const deposit: Command = command(
   {
     name: 'deposit',
     description: 'Deposit funds to the wallet',
     alias: 'd',
+    flags: {
+      ...globalFlags,
+    },
     help: {
       description: 'Deposit funds to the wallet',
       examples: ['synapse deposit', 'synapse deposit --help'],
     },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
-    const client = createWalletClient({
-      account: privateKeyToAccount(privateKey as Hex),
-      chain: calibration,
-      transport: http(),
-    })
+  async (argv) => {
+    const { client } = privateKeyClient(argv.flags.chain)
 
     const spinner = p.spinner()
     const value = await p.text({
@@ -59,7 +40,7 @@
 
       spinner.message('Waiting for transaction to be mined...')
 
-      await waitForTransactionReceipt(publicClient, {
+      await waitForTransactionReceipt(client, {
         hash,
       })
diff --git a/examples/cli/src/commands/fund.ts b/examples/cli/src/commands/fund.ts
index 04ec24dc5..481ee4c44 100644
--- a/examples/cli/src/commands/fund.ts
+++ b/examples/cli/src/commands/fund.ts
@@ -1,48 +1,38 @@
 import * as p from '@clack/prompts'
-import { calibration } from '@filoz/synapse-core/chains'
 import { claimTokens, formatBalance } from '@filoz/synapse-core/utils'
-import { RPC_URLS, Synapse } from '@filoz/synapse-sdk'
+import { Synapse } from '@filoz/synapse-sdk'
 import { type Command, command } from 'cleye'
-import { createPublicClient, type Hex, http } from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
 import { waitForTransactionReceipt } from 'viem/actions'
-import config from '../config.ts'
-
-const publicClient = createPublicClient({
-  chain: calibration,
-  transport: http(),
-})
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const fund: Command = command(
   {
     name: 'fund',
     description: 'Fund the wallet',
     alias: 'f',
+    flags: {
+      ...globalFlags,
+    },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
+  async (argv) => {
+    const { client, privateKey, rpcURL } = privateKeyClient(argv.flags.chain)
 
     p.intro('Funding wallet...')
     const spinner = p.spinner()
-    const account = privateKeyToAccount(privateKey as Hex)
 
     spinner.start('Requesting faucets...')
     try {
-      const hashes = await claimTokens({ address: account.address })
+      const hashes = await claimTokens({ address: client.account.address })
 
       spinner.message(`Waiting for transactions to be mined...`)
-      await waitForTransactionReceipt(publicClient, {
+      await waitForTransactionReceipt(client, {
         hash: hashes[0].tx_hash,
       })
 
       const synapse = await Synapse.create({
-        privateKey: privateKey as Hex,
-        rpcURL: RPC_URLS.calibration.http, // Use calibration testnet for testing
+        privateKey,
+        rpcURL,
       })
 
       spinner.stop('Balances')
diff --git a/examples/cli/src/commands/pay.ts b/examples/cli/src/commands/pay.ts
index 5418e3158..53d293eaf 100644
--- a/examples/cli/src/commands/pay.ts
+++ b/examples/cli/src/commands/pay.ts
@@ -1,35 +1,33 @@
 import * as p from '@clack/prompts'
 import { formatBalance } from '@filoz/synapse-core/utils'
-import { RPC_URLS, Synapse } from '@filoz/synapse-sdk'
+import { Synapse } from '@filoz/synapse-sdk'
 import { type Command, command } from 'cleye'
-import type { Hex } from 'viem'
-import config from '../config.ts'
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const pay: Command = command(
   {
     name: 'pay',
     description: 'Check wallet balances',
     alias: 'p',
+    flags: {
+      ...globalFlags,
+    },
     help: {
       description: 'Check wallet balances',
       examples: ['synapse pay', 'synapse pay --help'],
     },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
+  async (argv) => {
+    const { privateKey, rpcURL } = privateKeyClient(argv.flags.chain)
 
     const spinner = p.spinner()
     spinner.start('Checking wallet balance...')
     try {
       const synapse = await Synapse.create({
-        privateKey: privateKey as Hex,
-        rpcURL: RPC_URLS.calibration.http, // Use calibration testnet for testing
+        privateKey,
+        rpcURL,
       })
 
       const filBalance = await synapse.payments.walletBalance()
diff --git a/examples/cli/src/commands/pieces.ts b/examples/cli/src/commands/pieces.ts
index 9dd3e4a97..d524ea600 100644
--- a/examples/cli/src/commands/pieces.ts
+++ b/examples/cli/src/commands/pieces.ts
@@ -9,9 +9,9 @@
 import { RPC_URLS, Synapse } from '@filoz/synapse-sdk'
 import { type Command, command } from 'cleye'
 import { createPublicClient, type Hex, http, stringify } from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
 import { readContract, waitForTransactionReceipt } from 'viem/actions'
-import config from '../config.ts'
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 const publicClient = createPublicClient({
   chain: calibration,
@@ -23,26 +23,23 @@
   {
     name: 'pieces',
     description: 'List all pieces',
     alias: 'ps',
+    flags: {
+      ...globalFlags,
+    },
     help: {
       description: 'List all pieces',
       examples: ['synapse pieces', 'synapse pieces --help'],
     },
   },
-  async (_argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
+  async (argv) => {
+    const { client, privateKey } = privateKeyClient(argv.flags.chain)
 
-    const account = privateKeyToAccount(privateKey as Hex)
     const spinner = p.spinner()
     spinner.start('Fetching data sets...')
 
     try {
-      const dataSets = await getDataSets(publicClient, {
-        address: account.address,
+      const dataSets = await getDataSets(client, {
+        address: client.account.address,
       })
       spinner.stop('Fetching data sets complete')
       let pieces: Piece[] = []
@@ -61,12 +58,12 @@
       },
       pieceId: async ({ results }) => {
         const dataSetId = results.dataSetId
-        const rsp = await getPieces(publicClient, {
+        const rsp = await getPieces(client, {
           // biome-ignore lint/style/noNonNullAssertion: dataSetId is guaranteed to be found
           dataSet: dataSets.find(
             (dataSet) => dataSet.dataSetId === dataSetId
           )!,
-          address: account.address,
+          address: client.account.address,
         })
         pieces = rsp.pieces
         if (rsp.pieces.length === 0) {
diff --git a/examples/cli/src/commands/upload-dataset.ts b/examples/cli/src/commands/upload-dataset.ts
index 06dfc7e72..1fd0df5a7 100644
--- a/examples/cli/src/commands/upload-dataset.ts
+++ b/examples/cli/src/commands/upload-dataset.ts
@@ -1,21 +1,15 @@
 import { readFile } from 'node:fs/promises'
 import path from 'node:path'
 import * as p from '@clack/prompts'
-import { calibration } from '@filoz/synapse-core/chains'
+import * as Piece from '@filoz/synapse-core/piece'
 import * as SP from '@filoz/synapse-core/sp'
 import {
   createDataSetAndAddPieces,
   readProviders,
 } from '@filoz/synapse-core/warm-storage'
 import { type Command, command } from 'cleye'
-import { createPublicClient, createWalletClient, type Hex, http } from 'viem'
-import { privateKeyToAccount } from 'viem/accounts'
-import config from '../config.ts'
-
-const publicClient = createPublicClient({
-  chain: calibration,
-  transport: http(),
-})
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const uploadDataset: Command = command(
   {
@@ -23,7 +17,8 @@
     parameters: ['<required path>', '<required provider id>'],
     description: 'Upload a file to a new data set',
     flags: {
-      withCDN: {
+      ...globalFlags,
+      cdn: {
         type: Boolean,
         description: 'Enable CDN',
         default: false,
@@ -34,19 +29,7 @@
     },
   },
   async (argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
-    const account = privateKeyToAccount(privateKey as Hex)
-    const client = createWalletClient({
-      account,
-      chain: calibration,
-      transport: http(),
-    })
-
+    const { client } = privateKeyClient(argv.flags.chain)
     const spinner = p.spinner()
 
     const filePath = argv._.requiredPath
@@ -55,7 +38,7 @@
     spinner.start(`Uploading file ${absolutePath}...`)
 
     try {
-      const providers = await readProviders(publicClient)
+      const providers = await readProviders(client)
       const provider = providers.find(
         (provider) => provider.id === BigInt(argv._.requiredProviderId)
       )
@@ -64,29 +47,33 @@
         p.outro('Please try again')
         return
       }
-      const upload = await SP.uploadPiece({
+
+      const pieceCid = Piece.calculate(fileData)
+      await SP.uploadPiece({
         data: fileData,
         endpoint: provider.pdp.serviceURL,
+        pieceCid,
       })
 
       await SP.findPiece({
-        pieceCid: upload.pieceCid,
+        pieceCid,
         endpoint: provider.pdp.serviceURL,
       })
 
       const rsp = await createDataSetAndAddPieces(client, {
-        provider,
-        cdn: argv.flags.withCDN,
+        endpoint: provider.pdp.serviceURL,
+        payee: provider.payee,
+        cdn: argv.flags.cdn,
         pieces: [
           {
-            pieceCid: upload.pieceCid,
+            pieceCid,
             metadata: { name: path.basename(absolutePath) },
           },
         ],
       })
 
       await SP.pollForDataSetCreationStatus(rsp)
-      spinner.stop(`File uploaded ${upload.pieceCid}`)
+      spinner.stop(`File uploaded ${pieceCid}`)
     } catch (error) {
       spinner.stop()
       p.log.error((error as Error).message)
diff --git a/examples/cli/src/commands/upload.ts b/examples/cli/src/commands/upload.ts
index 8c92edbca..cb91d1779 100644
--- a/examples/cli/src/commands/upload.ts
+++ b/examples/cli/src/commands/upload.ts
@@ -1,10 +1,10 @@
 import { readFile } from 'node:fs/promises'
 import path from 'node:path'
 import * as p from '@clack/prompts'
-import { RPC_URLS, Synapse } from '@filoz/synapse-sdk'
+import { Synapse } from '@filoz/synapse-sdk'
 import { type Command, command } from 'cleye'
-import type { Hex } from 'viem'
-import config from '../config.ts'
+import { privateKeyClient } from '../client.ts'
+import { globalFlags } from '../flags.ts'
 
 export const upload: Command = command(
   {
@@ -13,6 +13,7 @@
     description: 'Upload a file to the warm storage',
     alias: 'u',
     flags: {
+      ...globalFlags,
       forceCreateDataSet: {
         type: Boolean,
         description: 'Force create a new data set',
@@ -34,12 +35,7 @@
     },
   },
   async (argv) => {
-    const privateKey = config.get('privateKey')
-    if (!privateKey) {
-      p.log.error('Private key not found')
-      p.outro('Please run `synapse init` to initialize the CLI')
-      return
-    }
+    const { privateKey, rpcURL } = privateKeyClient(argv.flags.chain)
 
     const filePath = argv._.requiredPath
     const absolutePath = path.resolve(filePath)
@@ -47,8 +43,8 @@
 
     try {
       const synapse = await Synapse.create({
-        privateKey: privateKey as Hex,
-        rpcURL: RPC_URLS.calibration.http, // Use calibration testnet for testing
+        privateKey,
+        rpcURL,
       })
 
       p.log.step('Creating context...')
@@ -70,11 +66,19 @@
         metadata: {
           name: path.basename(absolutePath),
         },
-        onPieceAdded(transactionHash) {
-          p.log.info(`Piece added, tx: ${transactionHash}`)
+        onPiecesAdded(transactionHash, pieces) {
+          p.log.info(`Pieces added in tx: ${transactionHash}`)
+          if (pieces?.length) {
+            p.log.info(
+              `PieceCIDs: ${pieces.map(({ pieceCid }) => pieceCid.toString()).join(', ')}`
+            )
+          }
         },
-        onPieceConfirmed(pieceIds) {
-          p.log.info(`Piece confirmed: ${pieceIds.join(', ')}`)
+        onPiecesConfirmed(dataSetId, pieces) {
+          p.log.info(`Data set ${dataSetId} confirmed`)
+          p.log.info(
+            `Piece IDs: ${pieces.map(({ pieceId }) => pieceId).join(', ')}`
+          )
         },
         onUploadComplete(pieceCid) {
           p.log.info(`Upload complete! PieceCID: ${pieceCid}`)
diff --git a/examples/cli/src/flags.ts b/examples/cli/src/flags.ts
new file mode 100644
index 000000000..9800a32db
--- /dev/null
+++ b/examples/cli/src/flags.ts
@@ -0,0 +1,21 @@
+const possibleChains = [314159, 314] as const
+type Chains = (typeof possibleChains)[number]
+
+// cleye passes flag values as strings, so coerce before validating
+const Chain = (chain: string | Chains) => {
+  const id = Number(chain) as Chains
+  if (!possibleChains.includes(id)) {
+    throw new Error(
+      `Invalid chain: ${chain}. Must be one of: ${possibleChains.join(', ')}`
+    )
+  }
+  return id
+}
+
+export const globalFlags = {
+  chain: {
+    type: Chain,
+    description: 'The chain to use. 314159 for calibration, 314 for mainnet',
+    default: 314159,
+  },
+}
diff --git a/examples/cli/tsconfig.json b/examples/cli/tsconfig.json
index 4f9b64b86..6991c4ca9 100644
--- a/examples/cli/tsconfig.json
+++ b/examples/cli/tsconfig.json
@@ -8,10 +8,10 @@
   "exclude": ["node_modules", "dist"],
   "references": [
     {
-      "path": "../../packages/synapse-sdk"
+      "path": "../../packages/synapse-sdk/tsconfig.json"
     },
     {
-      "path": "../../packages/synapse-core"
+      "path": "../../packages/synapse-core/tsconfig.json"
     }
   ]
 }
diff --git a/examples/script-tag/biome.json b/examples/script-tag/biome.json
index 645b1cb4c..4c1be270b 100644
--- a/examples/script-tag/biome.json
+++ b/examples/script-tag/biome.json
@@ -1,6 +1,6 @@
 {
   "root": false,
-  "$schema": "https://biomejs.dev/schemas/2.3.5/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
   "files": {
     "ignoreUnknown": true
   },
diff --git a/package.json b/package.json
index e995bbf29..d7c6a077c 100644
--- a/package.json
+++ b/package.json
@@ -18,13 +18,15 @@
     "check:markdown": "markdownlint-cli2 --config .github/.markdownlint-cli2.jsonc '**/*.md' '**/*.mdx'",
     "check:repo": "pnpx sherif@latest -r root-package-manager-field",
     "check:knip": "knip --config .github/knip.jsonc",
-    "clean": "rm -rf node_modules pnpm-lock.yaml package-lock.json packages/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules} examples/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules} docs/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules,.astro} apps/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules}"
+    "update:msw": "pnpm -r --filter='./packages/*' --if-present run update:msw",
+    "clean": "rm -rf node_modules pnpm-lock.yaml package-lock.json packages/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules} examples/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules} docs/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules,.astro} apps/*/{.wireit,pnpm-lock.yaml,package-lock.json,coverage,.nyc_output,dist,node_modules,.wrangler}",
+    "clean:cache": "rm -rf packages/*/{.wireit,coverage,.nyc_output,dist} examples/*/{.wireit,coverage,.nyc_output,dist} docs/{.wireit,coverage,.nyc_output,dist,.astro} apps/*/{.wireit,coverage,.nyc_output,dist,.wrangler}"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.3.7",
-    "knip": "^5.69.1",
-    "markdownlint-cli2": "^0.19.0",
-    "typescript": "5.9.3",
+    "@biomejs/biome": "catalog:",
+    "knip": "^5.71.0",
+    "markdownlint-cli2": "^0.20.0",
+    "typescript": "catalog:",
     "wireit": "^0.14.12"
   },
   "simple-git-hooks": {
@@ -32,5 +34,13 @@
   },
   "engines": {
     "node": ">=22"
+  },
+  "packageManager": "pnpm@10.26.0",
+  "devEngines": {
+    "runtime": {
+      "name": "node",
+      "version": "^24.8.0",
+      "onFail": "download"
+    }
   }
 }
diff --git a/packages/synapse-core/CHANGELOG.md b/packages/synapse-core/CHANGELOG.md
index b49e808df..7c754dca0 100644
--- a/packages/synapse-core/CHANGELOG.md
+++ b/packages/synapse-core/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Changelog
 
+## [0.1.4](https://github.com/FilOzone/synapse-sdk/compare/synapse-core-v0.1.3...synapse-core-v0.1.4) (2025-12-02)
+
+
+### Features
+
+* auctionPriceAt ([#454](https://github.com/FilOzone/synapse-sdk/issues/454)) ([b38d81f](https://github.com/FilOzone/synapse-sdk/commit/b38d81fb912c6388804ba917154be8e2d61151b3))
+
+
+### Chores
+
+* **deps-dev:** bump @biomejs/biome from 2.3.5 to 2.3.6 ([#448](https://github.com/FilOzone/synapse-sdk/issues/448)) ([ebcab4e](https://github.com/FilOzone/synapse-sdk/commit/ebcab4ea166aa69c35d988ff2356b3f5972af351))
+* **deps-dev:** bump @biomejs/biome from 2.3.6 to 2.3.7 ([#459](https://github.com/FilOzone/synapse-sdk/issues/459)) ([d3c65a8](https://github.com/FilOzone/synapse-sdk/commit/d3c65a806e4819bbc560f5a7087f79eec31417a5))
+* **deps-dev:** bump @biomejs/biome from 2.3.7 to 2.3.8 ([#476](https://github.com/FilOzone/synapse-sdk/issues/476)) ([d95f812](https://github.com/FilOzone/synapse-sdk/commit/d95f812d7752a9b1dcb46219a4857eb99b54ebf0))
+
 ## [0.1.3](https://github.com/FilOzone/synapse-sdk/compare/synapse-core-v0.1.2...synapse-core-v0.1.3) (2025-11-17)
diff --git a/packages/synapse-core/package.json b/packages/synapse-core/package.json
index 2aeaf4d3e..cf7641a5a 100644
--- a/packages/synapse-core/package.json
+++ b/packages/synapse-core/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@filoz/synapse-core",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "description": "JavaScript Standard Library for Filecoin Onchain Cloud",
   "repository": {
     "type": "git",
@@ -79,6 +79,10 @@
     "./utils": {
       "types": "./dist/src/utils/index.d.ts",
       "default": "./dist/src/utils/index.js"
+    },
+    "./mocks": {
+      "types": "./dist/src/mocks/index.d.ts",
+      "default": "./dist/src/mocks/index.js"
     }
   },
   "typesVersions": {
@@ -98,6 +102,9 @@
       "abis": [
         "./dist/src/abis/index"
       ],
+      "auction": [
+        "./dist/src/auction/index"
+      ],
       "pay": [
         "./dist/src/pay/index"
       ],
@@ -115,6 +122,12 @@
       ],
       "piece": [
         "./dist/src/piece"
+      ],
+      "utils": [
+        "./dist/src/utils/index"
+      ],
+      "mocks": [
+        "./dist/src/mocks/index"
       ]
     }
   },
@@ -131,7 +144,8 @@
     "test:node": "wireit",
     "test:browser": "wireit",
     "lint": "wireit",
-    "lint:fix": "biome check --no-errors-on-unmatched --files-ignore-unknown=true --fix ."
+ "lint:fix": "biome check --no-errors-on-unmatched --files-ignore-unknown=true --fix .", + "update:msw": "pnpx msw init test/mocks/ --save" }, "wireit": { "build": { @@ -187,36 +201,40 @@ } }, "dependencies": { + "@hugomrdias/filsnap-adapter": "^3.3.8", "@web3-storage/data-segment": "^5.3.0", "dnum": "^2.15.0", - "@hugomrdias/filsnap-adapter": "^3.3.8", - "iso-web": "^1.4.3", + "iso-web": "^2.1.0", "multiformats": "^13.4.1", - "ox": "^0.9.12", + "ox": "catalog:", "p-retry": "^7.1.0" }, "devDependencies": { - "@biomejs/biome": "2.3.7", + "@biomejs/biome": "catalog:", + "@ledgerhq/hw-app-eth": "^6.47.1", + "@ledgerhq/hw-transport-node-hid": "^6.29.14", "@types/assert": "^1.5.11", - "@types/mocha": "^10.0.10", - "@types/node": "^24.7.2", + "@types/mocha": "catalog:", + "@types/node": "catalog:", "@wagmi/cli": "^2.7.0", - "abitype": "^1.1.1", + "abitype": "catalog:", "assert": "^2.1.0", - "mocha": "^11.7.4", + "mocha": "catalog:", + "msw": "catalog:", "playwright-test": "^14.1.12", "type-fest": "^5.1.0", - "typescript": "5.9.3" + "typescript": "catalog:" }, "publishConfig": { "access": "public" }, "msw": { "workerDirectory": [ - "src/test/mocks" + "test/mocks" ] }, "peerDependencies": { + "msw": "^2.12.4", "viem": "2.x" } } diff --git a/packages/synapse-core/src/abis/index.ts b/packages/synapse-core/src/abis/index.ts index e153c245c..3503be911 100644 --- a/packages/synapse-core/src/abis/index.ts +++ b/packages/synapse-core/src/abis/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - ABIs + * ABIs * * @example * ```ts * import * as Abis from '@filoz/synapse-core/abis' * ``` * - * @packageDocumentation + * @module abis */ export * from './erc20.ts' diff --git a/packages/synapse-core/src/auction/index.ts b/packages/synapse-core/src/auction/index.ts index d73c6f166..7b2552bc3 100644 --- a/packages/synapse-core/src/auction/index.ts +++ b/packages/synapse-core/src/auction/index.ts @@ -1 +1,11 @@ +/** + * Auction + * + * @example + * ```ts + * import * as auction from '@filoz/synapse-core/auction' + * ``` + * + * @module auction + */ export * from './auction.ts' diff --git a/packages/synapse-core/src/chains.ts b/packages/synapse-core/src/chains.ts index cf2c85d9c..0c07e0f8f 100644 --- a/packages/synapse-core/src/chains.ts +++ b/packages/synapse-core/src/chains.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Chains + * Chains * * @example * ```ts * import * as Chains from '@filoz/synapse-core/chains' * ``` * - * @packageDocumentation + * @module chains */ import type { Address, ChainContract, Chain as ViemChain } from 'viem' diff --git a/packages/synapse-core/src/erc20.ts b/packages/synapse-core/src/erc20.ts index 3cadfd0cc..7c590234e 100644 --- a/packages/synapse-core/src/erc20.ts +++ b/packages/synapse-core/src/erc20.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - ERC20 Contract Operations + * ERC20 Contract Operations * * @example * ```ts * import * as ERC20 from '@filoz/synapse-core/erc20' * ``` * - * @packageDocumentation + * @module erc20 */ import { diff --git a/packages/synapse-core/src/errors/index.ts b/packages/synapse-core/src/errors/index.ts index 6a89fbe1d..0fc26f89e 100644 --- a/packages/synapse-core/src/errors/index.ts +++ b/packages/synapse-core/src/errors/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Errors + * Errors * * @example * ```ts * import * as Errors from '@filoz/synapse-core/errors' * ``` * - * @packageDocumentation + * @module errors */ export * from './base.ts' export * from './chains.ts' diff --git a/packages/synapse-core/src/index.ts b/packages/synapse-core/src/index.ts index 
0371d5442..202d22050 100644 --- a/packages/synapse-core/src/index.ts +++ b/packages/synapse-core/src/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Main Entry Point + * **Synapse Core - Main Entry Point** * * @example * ```ts * import * as core from '@filoz/synapse-core' * ``` * - * @packageDocumentation + * @module core */ export * as abis from './abis/index.ts' diff --git a/packages/synapse-sdk/src/test/mocks/ping.ts b/packages/synapse-core/src/mocks/common.ts similarity index 100% rename from packages/synapse-sdk/src/test/mocks/ping.ts rename to packages/synapse-core/src/mocks/common.ts diff --git a/packages/synapse-core/src/mocks/index.ts b/packages/synapse-core/src/mocks/index.ts new file mode 100644 index 000000000..9c1f8502a --- /dev/null +++ b/packages/synapse-core/src/mocks/index.ts @@ -0,0 +1,15 @@ +/** + * Mocks for testing + * + * @example + * ```ts + * import * as Mocks from '@filoz/synapse-core/mocks' + * ``` + * + * @module mocks + */ + +export * from './common.ts' +export * from './jsonrpc/index.ts' +export { mockServiceProviderRegistry } from './jsonrpc/service-registry.ts' +export * as pdp from './pdp.ts' diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/constants.ts b/packages/synapse-core/src/mocks/jsonrpc/constants.ts similarity index 72% rename from packages/synapse-sdk/src/test/mocks/jsonrpc/constants.ts rename to packages/synapse-core/src/mocks/jsonrpc/constants.ts index 48e122dd5..727a3cfba 100644 --- a/packages/synapse-sdk/src/test/mocks/jsonrpc/constants.ts +++ b/packages/synapse-core/src/mocks/jsonrpc/constants.ts @@ -1,5 +1,5 @@ -import type { Address } from 'viem' -import { CONTRACT_ADDRESSES } from '../../../utils/constants.ts' +import { type Address, zeroAddress } from 'viem' +import { calibration, mainnet } from '../../chains.ts' export const PRIVATE_KEYS = { key1: '0x1234567890123456789012345678901234567890123456789012345678901234', @@ -7,28 +7,37 @@ export const PRIVATE_KEYS = { } export const ADDRESSES = { client1: '0x2e988A386a799F506693793c6A5AF6B54dfAaBfB' as Address, - zero: '0x0000000000000000000000000000000000000000' as Address, + zero: zeroAddress, serviceProvider1: '0x0000000000000000000000000000000000000001' as Address, serviceProvider2: '0x0000000000000000000000000000000000000002' as Address, payee1: '0x1000000000000000000000000000000000000001' as Address, mainnet: { - warmStorage: '0x1234567890123456789012345678901234567890' as Address, - multicall3: CONTRACT_ADDRESSES.MULTICALL3.mainnet, - pdpVerifier: '0x9876543210987654321098765432109876543210', + warmStorage: mainnet.contracts.storage.address, + multicall3: mainnet.contracts.multicall3.address, + pdpVerifier: mainnet.contracts.pdp.address, }, calibration: { - warmStorage: CONTRACT_ADDRESSES.WARM_STORAGE.calibration as Address, - multicall3: CONTRACT_ADDRESSES.MULTICALL3.calibration, - pdpVerifier: '0x3ce3C62C4D405d69738530A6A65E4b13E8700C48' as Address, - payments: '0x80Df863d84eFaa0aaC8da2E9B08D14A7236ff4D0' as Address, - usdfcToken: '0xb3042734b608a1B16e9e86B374A3f3e389B4cDf0' as Address, - filCDN: '0x0000000000000000000000000000000000000000' as Address, - viewContract: '0x1996B60838871D0bc7980Bc02DD6Eb920535bE54' as Address, - spRegistry: '0x0000000000000000000000000000000000000001' as Address, - sessionKeyRegistry: '0x518411c2062E119Aaf7A8B12A2eDf9a939347655' as Address, + warmStorage: calibration.contracts.storage.address, + multicall3: calibration.contracts.multicall3.address, + pdpVerifier: calibration.contracts.pdp.address, + payments: 
calibration.contracts.payments.address, + usdfcToken: calibration.contracts.usdfc.address, + filCDN: zeroAddress, + viewContract: calibration.contracts.storageView.address, + spRegistry: calibration.contracts.serviceProviderRegistry.address, + sessionKeyRegistry: calibration.contracts.sessionKeyRegistry.address, }, } +const ENDORSEMENTS = { + '0x50724807600e804Fe842439860D5b62baa26aFff': { + notAfter: 0xffffffffn, + nonce: 0xffffffffn, + signature: + '0x1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b', + }, +} as const + export const PROVIDERS = { providerNoPDP: { providerId: 1n, @@ -64,6 +73,7 @@ export const PROVIDERS = { minProvingPeriodInEpochs: 30n, location: 'us-east', paymentTokenAddress: ADDRESSES.calibration.usdfcToken, + endorsements: ENDORSEMENTS, }, }, ], @@ -91,6 +101,7 @@ export const PROVIDERS = { minProvingPeriodInEpochs: 30n, location: 'us-east', paymentTokenAddress: ADDRESSES.calibration.usdfcToken, + endorsements: ENDORSEMENTS, }, }, ], @@ -118,6 +129,7 @@ export const PROVIDERS = { minProvingPeriodInEpochs: 30n, location: 'us-east', paymentTokenAddress: ADDRESSES.calibration.usdfcToken, + endorsements: ENDORSEMENTS, }, }, ], diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/erc20.ts b/packages/synapse-core/src/mocks/jsonrpc/erc20.ts similarity index 68% rename from packages/synapse-sdk/src/test/mocks/jsonrpc/erc20.ts rename to packages/synapse-core/src/mocks/jsonrpc/erc20.ts index 02259b7d6..6f944cd11 100644 --- a/packages/synapse-sdk/src/test/mocks/jsonrpc/erc20.ts +++ b/packages/synapse-core/src/mocks/jsonrpc/erc20.ts @@ -2,16 +2,16 @@ import type { ExtractAbiFunction } from 'abitype' import { decodeFunctionData, encodeAbiParameters, type Hex } from 'viem' -import { CONTRACT_ABIS } from '../../../utils/constants.ts' +import * as Abis from '../../abis/index.ts' import type { AbiToType, JSONRPCOptions } from './types.ts' -export type balanceOf = ExtractAbiFunction -export type decimals = ExtractAbiFunction -export type allowance = ExtractAbiFunction -export type name = ExtractAbiFunction -export type approve = ExtractAbiFunction -export type nonces = ExtractAbiFunction -export type version = ExtractAbiFunction +export type balanceOf = ExtractAbiFunction +export type decimals = ExtractAbiFunction +export type allowance = ExtractAbiFunction +export type name = ExtractAbiFunction +export type approve = ExtractAbiFunction +export type nonces = ExtractAbiFunction +export type version = ExtractAbiFunction export interface ERC20Options { balanceOf?: (args: AbiToType) => AbiToType @@ -32,7 +32,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { try { const decoded = decodeFunctionData({ - abi: CONTRACT_ABIS.ERC20_PERMIT, + abi: Abis.erc20WithPermit, data: data as Hex, }) functionName = decoded.functionName @@ -51,7 +51,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: balanceOf is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'balanceOf')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'balanceOf')!.outputs, options.erc20.balanceOf(args as AbiToType) ) } @@ -61,7 +61,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: decimals is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 
'function' && abi.name === 'decimals')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'decimals')!.outputs, options.erc20.decimals(args as AbiToType) ) } @@ -71,7 +71,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: allowance is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'allowance')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'allowance')!.outputs, options.erc20.allowance(args as AbiToType) ) } @@ -81,7 +81,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: name is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'name')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'name')!.outputs, options.erc20.name(args as AbiToType) ) } @@ -91,7 +91,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: approve is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'approve')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'approve')!.outputs, options.erc20.approve(args as AbiToType) ) } @@ -101,7 +101,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: version is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'version')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'version')!.outputs, options.erc20.version(args as AbiToType) ) } @@ -111,7 +111,7 @@ export function erc20CallHandler(data: Hex, options: JSONRPCOptions): Hex { throw new Error('ERC20: nonces is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.ERC20_PERMIT.find((abi) => abi.type === 'function' && abi.name === 'nonces')!.outputs, + Abis.erc20WithPermit.find((abi) => abi.type === 'function' && abi.name === 'nonces')!.outputs, options.erc20.nonces(args as AbiToType) ) } diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/index.ts b/packages/synapse-core/src/mocks/jsonrpc/index.ts similarity index 98% rename from packages/synapse-sdk/src/test/mocks/jsonrpc/index.ts rename to packages/synapse-core/src/mocks/jsonrpc/index.ts index 7a2150e6c..e23f044ff 100644 --- a/packages/synapse-sdk/src/test/mocks/jsonrpc/index.ts +++ b/packages/synapse-core/src/mocks/jsonrpc/index.ts @@ -1,5 +1,5 @@ import { HttpResponse, http } from 'msw' -import { TransactionEnvelopeEip1559 } from 'ox' +import { TxEnvelopeEip1559 } from 'ox' import type { RequiredDeep } from 'type-fest' import { type Address, @@ -14,7 +14,7 @@ import { parseUnits, stringToHex, } from 'viem' -import { CONTRACT_ADDRESSES, TIME_CONSTANTS } from '../../../utils/constants.ts' +import { TIME_CONSTANTS } from '../../utils/constants.ts' import { ADDRESSES } from './constants.ts' import { erc20CallHandler } from './erc20.ts' import { paymentsCallHandler } from './payments.ts' @@ -164,7 +164,7 @@ function handler(body: RpcRequest, options: JSONRPCOptions) { return warmStorageCallHandler(data as Hex, options) } - if (isAddressEqual(CONTRACT_ADDRESSES.MULTICALL3.calibration, to as Address)) { + if (isAddressEqual(ADDRESSES.calibration.multicall3, to as Address)) { 
     return multicall3CallHandler(data as Hex, options)
   }
@@ -336,15 +336,15 @@ export const presets = {
   eth_gasPrice: () => '0x09184e72a000',
   eth_maxPriorityFeePerGas: () => '0x5f5e100',
   eth_sendRawTransaction: (args) => {
-    const deserialized = TransactionEnvelopeEip1559.deserialize(args[0] as `0x02${string}`)
-    const envelope = TransactionEnvelopeEip1559.from(deserialized, {
+    const deserialized = TxEnvelopeEip1559.deserialize(args[0] as `0x02${string}`)
+    const envelope = TxEnvelopeEip1559.from(deserialized, {
       signature: {
         r: deserialized.r ?? 0n,
         s: deserialized.s ?? 0n,
         yParity: deserialized.yParity ?? 0,
       },
     })
-    const hash = TransactionEnvelopeEip1559.hash(envelope)
+    const hash = TxEnvelopeEip1559.hash(envelope)
 
     return hash
   },
@@ -478,6 +478,7 @@
     getActivePieces: () => [[], [], false],
     getDataSetStorageProvider: () => [ADDRESSES.serviceProvider1, ADDRESSES.zero],
     getDataSetLeafCount: () => [0n],
+    getScheduledRemovals: () => [[]],
   },
   serviceRegistry: {
     getProviderByAddress: (data) => [
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/payments.ts b/packages/synapse-core/src/mocks/jsonrpc/payments.ts
similarity index 74%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/payments.ts
rename to packages/synapse-core/src/mocks/jsonrpc/payments.ts
index 14e571913..168ac1daf 100644
--- a/packages/synapse-sdk/src/test/mocks/jsonrpc/payments.ts
+++ b/packages/synapse-core/src/mocks/jsonrpc/payments.ts
@@ -2,17 +2,17 @@
 import type { ExtractAbiFunction } from 'abitype'
 import { decodeFunctionData, encodeAbiParameters, type Hex } from 'viem'
-import { CONTRACT_ABIS } from '../../../utils/constants.ts'
+import * as Abis from '../../abis/index.ts'
 import type { AbiToType, JSONRPCOptions } from './types.ts'
 
-export type accounts = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'accounts'>
-export type operatorApprovals = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'operatorApprovals'>
-export type getRail = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'getRail'>
-export type getRailsForPayerAndToken = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'getRailsForPayerAndToken'>
-export type getRailsForPayeeAndToken = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'getRailsForPayeeAndToken'>
-export type settleRail = ExtractAbiFunction<typeof CONTRACT_ABIS.PAYMENTS, 'settleRail'>
+export type accounts = ExtractAbiFunction<typeof Abis.payments, 'accounts'>
+export type operatorApprovals = ExtractAbiFunction<typeof Abis.payments, 'operatorApprovals'>
+export type getRail = ExtractAbiFunction<typeof Abis.payments, 'getRail'>
+export type getRailsForPayerAndToken = ExtractAbiFunction<typeof Abis.payments, 'getRailsForPayerAndToken'>
+export type getRailsForPayeeAndToken = ExtractAbiFunction<typeof Abis.payments, 'getRailsForPayeeAndToken'>
+export type settleRail = ExtractAbiFunction<typeof Abis.payments, 'settleRail'>
 export type settleTerminatedRailWithoutValidation = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.PAYMENTS,
+  typeof Abis.payments,
   'settleTerminatedRailWithoutValidation'
 >
@@ -47,7 +47,7 @@ export function paymentsCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   }
 
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.PAYMENTS,
+    abi: Abis.payments,
     data: data as Hex,
   })
 
@@ -61,7 +61,7 @@
         throw new Error('Payments: operatorApprovals is not defined')
       }
       return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'operatorApprovals')!.outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'operatorApprovals')!.outputs,
        options.payments.operatorApprovals(args)
      )
    }
@@ -71,7 +71,7 @@
        throw new Error('Payments: accounts is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'accounts')!.outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'accounts')!.outputs,
        options.payments.accounts(args)
      )
    }
@@ -81,7 +81,7 @@
        throw new Error('Payments: getRail is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'getRail')!.outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'getRail')!.outputs,
        options.payments.getRail(args)
      )
    }
@@ -91,8 +91,7 @@
        throw new Error('Payments: getRailsForPayerAndToken is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'getRailsForPayerAndToken')!
-          .outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'getRailsForPayerAndToken')!.outputs,
        options.payments.getRailsForPayerAndToken(args)
      )
    }
@@ -102,8 +101,7 @@
        throw new Error('Payments: getRailsForPayeeAndToken is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'getRailsForPayeeAndToken')!
-          .outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'getRailsForPayeeAndToken')!.outputs,
        options.payments.getRailsForPayeeAndToken(args)
      )
    }
@@ -113,7 +111,7 @@
        throw new Error('Payments: settleRail is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find((abi) => abi.type === 'function' && abi.name === 'settleRail')!.outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'settleRail')!.outputs,
        options.payments.settleRail(args)
      )
    }
@@ -123,9 +121,8 @@
        throw new Error('Payments: settleTerminatedRailWithoutValidation is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PAYMENTS.find(
-          (abi) => abi.type === 'function' && abi.name === 'settleTerminatedRailWithoutValidation'
-        )!.outputs,
+        Abis.payments.find((abi) => abi.type === 'function' && abi.name === 'settleTerminatedRailWithoutValidation')!
+          .outputs,
         options.payments.settleTerminatedRailWithoutValidation(args)
       )
     }
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/pdp.ts b/packages/synapse-core/src/mocks/jsonrpc/pdp.ts
similarity index 62%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/pdp.ts
rename to packages/synapse-core/src/mocks/jsonrpc/pdp.ts
index b06bd7ac3..a2d355757 100644
--- a/packages/synapse-sdk/src/test/mocks/jsonrpc/pdp.ts
+++ b/packages/synapse-core/src/mocks/jsonrpc/pdp.ts
@@ -2,18 +2,16 @@
 import type { ExtractAbiFunction } from 'abitype'
 import { decodeFunctionData, encodeAbiParameters, type Hex } from 'viem'
-import { CONTRACT_ABIS } from '../../../utils/constants.ts'
+import * as Abis from '../../abis/index.ts'
 import type { AbiToType, JSONRPCOptions } from './types.ts'
 
-export type getNextPieceId = ExtractAbiFunction<typeof CONTRACT_ABIS.PDP_VERIFIER, 'getNextPieceId'>
-export type dataSetLive = ExtractAbiFunction<typeof CONTRACT_ABIS.PDP_VERIFIER, 'dataSetLive'>
-export type getDataSetListener = ExtractAbiFunction<typeof CONTRACT_ABIS.PDP_VERIFIER, 'getDataSetListener'>
-export type getActivePieces = ExtractAbiFunction<typeof CONTRACT_ABIS.PDP_VERIFIER, 'getActivePieces'>
-export type getDataSetStorageProvider = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.PDP_VERIFIER,
-  'getDataSetStorageProvider'
->
-export type getDataSetLeafCount = ExtractAbiFunction<typeof CONTRACT_ABIS.PDP_VERIFIER, 'getDataSetLeafCount'>
+export type getNextPieceId = ExtractAbiFunction<typeof Abis.pdp, 'getNextPieceId'>
+export type dataSetLive = ExtractAbiFunction<typeof Abis.pdp, 'dataSetLive'>
+export type getDataSetListener = ExtractAbiFunction<typeof Abis.pdp, 'getDataSetListener'>
+export type getActivePieces = ExtractAbiFunction<typeof Abis.pdp, 'getActivePieces'>
+export type getDataSetStorageProvider = ExtractAbiFunction<typeof Abis.pdp, 'getDataSetStorageProvider'>
+export type getDataSetLeafCount = ExtractAbiFunction<typeof Abis.pdp, 'getDataSetLeafCount'>
+export type getScheduledRemovals = ExtractAbiFunction<typeof Abis.pdp, 'getScheduledRemovals'>
 
 export interface PDPVerifierOptions {
   dataSetLive?: (args: AbiToType) => AbiToType
@@ -24,6 +22,7 @@
     args: AbiToType
   ) => AbiToType
   getDataSetLeafCount?: (args: AbiToType) => AbiToType
+  getScheduledRemovals?: (args: AbiToType) => AbiToType
 }
 
 /**
@@ -31,7 +30,7 @@
  */
 export function pdpVerifierCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.PDP_VERIFIER,
+    abi: Abis.pdp,
     data: data as Hex,
   })
 
@@ -45,7 +44,7 @@
         throw new Error('PDP Verifier: dataSetLive is not defined')
       }
       return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'dataSetLive')!.outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'dataSetLive')!.outputs,
        options.pdpVerifier.dataSetLive(args)
      )
    }
@@ -55,7 +54,7 @@
        throw new Error('PDP Verifier: getDataSetListener is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'getDataSetListener')!.outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getDataSetListener')!.outputs,
        options.pdpVerifier.getDataSetListener(args)
      )
    case 'getNextPieceId':
@@ -63,7 +62,7 @@
        throw new Error('PDP Verifier: getNextPieceId is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'getNextPieceId')!.outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getNextPieceId')!.outputs,
        options.pdpVerifier.getNextPieceId(args)
      )
    case 'getActivePieces': {
@@ -71,7 +70,7 @@
        throw new Error('PDP Verifier: getActivePieces is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'getActivePieces')!.outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getActivePieces')!.outputs,
        options.pdpVerifier.getActivePieces(args)
      )
    }
@@ -80,8 +79,7 @@
        throw new Error('PDP Verifier: getDataSetStorageProvider is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'getDataSetStorageProvider')!
-          .outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getDataSetStorageProvider')!.outputs,
        options.pdpVerifier.getDataSetStorageProvider(args)
      )
    }
@@ -90,11 +88,19 @@
        throw new Error('PDP Verifier: getDataSetLeafCount is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.PDP_VERIFIER.find((abi) => abi.type === 'function' && abi.name === 'getDataSetLeafCount')!
-          .outputs,
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getDataSetLeafCount')!.outputs,
        options.pdpVerifier.getDataSetLeafCount(args)
      )
    }
+    case 'getScheduledRemovals': {
+      if (!options.pdpVerifier?.getScheduledRemovals) {
+        throw new Error('PDP Verifier: getScheduledRemovals is not defined')
+      }
+      return encodeAbiParameters(
+        Abis.pdp.find((abi) => abi.type === 'function' && abi.name === 'getScheduledRemovals')!.outputs,
+        options.pdpVerifier.getScheduledRemovals(args)
+      )
+    }
    default: {
      throw new Error(`PDP Verifier: unknown function: ${functionName} with args: ${args}`)
    }
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/service-registry.ts b/packages/synapse-core/src/mocks/jsonrpc/service-registry.ts
similarity index 79%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/service-registry.ts
rename to packages/synapse-core/src/mocks/jsonrpc/service-registry.ts
index 2840308d8..bd794f152 100644
--- a/packages/synapse-sdk/src/test/mocks/jsonrpc/service-registry.ts
+++ b/packages/synapse-core/src/mocks/jsonrpc/service-registry.ts
@@ -5,44 +5,29 @@
 import type { PDPOffering, ServiceProviderInfo } from '@filoz/synapse-core/warm-storage'
 import type { ExtractAbiFunction } from 'abitype'
 import type { Hex } from 'viem'
 import { decodeFunctionData, encodeAbiParameters, isAddressEqual } from 'viem'
-import { CONTRACT_ABIS } from '../../../utils/constants.ts'
+import * as Abis from '../../abis/index.ts'
 import type { AbiToType, JSONRPCOptions } from './types.ts'
 
-export type getProviderByAddress = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
-  'getProviderByAddress'
->
+export type getProviderByAddress = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getProviderByAddress'>
 
-export type getProvider = ExtractAbiFunction<typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY, 'getProvider'>
+export type getProvider = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getProvider'>
 
-export type getProviderIdByAddress = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
-  'getProviderIdByAddress'
->
+export type getProviderIdByAddress = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getProviderIdByAddress'>
 
-export type getProviderWithProduct = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
-  'getProviderWithProduct'
->
+export type getProviderWithProduct = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getProviderWithProduct'>
 
 export type getProvidersByProductType = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
+  typeof Abis.serviceProviderRegistry,
   'getProvidersByProductType'
 >
 
-export type getAllActiveProviders = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
-  'getAllActiveProviders'
->
+export type getAllActiveProviders = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getAllActiveProviders'>
 
-export type getProviderCount = ExtractAbiFunction<typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY, 'getProviderCount'>
+export type getProviderCount = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'getProviderCount'>
 
-export type isProviderActive = ExtractAbiFunction<typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY, 'isProviderActive'>
+export type isProviderActive = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'isProviderActive'>
 
-export type isRegisteredProvider = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
-  'isRegisteredProvider'
->
+export type isRegisteredProvider = ExtractAbiFunction<typeof Abis.serviceProviderRegistry, 'isRegisteredProvider'>
 
 export interface ServiceRegistryOptions {
   getProviderByAddress?: (args: AbiToType) => AbiToType
@@ -236,7 +221,7 @@ export function mockServiceProviderRegistry(providers: ProviderDecoded[]): Servi
 */
 export function serviceProviderRegistryCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY,
+    abi: Abis.serviceProviderRegistry,
     data: data as Hex,
   })
 
@@ -250,9 +235,8 @@
        throw new Error('Service Provider Registry: getProviderByAddress is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'getProviderByAddress'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getProviderByAddress')!
+          .outputs,
        options.serviceRegistry.getProviderByAddress(args)
      )
    }
@@ -261,9 +245,8 @@
        throw new Error('Service Provider Registry: getProviderIdByAddress is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'getProviderIdByAddress'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getProviderIdByAddress')!
+          .outputs,
        options.serviceRegistry.getProviderIdByAddress(args)
      )
    }
@@ -272,8 +255,7 @@
        throw new Error('Service Provider Registry: getProvider is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find((abi) => abi.type === 'function' && abi.name === 'getProvider')!
-          .outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getProvider')!.outputs,
        options.serviceRegistry.getProvider(args)
      )
    }
@@ -282,9 +264,8 @@
        throw new Error('Service Provider Registry: getProviderWithProduct is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'getProviderWithProduct'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getProviderWithProduct')!
+          .outputs,
        options.serviceRegistry.getProviderWithProduct(args)
      )
    }
@@ -293,9 +274,8 @@
        throw new Error('Service Provider Registry: getAllActiveProviders is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'getAllActiveProviders'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getAllActiveProviders')!
+          .outputs,
         options.serviceRegistry.getAllActiveProviders(args)
       )
     }
@@ -304,9 +284,7 @@
        throw new Error('Service Provider Registry: getProviderCount is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'getProviderCount'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'getProviderCount')!.outputs,
        options.serviceRegistry.getProviderCount(args)
      )
    }
@@ -315,9 +293,7 @@
        throw new Error('Service Provider Registry: isProviderActive is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'isProviderActive'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'isProviderActive')!.outputs,
        options.serviceRegistry.isProviderActive(args)
      )
    }
@@ -326,9 +302,8 @@
        throw new Error('Service Provider Registry: isRegisteredProvider is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'isRegisteredProvider'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'isRegisteredProvider')!
+          .outputs,
        options.serviceRegistry.isRegisteredProvider(args)
      )
    }
@@ -337,9 +312,7 @@
        throw new Error('Service Provider Registry: REGISTRATION_FEE is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.SERVICE_PROVIDER_REGISTRY.find(
-          (abi) => abi.type === 'function' && abi.name === 'REGISTRATION_FEE'
-        )!.outputs,
+        Abis.serviceProviderRegistry.find((abi) => abi.type === 'function' && abi.name === 'REGISTRATION_FEE')!.outputs,
        [options.serviceRegistry.REGISTRATION_FEE()]
      )
    }
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/session-key-registry.ts b/packages/synapse-core/src/mocks/jsonrpc/session-key-registry.ts
similarity index 76%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/session-key-registry.ts
rename to packages/synapse-core/src/mocks/jsonrpc/session-key-registry.ts
index b07c9d3af..14220d300 100644
--- a/packages/synapse-sdk/src/test/mocks/jsonrpc/session-key-registry.ts
+++ b/packages/synapse-core/src/mocks/jsonrpc/session-key-registry.ts
@@ -2,10 +2,10 @@
 import type { ExtractAbiFunction } from 'abitype'
 import { decodeFunctionData, encodeAbiParameters, type Hex } from 'viem'
-import { CONTRACT_ABIS } from '../../../utils/constants.ts'
+import * as Abis from '../../abis/index.ts'
 import type { AbiToType, JSONRPCOptions } from './types.ts'
 
-export type authorizationExpiry = ExtractAbiFunction<typeof CONTRACT_ABIS.SESSION_KEY_REGISTRY, 'authorizationExpiry'>
+export type authorizationExpiry = ExtractAbiFunction<typeof Abis.sessionKeyRegistry, 'authorizationExpiry'>
 
 export interface SessionKeyRegistryOptions {
   authorizationExpiry?: (args: AbiToType) => AbiToType
@@ -13,7 +13,7 @@
 
 export function sessionKeyRegistryCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.SESSION_KEY_REGISTRY,
+    abi: Abis.sessionKeyRegistry,
     data: data as Hex,
   })
 
@@ -27,8 +27,7 @@
         throw new Error('Service Provider Registry: authorizationExpiry is not defined')
       }
       return encodeAbiParameters(
-        CONTRACT_ABIS.SESSION_KEY_REGISTRY.find((abi) => abi.type === 'function' && abi.name === 'authorizationExpiry')!
-          .outputs,
+        Abis.sessionKeyRegistry.find((abi) => abi.type === 'function' && abi.name === 'authorizationExpiry')!.outputs,
        options.sessionKeyRegistry.authorizationExpiry(args)
      )
    }
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/types.ts b/packages/synapse-core/src/mocks/jsonrpc/types.ts
similarity index 100%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/types.ts
rename to packages/synapse-core/src/mocks/jsonrpc/types.ts
diff --git a/packages/synapse-sdk/src/test/mocks/jsonrpc/warm-storage.ts b/packages/synapse-core/src/mocks/jsonrpc/warm-storage.ts
similarity index 70%
rename from packages/synapse-sdk/src/test/mocks/jsonrpc/warm-storage.ts
rename to packages/synapse-core/src/mocks/jsonrpc/warm-storage.ts
index 3d0050f95..b24886aca 100644
--- a/packages/synapse-sdk/src/test/mocks/jsonrpc/warm-storage.ts
+++ b/packages/synapse-core/src/mocks/jsonrpc/warm-storage.ts
@@ -1,38 +1,26 @@
 /** biome-ignore-all lint/style/noNonNullAssertion: testing */
 import type { ExtractAbiFunction } from 'abitype'
 import { decodeFunctionData, encodeAbiParameters, type Hex } from 'viem'
-import { CONTRACT_ABIS } from '../../../utils/constants.ts'
+import * as Abis from '../../abis/index.ts'
 import type { AbiToType, JSONRPCOptions } from './types.ts'
 
 /**
  * Warm Storage View ABI types
  */
-export type isProviderApproved = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'isProviderApproved'>
-
-export type railToDataSet = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'railToDataSet'>
-
-export type getClientDataSets = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getClientDataSets'>
-
-export type clientDataSets = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'clientDataSets'>
-
-export type getDataSet = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getDataSet'>
-
-export type getApprovedProviders = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getApprovedProviders'>
-
-export type getAllDataSetMetadata = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getAllDataSetMetadata'>
-
-export type getDataSetMetadata = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getDataSetMetadata'>
-
-export type getAllPieceMetadata = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getAllPieceMetadata'>
-
-export type getPieceMetadata = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getPieceMetadata'>
-
-export type clientNonces = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'clientNonces'>
-
-export type getMaxProvingPeriod = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'getMaxProvingPeriod'>
-
-export type challengeWindow = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE_VIEW, 'challengeWindow'>
+export type isProviderApproved = ExtractAbiFunction<typeof Abis.storageView, 'isProviderApproved'>
+export type railToDataSet = ExtractAbiFunction<typeof Abis.storageView, 'railToDataSet'>
+export type getClientDataSets = ExtractAbiFunction<typeof Abis.storageView, 'getClientDataSets'>
+export type clientDataSets = ExtractAbiFunction<typeof Abis.storageView, 'clientDataSets'>
+export type getDataSet = ExtractAbiFunction<typeof Abis.storageView, 'getDataSet'>
+export type getApprovedProviders = ExtractAbiFunction<typeof Abis.storageView, 'getApprovedProviders'>
+export type getAllDataSetMetadata = ExtractAbiFunction<typeof Abis.storageView, 'getAllDataSetMetadata'>
+export type getDataSetMetadata = ExtractAbiFunction<typeof Abis.storageView, 'getDataSetMetadata'>
+export type getAllPieceMetadata = ExtractAbiFunction<typeof Abis.storageView, 'getAllPieceMetadata'>
+export type getPieceMetadata = ExtractAbiFunction<typeof Abis.storageView, 'getPieceMetadata'>
+export type clientNonces = ExtractAbiFunction<typeof Abis.storageView, 'clientNonces'>
+export type getMaxProvingPeriod = ExtractAbiFunction<typeof Abis.storageView, 'getMaxProvingPeriod'>
+export type challengeWindow = ExtractAbiFunction<typeof Abis.storageView, 'challengeWindow'>
 
 export interface WarmStorageViewOptions {
   isProviderApproved?: (args: AbiToType) => AbiToType
@@ -56,26 +44,15 @@
 * Warm Storage ABI types
 */
-export type pdpVerifierAddress = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'pdpVerifierAddress'>
-
-export type paymentsContractAddress = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'paymentsContractAddress'>
-
-export type usdfcTokenAddress = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'usdfcTokenAddress'>
-
-export type filBeamBeneficiaryAddress = ExtractAbiFunction<
-  typeof CONTRACT_ABIS.WARM_STORAGE,
-  'filBeamBeneficiaryAddress'
->
-
-export type viewContractAddress = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'viewContractAddress'>
-
-export type serviceProviderRegistry = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'serviceProviderRegistry'>
-
-export type sessionKeyRegistry = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'sessionKeyRegistry'>
-
-export type getServicePrice = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'getServicePrice'>
-
-export type owner = ExtractAbiFunction<typeof CONTRACT_ABIS.WARM_STORAGE, 'owner'>
+export type pdpVerifierAddress = ExtractAbiFunction<typeof Abis.storage, 'pdpVerifierAddress'>
+export type paymentsContractAddress = ExtractAbiFunction<typeof Abis.storage, 'paymentsContractAddress'>
+export type usdfcTokenAddress = ExtractAbiFunction<typeof Abis.storage, 'usdfcTokenAddress'>
+export type filBeamBeneficiaryAddress = ExtractAbiFunction<typeof Abis.storage, 'filBeamBeneficiaryAddress'>
+export type viewContractAddress = ExtractAbiFunction<typeof Abis.storage, 'viewContractAddress'>
+export type serviceProviderRegistry = ExtractAbiFunction<typeof Abis.storage, 'serviceProviderRegistry'>
+export type sessionKeyRegistry = ExtractAbiFunction<typeof Abis.storage, 'sessionKeyRegistry'>
+export type getServicePrice = ExtractAbiFunction<typeof Abis.storage, 'getServicePrice'>
+export type owner = ExtractAbiFunction<typeof Abis.storage, 'owner'>
 
 export interface WarmStorageOptions {
   pdpVerifierAddress?: (args: AbiToType) => AbiToType
@@ -100,7 +77,7 @@
 */
 export function warmStorageCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.WARM_STORAGE,
+    abi: Abis.storage,
     data: data as Hex,
   })
 
@@ -179,7 +156,7 @@
        throw new Error('Warm Storage: getServicePrice is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.WARM_STORAGE.find((abi) => abi.type === 'function' && abi.name === 'getServicePrice')!.outputs,
+        Abis.storage.find((abi) => abi.type === 'function' && abi.name === 'getServicePrice')!.outputs,
        options.warmStorage.getServicePrice(args)
      )
    }
@@ -189,7 +166,7 @@
        throw new Error('Warm Storage: owner is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.WARM_STORAGE.find((abi) => abi.type === 'function' && abi.name === 'owner')!.outputs,
+        Abis.storage.find((abi) => abi.type === 'function' && abi.name === 'owner')!.outputs,
        options.warmStorage.owner(args)
      )
    }
@@ -205,7 +182,7 @@
 */
 export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): Hex {
   const { functionName, args } = decodeFunctionData({
-    abi: CONTRACT_ABIS.WARM_STORAGE_VIEW,
+    abi: Abis.storageView,
     data: data as Hex,
   })
 
@@ -219,8 +196,7 @@
        throw new Error('Warm Storage View: isProviderApproved is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'isProviderApproved')!
-          .outputs,
+        Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'isProviderApproved')!.outputs,
        options.warmStorageView.isProviderApproved(args)
      )
    }
@@ -229,8 +205,7 @@
        throw new Error('Warm Storage View: getClientDataSets is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getClientDataSets')!
-          .outputs,
+        Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getClientDataSets')!.outputs,
        options.warmStorageView.getClientDataSets(args)
      )
    }
@@ -240,8 +215,7 @@
        throw new Error('Warm Storage View: clientDataSets is not defined')
      }
      return encodeAbiParameters(
-        CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'clientDataSets')!
- .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'clientDataSets')!.outputs, options.warmStorageView.clientDataSets(args) ) } @@ -251,7 +225,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getDataSet is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getDataSet')!.outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getDataSet')!.outputs, options.warmStorageView.getDataSet(args) ) } @@ -261,7 +235,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: railToDataSet is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'railToDataSet')!.outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'railToDataSet')!.outputs, options.warmStorageView.railToDataSet(args) ) } @@ -270,8 +244,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getApprovedProviders is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getApprovedProviders')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getApprovedProviders')!.outputs, options.warmStorageView.getApprovedProviders(args) ) } @@ -280,8 +253,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getAllDataSetMetadata is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getAllDataSetMetadata')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getAllDataSetMetadata')!.outputs, options.warmStorageView.getAllDataSetMetadata(args) ) } @@ -290,8 +262,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getDataSetMetadata is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getDataSetMetadata')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getDataSetMetadata')!.outputs, options.warmStorageView.getDataSetMetadata(args) ) } @@ -300,8 +271,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getAllPieceMetadata is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getAllPieceMetadata')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getAllPieceMetadata')!.outputs, options.warmStorageView.getAllPieceMetadata(args) ) } @@ -310,8 +280,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getPieceMetadata is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getPieceMetadata')! 
- .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getPieceMetadata')!.outputs, options.warmStorageView.getPieceMetadata(args) ) } @@ -320,7 +289,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: clientNonces is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'clientNonces')!.outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'clientNonces')!.outputs, options.warmStorageView.clientNonces(args) ) } @@ -329,8 +298,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: getMaxProvingPeriod is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'getMaxProvingPeriod')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'getMaxProvingPeriod')!.outputs, options.warmStorageView.getMaxProvingPeriod(args) ) } @@ -339,8 +307,7 @@ export function warmStorageViewCallHandler(data: Hex, options: JSONRPCOptions): throw new Error('Warm Storage View: challengeWindow is not defined') } return encodeAbiParameters( - CONTRACT_ABIS.WARM_STORAGE_VIEW.find((abi) => abi.type === 'function' && abi.name === 'challengeWindow')! - .outputs, + Abis.storageView.find((abi) => abi.type === 'function' && abi.name === 'challengeWindow')!.outputs, options.warmStorageView.challengeWindow(args) ) } diff --git a/packages/synapse-sdk/src/test/mocks/pdp/handlers.ts b/packages/synapse-core/src/mocks/pdp.ts similarity index 90% rename from packages/synapse-sdk/src/test/mocks/pdp/handlers.ts rename to packages/synapse-core/src/mocks/pdp.ts index dc7eb10cf..6922a5962 100644 --- a/packages/synapse-sdk/src/test/mocks/pdp/handlers.ts +++ b/packages/synapse-core/src/mocks/pdp.ts @@ -4,11 +4,10 @@ * These handlers can be used to mock PDP Server HTTP responses in tests */ -import { assert } from 'chai' -import { ethers } from 'ethers' +import assert from 'assert' import { HttpResponse, http } from 'msw' -import type { Hex } from 'viem' -import type { PDPAddPiecesInput } from '../../../pdp/server.ts' +import { decodeAbiParameters, type Hex } from 'viem' +import type { AddPiecesRequest } from '../sp.ts' export interface PDPMockOptions { baseUrl?: string @@ -16,13 +15,13 @@ export interface PDPMockOptions { } export interface MetadataCapture { - keys: string[] - values: string[] + keys: readonly string[] + values: readonly string[] } export interface PieceMetadataCapture { - keys: string[][] - values: string[][] + keys: readonly (readonly string[])[] + values: readonly (readonly string[])[] } export function createAndAddPiecesHandler(txHash: Hex, options: PDPMockOptions = {}) { @@ -124,10 +123,9 @@ export function postPieceHandler(pieceCid: string, uuid?: string, options: PDPMo const baseUrl = options.baseUrl ?? 
'http://pdp.local' return http.post, { pieceCid: string }>(`${baseUrl}/pdp/piece`, async ({ request }) => { const body = await request.json() - assert.isDefined(body) - assert.isNotNull(body) - assert.exists(body.pieceCid) - assert.equal(body.pieceCid, pieceCid) + assert(body != null, 'Body should be defined') + assert(body.pieceCid != null, 'PieceCID should be defined') + assert.strictEqual(body.pieceCid, pieceCid, 'PieceCID should match expected value') if (uuid == null) { // parked piece found return HttpResponse.json({ @@ -253,12 +251,14 @@ export function streamingUploadHandlers(options: PDPMockOptions = {}) { /** * Helper to decode metadata from extraData */ -export function decodeMetadataFromCreateDataSetExtraData(extraData: string): MetadataCapture { - const abiCoder = ethers.AbiCoder.defaultAbiCoder() - const decoded = abiCoder.decode(['address', 'uint256', 'string[]', 'string[]', 'bytes'], extraData) +export function decodeMetadataFromCreateDataSetExtraData(extraData: Hex): MetadataCapture { + const decoded = decodeAbiParameters( + [{ type: 'address' }, { type: 'uint256' }, { type: 'string[]' }, { type: 'string[]' }, { type: 'bytes' }], + extraData + ) return { - keys: decoded[2] as string[], - values: decoded[3] as string[], + keys: decoded[2], + values: decoded[3], } } @@ -266,12 +266,14 @@ export function decodeMetadataFromCreateDataSetExtraData(extraData: string): Met * Helper to decode piece metadata from extraData * Format: (uint256 nonce, string[][] keys, string[][] values, bytes signature) */ -export function decodePieceMetadataFromExtraData(extraData: string): PieceMetadataCapture { - const abiCoder = ethers.AbiCoder.defaultAbiCoder() - const decoded = abiCoder.decode(['uint256', 'string[][]', 'string[][]', 'bytes'], extraData) +export function decodePieceMetadataFromExtraData(extraData: Hex): PieceMetadataCapture { + const decoded = decodeAbiParameters( + [{ type: 'uint256' }, { type: 'string[][]' }, { type: 'string[][]' }, { type: 'bytes' }], + extraData + ) return { - keys: decoded[1] as string[][], - values: decoded[2] as string[][], + keys: decoded[1], + values: decoded[2], } } @@ -330,7 +332,7 @@ export function addPiecesWithMetadataCapture( ) { const baseUrl = options.baseUrl ?? 
'http://pdp.local' - return http.post<{ id: string }, PDPAddPiecesInput>( + return http.post<{ id: string }, AddPiecesRequest>( `${baseUrl}/pdp/data-sets/:id/pieces`, async ({ params, request }) => { if (params.id !== dataSetId.toString()) { diff --git a/packages/synapse-core/src/pay/index.ts b/packages/synapse-core/src/pay/index.ts index 25ebb8b3d..ab7a77d59 100644 --- a/packages/synapse-core/src/pay/index.ts +++ b/packages/synapse-core/src/pay/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Payments Contract Operations + * Payments Contract Operations * * @example * ```ts * import * as Pay from '@filoz/synapse-core/pay' * ``` * - * @packageDocumentation + * @module pay */ export * from './operators.ts' diff --git a/packages/synapse-core/src/piece.ts b/packages/synapse-core/src/piece.ts index 0b0e9db40..25be9e41d 100644 --- a/packages/synapse-core/src/piece.ts +++ b/packages/synapse-core/src/piece.ts @@ -2,6 +2,13 @@ * PieceCID (Piece Commitment CID) utilities * * Helper functions for working with Filecoin Piece CIDs + * + * @example + * ```ts + * import * as Piece from '@filoz/synapse-core/piece' + * ``` + * + * @module piece */ import type { LegacyPieceLink as LegacyPieceCIDType, PieceLink as PieceCIDType } from '@web3-storage/data-segment' diff --git a/packages/synapse-core/src/session-key/index.ts b/packages/synapse-core/src/session-key/index.ts index 256b6e289..1ba666568 100644 --- a/packages/synapse-core/src/session-key/index.ts +++ b/packages/synapse-core/src/session-key/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Session Key Contract Operations + * Session Key Contract Operations * * @example * ```ts * import * as SessionKey from '@filoz/synapse-core/session-key' * ``` * - * @packageDocumentation + * @module session-key */ export * from './actions.ts' export * from './permissions.ts' diff --git a/packages/synapse-core/src/sp.ts b/packages/synapse-core/src/sp.ts index cb0254a69..8498f4c2c 100644 --- a/packages/synapse-core/src/sp.ts +++ b/packages/synapse-core/src/sp.ts @@ -1,15 +1,16 @@ /** - * Synapse Core - Service Provider HTTP Operations + * Service Provider HTTP Operations * * @example * ```ts * import * as SP from '@filoz/synapse-core/sp' * ``` * - * @packageDocumentation + * @module sp */ import { HttpError, request, TimeoutError } from 'iso-web/http' +import type { ToString } from 'multiformats' import type { Simplify } from 'type-fest' import { type Address, type Hex, isHex } from 'viem' import { @@ -603,7 +604,7 @@ export async function findPiece(options: FindPieceOptions): Promise { const response = await request.json.get<{ pieceCid: string }>(new URL(`pdp/piece?${params.toString()}`, endpoint), { retry: { - statusCodes: [404], + statusCodes: [202, 404], retries: RETRIES, factor: FACTOR, }, @@ -630,6 +631,16 @@ export type AddPiecesOptions = { extraData: Hex } +export type AddPiecesRequest = { + pieces: { + pieceCid: ToString + subPieces: { + subPieceCid: ToString + }[] + }[] + extraData: Hex +} + /** * Add pieces to a data set on the PDP API. 
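For orientation, here is a minimal sketch of the body that the new `AddPiecesRequest` type describes; the CID strings and `extraData` below are placeholders, not real values:

```ts
import type { AddPiecesRequest } from '@filoz/synapse-core/sp'

// Hypothetical payload: one piece with a single sub-piece.
const body: AddPiecesRequest = {
  pieces: [
    {
      pieceCid: 'bafkzcib...', // placeholder PieceCID string
      subPieces: [{ subPieceCid: 'bafkzcib...' }], // placeholder
    },
  ],
  extraData: '0x', // in practice, the ABI-encoded signed payload from AddPiecesOptions
}
```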
* diff --git a/packages/synapse-core/src/typed-data/index.ts b/packages/synapse-core/src/typed-data/index.ts index 24d614c20..4cb795be7 100644 --- a/packages/synapse-core/src/typed-data/index.ts +++ b/packages/synapse-core/src/typed-data/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Typed Data Operations + * Typed Data Operations * * @example * ```ts * import * as TypedData from '@filoz/synapse-core/typed-data' * ``` * - * @packageDocumentation + * @module typed-data */ export * from './sign-add-pieces.ts' export * from './sign-create-dataset.ts' diff --git a/packages/synapse-core/src/usdfc.ts b/packages/synapse-core/src/usdfc.ts index 7901d0bdc..ec621603f 100644 --- a/packages/synapse-core/src/usdfc.ts +++ b/packages/synapse-core/src/usdfc.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - USDFC Contract Operations + * USDFC Contract Operations * * @example * ```ts * import * as USDFC from '@filoz/synapse-core/usdfc' * ``` * - * @packageDocumentation + * @module usdfc */ import type { Account, Chain, Client, Transport } from 'viem' diff --git a/packages/synapse-core/src/utils/cert.ts b/packages/synapse-core/src/utils/cert.ts new file mode 100644 index 000000000..c75d5583d --- /dev/null +++ b/packages/synapse-core/src/utils/cert.ts @@ -0,0 +1,151 @@ +import type { TypedDataToPrimitiveTypes } from 'abitype' +import type { Account, Address, Chain, Client, Hex, Transport } from 'viem' +import { bytesToBigInt, bytesToHex, concat, hexToBytes, numberToHex, recoverTypedDataAddress } from 'viem' +import { signTypedData } from 'viem/actions' +import { randU256 } from '../utils/rand.ts' + +export type Endorsement = { + /** + * Unique nonce to support nonce-based revocation. + */ + nonce: bigint + /** + * The certificate becomes invalid after the `notAfter` timestamp. + */ + notAfter: bigint +} + +export type SignedEndorsement = Endorsement & { + signature: Hex +} + +export const EIP712Endorsement = { + Endorsement: [ + { name: 'nonce', type: 'uint64' }, + { name: 'notAfter', type: 'uint64' }, + { name: 'providerId', type: 'uint256' }, + ], +} as const + +export type TypedEn = TypedDataToPrimitiveTypes<typeof EIP712Endorsement>['Endorsement'] + +export type SignCertOptions = { + nonce?: bigint // uint64 + notAfter: bigint // uint64 + providerId: bigint +} + +/** + * Signs an endorsement certificate for a specific provider. + * @param client - The client used to sign the message + * @param options - nonce (randomized if omitted), the `notAfter` expiry, and the provider to endorse + * @returns certificate data encoded as abiEncodePacked([nonce, notAfter, signature]), i.e. 8-byte nonce, 8-byte notAfter, 65-byte signature; the provider ID is implicit in where the certificate is placed in the registry. + */ +export async function signEndorsement(client: Client, options: SignCertOptions) { + const nonce = (options.nonce ??
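+ // randU256() yields a 256-bit value; masking with 0xffffffffffffffffn truncates it to uint64 to match the EIP-712 nonce field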
randU256()) & 0xffffffffffffffffn + const signature = await signTypedData(client, { + account: client.account, + domain: { + name: 'Storage Endorsement', + version: '1', + chainId: client.chain.id, + }, + types: EIP712Endorsement, + primaryType: 'Endorsement', + message: { + nonce: nonce, + notAfter: options.notAfter, + providerId: options.providerId, + }, + }) + + const encodedNonce = numberToHex(nonce, { size: 8 }) + const encodedNotAfter = numberToHex(options.notAfter, { size: 8 }) + + return concat([encodedNonce, encodedNotAfter, signature]) +} + +export async function decodeEndorsement( + providerId: bigint, + chainId: number | bigint, + hexData: Hex +): Promise<{ + address: Address | null + endorsement: SignedEndorsement +}> { + if (hexData.length !== 164) { + return { + address: null, + endorsement: { + nonce: 0n, + notAfter: 0n, + signature: '0x', + }, + } + } + const data = hexToBytes(hexData) + const endorsement: SignedEndorsement = { + nonce: bytesToBigInt(data.slice(0, 8)), + notAfter: bytesToBigInt(data.slice(8, 16)), + signature: bytesToHex(data.slice(16)), + } + const address = await recoverTypedDataAddress({ + domain: { + name: 'Storage Endorsement', + version: '1', + chainId, + }, + types: EIP712Endorsement, + primaryType: 'Endorsement', + message: { + nonce: endorsement.nonce, + notAfter: endorsement.notAfter, + providerId: providerId, + }, + signature: endorsement.signature, + }).catch(() => { + return null + }) + return { address, endorsement } +} + +/** + * Validates endorsement capabilities, if any, filtering out invalid ones + * @returns mapping of valid endorsements to expiry, nonce, signature + */ +export async function decodeEndorsements( + providerId: bigint, + chainId: number | bigint, + capabilities: Record +): Promise> { + const now = Date.now() / 1000 + const result: Record = {} + + for (const hexData of Object.values(capabilities)) { + try { + const { address, endorsement } = await decodeEndorsement(providerId, chainId, hexData) + if (address && endorsement.notAfter > now) { + result[address] = endorsement + } + } catch { + // Skip invalid endorsements + } + } + + return result +} + +/** + * @returns a list of capability keys and a list of capability values for the ServiceProviderRegistry + */ +export function encodeEndorsements(endorsements: Record): [string[], Hex[]] { + const keys: string[] = [] + const values: Hex[] = [] + Object.values(endorsements).forEach((value, index) => { + keys.push(`endorsement${index.toString()}`) + values.push( + concat([numberToHex(value.nonce, { size: 8 }), numberToHex(value.notAfter, { size: 8 }), value.signature]) + ) + }) + return [keys, values] +} diff --git a/packages/synapse-core/src/utils/index.ts b/packages/synapse-core/src/utils/index.ts index 70b2e8236..58af56498 100644 --- a/packages/synapse-core/src/utils/index.ts +++ b/packages/synapse-core/src/utils/index.ts @@ -1,5 +1,6 @@ export * from './calibration.ts' export * from './capabilities.ts' +export * from './cert.ts' export * from './constants.ts' export * from './decode-pdp-errors.ts' export * from './format.ts' diff --git a/packages/synapse-core/src/utils/pdp-capabilities.ts b/packages/synapse-core/src/utils/pdp-capabilities.ts index 4bd0a8081..ec54a1159 100644 --- a/packages/synapse-core/src/utils/pdp-capabilities.ts +++ b/packages/synapse-core/src/utils/pdp-capabilities.ts @@ -2,6 +2,7 @@ import type { Hex } from 'viem' import { bytesToHex, hexToString, isHex, numberToBytes, stringToHex, toBytes } from 'viem' import type { PDPOffering } from 
'../warm-storage/providers.ts' import { decodeAddressCapability } from './capabilities.ts' +import { decodeEndorsements, encodeEndorsements } from './cert.ts' // Standard capability keys for PDP product type (must match ServiceProviderRegistry.sol REQUIRED_PDP_KEYS) export const CAP_SERVICE_URL = 'serviceURL' @@ -18,7 +19,11 @@ export const CAP_PAYMENT_TOKEN = 'paymentTokenAddress' * Decode PDP capabilities from keys/values arrays into a PDPOffering object. * Based on Curio's capabilitiesToOffering function. */ -export function decodePDPCapabilities(capabilities: Record): PDPOffering { +export async function decodePDPCapabilities( + providerId: bigint, + chainId: number | bigint, + capabilities: Record +): Promise { return { serviceURL: hexToString(capabilities.serviceURL), minPieceSizeInBytes: BigInt(capabilities.minPieceSizeInBytes), @@ -29,6 +34,7 @@ export function decodePDPCapabilities(capabilities: Record): PDPOff minProvingPeriodInEpochs: BigInt(capabilities.minProvingPeriodInEpochs), location: hexToString(capabilities.location), paymentTokenAddress: decodeAddressCapability(capabilities.paymentTokenAddress), + endorsements: await decodeEndorsements(providerId, chainId, capabilities), } } @@ -62,6 +68,12 @@ export function encodePDPCapabilities( capabilityKeys.push(CAP_PAYMENT_TOKEN) capabilityValues.push(pdpOffering.paymentTokenAddress) + if (pdpOffering.endorsements != null) { + const [endorsementKeys, endorsementValues] = encodeEndorsements(pdpOffering.endorsements) + capabilityKeys.push(...endorsementKeys) + capabilityValues.push(...endorsementValues) + } + if (capabilities != null) { for (const [key, value] of Object.entries(capabilities)) { capabilityKeys.push(key) diff --git a/packages/synapse-core/src/warm-storage/data-sets.ts b/packages/synapse-core/src/warm-storage/data-sets.ts index 8990bb1b0..310f1925e 100644 --- a/packages/synapse-core/src/warm-storage/data-sets.ts +++ b/packages/synapse-core/src/warm-storage/data-sets.ts @@ -99,7 +99,9 @@ export async function getDataSets(client: Client, options: Get ], }) // getProviderWithProduct returns {providerId, providerInfo, product, productCapabilityValues} - const pdpCaps = decodePDPCapabilities( + const pdpCaps = await decodePDPCapabilities( + dataSet.providerId, + client.chain.id, capabilitiesListToObject(pdpOffering.product.capabilityKeys, pdpOffering.productCapabilityValues) ) @@ -178,7 +180,9 @@ export async function getDataSet(client: Client, options: GetD }) // getProviderWithProduct returns {providerId, providerInfo, product, productCapabilityValues} - const pdpCaps = decodePDPCapabilities( + const pdpCaps = await decodePDPCapabilities( + dataSet.providerId, + client.chain.id, capabilitiesListToObject(pdpOffering.product.capabilityKeys, pdpOffering.productCapabilityValues) ) diff --git a/packages/synapse-core/src/warm-storage/index.ts b/packages/synapse-core/src/warm-storage/index.ts index 2642168a3..e45cb4f93 100644 --- a/packages/synapse-core/src/warm-storage/index.ts +++ b/packages/synapse-core/src/warm-storage/index.ts @@ -1,12 +1,12 @@ /** - * Synapse Core - Warm Storage Contract Operations + * Warm Storage Contract Operations * * @example * ```ts * import * as WarmStorage from '@filoz/synapse-core/warm-storage' * ``` * - * @packageDocumentation + * @module warm-storage */ export * from './data-sets.ts' export * from './pieces.ts' diff --git a/packages/synapse-core/src/warm-storage/providers.ts b/packages/synapse-core/src/warm-storage/providers.ts index 3a00209bb..02a2c84a5 100644 --- 
a/packages/synapse-core/src/warm-storage/providers.ts +++ b/packages/synapse-core/src/warm-storage/providers.ts @@ -1,9 +1,10 @@ import type { AbiParametersToPrimitiveTypes, ExtractAbiFunction } from 'abitype' -import type { Chain, Client, Hex, Transport } from 'viem' +import type { Address, Chain, Client, Hex, Transport } from 'viem' import { readContract } from 'viem/actions' import type * as Abis from '../abis/index.ts' import { getChain } from '../chains.ts' import { capabilitiesListToObject } from '../utils/capabilities.ts' +import type { SignedEndorsement } from '../utils/cert.ts' import { decodePDPCapabilities } from '../utils/pdp-capabilities.ts' export type getProviderType = ExtractAbiFunction @@ -23,6 +24,7 @@ export interface PDPOffering { minProvingPeriodInEpochs: bigint location: string paymentTokenAddress: Hex + endorsements?: Record } export interface PDPProvider extends ServiceProviderInfo { @@ -59,7 +61,9 @@ export async function readProviders(client: Client): Promise
, options: Get return { id: provider.providerId, ...provider.providerInfo, - pdp: decodePDPCapabilities( + pdp: await decodePDPCapabilities( + provider.providerId, + client.chain.id, capabilitiesListToObject(provider.product.capabilityKeys, provider.productCapabilityValues) ), } diff --git a/packages/synapse-core/test/cert.test.ts b/packages/synapse-core/test/cert.test.ts new file mode 100644 index 000000000..52b982cd9 --- /dev/null +++ b/packages/synapse-core/test/cert.test.ts @@ -0,0 +1,138 @@ +import assert from 'assert' + +import type { Account, Chain, Client, Hex, Transport } from 'viem' +import { createWalletClient, http } from 'viem' +import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts' +import { calibration } from '../src/chains.ts' +import { decodeEndorsement, decodeEndorsements, encodeEndorsements, signEndorsement } from '../src/utils/cert.ts' + +describe('Endorsement Certificates', () => { + let client: Client + beforeEach(async () => { + client = createWalletClient({ + account: privateKeyToAccount(generatePrivateKey()), + transport: http(), + chain: calibration, + }) + }) + + it('should decode from the signed encoding the same account that signed', async () => { + const providerId = 10n + const notAfter = 0xffffffffffffffffn + const encoded = await signEndorsement(client, { + notAfter, + providerId, + }) + assert.equal(encoded.length, 164) + + const { address, endorsement } = await decodeEndorsement(providerId, client.chain.id, encoded) + assert.equal(address, client.account.address) + assert.equal(endorsement.notAfter, notAfter) + + const [keys, values] = encodeEndorsements({ + [address ?? '']: endorsement, + }) + assert.equal(keys.length, values.length) + assert.equal(keys.length, 1) + assert.equal(values.length, 1) + assert.equal(values[0], encoded) + }) + + it('should decode multiple valid endorsements', async () => { + const providerId = 15n + const notAfter = BigInt(Math.floor(Date.now() / 1000) + 3600) // 1 hour from now + + // Create multiple clients + const client2 = createWalletClient({ + account: privateKeyToAccount(generatePrivateKey()), + transport: http(), + chain: calibration, + }) + const client3 = createWalletClient({ + account: privateKeyToAccount(generatePrivateKey()), + transport: http(), + chain: calibration, + }) + + // Sign endorsements from different accounts + const encoded1 = await signEndorsement(client, { notAfter, providerId }) + const encoded2 = await signEndorsement(client2, { notAfter, providerId }) + const encoded3 = await signEndorsement(client3, { notAfter, providerId }) + + const capabilities = { + endorsement0: encoded1, + endorsement1: encoded2, + endorsement2: encoded3, + } + + const result = await decodeEndorsements(providerId, client.chain.id, capabilities) + + // Should have 3 valid endorsements + assert.equal(Object.keys(result).length, 3) + + // Verify all addresses are present and correct + assert.ok(result[client.account.address]) + assert.ok(result[client2.account.address]) + assert.ok(result[client3.account.address]) + + // Verify endorsement data + assert.equal(result[client.account.address].notAfter, notAfter) + assert.equal(result[client2.account.address].notAfter, notAfter) + assert.equal(result[client3.account.address].notAfter, notAfter) + }) + + it('should handle mixed valid and invalid endorsements', async () => { + const providerId = 20n + const notAfter = BigInt(Math.floor(Date.now() / 1000) + 3600) + + // Create valid endorsement + const validEncoded = await signEndorsement(client, { notAfter, 
providerId }) + + const capabilities: Record = { + blabla: '0xdeadbeef', + endorsement0: validEncoded, + endorsement1: '0x1234' as Hex, // Invalid - too short + endorsement2: `0x${'a'.repeat(162)}` as Hex, // Invalid - wrong format + endorsement3: `0x${'0'.repeat(162)}` as Hex, // Invalid - all zeros + } + + const result = await decodeEndorsements(providerId, client.chain.id, capabilities) + + // Should only have the valid endorsement + assert.equal(Object.keys(result).length, 1) + assert.ok(result[client.account.address]) + assert.equal(result[client.account.address].notAfter, notAfter) + }) + + it('should filter out expired endorsements', async () => { + const providerId = 25n + const futureTime = BigInt(Math.floor(Date.now() / 1000) + 3600) // 1 hour from now + const pastTime = BigInt(Math.floor(Date.now() / 1000) - 3600) // 1 hour ago + + // Create endorsements with different expiry times + const validEncoded = await signEndorsement(client, { notAfter: futureTime, providerId }) + const expiredEncoded = await signEndorsement(client, { notAfter: pastTime, providerId }) + + const capabilities = { + endorsement0: validEncoded, + endorsement1: expiredEncoded, + } + + const result = await decodeEndorsements(providerId, client.chain.id, capabilities) + + // Should only have the non-expired endorsement + assert.equal(Object.keys(result).length, 1) + assert.ok(result[client.account.address]) + assert.equal(result[client.account.address].notAfter, futureTime) + }) + + it('should handle empty capabilities', async () => { + const providerId = 30n + const capabilities = {} + + const result = await decodeEndorsements(providerId, client.chain.id, capabilities) + + // Should return empty object + assert.deepEqual(result, {}) + }) +}) diff --git a/packages/synapse-core/test/mocks/mockServiceWorker.js b/packages/synapse-core/test/mocks/mockServiceWorker.js new file mode 100644 index 000000000..558540fa5 --- /dev/null +++ b/packages/synapse-core/test/mocks/mockServiceWorker.js @@ -0,0 +1,349 @@ +/* eslint-disable */ +/* tslint:disable */ + +/** + * Mock Service Worker. + * @see https://github.com/mswjs/msw + * - Please do NOT modify this file. 
+ */ + +const PACKAGE_VERSION = '2.12.4' +const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82' +const IS_MOCKED_RESPONSE = Symbol('isMockedResponse') +const activeClientIds = new Set() + +addEventListener('install', function () { + self.skipWaiting() +}) + +addEventListener('activate', function (event) { + event.waitUntil(self.clients.claim()) +}) + +addEventListener('message', async function (event) { + const clientId = Reflect.get(event.source || {}, 'id') + + if (!clientId || !self.clients) { + return + } + + const client = await self.clients.get(clientId) + + if (!client) { + return + } + + const allClients = await self.clients.matchAll({ + type: 'window', + }) + + switch (event.data) { + case 'KEEPALIVE_REQUEST': { + sendToClient(client, { + type: 'KEEPALIVE_RESPONSE', + }) + break + } + + case 'INTEGRITY_CHECK_REQUEST': { + sendToClient(client, { + type: 'INTEGRITY_CHECK_RESPONSE', + payload: { + packageVersion: PACKAGE_VERSION, + checksum: INTEGRITY_CHECKSUM, + }, + }) + break + } + + case 'MOCK_ACTIVATE': { + activeClientIds.add(clientId) + + sendToClient(client, { + type: 'MOCKING_ENABLED', + payload: { + client: { + id: client.id, + frameType: client.frameType, + }, + }, + }) + break + } + + case 'CLIENT_CLOSED': { + activeClientIds.delete(clientId) + + const remainingClients = allClients.filter((client) => { + return client.id !== clientId + }) + + // Unregister itself when there are no more clients + if (remainingClients.length === 0) { + self.registration.unregister() + } + + break + } + } +}) + +addEventListener('fetch', function (event) { + const requestInterceptedAt = Date.now() + + // Bypass navigation requests. + if (event.request.mode === 'navigate') { + return + } + + // Opening the DevTools triggers the "only-if-cached" request + // that cannot be handled by the worker. Bypass such requests. + if ( + event.request.cache === 'only-if-cached' && + event.request.mode !== 'same-origin' + ) { + return + } + + // Bypass all requests when there are no active clients. + // Prevents the self-unregistered worked from handling requests + // after it's been terminated (still remains active until the next reload). + if (activeClientIds.size === 0) { + return + } + + const requestId = crypto.randomUUID() + event.respondWith(handleRequest(event, requestId, requestInterceptedAt)) +}) + +/** + * @param {FetchEvent} event + * @param {string} requestId + * @param {number} requestInterceptedAt + */ +async function handleRequest(event, requestId, requestInterceptedAt) { + const client = await resolveMainClient(event) + const requestCloneForEvents = event.request.clone() + const response = await getResponse( + event, + client, + requestId, + requestInterceptedAt, + ) + + // Send back the response clone for the "response:*" life-cycle events. + // Ensure MSW is active and ready to handle the message, otherwise + // this message will pend indefinitely. + if (client && activeClientIds.has(client.id)) { + const serializedRequest = await serializeRequest(requestCloneForEvents) + + // Clone the response so both the client and the library could consume it. 
+ const responseClone = response.clone() + + sendToClient( + client, + { + type: 'RESPONSE', + payload: { + isMockedResponse: IS_MOCKED_RESPONSE in response, + request: { + id: requestId, + ...serializedRequest, + }, + response: { + type: responseClone.type, + status: responseClone.status, + statusText: responseClone.statusText, + headers: Object.fromEntries(responseClone.headers.entries()), + body: responseClone.body, + }, + }, + }, + responseClone.body ? [serializedRequest.body, responseClone.body] : [], + ) + } + + return response +} + +/** + * Resolve the main client for the given event. + * Client that issues a request doesn't necessarily equal the client + * that registered the worker. It's with the latter the worker should + * communicate with during the response resolving phase. + * @param {FetchEvent} event + * @returns {Promise} + */ +async function resolveMainClient(event) { + const client = await self.clients.get(event.clientId) + + if (activeClientIds.has(event.clientId)) { + return client + } + + if (client?.frameType === 'top-level') { + return client + } + + const allClients = await self.clients.matchAll({ + type: 'window', + }) + + return allClients + .filter((client) => { + // Get only those clients that are currently visible. + return client.visibilityState === 'visible' + }) + .find((client) => { + // Find the client ID that's recorded in the + // set of clients that have registered the worker. + return activeClientIds.has(client.id) + }) +} + +/** + * @param {FetchEvent} event + * @param {Client | undefined} client + * @param {string} requestId + * @param {number} requestInterceptedAt + * @returns {Promise} + */ +async function getResponse(event, client, requestId, requestInterceptedAt) { + // Clone the request because it might've been already used + // (i.e. its body has been read and sent to the client). + const requestClone = event.request.clone() + + function passthrough() { + // Cast the request headers to a new Headers instance + // so the headers can be manipulated with. + const headers = new Headers(requestClone.headers) + + // Remove the "accept" header value that marked this request as passthrough. + // This prevents request alteration and also keeps it compliant with the + // user-defined CORS policies. + const acceptHeader = headers.get('accept') + if (acceptHeader) { + const values = acceptHeader.split(',').map((value) => value.trim()) + const filteredValues = values.filter( + (value) => value !== 'msw/passthrough', + ) + + if (filteredValues.length > 0) { + headers.set('accept', filteredValues.join(', ')) + } else { + headers.delete('accept') + } + } + + return fetch(requestClone, { headers }) + } + + // Bypass mocking when the client is not active. + if (!client) { + return passthrough() + } + + // Bypass initial page load requests (i.e. static assets). + // The absence of the immediate/parent client in the map of the active clients + // means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet + // and is not ready to handle requests. + if (!activeClientIds.has(client.id)) { + return passthrough() + } + + // Notify the client that a request has been intercepted. 
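+ // The serialized request carries its body as a transferable ArrayBuffer; the client replies over a MessageChannel with either a MOCK_RESPONSE payload or a PASSTHROUGH instruction.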
+ const serializedRequest = await serializeRequest(event.request) + const clientMessage = await sendToClient( + client, + { + type: 'REQUEST', + payload: { + id: requestId, + interceptedAt: requestInterceptedAt, + ...serializedRequest, + }, + }, + [serializedRequest.body], + ) + + switch (clientMessage.type) { + case 'MOCK_RESPONSE': { + return respondWithMock(clientMessage.data) + } + + case 'PASSTHROUGH': { + return passthrough() + } + } + + return passthrough() +} + +/** + * @param {Client} client + * @param {any} message + * @param {Array} transferrables + * @returns {Promise} + */ +function sendToClient(client, message, transferrables = []) { + return new Promise((resolve, reject) => { + const channel = new MessageChannel() + + channel.port1.onmessage = (event) => { + if (event.data && event.data.error) { + return reject(event.data.error) + } + + resolve(event.data) + } + + client.postMessage(message, [ + channel.port2, + ...transferrables.filter(Boolean), + ]) + }) +} + +/** + * @param {Response} response + * @returns {Response} + */ +function respondWithMock(response) { + // Setting response status code to 0 is a no-op. + // However, when responding with a "Response.error()", the produced Response + // instance will have status code set to 0. Since it's not possible to create + // a Response instance with status code 0, handle that use-case separately. + if (response.status === 0) { + return Response.error() + } + + const mockedResponse = new Response(response.body, response) + + Reflect.defineProperty(mockedResponse, IS_MOCKED_RESPONSE, { + value: true, + enumerable: true, + }) + + return mockedResponse +} + +/** + * @param {Request} request + */ +async function serializeRequest(request) { + return { + url: request.url, + mode: request.mode, + method: request.method, + headers: Object.fromEntries(request.headers.entries()), + cache: request.cache, + credentials: request.credentials, + destination: request.destination, + integrity: request.integrity, + redirect: request.redirect, + referrer: request.referrer, + referrerPolicy: request.referrerPolicy, + body: await request.arrayBuffer(), + keepalive: request.keepalive, + } +} diff --git a/packages/synapse-core/tools/endorse-sp.js b/packages/synapse-core/tools/endorse-sp.js new file mode 100644 index 000000000..200728ee9 --- /dev/null +++ b/packages/synapse-core/tools/endorse-sp.js @@ -0,0 +1,122 @@ +import EthModule from '@ledgerhq/hw-app-eth' +import TransportNodeHidModule from '@ledgerhq/hw-transport-node-hid' +import { createWalletClient, http } from 'viem' +import { privateKeyToAccount, toAccount } from 'viem/accounts' + +const TransportNodeHid = TransportNodeHidModule.default || TransportNodeHidModule +const Eth = EthModule.default || EthModule + +import { getChain } from '../src/chains.ts' +import { signEndorsement } from '../src/utils/cert.ts' + +function printUsageAndExit() { + console.error('Usage: PRIVATE_KEY=0x... 
node tools/endorse-sp.js providerId...') + console.error(' or: USE_LEDGER=true node tools/endorse-sp.js providerId...') + process.exit(1) +} + +const PRIVATE_KEY = process.env.PRIVATE_KEY +const USE_LEDGER = process.env.USE_LEDGER === 'true' +const LEDGER_PATH = process.env.LEDGER_PATH || "m/44'/60'/0'/0/0" +const ETH_RPC_URL = process.env.ETH_RPC_URL || 'https://api.calibration.node.glif.io/rpc/v1' +const EXPIRY = process.env.EXPIRY || BigInt(Math.floor(Date.now() / 1000)) + 10368000n + +if (!PRIVATE_KEY && !USE_LEDGER) { + console.error('ERROR: Either PRIVATE_KEY or USE_LEDGER=true is required') + printUsageAndExit() +} + +let CHAIN_ID = process.env.CHAIN_ID + +// TODO also support providerAddress and serviceURL +const providerIds = process.argv.slice(2) +if (providerIds.length === 0) { + console.error('ERROR: must specify at least one providerId') + printUsageAndExit() +} + +async function createLedgerAccount() { + const transport = await TransportNodeHid.open('') + const eth = new Eth(transport) + + const { address } = await eth.getAddress(LEDGER_PATH) + + const account = toAccount({ + address, + async signMessage({ message }) { + const messageHex = typeof message === 'string' ? Buffer.from(message).toString('hex') : message.slice(2) + const result = await eth.signPersonalMessage(LEDGER_PATH, messageHex) + return `0x${result.r}${result.s}${(result.v - 27).toString(16).padStart(2, '0')}` + }, + async signTransaction(_transaction) { + throw new Error('signTransaction not needed for this script') + }, + async signTypedData(typedData) { + const result = await eth.signEIP712Message(LEDGER_PATH, typedData) + return `0x${result.r}${result.s}${(result.v - 27).toString(16).padStart(2, '0')}` + }, + }) + + return { account, close: () => transport.close() } +} + +async function main() { + if (CHAIN_ID == null) { + console.log('fetching eth_chainId from', ETH_RPC_URL) + const response = await fetch(ETH_RPC_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + id: 1, + method: 'eth_chainId', + params: [], + }), + }) + const result = await response.json() + CHAIN_ID = result.result + } + console.log('ChainId:', Number(CHAIN_ID)) + + let account + let closeLedgerTransport = null + if (USE_LEDGER) { + console.log('🔐 Using Ledger hardware wallet') + console.log('📍 Path:', LEDGER_PATH, '(Ethereum standard)') + console.log('⚠️ Connect Ledger, unlock, and open the Ethereum app') + console.log('⚠️ Enable "Blind signing" in Ethereum app settings') + const ledgerResult = await createLedgerAccount() + account = ledgerResult.account + closeLedgerTransport = ledgerResult.close + console.log('✅ Connected, address:', account.address) + } else { + account = privateKeyToAccount(PRIVATE_KEY) + } + + try { + const client = createWalletClient({ + account, + transport: http(ETH_RPC_URL), + chain: getChain(Number(CHAIN_ID)), + }) + + console.log('Expiry:', new Date(Number(EXPIRY) * 1000).toLocaleString()) + + for (const providerId of providerIds) { + if (USE_LEDGER) console.log('\n⏳ Confirm on Ledger for provider:', providerId) + const encoded = await signEndorsement(client, { + providerId: BigInt(providerId), + notAfter: EXPIRY, + }) + console.log('Provider:', providerId) + console.log('Endorsement:', encoded) + } + } finally { + if (closeLedgerTransport != null) { + await closeLedgerTransport() + } + } +} + +main().catch(console.error) diff --git a/packages/synapse-core/tsconfig.json b/packages/synapse-core/tsconfig.json index cbefaf82e..9c0e737d9 100644 --- 
a/packages/synapse-core/tsconfig.json +++ b/packages/synapse-core/tsconfig.json @@ -14,6 +14,7 @@ "src/erc20.ts", "src/usdfc.ts", "src/abis/index.ts", + "src/auction/index.ts", "src/pay/index.ts", "src/warm-storage/index.ts", "src/typed-data/index.ts", diff --git a/packages/synapse-react/CHANGELOG.md b/packages/synapse-react/CHANGELOG.md index a769f228f..bb3bb6716 100644 --- a/packages/synapse-react/CHANGELOG.md +++ b/packages/synapse-react/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## [0.1.4](https://github.com/FilOzone/synapse-sdk/compare/synapse-react-v0.1.3...synapse-react-v0.1.4) (2025-12-04) + + +### Chores + +* **deps-dev:** bump @biomejs/biome from 2.3.5 to 2.3.6 ([#448](https://github.com/FilOzone/synapse-sdk/issues/448)) ([ebcab4e](https://github.com/FilOzone/synapse-sdk/commit/ebcab4ea166aa69c35d988ff2356b3f5972af351)) +* **deps-dev:** bump @biomejs/biome from 2.3.6 to 2.3.7 ([#459](https://github.com/FilOzone/synapse-sdk/issues/459)) ([d3c65a8](https://github.com/FilOzone/synapse-sdk/commit/d3c65a806e4819bbc560f5a7087f79eec31417a5)) +* **deps-dev:** bump @biomejs/biome from 2.3.7 to 2.3.8 ([#476](https://github.com/FilOzone/synapse-sdk/issues/476)) ([d95f812](https://github.com/FilOzone/synapse-sdk/commit/d95f812d7752a9b1dcb46219a4857eb99b54ebf0)) + ## [0.1.3](https://github.com/FilOzone/synapse-sdk/compare/synapse-react-v0.1.2...synapse-react-v0.1.3) (2025-11-17) diff --git a/packages/synapse-react/package.json b/packages/synapse-react/package.json index 71b70db9c..90217a523 100644 --- a/packages/synapse-react/package.json +++ b/packages/synapse-react/package.json @@ -1,6 +1,6 @@ { "name": "@filoz/synapse-react", - "version": "0.1.3", + "version": "0.1.4", "description": "React hooks for interacting with Filecoin Onchain Cloud smart contracts", "repository": { "type": "git", @@ -29,6 +29,17 @@ ".": { "types": "./dist/src/index.d.ts", "default": "./dist/src/index.js" + }, + "./filsnap": { + "types": "./dist/src/filsnap.d.ts", + "default": "./dist/src/filsnap.js" + } + }, + "typesVersions": { + "*": { + "filsnap": [ + "./dist/src/filsnap" + ] } }, "files": [ @@ -73,10 +84,10 @@ "@hugomrdias/filsnap-adapter": "^3.3.8" }, "devDependencies": { - "@biomejs/biome": "2.3.7", - "@types/node": "^24.9.1", + "@biomejs/biome": "catalog:", + "@types/node": "catalog:", "type-fest": "^5.1.0", - "typescript": "5.9.3" + "typescript": "catalog:" }, "publishConfig": { "access": "public" @@ -88,7 +99,8 @@ }, "peerDependencies": { "@tanstack/react-query": "5.x", + "@wagmi/core": "3.x", "viem": "2.x", - "wagmi": "2.x" + "wagmi": "3.x" } } diff --git a/packages/synapse-react/src/filsnap.ts b/packages/synapse-react/src/filsnap.ts index 0a48f4074..833c4e66b 100644 --- a/packages/synapse-react/src/filsnap.ts +++ b/packages/synapse-react/src/filsnap.ts @@ -7,6 +7,10 @@ const SNAP_ID = 'npm:filsnap' //'local:http://localhost:8080' export const useFilsnap = ({ version, force }: { version?: string; force?: boolean } = {}) => { useAccountEffect({ onConnect: async (data) => { + if (!data.connector || !('getProvider' in data.connector)) { + return + } + const provider = (await data.connector.getProvider()) as EIP1193Provider if (provider.isMetaMask) { await getOrInstallSnap(provider, SNAP_ID, version ?? 
'*', force) diff --git a/packages/synapse-react/src/index.ts b/packages/synapse-react/src/index.ts index bd69ca8f5..d1116683a 100644 --- a/packages/synapse-react/src/index.ts +++ b/packages/synapse-react/src/index.ts @@ -11,7 +11,6 @@ export * from './calibration.ts' export * from './erc20.ts' -export * from './filsnap.ts' export * from './payments/index.ts' export * from './usdfc.ts' export * from './warm-storage/index.ts' diff --git a/packages/synapse-react/tsconfig.json b/packages/synapse-react/tsconfig.json index 572e92242..e688e6fbe 100644 --- a/packages/synapse-react/tsconfig.json +++ b/packages/synapse-react/tsconfig.json @@ -7,7 +7,7 @@ "exclude": ["node_modules", "dist"], "references": [ { - "path": "../synapse-core" + "path": "../synapse-core/tsconfig.json" } ], "typedocOptions": { diff --git a/packages/synapse-sdk/CHANGELOG.md b/packages/synapse-sdk/CHANGELOG.md index 844fb5879..a59e150b7 100644 --- a/packages/synapse-sdk/CHANGELOG.md +++ b/packages/synapse-sdk/CHANGELOG.md @@ -1,8 +1,19 @@ -## [0.28.0](https://github.com/FilOzone/synapse-sdk/compare/v0.27.0...v0.28.0) (2025-09-19) +# Changelog -### Features +## [0.36.1](https://github.com/FilOzone/synapse-sdk/compare/synapse-sdk-v0.36.0...synapse-sdk-v0.36.1) (2025-12-04) + + +### Bug Fixes + +* add getScheduledRemovals method ([#464](https://github.com/FilOzone/synapse-sdk/issues/464)) ([05e6b92](https://github.com/FilOzone/synapse-sdk/commit/05e6b92bb62ff4a4da48b4fa35398a66da23b049)) + + +### Chores -* add terminateDataSet functionality ([#230](https://github.com/FilOzone/synapse-sdk/issues/230)) ([ffaacac](https://github.com/FilOzone/synapse-sdk/commit/ffaacac507b4882abfc33d3de72fe9fa98843cd2)) +* **deps-dev:** bump @biomejs/biome from 2.3.5 to 2.3.6 ([#448](https://github.com/FilOzone/synapse-sdk/issues/448)) ([ebcab4e](https://github.com/FilOzone/synapse-sdk/commit/ebcab4ea166aa69c35d988ff2356b3f5972af351)) +* **deps-dev:** bump @biomejs/biome from 2.3.6 to 2.3.7 ([#459](https://github.com/FilOzone/synapse-sdk/issues/459)) ([d3c65a8](https://github.com/FilOzone/synapse-sdk/commit/d3c65a806e4819bbc560f5a7087f79eec31417a5)) +* **deps-dev:** bump @biomejs/biome from 2.3.7 to 2.3.8 ([#476](https://github.com/FilOzone/synapse-sdk/issues/476)) ([d95f812](https://github.com/FilOzone/synapse-sdk/commit/d95f812d7752a9b1dcb46219a4857eb99b54ebf0)) +* export StorageManagerUploadOptions ([#462](https://github.com/FilOzone/synapse-sdk/issues/462)) ([c5897f2](https://github.com/FilOzone/synapse-sdk/commit/c5897f21ac11b41f03e1552a09a8d34d6c42666b)) ## [0.36.0](https://github.com/FilOzone/synapse-sdk/compare/synapse-sdk-v0.35.3...synapse-sdk-v0.36.0) (2025-11-17) diff --git a/packages/synapse-sdk/package.json b/packages/synapse-sdk/package.json index 27b3cb35f..e6ffa6c01 100644 --- a/packages/synapse-sdk/package.json +++ b/packages/synapse-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@filoz/synapse-sdk", - "version": "0.36.0", + "version": "0.36.1", "description": "JavaScript SDK for Filecoin Onchain Cloud", "repository": { "type": "git", @@ -27,25 +27,72 @@ "import": "./dist/src/index.js", "types": "./dist/src/index.d.ts" }, + "./payments": { + "import": "./dist/src/payments/index.js", + "types": "./dist/src/payments/index.d.ts" + }, "./pdp": { "import": "./dist/src/pdp/index.js", "types": "./dist/src/pdp/index.d.ts" }, - "./payments": { - "import": "./dist/src/payments/index.js", - "types": "./dist/src/payments/index.d.ts" + "./session": { + "import": "./dist/src/session/index.js", + "types": "./dist/src/session/index.d.ts" }, - 
"./warm-storage": { - "import": "./dist/src/warm-storage/index.js", - "types": "./dist/src/warm-storage/index.d.ts" + "./storage": { + "import": "./dist/src/storage/index.js", + "types": "./dist/src/storage/index.d.ts" }, "./subgraph": { "import": "./dist/src/subgraph/index.js", "types": "./dist/src/subgraph/index.d.ts" }, + "./telemetry": { + "import": "./dist/src/telemetry/index.js", + "types": "./dist/src/telemetry/index.d.ts" + }, + "./warm-storage": { + "import": "./dist/src/warm-storage/index.js", + "types": "./dist/src/warm-storage/index.d.ts" + }, "./sp-registry": { "import": "./dist/src/sp-registry/index.js", "types": "./dist/src/sp-registry/index.d.ts" + }, + "./filbeam": { + "import": "./dist/src/filbeam/index.js", + "types": "./dist/src/filbeam/index.d.ts" + } + }, + "typesVersions": { + "*": { + "payments": [ + "./dist/src/payments" + ], + "pdp": [ + "./dist/src/pdp" + ], + "session": [ + "./dist/src/session" + ], + "storage": [ + "./dist/src/storage" + ], + "subgraph": [ + "./dist/src/subgraph" + ], + "telemetry": [ + "./dist/src/telemetry" + ], + "warm-storage": [ + "./dist/src/warm-storage" + ], + "sp-registry": [ + "./dist/src/sp-registry" + ], + "filbeam": [ + "./dist/src/filbeam" + ] } }, "scripts": { @@ -56,7 +103,8 @@ "test:node": "wireit", "test:browser": "wireit", "clean": "rm -rf dist", - "prepublishOnly": "pnpm run clean && pnpm run build" + "prepublishOnly": "pnpm run clean && pnpm run build", + "update:msw": "pnpx msw init src/test/mocks/ --save" }, "wireit": { "build": { @@ -118,30 +166,30 @@ "dependencies": { "@filoz/synapse-core": "workspace:^", "@web3-storage/data-segment": "^5.3.0", - "ethers": "^6.15.0", "multiformats": "^13.4.1", - "ox": "^0.9.12", - "viem": "^2.38.3" + "ox": "catalog:", + "viem": "catalog:" }, "optionalDependencies": { "@sentry/browser": "^10.21.0", "@sentry/node": "^10.21.0" }, "devDependencies": { - "@biomejs/biome": "2.3.7", + "@biomejs/biome": "catalog:", "@types/chai": "^5.2.3", - "@types/mocha": "^10.0.10", - "@types/node": "^24.9.1", + "@types/mocha": "catalog:", + "@types/node": "catalog:", "@wagmi/cli": "^2.7.0", - "abitype": "^1.1.1", + "abitype": "catalog:", "chai": "^6.2.0", - "iso-web": "^1.4.3", - "mocha": "^11.7.4", - "msw": "~2.10.5", + "ethers": "catalog:", + "iso-web": "^2.1.0", + "mocha": "catalog:", + "msw": "catalog:", "p-defer": "^4.0.1", "playwright-test": "^14.1.12", "type-fest": "^5.1.0", - "typescript": "5.9.3" + "typescript": "catalog:" }, "publishConfig": { "access": "public" @@ -153,5 +201,8 @@ }, "browser": { "@sentry/node": "@sentry/browser" + }, + "peerDependencies": { + "ethers": "6.x" } } diff --git a/packages/synapse-sdk/src/filbeam/index.ts b/packages/synapse-sdk/src/filbeam/index.ts new file mode 100644 index 000000000..f4794d54a --- /dev/null +++ b/packages/synapse-sdk/src/filbeam/index.ts @@ -0,0 +1,75 @@ +/** + * FilBeam Service + * + * Client for the FilBeam stats API. + * + * ## Overview + * + * FilBeam enables retrieval incentives for Filecoin PDP (Proof of Data Possession) + * service providers by acting as a trusted intermediary that measures traffic + * between clients and storage providers. + * + * ## Architecture + * + * FilBeam operates as a caching layer between clients and storage providers, + * enabling efficient retrieval of content-addressable data stored on Filecoin PDP. + * + * ``` + * Client <--> FilBeam (cache + metering) <--> Storage Provider + * ``` + * + * ## Billing Model + * + * Both cache hits and cache misses generate billable egress events. 
This transforms + * Filecoin from passive archival storage into an active "serve many" data delivery + * infrastructure, where service providers are compensated for serving retrievals. + * + * @module FilBeam + * + * @example Basic Usage + * ```typescript + * import { FilBeamService } from '@filoz/synapse-sdk/filbeam' + * + * // Create service for mainnet + * const service = new FilBeamService('mainnet') + * + * // Get remaining data set statistics + * const stats = await service.getDataSetStats('dataset-id') + * console.log('Remaining CDN Egress:', stats.cdnEgressQuota) + * console.log('Remaining Cache Miss:', stats.cacheMissEgressQuota) + * ``` + * + * @example Integration with Synapse SDK + * ```typescript + * import { Synapse } from '@filoz/synapse-sdk' + * + * // Initialize Synapse + * const synapse = await Synapse.create({ + * privateKey: process.env.PRIVATE_KEY, + * rpcURL: 'https://api.node.glif.io/rpc/v1' + * }) + * + * // Access FilBeam service through Synapse + * const stats = await synapse.filbeam.getDataSetStats('my-dataset') + * + * // Monitor remaining quotas over time + * setInterval(async () => { + * const currentStats = await synapse.filbeam.getDataSetStats('my-dataset') + * console.log('Remaining quotas:', currentStats) + * + * // Alert if running low + * const TiB = BigInt(1024 ** 4) + * const remainingTiB = Number((currentStats.cdnEgressQuota + currentStats.cacheMissEgressQuota) / TiB) + * if (remainingTiB < 1) { + * console.warn('Low quota warning: Less than 1 TiB remaining') + * } + * }, 60000) // Check every minute + * ``` + * + * @see {@link https://docs.filbeam.com | FilBeam Documentation} - Official FilBeam documentation + * @see {@link https://meridian.space/blog/introducing-pay-per-byte-a-new-era-for-filecoin-retrieval | Pay Per Byte Blog Post} - Introduction to the pay-per-byte pricing model + * @see {@link DataSetStats} for the structure of returned statistics + * @see {@link FilBeamService} for the main service class + */ + +export { type DataSetStats, FilBeamService } from './service.ts' diff --git a/packages/synapse-sdk/src/filbeam/service.ts b/packages/synapse-sdk/src/filbeam/service.ts new file mode 100644 index 000000000..d40aa81cd --- /dev/null +++ b/packages/synapse-sdk/src/filbeam/service.ts @@ -0,0 +1,168 @@ +/** + * @module FilBeamService + * @description FilBeam service integration for Filecoin's pay-per-byte infrastructure. + * + * This module provides integration with FilBeam's services, including querying egress quotas + * and managing pay-per-byte data delivery metrics. + * + * @see {@link https://docs.filbeam.com | FilBeam Documentation} - Official FilBeam documentation + */ + +import type { FilecoinNetworkType } from '../types.ts' +import { createError } from '../utils/errors.ts' + +/** + * Data set statistics from FilBeam. + * + * These quotas represent the remaining pay-per-byte allocation available for data retrieval + * through FilBeam's trusted measurement layer. The values decrease as data is served and + * represent how many bytes can still be retrieved before needing to add more credits. 
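+ *
+ * A minimal sketch of the returned shape (the byte values below are invented):
+ *
+ * @example
+ * ```typescript
+ * // Illustrative values only: quotas are byte counts as BigInt.
+ * const stats: DataSetStats = {
+ *   cdnEgressQuota: 1_099_511_627_776n, // 1 TiB still servable from cache
+ *   cacheMissEgressQuota: 549_755_813_888n, // 0.5 TiB still servable via providers
+ * }
+ * ```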
+ *
+ * @interface DataSetStats
+ * @property {bigint} cdnEgressQuota - The remaining CDN egress quota for cache hits (data served directly from FilBeam's cache) in bytes
+ * @property {bigint} cacheMissEgressQuota - The remaining egress quota for cache misses (data retrieved from storage providers) in bytes
+ */
+export interface DataSetStats {
+  cdnEgressQuota: bigint
+  cacheMissEgressQuota: bigint
+}
+
+/**
+ * Service for interacting with FilBeam infrastructure and APIs.
+ *
+ * @example
+ * ```typescript
+ * // Create service with network detection
+ * const synapse = await Synapse.create({ privateKey, rpcURL })
+ * const stats = await synapse.filbeam.getDataSetStats(12345)
+ *
+ * // Monitor remaining pay-per-byte quotas
+ * const service = new FilBeamService('mainnet')
+ * const stats = await service.getDataSetStats(12345)
+ * console.log('Remaining CDN Egress (cache hits):', stats.cdnEgressQuota)
+ * console.log('Remaining Cache Miss Egress:', stats.cacheMissEgressQuota)
+ * ```
+ *
+ * @remarks
+ * All quota values are returned as BigInt for precision when handling large byte values.
+ *
+ * @see {@link https://docs.filbeam.com | FilBeam Documentation} for detailed API specifications and usage guides
+ */
+export class FilBeamService {
+  private readonly _network: FilecoinNetworkType
+  private readonly _fetch: typeof fetch
+
+  constructor(network: FilecoinNetworkType, fetchImpl: typeof fetch = globalThis.fetch) {
+    this._validateNetworkType(network)
+    this._network = network
+    this._fetch = fetchImpl
+  }
+
+  private _validateNetworkType(network: FilecoinNetworkType) {
+    if (network === 'mainnet' || network === 'calibration') return
+
+    throw createError(
+      'FilBeamService',
+      'validateNetworkType',
+      'Unsupported network type: Only Filecoin mainnet and calibration networks are supported.'
+    )
+  }
+
+  /**
+   * Get the base stats URL for the current network
+   */
+  private _getStatsBaseUrl(): string {
+    return this._network === 'mainnet' ? 'https://stats.filbeam.io' : 'https://calibration.stats.filbeam.io'
+  }
+
+  /**
+   * Validates the response from FilBeam stats API
+   */
+  private _validateStatsResponse(data: unknown): { cdnEgressQuota: string; cacheMissEgressQuota: string } {
+    if (typeof data !== 'object' || data === null) {
+      throw createError('FilBeamService', 'validateStatsResponse', 'Response is not an object')
+    }
+
+    const response = data as Record<string, unknown>
+
+    if (typeof response.cdnEgressQuota !== 'string') {
+      throw createError('FilBeamService', 'validateStatsResponse', 'cdnEgressQuota must be a string')
+    }
+
+    if (typeof response.cacheMissEgressQuota !== 'string') {
+      throw createError('FilBeamService', 'validateStatsResponse', 'cacheMissEgressQuota must be a string')
+    }
+
+    return {
+      cdnEgressQuota: response.cdnEgressQuota,
+      cacheMissEgressQuota: response.cacheMissEgressQuota,
+    }
+  }
+
+  /**
+   * Retrieves remaining pay-per-byte statistics for a specific data set from FilBeam.
+   *
+   * Fetches the remaining CDN and cache miss egress quotas for a data set. These quotas
+   * track how many bytes can still be retrieved through FilBeam's trusted measurement layer
+   * before needing to add more credits:
+   *
+   * - **CDN Egress Quota**: Remaining bytes that can be served from FilBeam's cache (fast, direct delivery)
+   * - **Cache Miss Egress Quota**: Remaining bytes that can be retrieved from storage providers (triggers caching)
+   *
+   * Both types of egress are billed based on volume.
Query current pricing via
+   * {@link WarmStorageService.getServicePrice} or see https://docs.filbeam.com for rates.
+   *
+   * @param dataSetId - The unique identifier of the data set to query
+   * @returns A promise that resolves to the data set statistics with remaining quotas as BigInt values
+   *
+   * @throws {Error} Throws an error if:
+   * - The data set is not found (404)
+   * - The API returns an invalid response format
+   * - Network or other HTTP errors occur
+   *
+   * @example
+   * ```typescript
+   * try {
+   *   const stats = await service.getDataSetStats('my-dataset-123')
+   *
+   *   // Display remaining quotas
+   *   console.log(`Remaining CDN Egress: ${stats.cdnEgressQuota} bytes`)
+   *   console.log(`Remaining Cache Miss: ${stats.cacheMissEgressQuota} bytes`)
+   * } catch (error) {
+   *   console.error('Failed to get stats:', error.message)
+   * }
+   * ```
+   */
+  async getDataSetStats(dataSetId: string | number): Promise<DataSetStats> {
+    const baseUrl = this._getStatsBaseUrl()
+    const url = `${baseUrl}/data-set/${dataSetId}`
+
+    const response = await this._fetch(url, {
+      method: 'GET',
+      headers: {
+        'Content-Type': 'application/json',
+      },
+    })
+
+    if (response.status === 404) {
+      throw createError('FilBeamService', 'getDataSetStats', `Data set not found: ${dataSetId}`)
+    }
+
+    if (response.status !== 200) {
+      const errorText = await response.text().catch(() => 'Unknown error')
+      throw createError(
+        'FilBeamService',
+        'getDataSetStats',
+        `HTTP ${response.status} ${response.statusText}: ${errorText}`
+      )
+    }
+
+    const data = await response.json()
+    const validated = this._validateStatsResponse(data)
+
+    return {
+      cdnEgressQuota: BigInt(validated.cdnEgressQuota),
+      cacheMissEgressQuota: BigInt(validated.cacheMissEgressQuota),
+    }
+  }
+}
diff --git a/packages/synapse-sdk/src/index.ts b/packages/synapse-sdk/src/index.ts
index ccf62dba0..2b4314693 100644
--- a/packages/synapse-sdk/src/index.ts
+++ b/packages/synapse-sdk/src/index.ts
@@ -1,26 +1,14 @@
 /**
- * Synapse SDK - Main entry point
+ * **Synapse SDK - Main entry point**
+ *
+ * @module Synapse
+ *
  * @example
- * ```ts
- * import { Synapse } from '@filoz/synapse-sdk'
+ * ```ts twoslash
+ * import { Synapse, RPC_URLS } from '@filoz/synapse-sdk'
  * ```
- *
- * @packageDocumentation
- * @module Synapse
- */
-
-/**
- * Synapse SDK main entry point
  */
 
-export * from './payments/index.ts'
-export * from './pdp/index.ts'
-export * from './session/index.ts'
-export * from './storage/index.ts'
-export * from './subgraph/index.ts'
 export { Synapse } from './synapse.ts'
-export * from './telemetry/index.ts'
 export * from './types.ts'
-export * from './utils/index.ts'
-export * from './warm-storage/index.ts'
+export * from './utils/constants.ts'
diff --git a/packages/synapse-sdk/src/payments/index.ts b/packages/synapse-sdk/src/payments/index.ts
index 4f6f9a57c..cbe4aba6e 100644
--- a/packages/synapse-sdk/src/payments/index.ts
+++ b/packages/synapse-sdk/src/payments/index.ts
@@ -1,9 +1,10 @@
 /**
- * Exports the PaymentsService and DepositOptions types
+ * Payments Service
  *
  * @module Payments
+ *
  * @example
- * ```ts
+ * ```ts twoslash
 * import { PaymentsService } from '@filoz/synapse-sdk/payments'
 * ```
 */
diff --git a/packages/synapse-sdk/src/payments/service.ts b/packages/synapse-sdk/src/payments/service.ts
index 12f82545a..58ece0699 100644
--- a/packages/synapse-sdk/src/payments/service.ts
+++ b/packages/synapse-sdk/src/payments/service.ts
@@ -1,8 +1,3 @@
-/**
- * PaymentsService - Consolidated interface for all Payments contract operations
- * along with some additional
token related utilities. - */ - import { ethers } from 'ethers' import type { RailInfo, SettlementResult, TokenAmount, TokenIdentifier } from '../types.ts' import { @@ -13,7 +8,6 @@ import { EIP2612_PERMIT_TYPES, getCurrentEpoch, getFilecoinNetworkType, - SETTLEMENT_FEE, TIMING_CONSTANTS, TOKENS, } from '../utils/index.ts' @@ -34,6 +28,9 @@ export interface DepositOptions { onDepositStarting?: () => void } +/** + * PaymentsService - Filecoin Pay client for managing deposits, approvals, and payment rails + */ export class PaymentsService { private readonly _provider: ethers.Provider private readonly _signer: ethers.Signer @@ -861,7 +858,7 @@ export class PaymentsService { /** * Settle a payment rail up to a specific epoch (sends a transaction) - * Note: This method automatically includes the required network fee (FIL) for burning + * * @param railId - The rail ID to settle * @param untilEpoch - The epoch to settle up to (must be <= current epoch; defaults to current). * Can be used for partial settlements to a past epoch. @@ -881,9 +878,7 @@ export class PaymentsService { const paymentsContract = this._getPaymentsContract() // Only set explicit nonce if NonceManager is disabled - const txOptions: any = { - value: SETTLEMENT_FEE, // Include the settlement fee (NETWORK_FEE in contract) as msg.value - } + const txOptions: any = {} if (this._disableNonceManager) { const currentNonce = await this._provider.getTransactionCount(signerAddress, 'pending') txOptions.nonce = currentNonce @@ -904,7 +899,7 @@ export class PaymentsService { /** * Get the expected settlement amounts for a rail (read-only simulation) - * Note: The actual settlement will require a network fee (FIL) to be sent with the transaction + * * @param railId - The rail ID to check * @param untilEpoch - The epoch to settle up to (must be <= current epoch; defaults to current). * Can be used to preview partial settlements to a past epoch. 
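+   *
+   * @example
+   * ```ts
+   * // Hedged sketch: preview a rail settlement read-only, without sending a
+   * // transaction; the rail ID 123 is illustrative.
+   * const preview = await payments.getSettlementAmounts(123)
+   * ```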
@@ -921,7 +916,6 @@ export class PaymentsService {
 
     try {
       // Use staticCall to simulate the transaction and get the return values
-      // Include the settlement fee (NETWORK_FEE in contract) in the simulation
       const result = await paymentsContract.settleRail.staticCall(railIdBigint, untilEpochBigint)
 
       return {
@@ -1026,7 +1020,7 @@ export class PaymentsService {
    * Automatically settle a rail, detecting whether it's terminated or active
    * This method checks the rail status and calls the appropriate settlement method:
    * - For terminated rails: calls settleTerminatedRail()
-   * - For active rails: calls settle() with optional untilEpoch (requires settlement fee)
+   * - For active rails: calls settle() with optional untilEpoch
    *
    * @param railId - The rail ID to settle
    * @param untilEpoch - The epoch to settle up to (must be <= current epoch for active rails; ignored for terminated rails)
diff --git a/packages/synapse-sdk/src/pdp/index.ts b/packages/synapse-sdk/src/pdp/index.ts
index b1c82db7e..445c31c5f 100644
--- a/packages/synapse-sdk/src/pdp/index.ts
+++ b/packages/synapse-sdk/src/pdp/index.ts
@@ -1,7 +1,6 @@
 /**
- * Exports the PDP components
+ * PDP components
  *
- * @packageDocumentation
  * @module PDP
  * @example
  * ```ts
diff --git a/packages/synapse-sdk/src/pdp/verifier.ts b/packages/synapse-sdk/src/pdp/verifier.ts
index e2ab60dd8..db98c968c 100644
--- a/packages/synapse-sdk/src/pdp/verifier.ts
+++ b/packages/synapse-sdk/src/pdp/verifier.ts
@@ -165,6 +165,16 @@ export class PDPVerifier {
     }
   }
 
+  /**
+   * Get pieces scheduled for removal from a data set
+   * @param dataSetId - The PDPVerifier data set ID
+   * @returns Array of piece IDs scheduled for removal
+   */
+  async getScheduledRemovals(dataSetId: number): Promise<number[]> {
+    const result = await this._contract.getScheduledRemovals(dataSetId)
+    return result.map((pieceId: bigint) => Number(pieceId))
+  }
+
   /**
    * Get the PDPVerifier contract address for the current network
    */
diff --git a/packages/synapse-sdk/src/session/index.ts b/packages/synapse-sdk/src/session/index.ts
index 560e0e8c1..df8337c05 100644
--- a/packages/synapse-sdk/src/session/index.ts
+++ b/packages/synapse-sdk/src/session/index.ts
@@ -1 +1,11 @@
+/**
+ * Session components
+ *
+ * @module Session
+ * @example
+ * ```ts
+ * import { SessionKey } from '@filoz/synapse-sdk/session'
+ * ```
+ */
+
 export * from './key.ts'
diff --git a/packages/synapse-sdk/src/sp-registry/index.ts b/packages/synapse-sdk/src/sp-registry/index.ts
index 9bef08df0..96c2a14dc 100644
--- a/packages/synapse-sdk/src/sp-registry/index.ts
+++ b/packages/synapse-sdk/src/sp-registry/index.ts
@@ -1,6 +1,11 @@
 /**
- * ServiceProviderRegistry module
- * @module sp-registry
+ * Service Provider Registry components
+ *
+ * @module SPRegistry
+ * @example
+ * ```ts
+ * import { SPRegistryService } from '@filoz/synapse-sdk/sp-registry'
+ * ```
  */
 
 export { SPRegistryService } from './service.ts'
diff --git a/packages/synapse-sdk/src/sp-registry/service.ts b/packages/synapse-sdk/src/sp-registry/service.ts
index d1481e361..c29d416dc 100644
--- a/packages/synapse-sdk/src/sp-registry/service.ts
+++ b/packages/synapse-sdk/src/sp-registry/service.ts
@@ -37,14 +37,16 @@ import type {
 export class SPRegistryService {
   private readonly _provider: ethers.Provider
+  private readonly _chainId: number | bigint
   private readonly _registryAddress: string
   private _registryContract: ethers.Contract | null = null
 
   /**
    * Constructor for SPRegistryService
    */
-  constructor(provider: ethers.Provider, registryAddress: string) {
+  constructor(provider:
ethers.Provider, chainId: number | bigint, registryAddress: string) {
     this._provider = provider
+    this._chainId = chainId
     this._registryAddress = registryAddress
   }
 
@@ -52,7 +54,8 @@ export class SPRegistryService {
    * Create a new SPRegistryService instance
    */
   static async create(provider: ethers.Provider, registryAddress: string): Promise<SPRegistryService> {
-    return new SPRegistryService(provider, registryAddress)
+    const network = await provider.getNetwork()
+    return new SPRegistryService(provider, network.chainId, registryAddress)
   }
 
   /**
@@ -401,7 +404,7 @@ export class SPRegistryService {
     const capabilities = capabilitiesListToObject(result.product.capabilityKeys, result.productCapabilityValues)
 
     return {
-      offering: decodePDPCapabilities(capabilities),
+      offering: await decodePDPCapabilities(BigInt(providerId), this._chainId, capabilities),
       capabilities,
       isActive: result.product.isActive,
     }
@@ -464,7 +467,7 @@ export class SPRegistryService {
     const results = await multicall.aggregate3.staticCall(calls)
 
     // Process results
-    return this._processMulticallResults(providerIds, results, iface)
+    return await this._processMulticallResults(providerIds, results, iface)
   }
 
   /**
@@ -491,7 +494,11 @@ export class SPRegistryService {
   /**
    * Process Multicall3 results into ProviderInfo array
    */
-  private _processMulticallResults(providerIds: number[], results: any[], iface: ethers.Interface): ProviderInfo[] {
+  private async _processMulticallResults(
+    providerIds: number[],
+    results: any[],
+    iface: ethers.Interface
+  ): Promise<ProviderInfo[]> {
     const providers: ProviderInfo[] = []
 
     for (let i = 0; i < providerIds.length; i++) {
@@ -512,7 +519,7 @@ export class SPRegistryService {
             type: 'PDP',
             isActive: product.isActive,
             capabilities,
-            data: decodePDPCapabilities(capabilities),
+            data: await decodePDPCapabilities(BigInt(providerIds[i]), this._chainId, capabilities),
           },
         ])
       if (providerInfo.serviceProvider === ethers.ZeroAddress) {
diff --git a/packages/synapse-sdk/src/storage/context.ts b/packages/synapse-sdk/src/storage/context.ts
index 015f02943..67254f8e6 100644
--- a/packages/synapse-sdk/src/storage/context.ts
+++ b/packages/synapse-sdk/src/storage/context.ts
@@ -26,7 +26,7 @@ import { asPieceCID } from '@filoz/synapse-core/piece'
 import * as SP from '@filoz/synapse-core/sp'
 import { randIndex, randU256 } from '@filoz/synapse-core/utils'
 import type { ethers } from 'ethers'
-import type { Hex } from 'viem'
+import type { Address, Hex } from 'viem'
 import type { PaymentsService } from '../payments/index.ts'
 import { PDPAuthHelper, PDPServer } from '../pdp/index.ts'
 import { PDPVerifier } from '../pdp/verifier.ts'
@@ -35,10 +35,11 @@ import type { ProviderInfo } from '../sp-registry/types.ts'
 import type { Synapse } from '../synapse.ts'
 import type {
   CreateContextsOptions,
+  DataSetInfo,
   DownloadOptions,
-  EnhancedDataSetInfo,
   MetadataEntry,
   PieceCID,
+  PieceRecord,
   PieceStatus,
   PreflightInfo,
   ProviderSelectionResult,
@@ -60,6 +61,7 @@ import { combineMetadata, metadataMatches, objectToEntries, validatePieceMetadat
 import type { WarmStorageService } from '../warm-storage/index.ts'
 
 const NO_REMAINING_PROVIDERS_ERROR_MESSAGE = 'No approved service providers available'
+const PRIME_ENDORSEMENTS: Address[] = ['0x2127C3a31F54B81B5E9AD1e29C36c420d3D6ecC5']
 
 export class StorageContext {
   private readonly _synapse: Synapse
@@ -71,6 +73,7 @@ export class StorageContext {
   private readonly _signer: ethers.Signer
   private readonly _uploadBatchSize: number
   private _dataSetId: number | undefined
+  private _clientDataSetId: bigint | undefined
private readonly _dataSetMetadata: Record<string, string>
 
   // AddPieces batching state
@@ -113,6 +116,23 @@ export class StorageContext {
     return this._dataSetId
   }
 
+  /**
+   * Get the client data set nonce ("clientDataSetId"), either from cache or by fetching from the chain
+   * @returns The client data set nonce
+   * @throws Error if the data set ID is not set
+   */
+  private async getClientDataSetId(): Promise<bigint> {
+    if (this._clientDataSetId !== undefined) {
+      return this._clientDataSetId
+    }
+    if (this.dataSetId == null) {
+      throw createError('StorageContext', 'getClientDataSetId', 'Data set not found')
+    }
+    const dataSetInfo = await this._warmStorageService.getDataSet(this.dataSetId)
+    this._clientDataSetId = dataSetInfo.clientDataSetId
+    return this._clientDataSetId
+  }
+
   /**
    * Validate data size against minimum and maximum limits
    * @param sizeBytes - Size of data in bytes
@@ -191,7 +211,7 @@
     const resolutions: ProviderSelectionResult[] = []
     const clientAddress = await synapse.getClient().getAddress()
     const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
-    const spRegistry = new SPRegistryService(synapse.getProvider(), registryAddress)
+    const spRegistry = new SPRegistryService(synapse.getProvider(), synapse.getChainId(), registryAddress)
     if (options.dataSetIds) {
       const selections = []
       for (const dataSetId of new Set(options.dataSetIds)) {
@@ -245,6 +265,7 @@
         warmStorageService,
         spRegistry,
         excludeProviderIds,
+        resolutions.length === 0 ? PRIME_ENDORSEMENTS : [],
         options.forceCreateDataSets ?? false,
         options.withIpni ?? false,
         options.dev ?? false
@@ -278,7 +299,7 @@
   ): Promise<StorageContext> {
     // Create SPRegistryService
     const registryAddress = warmStorageService.getServiceProviderRegistryAddress()
-    const spRegistry = new SPRegistryService(synapse.getProvider(), registryAddress)
+    const spRegistry = new SPRegistryService(synapse.getProvider(), synapse.getChainId(), registryAddress)
 
     // Resolve provider and data set based on options
     const resolution = await StorageContext.resolveProviderAndDataSet(synapse, warmStorageService, spRegistry, options)
@@ -375,6 +396,7 @@
       warmStorageService,
       spRegistry,
       options.excludeProviderIds ?? [],
+      PRIME_ENDORSEMENTS,
       options.forceCreateDataSet ?? false,
       options.withIpni ?? false,
      options.dev ??
false
@@ -388,50 +410,45 @@
     dataSetId: number,
     warmStorageService: WarmStorageService,
     spRegistry: SPRegistryService,
-    signerAddress: string,
+    clientAddress: string,
     options: StorageServiceOptions
   ): Promise<ProviderSelectionResult> {
-    // Fetch data sets to find the specific one
-    const dataSets = await warmStorageService.getClientDataSetsWithDetails(signerAddress)
-    const dataSet = dataSets.find((ds) => ds.pdpVerifierDataSetId === dataSetId)
+    const [dataSetInfo, dataSetMetadata] = await Promise.all([
+      warmStorageService.getDataSet(dataSetId).then(async (dataSetInfo) => {
+        await StorageContext.validateDataSetConsistency(dataSetInfo, options, spRegistry)
+        return dataSetInfo
+      }),
+      warmStorageService.getDataSetMetadata(dataSetId),
+      warmStorageService.validateDataSet(dataSetId),
+    ])
 
-    if (dataSet == null || !dataSet.isLive || !dataSet.isManaged) {
+    if (dataSetInfo.payer.toLowerCase() !== clientAddress.toLowerCase()) {
       throw createError(
         'StorageContext',
         'resolveByDataSetId',
-        `Data set ${dataSetId} not found, not owned by ${signerAddress}, ` +
-          'or not managed by the current WarmStorage contract'
+        `Data set ${dataSetId} is not owned by ${clientAddress} (owned by ${dataSetInfo.payer})`
       )
     }
 
-    // Validate consistency with other parameters if provided
-    if (options.providerId != null || options.providerAddress != null) {
-      await StorageContext.validateDataSetConsistency(dataSet, options, spRegistry)
-    }
-
-    // Look up provider by ID from the data set
-    const provider = await spRegistry.getProvider(dataSet.providerId)
+    const provider = await spRegistry.getProvider(dataSetInfo.providerId)
     if (provider == null) {
       throw createError(
         'StorageContext',
         'resolveByDataSetId',
-        `Provider ID ${dataSet.providerId} for data set ${dataSetId} not found in registry`
+        `Provider ID ${dataSetInfo.providerId} for data set ${dataSetId} not found in registry`
       )
     }
 
-    // Validate CDN settings match if specified
-    if (options.withCDN != null && dataSet.withCDN !== options.withCDN) {
+    const withCDN = dataSetInfo.cdnRailId > 0 && METADATA_KEYS.WITH_CDN in dataSetMetadata
+    if (options.withCDN != null && withCDN !== options.withCDN) {
       throw createError(
         'StorageContext',
         'resolveByDataSetId',
-        `Data set ${dataSetId} has CDN ${dataSet.withCDN ? 'enabled' : 'disabled'}, ` +
+        `Data set ${dataSetId} has CDN ${withCDN ? 'enabled' : 'disabled'}, ` +
          `but requested ${options.withCDN ? 'enabled' : 'disabled'}`
       )
     }
 
-    // Backfill data set metadata from chain
-    const dataSetMetadata = await warmStorageService.getDataSetMetadata(dataSetId)
-
     return {
       provider,
       dataSetId,
@@ -444,7 +461,7 @@
   * Validate data set consistency with provided options
   */
  private static async validateDataSetConsistency(
-    dataSet: EnhancedDataSetInfo,
+    dataSet: DataSetInfo,
     options: StorageServiceOptions,
     spRegistry: SPRegistryService
   ): Promise<void> {
@@ -454,8 +471,7 @@
        throw createError(
          'StorageContext',
          'validateDataSetConsistency',
-          `Data set ${dataSet.pdpVerifierDataSetId} belongs to provider ID ${dataSet.providerId}, ` +
-            `but provider ID ${options.providerId} was requested`
+          `Data set belongs to provider ID ${dataSet.providerId}, but provider ID ${options.providerId} was requested`
        )
      }
    }
@@ -471,8 +487,7 @@
        throw createError(
          'StorageContext',
          'validateDataSetConsistency',
-          `Data set ${dataSet.pdpVerifierDataSetId} belongs to provider ${actualProvider?.serviceProvider ??
'unknown'}, ` +
-            `but provider ${options.providerAddress} was requested`
+          `Data set belongs to provider ${actualProvider?.serviceProvider ?? 'unknown'}, but provider ${options.providerAddress} was requested`
        )
      }
    }
@@ -482,7 +497,7 @@
   * Resolve using a specific provider ID
   */
  private static async resolveByProviderId(
-    signerAddress: string,
+    clientAddress: string,
    providerId: number,
    requestedMetadata: Record<string, string>,
    warmStorageService: WarmStorageService,
@@ -492,7 +507,7 @@
    // Fetch provider (always) and dataSets (only if not forcing) in parallel
    const [provider, dataSets] = await Promise.all([
      spRegistry.getProvider(providerId),
-      forceCreateDataSet ? Promise.resolve(null) : warmStorageService.getClientDataSetsWithDetails(signerAddress),
+      forceCreateDataSet ? Promise.resolve(null) : warmStorageService.getClientDataSetsWithDetails(clientAddress),
    ])
 
    if (provider == null) {
@@ -557,7 +572,7 @@
    providerAddress: string,
    warmStorageService: WarmStorageService,
    spRegistry: SPRegistryService,
-    signerAddress: string,
+    clientAddress: string,
    requestedMetadata: Record<string, string>,
    forceCreateDataSet?: boolean
  ): Promise<ProviderSelectionResult> {
@@ -573,7 +588,7 @@
 
    // Use the providerId resolution logic
    return await StorageContext.resolveByProviderId(
-      signerAddress,
+      clientAddress,
      provider.id,
      requestedMetadata,
      warmStorageService,
@@ -587,11 +602,12 @@
   * Prioritizes existing data sets and provider health
   */
  private static async smartSelectProvider(
-    signerAddress: string,
+    clientAddress: string,
    requestedMetadata: Record<string, string>,
    warmStorageService: WarmStorageService,
    spRegistry: SPRegistryService,
    excludeProviderIds: number[],
+    preferEndorsements: Address[],
    forceCreateDataSet: boolean,
    withIpni: boolean,
    dev: boolean
  ): Promise<ProviderSelectionResult> {
@@ -601,7 +617,7 @@
    // 2. If no existing data sets, find a healthy provider
 
    // Get client's data sets
-    const dataSets = await warmStorageService.getClientDataSetsWithDetails(signerAddress)
+    const dataSets = await warmStorageService.getClientDataSetsWithDetails(clientAddress)
    const skipProviderIds = new Set(excludeProviderIds)
 
    // Filter for managed data sets with matching metadata
@@ -653,9 +669,9 @@
      }
    }
 
-    try {
-      const selectedProvider = await StorageContext.selectProviderWithPing(generateProviders())
+    const selectedProvider = await StorageContext.selectProviderWithPing(generateProviders())
 
+    if (selectedProvider != null) {
      // Find the first matching data set ID for this provider
      // Match by provider ID (stable identifier in the registry)
      const matchingDataSet = sorted.find((ps) => ps.providerId === selectedProvider.id)
@@ -677,9 +693,6 @@
          dataSetMetadata,
        }
      }
-    } catch (_error) {
-      console.warn('All providers from existing data sets failed health check.
Falling back to all providers.')
-      // Fall through to select from all approved providers below
    }
  }
 
@@ -698,8 +711,37 @@
      throw createError('StorageContext', 'smartSelectProvider', NO_REMAINING_PROVIDERS_ERROR_MESSAGE)
    }
 
-    // Random selection from all providers
-    const provider = await StorageContext.selectRandomProvider(allProviders)
+    let provider: ProviderInfo | null
+    if (preferEndorsements.length > 0) {
+      // Split providers according to whether they have all of the endorsements
+      const [otherProviders, endorsedProviders] = allProviders.reduce<[ProviderInfo[], ProviderInfo[]]>(
+        (results: [ProviderInfo[], ProviderInfo[]], provider: ProviderInfo) => {
+          results[
+            preferEndorsements.some(
+              (endorsement: Address) => endorsement in (provider.products.PDP?.data.endorsements ?? {})
+            )
+              ? 1
+              : 0
+          ].push(provider)
+          return results
+        },
+        [[], []]
+      )
+      provider =
+        (await StorageContext.selectRandomProvider(endorsedProviders)) ||
+        (await StorageContext.selectRandomProvider(otherProviders))
+    } else {
+      // Random selection from all providers
+      provider = await StorageContext.selectRandomProvider(allProviders)
+    }
+
+    if (provider == null) {
+      throw createError(
+        'StorageContext',
+        'selectProviderWithPing',
+        `All ${allProviders.length} providers failed health check. Storage may be temporarily unavailable.`
+      )
+    }
 
    return {
      provider,
@@ -716,9 +758,9 @@
   * @param dev - Include dev providers
   * @returns Selected provider
   */
-  private static async selectRandomProvider(providers: ProviderInfo[]): Promise<ProviderInfo> {
+  private static async selectRandomProvider(providers: ProviderInfo[]): Promise<ProviderInfo | null> {
    if (providers.length === 0) {
-      throw createError('StorageContext', 'selectRandomProvider', 'No providers available')
+      return null
    }
 
    // Create async generator that yields providers in random order
@@ -742,12 +784,9 @@
   * @returns The first provider that responds
   * @throws If all providers fail
   */
-  private static async selectProviderWithPing(providers: AsyncIterable<ProviderInfo>): Promise<ProviderInfo> {
-    let providerCount = 0
-
+  private static async selectProviderWithPing(providers: AsyncIterable<ProviderInfo>): Promise<ProviderInfo | null> {
    // Try providers in order until we find one that responds to ping
    for await (const provider of providers) {
-      providerCount++
      try {
        // Create a temporary PDPServer for this specific provider's endpoint
        if (!provider.products.PDP?.data.serviceURL) {
@@ -766,16 +805,7 @@
      }
    }
 
-    // All providers failed ping test
-    if (providerCount === 0) {
-      throw createError('StorageContext', 'selectProviderWithPing', 'No providers available to select from')
-    }
-
-    throw createError(
-      'StorageContext',
-      'selectProviderWithPing',
-      `All ${providerCount} providers failed health check. Storage may be temporarily unavailable.`
-    )
+    return null
  }
 
  /**
@@ -981,22 +1011,24 @@
    const pieceCids: PieceCID[] = batch.map((item) => item.pieceCid)
    const metadataArray: MetadataEntry[][] = batch.map((item) => item.metadata ??
[])
    const confirmedPieceIds: number[] = []
+    const addedPieceRecords = pieceCids.map((pieceCid) => ({ pieceCid }))
 
    if (this.dataSetId) {
-      const [, dataSetInfo] = await Promise.all([
+      const [, clientDataSetId] = await Promise.all([
        this._warmStorageService.validateDataSet(this.dataSetId),
-        this._warmStorageService.getDataSet(this.dataSetId),
+        this.getClientDataSetId(),
      ])
 
      // Add pieces to the data set
      const addPiecesResult = await this._pdpServer.addPieces(
        this.dataSetId, // PDPVerifier data set ID
-        dataSetInfo.clientDataSetId, // Client's dataset ID
+        clientDataSetId, // Client's dataset nonce
        pieceCids,
        metadataArray
      )
      // Notify callbacks with transaction
      batch.forEach((item) => {
+        item.callbacks?.onPiecesAdded?.(addPiecesResult.txHash as Hex, addedPieceRecords)
        item.callbacks?.onPieceAdded?.(addPiecesResult.txHash as Hex)
      })
      const addPiecesResponse = await SP.pollForAddPiecesStatus(addPiecesResult)
@@ -1004,7 +1036,12 @@
 
      // Handle transaction tracking if available
      confirmedPieceIds.push(...(addPiecesResponse.confirmedPieceIds ?? []))
+      const confirmedPieceRecords: PieceRecord[] = confirmedPieceIds.map((pieceId, index) => ({
+        pieceId,
+        pieceCid: pieceCids[index],
+      }))
      batch.forEach((item) => {
+        item.callbacks?.onPiecesConfirmed?.(this.dataSetId as number, confirmedPieceRecords)
        item.callbacks?.onPieceConfirmed?.(confirmedPieceIds)
      })
    } else {
@@ -1031,6 +1068,7 @@
        }
      )
      batch.forEach((item) => {
+        item.callbacks?.onPiecesAdded?.(createAndAddPiecesResult.txHash as Hex, addedPieceRecords)
        item.callbacks?.onPieceAdded?.(createAndAddPiecesResult.txHash as Hex)
      })
      const confirmedDataset = await SP.pollForDataSetCreationStatus(createAndAddPiecesResult)
@@ -1045,7 +1083,12 @@
 
      confirmedPieceIds.push(...(confirmedPieces.confirmedPieceIds ?? []))
+      const confirmedPieceRecords: PieceRecord[] = confirmedPieceIds.map((pieceId, index) => ({
+        pieceId,
+        pieceCid: pieceCids[index],
+      }))
      batch.forEach((item) => {
+        item.callbacks?.onPiecesConfirmed?.(this.dataSetId as number, confirmedPieceRecords)
        item.callbacks?.onPieceConfirmed?.(confirmedPieceIds)
      })
    }
@@ -1124,6 +1167,25 @@
    return pieces
  }
 
+  /**
+   * Get pieces scheduled for removal from this data set
+   * @returns Array of piece IDs scheduled for removal
+   */
+  async getScheduledRemovals(): Promise<number[]> {
+    if (this._dataSetId == null) {
+      return []
+    }
+
+    const pdpVerifierAddress = this._warmStorageService.getPDPVerifierAddress()
+    const pdpVerifier = new PDPVerifier(this._synapse.getProvider(), pdpVerifierAddress)
+
+    try {
+      return await pdpVerifier.getScheduledRemovals(this._dataSetId)
+    } catch (error) {
+      throw createError('StorageContext', 'getScheduledRemovals', 'Failed to get scheduled removals', error)
+    }
+  }
+
  /**
   * Get all active pieces for this data set as an async generator.
   * This provides lazy evaluation and better memory efficiency for large data sets.
@@ -1132,10 +1194,7 @@
   * @param options.signal - Optional AbortSignal to cancel the operation
   * @yields Object with pieceCid and pieceId - the piece ID is needed for certain operations like deletion
   */
-  async *getPieces(options?: {
-    batchSize?: number
-    signal?: AbortSignal
-  }): AsyncGenerator<{ pieceCid: PieceCID; pieceId: number }> {
+  async *getPieces(options?: { batchSize?: number; signal?: AbortSignal }): AsyncGenerator<PieceRecord> {
    if (this._dataSetId == null) {
      return
    }
@@ -1197,9 +1256,9 @@
      throw createError('StorageContext', 'deletePiece', 'Data set not found')
    }
    const pieceId = typeof piece === 'number' ? piece : await this._getPieceIdByCID(piece)
-    const dataSetInfo = await this._warmStorageService.getDataSet(this.dataSetId)
+    const clientDataSetId = await this.getClientDataSetId()
 
-    return this._pdpServer.deletePiece(this.dataSetId, dataSetInfo.clientDataSetId, pieceId)
+    return this._pdpServer.deletePiece(this.dataSetId, clientDataSetId, pieceId)
  }
 
  /**
diff --git a/packages/synapse-sdk/src/storage/index.ts b/packages/synapse-sdk/src/storage/index.ts
index f00b68926..25876e83e 100644
--- a/packages/synapse-sdk/src/storage/index.ts
+++ b/packages/synapse-sdk/src/storage/index.ts
@@ -1,14 +1,12 @@
 /**
- * Exports the Storage components
+ * Storage components
  *
- * @packageDocumentation
  * @module Storage
  * @example
  * ```ts
- * import { StorageContext, StorageManager, StorageService } from '@filoz/synapse-sdk/storage'
+ * import { StorageContext, StorageManager } from '@filoz/synapse-sdk/storage'
  * ```
  */
 
 export { StorageContext } from './context.ts'
 export { StorageManager } from './manager.ts'
-export { StorageService } from './service.ts'
diff --git a/packages/synapse-sdk/src/storage/manager.ts b/packages/synapse-sdk/src/storage/manager.ts
index d905c680c..b1fe24235 100644
--- a/packages/synapse-sdk/src/storage/manager.ts
+++ b/packages/synapse-sdk/src/storage/manager.ts
@@ -68,10 +68,8 @@ type CombinedCallbacks = StorageContextCallbacks & UploadCallbacks
 * 1. With explicit context: `{ context, callbacks?, metadata? }` - routes to context.upload()
 * 2. Auto-create context: `{ providerId?, dataSetId?, withCDN?, callbacks?, metadata? }` - creates/reuses context
 * 3. Use default context: `{ callbacks?, metadata?
}` - uses cached default context - * - * @internal This type is intentionally not exported as it's specific to StorageManager */ -interface StorageManagerUploadOptions extends StorageServiceOptions { +export interface StorageManagerUploadOptions extends StorageServiceOptions { // Multiple storage providers: if provided, all other context options are invalid contexts?: StorageContext[] @@ -516,7 +514,7 @@ export class StorageManager { // Create SPRegistryService to get providers const registryAddress = this._warmStorageService.getServiceProviderRegistryAddress() - const spRegistry = new SPRegistryService(this._synapse.getProvider(), registryAddress) + const spRegistry = new SPRegistryService(this._synapse.getProvider(), this._synapse.getChainId(), registryAddress) // Fetch all data in parallel for performance const [pricingData, approvedIds, allowances] = await Promise.all([ diff --git a/packages/synapse-sdk/src/storage/service.ts b/packages/synapse-sdk/src/storage/service.ts deleted file mode 100644 index ed4d2b2dc..000000000 --- a/packages/synapse-sdk/src/storage/service.ts +++ /dev/null @@ -1,7 +0,0 @@ -/** - * Compatibility wrapper for backwards compatibility - * @deprecated StorageService has been renamed to StorageContext. - * Import StorageContext from './context.ts' instead. - */ - -export { StorageContext as StorageService } from './context.ts' diff --git a/packages/synapse-sdk/src/subgraph/index.ts b/packages/synapse-sdk/src/subgraph/index.ts index f078d26c0..285effa93 100644 --- a/packages/synapse-sdk/src/subgraph/index.ts +++ b/packages/synapse-sdk/src/subgraph/index.ts @@ -1,7 +1,6 @@ /** - * Exports the Subgraph components + * Subgraph components * - * @packageDocumentation * @module Subgraph * @example * ```ts diff --git a/packages/synapse-sdk/src/synapse.ts b/packages/synapse-sdk/src/synapse.ts index cbf6099c9..dbdd21d87 100644 --- a/packages/synapse-sdk/src/synapse.ts +++ b/packages/synapse-sdk/src/synapse.ts @@ -1,13 +1,10 @@ -/** - * Main Synapse class for interacting with Filecoin storage and other on-chain services - */ - import { ethers } from 'ethers' +import { FilBeamService } from './filbeam/index.ts' import { PaymentsService } from './payments/index.ts' import { ChainRetriever, FilBeamRetriever, SubgraphRetriever } from './retriever/index.ts' import { SessionKey } from './session/key.ts' import { SPRegistryService } from './sp-registry/index.ts' -import type { StorageService } from './storage/index.ts' +import type { StorageContext } from './storage/index.ts' import { StorageManager } from './storage/manager.ts' import { SubgraphService } from './subgraph/service.ts' import type { TelemetryService } from './telemetry/service.ts' @@ -25,6 +22,9 @@ import type { import { CHAIN_IDS, CONTRACT_ADDRESSES, getFilecoinNetworkType } from './utils/index.ts' import { WarmStorageService } from './warm-storage/index.ts' +/** + * Class for interacting with Filecoin storage and other on-chain services + */ export class Synapse { private readonly _signer: ethers.Signer private readonly _network: FilecoinNetworkType @@ -35,6 +35,7 @@ export class Synapse { private readonly _warmStorageService: WarmStorageService private readonly _pieceRetriever: PieceRetriever private readonly _storageManager: StorageManager + private readonly _filbeamService: FilBeamService private _session: SessionKey | null = null /** @@ -142,7 +143,11 @@ export class Synapse { // Create SPRegistryService for use in retrievers const registryAddress = warmStorageService.getServiceProviderRegistryAddress() - const 
spRegistry = new SPRegistryService(provider, registryAddress)
+    const spRegistry = new SPRegistryService(
+      provider,
+      network === 'mainnet' ? CHAIN_IDS.mainnet : CHAIN_IDS.calibration,
+      registryAddress
+    )
 
    // Initialize piece retriever (use provided or create default)
    let pieceRetriever: PieceRetriever
@@ -166,6 +171,9 @@
      pieceRetriever = new FilBeamRetriever(baseRetriever, network)
    }
 
+    // Create FilBeamService
+    const filbeamService = new FilBeamService(network)
+
    // Create and initialize the global TelemetryService.
    // If telemetry is disabled, this will do nothing.
    await initGlobalTelemetry(options.telemetry || {}, { filecoinNetwork: network })
@@ -179,6 +187,7 @@
      warmStorageAddress,
      warmStorageService,
      pieceRetriever,
+      filbeamService,
      options.dev === false,
      options.withIpni
    )
@@ -194,6 +203,7 @@
    warmStorageAddress: string,
    warmStorageService: WarmStorageService,
    pieceRetriever: PieceRetriever,
+    filbeamService: FilBeamService,
    dev: boolean,
    withIpni?: boolean
  ) {
@@ -205,6 +215,7 @@
    this._warmStorageService = warmStorageService
    this._pieceRetriever = pieceRetriever
    this._warmStorageAddress = warmStorageAddress
+    this._filbeamService = filbeamService
    this._session = null
 
    // Initialize StorageManager
@@ -358,6 +369,15 @@
    return this._storageManager
  }
 
+  /**
+   * Gets the FilBeam service instance
+   *
+   * @returns The FilBeam service for interacting with FilBeam infrastructure
+   */
+  get filbeam(): FilBeamService {
+    return this._filbeamService
+  }
+
  /**
   * Create a storage service instance.
   *
@@ -384,7 +404,7 @@
   * })
   * ```
   */
-  async createStorage(options: StorageServiceOptions = {}): Promise<StorageService> {
+  async createStorage(options: StorageServiceOptions = {}): Promise<StorageContext> {
    // Use StorageManager to create context
    return await this._storageManager.createContext(options)
  }
@@ -436,7 +456,7 @@
    // Create SPRegistryService
    const registryAddress = this._warmStorageService.getServiceProviderRegistryAddress()
-    const spRegistry = new SPRegistryService(this._provider, registryAddress)
+    const spRegistry = new SPRegistryService(this._provider, this.getChainId(), registryAddress)
 
    let providerInfo: ProviderInfo | null
    if (typeof providerAddress === 'string') {
diff --git a/packages/synapse-sdk/src/telemetry/index.ts b/packages/synapse-sdk/src/telemetry/index.ts
index d83c6be17..c9864857c 100644
--- a/packages/synapse-sdk/src/telemetry/index.ts
+++ b/packages/synapse-sdk/src/telemetry/index.ts
@@ -1,8 +1,14 @@
 /**
- * Telemetry module exports
+ * Telemetry components
  *
  * Provides types for configuring telemetry and working with debug dumps.
  * The TelemetryService is accessed via synapse.telemetry getter.
+ *
+ * @module Telemetry
+ * @example
+ * ```ts
+ * import { getGlobalTelemetry, initGlobalTelemetry } from '@filoz/synapse-sdk/telemetry'
+ * ```
  */
 
 export { type DebugDump, type TelemetryConfig, TelemetryService } from './service.ts'
diff --git a/packages/synapse-sdk/src/test/filbeam-service.test.ts b/packages/synapse-sdk/src/test/filbeam-service.test.ts
new file mode 100644
index 000000000..c1dbd4af3
--- /dev/null
+++ b/packages/synapse-sdk/src/test/filbeam-service.test.ts
@@ -0,0 +1,187 @@
+import { expect } from 'chai'
+import { FilBeamService } from '../filbeam/service.ts'
+import type { FilecoinNetworkType } from '../types.ts'
+
+describe('FilBeamService', () => {
+  describe('network type validation', () => {
+    it('should throw error if network type not mainnet or calibration', () => {
+      try {
+        // @ts-expect-error
+        new FilBeamService('base-sepolia')
+      } catch (error: any) {
+        expect(error.message).to.include('Unsupported network type')
+      }
+    })
+  })
+
+  describe('URL construction', () => {
+    it('should use mainnet URL for mainnet network', () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {} as Response
+      }
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      const baseUrl = (service as any)._getStatsBaseUrl()
+      expect(baseUrl).to.equal('https://stats.filbeam.io')
+    })
+
+    it('should use calibration URL for calibration network', () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {} as Response
+      }
+      const service = new FilBeamService('calibration' as FilecoinNetworkType, mockFetch)
+
+      const baseUrl = (service as any)._getStatsBaseUrl()
+      expect(baseUrl).to.equal('https://calibration.stats.filbeam.io')
+    })
+  })
+
+  describe('getDataSetStats', () => {
+    it('should successfully fetch and parse remaining stats for mainnet', async () => {
+      const mockResponse = {
+        cdnEgressQuota: '217902493044',
+        cacheMissEgressQuota: '94243853808',
+      }
+
+      const mockFetch = async (input: string | URL | Request): Promise<Response> => {
+        expect(input).to.equal('https://stats.filbeam.io/data-set/test-dataset-id')
+        return {
+          status: 200,
+          statusText: 'OK',
+          json: async () => mockResponse,
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+      const result = await service.getDataSetStats('test-dataset-id')
+
+      expect(result).to.deep.equal({
+        cdnEgressQuota: BigInt('217902493044'),
+        cacheMissEgressQuota: BigInt('94243853808'),
+      })
+    })
+
+    it('should successfully fetch and parse remaining stats for calibration', async () => {
+      const mockResponse = {
+        cdnEgressQuota: '100000000000',
+        cacheMissEgressQuota: '50000000000',
+      }
+
+      const mockFetch = async (input: string | URL | Request): Promise<Response> => {
+        expect(input).to.equal('https://calibration.stats.filbeam.io/data-set/123')
+        return {
+          status: 200,
+          statusText: 'OK',
+          json: async () => mockResponse,
+        } as Response
+      }
+
+      const service = new FilBeamService('calibration' as FilecoinNetworkType, mockFetch)
+      const result = await service.getDataSetStats(123)
+
+      expect(result).to.deep.equal({
+        cdnEgressQuota: BigInt('100000000000'),
+        cacheMissEgressQuota: BigInt('50000000000'),
+      })
+    })
+
+    it('should handle 404 errors gracefully', async () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {
+          status: 404,
+          statusText: 'Not Found',
+          text: async () => 'Data set not found',
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      try {
+        await service.getDataSetStats('non-existent')
expect.fail('Should have thrown an error')
+      } catch (error: any) {
+        expect(error.message).to.include('Data set not found: non-existent')
+      }
+    })
+
+    it('should handle other HTTP errors', async () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {
+          status: 500,
+          statusText: 'Internal Server Error',
+          text: async () => 'Server error occurred',
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      try {
+        await service.getDataSetStats('test-dataset')
+        expect.fail('Should have thrown an error')
+      } catch (error: any) {
+        expect(error.message).to.include('HTTP 500 Internal Server Error')
+      }
+    })
+
+    it('should validate response is an object', async () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {
+          status: 200,
+          statusText: 'OK',
+          json: async () => null,
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      try {
+        await service.getDataSetStats('test-dataset')
+        expect.fail('Should have thrown an error')
+      } catch (error: any) {
+        expect(error.message).to.include('Response is not an object')
+      }
+    })
+
+    it('should validate cdnEgressQuota is present', async () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {
+          status: 200,
+          statusText: 'OK',
+          json: async () => ({
+            cacheMissEgressQuota: '12345',
+          }),
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      try {
+        await service.getDataSetStats('test-dataset')
+        expect.fail('Should have thrown an error')
+      } catch (error: any) {
+        expect(error.message).to.include('cdnEgressQuota must be a string')
+      }
+    })
+
+    it('should validate cacheMissEgressQuota is present', async () => {
+      const mockFetch = async (): Promise<Response> => {
+        return {
+          status: 200,
+          statusText: 'OK',
+          json: async () => ({
+            cdnEgressQuota: '12345',
+          }),
+        } as Response
+      }
+
+      const service = new FilBeamService('mainnet' as FilecoinNetworkType, mockFetch)
+
+      try {
+        await service.getDataSetStats('test-dataset')
+        expect.fail('Should have thrown an error')
+      } catch (error: any) {
+        expect(error.message).to.include('cacheMissEgressQuota must be a string')
+      }
+    })
+  })
+})
diff --git a/packages/synapse-sdk/src/test/metadata-selection.test.ts b/packages/synapse-sdk/src/test/metadata-selection.test.ts
index e1ebdc4c4..2ba5a5ff3 100644
--- a/packages/synapse-sdk/src/test/metadata-selection.test.ts
+++ b/packages/synapse-sdk/src/test/metadata-selection.test.ts
@@ -1,11 +1,12 @@
 /* globals describe it before after */
+
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { assert } from 'chai'
 import { ethers } from 'ethers'
 import { setup } from 'iso-web/msw'
 import { METADATA_KEYS } from '../utils/constants.ts'
 import { metadataMatches, withCDNToMetadata } from '../utils/metadata.ts'
 import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts'
 
 describe('Metadata-based Data Set Selection', () => {
   describe('Metadata Utilities', () => {
@@ -114,8 +115,8 @@ describe('Metadata-based Data Set Selection', () => {
     let warmStorageService: WarmStorageService
 
     before(async () => {
-      server = setup([])
-      await server.start({ quiet: true })
+      server = setup()
+      await server.start()
     })
 
     after(() => {
@@ -127,9 +128,9 @@ describe('Metadata-based Data Set Selection', () => {
 
       // Create custom preset that returns different metadata for different data sets
       const customPreset: any = {
...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, clientDataSets: () => [[1n, 2n, 3n]], // Provide base dataset info per dataset id getDataSet: (args: any) => { @@ -140,9 +141,9 @@ describe('Metadata-based Data Set Selection', () => { pdpRailId: 1n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.serviceProvider1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.serviceProvider1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 0n, pdpEndEpoch: 0n, @@ -158,9 +159,9 @@ describe('Metadata-based Data Set Selection', () => { pdpRailId: 2n, cacheMissRailId: 0n, cdnRailId: 100n, - payer: ADDRESSES.client1, - payee: ADDRESSES.serviceProvider1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.serviceProvider1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 1n, pdpEndEpoch: 0n, @@ -176,9 +177,9 @@ describe('Metadata-based Data Set Selection', () => { pdpRailId: 3n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.serviceProvider2, - serviceProvider: ADDRESSES.serviceProvider2, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.serviceProvider2, + serviceProvider: Mocks.ADDRESSES.serviceProvider2, commissionBps: 100n, clientDataSetId: 2n, pdpEndEpoch: 0n, @@ -224,7 +225,7 @@ describe('Metadata-based Data Set Selection', () => { }, }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getNextPieceId: (args: any) => { const [dataSetId] = args if (dataSetId === 1n) return [5n] as const // Has pieces @@ -235,14 +236,14 @@ describe('Metadata-based Data Set Selection', () => { }, } - server.use(JSONRPC(customPreset)) + server.use(Mocks.JSONRPC(customPreset)) const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) }) it('should fetch metadata for each data set', async () => { - const dataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1) + const dataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) assert.equal(dataSets.length, 3) @@ -263,7 +264,7 @@ describe('Metadata-based Data Set Selection', () => { }) it('should prefer data sets with matching metadata', async () => { - const dataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1) + const dataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) // Filter for data sets with withIPFSIndexing const withIndexing = dataSets.filter((ds) => diff --git a/packages/synapse-sdk/src/test/metadata.test.ts b/packages/synapse-sdk/src/test/metadata.test.ts index 7b29583c2..12de389c7 100644 --- a/packages/synapse-sdk/src/test/metadata.test.ts +++ b/packages/synapse-sdk/src/test/metadata.test.ts @@ -1,5 +1,6 @@ /* globals describe it before after beforeEach */ +import * as Mocks from '@filoz/synapse-core/mocks' import { asPieceCID } from '@filoz/synapse-core/piece' import { assert } from 'chai' import { ethers } from 'ethers' @@ -8,15 +9,9 @@ import { PDPAuthHelper } from '../pdp/auth.ts' import { PDPServer } from '../pdp/server.ts' 
import type { MetadataEntry } from '../types.ts' import { METADATA_KEYS } from '../utils/constants.ts' -import { - addPiecesWithMetadataCapture, - createDataSetWithMetadataCapture, - type MetadataCapture, - type PieceMetadataCapture, -} from './mocks/pdp/handlers.ts' // Mock server for testing -const server = setup([]) +const server = setup() describe('Metadata Support', () => { const TEST_PRIVATE_KEY = '0x0101010101010101010101010101010101010101010101010101010101010101' @@ -28,7 +23,7 @@ describe('Metadata Support', () => { let pdpServer: PDPServer before(async () => { - await server.start({ quiet: true }) + await server.start() }) after(() => { @@ -52,10 +47,10 @@ describe('Metadata Support', () => { ] const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' - let capturedMetadata: MetadataCapture | null = null + let capturedMetadata: Mocks.pdp.MetadataCapture | null = null server.use( - createDataSetWithMetadataCapture( + Mocks.pdp.createDataSetWithMetadataCapture( mockTxHash, (metadata) => { capturedMetadata = metadata @@ -90,10 +85,10 @@ describe('Metadata Support', () => { const dataSetId = 123 const mockTxHash = '0x1234567890abcdef' - let capturedPieceMetadata: PieceMetadataCapture | null = null + let capturedPieceMetadata: Mocks.pdp.PieceMetadataCapture | null = null server.use( - addPiecesWithMetadataCapture( + Mocks.pdp.addPiecesWithMetadataCapture( dataSetId, mockTxHash, (metadata) => { @@ -138,10 +133,10 @@ describe('Metadata Support', () => { describe('Backward Compatibility', () => { it('should convert withCDN boolean to metadata', async () => { const mockTxHash = '0xabcdef1234567890' - let capturedMetadata: MetadataCapture | null = null + let capturedMetadata: Mocks.pdp.MetadataCapture | null = null server.use( - createDataSetWithMetadataCapture( + Mocks.pdp.createDataSetWithMetadataCapture( mockTxHash, (metadata) => { capturedMetadata = metadata diff --git a/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js b/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js index 723b0714c..558540fa5 100644 --- a/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js +++ b/packages/synapse-sdk/src/test/mocks/mockServiceWorker.js @@ -7,8 +7,8 @@ * - Please do NOT modify this file. */ -const PACKAGE_VERSION = '2.10.5' -const INTEGRITY_CHECKSUM = 'f5825c521429caf22a4dd13b66e243af' +const PACKAGE_VERSION = '2.12.4' +const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82' const IS_MOCKED_RESPONSE = Symbol('isMockedResponse') const activeClientIds = new Set() @@ -71,11 +71,6 @@ addEventListener('message', async function (event) { break } - case 'MOCK_DEACTIVATE': { - activeClientIds.delete(clientId) - break - } - case 'CLIENT_CLOSED': { activeClientIds.delete(clientId) @@ -94,6 +89,8 @@ addEventListener('message', async function (event) { }) addEventListener('fetch', function (event) { + const requestInterceptedAt = Date.now() + // Bypass navigation requests. if (event.request.mode === 'navigate') { return @@ -110,23 +107,29 @@ addEventListener('fetch', function (event) { // Bypass all requests when there are no active clients. // Prevents the self-unregistered worked from handling requests - // after it's been deleted (still remains active until the next reload). + // after it's been terminated (still remains active until the next reload). 
if (activeClientIds.size === 0) {
     return
   }
 
   const requestId = crypto.randomUUID()
-  event.respondWith(handleRequest(event, requestId))
+  event.respondWith(handleRequest(event, requestId, requestInterceptedAt))
 })
 
 /**
  * @param {FetchEvent} event
  * @param {string} requestId
+ * @param {number} requestInterceptedAt
  */
-async function handleRequest(event, requestId) {
+async function handleRequest(event, requestId, requestInterceptedAt) {
   const client = await resolveMainClient(event)
   const requestCloneForEvents = event.request.clone()
-  const response = await getResponse(event, client, requestId)
+  const response = await getResponse(
+    event,
+    client,
+    requestId,
+    requestInterceptedAt,
+  )
 
   // Send back the response clone for the "response:*" life-cycle events.
   // Ensure MSW is active and ready to handle the message, otherwise
@@ -202,9 +205,10 @@ async function resolveMainClient(event) {
  * @param {FetchEvent} event
  * @param {Client | undefined} client
  * @param {string} requestId
+ * @param {number} requestInterceptedAt
  * @returns {Promise<Response>}
  */
-async function getResponse(event, client, requestId) {
+async function getResponse(event, client, requestId, requestInterceptedAt) {
   // Clone the request because it might've been already used
   // (i.e. its body has been read and sent to the client).
   const requestClone = event.request.clone()
@@ -255,6 +259,7 @@ async function getResponse(event, client, requestId) {
         type: 'REQUEST',
         payload: {
           id: requestId,
+          interceptedAt: requestInterceptedAt,
           ...serializedRequest,
         },
       },
diff --git a/packages/synapse-sdk/src/test/payments.test.ts b/packages/synapse-sdk/src/test/payments.test.ts
index 5931edc0e..9936a73b4 100644
--- a/packages/synapse-sdk/src/test/payments.test.ts
+++ b/packages/synapse-sdk/src/test/payments.test.ts
@@ -4,25 +4,25 @@
  * Tests for PaymentsService class
  */
 
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { assert } from 'chai'
 import { ethers } from 'ethers'
 import { setup } from 'iso-web/msw'
 import { PaymentsService } from '../payments/index.ts'
 import { TIME_CONSTANTS, TOKENS } from '../utils/index.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts'
 
 // mock server for testing
-const server = setup([])
+const server = setup()
 
 describe('PaymentsService', () => {
   let provider: ethers.Provider
   let signer: ethers.Signer
   let payments: PaymentsService
-  const paymentsAddress = ADDRESSES.calibration.payments
-  const usdfcAddress = ADDRESSES.calibration.usdfcToken
+  const paymentsAddress = Mocks.ADDRESSES.calibration.payments
+  const usdfcAddress = Mocks.ADDRESSES.calibration.usdfcToken
 
   before(async () => {
-    await server.start({ quiet: true })
+    await server.start()
   })
 
   after(() => {
@@ -32,7 +32,7 @@ describe('PaymentsService', () => {
   beforeEach(() => {
     server.resetHandlers()
     provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
-    signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+    signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
     payments = new PaymentsService(provider, signer, paymentsAddress, usdfcAddress, false)
   })
 
@@ -49,19 +49,19 @@ describe('PaymentsService', () => {
   describe('walletBalance', () => {
     it('should return FIL balance when no token specified', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const balance = await payments.walletBalance()
       assert.equal(balance.toString(), ethers.parseEther('100').toString())
     })
 
     it('should return FIL balance when FIL token specified', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const balance = await payments.walletBalance(TOKENS.FIL)
       assert.equal(balance.toString(), ethers.parseEther('100').toString())
     })
 
     it('should return USDFC balance when USDFC specified', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const balance = await payments.walletBalance(TOKENS.USDFC)
       assert.equal(balance.toString(), ethers.parseUnits('1000', 18).toString())
     })
@@ -78,7 +78,7 @@ describe('PaymentsService', () => {
 
   describe('balance', () => {
     it('should return USDFC balance from payments contract', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const balance = await payments.balance()
       // Should return available funds (500 USDFC - 0 locked = 500)
       assert.equal(balance.toString(), ethers.parseUnits('500', 18).toString())
@@ -107,13 +107,13 @@ describe('PaymentsService', () => {
 
   describe('Token operations', () => {
     it('should check allowance for USDFC', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const allowance = await payments.allowance(paymentsAddress)
       assert.equal(allowance.toString(), '0')
     })
 
     it('should approve token spending', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const amount = ethers.parseUnits('100', 18)
       const tx = await payments.approve(paymentsAddress, amount)
       assert.exists(tx)
@@ -144,7 +144,7 @@ describe('PaymentsService', () => {
     const serviceAddress = '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4'
 
     it('should approve service as operator', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const rateAllowance = ethers.parseUnits('10', 18) // 10 USDFC per epoch
       const lockupAllowance = ethers.parseUnits('1000', 18) // 1000 USDFC lockup
 
@@ -160,7 +160,7 @@ describe('PaymentsService', () => {
     })
 
     it('should revoke service operator approval', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const tx = await payments.revokeService(serviceAddress)
       assert.exists(tx)
       assert.exists(tx.hash)
@@ -168,7 +168,7 @@ describe('PaymentsService', () => {
     })
 
     it('should check service approval status', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const approval = await payments.serviceApproval(serviceAddress)
       assert.exists(approval)
       assert.exists(approval.isApproved)
@@ -206,8 +206,8 @@ describe('PaymentsService', () => {
   describe('Error handling', () => {
     it('should throw errors from payment operations', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           eth_sendRawTransaction: () => {
             throw new Error('Transaction failed')
           },
@@ -228,7 +228,7 @@ describe('PaymentsService', () => {
   describe('Deposit and Withdraw', () => {
     it('should deposit USDFC tokens', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const depositAmount = ethers.parseUnits('100', 18)
       const tx = await payments.deposit(depositAmount)
       assert.exists(tx)
@@ -240,7 +240,7 @@ describe('PaymentsService', () => {
     })
 
     it('should deposit with permit', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const depositAmount = ethers.parseUnits('10', 18)
       const tx = await payments.depositWithPermit(depositAmount)
       assert.exists(tx)
@@ -248,7 +248,7 @@ describe('PaymentsService', () => {
     })
 
     it('should deposit with permit and approve operator', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const depositAmount = ethers.parseUnits('10', 18)
       const operator = '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4'
       const rateAllowance = ethers.parseUnits('5', 18)
@@ -267,7 +267,7 @@ describe('PaymentsService', () => {
     })
 
     it('should withdraw USDFC tokens', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const withdrawAmount = ethers.parseUnits('50', 18)
       const tx = await payments.withdraw(withdrawAmount)
       assert.exists(tx)
@@ -315,7 +315,7 @@ describe('PaymentsService', () => {
     })
 
     it('should handle deposit callbacks', async () => {
-      server.use(JSONRPC(presets.basic))
+      server.use(Mocks.JSONRPC(Mocks.presets.basic))
       const depositAmount = ethers.parseUnits('100', 18)
       let allowanceChecked = false
       let approvalSent = false
@@ -353,7 +353,7 @@ describe('PaymentsService', () => {
   describe('Rail Settlement Features', () => {
     describe('getRailsAsPayer', () => {
       it('should return rails where wallet is payer', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const rails = await payments.getRailsAsPayer()
         assert.isArray(rails)
         assert.equal(rails.length, 2)
@@ -374,7 +374,7 @@ describe('PaymentsService', () => {
 
     describe('getRailsAsPayee', () => {
      it('should return rails where wallet is payee', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const rails = await payments.getRailsAsPayee()
         assert.isArray(rails)
         assert.equal(rails.length, 1)
@@ -393,21 +393,9 @@ describe('PaymentsService', () => {
       })
     })
 
-    describe('SETTLEMENT_FEE constant', () => {
-      it('should have correct settlement fee value', () => {
-        // Import the constant
-        const { SETTLEMENT_FEE } = require('../utils/constants.ts')
-
-        assert.exists(SETTLEMENT_FEE)
-        assert.typeOf(SETTLEMENT_FEE, 'bigint')
-        // Settlement fee should be 0.0013 FIL (1300000000000000 attoFIL)
-        assert.equal(SETTLEMENT_FEE, 1300000000000000n)
-      })
-    })
-
     describe('settle', () => {
       it('should settle a rail up to current epoch', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123
 
         const tx = await payments.settle(railId)
@@ -417,13 +405,10 @@ describe('PaymentsService', () => {
         assert.exists(tx.from)
         assert.exists(tx.to)
         assert.exists(tx.data)
-        // Check that the transaction includes the network fee as value
-        assert.exists(tx.value)
-        assert.isTrue(tx.value > 0n)
       })
 
       it('should settle a rail up to specific epoch', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123
         const untilEpoch = 999999
         const tx = await payments.settle(railId, untilEpoch)
@@ -434,7 +419,7 @@ describe('PaymentsService', () => {
       })
 
       it('should accept bigint rail ID', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123n
 
         const tx = await payments.settle(railId)
@@ -446,7 +431,7 @@ describe('PaymentsService', () => {
 
     describe('getSettlementAmounts', () => {
       it('should get settlement amounts for a rail', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123
 
         const result = await payments.getSettlementAmounts(railId)
@@ -468,7 +453,7 @@ describe('PaymentsService', () => {
     describe('settleTerminatedRail', () => {
       it('should settle a terminated rail', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 456
 
         const tx = await payments.settleTerminatedRail(railId)
@@ -481,7 +466,7 @@ describe('PaymentsService', () => {
       })
 
       it('should accept bigint rail ID', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 456n
 
         const tx = await payments.settleTerminatedRail(railId)
@@ -493,7 +478,7 @@ describe('PaymentsService', () => {
 
     describe('getRail', () => {
       it('should get detailed rail information', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123
 
         const rail = await payments.getRail(railId)
@@ -512,7 +497,7 @@ describe('PaymentsService', () => {
         assert.exists(rail.serviceFeeRecipient)
 
         // Check values from mock
-        assert.equal(rail.from.toLowerCase(), ADDRESSES.client1.toLowerCase())
+        assert.equal(rail.from.toLowerCase(), Mocks.ADDRESSES.client1.toLowerCase())
         assert.equal(rail.to.toLowerCase(), '0xaabbccddaabbccddaabbccddaabbccddaabbccdd')
         assert.equal(rail.operator, '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4')
         assert.equal(rail.paymentRate.toString(), ethers.parseUnits('1', 18).toString())
@@ -523,7 +508,7 @@ describe('PaymentsService', () => {
       })
 
       it('should accept bigint rail ID', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123n
 
         const rail = await payments.getRail(railId)
@@ -535,7 +520,7 @@ describe('PaymentsService', () => {
 
     describe('settleAuto', () => {
       it('should settle active rail using regular settle', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123 // This rail has endEpoch = 0 (active)
 
         const tx = await payments.settleAuto(railId)
@@ -543,25 +528,22 @@ describe('PaymentsService', () => {
         assert.exists(tx)
         assert.exists(tx.hash)
         assert.typeOf(tx.hash, 'string')
-        // Check that the transaction includes the settlement fee as value
-        assert.exists(tx.value)
-        assert.isTrue(tx.value > 0n)
       })
 
       it('should settle terminated rail using settleTerminatedRail', async () => {
         const railId = 456
         server.use(
-          JSONRPC({
-            ...presets.basic,
+          Mocks.JSONRPC({
+            ...Mocks.presets.basic,
             payments: {
-              ...presets.basic.payments,
+              ...Mocks.presets.basic.payments,
               getRail: (args) => {
                 const [railIdArg] = args
                 if (railIdArg === 456n) {
                   return [
                     {
-                      token: ADDRESSES.calibration.usdfcToken,
-                      from: ADDRESSES.client1,
+                      token: Mocks.ADDRESSES.calibration.usdfcToken,
+                      from: Mocks.ADDRESSES.client1,
                       to: '0xaabbccddaabbccddaabbccddaabbccddaabbccdd',
                       operator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
                       validator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
@@ -575,7 +557,7 @@ describe('PaymentsService', () => {
                     },
                   ]
                 }
-                return presets.basic.payments.getRail?.(args) ?? presets.basic.payments.getRail(args)
+                return Mocks.presets.basic.payments.getRail?.(args) ?? Mocks.presets.basic.payments.getRail(args)
               },
             },
           })
@@ -591,7 +573,7 @@ describe('PaymentsService', () => {
       })
 
       it('should pass untilEpoch parameter to settle for active rails', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123
         const untilEpoch = 999999
         const tx = await payments.settleAuto(railId, untilEpoch)
@@ -599,12 +581,10 @@ describe('PaymentsService', () => {
         assert.exists(tx)
         assert.exists(tx.hash)
         assert.typeOf(tx.hash, 'string')
-        assert.exists(tx.value)
-        assert.isTrue(tx.value > 0n)
       })
 
       it('should accept bigint rail ID', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const railId = 123n
 
         const tx = await payments.settleAuto(railId)
@@ -616,17 +596,17 @@ describe('PaymentsService', () => {
       it('should ignore untilEpoch for terminated rails', async () => {
         const railId = 456
         server.use(
-          JSONRPC({
-            ...presets.basic,
+          Mocks.JSONRPC({
+            ...Mocks.presets.basic,
             payments: {
-              ...presets.basic.payments,
+              ...Mocks.presets.basic.payments,
               getRail: (args) => {
                 const [railIdArg] = args
                 if (railIdArg === 456n) {
                   return [
                     {
-                      token: ADDRESSES.calibration.usdfcToken,
-                      from: ADDRESSES.client1,
+                      token: Mocks.ADDRESSES.calibration.usdfcToken,
+                      from: Mocks.ADDRESSES.client1,
                       to: '0xaabbccddaabbccddaabbccddaabbccddaabbccdd',
                       operator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
                       validator: '0x394feCa6bCB84502d93c0c5C03c620ba8897e8f4',
@@ -640,7 +620,7 @@ describe('PaymentsService', () => {
                     },
                   ]
                 }
-                return presets.basic.payments.getRail?.(args) ?? presets.basic.payments.getRail(args)
+                return Mocks.presets.basic.payments.getRail?.(args) ?? Mocks.presets.basic.payments.getRail(args)
               },
             },
           })
@@ -660,7 +640,7 @@ describe('PaymentsService', () => {
   describe('Enhanced Payment Features', () => {
     describe('accountInfo', () => {
       it('should return detailed account information with correct fields', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const info = await payments.accountInfo()
 
         assert.exists(info.funds)
@@ -677,11 +657,11 @@ describe('PaymentsService', () => {
 
       it('should calculate available funds correctly with time-based lockup', async () => {
         server.use(
-          JSONRPC({
-            ...presets.basic,
+          Mocks.JSONRPC({
+            ...Mocks.presets.basic,
             eth_blockNumber: '0xf4240', // 1000000 in hex - matches lockupLastSettledAt calculation
             payments: {
-              ...presets.basic.payments,
+              ...Mocks.presets.basic.payments,
               accounts: (_args) => {
                 // args should be [token, owner]
                 return [
@@ -705,7 +685,7 @@ describe('PaymentsService', () => {
       })
 
       it('should use accountInfo in balance() method', async () => {
-        server.use(JSONRPC(presets.basic))
+        server.use(Mocks.JSONRPC(Mocks.presets.basic))
         const balance = await payments.balance()
         const info = await payments.accountInfo()
diff --git a/packages/synapse-sdk/src/test/pdp-server.test.ts b/packages/synapse-sdk/src/test/pdp-server.test.ts
index 85e681abe..c014ec1e8 100644
--- a/packages/synapse-sdk/src/test/pdp-server.test.ts
+++ b/packages/synapse-sdk/src/test/pdp-server.test.ts
@@ -15,6 +15,7 @@ import {
   LocationHeaderError,
   PostPieceError,
 } from '@filoz/synapse-core/errors'
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { asPieceCID, calculate as calculatePieceCID } from '@filoz/synapse-core/piece'
 import * as SP from '@filoz/synapse-core/sp'
 import { assert } from 'chai'
@@ -23,16 +24,9 @@ import { setup } from 'iso-web/msw'
 import { HttpResponse, http } from 'msw'
 import { PDPAuthHelper, PDPServer } from '../pdp/index.ts'
 import type { PDPAddPiecesInput } from '../pdp/server.ts'
-import {
-  createAndAddPiecesHandler,
-  finalizePieceUploadHandler,
-  findPieceHandler,
-  postPieceUploadsHandler,
-  uploadPieceStreamingHandler,
-} from './mocks/pdp/handlers.ts'
 
 // mock server for testing
-const server = setup([])
+const server = setup()
 
 describe('PDPServer', () => {
   let pdpServer: PDPServer
@@ -45,7 +39,7 @@ describe('PDPServer', () => {
   const TEST_CHAIN_ID = 31337
 
   before(async () => {
-    await server.start({ quiet: true })
+    await server.start()
   })
 
   after(() => {
@@ -285,7 +279,7 @@ InvalidSignature(address expected, address actual)
       const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
       const validPieceCid = ['bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy']
 
-      server.use(createAndAddPiecesHandler(mockTxHash))
+      server.use(Mocks.pdp.createAndAddPiecesHandler(mockTxHash))
 
       const result = await pdpServer.createAndAddPieces(
         0n,
@@ -641,7 +635,7 @@ Database error`
     it('should find a piece successfully', async () => {
       const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
 
-      server.use(findPieceHandler(mockPieceCid, true))
+      server.use(Mocks.pdp.findPieceHandler(mockPieceCid, true))
 
       const result = await pdpServer.findPiece(mockPieceCid)
       assert.strictEqual(result.pieceCid.toString(), mockPieceCid)
@@ -651,7 +645,7 @@ Database error`
       SP.setTimeout(100)
       const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
 
-      server.use(findPieceHandler(mockPieceCid, false))
+      server.use(Mocks.pdp.findPieceHandler(mockPieceCid, false))
 
       try {
         await pdpServer.findPiece(mockPieceCid)
@@ -705,6 +699,27 @@ Database error`
         )
       }
     })
+
+    it('should retry on 202 status and eventually succeed', async () => {
+      SP.setTimeout(10000) // Set shorter timeout for test
+      const mockPieceCid = 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy'
+      let attemptCount = 0
+
+      server.use(
+        http.get('http://pdp.local/pdp/piece', async () => {
+          attemptCount++
+          // Return 202 for first 2 attempts, then 200
+          if (attemptCount < 3) {
+            return HttpResponse.json({ message: 'Processing' }, { status: 202 })
+          }
+          return HttpResponse.json({ pieceCid: mockPieceCid }, { status: 200 })
+        })
+      )
+
+      const result = await pdpServer.findPiece(mockPieceCid)
+      assert.strictEqual(result.pieceCid.toString(), mockPieceCid)
+      assert.isAtLeast(attemptCount, 3, 'Should have retried at least 3 times')
+    })
   })
 
   describe('getPieceStatus', () => {
@@ -880,9 +895,9 @@ Database error`
       assert.isNotNull(mockPieceCid)
 
       server.use(
-        postPieceUploadsHandler(mockUuid),
-        uploadPieceStreamingHandler(mockUuid),
-        finalizePieceUploadHandler(mockUuid)
+        Mocks.pdp.postPieceUploadsHandler(mockUuid),
+        Mocks.pdp.uploadPieceStreamingHandler(mockUuid),
+        Mocks.pdp.finalizePieceUploadHandler(mockUuid)
       )
 
       await pdpServer.uploadPiece(testData)
@@ -897,8 +912,8 @@ Database error`
       let finalizedWithPieceCid: string | null = null
 
       server.use(
-        postPieceUploadsHandler(mockUuid),
-        uploadPieceStreamingHandler(mockUuid),
+        Mocks.pdp.postPieceUploadsHandler(mockUuid),
+        Mocks.pdp.uploadPieceStreamingHandler(mockUuid),
         http.post<{ uuid: string }, { pieceCid: string }>(
           'http://pdp.local/pdp/piece/uploads/:uuid',
           async ({ request }) => {
diff --git a/packages/synapse-sdk/src/test/pdp-verifier.test.ts b/packages/synapse-sdk/src/test/pdp-verifier.test.ts
index ec37691f7..be58c0a0b 100644
--- a/packages/synapse-sdk/src/test/pdp-verifier.test.ts
+++ b/packages/synapse-sdk/src/test/pdp-verifier.test.ts
@@ -4,22 +4,22 @@
  * Tests for PDPVerifier class
 */
 
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { calculate } from '@filoz/synapse-core/piece'
 import { assert } from 'chai'
 import { ethers } from 'ethers'
 import { setup } from 'iso-web/msw'
 import { PDPVerifier } from '../pdp/index.ts'
-import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts'
 
-const server = setup([])
+const server = setup()
 
 describe('PDPVerifier', () => {
   let provider: ethers.Provider
   let pdpVerifier: PDPVerifier
-  const testAddress = ADDRESSES.calibration.pdpVerifier
+  const testAddress = Mocks.ADDRESSES.calibration.pdpVerifier
 
   before(async () => {
-    await server.start({ quiet: true })
+    await server.start()
   })
 
   after(() => {
@@ -28,7 +28,7 @@ describe('PDPVerifier', () => {
   beforeEach(() => {
     server.resetHandlers()
-    server.use(JSONRPC(presets.basic))
+    server.use(Mocks.JSONRPC(Mocks.presets.basic))
     provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
     pdpVerifier = new PDPVerifier(provider, testAddress)
   })
@@ -59,10 +59,10 @@ describe('PDPVerifier', () => {
   describe('getNextPieceId', () => {
     it('should get next piece ID', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getNextPieceId: () => [5n],
           },
         })
@@ -76,7 +76,7 @@ describe('PDPVerifier', () => {
   describe('getDataSetListener', () => {
     it('should get data set listener', async () => {
       const listener = await pdpVerifier.getDataSetListener(123)
-      assert.equal(listener.toLowerCase(), ADDRESSES.calibration.warmStorage.toLowerCase())
+      assert.equal(listener.toLowerCase(), Mocks.ADDRESSES.calibration.warmStorage.toLowerCase())
     })
   })
 
@@ -86,10 +86,10 @@ describe('PDPVerifier', () => {
       const proposedStorageProvider = '0xabcdef1234567890123456789012345678901234'
 
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getDataSetStorageProvider: () => [storageProvider, proposedStorageProvider],
           },
         })
@@ -104,10 +104,10 @@ describe('PDPVerifier', () => {
   describe('getDataSetLeafCount', () => {
     it('should get data set leaf count', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getDataSetLeafCount: () => [10n],
           },
         })
@@ -184,10 +184,10 @@ describe('PDPVerifier', () => {
       const pieceCidHex = ethers.hexlify(pieceCid.bytes)
 
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getActivePieces: () => [[{ data: pieceCidHex as `0x${string}` }], [1n], false],
           },
         })
@@ -207,4 +207,41 @@ describe('PDPVerifier', () => {
       assert.equal(address, testAddress)
     })
   })
+
+  describe('getScheduledRemovals', () => {
+    it('should get scheduled removals for a data set', async () => {
+      server.use(
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          pdpVerifier: {
+            ...Mocks.presets.basic.pdpVerifier,
+            getScheduledRemovals: () => [[1n, 2n, 5n]],
+          },
+        })
+      )
+
+      const scheduledRemovals = await pdpVerifier.getScheduledRemovals(123)
+      assert.isArray(scheduledRemovals)
+      assert.equal(scheduledRemovals.length, 3)
+      assert.equal(scheduledRemovals[0], 1)
+      assert.equal(scheduledRemovals[1], 2)
+      assert.equal(scheduledRemovals[2], 5)
+    })
+
+    it('should return empty array when no removals scheduled', async () => {
+      server.use(
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          pdpVerifier: {
+            ...Mocks.presets.basic.pdpVerifier,
+            getScheduledRemovals: () => [[]],
+          },
+        })
+      )
+
+      const scheduledRemovals = await pdpVerifier.getScheduledRemovals(123)
+      assert.isArray(scheduledRemovals)
+      assert.equal(scheduledRemovals.length, 0)
+    })
+  })
 })
diff --git a/packages/synapse-sdk/src/test/retriever-chain.test.ts b/packages/synapse-sdk/src/test/retriever-chain.test.ts
index d30db16a1..7e5a694dc 100644
--- a/packages/synapse-sdk/src/test/retriever-chain.test.ts
+++ b/packages/synapse-sdk/src/test/retriever-chain.test.ts
@@ -1,3 +1,4 @@
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { asPieceCID } from '@filoz/synapse-core/piece'
 import { assert } from 'chai'
 import { ethers } from 'ethers'
@@ -7,11 +8,9 @@ import { ChainRetriever } from '../retriever/chain.ts'
 import { SPRegistryService } from '../sp-registry/index.ts'
 import type { PieceCID, PieceRetriever } from '../types.ts'
 import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
 
 // Mock server for testing
-const server = setup([])
+const server = setup()
 
 // Create a mock PieceCID for testing
 const mockPieceCID = asPieceCID('bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace') as PieceCID
@@ -33,7 +32,7 @@ describe('ChainRetriever', () => {
   let spRegistry: SPRegistryService
 
   before(async () => {
-    await server.start({ quiet: true })
+    await server.start()
   })
 
   after(() => {
@@ -43,10 +42,10 @@ describe('ChainRetriever', () => {
   beforeEach(async () => {
     server.resetHandlers()
     // Set up basic JSON-RPC handler before creating services
-    server.use(JSONRPC(presets.basic))
+    server.use(Mocks.JSONRPC(Mocks.presets.basic))
     provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
-    warmStorage = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
-    spRegistry = await SPRegistryService.create(provider, ADDRESSES.calibration.spRegistry)
+    warmStorage = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
+    spRegistry = await SPRegistryService.create(provider, Mocks.ADDRESSES.calibration.spRegistry)
   })
 
   describe('fetchPiece with specific provider', () => {
@@ -55,9 +54,9 @@ describe('ChainRetriever', () => {
       let downloadCalled = false
 
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
         }),
         http.get('https://provider1.example.com/pdp/piece', async ({ request }) => {
           findPieceCalled = true
@@ -72,8 +71,8 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
-        providerAddress: ADDRESSES.serviceProvider1,
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
+        providerAddress: Mocks.ADDRESSES.serviceProvider1,
       })
 
       assert.isTrue(findPieceCalled, 'Should call findPiece')
@@ -84,16 +83,16 @@ describe('ChainRetriever', () => {
 
     it('should fall back to child retriever when specific provider is not approved', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           serviceRegistry: {
-            ...presets.basic.serviceRegistry,
+            ...Mocks.presets.basic.serviceRegistry,
             getProviderByAddress: () => [
               {
                 providerId: 0n,
                 info: {
-                  serviceProvider: ADDRESSES.zero,
-                  payee: ADDRESSES.zero,
+                  serviceProvider: Mocks.ADDRESSES.zero,
+                  payee: Mocks.ADDRESSES.zero,
                   name: '',
                   description: '',
                   isActive: false,
@@ -105,7 +104,7 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
         providerAddress: '0xNotApproved',
       })
       assert.equal(response.status, 200)
@@ -114,16 +113,16 @@ describe('ChainRetriever', () => {
 
     it('should throw when specific provider is not approved and no child retriever', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           serviceRegistry: {
-            ...presets.basic.serviceRegistry,
+            ...Mocks.presets.basic.serviceRegistry,
             getProviderByAddress: () => [
               {
                 providerId: 0n,
                 info: {
-                  serviceProvider: ADDRESSES.zero,
-                  payee: ADDRESSES.zero,
+                  serviceProvider: Mocks.ADDRESSES.zero,
+                  payee: Mocks.ADDRESSES.zero,
                   name: '',
                   description: '',
                   isActive: false,
@@ -137,7 +136,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, {
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, {
           providerAddress: '0xNotApproved',
         })
         assert.fail('Should have thrown')
@@ -150,11 +149,11 @@ describe('ChainRetriever', () => {
   describe('fetchPiece with multiple providers', () => {
     it('should wait for successful provider even if others fail first', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -164,9 +163,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -182,9 +181,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 2n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider2,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider2,
                     commissionBps: 100n,
                     clientDataSetId: 2n,
                     pdpEndEpoch: 0n,
@@ -194,7 +193,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         }),
@@ -214,7 +213,7 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
 
       // Should get response from provider 2 even though provider 1 failed first
      assert.equal(response.status, 200)
@@ -226,9 +225,9 @@ describe('ChainRetriever', () => {
       let provider2Called = false
 
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
         http.get('https://provider1.example.com/pdp/piece', async ({ request }) => {
           provider1Called = true
@@ -255,7 +254,7 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
 
       assert.isTrue(provider1Called || provider2Called, 'At least one provider should be called')
       assert.equal(response.status, 200)
@@ -265,11 +264,11 @@ describe('ChainRetriever', () => {
 
     it('should fall back to child retriever when all providers fail', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -279,9 +278,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -291,7 +290,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         }),
@@ -304,7 +303,7 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
 
       assert.equal(response.status, 200)
       assert.equal(await response.text(), 'data from child')
@@ -312,11 +311,11 @@ describe('ChainRetriever', () => {
 
     it('should throw when all providers fail and no child retriever', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -326,9 +325,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -338,7 +337,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         }),
@@ -352,7 +351,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
         assert.fail('Should have thrown')
       } catch (error: any) {
         assert.include(error.message, 'All provider retrieval attempts failed')
@@ -361,27 +360,27 @@ describe('ChainRetriever', () => {
 
     it('should handle child retriever when no data sets exist', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[]],
           },
         })
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry, mockChildRetriever)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
 
       assert.equal(response.status, 200)
       assert.equal(await response.text(), 'data from child')
     })
 
     it('should throw when no data sets and no child retriever', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[]],
           },
         })
@@ -390,7 +389,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
         assert.fail('Should have thrown')
       } catch (error: any) {
         assert.include(error.message, 'No active data sets with data found')
@@ -401,10 +400,10 @@ describe('ChainRetriever', () => {
   describe('fetchPiece error handling', () => {
     it('should throw error when provider discovery fails', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => {
               throw new Error('Database connection failed')
             },
@@ -415,7 +414,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
         assert.fail('Should have thrown')
       } catch (error: any) {
         assert.include(error.message, 'Database connection failed')
@@ -424,11 +423,11 @@ describe('ChainRetriever', () => {
 
     it('should handle provider with no PDP product', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.providerNoPDP]), // No PDP product
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.providerNoPDP]), // No PDP product
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -438,9 +437,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -450,7 +449,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         })
@@ -459,7 +458,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
         assert.fail('Should have thrown')
       } catch (error: any) {
         assert.include(error.message, 'Failed to retrieve piece')
@@ -468,11 +467,11 @@ describe('ChainRetriever', () => {
 
     it('should handle mixed success and failure from multiple providers', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -482,9 +481,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -500,9 +499,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 2n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider2,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider2,
                     commissionBps: 100n,
                     clientDataSetId: 2n,
                     pdpEndEpoch: 0n,
@@ -512,7 +511,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         }),
@@ -530,7 +529,7 @@ describe('ChainRetriever', () => {
       )
 
       const retriever = new ChainRetriever(warmStorage, spRegistry)
-      const response = await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+      const response = await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
 
       assert.equal(response.status, 200)
       assert.equal(await response.text(), 'success from provider2')
@@ -538,10 +537,10 @@ describe('ChainRetriever', () => {
 
     it('should handle providers with no valid data sets', async () => {
       server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -551,9 +550,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -563,16 +562,16 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             dataSetLive: (args) => {
               const [dataSetId] = args
               return [dataSetId !== 1n] // Data set 1 not live
             },
-            getDataSetListener: () => [ADDRESSES.calibration.warmStorage],
+            getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
             getNextPieceId: (args) => {
               const [dataSetId] = args
               return [dataSetId === 2n ? 0n : 1n] // Data set 2 has no pieces
@@ -584,7 +583,7 @@ describe('ChainRetriever', () => {
       const retriever = new ChainRetriever(warmStorage, spRegistry)
 
       try {
-        await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1)
+        await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1)
         assert.fail('Should have thrown')
       } catch (error: any) {
         assert.include(error.message, 'No active data sets with data found')
@@ -597,11 +596,11 @@ describe('ChainRetriever', () => {
       let signalPassed = false
 
       server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
          warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getDataSet: (args) => {
               const [dataSetId] = args
@@ -611,9 +610,9 @@ describe('ChainRetriever', () => {
                     pdpRailId: 1n,
                     cacheMissRailId: 0n,
                     cdnRailId: 0n,
-                    payer: ADDRESSES.client1,
-                    payee: ADDRESSES.payee1,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.payee1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                     commissionBps: 100n,
                     clientDataSetId: 1n,
                     pdpEndEpoch: 0n,
@@ -623,7 +622,7 @@ describe('ChainRetriever', () => {
                 },
               ]
             }
-              return presets.basic.warmStorageView.getDataSet(args)
+              return Mocks.presets.basic.warmStorageView.getDataSet(args)
             },
           },
         }),
@@ -645,7 +644,7 @@ describe('ChainRetriever', () => {
       const controller = new AbortController()
 
       const retriever = new ChainRetriever(warmStorage, spRegistry)
-      await retriever.fetchPiece(mockPieceCID, ADDRESSES.client1, { signal: controller.signal })
+      await retriever.fetchPiece(mockPieceCID, Mocks.ADDRESSES.client1, { signal: controller.signal })
 
       assert.isTrue(signalPassed, 'AbortSignal should be passed to fetch')
     })
diff --git a/packages/synapse-sdk/src/test/sp-registry-service.test.ts b/packages/synapse-sdk/src/test/sp-registry-service.test.ts
index 39e031cbe..c88f08973 100644
--- a/packages/synapse-sdk/src/test/sp-registry-service.test.ts
+++ b/packages/synapse-sdk/src/test/sp-registry-service.test.ts
@@ -1,15 +1,15 @@
 /* globals describe it beforeEach */
+
+import * as Mocks from '@filoz/synapse-core/mocks'
 import { assert } from 'chai'
 import { ethers } from 'ethers'
 import { setup } from 'iso-web/msw'
 import { SPRegistryService } from '../sp-registry/service.ts'
 import { PRODUCTS } from '../sp-registry/types.ts'
 import { SIZE_CONSTANTS } from '../utils/constants.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
 
 // mock server for testing
-const server = setup([])
+const server = setup()
 
 describe('SPRegistryService', () => {
   let provider: ethers.Provider
@@ -17,7 +17,7 @@ describe('SPRegistryService', () => {
   let service: SPRegistryService
 
   before(async () => {
-    await server.start({ quiet: true })
+    await server.start()
   })
 
after(() => { @@ -27,25 +27,25 @@ describe('SPRegistryService', () => { beforeEach(() => { server.resetHandlers() provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider) - service = new SPRegistryService(provider, ADDRESSES.calibration.spRegistry) + signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider) + service = new SPRegistryService(provider, 314159, Mocks.ADDRESSES.calibration.spRegistry) }) describe('Constructor', () => { it('should create instance with provider and address', () => { - server.use(JSONRPC(presets.basic)) - const instance = new SPRegistryService(provider, ADDRESSES.calibration.spRegistry) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const instance = new SPRegistryService(provider, 314159, Mocks.ADDRESSES.calibration.spRegistry) assert.exists(instance) }) }) describe('Provider Read Operations', () => { it('should get provider by ID', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const provider = await service.getProvider(1) assert.exists(provider) assert.equal(provider?.id, 1) - assert.equal(provider?.serviceProvider, ADDRESSES.serviceProvider1) + assert.equal(provider?.serviceProvider, Mocks.ADDRESSES.serviceProvider1) assert.equal(provider?.name, 'Test Provider') assert.equal(provider?.description, 'Test Provider') assert.isTrue(provider?.active) @@ -53,16 +53,16 @@ describe('SPRegistryService', () => { it('should return null for non-existent provider', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProvider: () => [ { providerId: 0n, info: { - serviceProvider: ADDRESSES.zero, - payee: ADDRESSES.zero, + serviceProvider: Mocks.ADDRESSES.zero, + payee: Mocks.ADDRESSES.zero, isActive: false, name: '', description: '', @@ -77,25 +77,25 @@ describe('SPRegistryService', () => { }) it('should get provider by address', async () => { - server.use(JSONRPC(presets.basic)) - const provider = await service.getProviderByAddress(ADDRESSES.serviceProvider1) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const provider = await service.getProviderByAddress(Mocks.ADDRESSES.serviceProvider1) assert.exists(provider) assert.equal(provider.id, 1) - assert.equal(provider.serviceProvider, ADDRESSES.serviceProvider1) + assert.equal(provider.serviceProvider, Mocks.ADDRESSES.serviceProvider1) }) it('should return null for unregistered address', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderByAddress: () => [ { providerId: 0n, info: { - serviceProvider: ADDRESSES.zero, - payee: ADDRESSES.zero, + serviceProvider: Mocks.ADDRESSES.zero, + payee: Mocks.ADDRESSES.zero, isActive: false, name: '', description: '', @@ -105,40 +105,40 @@ describe('SPRegistryService', () => { }, }) ) - const provider = await service.getProviderByAddress(ADDRESSES.zero) + const provider = await service.getProviderByAddress(Mocks.ADDRESSES.zero) assert.isNull(provider) }) it('should get provider ID by address', async () => { - server.use(JSONRPC(presets.basic)) - const id = await service.getProviderIdByAddress(ADDRESSES.serviceProvider1) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const id = await 
service.getProviderIdByAddress(Mocks.ADDRESSES.serviceProvider1) assert.equal(id, 1) }) it('should return 0 for unregistered address', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderIdByAddress: () => [0n], }, }) ) - const id = await service.getProviderIdByAddress(ADDRESSES.zero) + const id = await service.getProviderIdByAddress(Mocks.ADDRESSES.zero) assert.equal(id, 0) }) it('should check if provider is active', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const isActive = await service.isProviderActive(1) assert.isTrue(isActive) server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, isProviderActive: () => [false], }, }) @@ -148,25 +148,25 @@ describe('SPRegistryService', () => { }) it('should check if address is registered provider', async () => { - server.use(JSONRPC(presets.basic)) - const isRegistered = await service.isRegisteredProvider(ADDRESSES.serviceProvider1) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) + const isRegistered = await service.isRegisteredProvider(Mocks.ADDRESSES.serviceProvider1) assert.isTrue(isRegistered) server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, isRegisteredProvider: () => [false], }, }) ) - const isNotRegistered = await service.isRegisteredProvider(ADDRESSES.zero) + const isNotRegistered = await service.isRegisteredProvider(Mocks.ADDRESSES.zero) assert.isFalse(isNotRegistered) }) it('should get provider count', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const count = await service.getProviderCount() assert.equal(count, 2) }) @@ -174,7 +174,7 @@ describe('SPRegistryService', () => { describe('Provider Write Operations', () => { it('should register new provider', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const tx = await service.registerProvider(signer, { payee: await signer.getAddress(), name: 'New Provider', @@ -196,14 +196,14 @@ describe('SPRegistryService', () => { }) it('should update provider info', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const tx = await service.updateProviderInfo(signer, 'Updated Name', 'Updated Description') assert.exists(tx) assert.exists(tx.hash) }) it('should remove provider', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const tx = await service.removeProvider(signer) assert.exists(tx) assert.exists(tx.hash) @@ -212,7 +212,7 @@ describe('SPRegistryService', () => { describe('Product Operations', () => { it('should get provider products', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const provider = await service.getProvider(1) assert.exists(provider) assert.exists(provider?.products) @@ -225,7 +225,7 @@ describe('SPRegistryService', () => { }) it('should decode PDP product data', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const provider = await service.getProvider(1) const product = provider?.products.PDP @@ 
-243,7 +243,7 @@ describe('SPRegistryService', () => { }) it('should add new product', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const pdpData = { serviceURL: 'https://new.example.com', minPieceSizeInBytes: SIZE_CONSTANTS.KiB, @@ -262,7 +262,7 @@ describe('SPRegistryService', () => { }) it('should update existing product', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const pdpData = { serviceURL: 'https://updated.example.com', minPieceSizeInBytes: SIZE_CONSTANTS.KiB * 2n, @@ -281,7 +281,7 @@ describe('SPRegistryService', () => { }) it('should remove product', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const tx = await service.removeProduct(signer, PRODUCTS.PDP) assert.exists(tx) assert.exists(tx.hash) @@ -290,7 +290,7 @@ describe('SPRegistryService', () => { describe('Batch Operations', () => { it('should get multiple providers in batch', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const providers = await service.getProviders([1, 2, 3]) assert.isArray(providers) assert.equal(providers.length, 2) // Only IDs 1 and 2 exist in our mock @@ -301,7 +301,7 @@ describe('SPRegistryService', () => { }) it('should handle empty provider ID list', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const providers = await service.getProviders([]) assert.isArray(providers) assert.equal(providers.length, 0) @@ -310,7 +310,7 @@ describe('SPRegistryService', () => { describe('Provider Info Conversion', () => { it('should extract serviceURL from first PDP product', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const provider = await service.getProvider(1) assert.exists(provider) assert.equal(provider?.products.PDP?.data.serviceURL, 'https://pdp.example.com') @@ -318,9 +318,9 @@ describe('SPRegistryService', () => { it('should handle provider without PDP products', async () => { server.use( - JSONRPC({ - ...presets.basic, - serviceRegistry: mockServiceProviderRegistry([PROVIDERS.providerNoPDP]), + Mocks.JSONRPC({ + ...Mocks.presets.basic, + serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.providerNoPDP]), }) ) @@ -333,10 +333,10 @@ describe('SPRegistryService', () => { describe('Error Handling', () => { it('should handle contract call failures gracefully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProvider: () => { throw new Error('Contract call failed') }, @@ -354,17 +354,17 @@ describe('SPRegistryService', () => { it('should handle invalid product data', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, debug: true, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderWithProduct: () => [ { providerId: 1n, providerInfo: { - serviceProvider: ADDRESSES.serviceProvider1, - payee: ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, + payee: Mocks.ADDRESSES.payee1, name: 'Test Provider', description: 'Test Provider', isActive: true, diff --git a/packages/synapse-sdk/src/test/storage-upload.test.ts b/packages/synapse-sdk/src/test/storage-upload.test.ts index 826269dbf..6cf4c8651 100644 --- 
a/packages/synapse-sdk/src/test/storage-upload.test.ts +++ b/packages/synapse-sdk/src/test/storage-upload.test.ts @@ -4,25 +4,25 @@ * Basic tests for Synapse class */ +import * as Mocks from '@filoz/synapse-core/mocks' import type { AddPiecesSuccess } from '@filoz/synapse-core/sp' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' import { HttpResponse, http } from 'msw' +import type { Hex } from 'viem' import { Synapse } from '../synapse.ts' +import type { PieceCID, PieceRecord } from '../types.ts' import { SIZE_CONSTANTS } from '../utils/constants.ts' -import { JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts' -import { findAnyPieceHandler, streamingUploadHandlers } from './mocks/pdp/handlers.ts' -import { PING } from './mocks/ping.ts' // mock server for testing -const server = setup([]) +const server = setup() describe('Storage Upload', () => { let signer: ethers.Signer let provider: ethers.Provider before(async () => { - await server.start({ quiet: true }) + await server.start() }) after(() => { @@ -31,11 +31,11 @@ describe('Storage Upload', () => { beforeEach(() => { server.resetHandlers() provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider) + signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider) }) it('should enforce 127 byte minimum size limit', async () => { - server.use(JSONRPC({ ...presets.basic, debug: false }), PING({ debug: false })) + server.use(Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), Mocks.PING({ debug: false })) const synapse = await Synapse.create({ signer }) const context = await synapse.storage.createContext() @@ -59,10 +59,10 @@ describe('Storage Upload', () => { let addPiecesCount = 0 let uploadCompleteCount = 0 server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -133,10 +133,10 @@ describe('Storage Upload', () => { } const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -205,10 +205,10 @@ describe('Storage Upload', () => { baseUrl: 'https://pdp.example.com', } server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -299,10 +299,10 @@ describe('Storage Upload', () 
=> { baseUrl: 'https://pdp.example.com', } server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -352,10 +352,10 @@ describe('Storage Upload', () => { } const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -403,10 +403,10 @@ describe('Storage Upload', () => { baseUrl: 'https://pdp.example.com', } server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -451,16 +451,19 @@ describe('Storage Upload', () => { it('should handle new server with transaction tracking', async () => { let pieceAddedCallbackFired = false let pieceConfirmedCallbackFired = false + let piecesAddedArgs: { transaction?: Hex; pieces?: Array<{ pieceCid: PieceCID }> } | null = null + let piecesConfirmedArgs: { dataSetId?: number; pieces?: PieceRecord[] } | null = null let uploadCompleteCallbackFired = false + let resolvedDataSetId: number | undefined const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456' const pdpOptions = { baseUrl: 'https://pdp.example.com', } server.use( - JSONRPC({ ...presets.basic, debug: false }), - PING(), - ...streamingUploadHandlers(pdpOptions), - findAnyPieceHandler(true, pdpOptions), + Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }), + Mocks.PING(), + ...Mocks.pdp.streamingUploadHandlers(pdpOptions), + Mocks.pdp.findAnyPieceHandler(true, pdpOptions), http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => { return new HttpResponse(null, { status: 201, @@ -470,11 +473,12 @@ describe('Storage Upload', () => { }) }), http.get<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces/added/:txHash`, ({ params }) => { + resolvedDataSetId = parseInt(params.id, 10) return HttpResponse.json( { addMessageOk: true, confirmedPieceIds: [0], - dataSetId: parseInt(params.id, 10), + dataSetId: resolvedDataSetId, pieceCount: 1, piecesAdded: true, txHash, @@ -493,7 +497,13 @@ describe('Storage Upload', () => { }) const expectedSize = SIZE_CONSTANTS.MIN_UPLOAD_SIZE - await context.upload(new Uint8Array(expectedSize).fill(1), { + const uploadResult = await context.upload(new Uint8Array(expectedSize).fill(1), { + onPiecesAdded(transaction: Hex | undefined, pieces: Array<{ pieceCid: PieceCID }> | 
+        piecesAddedArgs = { transaction, pieces }
+      },
+      onPiecesConfirmed(dataSetId: number, pieces: PieceRecord[]) {
+        piecesConfirmedArgs = { dataSetId, pieces }
+      },
       onPieceAdded() {
         pieceAddedCallbackFired = true
       },
@@ -508,6 +518,21 @@ describe('Storage Upload', () => {
     assert.isTrue(pieceAddedCallbackFired, 'pieceAddedCallback should have been called')
     assert.isTrue(pieceConfirmedCallbackFired, 'pieceConfirmedCallback should have been called')
     assert.isTrue(uploadCompleteCallbackFired, 'uploadCompleteCallback should have been called')
+    assert.isNotNull(piecesAddedArgs, 'onPiecesAdded args should be captured')
+    assert.isNotNull(piecesConfirmedArgs, 'onPiecesConfirmed args should be captured')
+    if (piecesAddedArgs == null || piecesConfirmedArgs == null) {
+      throw new Error('Callbacks should have been called')
+    }
+    const addedArgs: { transaction?: Hex; pieces?: Array<{ pieceCid: PieceCID }> } = piecesAddedArgs
+    const confirmedArgs: { dataSetId?: number; pieces?: PieceRecord[] } = piecesConfirmedArgs
+    assert.strictEqual(addedArgs.transaction, txHash, 'onPiecesAdded should receive transaction hash')
+    assert.strictEqual(
+      addedArgs.pieces?.[0].pieceCid.toString(),
+      uploadResult.pieceCid.toString(),
+      'onPiecesAdded should provide matching pieceCid'
+    )
+    assert.strictEqual(confirmedArgs.dataSetId, resolvedDataSetId, 'onPiecesConfirmed should provide the dataset id')
+    assert.strictEqual(confirmedArgs.pieces?.[0].pieceId, 0, 'onPiecesConfirmed should include piece IDs')
   })
 
   it('should handle ArrayBuffer input', async () => {
@@ -516,10 +541,10 @@ describe('Storage Upload', () => {
     }
     const txHash = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef123456'
     server.use(
-      JSONRPC({ ...presets.basic, debug: false }),
-      PING(),
-      ...streamingUploadHandlers(pdpOptions),
-      findAnyPieceHandler(true, pdpOptions),
+      Mocks.JSONRPC({ ...Mocks.presets.basic, debug: false }),
+      Mocks.PING(),
+      ...Mocks.pdp.streamingUploadHandlers(pdpOptions),
+      Mocks.pdp.findAnyPieceHandler(true, pdpOptions),
       http.post<{ id: string }>(`https://pdp.example.com/pdp/data-sets/:id/pieces`, async ({ params }) => {
         return new HttpResponse(null, {
          status: 201,
diff --git a/packages/synapse-sdk/src/test/storage.test.ts b/packages/synapse-sdk/src/test/storage.test.ts
index 6726fe278..2580953c2 100644
--- a/packages/synapse-sdk/src/test/storage.test.ts
+++ b/packages/synapse-sdk/src/test/storage.test.ts
@@ -1,3 +1,4 @@
+import * as Mocks from '@filoz/synapse-core/mocks'
 import * as Piece from '@filoz/synapse-core/piece'
 import { calculate, calculate as calculatePieceCID } from '@filoz/synapse-core/piece'
 import * as SP from '@filoz/synapse-core/sp'
@@ -11,21 +12,9 @@
 import { StorageContext } from '../storage/context.ts'
 import { Synapse } from '../synapse.ts'
 import { SIZE_CONSTANTS } from '../utils/constants.ts'
 import { WarmStorageService } from '../warm-storage/index.ts'
-import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts'
-import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts'
-import {
-  createAndAddPiecesHandler,
-  finalizePieceUploadHandler,
-  findPieceHandler,
-  postPieceHandler,
-  postPieceUploadsHandler,
-  uploadPieceHandler,
-  uploadPieceStreamingHandler,
-} from './mocks/pdp/handlers.ts'
-import { PING } from './mocks/ping.ts'
 
 // MSW server for JSONRPC mocking
-const server = setup([])
+const server = setup()
 
 function cidBytesToContractHex(bytes: Uint8Array): `0x${string}` {
   return ethers.hexlify(bytes) as `0x${string}`
 }
@@ -42,7 +31,7 @@ describe('StorageService', () => {
   before(async () => {
     // Set timeout to 100ms for testing
     SP.setTimeout(100)
-    await server.start({ quiet: true })
+    await server.start()
   })
 
   after(() => {
@@ -52,73 +41,73 @@ describe('StorageService', () => {
   beforeEach(async () => {
     server.resetHandlers()
     provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1')
-    signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider)
+    signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider)
   })
 
   describe('create() factory method', () => {
    it('should select a random provider when no providerId specified', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       // Should have selected one of the providers
       assert.isTrue(
-        service.serviceProvider === PROVIDERS.provider1.providerInfo.serviceProvider ||
-          service.serviceProvider === PROVIDERS.provider2.providerInfo.serviceProvider
+        service.serviceProvider === Mocks.PROVIDERS.provider1.providerInfo.serviceProvider ||
+          service.serviceProvider === Mocks.PROVIDERS.provider2.providerInfo.serviceProvider
       )
    })
 
    it('should select a random provider but filter allow IPNI providers', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.providerIPNI]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.providerIPNI]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.providerIPNI.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.providerIPNI.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Create storage service without specifying providerId
       const service = await StorageContext.create(synapse, warmStorageService, {
         withIpni: true,
       })
 
       // Should have selected one of the providers
-      assert.isTrue(service.serviceProvider === PROVIDERS.providerIPNI.providerInfo.serviceProvider)
+      assert.isTrue(service.serviceProvider === Mocks.PROVIDERS.providerIPNI.providerInfo.serviceProvider)
    })
 
    it.skip('should never select a dev provider by default', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Create storage service without specifying providerId
       // dev defaults to false, so dev providers should be filtered out
@@ -127,29 +116,29 @@ describe('StorageService', () => {
       })
 
       // Should have selected provider2 (non-dev), never provider1 (dev)
-      assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+      assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
       assert.notEqual(
         service.serviceProvider,
-        PROVIDERS.provider1.providerInfo.serviceProvider,
+        Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
         'Should not select dev provider'
       )
    })
 
    it.skip('should include dev providers when dev option is true', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Create storage service with dev: true
       const service = await StorageContext.create(synapse, warmStorageService, {
@@ -158,26 +147,26 @@ describe('StorageService', () => {
 
       // Should be able to select from either provider, including the dev one
       assert.isTrue(
-        service.serviceProvider === PROVIDERS.provider1.providerInfo.serviceProvider ||
-          service.serviceProvider === PROVIDERS.provider2.providerInfo.serviceProvider
+        service.serviceProvider === Mocks.PROVIDERS.provider1.providerInfo.serviceProvider ||
+          service.serviceProvider === Mocks.PROVIDERS.provider2.providerInfo.serviceProvider
       )
    })
 
    it.skip('should filter providers with serviceStatus=dev when dev option is false', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Create storage service with dev: false (default)
       const service = await StorageContext.create(synapse, warmStorageService, {
@@ -187,71 +176,71 @@ describe('StorageService', () => {
 
       // Should only select the production provider, not the dev one
       assert.equal(
         service.serviceProvider.toLowerCase(),
-        PROVIDERS.provider2.providerInfo.serviceProvider.toLowerCase(),
+        Mocks.PROVIDERS.provider2.providerInfo.serviceProvider.toLowerCase(),
         'Should select production provider, not dev provider'
       )
       assert.notEqual(
         service.serviceProvider.toLowerCase(),
-        PROVIDERS.provider1.providerInfo.serviceProvider.toLowerCase(),
+        Mocks.PROVIDERS.provider1.providerInfo.serviceProvider.toLowerCase(),
         'Should NOT select dev provider'
       )
    })
 
    it('should use specific provider when providerId specified', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Create storage service with specific providerId
       const service = await StorageContext.create(synapse, warmStorageService, {
-        providerId: Number(PROVIDERS.provider1.providerId),
+        providerId: Number(Mocks.PROVIDERS.provider1.providerId),
       })
 
-      assert.equal(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+      assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
    })
 
    it('should skip existing datasets and return -1 with providerId when forceCreateDataSet is true', async () => {
      let fetchedDataSets = false
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               fetchedDataSets = true
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const context = await StorageContext.create(synapse, warmStorageService, {
-        providerId: Number(PROVIDERS.provider1.providerId),
+        providerId: Number(Mocks.PROVIDERS.provider1.providerId),
         forceCreateDataSet: true,
       })
 
       assert.equal(
         context.serviceProvider,
-        PROVIDERS.provider1.providerInfo.serviceProvider,
+        Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
         'Should select the requested provider'
       )
       assert.equal(context.dataSetId, undefined, 'Should not have a data set id when forceCreateDataSet is true')
@@ -260,30 +249,30 @@ describe('StorageService', () => {
 
    it('should skip existing datasets and return -1 with providerAddress when forceCreateDataSet is true', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const context = await StorageContext.create(synapse, warmStorageService, {
-        providerAddress: PROVIDERS.provider1.providerInfo.serviceProvider,
+        providerAddress: Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
         forceCreateDataSet: true,
       })
 
       assert.equal(
         context.serviceProvider,
-        PROVIDERS.provider1.providerInfo.serviceProvider,
+        Mocks.PROVIDERS.provider1.providerInfo.serviceProvider,
         'Should select the requested provider'
       )
       assert.equal(context.dataSetId, undefined, 'Should not have a data set id when forceCreateDataSet is true')
@@ -291,40 +280,40 @@ describe('StorageService', () => {
 
    it('should reuse existing data set with providerId when forceCreateDataSet is not set', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const context = await StorageContext.create(synapse, warmStorageService, {
-        providerId: Number(PROVIDERS.provider1.providerId),
+        providerId: Number(Mocks.PROVIDERS.provider1.providerId),
       })
 
       // Should have reused existing data set (not created new one)
-      assert.equal(context.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+      assert.equal(context.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
       assert.equal(context.dataSetId, 1, 'Should not have a data set id when forceCreateDataSet is true')
    })
 
    it('should throw when no approved providers available', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getApprovedProviders() {
               return [[]]
             },
@@ -332,7 +321,7 @@ describe('StorageService', () => {
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService)
@@ -344,25 +333,25 @@ describe('StorageService', () => {
 
    it('should throw when specified provider not found', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
           providerId: 999,
@@ -375,28 +364,28 @@ describe('StorageService', () => {
 
    it('should select existing data set when available', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
-        providerId: Number(PROVIDERS.provider1.providerId),
+        providerId: Number(Mocks.PROVIDERS.provider1.providerId),
       })
 
       // Should use existing data set
@@ -411,10 +400,10 @@ describe('StorageService', () => {
 
    it('should prefer data sets with existing pieces', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getNextPieceId: (args) => {
               const [dataSetId] = args
               if (dataSetId === 2n) {
@@ -425,7 +414,7 @@ describe('StorageService', () => {
             },
           },
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: (args) => {
@@ -438,12 +427,12 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               } else {
@@ -454,24 +443,24 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 2n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 2n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               }
             },
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         providerId: 1,
@@ -485,28 +474,28 @@ describe('StorageService', () => {
      let providerCallbackFired = false
      let dataSetCallbackFired = false
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       await StorageContext.create(synapse, warmStorageService, {
-        providerId: Number(PROVIDERS.provider1.providerId),
+        providerId: Number(Mocks.PROVIDERS.provider1.providerId),
         callbacks: {
           onProviderSelected: (provider) => {
-            assert.equal(provider.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+            assert.equal(provider.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
             providerCallbackFired = true
           },
           onDataSetResolved: (info) => {
@@ -523,10 +512,10 @@ describe('StorageService', () => {
 
    it('should select by explicit dataSetId', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: (args) => {
@@ -539,12 +528,12 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               } else {
@@ -555,74 +544,74 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 2n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 2n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               }
             },
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         dataSetId: 2,
       })
 
       assert.equal(service.dataSetId, 2)
-      assert.equal(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+      assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
    })
 
    it('should select by providerAddress', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
-        providerAddress: PROVIDERS.provider2.providerInfo.serviceProvider,
+        providerAddress: Mocks.PROVIDERS.provider2.providerInfo.serviceProvider,
       })
 
-      assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+      assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
    })
 
    it('should throw when dataSetId not found', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
@@ -630,31 +619,31 @@ describe('StorageService', () => {
         })
         assert.fail('Should have thrown error')
       } catch (error: any) {
-        assert.include(error.message, 'Data set 999 not found')
+        assert.include(error.message, 'Data set 999 does not exist')
       }
    })
 
    it('should throw when dataSetId conflicts with providerId', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             getAllDataSetMetadata() {
               return [[], []]
             },
           },
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
@@ -670,13 +659,13 @@ describe('StorageService', () => {
 
    it('should throw when providerAddress not approved', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
           providerAddress: '0x6666666666666666666666666666666666666666',
@@ -689,10 +678,10 @@ describe('StorageService', () => {
 
    it('should filter by CDN setting in smart selection', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n, 2n]],
             getAllDataSetMetadata: (args) => {
               const [dataSetId] = args
@@ -714,12 +703,12 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               } else {
@@ -730,22 +719,22 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 2n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 2n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               }
             },
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Test with CDN = false
       const serviceNoCDN = await StorageContext.create(synapse, warmStorageService, {
@@ -762,28 +751,28 @@ describe('StorageService', () => {
 
    it.skip('should handle data sets not managed by current WarmStorage', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       // Should create new data set since existing one is not managed
       const service = await StorageContext.create(synapse, warmStorageService, {})
 
       // Should have selected a provider but no existing data set
       assert.exists(service.serviceProvider)
-      assert.notEqual(service.serviceProvider, PROVIDERS.provider1.providerInfo.serviceProvider)
+      assert.notEqual(service.serviceProvider, Mocks.PROVIDERS.provider1.providerInfo.serviceProvider)
    })
 
    it('should throw when data set belongs to non-approved provider', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: () => {
@@ -794,21 +783,21 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 3n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               },
             },
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
@@ -823,13 +812,14 @@ describe('StorageService', () => {
 
    it('should handle data set not live', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           pdpVerifier: {
             dataSetLive: () => [false],
+            getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage],
           },
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: () => {
@@ -840,37 +830,37 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               },
             },
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
           dataSetId: 1,
         })
         assert.fail('Should have thrown error')
       } catch (error: any) {
-        assert.include(error.message, 'Data set 1 not found')
+        assert.include(error.message, 'Data set 1 does not exist or is not live')
       }
    })
 
    it('should handle conflict between dataSetId and providerAddress', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: () => {
@@ -881,21 +871,21 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider1,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider1,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               },
             },
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService, {
@@ -925,10 +915,10 @@ describe('StorageService', () => {
 
    it('should match providers by ID even when payee differs from serviceProvider', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           warmStorageView: {
-            ...presets.basic.warmStorageView,
+            ...Mocks.presets.basic.warmStorageView,
             clientDataSets: () => [[1n]],
             getAllDataSetMetadata: () => [[], []],
             getDataSet: () => {
@@ -939,45 +929,45 @@ describe('StorageService', () => {
                     clientDataSetId: 0n,
                     commissionBps: 100n,
                     dataSetId: 1n,
-                    payee: ADDRESSES.serviceProvider2,
-                    payer: ADDRESSES.client1,
+                    payee: Mocks.ADDRESSES.serviceProvider2,
+                    payer: Mocks.ADDRESSES.client1,
                     pdpEndEpoch: 0n,
                     pdpRailId: 1n,
                     providerId: 1n,
-                    serviceProvider: ADDRESSES.serviceProvider1,
+                    serviceProvider: Mocks.ADDRESSES.serviceProvider1,
                   },
                 ]
               },
             },
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {})
 
       // Should successfully match by provider ID despite different payee
       assert.equal(service.dataSetId, 1)
       assert.equal(service.provider.id, 1)
-      assert.equal(service.provider.serviceProvider, ADDRESSES.serviceProvider1)
+      assert.equal(service.provider.serviceProvider, Mocks.ADDRESSES.serviceProvider1)
    })
  })
 
  describe('preflightUpload', () => {
    it('should calculate costs without CDN', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           payments: {
-            ...presets.basic.payments,
+            ...Mocks.presets.basic.payments,
             operatorApprovals: () => [true, 2207579500n, 220757940000000n, 220757n, 220757n, 86400n],
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: false,
       })
@@ -992,17 +982,17 @@ describe('StorageService', () => {
 
    it('should calculate costs with CDN', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
           payments: {
-            ...presets.basic.payments,
+            ...Mocks.presets.basic.payments,
             operatorApprovals: () => [true, 2207579500n, 220757940000000n, 220757n, 220757n, 86400n],
           },
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: true,
       })
@@ -1018,13 +1008,13 @@ describe('StorageService', () => {
 
    it('should handle insufficient allowances', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: true,
       })
@@ -1037,13 +1027,13 @@ describe('StorageService', () => {
 
    it('should enforce minimum size limit in preflightUpload', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: true,
       })
@@ -1060,13 +1050,13 @@ describe('StorageService', () => {
 
    it('should enforce maximum size limit in preflightUpload', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: true,
       })
@@ -1088,22 +1078,22 @@ describe('StorageService', () => {
      const testData = new Uint8Array(127).fill(42) // 127 bytes to meet minimum
      const testPieceCID = calculate(testData).toString()
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING(),
-        http.get(`https://${ADDRESSES.client1}.calibration.filbeam.io/:cid`, async () => {
+        Mocks.PING(),
+        http.get(`https://${Mocks.ADDRESSES.client1}.calibration.filbeam.io/:cid`, async () => {
           return HttpResponse.text('Not Found', {
            status: 404,
          })
        }),
-        findPieceHandler(testPieceCID, true, pdpOptions),
+        Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
         http.get('https://pdp.example.com/piece/:pieceCid', async () => {
           return HttpResponse.arrayBuffer(testData.buffer)
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         withCDN: true,
       })
@@ -1117,17 +1107,17 @@ describe('StorageService', () => {
      const testPieceCID = calculate(testData).toString()
 
      server.use(
-        JSONRPC({
-          ...presets.basic,
         }),
-        PING(),
-        findPieceHandler(testPieceCID, true, pdpOptions),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+        }),
+        Mocks.PING(),
+        Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
         http.get('https://pdp.example.com/piece/:pieceCid', async () => {
           return HttpResponse.error()
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       try {
@@ -1143,17 +1133,17 @@ describe('StorageService', () => {
      const testPieceCID = calculate(testData).toString()
 
      server.use(
-        JSONRPC({
-          ...presets.basic,
        }),
-        PING(),
-        findPieceHandler(testPieceCID, true, pdpOptions),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+        }),
+        Mocks.PING(),
+        Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
         http.get('https://pdp.example.com/piece/:pieceCid', async () => {
           return HttpResponse.arrayBuffer(testData.buffer)
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       // Test with and without empty options object
@@ -1168,16 +1158,22 @@ describe('StorageService', () => {
  describe('upload', () => {
    it('should handle errors in batch processing gracefully', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING(),
+        Mocks.PING(),
         http.post<Record<string, string>, { pieceCid: string }>('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.error()
-        })
+        }),
+        http.post<Record<string, string>, { pieceCid: string }>(
+          'https://pdp.example.com/pdp/piece/uploads',
+          async () => {
+            return HttpResponse.error()
+          }
+        )
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       // Create 3 uploads
@@ -1209,13 +1205,13 @@ describe('StorageService', () => {
 
    it('should enforce 1 GiB size limit', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING()
+        Mocks.PING()
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       // Create minimal data but mock length to simulate oversized data
@@ -1237,16 +1233,16 @@ describe('StorageService', () => {
    it.skip('should fail if new server verification fails', async () => {
      const testData = new Uint8Array(127).fill(42) // 127 bytes to meet minimum
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING(),
+        Mocks.PING(),
         http.post<Record<string, string>, { pieceCid: string }>('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.error()
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       try {
@@ -1266,10 +1262,10 @@ describe('StorageService', () => {
      const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
      const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING(),
+        Mocks.PING(),
         http.post('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.text('Created', {
            status: 201,
@@ -1278,11 +1274,11 @@ describe('StorageService', () => {
            },
          })
        }),
-        uploadPieceHandler(mockUuid, pdpOptions),
+        Mocks.pdp.uploadPieceHandler(mockUuid, pdpOptions),
         http.get('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.json({ pieceCid: testPieceCID })
         }),
-        createAndAddPiecesHandler(mockTxHash, pdpOptions),
+        Mocks.pdp.createAndAddPiecesHandler(mockTxHash, pdpOptions),
         http.get('https://pdp.example.com/pdp/data-sets/created/:tx', async () => {
           return HttpResponse.json(
             {
@@ -1315,7 +1311,7 @@ describe('StorageService', () => {
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       try {
@@ -1334,10 +1330,10 @@ describe('StorageService', () => {
      const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
      const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
      server.use(
-        JSONRPC({
-          ...presets.basic,
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
         }),
-        PING(),
+        Mocks.PING(),
         http.post('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.text('Created', {
            status: 201,
@@ -1346,11 +1342,11 @@ describe('StorageService', () => {
            },
          })
        }),
-        uploadPieceHandler(mockUuid, pdpOptions),
+        Mocks.pdp.uploadPieceHandler(mockUuid, pdpOptions),
         http.get('https://pdp.example.com/pdp/piece', async () => {
           return HttpResponse.json({ pieceCid: testPieceCID })
         }),
-        createAndAddPiecesHandler(mockTxHash, pdpOptions),
+        Mocks.pdp.createAndAddPiecesHandler(mockTxHash, pdpOptions),
         http.get('https://pdp.example.com/pdp/data-sets/created/:tx', async () => {
           return HttpResponse.json(
             {
@@ -1383,7 +1379,7 @@ describe('StorageService', () => {
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       try {
@@ -1399,17 +1395,17 @@ describe('StorageService', () => {
      const testPieceCID = Piece.calculate(testData).toString()
      const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
      server.use(
-        JSONRPC({
-          ...presets.basic,
         }),
-        PING(),
-        postPieceHandler(testPieceCID, mockUuid, pdpOptions),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+        }),
+        Mocks.PING(),
+        Mocks.pdp.postPieceHandler(testPieceCID, mockUuid, pdpOptions),
         http.put('https://pdp.example.com/pdp/piece/upload/:uuid', async () => {
           return HttpResponse.error()
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       try {
@@ -1425,20 +1421,20 @@ describe('StorageService', () => {
      const testPieceCID = Piece.calculate(testData).toString()
      const mockUuid = '12345678-90ab-cdef-1234-567890abcdef'
      server.use(
-        JSONRPC({
-          ...presets.basic,
-        }),
-        PING(),
-        postPieceUploadsHandler(mockUuid, pdpOptions),
-        uploadPieceStreamingHandler(mockUuid, pdpOptions),
-        finalizePieceUploadHandler(mockUuid, undefined, pdpOptions),
-        findPieceHandler(testPieceCID, true, pdpOptions),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+        }),
+        Mocks.PING(),
+        Mocks.pdp.postPieceUploadsHandler(mockUuid, pdpOptions),
+        Mocks.pdp.uploadPieceStreamingHandler(mockUuid, pdpOptions),
+        Mocks.pdp.finalizePieceUploadHandler(mockUuid, undefined, pdpOptions),
+        Mocks.pdp.findPieceHandler(testPieceCID, true, pdpOptions),
         http.post('https://pdp.example.com/pdp/data-sets/:id/pieces', () => {
           return HttpResponse.error()
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         dataSetId: 1,
       })
@@ -1456,41 +1452,41 @@ describe('StorageService', () => {
 
  describe('selectRandomProvider with ping validation', () => {
    it('should select first provider that responds to ping', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        http.get(`${PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
+        http.get(`${Mocks.PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
           return HttpResponse.error()
         }),
-        PING({
-          baseUrl: PROVIDERS.provider2.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider2.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       // Should have selected the second provider (first one failed ping)
-      assert.equal(service.serviceProvider, PROVIDERS.provider2.providerInfo.serviceProvider)
+      assert.equal(service.serviceProvider, Mocks.PROVIDERS.provider2.providerInfo.serviceProvider)
    })
 
    // Test removed: selectRandomProvider no longer supports exclusion functionality
 
    it('should throw error when all providers fail ping', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]),
         }),
-        http.get(`${PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
+        http.get(`${Mocks.PROVIDERS.provider1.products[0].offering.serviceURL}/pdp/ping`, async () => {
           return HttpResponse.error()
         }),
-        http.get(`${PROVIDERS.provider2.products[0].offering.serviceURL}/pdp/ping`, async () => {
+        http.get(`${Mocks.PROVIDERS.provider2.products[0].offering.serviceURL}/pdp/ping`, async () => {
           return HttpResponse.error()
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       try {
         await StorageContext.create(synapse, warmStorageService)
@@ -1506,16 +1502,16 @@ describe('StorageService', () => {
  describe('getProviderInfo', () => {
    it('should return provider info through WarmStorageService', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService)
 
       const providerInfo = await service.getProviderInfo()
@@ -1539,6 +1535,8 @@ describe('StorageService', () => {
            minProvingPeriodInEpochs: '0x1e',
            location: '0x75732d65617374',
            paymentTokenAddress: '0xb3042734b608a1b16e9e86b374a3f3e389b4cdf0',
+            endorsement0:
+              '0x00000000ffffffff00000000ffffffff1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b',
          },
          data: {
            serviceURL: 'https://provider1.example.com',
@@ -1550,6 +1548,14 @@ describe('StorageService', () => {
            minProvingPeriodInEpochs: 30n,
            location: 'us-east',
            paymentTokenAddress: '0xb3042734b608a1b16e9e86b374a3f3e389b4cdf0',
+            endorsements: {
+              '0x50724807600e804Fe842439860D5b62baa26aFff': {
+                nonce: 4294967295n,
+                notAfter: 4294967295n,
+                signature:
+                  '0x1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b',
+              },
+            },
          },
        },
      },
@@ -1583,20 +1589,20 @@ describe('StorageService', () => {
        return { data: cidBytesToContractHex(cid.bytes) }
      })
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getActivePieces: () => [piecesData, [101n, 102n], false],
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         dataSetId: 1,
       })
@@ -1611,20 +1617,20 @@ describe('StorageService', () => {
 
    it('should handle empty data set pieces', async () => {
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getActivePieces: () => [[], [], false],
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
 
       const service = await StorageContext.create(synapse, warmStorageService, {
         dataSetId: 1,
       })
@@ -1638,20 +1644,20 @@ describe('StorageService', () => {
 
    it('should handle invalid CID in response', async () => {
      const invalidCidBytes = cidBytesToContractHex(ethers.toUtf8Bytes('invalid-cid-format'))
      server.use(
-        JSONRPC({
-          ...presets.basic,
-          serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]),
+        Mocks.JSONRPC({
+          ...Mocks.presets.basic,
+          serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]),
           pdpVerifier: {
-            ...presets.basic.pdpVerifier,
+            ...Mocks.presets.basic.pdpVerifier,
             getActivePieces: () => [[{ data: invalidCidBytes }], [101n], false],
           },
         }),
-        PING({
-          baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL,
+        Mocks.PING({
+          baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL,
         })
      )
      const synapse = await Synapse.create({ signer })
-      const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage)
+      const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage)
Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1668,22 +1674,22 @@ describe('StorageService', () => { it('should handle PDP server errors', async () => { server.use( - JSONRPC({ - ...presets.basic, - serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1]), + Mocks.JSONRPC({ + ...Mocks.presets.basic, + serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1]), pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getActivePieces: () => { throw new Error('Data set not found: 999') }, }, }), - PING({ - baseUrl: PROVIDERS.provider1.products[0].offering.serviceURL, + Mocks.PING({ + baseUrl: Mocks.PROVIDERS.provider1.products[0].offering.serviceURL, }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1702,10 +1708,10 @@ describe('StorageService', () => { const mockPieceCID = 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace' it('should return exists=false when piece not found on provider', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1720,7 +1726,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1735,11 +1741,11 @@ describe('StorageService', () => { it('should return piece status with proof timing when piece exists', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(4000n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1757,7 +1763,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1774,11 +1780,11 @@ describe('StorageService', () => { it('should detect when in challenge window', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(5030n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1791,10 +1797,10 @@ describe('StorageService', () => { nextChallengeEpoch: 5000, }) }), - findPieceHandler(mockPieceCID, true, pdpOptions) + Mocks.pdp.findPieceHandler(mockPieceCID, true, pdpOptions) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await 
WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1808,11 +1814,11 @@ describe('StorageService', () => { it('should detect when proof is overdue', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(5100n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1830,7 +1836,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1843,11 +1849,11 @@ describe('StorageService', () => { it('should handle data set with nextChallengeEpoch=0', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(5100n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1865,7 +1871,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1880,11 +1886,11 @@ describe('StorageService', () => { it('should handle trailing slash in retrieval URL', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(5100n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1902,7 +1908,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1919,13 +1925,13 @@ describe('StorageService', () => { it('should handle invalid PieceCID', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }), - PING() + Mocks.PING() ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1940,11 +1946,11 @@ describe('StorageService', () => { it('should calculate hours until challenge window', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(4880n), }), - PING(), + Mocks.PING(), 
http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.json({ id: 1, @@ -1962,7 +1968,7 @@ describe('StorageService', () => { }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -1976,18 +1982,18 @@ describe('StorageService', () => { it('should handle data set data fetch failure gracefully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_blockNumber: numberToHex(4880n), }), - PING(), + Mocks.PING(), http.get('https://pdp.example.com/pdp/data-sets/:id', async () => { return HttpResponse.error() }), - findPieceHandler(mockPieceCID, true, pdpOptions) + Mocks.pdp.findPieceHandler(mockPieceCID, true, pdpOptions) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const service = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -2003,6 +2009,46 @@ describe('StorageService', () => { }) }) + describe('getScheduledRemovals', () => { + it('should return scheduled removals for the data set', async () => { + server.use( + Mocks.JSONRPC({ + ...Mocks.presets.basic, + pdpVerifier: { + ...Mocks.presets.basic.pdpVerifier, + getScheduledRemovals: () => [[1n, 2n, 5n]], + }, + }) + ) + + const synapse = await Synapse.create({ signer }) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) + const context = await StorageContext.create(synapse, warmStorageService, { + dataSetId: 1, + }) + + const scheduledRemovals = await context.getScheduledRemovals() + + assert.deepEqual(scheduledRemovals, [1, 2, 5]) + }) + + it('should return an empty array when no data set is configured', async () => { + server.use(Mocks.JSONRPC({ ...Mocks.presets.basic }), Mocks.PING()) + + const synapse = await Synapse.create({ signer }) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) + const context = await StorageContext.create(synapse, warmStorageService, { + dataSetId: 1, + }) + + ;(context as any)._dataSetId = undefined + + const scheduledRemovals = await context.getScheduledRemovals() + + assert.deepEqual(scheduledRemovals, []) + }) + }) + describe('getPieces', () => { it('should get all active pieces with pagination', async () => { // Use actual valid PieceCIDs from test data @@ -2012,11 +2058,11 @@ describe('StorageService', () => { // Mock getActivePieces to return paginated results server.use( - PING(), - JSONRPC({ - ...presets.basic, + Mocks.PING(), + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getActivePieces: (args) => { const offset = Number(args[1]) @@ -2039,7 +2085,7 @@ describe('StorageService', () => { ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const 
context = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -2064,17 +2110,17 @@ describe('StorageService', () => { it('should handle empty results', async () => { // Mock getActivePieces to return no pieces server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getActivePieces: () => [[], [], false], }, }) ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const context = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -2089,10 +2135,10 @@ describe('StorageService', () => { it('should handle AbortSignal in getPieces', async () => { const controller = new AbortController() - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const context = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -2117,10 +2163,10 @@ describe('StorageService', () => { // Mock getActivePieces to return paginated results server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getActivePieces: (args) => { const offset = Number(args[1]) @@ -2139,7 +2185,7 @@ describe('StorageService', () => { ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const context = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) @@ -2165,10 +2211,10 @@ describe('StorageService', () => { // Mock getActivePieces to return a result that triggers pagination let callCount = 0 server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, getActivePieces: () => { callCount++ // Only return data on first call, then abort @@ -2183,7 +2229,7 @@ describe('StorageService', () => { ) const synapse = await Synapse.create({ signer }) - const warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + const warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) const context = await StorageContext.create(synapse, warmStorageService, { dataSetId: 1, }) diff --git a/packages/synapse-sdk/src/test/synapse.test.ts b/packages/synapse-sdk/src/test/synapse.test.ts index 4e801125c..fdf5f8f33 100644 --- a/packages/synapse-sdk/src/test/synapse.test.ts +++ b/packages/synapse-sdk/src/test/synapse.test.ts @@ -4,6 +4,7 @@ * Basic tests for Synapse class */ +import * as Mocks from '@filoz/synapse-core/mocks' import * as Piece from '@filoz/synapse-core/piece' import { assert } from 'chai' import { ethers } from 'ethers' @@ -13,32 +14,22 @@ import pDefer from 'p-defer' import { type Address, bytesToHex, type 
Hex, isAddressEqual, numberToBytes, parseUnits, stringToHex } from 'viem' import { PaymentsService } from '../payments/index.ts' import { PDP_PERMISSIONS } from '../session/key.ts' +import { SPRegistryService } from '../sp-registry/service.ts' import type { StorageContext } from '../storage/context.ts' import { Synapse } from '../synapse.ts' import { SIZE_CONSTANTS } from '../utils/constants.ts' import { makeDataSetCreatedLog } from './mocks/events.ts' -import { ADDRESSES, JSONRPC, PRIVATE_KEYS, PROVIDERS, presets } from './mocks/jsonrpc/index.ts' -import { mockServiceProviderRegistry } from './mocks/jsonrpc/service-registry.ts' -import { - createAndAddPiecesHandler, - dataSetCreationStatusHandler, - finalizePieceUploadHandler, - findPieceHandler, - type PDPMockOptions, - pieceAdditionStatusHandler, - postPieceUploadsHandler, - uploadPieceStreamingHandler, -} from './mocks/pdp/handlers.ts' -import { PING } from './mocks/ping.ts' // mock server for testing -const server = setup([]) +const server = setup() + +const providerIds = [Number(Mocks.PROVIDERS.provider1.providerId), Number(Mocks.PROVIDERS.provider2.providerId)] describe('Synapse', () => { let signer: ethers.Signer let provider: ethers.Provider before(async () => { - await server.start({ quiet: true }) + await server.start() }) after(() => { @@ -47,12 +38,12 @@ beforeEach(() => { server.resetHandlers() provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider) + signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider) }) describe('Instantiation', () => { it('should create instance with signer', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) assert.exists(synapse) assert.exists(synapse.payments) @@ -60,7 +51,7 @@ }) it('should create instance with provider', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ provider }) assert.exists(synapse) assert.exists(synapse.payments) @@ -68,7 +59,7 @@ }) it('should create instance with private key', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const privateKey = '0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef' const rpcURL = 'https://api.calibration.node.glif.io/rpc/v1' const synapse = await Synapse.create({ privateKey, rpcURL }) @@ -78,14 +69,14 @@ }) it('should apply NonceManager by default', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) assert.exists(synapse) // We can't directly check if NonceManager is applied, but we can verify the instance is created }) it('should allow disabling NonceManager', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer, disableNonceManager: true, @@ -144,8 +135,8 @@ // Create mock provider with unsupported chain ID // const unsupportedProvider = createMockProvider(999999) server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_chainId: '999999', }) ) @@ -160,8 +151,8 @@ it('should accept calibration network', async () => {
server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_chainId: '314159', }) ) @@ -172,7 +163,7 @@ describe('Synapse', () => { describe('StorageManager access', () => { it('should provide access to StorageManager via synapse.storage', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) // Should be able to access storage manager @@ -188,7 +179,7 @@ describe('Synapse', () => { }) it('should create storage manager with CDN settings', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer, withCDN: true, @@ -201,7 +192,7 @@ describe('Synapse', () => { }) it('should return same storage manager instance', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) const storage1 = synapse.storage @@ -217,7 +208,7 @@ describe('Synapse', () => { const FAKE_TX_HASH = '0x3816d82cb7a6f5cde23f4d63c0763050d13c6b6dc659d0a7e6eba80b0ec76a18' const FAKE_TX = { hash: FAKE_TX_HASH, - from: ADDRESSES.serviceProvider1, + from: Mocks.ADDRESSES.serviceProvider1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -238,20 +229,20 @@ describe('Synapse', () => { logs: [makeDataSetCreatedLog(DATA_SET_ID, 1)], } beforeEach(() => { - const pdpOptions: PDPMockOptions = { + const pdpOptions: Mocks.PingMockOptions = { baseUrl: 'https://pdp.example.com', } - server.use(PING(pdpOptions)) + server.use(Mocks.PING(pdpOptions)) }) it('should storage.createContext with session key', async () => { const signerAddress = await signer.getAddress() - const sessionKeySigner = new ethers.Wallet(PRIVATE_KEYS.key2) + const sessionKeySigner = new ethers.Wallet(Mocks.PRIVATE_KEYS.key2) const sessionKeyAddress = await sessionKeySigner.getAddress() const EXPIRY = BigInt(1757618883) server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, sessionKeyRegistry: { authorizationExpiry: (args) => { const client = args[0] @@ -264,11 +255,11 @@ describe('Synapse', () => { }, }, payments: { - ...presets.basic.payments, + ...Mocks.presets.basic.payments, operatorApprovals: ([token, client, operator]) => { - assert.equal(token, ADDRESSES.calibration.usdfcToken) + assert.equal(token, Mocks.ADDRESSES.calibration.usdfcToken) assert.equal(client, signerAddress) - assert.equal(operator, ADDRESSES.calibration.warmStorage) + assert.equal(operator, Mocks.ADDRESSES.calibration.warmStorage) return [ true, // isApproved BigInt(127001 * 635000000), // rateAllowance @@ -280,7 +271,7 @@ describe('Synapse', () => { }, accounts: ([token, user]) => { assert.equal(user, signerAddress) - assert.equal(token, ADDRESSES.calibration.usdfcToken) + assert.equal(token, Mocks.ADDRESSES.calibration.usdfcToken) return [BigInt(127001 * 635000000), BigInt(0), BigInt(0), BigInt(0)] }, }, @@ -319,7 +310,7 @@ describe('Synapse', () => { describe('Payment access', () => { it('should provide read-only access to payments', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) // Should be able to access payments @@ -342,17 +333,17 @@ describe('Synapse', () => { describe('getProviderInfo', () => { it('should get provider info for valid approved provider', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const 
synapse = await Synapse.create({ provider }) - const providerInfo = await synapse.getProviderInfo(ADDRESSES.serviceProvider1) + const providerInfo = await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1) - assert.ok(isAddressEqual(providerInfo.serviceProvider as Address, ADDRESSES.serviceProvider1)) + assert.ok(isAddressEqual(providerInfo.serviceProvider as Address, Mocks.ADDRESSES.serviceProvider1)) assert.equal(providerInfo.products.PDP?.data.serviceURL, 'https://pdp.example.com') }) it('should throw for invalid provider address', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) try { @@ -365,10 +356,10 @@ describe('Synapse', () => { it('should throw for non-found provider', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderByAddress: () => [ { providerId: 0n, @@ -387,7 +378,7 @@ describe('Synapse', () => { try { const synapse = await Synapse.create({ signer }) - await synapse.getProviderInfo(ADDRESSES.serviceProvider1) + await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1) assert.fail('Should have thrown') } catch (error: any) { assert.include(error.message, 'not found in registry') @@ -396,16 +387,16 @@ describe('Synapse', () => { it('should throw when provider not found', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderByAddress: () => [ { providerId: 0n, info: { - serviceProvider: ADDRESSES.zero, - payee: ADDRESSES.zero, + serviceProvider: Mocks.ADDRESSES.zero, + payee: Mocks.ADDRESSES.zero, name: '', description: '', isActive: false, @@ -418,7 +409,7 @@ describe('Synapse', () => { try { const synapse = await Synapse.create({ signer }) - await synapse.getProviderInfo(ADDRESSES.serviceProvider1) + await synapse.getProviderInfo(Mocks.ADDRESSES.serviceProvider1) assert.fail('Should have thrown') } catch (error: any) { assert.include(error.message, 'not found') @@ -428,7 +419,7 @@ describe('Synapse', () => { describe('download', () => { it('should validate PieceCID input', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const synapse = await Synapse.create({ signer }) try { @@ -444,7 +435,7 @@ describe('Synapse', () => { // Create test data that matches the expected PieceCID const testData = new TextEncoder().encode('test data') server.use( - JSONRPC(presets.basic), + Mocks.JSONRPC(Mocks.presets.basic), http.get('https://pdp.example.com/pdp/piece', async ({ request }) => { const url = new URL(request.url) const pieceCid = url.searchParams.get('pieceCid') @@ -475,7 +466,7 @@ describe('Synapse', () => { const deferred = pDefer<{ cid: string; wallet: string }>() const testData = new TextEncoder().encode('test data') server.use( - JSONRPC({ ...presets.basic }), + Mocks.JSONRPC({ ...Mocks.presets.basic }), http.get<{ cid: string; wallet: string }>(`https://:wallet.calibration.filbeam.io/:cid`, async ({ params }) => { deferred.resolve(params) return HttpResponse.arrayBuffer(testData.buffer) @@ -506,7 +497,7 @@ describe('Synapse', () => { const { cid, wallet } = result assert.equal(cid, testPieceCid) - assert.ok(isAddressEqual(wallet as Address, ADDRESSES.client1)) + assert.ok(isAddressEqual(wallet as Address, 
Mocks.ADDRESSES.client1)) // Test without explicit withCDN (should use instance default) const data = await synapse.download(testPieceCid) @@ -519,13 +510,13 @@ describe('Synapse', () => { const testData = new TextEncoder().encode('test data') server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderByAddress: (data) => { providerAddressReceived = data[0] - return presets.basic.serviceRegistry.getProviderByAddress(data) + return Mocks.presets.basic.serviceRegistry.getProviderByAddress(data) }, }, }), @@ -554,7 +545,7 @@ describe('Synapse', () => { it('should handle download errors', async () => { server.use( - JSONRPC(presets.basic), + Mocks.JSONRPC(Mocks.presets.basic), http.get('https://pdp.example.com/pdp/piece', async () => { return HttpResponse.error() }) @@ -580,7 +571,7 @@ describe('Synapse', () => { describe('getStorageInfo', () => { it('should return comprehensive storage information', async () => { - server.use(JSONRPC({ ...presets.basic })) + server.use(Mocks.JSONRPC({ ...Mocks.presets.basic })) const synapse = await Synapse.create({ signer }) const storageInfo = await synapse.getStorageInfo() @@ -596,8 +587,8 @@ describe('Synapse', () => { // Check providers assert.equal(storageInfo.providers.length, 2) - assert.equal(storageInfo.providers[0].serviceProvider, ADDRESSES.serviceProvider1) - assert.equal(storageInfo.providers[1].serviceProvider, ADDRESSES.serviceProvider2) + assert.equal(storageInfo.providers[0].serviceProvider, Mocks.ADDRESSES.serviceProvider1) + assert.equal(storageInfo.providers[1].serviceProvider, Mocks.ADDRESSES.serviceProvider2) // Check service parameters assert.equal(storageInfo.serviceParameters.network, 'calibration') @@ -610,15 +601,15 @@ describe('Synapse', () => { // Check allowances (including operator approval flag) assert.exists(storageInfo.allowances) assert.equal(storageInfo.allowances?.isApproved, true) - assert.equal(storageInfo.allowances?.service, ADDRESSES.calibration.warmStorage) + assert.equal(storageInfo.allowances?.service, Mocks.ADDRESSES.calibration.warmStorage) assert.equal(storageInfo.allowances?.rateAllowance, 1000000n) assert.equal(storageInfo.allowances?.lockupAllowance, 10000000n) }) it('should handle missing allowances gracefully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, payments: { operatorApprovals: () => [false, 0n, 0n, 0n, 0n, 0n], }, @@ -634,7 +625,7 @@ describe('Synapse', () => { assert.exists(storageInfo.serviceParameters) assert.deepEqual(storageInfo.allowances, { isApproved: false, - service: ADDRESSES.calibration.warmStorage, + service: Mocks.ADDRESSES.calibration.warmStorage, rateAllowance: 0n, lockupAllowance: 0n, rateUsed: 0n, @@ -644,10 +635,10 @@ describe('Synapse', () => { it('should filter out zero address providers', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, serviceRegistry: { - ...presets.basic.serviceRegistry, + ...Mocks.presets.basic.serviceRegistry, getProviderWithProduct: (data) => { const [providerId] = data if (providerId === 1n) { @@ -655,8 +646,8 @@ describe('Synapse', () => { { providerId, providerInfo: { - serviceProvider: ADDRESSES.serviceProvider1, - payee: ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, + payee: Mocks.ADDRESSES.payee1, isActive: true, name: 'Test Provider', description: 'Test Provider', @@ -681,7 
+672,7 @@ describe('Synapse', () => { bytesToHex(numberToBytes(1000000n)), bytesToHex(numberToBytes(2880n)), stringToHex('US'), - ADDRESSES.calibration.usdfcToken, + Mocks.ADDRESSES.calibration.usdfcToken, ], }, ] @@ -690,8 +681,8 @@ describe('Synapse', () => { { providerId: 0n, providerInfo: { - serviceProvider: ADDRESSES.zero, - payee: ADDRESSES.zero, + serviceProvider: Mocks.ADDRESSES.zero, + payee: Mocks.ADDRESSES.zero, isActive: false, name: '', description: '', @@ -715,15 +706,15 @@ describe('Synapse', () => { // Should filter out zero address provider assert.equal(storageInfo.providers.length, 1) - assert.equal(storageInfo.providers[0].serviceProvider, ADDRESSES.serviceProvider1) + assert.equal(storageInfo.providers[0].serviceProvider, Mocks.ADDRESSES.serviceProvider1) }) it('should handle contract call failures', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorage: { - ...presets.basic.warmStorage, + ...Mocks.presets.basic.warmStorage, getServicePrice: () => { throw new Error('RPC error') }, @@ -746,15 +737,15 @@ describe('Synapse', () => { beforeEach(async () => { server.use( - JSONRPC({ - ...presets.basic, - serviceRegistry: mockServiceProviderRegistry([PROVIDERS.provider1, PROVIDERS.provider2]), + Mocks.JSONRPC({ + ...Mocks.presets.basic, + serviceRegistry: Mocks.mockServiceProviderRegistry([Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]), }) ) synapse = await Synapse.create({ signer }) - for (const { products } of [PROVIDERS.provider1, PROVIDERS.provider2]) { + for (const { products } of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) { server.use( - PING({ + Mocks.PING({ baseUrl: products[0].offering.serviceURL, }) ) @@ -763,11 +754,11 @@ describe('Synapse', () => { it('selects specified providerIds', async () => { const contexts = await synapse.storage.createContexts({ - providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider2.providerId].map(Number), + providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider2.providerId].map(Number), }) assert.equal(contexts.length, 2) - assert.equal(BigInt(contexts[0].provider.id), PROVIDERS.provider1.providerId) - assert.equal(BigInt(contexts[1].provider.id), PROVIDERS.provider2.providerId) + assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId) + assert.equal(BigInt(contexts[1].provider.id), Mocks.PROVIDERS.provider2.providerId) // should create new data sets assert.equal((contexts[0] as any)._dataSetId, undefined) assert.equal((contexts[1] as any)._dataSetId, undefined) @@ -779,12 +770,12 @@ describe('Synapse', () => { withCDN: '', } const contexts = await synapse.storage.createContexts({ - providerIds: [PROVIDERS.provider1.providerId].map(Number), + providerIds: [Mocks.PROVIDERS.provider1.providerId].map(Number), metadata, count: 1, }) assert.equal(contexts.length, 1) - assert.equal(BigInt(contexts[0].provider.id), PROVIDERS.provider1.providerId) + assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId) // should use existing data set assert.equal((contexts[0] as any)._dataSetId, 1n) }) @@ -794,13 +785,13 @@ describe('Synapse', () => { withCDN: '', } const contexts = await synapse.storage.createContexts({ - providerIds: [PROVIDERS.provider1.providerId].map(Number), + providerIds: [Mocks.PROVIDERS.provider1.providerId].map(Number), metadata, count: 1, forceCreateDataSets: true, }) assert.equal(contexts.length, 1) - assert.equal(BigInt(contexts[0].provider.id), 
PROVIDERS.provider1.providerId) + assert.equal(BigInt(contexts[0].provider.id), Mocks.PROVIDERS.provider1.providerId) // should create new data set assert.equal((contexts[0] as any)._dataSetId, undefined) }) @@ -827,19 +818,26 @@ describe('Synapse', () => { }) it('fails when provided an invalid data set id', async () => { - for (const dataSetId of [0, 2]) { - try { - await synapse.storage.createContexts({ - count: 1, - dataSetIds: [dataSetId], - }) - assert.fail('Expected createContexts to fail for invalid specified data set id') - } catch (error: any) { - assert.equal( - error?.message, - `StorageContext resolveByDataSetId failed: Data set ${dataSetId} not found, not owned by ${ADDRESSES.client1}, or not managed by the current WarmStorage contract` - ) - } + // Test dataSetId 0: should fail with "does not exist" (pdpRailId is 0) + try { + await synapse.storage.createContexts({ + count: 1, + dataSetIds: [0], + }) + assert.fail('Expected createContexts to fail for data set id 0') + } catch (error: any) { + assert.include(error?.message, 'Data set 0 does not exist') + } + + // Test dataSetId 2: should fail (not in mock data, so pdpRailId will be 0) + try { + await synapse.storage.createContexts({ + count: 1, + dataSetIds: [2], + }) + assert.fail('Expected createContexts to fail for data set id 2') + } catch (error: any) { + assert.include(error?.message, 'Data set 2 does not exist') } }) @@ -867,7 +865,7 @@ describe('Synapse', () => { } const contexts = await synapse.storage.createContexts({ count: 2, - providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider1.providerId].map(Number), + providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider1.providerId].map(Number), metadata, }) assert.equal(contexts.length, 2) @@ -883,7 +881,7 @@ describe('Synapse', () => { const contexts = await synapse.storage.createContexts({ count: 2, dataSetIds: [1, 1], - providerIds: [PROVIDERS.provider1.providerId, PROVIDERS.provider1.providerId].map(Number), + providerIds: [Mocks.PROVIDERS.provider1.providerId, Mocks.PROVIDERS.provider1.providerId].map(Number), metadata, }) assert.equal(contexts.length, 2) @@ -945,6 +943,74 @@ describe('Synapse', () => { assert.isTrue(defaultContexts === contexts) }) + providerIds.forEach((endorsedProviderId, index) => { + describe(`when endorsing providers[${index}]`, async () => { + const getPDPService = SPRegistryService.prototype.getPDPService + const getProviders = SPRegistryService.prototype.getProviders + beforeEach(async () => { + // mock provider1 having no endorsements + const mockEndorsements = { + '0x2127C3a31F54B81B5E9AD1e29C36c420d3D6ecC5': { + notAfter: 0xffffffffffffffffn, + nonce: 0xffffffffffffffffn, + signature: + '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', + }, + } as const + SPRegistryService.prototype.getPDPService = async function (this: SPRegistryService, providerId) { + const service = await getPDPService.call(this, providerId) + if (service == null) { + return service + } + if (providerId !== endorsedProviderId) { + return service + } + service.offering.endorsements = mockEndorsements + return service + } + SPRegistryService.prototype.getProviders = async function (this: SPRegistryService, providerIds) { + const providers = await getProviders.call(this, providerIds) + for (const provider of providers) { + if (provider.id === endorsedProviderId && provider.products.PDP !== undefined) { + provider.products.PDP.data.endorsements = 
mockEndorsements + } + } + return providers + } + }) + + afterEach(async () => { + SPRegistryService.prototype.getProviders = getProviders + SPRegistryService.prototype.getPDPService = getPDPService + }) + + for (const count of [1, 2]) { + it(`prefers to select the endorsed context when selecting ${count} providers`, async () => { + const counts: Record<number, number> = {} + for (const providerId of providerIds) { + counts[providerId] = 0 + } + for (let i = 0; i < 5; i++) { + const contexts = await synapse.storage.createContexts({ + count, + forceCreateDataSets: true, // This prevents the defaultContexts caching + }) + assert.equal(contexts.length, count) + assert.equal((contexts[0] as any)._dataSetId, undefined) + counts[contexts[0].provider.id]++ + if (count > 1) { + assert.notEqual(contexts[0].provider.id, contexts[1].provider.id) + assert.equal((contexts[1] as any)._dataSetId, undefined) + } + } + for (const providerId of providerIds) { + assert.equal(counts[providerId], providerId === endorsedProviderId ? 5 : 0) + } + }) + } + }) + }) + it('can attempt to create numerous contexts, returning fewer', async () => { const contexts = await synapse.storage.createContexts({ count: 100, @@ -961,12 +1027,12 @@ contexts = await synapse.storage.createContexts({ providerIds: [1, 2], }) - for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) { - const pdpOptions: PDPMockOptions = { + for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) { + const pdpOptions: Mocks.pdp.PDPMockOptions = { baseUrl: provider.products[0].offering.serviceURL, } server.use( - dataSetCreationStatusHandler( + Mocks.pdp.dataSetCreationStatusHandler( FAKE_TX_HASH, { ok: true, @@ -987,17 +1053,17 @@ const pieceCid = Piece.calculate(data) const mockUUID = '12345678-90ab-cdef-1234-567890abcdef' const found = true - for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) { + for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) { const pdpOptions = { baseUrl: provider.products[0].offering.serviceURL, } - server.use(postPieceUploadsHandler(mockUUID, pdpOptions)) - server.use(uploadPieceStreamingHandler(mockUUID, pdpOptions)) - server.use(finalizePieceUploadHandler(mockUUID, pieceCid.toString(), pdpOptions)) - server.use(findPieceHandler(pieceCid.toString(), found, pdpOptions)) - server.use(createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions)) + server.use(Mocks.pdp.postPieceUploadsHandler(mockUUID, pdpOptions)) + server.use(Mocks.pdp.uploadPieceStreamingHandler(mockUUID, pdpOptions)) + server.use(Mocks.pdp.finalizePieceUploadHandler(mockUUID, pieceCid.toString(), pdpOptions)) + server.use(Mocks.pdp.findPieceHandler(pieceCid.toString(), found, pdpOptions)) + server.use(Mocks.pdp.createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions)) server.use( - pieceAdditionStatusHandler( + Mocks.pdp.pieceAdditionStatusHandler( DATA_SET_ID, FAKE_TX_HASH, { @@ -1024,23 +1090,23 @@ const mockUUID = '12345678-90ab-cdef-1234-567890abcdef' const found = true const wrongCid = 'wrongCid' - for (const provider of [PROVIDERS.provider1, PROVIDERS.provider2]) { + for (const provider of [Mocks.PROVIDERS.provider1, Mocks.PROVIDERS.provider2]) { const pdpOptions = { baseUrl: provider.products[0].offering.serviceURL, } - server.use(postPieceUploadsHandler(mockUUID, pdpOptions)) - server.use(uploadPieceStreamingHandler(mockUUID, pdpOptions)) +
server.use(Mocks.pdp.uploadPieceStreamingHandler(mockUUID, pdpOptions)) server.use( - finalizePieceUploadHandler( + Mocks.pdp.finalizePieceUploadHandler( mockUUID, - provider === PROVIDERS.provider1 ? pieceCid.toString() : wrongCid, + provider === Mocks.PROVIDERS.provider1 ? pieceCid.toString() : wrongCid, pdpOptions ) ) - server.use(findPieceHandler(pieceCid.toString(), found, pdpOptions)) - server.use(createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions)) + server.use(Mocks.pdp.findPieceHandler(pieceCid.toString(), found, pdpOptions)) + server.use(Mocks.pdp.createAndAddPiecesHandler(FAKE_TX_HASH, pdpOptions)) server.use( - pieceAdditionStatusHandler( + Mocks.pdp.pieceAdditionStatusHandler( DATA_SET_ID, FAKE_TX_HASH, { @@ -1060,7 +1126,7 @@ await synapse.storage.upload(data, { contexts }) assert.fail('Expected upload to fail when one provider returns wrong pieceCid') } catch (error: any) { - assert.include(error.message, wrongCid) + assert.include(error.message, 'Failed to create upload session') } }) }) diff --git a/packages/synapse-sdk/src/test/telemetry.test.ts b/packages/synapse-sdk/src/test/telemetry.test.ts index a298dbfdd..8e1c317bc 100644 --- a/packages/synapse-sdk/src/test/telemetry.test.ts +++ b/packages/synapse-sdk/src/test/telemetry.test.ts @@ -6,6 +6,7 @@ * and that the telemetry system doesn't "crash" Synapse when enabled. */ +import * as Mocks from '@filoz/synapse-core/mocks' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' @@ -13,10 +14,9 @@ import { HttpResponse, http } from 'msw' import { Synapse } from '../synapse.ts' import { removeGlobalTelemetry } from '../telemetry/singleton.ts' import { sanitizeUrlForSpan } from '../telemetry/utils.ts' -import { JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts' // Mock server for testing -const server = setup([]) +const server = setup() interface SentryRequest { request: Request @@ -52,17 +52,17 @@ describe('Telemetry', () => { let signer: ethers.Signer beforeEach(async () => { - await server.start({ quiet: true }) - server.use(JSONRPC(presets.basic)) + await server.start() + server.use(Mocks.JSONRPC(Mocks.presets.basic)) mockSentryRequests() provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - signer = new ethers.Wallet(PRIVATE_KEYS.key1, provider) + signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider) }) afterEach(async () => { try { - await synapse?.getProvider()?.destroy() + synapse?.getProvider()?.destroy() } catch { // ignore destroy errors } diff --git a/packages/synapse-sdk/src/test/test-utils.ts b/packages/synapse-sdk/src/test/test-utils.ts index 351335dbe..bdd2bd0ac 100644 --- a/packages/synapse-sdk/src/test/test-utils.ts +++ b/packages/synapse-sdk/src/test/test-utils.ts @@ -1,6 +1,6 @@ +import * as Mocks from '@filoz/synapse-core/mocks' import type { ProviderInfo } from '../sp-registry/types.ts' import { SIZE_CONSTANTS } from '../utils/constants.ts' -import { ADDRESSES } from './mocks/jsonrpc/index.ts' /** * Create a mock ProviderInfo object for testing */ @@ -8,8 +8,8 @@ import { ADDRESSES } from './mocks/jsonrpc/index.ts' function createMockProviderInfo(overrides?: Partial<ProviderInfo>): ProviderInfo { const defaults: ProviderInfo = { id: 1, - serviceProvider: ADDRESSES.client1, - payee: ADDRESSES.client1, // Usually same as serviceProvider for tests + serviceProvider: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.client1, // Usually same as serviceProvider for tests name: 'Test Provider',
description: 'A test storage provider', active: true, @@ -67,7 +67,7 @@ export function createSimpleProvider(props: { serviceURL: string }): ProviderInfo { return createMockProviderInfo({ - serviceProvider: props.serviceProvider ?? props.address ?? ADDRESSES.client1, + serviceProvider: props.serviceProvider ?? props.address ?? Mocks.ADDRESSES.client1, products: { PDP: { type: 'PDP', diff --git a/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts b/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts index 26f7c9edd..3a6aeb4d1 100644 --- a/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts +++ b/packages/synapse-sdk/src/test/warm-storage-metadata.test.ts @@ -1,18 +1,19 @@ /* globals describe it before after beforeEach */ + +import * as Mocks from '@filoz/synapse-core/mocks' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' import { METADATA_KEYS } from '../utils/constants.ts' import { WarmStorageService } from '../warm-storage/index.ts' -import { ADDRESSES, JSONRPC, presets } from './mocks/jsonrpc/index.ts' describe('WarmStorageService Metadata', () => { let server: any let warmStorageService: WarmStorageService before(async () => { - server = setup([]) - await server.start({ quiet: true }) + server = setup() + await server.start() }) after(() => { @@ -21,10 +22,10 @@ describe('WarmStorageService Metadata', () => { beforeEach(async () => { server.resetHandlers() - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - warmStorageService = await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + warmStorageService = await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) }) describe('Data Set Metadata', () => { diff --git a/packages/synapse-sdk/src/test/warm-storage-service.test.ts b/packages/synapse-sdk/src/test/warm-storage-service.test.ts index 87ebd2dee..7eda0f81a 100644 --- a/packages/synapse-sdk/src/test/warm-storage-service.test.ts +++ b/packages/synapse-sdk/src/test/warm-storage-service.test.ts @@ -4,6 +4,7 @@ * Tests for WarmStorageService class */ +import * as Mocks from '@filoz/synapse-core/mocks' import { assert } from 'chai' import { ethers } from 'ethers' import { setup } from 'iso-web/msw' @@ -12,10 +13,9 @@ import { PaymentsService } from '../payments/index.ts' import { CONTRACT_ADDRESSES, SIZE_CONSTANTS, TIME_CONSTANTS } from '../utils/constants.ts' import { WarmStorageService } from '../warm-storage/index.ts' import { makeDataSetCreatedLog } from './mocks/events.ts' -import { ADDRESSES, JSONRPC, PRIVATE_KEYS, presets } from './mocks/jsonrpc/index.ts' // mock server for testing -const server = setup([]) +const server = setup() describe('WarmStorageService', () => { let provider: ethers.Provider @@ -24,11 +24,11 @@ describe('WarmStorageService', () => { // Helper to create WarmStorageService with factory pattern const createWarmStorageService = async () => { - return await WarmStorageService.create(provider, ADDRESSES.calibration.warmStorage) + return await WarmStorageService.create(provider, Mocks.ADDRESSES.calibration.warmStorage) } before(async () => { - await server.start({ quiet: true }) + await server.start() }) after(() => { @@ -37,12 +37,12 @@ describe('WarmStorageService', () => { beforeEach(() => { provider = new ethers.JsonRpcProvider('https://api.calibration.node.glif.io/rpc/v1') - signer = new 
ethers.Wallet(PRIVATE_KEYS.key1, provider) + signer = new ethers.Wallet(Mocks.PRIVATE_KEYS.key1, provider) paymentsService = new PaymentsService( provider, signer, - ADDRESSES.calibration.payments, - ADDRESSES.calibration.usdfcToken, + Mocks.ADDRESSES.calibration.payments, + Mocks.ADDRESSES.calibration.usdfcToken, false ) server.resetHandlers() @@ -50,7 +50,7 @@ describe('WarmStorageService', () => { describe('Instantiation', () => { it('should create instance with required parameters', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const warmStorageService = await createWarmStorageService() assert.exists(warmStorageService) assert.isFunction(warmStorageService.getClientDataSets) @@ -59,7 +59,7 @@ describe('WarmStorageService', () => { describe('getDataSet', () => { it('should return a single data set by ID', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const warmStorageService = await createWarmStorageService() const dataSetId = 1 @@ -68,9 +68,9 @@ describe('WarmStorageService', () => { assert.equal(result?.pdpRailId, 1) assert.equal(result?.cacheMissRailId, 0) assert.equal(result?.cdnRailId, 0) - assert.equal(result?.payer, ADDRESSES.client1) - assert.equal(result?.payee, ADDRESSES.serviceProvider1) - assert.equal(result?.serviceProvider, ADDRESSES.serviceProvider1) + assert.equal(result?.payer, Mocks.ADDRESSES.client1) + assert.equal(result?.payee, Mocks.ADDRESSES.serviceProvider1) + assert.equal(result?.serviceProvider, Mocks.ADDRESSES.serviceProvider1) assert.equal(result?.commissionBps, 100) assert.equal(result?.clientDataSetId, 0n) assert.equal(result?.pdpEndEpoch, 0) @@ -79,7 +79,7 @@ describe('WarmStorageService', () => { }) it('should throw for non-existent data set', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const warmStorageService = await createWarmStorageService() const dataSetId = 999 @@ -93,8 +93,8 @@ describe('WarmStorageService', () => { it('should handle contract revert gracefully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { // @ts-expect-error - we want to test the error case getDataSet: () => { @@ -118,23 +118,23 @@ describe('WarmStorageService', () => { describe('getClientDataSets', () => { it('should return empty array when client has no data sets', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { getClientDataSets: () => [[]], }, }) ) const warmStorageService = await createWarmStorageService() - const dataSets = await warmStorageService.getClientDataSets(ADDRESSES.client1) + const dataSets = await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1) assert.isArray(dataSets) assert.lengthOf(dataSets, 0) }) it('should return data sets for a client', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { getClientDataSets: () => [ [ @@ -142,9 +142,9 @@ describe('WarmStorageService', () => { pdpRailId: 1n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.serviceProvider1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.serviceProvider1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 0n, pdpEndEpoch: 0n, @@ -156,9 +156,9 @@ describe('WarmStorageService', 
() => { pdpRailId: 2n, cacheMissRailId: 0n, cdnRailId: 100n, - payer: ADDRESSES.client1, - payee: ADDRESSES.serviceProvider1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.serviceProvider1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 200n, clientDataSetId: 1n, pdpEndEpoch: 0n, @@ -173,23 +173,23 @@ describe('WarmStorageService', () => { ) const warmStorageService = await createWarmStorageService() - const dataSets = await warmStorageService.getClientDataSets(ADDRESSES.client1) + const dataSets = await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1) assert.isArray(dataSets) assert.lengthOf(dataSets, 2) // Check first data set assert.equal(dataSets[0].pdpRailId, 1) - assert.equal(dataSets[0].payer, ADDRESSES.client1) - assert.equal(dataSets[0].payee, ADDRESSES.serviceProvider1) + assert.equal(dataSets[0].payer, Mocks.ADDRESSES.client1) + assert.equal(dataSets[0].payee, Mocks.ADDRESSES.serviceProvider1) assert.equal(dataSets[0].commissionBps, 100) assert.equal(dataSets[0].clientDataSetId, 0n) assert.equal(dataSets[0].cdnRailId, 0) // Check second data set assert.equal(dataSets[1].pdpRailId, 2) - assert.equal(dataSets[1].payer, ADDRESSES.client1) - assert.equal(dataSets[1].payee, ADDRESSES.serviceProvider1) + assert.equal(dataSets[1].payer, Mocks.ADDRESSES.client1) + assert.equal(dataSets[1].payee, Mocks.ADDRESSES.serviceProvider1) assert.equal(dataSets[1].commissionBps, 200) assert.equal(dataSets[1].clientDataSetId, 1n) assert.isAbove(dataSets[1].cdnRailId, 0) @@ -198,8 +198,8 @@ describe('WarmStorageService', () => { it('should handle contract call errors gracefully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { // @ts-expect-error - we want to test the error case getClientDataSets: () => null, @@ -209,7 +209,7 @@ describe('WarmStorageService', () => { const warmStorageService = await createWarmStorageService() try { - await warmStorageService.getClientDataSets(ADDRESSES.client1) + await warmStorageService.getClientDataSets(Mocks.ADDRESSES.client1) assert.fail('Should have thrown error') } catch (error: any) { assert.include(error.message, 'Failed to get client data sets') @@ -220,19 +220,19 @@ describe('WarmStorageService', () => { describe('getClientDataSetsWithDetails', () => { it('should enhance data sets with PDPVerifier details', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, clientDataSets: () => [[242n]], getDataSet: () => [ { pdpRailId: 48n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.payee1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 0n, pdpEndEpoch: 0n, @@ -242,15 +242,15 @@ describe('WarmStorageService', () => { ], }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], getNextPieceId: () => [2n], - getDataSetListener: () => [ADDRESSES.calibration.warmStorage], + getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage], }, }) ) const warmStorageService = await createWarmStorageService() - const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1) + const 
detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) assert.lengthOf(detailedDataSets, 1) assert.equal(detailedDataSets[0].pdpRailId, 48) @@ -263,10 +263,10 @@ describe('WarmStorageService', () => { it('should filter unmanaged data sets when onlyManaged is true', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, clientDataSets: () => [[242n, 243n]], getDataSet: (args) => { const [dataSetId] = args @@ -276,9 +276,9 @@ describe('WarmStorageService', () => { pdpRailId: 48n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.payee1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 0n, pdpEndEpoch: 0n, @@ -292,9 +292,9 @@ describe('WarmStorageService', () => { pdpRailId: 49n, cacheMissRailId: 0n, cdnRailId: 0n, - payer: ADDRESSES.client1, - payee: ADDRESSES.payee1, - serviceProvider: ADDRESSES.serviceProvider1, + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, commissionBps: 100n, clientDataSetId: 1n, pdpEndEpoch: 0n, @@ -306,13 +306,13 @@ describe('WarmStorageService', () => { }, }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], getNextPieceId: () => [1n], getDataSetListener: (args) => { const [dataSetId] = args if (dataSetId === 242n) { - return [ADDRESSES.calibration.warmStorage] // Managed by us + return [Mocks.ADDRESSES.calibration.warmStorage] // Managed by us } return ['0x1234567890123456789012345678901234567890' as `0x${string}`] // Different address }, @@ -322,22 +322,108 @@ describe('WarmStorageService', () => { const warmStorageService = await createWarmStorageService() // Get all data sets - const allDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1, false) + const allDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1, false) assert.lengthOf(allDataSets, 2) // Get only managed data sets - const managedDataSets = await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1, true) + const managedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1, true) assert.lengthOf(managedDataSets, 1) assert.equal(managedDataSets[0].pdpRailId, 48) assert.isTrue(managedDataSets[0].isManaged) }) + it('should set withCDN true when cdnRailId > 0 and withCDN metadata key present', async () => { + server.use( + Mocks.JSONRPC({ + ...Mocks.presets.basic, + warmStorageView: { + ...Mocks.presets.basic.warmStorageView, + clientDataSets: () => [[242n]], + getDataSet: () => [ + { + pdpRailId: 48n, + cacheMissRailId: 50n, + cdnRailId: 51n, // CDN rail exists + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, + commissionBps: 100n, + clientDataSetId: 0n, + pdpEndEpoch: 0n, + providerId: 1n, + dataSetId: 242n, + }, + ], + getAllDataSetMetadata: () => [ + ['withCDN'], // withCDN key present + [''], + ], + }, + pdpVerifier: { + ...Mocks.presets.basic.pdpVerifier, + dataSetLive: () => [true], + getNextPieceId: () => [2n], + getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage], + }, + }) + ) + const 
warmStorageService = await createWarmStorageService() + const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) + + assert.lengthOf(detailedDataSets, 1) + assert.equal(detailedDataSets[0].cdnRailId, 51) + assert.isTrue(detailedDataSets[0].withCDN) + }) + + it('should set withCDN false when cdnRailId > 0 but withCDN metadata key missing (terminated)', async () => { + server.use( + Mocks.JSONRPC({ + ...Mocks.presets.basic, + warmStorageView: { + ...Mocks.presets.basic.warmStorageView, + clientDataSets: () => [[242n]], + getDataSet: () => [ + { + pdpRailId: 48n, + cacheMissRailId: 50n, + cdnRailId: 51n, // CDN rail still exists + payer: Mocks.ADDRESSES.client1, + payee: Mocks.ADDRESSES.payee1, + serviceProvider: Mocks.ADDRESSES.serviceProvider1, + commissionBps: 100n, + clientDataSetId: 0n, + pdpEndEpoch: 0n, + providerId: 1n, + dataSetId: 242n, + }, + ], + getAllDataSetMetadata: () => [ + [], // No metadata keys - CDN was terminated + [], + ], + }, + pdpVerifier: { + ...Mocks.presets.basic.pdpVerifier, + dataSetLive: () => [true], + getNextPieceId: () => [2n], + getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage], + }, + }) + ) + const warmStorageService = await createWarmStorageService() + const detailedDataSets = await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) + + assert.lengthOf(detailedDataSets, 1) + assert.equal(detailedDataSets[0].cdnRailId, 51) + assert.isFalse(detailedDataSets[0].withCDN) // CDN terminated, metadata cleared + }) + it('should throw error when contract calls fail', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, clientDataSets: () => [[242n]], getDataSet: () => { throw new Error('Contract call failed') @@ -348,7 +434,7 @@ describe('WarmStorageService', () => { const warmStorageService = await createWarmStorageService() try { - await warmStorageService.getClientDataSetsWithDetails(ADDRESSES.client1) + await warmStorageService.getClientDataSetsWithDetails(Mocks.ADDRESSES.client1) assert.fail('Should have thrown error') } catch (error: any) { assert.include(error.message, 'Failed to get details for data set') @@ -360,12 +446,12 @@ describe('WarmStorageService', () => { describe('validateDataSet', () => { it('should validate dataset successfully', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], - getDataSetListener: () => [ADDRESSES.calibration.warmStorage], + getDataSetListener: () => [Mocks.ADDRESSES.calibration.warmStorage], }, }) ) @@ -378,10 +464,10 @@ describe('WarmStorageService', () => { it('should throw error if data set is not managed by this WarmStorage', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], getDataSetListener: () => ['0x1234567890123456789012345678901234567890' as Address], // Different address }, @@ -403,14 +489,14 @@ describe('WarmStorageService', () => { it('should verify successful data set creation', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + 
...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) return { hash: mockTxHash, - from: ADDRESSES.client1, + from: Mocks.ADDRESSES.client1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -435,7 +521,7 @@ describe('WarmStorageService', () => { } }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], }, }) @@ -455,8 +541,8 @@ describe('WarmStorageService', () => { it('should handle transaction not mined yet', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) @@ -478,10 +564,10 @@ describe('WarmStorageService', () => { describe('Service Provider ID Operations', () => { it('should get list of approved provider IDs', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getApprovedProviders: () => [[1n, 4n, 7n]], }, }) @@ -496,10 +582,10 @@ describe('WarmStorageService', () => { it('should return empty array when no providers are approved', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getApprovedProviders: () => [[]], }, }) @@ -511,10 +597,10 @@ describe('WarmStorageService', () => { it('should check if a provider ID is approved', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, isProviderApproved: () => [true], }, }) @@ -526,10 +612,10 @@ describe('WarmStorageService', () => { it('should check if a provider ID is not approved', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, isProviderApproved: () => [false], }, }) @@ -542,10 +628,10 @@ describe('WarmStorageService', () => { it('should get owner address', async () => { const ownerAddress = '0xabcdef1234567890123456789012345678901234' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorage: { - ...presets.basic.warmStorage, + ...Mocks.presets.basic.warmStorage, owner: () => [ownerAddress as `0x${string}`], }, }) @@ -558,10 +644,10 @@ describe('WarmStorageService', () => { it('should check if signer is owner', async () => { const signerAddress = '0x1234567890123456789012345678901234567890' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorage: { - ...presets.basic.warmStorage, + ...Mocks.presets.basic.warmStorage, owner: () => [signerAddress as `0x${string}`], }, }) @@ -579,10 +665,10 @@ describe('WarmStorageService', () => { const signerAddress = '0x1234567890123456789012345678901234567890' const ownerAddress = '0xabcdef1234567890123456789012345678901234' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorage: { - ...presets.basic.warmStorage, + ...Mocks.presets.basic.warmStorage, owner: () => [ownerAddress as `0x${string}`], }, }) @@ -597,17 +683,17 @@ 
describe('WarmStorageService', () => { }) it('should get service provider registry address', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const warmStorageService = await createWarmStorageService() const registryAddress = warmStorageService.getServiceProviderRegistryAddress() // The mock returns this default address for spRegistry - assert.equal(registryAddress, ADDRESSES.calibration.spRegistry) + assert.equal(registryAddress, Mocks.ADDRESSES.calibration.spRegistry) }) it('should add approved provider (mock transaction)', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -617,7 +703,7 @@ describe('WarmStorageService', () => { }) it('should terminate dataset (mock tx)', async () => { - server.use(JSONRPC(presets.basic)) + server.use(Mocks.JSONRPC(Mocks.presets.basic)) const warmStorageService = await createWarmStorageService() const tx = await warmStorageService.terminateDataSet(signer, 4) @@ -626,10 +712,10 @@ describe('WarmStorageService', () => { it('should remove approved provider with correct index', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getApprovedProviders: () => [[1n, 4n, 7n]], }, }) @@ -642,10 +728,10 @@ describe('WarmStorageService', () => { it('should throw when removing non-existent provider', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getApprovedProviders: () => [[1n, 4n, 7n]], }, }) @@ -664,8 +750,8 @@ describe('WarmStorageService', () => { describe('calculateStorageCost', () => { it('should calculate storage costs correctly for 1 GiB', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -693,8 +779,8 @@ describe('WarmStorageService', () => { it('should scale costs linearly with size', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -718,10 +804,10 @@ describe('WarmStorageService', () => { it('should fetch pricing from WarmStorage contract', async () => { let getServicePriceCalled = false server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorage: { - ...presets.basic.warmStorage, + ...Mocks.presets.basic.warmStorage, getServicePrice: () => { getServicePriceCalled = true return [ @@ -747,8 +833,8 @@ describe('WarmStorageService', () => { describe('checkAllowanceForStorage', () => { it('should check allowances for storage operations', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -786,10 +872,10 @@ describe('WarmStorageService', () => { it('should return sufficient when allowances are adequate', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, payments: { - ...presets.basic.payments, + ...Mocks.presets.basic.payments, operatorApprovals: () => [true, parseUnits('100', 18), parseUnits('10000', 18), 0n, 0n, 0n], }, }) @@ -817,8 +903,8 @@ 
describe('WarmStorageService', () => { it('should include depositAmountNeeded in response', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -843,8 +929,8 @@ describe('WarmStorageService', () => { it('should use custom lockup days when provided', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -877,8 +963,8 @@ describe('WarmStorageService', () => { describe('prepareStorageUpload', () => { it('should prepare storage upload with required actions', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -901,7 +987,7 @@ describe('WarmStorageService', () => { availableFunds: parseUnits('10000', 18), }), approveService: async (serviceAddress: string, rateAllowance: bigint, lockupAllowance: bigint) => { - assert.strictEqual(serviceAddress, ADDRESSES.calibration.warmStorage) + assert.strictEqual(serviceAddress, Mocks.ADDRESSES.calibration.warmStorage) assert.isTrue(rateAllowance > 0n) assert.isTrue(lockupAllowance > 0n) approveServiceCalled = true @@ -939,8 +1025,8 @@ describe('WarmStorageService', () => { it('should include deposit action when balance insufficient', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -996,8 +1082,8 @@ describe('WarmStorageService', () => { it('should return no actions when everything is ready', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const warmStorageService = await createWarmStorageService() @@ -1037,14 +1123,14 @@ describe('WarmStorageService', () => { it('should combine PDP server and chain verification status', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) return { hash: mockTxHash, - from: ADDRESSES.client1, + from: Mocks.ADDRESSES.client1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -1069,7 +1155,7 @@ describe('WarmStorageService', () => { } }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], }, }) @@ -1118,14 +1204,14 @@ describe('WarmStorageService', () => { it('should handle PDP server failure gracefully', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) return { hash: mockTxHash, - from: ADDRESSES.client1, + from: Mocks.ADDRESSES.client1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -1147,7 +1233,7 @@ describe('WarmStorageService', () => { } }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], }, }) @@ -1181,14 +1267,14 @@ describe('WarmStorageService', () => { it('should NOT mark as complete when server has not caught up yet', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' 
server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) return { hash: mockTxHash, - from: ADDRESSES.client1, + from: Mocks.ADDRESSES.client1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -1210,7 +1296,7 @@ describe('WarmStorageService', () => { } }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], }, }) @@ -1272,14 +1358,14 @@ describe('WarmStorageService', () => { } server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionByHash: (params) => { const hash = params[0] assert.equal(hash, mockTxHash) return { hash: mockTxHash, - from: ADDRESSES.client1, + from: Mocks.ADDRESSES.client1, gas: '0x5208', value: '0x0', nonce: '0x444', @@ -1306,7 +1392,7 @@ describe('WarmStorageService', () => { } }, pdpVerifier: { - ...presets.basic.pdpVerifier, + ...Mocks.presets.basic.pdpVerifier, dataSetLive: () => [true], }, }) @@ -1328,8 +1414,8 @@ describe('WarmStorageService', () => { it('should timeout if data set takes too long', async () => { const mockTxHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, eth_getTransactionReceipt: () => null, }) ) @@ -1367,10 +1453,10 @@ describe('WarmStorageService', () => { describe('getMaxProvingPeriod() and getChallengeWindow()', () => { it('should return max proving period from WarmStorage contract', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getMaxProvingPeriod: () => [BigInt(2880)], }, }) @@ -1382,10 +1468,10 @@ describe('WarmStorageService', () => { it('should return challenge window from WarmStorage contract', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, challengeWindow: () => [BigInt(60)], }, }) @@ -1397,10 +1483,10 @@ describe('WarmStorageService', () => { it('should handle contract call failures', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, warmStorageView: { - ...presets.basic.warmStorageView, + ...Mocks.presets.basic.warmStorageView, getMaxProvingPeriod: () => { throw new Error('Contract call failed') }, @@ -1421,8 +1507,8 @@ describe('WarmStorageService', () => { describe('CDN Operations', () => { it('should top up CDN payment rails (mock transaction)', async () => { server.use( - JSONRPC({ - ...presets.basic, + Mocks.JSONRPC({ + ...Mocks.presets.basic, }) ) const dataSetId = 49 diff --git a/packages/synapse-sdk/src/types.ts b/packages/synapse-sdk/src/types.ts index 359166c6f..bf21ec9c4 100644 --- a/packages/synapse-sdk/src/types.ts +++ b/packages/synapse-sdk/src/types.ts @@ -237,7 +237,7 @@ export interface EnhancedDataSetInfo extends DataSetInfo { isLive: boolean /** Whether this data set is managed by the current Warm Storage contract */ isManaged: boolean - /** Whether the data set is using CDN (derived from cdnRailId > 0) */ + /** Whether the data set is using CDN (cdnRailId > 0 and withCDN metadata key present) */ withCDN: boolean /** Metadata associated with this data set (key-value pairs) */ metadata: Record @@ -396,25 +396,32 @@ export 
interface PreflightInfo { // that combines context creation + upload in one call) // ============================================================================ -/** - * Callbacks for tracking upload progress - * - * These callbacks provide visibility into the upload process stages: - * 1. Upload completion (piece uploaded to provider) - * 2. Piece addition (transaction submitted to chain) - * 3. Confirmation (transaction confirmed on-chain) - */ export interface UploadCallbacks { /** Called periodically during upload with bytes uploaded so far */ onProgress?: (bytesUploaded: number) => void /** Called when upload to service provider completes */ onUploadComplete?: (pieceCid: PieceCID) => void - /** Called when the service provider has added the piece and submitted the transaction to the chain */ + /** Called when the service provider has added the piece(s) and submitted the transaction to the chain */ + onPiecesAdded?: (transaction?: Hex, pieces?: { pieceCid: PieceCID }[]) => void + /** @deprecated Use onPiecesAdded instead */ onPieceAdded?: (transaction?: Hex) => void - /** Called when the service provider agrees that the piece addition is confirmed on-chain */ + /** Called when the service provider agrees that the piece addition(s) are confirmed on-chain */ + onPiecesConfirmed?: (dataSetId: number, pieces: PieceRecord[]) => void + /** @deprecated Use onPiecesConfirmed instead */ onPieceConfirmed?: (pieceIds: number[]) => void } +/** + * Canonical representation of a piece within a data set. + * + * This is used when reporting confirmed pieces and when iterating over pieces + * in a data set. + */ +export interface PieceRecord { + pieceId: number + pieceCid: PieceCID +} + /** * Options for uploading individual pieces to an existing storage context * diff --git a/packages/synapse-sdk/src/utils/constants.ts b/packages/synapse-sdk/src/utils/constants.ts index cf9fc4463..d9e4c46cd 100644 --- a/packages/synapse-sdk/src/utils/constants.ts +++ b/packages/synapse-sdk/src/utils/constants.ts @@ -302,16 +302,6 @@ export const TIMING_CONSTANTS = { PIECE_ADDITION_POLL_INTERVAL_MS: 1000, // 1 second } as const -/** - * Settlement fee required for rail settlement operations - * This is the NETWORK_FEE constant in the Payments contract that gets burned to the Filecoin network - * Value: 0.0013 FIL (1300000000000000 attoFIL) - * - * IMPORTANT: This value must be kept in sync with the Payments contract's NETWORK_FEE constant. - * If the contract is upgraded with a different fee, this constant must be updated accordingly. 
- */ -export const SETTLEMENT_FEE = 1300000000000000n // 0.0013 FIL in attoFIL - /** * Recommended RPC endpoints for Filecoin networks */ diff --git a/packages/synapse-sdk/src/warm-storage/index.ts b/packages/synapse-sdk/src/warm-storage/index.ts index de9c4b653..efc0779bc 100644 --- a/packages/synapse-sdk/src/warm-storage/index.ts +++ b/packages/synapse-sdk/src/warm-storage/index.ts @@ -1,7 +1,6 @@ /** - * Exports the Warm Storage components + * Warm Storage Service * - * @packageDocumentation * @module WarmStorage * @example * ```ts diff --git a/packages/synapse-sdk/src/warm-storage/service.ts b/packages/synapse-sdk/src/warm-storage/service.ts index 2885da7b6..5dcbad7ca 100644 --- a/packages/synapse-sdk/src/warm-storage/service.ts +++ b/packages/synapse-sdk/src/warm-storage/service.ts @@ -31,7 +31,13 @@ import type { PaymentsService } from '../payments/service.ts' import type { DataSetCreationStatusResponse, PDPServer } from '../pdp/server.ts' import { PDPVerifier } from '../pdp/verifier.ts' import type { DataSetInfo, EnhancedDataSetInfo } from '../types.ts' -import { CONTRACT_ADDRESSES, SIZE_CONSTANTS, TIME_CONSTANTS, TIMING_CONSTANTS } from '../utils/constants.ts' +import { + CONTRACT_ADDRESSES, + METADATA_KEYS, + SIZE_CONSTANTS, + TIME_CONSTANTS, + TIMING_CONSTANTS, +} from '../utils/constants.ts' import { CONTRACT_ABIS, createError, getFilecoinNetworkType, TOKENS } from '../utils/index.ts' /** @@ -378,7 +384,7 @@ export class WarmStorageService { currentPieceCount: Number(nextPieceId), isLive, isManaged, - withCDN: base.cdnRailId > 0, + withCDN: base.cdnRailId > 0 && METADATA_KEYS.WITH_CDN in metadata, metadata, } } catch (error) { diff --git a/packages/synapse-sdk/tsconfig.json b/packages/synapse-sdk/tsconfig.json index 816f03090..076163a5e 100644 --- a/packages/synapse-sdk/tsconfig.json +++ b/packages/synapse-sdk/tsconfig.json @@ -7,17 +7,22 @@ "exclude": ["node_modules", "dist"], "references": [ { - "path": "../synapse-core" + "path": "../synapse-core/tsconfig.json" } ], "typedocOptions": { "entryPointStrategy": "resolve", "entryPoints": [ "src/index.ts", - "src/piece/index.ts", - "src/pdp/index.ts", "src/payments/index.ts", - "src/storage/index.ts" + "src/pdp/index.ts", + "src/session/index.ts", + "src/storage/index.ts", + "src/subgraph/index.ts", + "src/telemetry/index.ts", + "src/warm-storage/index.ts", + "src/sp-registry/index.ts", + "src/filbeam/index.ts" ] } } diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index e0cce2b36..2a42b1adb 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -5,8 +5,40 @@ packages: - docs - utils +catalog: + '@biomejs/biome': 2.3.8 + '@types/mocha': ^10.0.10 + '@types/node': ^24.9.1 + '@types/react': ^19.2.7 + '@types/react-dom': ^19.2.3 + abitype: ^1.2.0 + ethers: ~6.16.0 + mocha: ^11.7.4 + msw: 2.12.4 + ox: ^0.11.1 + typescript: 5.9.3 + viem: ^2.41.2 + wagmi: ^3.0.2 + +minimumReleaseAge: 2880 + +minimumReleaseAgeExclude: + - iso-ledger + - '@hugomrdias/docs' + onlyBuiltDependencies: - esbuild - msw - sharp - workerd + +trustPolicy: no-downgrade + +trustPolicyExclude: + - vite@6.4.1 + - chokidar@4.0.3 + - semver@6.3.1 + - undici-types@6.19.8 + - langium@3.3.1 + - '@reduxjs/toolkit@2.8.2' + - reselect@5.1.1 diff --git a/utils/example-storage-e2e.js b/utils/example-storage-e2e.js index c18768c85..e53f3157a 100644 --- a/utils/example-storage-e2e.js +++ b/utils/example-storage-e2e.js @@ -20,14 +20,12 @@ import { ethers } from 'ethers' import fsPromises from 'fs/promises' +import { SIZE_CONSTANTS, Synapse, TIME_CONSTANTS } from 
'../packages/synapse-sdk/src/index.ts' import { ADD_PIECES_TYPEHASH, CREATE_DATA_SET_TYPEHASH, PDP_PERMISSION_NAMES, - SIZE_CONSTANTS, - Synapse, - TIME_CONSTANTS, -} from '../packages/synapse-sdk/src/index.ts' +} from '../packages/synapse-sdk/src/session/index.ts' // Configuration from environment const PRIVATE_KEY = process.env.PRIVATE_KEY @@ -82,10 +80,10 @@ async function main() { if (!stat.isFile()) { throw new Error(`Path is not a file: ${filePath}`) } - if (stat.size > SIZE_CONSTANTS.MAX_FILE_SIZE_BYTES) { + if (stat.size > SIZE_CONSTANTS.MAX_UPLOAD_SIZE) { throw new Error( `File exceeds maximum size of ${formatBytes( - SIZE_CONSTANTS.MAX_FILE_SIZE_BYTES + SIZE_CONSTANTS.MAX_UPLOAD_SIZE )}: ${filePath} (${formatBytes(stat.size)})` ) } diff --git a/utils/package.json b/utils/package.json index 909d8538f..07bfbc689 100644 --- a/utils/package.json +++ b/utils/package.json @@ -8,6 +8,6 @@ }, "dependencies": { "@filoz/synapse-sdk": "workspace:*", - "ethers": "^6.15.0" + "ethers": "^6.16.0" } } diff --git a/utils/settle-dataset-rails.js b/utils/settle-dataset-rails.js index 0bf8c2879..cc6734b96 100644 --- a/utils/settle-dataset-rails.js +++ b/utils/settle-dataset-rails.js @@ -10,7 +10,7 @@ */ import { ethers } from 'ethers' -import { SETTLEMENT_FEE, Synapse } from '../packages/synapse-sdk/src/index.ts' +import { Synapse } from '../packages/synapse-sdk/src/index.ts' import { getCurrentEpoch } from '../packages/synapse-sdk/src/utils/index.ts' import { WarmStorageService } from '../packages/synapse-sdk/src/warm-storage/index.ts' @@ -147,7 +147,6 @@ async function main() { } console.log(`Checking settlement amounts for ${railsToSettle.length} rail(s)...`) - console.log(`${DIM}Settlement fee: ${ethers.formatEther(SETTLEMENT_FEE)} FIL per transaction${RESET}`) console.log('') let totalSettled = 0n @@ -231,7 +230,6 @@ async function main() { // Check if it's the InsufficientNativeTokenForBurn error if (error.message.includes('InsufficientNativeTokenForBurn')) { console.log(` ${YELLOW}Insufficient FIL for network fee${RESET}`) - console.log(` Required: ${ethers.formatEther(SETTLEMENT_FEE)} FIL`) } console.log('')
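
The `withCDN` change in `warm-storage/service.ts` and the two new tests above encode the same rule: a non-zero `cdnRailId` alone no longer implies CDN is active, because terminating CDN leaves the rail id in place but clears the `withCDN` metadata key. A minimal sketch of the derivation, assuming `METADATA_KEYS.WITH_CDN` resolves to the literal `'withCDN'` key the tests use:

```ts
const METADATA_KEYS = { WITH_CDN: 'withCDN' } as const

interface DataSetLike {
  cdnRailId: bigint
  metadata: Record<string, string>
}

// CDN is considered active only while both signals agree: the rail exists
// AND the metadata key is still present (it is cleared on termination).
function deriveWithCDN(ds: DataSetLike): boolean {
  return ds.cdnRailId > 0n && METADATA_KEYS.WITH_CDN in ds.metadata
}

// Mirrors the two new tests: active CDN vs. terminated CDN.
console.log(deriveWithCDN({ cdnRailId: 51n, metadata: { withCDN: '' } })) // true
console.log(deriveWithCDN({ cdnRailId: 51n, metadata: {} })) // false
```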
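The 'remove approved provider with correct index' and 'throw when removing non-existent provider' tests above hinge on resolving a provider id to its position in the `getApprovedProviders()` result before the removal call. A sketch of that lookup (only the id-to-index resolution is mirrored from the tests; the contract call itself is not shown in this diff):

```ts
// Resolve a provider id to its index in the approved list; throws for
// ids that are not approved, matching the non-existent-provider test.
function findProviderIndex(approved: bigint[], providerId: bigint): number {
  const index = approved.findIndex((id) => id === providerId)
  if (index === -1) {
    throw new Error(`Provider ${providerId} not found in approved list`)
  }
  return index
}

console.log(findProviderIndex([1n, 4n, 7n], 4n)) // 1
// findProviderIndex([1n, 4n, 7n], 9n) // throws
```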
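The data set creation status tests above ('combine PDP server and chain verification status', 'NOT mark as complete when server has not caught up yet') reduce to one rule: chain state alone cannot mark creation complete; the PDP server must also acknowledge the data set. An illustrative predicate — the status shapes here are invented for the sketch, not the SDK's actual types:

```ts
interface ChainStatus {
  mined: boolean // eth_getTransactionReceipt returned a receipt
  dataSetLive: boolean // pdpVerifier.dataSetLive(...) reported true
}

interface ServerStatus {
  known: boolean // the PDP server has indexed the creation
}

// Complete only when chain AND server agree; a PDP server that lags
// behind the chain keeps the status pending instead of flipping it.
function isCreationComplete(chain: ChainStatus, server: ServerStatus): boolean {
  return chain.mined && chain.dataSetLive && server.known
}
```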
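The `types.ts` hunk deprecates the singular `onPieceAdded`/`onPieceConfirmed` callbacks in favour of plural variants that also surface the piece CIDs and the data set id via the new `PieceRecord` shape. A migration sketch (`Hex` and `PieceCID` are stand-ins for the SDK types of the same name):

```ts
type Hex = `0x${string}`
type PieceCID = string // stand-in; the SDK uses a structured CID type

interface PieceRecord {
  pieceId: number
  pieceCid: PieceCID
}

// Before (still accepted, but deprecated):
const legacyCallbacks = {
  onPieceAdded: (tx?: Hex) => console.log('pieces added in', tx),
  onPieceConfirmed: (pieceIds: number[]) => console.log('confirmed ids', pieceIds),
}

// After: the plural callbacks carry the CIDs and the data set id as well.
const callbacks = {
  onPiecesAdded: (tx?: Hex, pieces?: { pieceCid: PieceCID }[]) =>
    console.log('pieces added in', tx, pieces?.map((p) => p.pieceCid)),
  onPiecesConfirmed: (dataSetId: number, pieces: PieceRecord[]) =>
    console.log(`data set ${dataSetId} confirmed`, pieces.map((p) => p.pieceId)),
}
```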
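`SETTLEMENT_FEE` is removed from `constants.ts` (and from `settle-dataset-rails.js`) precisely because a hardcoded copy of the Payments contract's `NETWORK_FEE` can drift after a contract upgrade. If the contract declares `NETWORK_FEE` as a public constant, Solidity auto-generates a getter, so a script could read the live value instead — a sketch under that assumption, using ethers v6 as elsewhere in `utils/`:

```ts
import { ethers } from 'ethers'

// Assumption: the Payments contract exposes NETWORK_FEE() as a view getter.
const PAYMENTS_ABI = ['function NETWORK_FEE() view returns (uint256)']

async function fetchNetworkFee(provider: ethers.Provider, paymentsAddress: string): Promise<bigint> {
  const payments = new ethers.Contract(paymentsAddress, PAYMENTS_ABI, provider)
  return await payments.NETWORK_FEE()
}

// Usage: ethers.formatEther(await fetchNetworkFee(provider, paymentsAddress))
```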
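The `pnpm-workspace.yaml` hunk introduces a shared dependency `catalog:` plus supply-chain guards: `minimumReleaseAge: 2880` (in minutes, so newly published versions wait roughly two days before they are installable) and a `no-downgrade` trust policy with explicit exclusions. Workspace packages opt into catalog versions with pnpm's `catalog:` protocol; a hypothetical `package.json` fragment:

```json
{
  "devDependencies": {
    "typescript": "catalog:",
    "viem": "catalog:"
  }
}
```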