From 53fd92a30a1c1b09c2d5f88834bc1a35c1fb294b Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 6 Mar 2026 23:13:47 -0800 Subject: [PATCH 1/6] feat(obsidian): add Obsidian integration with 15 tools (#3455) * feat(obsidian): add Obsidian integration with 15 tools * fix(obsidian): encode path segments individually to preserve slashes * improvement(obsidian): add type re-exports and improve output descriptions * fix(obsidian): remove unreachable 404 handling from transformResponse --- apps/docs/components/icons.tsx | 149 ++++++++ apps/docs/components/ui/icon-mapping.ts | 2 + apps/docs/content/docs/en/tools/meta.json | 1 + apps/docs/content/docs/en/tools/obsidian.mdx | 323 ++++++++++++++++++ apps/sim/blocks/blocks/obsidian.ts | 270 +++++++++++++++ apps/sim/blocks/registry.ts | 2 + apps/sim/components/icons.tsx | 149 ++++++++ apps/sim/tools/obsidian/append_active.ts | 66 ++++ apps/sim/tools/obsidian/append_note.ts | 74 ++++ .../tools/obsidian/append_periodic_note.ts | 78 +++++ apps/sim/tools/obsidian/create_note.ts | 74 ++++ apps/sim/tools/obsidian/delete_note.ts | 66 ++++ apps/sim/tools/obsidian/execute_command.ts | 70 ++++ apps/sim/tools/obsidian/get_active.ts | 59 ++++ apps/sim/tools/obsidian/get_note.ts | 68 ++++ apps/sim/tools/obsidian/get_periodic_note.ts | 67 ++++ apps/sim/tools/obsidian/index.ts | 16 + apps/sim/tools/obsidian/list_commands.ts | 68 ++++ apps/sim/tools/obsidian/list_files.ts | 76 +++++ apps/sim/tools/obsidian/open_file.ts | 73 ++++ apps/sim/tools/obsidian/patch_active.ts | 107 ++++++ apps/sim/tools/obsidian/patch_note.ts | 118 +++++++ apps/sim/tools/obsidian/search.ts | 95 ++++++ apps/sim/tools/obsidian/types.ts | 190 +++++++++++ apps/sim/tools/registry.ts | 32 ++ 25 files changed, 2293 insertions(+) create mode 100644 apps/docs/content/docs/en/tools/obsidian.mdx create mode 100644 apps/sim/blocks/blocks/obsidian.ts create mode 100644 apps/sim/tools/obsidian/append_active.ts create mode 100644 apps/sim/tools/obsidian/append_note.ts create mode 
100644 apps/sim/tools/obsidian/append_periodic_note.ts create mode 100644 apps/sim/tools/obsidian/create_note.ts create mode 100644 apps/sim/tools/obsidian/delete_note.ts create mode 100644 apps/sim/tools/obsidian/execute_command.ts create mode 100644 apps/sim/tools/obsidian/get_active.ts create mode 100644 apps/sim/tools/obsidian/get_note.ts create mode 100644 apps/sim/tools/obsidian/get_periodic_note.ts create mode 100644 apps/sim/tools/obsidian/index.ts create mode 100644 apps/sim/tools/obsidian/list_commands.ts create mode 100644 apps/sim/tools/obsidian/list_files.ts create mode 100644 apps/sim/tools/obsidian/open_file.ts create mode 100644 apps/sim/tools/obsidian/patch_active.ts create mode 100644 apps/sim/tools/obsidian/patch_note.ts create mode 100644 apps/sim/tools/obsidian/search.ts create mode 100644 apps/sim/tools/obsidian/types.ts diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 5525e048cfa..41fa14fa22a 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -710,6 +710,155 @@ export function PerplexityIcon(props: SVGProps) { ) } +export function ObsidianIcon(props: SVGProps) { + const id = useId() + const bl = `${id}-bl` + const tr = `${id}-tr` + const tl = `${id}-tl` + const br = `${id}-br` + const te = `${id}-te` + const le = `${id}-le` + const be = `${id}-be` + const me = `${id}-me` + const clip = `${id}-clip` + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) +} + export function NotionIcon(props: SVGProps) { return ( diff --git a/apps/docs/components/ui/icon-mapping.ts b/apps/docs/components/ui/icon-mapping.ts index 7b2ae7e9278..f9b3e42f0a0 100644 --- a/apps/docs/components/ui/icon-mapping.ts +++ b/apps/docs/components/ui/icon-mapping.ts @@ -103,6 +103,7 @@ import { MySQLIcon, Neo4jIcon, NotionIcon, + ObsidianIcon, OnePasswordIcon, OpenAIIcon, OutlookIcon, @@ -265,6 +266,7 @@ export const blockTypeToIconMap: Record = { 
mysql: MySQLIcon, neo4j: Neo4jIcon, notion_v2: NotionIcon, + obsidian: ObsidianIcon, onedrive: MicrosoftOneDriveIcon, onepassword: OnePasswordIcon, openai: OpenAIIcon, diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index f8d851049fe..a5de340aaa7 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -98,6 +98,7 @@ "mysql", "neo4j", "notion", + "obsidian", "onedrive", "onepassword", "openai", diff --git a/apps/docs/content/docs/en/tools/obsidian.mdx b/apps/docs/content/docs/en/tools/obsidian.mdx new file mode 100644 index 00000000000..c2b28f74cbf --- /dev/null +++ b/apps/docs/content/docs/en/tools/obsidian.mdx @@ -0,0 +1,323 @@ +--- +title: Obsidian +description: Interact with your Obsidian vault via the Local REST API +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +## Usage Instructions + +Read, create, update, search, and delete notes in your Obsidian vault. Manage periodic notes, execute commands, and patch content at specific locations. Requires the Obsidian Local REST API plugin. 
+ + + +## Tools + +### `obsidian_append_active` + +Append content to the currently active file in Obsidian + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `content` | string | Yes | Markdown content to append to the active file | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `appended` | boolean | Whether content was successfully appended | + +### `obsidian_append_note` + +Append content to an existing note in your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path to the note relative to vault root \(e.g. "folder/note.md"\) | +| `content` | string | Yes | Markdown content to append to the note | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `filename` | string | Path of the note | +| `appended` | boolean | Whether content was successfully appended | + +### `obsidian_append_periodic_note` + +Append content to the current periodic note (daily, weekly, monthly, quarterly, or yearly). Creates the note if it does not exist. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `period` | string | Yes | Period type: daily, weekly, monthly, quarterly, or yearly | +| `content` | string | Yes | Markdown content to append to the periodic note | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `period` | string | Period type of the note | +| `appended` | boolean | Whether content was successfully appended | + +### `obsidian_create_note` + +Create or replace a note in your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path for the note relative to vault root \(e.g. 
"folder/note.md"\) | +| `content` | string | Yes | Markdown content for the note | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `filename` | string | Path of the created note | +| `created` | boolean | Whether the note was successfully created | + +### `obsidian_delete_note` + +Delete a note from your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path to the note to delete relative to vault root | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `filename` | string | Path of the deleted note | +| `deleted` | boolean | Whether the note was successfully deleted | + +### `obsidian_execute_command` + +Execute a command in Obsidian (e.g. open daily note, toggle sidebar) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `commandId` | string | Yes | ID of the command to execute \(use List Commands operation to discover available commands\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `commandId` | string | ID of the executed command | +| `executed` | boolean | Whether the command was successfully executed | + +### `obsidian_get_active` + +Retrieve the content of the currently active file in Obsidian + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST 
API | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Markdown content of the active file | +| `filename` | string | Path to the active file | + +### `obsidian_get_note` + +Retrieve the content of a note from your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path to the note relative to vault root \(e.g. "folder/note.md"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Markdown content of the note | +| `filename` | string | Path to the note | + +### `obsidian_get_periodic_note` + +Retrieve the current periodic note (daily, weekly, monthly, quarterly, or yearly) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `period` | string | Yes | Period type: daily, weekly, monthly, quarterly, or yearly | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Markdown content of the periodic note | +| `period` | string | Period type of the note | + +### `obsidian_list_commands` + +List all available commands in Obsidian + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `commands` | json | List of available 
commands with IDs and names | +| ↳ `id` | string | Command identifier | +| ↳ `name` | string | Human-readable command name | + +### `obsidian_list_files` + +List files and directories in your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `path` | string | No | Directory path relative to vault root. Leave empty to list root. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `files` | json | List of files and directories | +| ↳ `path` | string | File or directory path | +| ↳ `type` | string | Whether the entry is a file or directory | + +### `obsidian_open_file` + +Open a file in the Obsidian UI (creates the file if it does not exist) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path to the file relative to vault root | +| `newLeaf` | boolean | No | Whether to open the file in a new leaf/tab | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `filename` | string | Path of the opened file | +| `opened` | boolean | Whether the file was successfully opened | + +### `obsidian_patch_active` + +Insert or replace content at a specific heading, block reference, or frontmatter field in the active file + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `content` | string | Yes | Content to 
insert at the target location | +| `operation` | string | Yes | How to insert content: append, prepend, or replace | +| `targetType` | string | Yes | Type of target: heading, block, or frontmatter | +| `target` | string | Yes | Target identifier \(heading text, block reference ID, or frontmatter field name\) | +| `targetDelimiter` | string | No | Delimiter for nested headings \(default: "::"\) | +| `trimTargetWhitespace` | boolean | No | Whether to trim whitespace from target before matching \(default: false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `patched` | boolean | Whether the active file was successfully patched | + +### `obsidian_patch_note` + +Insert or replace content at a specific heading, block reference, or frontmatter field in a note + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `filename` | string | Yes | Path to the note relative to vault root \(e.g. 
"folder/note.md"\) | +| `content` | string | Yes | Content to insert at the target location | +| `operation` | string | Yes | How to insert content: append, prepend, or replace | +| `targetType` | string | Yes | Type of target: heading, block, or frontmatter | +| `target` | string | Yes | Target identifier \(heading text, block reference ID, or frontmatter field name\) | +| `targetDelimiter` | string | No | Delimiter for nested headings \(default: "::"\) | +| `trimTargetWhitespace` | boolean | No | Whether to trim whitespace from target before matching \(default: false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `filename` | string | Path of the patched note | +| `patched` | boolean | Whether the note was successfully patched | + +### `obsidian_search` + +Search for text across notes in your Obsidian vault + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | API key from Obsidian Local REST API plugin settings | +| `baseUrl` | string | Yes | Base URL for the Obsidian Local REST API | +| `query` | string | Yes | Text to search for across vault notes | +| `contextLength` | number | No | Number of characters of context around each match \(default: 100\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `results` | json | Search results with filenames, scores, and matching contexts | +| ↳ `filename` | string | Path to the matching note | +| ↳ `score` | number | Relevance score | +| ↳ `matches` | json | Matching text contexts | +| ↳ `context` | string | Text surrounding the match | + + diff --git a/apps/sim/blocks/blocks/obsidian.ts b/apps/sim/blocks/blocks/obsidian.ts new file mode 100644 index 00000000000..533c80fc654 --- /dev/null +++ b/apps/sim/blocks/blocks/obsidian.ts @@ -0,0 +1,270 @@ +import { ObsidianIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' 
+import { AuthMode } from '@/blocks/types' + +export const ObsidianBlock: BlockConfig = { + type: 'obsidian', + name: 'Obsidian', + description: 'Interact with your Obsidian vault via the Local REST API', + longDescription: + 'Read, create, update, search, and delete notes in your Obsidian vault. Manage periodic notes, execute commands, and patch content at specific locations. Requires the Obsidian Local REST API plugin.', + docsLink: 'https://docs.sim.ai/tools/obsidian', + category: 'tools', + bgColor: '#0F0F0F', + icon: ObsidianIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'List Files', id: 'list_files' }, + { label: 'Get Note', id: 'get_note' }, + { label: 'Create Note', id: 'create_note' }, + { label: 'Append to Note', id: 'append_note' }, + { label: 'Patch Note', id: 'patch_note' }, + { label: 'Delete Note', id: 'delete_note' }, + { label: 'Search', id: 'search' }, + { label: 'Get Active File', id: 'get_active' }, + { label: 'Append to Active File', id: 'append_active' }, + { label: 'Patch Active File', id: 'patch_active' }, + { label: 'Open File', id: 'open_file' }, + { label: 'List Commands', id: 'list_commands' }, + { label: 'Execute Command', id: 'execute_command' }, + { label: 'Get Periodic Note', id: 'get_periodic_note' }, + { label: 'Append to Periodic Note', id: 'append_periodic_note' }, + ], + value: () => 'get_note', + }, + { + id: 'baseUrl', + title: 'Base URL', + type: 'short-input', + placeholder: 'https://127.0.0.1:27124', + value: () => 'https://127.0.0.1:27124', + required: true, + }, + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + placeholder: 'Enter your Obsidian Local REST API key', + password: true, + required: true, + }, + { + id: 'path', + title: 'Directory Path', + type: 'short-input', + placeholder: 'Leave empty for vault root (e.g. 
"Projects/notes")', + condition: { field: 'operation', value: 'list_files' }, + }, + { + id: 'filename', + title: 'Note Path', + type: 'short-input', + placeholder: 'folder/note.md', + condition: { + field: 'operation', + value: ['get_note', 'create_note', 'append_note', 'patch_note', 'delete_note', 'open_file'], + }, + required: { + field: 'operation', + value: ['get_note', 'create_note', 'append_note', 'patch_note', 'delete_note', 'open_file'], + }, + }, + { + id: 'content', + title: 'Content', + type: 'long-input', + placeholder: 'Markdown content', + condition: { + field: 'operation', + value: [ + 'create_note', + 'append_note', + 'patch_note', + 'append_active', + 'patch_active', + 'append_periodic_note', + ], + }, + required: { + field: 'operation', + value: [ + 'create_note', + 'append_note', + 'patch_note', + 'append_active', + 'patch_active', + 'append_periodic_note', + ], + }, + }, + { + id: 'patchOperation', + title: 'Patch Operation', + type: 'dropdown', + options: [ + { label: 'Append', id: 'append' }, + { label: 'Prepend', id: 'prepend' }, + { label: 'Replace', id: 'replace' }, + ], + value: () => 'append', + condition: { field: 'operation', value: ['patch_note', 'patch_active'] }, + required: { field: 'operation', value: ['patch_note', 'patch_active'] }, + }, + { + id: 'targetType', + title: 'Target Type', + type: 'dropdown', + options: [ + { label: 'Heading', id: 'heading' }, + { label: 'Block Reference', id: 'block' }, + { label: 'Frontmatter', id: 'frontmatter' }, + ], + value: () => 'heading', + condition: { field: 'operation', value: ['patch_note', 'patch_active'] }, + required: { field: 'operation', value: ['patch_note', 'patch_active'] }, + }, + { + id: 'target', + title: 'Target', + type: 'short-input', + placeholder: 'Heading text, block ID, or frontmatter field', + condition: { field: 'operation', value: ['patch_note', 'patch_active'] }, + required: { field: 'operation', value: ['patch_note', 'patch_active'] }, + }, + { + id: 
'targetDelimiter', + title: 'Target Delimiter', + type: 'short-input', + placeholder: ':: (default)', + condition: { field: 'operation', value: ['patch_note', 'patch_active'] }, + mode: 'advanced', + }, + { + id: 'trimTargetWhitespace', + title: 'Trim Target Whitespace', + type: 'switch', + condition: { field: 'operation', value: ['patch_note', 'patch_active'] }, + mode: 'advanced', + }, + { + id: 'query', + title: 'Search Query', + type: 'short-input', + placeholder: 'Text to search for', + condition: { field: 'operation', value: 'search' }, + required: { field: 'operation', value: 'search' }, + }, + { + id: 'contextLength', + title: 'Context Length', + type: 'short-input', + placeholder: '100', + condition: { field: 'operation', value: 'search' }, + mode: 'advanced', + }, + { + id: 'commandId', + title: 'Command ID', + type: 'short-input', + placeholder: 'e.g. daily-notes:open-today', + condition: { field: 'operation', value: 'execute_command' }, + required: { field: 'operation', value: 'execute_command' }, + }, + { + id: 'newLeaf', + title: 'Open in New Tab', + type: 'switch', + condition: { field: 'operation', value: 'open_file' }, + mode: 'advanced', + }, + { + id: 'period', + title: 'Period', + type: 'dropdown', + options: [ + { label: 'Daily', id: 'daily' }, + { label: 'Weekly', id: 'weekly' }, + { label: 'Monthly', id: 'monthly' }, + { label: 'Quarterly', id: 'quarterly' }, + { label: 'Yearly', id: 'yearly' }, + ], + value: () => 'daily', + condition: { field: 'operation', value: ['get_periodic_note', 'append_periodic_note'] }, + required: { field: 'operation', value: ['get_periodic_note', 'append_periodic_note'] }, + }, + ], + + tools: { + access: [ + 'obsidian_append_active', + 'obsidian_append_note', + 'obsidian_append_periodic_note', + 'obsidian_create_note', + 'obsidian_delete_note', + 'obsidian_execute_command', + 'obsidian_get_active', + 'obsidian_get_note', + 'obsidian_get_periodic_note', + 'obsidian_list_commands', + 'obsidian_list_files', + 
'obsidian_open_file', + 'obsidian_patch_active', + 'obsidian_patch_note', + 'obsidian_search', + ], + config: { + tool: (params) => `obsidian_${params.operation}`, + params: (params) => { + const result: Record = {} + if (params.contextLength) { + result.contextLength = Number(params.contextLength) + } + if (params.patchOperation) { + result.operation = params.patchOperation + } + return result + }, + }, + }, + + inputs: { + operation: { type: 'string', description: 'Operation to perform' }, + baseUrl: { type: 'string', description: 'Base URL for the Obsidian Local REST API' }, + apiKey: { type: 'string', description: 'API key for authentication' }, + filename: { type: 'string', description: 'Path to the note relative to vault root' }, + content: { type: 'string', description: 'Markdown content for the note' }, + path: { type: 'string', description: 'Directory path to list' }, + query: { type: 'string', description: 'Text to search for' }, + contextLength: { type: 'number', description: 'Characters of context around matches' }, + commandId: { type: 'string', description: 'ID of the command to execute' }, + patchOperation: { type: 'string', description: 'Patch operation: append, prepend, or replace' }, + targetType: { type: 'string', description: 'Target type: heading, block, or frontmatter' }, + target: { type: 'string', description: 'Target identifier for patch operations' }, + targetDelimiter: { type: 'string', description: 'Delimiter for nested headings' }, + trimTargetWhitespace: { type: 'boolean', description: 'Trim whitespace from target' }, + newLeaf: { type: 'boolean', description: 'Open file in new tab' }, + period: { type: 'string', description: 'Periodic note period type' }, + }, + + outputs: { + content: { type: 'string', description: 'Markdown content of the note' }, + filename: { type: 'string', description: 'Path to the note' }, + files: { type: 'json', description: 'List of files and directories (path, type)' }, + results: { type: 'json', 
description: 'Search results (filename, score, matches)' }, + commands: { type: 'json', description: 'List of available commands (id, name)' }, + created: { type: 'boolean', description: 'Whether the note was created' }, + appended: { type: 'boolean', description: 'Whether content was appended' }, + patched: { type: 'boolean', description: 'Whether content was patched' }, + deleted: { type: 'boolean', description: 'Whether the note was deleted' }, + executed: { type: 'boolean', description: 'Whether the command was executed' }, + opened: { type: 'boolean', description: 'Whether the file was opened' }, + commandId: { type: 'string', description: 'ID of the executed command' }, + period: { type: 'string', description: 'Period type of the periodic note' }, + }, +} diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index 7ff0b918dd1..23c47186ca4 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -113,6 +113,7 @@ import { MySQLBlock } from '@/blocks/blocks/mysql' import { Neo4jBlock } from '@/blocks/blocks/neo4j' import { NoteBlock } from '@/blocks/blocks/note' import { NotionBlock, NotionV2Block } from '@/blocks/blocks/notion' +import { ObsidianBlock } from '@/blocks/blocks/obsidian' import { OneDriveBlock } from '@/blocks/blocks/onedrive' import { OnePasswordBlock } from '@/blocks/blocks/onepassword' import { OpenAIBlock } from '@/blocks/blocks/openai' @@ -320,6 +321,7 @@ export const registry: Record = { note: NoteBlock, notion: NotionBlock, notion_v2: NotionV2Block, + obsidian: ObsidianBlock, onepassword: OnePasswordBlock, onedrive: OneDriveBlock, openai: OpenAIBlock, diff --git a/apps/sim/components/icons.tsx b/apps/sim/components/icons.tsx index 5525e048cfa..41fa14fa22a 100644 --- a/apps/sim/components/icons.tsx +++ b/apps/sim/components/icons.tsx @@ -710,6 +710,155 @@ export function PerplexityIcon(props: SVGProps) { ) } +export function ObsidianIcon(props: SVGProps) { + const id = useId() + const bl = `${id}-bl` + 
const tr = `${id}-tr` + const tl = `${id}-tl` + const br = `${id}-br` + const te = `${id}-te` + const le = `${id}-le` + const be = `${id}-be` + const me = `${id}-me` + const clip = `${id}-clip` + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) +} + export function NotionIcon(props: SVGProps) { return ( diff --git a/apps/sim/tools/obsidian/append_active.ts b/apps/sim/tools/obsidian/append_active.ts new file mode 100644 index 00000000000..49ec7378d75 --- /dev/null +++ b/apps/sim/tools/obsidian/append_active.ts @@ -0,0 +1,66 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianAppendActiveParams, ObsidianAppendActiveResponse } from './types' + +export const appendActiveTool: ToolConfig< + ObsidianAppendActiveParams, + ObsidianAppendActiveResponse +> = { + id: 'obsidian_append_active', + name: 'Obsidian Append to Active File', + description: 'Append content to the currently active file in Obsidian', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Markdown content to append to the active file', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/active/` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + }), + body: (params) => params.content, + }, + + transformResponse: async (response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to append to active file: ${error.message ?? 
response.statusText}`) + } + return { + success: true, + output: { + appended: true, + }, + } + }, + + outputs: { + appended: { + type: 'boolean', + description: 'Whether content was successfully appended', + }, + }, +} diff --git a/apps/sim/tools/obsidian/append_note.ts b/apps/sim/tools/obsidian/append_note.ts new file mode 100644 index 00000000000..2f0fbed8094 --- /dev/null +++ b/apps/sim/tools/obsidian/append_note.ts @@ -0,0 +1,74 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianAppendNoteParams, ObsidianAppendNoteResponse } from './types' + +export const appendNoteTool: ToolConfig = { + id: 'obsidian_append_note', + name: 'Obsidian Append to Note', + description: 'Append content to an existing note in your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path to the note relative to vault root (e.g. "folder/note.md")', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Markdown content to append to the note', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/vault/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + }), + body: (params) => params.content, + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to append to note: ${error.message ?? 
response.statusText}`) + } + return { + success: true, + output: { + filename: params?.filename ?? '', + appended: true, + }, + } + }, + + outputs: { + filename: { + type: 'string', + description: 'Path of the note', + }, + appended: { + type: 'boolean', + description: 'Whether content was successfully appended', + }, + }, +} diff --git a/apps/sim/tools/obsidian/append_periodic_note.ts b/apps/sim/tools/obsidian/append_periodic_note.ts new file mode 100644 index 00000000000..50b43ea18cf --- /dev/null +++ b/apps/sim/tools/obsidian/append_periodic_note.ts @@ -0,0 +1,78 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianAppendPeriodicNoteParams, ObsidianAppendPeriodicNoteResponse } from './types' + +export const appendPeriodicNoteTool: ToolConfig< + ObsidianAppendPeriodicNoteParams, + ObsidianAppendPeriodicNoteResponse +> = { + id: 'obsidian_append_periodic_note', + name: 'Obsidian Append to Periodic Note', + description: + 'Append content to the current periodic note (daily, weekly, monthly, quarterly, or yearly). 
Creates the note if it does not exist.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + period: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Period type: daily, weekly, monthly, quarterly, or yearly', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Markdown content to append to the periodic note', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/periodic/${encodeURIComponent(params.period)}/` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + }), + body: (params) => params.content, + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to append to periodic note: ${error.message ?? response.statusText}`) + } + return { + success: true, + output: { + period: params?.period ?? 
'', + appended: true, + }, + } + }, + + outputs: { + period: { + type: 'string', + description: 'Period type of the note', + }, + appended: { + type: 'boolean', + description: 'Whether content was successfully appended', + }, + }, +} diff --git a/apps/sim/tools/obsidian/create_note.ts b/apps/sim/tools/obsidian/create_note.ts new file mode 100644 index 00000000000..fed38cca8f6 --- /dev/null +++ b/apps/sim/tools/obsidian/create_note.ts @@ -0,0 +1,74 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianCreateNoteParams, ObsidianCreateNoteResponse } from './types' + +export const createNoteTool: ToolConfig = { + id: 'obsidian_create_note', + name: 'Obsidian Create Note', + description: 'Create or replace a note in your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path for the note relative to vault root (e.g. "folder/note.md")', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Markdown content for the note', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/vault/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}` + }, + method: 'PUT', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + }), + body: (params) => params.content, + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to create note: ${error.message ?? 
response.statusText}`) + } + return { + success: true, + output: { + filename: params?.filename ?? '', + created: true, + }, + } + }, + + outputs: { + filename: { + type: 'string', + description: 'Path of the created note', + }, + created: { + type: 'boolean', + description: 'Whether the note was successfully created', + }, + }, +} diff --git a/apps/sim/tools/obsidian/delete_note.ts b/apps/sim/tools/obsidian/delete_note.ts new file mode 100644 index 00000000000..a6911d85e7f --- /dev/null +++ b/apps/sim/tools/obsidian/delete_note.ts @@ -0,0 +1,66 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianDeleteNoteParams, ObsidianDeleteNoteResponse } from './types' + +export const deleteNoteTool: ToolConfig = { + id: 'obsidian_delete_note', + name: 'Obsidian Delete Note', + description: 'Delete a note from your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path to the note to delete relative to vault root', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/vault/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}` + }, + method: 'DELETE', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + }), + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to delete note: ${error.message ?? response.statusText}`) + } + return { + success: true, + output: { + filename: params?.filename ?? 
'', + deleted: true, + }, + } + }, + + outputs: { + filename: { + type: 'string', + description: 'Path of the deleted note', + }, + deleted: { + type: 'boolean', + description: 'Whether the note was successfully deleted', + }, + }, +} diff --git a/apps/sim/tools/obsidian/execute_command.ts b/apps/sim/tools/obsidian/execute_command.ts new file mode 100644 index 00000000000..240711b6300 --- /dev/null +++ b/apps/sim/tools/obsidian/execute_command.ts @@ -0,0 +1,70 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianExecuteCommandParams, ObsidianExecuteCommandResponse } from './types' + +export const executeCommandTool: ToolConfig< + ObsidianExecuteCommandParams, + ObsidianExecuteCommandResponse +> = { + id: 'obsidian_execute_command', + name: 'Obsidian Execute Command', + description: 'Execute a command in Obsidian (e.g. open daily note, toggle sidebar)', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + commandId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'ID of the command to execute (use List Commands operation to discover available commands)', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/commands/${encodeURIComponent(params.commandId.trim())}/` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + }), + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to execute command: ${error.message ?? response.statusText}`) + } + return { + success: true, + output: { + commandId: params?.commandId ?? 
'', + executed: true, + }, + } + }, + + outputs: { + commandId: { + type: 'string', + description: 'ID of the executed command', + }, + executed: { + type: 'boolean', + description: 'Whether the command was successfully executed', + }, + }, +} diff --git a/apps/sim/tools/obsidian/get_active.ts b/apps/sim/tools/obsidian/get_active.ts new file mode 100644 index 00000000000..56a838d6716 --- /dev/null +++ b/apps/sim/tools/obsidian/get_active.ts @@ -0,0 +1,59 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianGetActiveParams, ObsidianGetActiveResponse } from './types' + +export const getActiveTool: ToolConfig = { + id: 'obsidian_get_active', + name: 'Obsidian Get Active File', + description: 'Retrieve the content of the currently active file in Obsidian', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/active/` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'application/vnd.olrapi.note+json', + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + return { + success: true, + output: { + content: data.content ?? '', + filename: data.path ?? 
null, + }, + } + }, + + outputs: { + content: { + type: 'string', + description: 'Markdown content of the active file', + }, + filename: { + type: 'string', + description: 'Path to the active file', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/obsidian/get_note.ts b/apps/sim/tools/obsidian/get_note.ts new file mode 100644 index 00000000000..118cb7fa6c9 --- /dev/null +++ b/apps/sim/tools/obsidian/get_note.ts @@ -0,0 +1,68 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianGetNoteParams, ObsidianGetNoteResponse } from './types' + +export const getNoteTool: ToolConfig = { + id: 'obsidian_get_note', + name: 'Obsidian Get Note', + description: 'Retrieve the content of a note from your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path to the note relative to vault root (e.g. "folder/note.md")', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/vault/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'text/markdown', + }), + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to get note: ${error.message ?? response.statusText}`) + } + const content = await response.text() + return { + success: true, + output: { + content, + filename: params?.filename ?? 
'', + }, + } + }, + + outputs: { + content: { + type: 'string', + description: 'Markdown content of the note', + }, + filename: { + type: 'string', + description: 'Path to the note', + }, + }, +} diff --git a/apps/sim/tools/obsidian/get_periodic_note.ts b/apps/sim/tools/obsidian/get_periodic_note.ts new file mode 100644 index 00000000000..d37b3169ac4 --- /dev/null +++ b/apps/sim/tools/obsidian/get_periodic_note.ts @@ -0,0 +1,67 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianGetPeriodicNoteParams, ObsidianGetPeriodicNoteResponse } from './types' + +export const getPeriodicNoteTool: ToolConfig< + ObsidianGetPeriodicNoteParams, + ObsidianGetPeriodicNoteResponse +> = { + id: 'obsidian_get_periodic_note', + name: 'Obsidian Get Periodic Note', + description: 'Retrieve the current periodic note (daily, weekly, monthly, quarterly, or yearly)', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + period: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Period type: daily, weekly, monthly, quarterly, or yearly', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/periodic/${encodeURIComponent(params.period)}/` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'text/markdown', + }), + }, + + transformResponse: async (response, params) => { + const content = await response.text() + return { + success: true, + output: { + content, + period: params?.period ?? 
'', + }, + } + }, + + outputs: { + content: { + type: 'string', + description: 'Markdown content of the periodic note', + }, + period: { + type: 'string', + description: 'Period type of the note', + }, + }, +} diff --git a/apps/sim/tools/obsidian/index.ts b/apps/sim/tools/obsidian/index.ts new file mode 100644 index 00000000000..43327f8971d --- /dev/null +++ b/apps/sim/tools/obsidian/index.ts @@ -0,0 +1,16 @@ +export { appendActiveTool as obsidianAppendActiveTool } from './append_active' +export { appendNoteTool as obsidianAppendNoteTool } from './append_note' +export { appendPeriodicNoteTool as obsidianAppendPeriodicNoteTool } from './append_periodic_note' +export { createNoteTool as obsidianCreateNoteTool } from './create_note' +export { deleteNoteTool as obsidianDeleteNoteTool } from './delete_note' +export { executeCommandTool as obsidianExecuteCommandTool } from './execute_command' +export { getActiveTool as obsidianGetActiveTool } from './get_active' +export { getNoteTool as obsidianGetNoteTool } from './get_note' +export { getPeriodicNoteTool as obsidianGetPeriodicNoteTool } from './get_periodic_note' +export { listCommandsTool as obsidianListCommandsTool } from './list_commands' +export { listFilesTool as obsidianListFilesTool } from './list_files' +export { openFileTool as obsidianOpenFileTool } from './open_file' +export { patchActiveTool as obsidianPatchActiveTool } from './patch_active' +export { patchNoteTool as obsidianPatchNoteTool } from './patch_note' +export { searchTool as obsidianSearchTool } from './search' +export * from './types' diff --git a/apps/sim/tools/obsidian/list_commands.ts b/apps/sim/tools/obsidian/list_commands.ts new file mode 100644 index 00000000000..71394db09d0 --- /dev/null +++ b/apps/sim/tools/obsidian/list_commands.ts @@ -0,0 +1,68 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianListCommandsParams, ObsidianListCommandsResponse } from './types' + +export const listCommandsTool: ToolConfig< + 
ObsidianListCommandsParams, + ObsidianListCommandsResponse +> = { + id: 'obsidian_list_commands', + name: 'Obsidian List Commands', + description: 'List all available commands in Obsidian', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/commands/` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'application/json', + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to list commands: ${error.message ?? response.statusText}`) + } + const data = await response.json() + return { + success: true, + output: { + commands: + data.commands?.map((cmd: { id: string; name: string }) => ({ + id: cmd.id ?? '', + name: cmd.name ?? '', + })) ?? 
[], + }, + } + }, + + outputs: { + commands: { + type: 'json', + description: 'List of available commands with IDs and names', + properties: { + id: { type: 'string', description: 'Command identifier' }, + name: { type: 'string', description: 'Human-readable command name' }, + }, + }, + }, +} diff --git a/apps/sim/tools/obsidian/list_files.ts b/apps/sim/tools/obsidian/list_files.ts new file mode 100644 index 00000000000..6c83880c14a --- /dev/null +++ b/apps/sim/tools/obsidian/list_files.ts @@ -0,0 +1,76 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianListFilesParams, ObsidianListFilesResponse } from './types' + +export const listFilesTool: ToolConfig = { + id: 'obsidian_list_files', + name: 'Obsidian List Files', + description: 'List files and directories in your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + path: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Directory path relative to vault root. Leave empty to list root.', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + const path = params.path + ? `/${params.path.trim().split('/').map(encodeURIComponent).join('/')}/` + : '/' + return `${base}/vault${path}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'application/json', + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to list files: ${error.message ?? 
response.statusText}`) + } + const data = await response.json() + return { + success: true, + output: { + files: + data.files?.map((f: string | { path: string; type: string }) => { + if (typeof f === 'string') { + return { path: f, type: f.endsWith('/') ? 'directory' : 'file' } + } + return { path: f.path ?? '', type: f.type ?? 'file' } + }) ?? [], + }, + } + }, + + outputs: { + files: { + type: 'json', + description: 'List of files and directories', + properties: { + path: { type: 'string', description: 'File or directory path' }, + type: { type: 'string', description: 'Whether the entry is a file or directory' }, + }, + }, + }, +} diff --git a/apps/sim/tools/obsidian/open_file.ts b/apps/sim/tools/obsidian/open_file.ts new file mode 100644 index 00000000000..4100ce1025e --- /dev/null +++ b/apps/sim/tools/obsidian/open_file.ts @@ -0,0 +1,73 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianOpenFileParams, ObsidianOpenFileResponse } from './types' + +export const openFileTool: ToolConfig = { + id: 'obsidian_open_file', + name: 'Obsidian Open File', + description: 'Open a file in the Obsidian UI (creates the file if it does not exist)', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path to the file relative to vault root', + }, + newLeaf: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether to open the file in a new leaf/tab', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + const leafParam = params.newLeaf ? 
'?newLeaf=true' : '' + return `${base}/open/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}${leafParam}` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + }), + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to open file: ${error.message ?? response.statusText}`) + } + return { + success: true, + output: { + filename: params?.filename ?? '', + opened: true, + }, + } + }, + + outputs: { + filename: { + type: 'string', + description: 'Path of the opened file', + }, + opened: { + type: 'boolean', + description: 'Whether the file was successfully opened', + }, + }, +} diff --git a/apps/sim/tools/obsidian/patch_active.ts b/apps/sim/tools/obsidian/patch_active.ts new file mode 100644 index 00000000000..ae72a71218b --- /dev/null +++ b/apps/sim/tools/obsidian/patch_active.ts @@ -0,0 +1,107 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianPatchActiveParams, ObsidianPatchActiveResponse } from './types' + +export const patchActiveTool: ToolConfig = { + id: 'obsidian_patch_active', + name: 'Obsidian Patch Active File', + description: + 'Insert or replace content at a specific heading, block reference, or frontmatter field in the active file', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Content to insert at the target location', + }, + operation: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'How to insert content: append, prepend, or replace', + 
}, + targetType: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Type of target: heading, block, or frontmatter', + }, + target: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'Target identifier (heading text, block reference ID, or frontmatter field name)', + }, + targetDelimiter: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Delimiter for nested headings (default: "::")', + }, + trimTargetWhitespace: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether to trim whitespace from target before matching (default: false)', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/active/` + }, + method: 'PATCH', + headers: (params) => { + const headers: Record = { + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + Operation: params.operation, + 'Target-Type': params.targetType, + Target: encodeURIComponent(params.target), + } + if (params.targetDelimiter) { + headers['Target-Delimiter'] = params.targetDelimiter + } + if (params.trimTargetWhitespace) { + headers['Trim-Target-Whitespace'] = 'true' + } + return headers + }, + body: (params) => params.content, + }, + + transformResponse: async (response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to patch active file: ${error.message ?? 
response.statusText}`) + } + return { + success: true, + output: { + patched: true, + }, + } + }, + + outputs: { + patched: { + type: 'boolean', + description: 'Whether the active file was successfully patched', + }, + }, +} diff --git a/apps/sim/tools/obsidian/patch_note.ts b/apps/sim/tools/obsidian/patch_note.ts new file mode 100644 index 00000000000..12013d8b77f --- /dev/null +++ b/apps/sim/tools/obsidian/patch_note.ts @@ -0,0 +1,118 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianPatchNoteParams, ObsidianPatchNoteResponse } from './types' + +export const patchNoteTool: ToolConfig = { + id: 'obsidian_patch_note', + name: 'Obsidian Patch Note', + description: + 'Insert or replace content at a specific heading, block reference, or frontmatter field in a note', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + filename: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Path to the note relative to vault root (e.g. 
"folder/note.md")', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Content to insert at the target location', + }, + operation: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'How to insert content: append, prepend, or replace', + }, + targetType: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Type of target: heading, block, or frontmatter', + }, + target: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'Target identifier (heading text, block reference ID, or frontmatter field name)', + }, + targetDelimiter: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Delimiter for nested headings (default: "::")', + }, + trimTargetWhitespace: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether to trim whitespace from target before matching (default: false)', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + return `${base}/vault/${params.filename.trim().split('/').map(encodeURIComponent).join('/')}` + }, + method: 'PATCH', + headers: (params) => { + const headers: Record = { + Authorization: `Bearer ${params.apiKey}`, + 'Content-Type': 'text/markdown', + Operation: params.operation, + 'Target-Type': params.targetType, + Target: encodeURIComponent(params.target), + } + if (params.targetDelimiter) { + headers['Target-Delimiter'] = params.targetDelimiter + } + if (params.trimTargetWhitespace) { + headers['Trim-Target-Whitespace'] = 'true' + } + return headers + }, + body: (params) => params.content, + }, + + transformResponse: async (response, params) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Failed to patch note: ${error.message ?? 
response.statusText}`) + } + return { + success: true, + output: { + filename: params?.filename ?? '', + patched: true, + }, + } + }, + + outputs: { + filename: { + type: 'string', + description: 'Path of the patched note', + }, + patched: { + type: 'boolean', + description: 'Whether the note was successfully patched', + }, + }, +} diff --git a/apps/sim/tools/obsidian/search.ts b/apps/sim/tools/obsidian/search.ts new file mode 100644 index 00000000000..72551697f6c --- /dev/null +++ b/apps/sim/tools/obsidian/search.ts @@ -0,0 +1,95 @@ +import type { ToolConfig } from '@/tools/types' +import type { ObsidianSearchParams, ObsidianSearchResponse } from './types' + +export const searchTool: ToolConfig = { + id: 'obsidian_search', + name: 'Obsidian Search', + description: 'Search for text across notes in your Obsidian vault', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'API key from Obsidian Local REST API plugin settings', + }, + baseUrl: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Base URL for the Obsidian Local REST API', + }, + query: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Text to search for across vault notes', + }, + contextLength: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Number of characters of context around each match (default: 100)', + }, + }, + + request: { + url: (params) => { + const base = params.baseUrl.replace(/\/$/, '') + const contextParam = params.contextLength ? 
`&contextLength=${params.contextLength}` : '' + return `${base}/search/simple/?query=${encodeURIComponent(params.query)}${contextParam}` + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey}`, + Accept: 'application/json', + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error(`Search failed: ${error.message ?? response.statusText}`) + } + const data = await response.json() + return { + success: true, + output: { + results: + data?.map( + (item: { + filename: string + score: number + matches: Array<{ match: { start: number; end: number }; context: string }> + }) => ({ + filename: item.filename ?? '', + score: item.score ?? 0, + matches: + item.matches?.map((m: { context: string }) => ({ + context: m.context ?? '', + })) ?? [], + }) + ) ?? [], + }, + } + }, + + outputs: { + results: { + type: 'json', + description: 'Search results with filenames, scores, and matching contexts', + properties: { + filename: { type: 'string', description: 'Path to the matching note' }, + score: { type: 'number', description: 'Relevance score' }, + matches: { + type: 'json', + description: 'Matching text contexts', + properties: { + context: { type: 'string', description: 'Text surrounding the match' }, + }, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/obsidian/types.ts b/apps/sim/tools/obsidian/types.ts new file mode 100644 index 00000000000..6fe9203414a --- /dev/null +++ b/apps/sim/tools/obsidian/types.ts @@ -0,0 +1,190 @@ +import type { ToolResponse } from '@/tools/types' + +export interface ObsidianBaseParams { + apiKey: string + baseUrl: string +} + +export interface ObsidianListFilesParams extends ObsidianBaseParams { + path?: string +} + +export interface ObsidianListFilesResponse extends ToolResponse { + output: { + files: Array<{ + path: string + type: string + }> + } +} + +export interface ObsidianGetNoteParams 
extends ObsidianBaseParams { + filename: string +} + +export interface ObsidianGetNoteResponse extends ToolResponse { + output: { + content: string + filename: string + } +} + +export interface ObsidianCreateNoteParams extends ObsidianBaseParams { + filename: string + content: string +} + +export interface ObsidianCreateNoteResponse extends ToolResponse { + output: { + filename: string + created: boolean + } +} + +export interface ObsidianAppendNoteParams extends ObsidianBaseParams { + filename: string + content: string +} + +export interface ObsidianAppendNoteResponse extends ToolResponse { + output: { + filename: string + appended: boolean + } +} + +export interface ObsidianPatchNoteParams extends ObsidianBaseParams { + filename: string + content: string + operation: string + targetType: string + target: string + targetDelimiter?: string + trimTargetWhitespace?: boolean +} + +export interface ObsidianPatchNoteResponse extends ToolResponse { + output: { + filename: string + patched: boolean + } +} + +export interface ObsidianDeleteNoteParams extends ObsidianBaseParams { + filename: string +} + +export interface ObsidianDeleteNoteResponse extends ToolResponse { + output: { + filename: string + deleted: boolean + } +} + +export interface ObsidianSearchParams extends ObsidianBaseParams { + query: string + contextLength?: number +} + +export interface ObsidianSearchResponse extends ToolResponse { + output: { + results: Array<{ + filename: string + score: number + matches: Array<{ + context: string + }> + }> + } +} + +export interface ObsidianGetActiveParams extends ObsidianBaseParams {} + +export interface ObsidianGetActiveResponse extends ToolResponse { + output: { + content: string + filename: string | null + } +} + +export interface ObsidianAppendActiveParams extends ObsidianBaseParams { + content: string +} + +export interface ObsidianAppendActiveResponse extends ToolResponse { + output: { + appended: boolean + } +} + +export interface ObsidianPatchActiveParams 
extends ObsidianBaseParams { + content: string + operation: string + targetType: string + target: string + targetDelimiter?: string + trimTargetWhitespace?: boolean +} + +export interface ObsidianPatchActiveResponse extends ToolResponse { + output: { + patched: boolean + } +} + +export interface ObsidianListCommandsParams extends ObsidianBaseParams {} + +export interface ObsidianListCommandsResponse extends ToolResponse { + output: { + commands: Array<{ + id: string + name: string + }> + } +} + +export interface ObsidianExecuteCommandParams extends ObsidianBaseParams { + commandId: string +} + +export interface ObsidianExecuteCommandResponse extends ToolResponse { + output: { + commandId: string + executed: boolean + } +} + +export interface ObsidianOpenFileParams extends ObsidianBaseParams { + filename: string + newLeaf?: boolean +} + +export interface ObsidianOpenFileResponse extends ToolResponse { + output: { + filename: string + opened: boolean + } +} + +export interface ObsidianGetPeriodicNoteParams extends ObsidianBaseParams { + period: string +} + +export interface ObsidianGetPeriodicNoteResponse extends ToolResponse { + output: { + content: string + period: string + } +} + +export interface ObsidianAppendPeriodicNoteParams extends ObsidianBaseParams { + period: string + content: string +} + +export interface ObsidianAppendPeriodicNoteResponse extends ToolResponse { + output: { + period: string + appended: boolean + } +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 3539724f68d..437c36a3777 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -1450,6 +1450,23 @@ import { notionWriteTool, notionWriteV2Tool, } from '@/tools/notion' +import { + obsidianAppendActiveTool, + obsidianAppendNoteTool, + obsidianAppendPeriodicNoteTool, + obsidianCreateNoteTool, + obsidianDeleteNoteTool, + obsidianExecuteCommandTool, + obsidianGetActiveTool, + obsidianGetNoteTool, + obsidianGetPeriodicNoteTool, + 
obsidianListCommandsTool, + obsidianListFilesTool, + obsidianOpenFileTool, + obsidianPatchActiveTool, + obsidianPatchNoteTool, + obsidianSearchTool, +} from '@/tools/obsidian' import { onedriveCreateFolderTool, onedriveDeleteTool, @@ -2739,6 +2756,21 @@ export const tools: Record = { notion_create_database_v2: notionCreateDatabaseV2Tool, notion_update_page_v2: notionUpdatePageV2Tool, notion_add_database_row_v2: notionAddDatabaseRowTool, + obsidian_append_active: obsidianAppendActiveTool, + obsidian_append_note: obsidianAppendNoteTool, + obsidian_append_periodic_note: obsidianAppendPeriodicNoteTool, + obsidian_create_note: obsidianCreateNoteTool, + obsidian_delete_note: obsidianDeleteNoteTool, + obsidian_execute_command: obsidianExecuteCommandTool, + obsidian_get_active: obsidianGetActiveTool, + obsidian_get_note: obsidianGetNoteTool, + obsidian_get_periodic_note: obsidianGetPeriodicNoteTool, + obsidian_list_commands: obsidianListCommandsTool, + obsidian_list_files: obsidianListFilesTool, + obsidian_open_file: obsidianOpenFileTool, + obsidian_patch_active: obsidianPatchActiveTool, + obsidian_patch_note: obsidianPatchNoteTool, + obsidian_search: obsidianSearchTool, onepassword_list_vaults: onepasswordListVaultsTool, onepassword_get_vault: onepasswordGetVaultTool, onepassword_list_items: onepasswordListItemsTool, From 1ba1bc8edb9f4182cbb57e9a48787e95efb8fe6b Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 7 Mar 2026 00:52:57 -0800 Subject: [PATCH 2/6] feat(evernote): add Evernote integration with 11 tools (#3456) * feat(evernote): add Evernote integration with 11 tools * fix(evernote): fix signed integer mismatch in Thrift version check * fix(evernote): fix exception field mapping and add sandbox support * fix(evernote): address PR review feedback * fix(evernote): clamp maxNotes to Evernote's 250 limit Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 --- apps/docs/components/icons.tsx | 8 + apps/docs/components/ui/icon-mapping.ts | 2 + 
apps/docs/content/docs/en/tools/evernote.mdx | 267 ++++++ apps/docs/content/docs/en/tools/meta.json | 1 + .../app/api/tools/evernote/copy-note/route.ts | 38 + .../api/tools/evernote/create-note/route.ts | 51 ++ .../tools/evernote/create-notebook/route.ts | 38 + .../api/tools/evernote/create-tag/route.ts | 38 + .../api/tools/evernote/delete-note/route.ts | 41 + .../app/api/tools/evernote/get-note/route.ts | 38 + .../api/tools/evernote/get-notebook/route.ts | 38 + apps/sim/app/api/tools/evernote/lib/client.ts | 799 ++++++++++++++++++ apps/sim/app/api/tools/evernote/lib/thrift.ts | 255 ++++++ .../tools/evernote/list-notebooks/route.ts | 35 + .../app/api/tools/evernote/list-tags/route.ts | 35 + .../api/tools/evernote/search-notes/route.ts | 49 ++ .../api/tools/evernote/update-note/route.ts | 58 ++ apps/sim/blocks/blocks/evernote.ts | 308 +++++++ apps/sim/blocks/registry.ts | 2 + apps/sim/components/icons.tsx | 8 + apps/sim/tools/evernote/copy_note.ts | 78 ++ apps/sim/tools/evernote/create_note.ts | 101 +++ apps/sim/tools/evernote/create_notebook.ts | 78 ++ apps/sim/tools/evernote/create_tag.ts | 70 ++ apps/sim/tools/evernote/delete_note.ts | 62 ++ apps/sim/tools/evernote/get_note.ts | 87 ++ apps/sim/tools/evernote/get_notebook.ts | 71 ++ apps/sim/tools/evernote/index.ts | 12 + apps/sim/tools/evernote/list_notebooks.ts | 64 ++ apps/sim/tools/evernote/list_tags.ts | 55 ++ apps/sim/tools/evernote/search_notes.ts | 92 ++ apps/sim/tools/evernote/types.ts | 166 ++++ apps/sim/tools/evernote/update_note.ts | 104 +++ apps/sim/tools/registry.ts | 24 + 34 files changed, 3173 insertions(+) create mode 100644 apps/docs/content/docs/en/tools/evernote.mdx create mode 100644 apps/sim/app/api/tools/evernote/copy-note/route.ts create mode 100644 apps/sim/app/api/tools/evernote/create-note/route.ts create mode 100644 apps/sim/app/api/tools/evernote/create-notebook/route.ts create mode 100644 apps/sim/app/api/tools/evernote/create-tag/route.ts create mode 100644 
apps/sim/app/api/tools/evernote/delete-note/route.ts create mode 100644 apps/sim/app/api/tools/evernote/get-note/route.ts create mode 100644 apps/sim/app/api/tools/evernote/get-notebook/route.ts create mode 100644 apps/sim/app/api/tools/evernote/lib/client.ts create mode 100644 apps/sim/app/api/tools/evernote/lib/thrift.ts create mode 100644 apps/sim/app/api/tools/evernote/list-notebooks/route.ts create mode 100644 apps/sim/app/api/tools/evernote/list-tags/route.ts create mode 100644 apps/sim/app/api/tools/evernote/search-notes/route.ts create mode 100644 apps/sim/app/api/tools/evernote/update-note/route.ts create mode 100644 apps/sim/blocks/blocks/evernote.ts create mode 100644 apps/sim/tools/evernote/copy_note.ts create mode 100644 apps/sim/tools/evernote/create_note.ts create mode 100644 apps/sim/tools/evernote/create_notebook.ts create mode 100644 apps/sim/tools/evernote/create_tag.ts create mode 100644 apps/sim/tools/evernote/delete_note.ts create mode 100644 apps/sim/tools/evernote/get_note.ts create mode 100644 apps/sim/tools/evernote/get_notebook.ts create mode 100644 apps/sim/tools/evernote/index.ts create mode 100644 apps/sim/tools/evernote/list_notebooks.ts create mode 100644 apps/sim/tools/evernote/list_tags.ts create mode 100644 apps/sim/tools/evernote/search_notes.ts create mode 100644 apps/sim/tools/evernote/types.ts create mode 100644 apps/sim/tools/evernote/update_note.ts diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 41fa14fa22a..e77c864d184 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -1955,6 +1955,14 @@ export function Mem0Icon(props: SVGProps) { ) } +export function EvernoteIcon(props: SVGProps) { + return ( + + + + ) +} + export function ElevenLabsIcon(props: SVGProps) { return ( = { elasticsearch: ElasticsearchIcon, elevenlabs: ElevenLabsIcon, enrich: EnrichSoIcon, + evernote: EvernoteIcon, exa: ExaAIIcon, file_v3: DocumentIcon, firecrawl: FirecrawlIcon, diff --git 
a/apps/docs/content/docs/en/tools/evernote.mdx b/apps/docs/content/docs/en/tools/evernote.mdx new file mode 100644 index 00000000000..4c024edea38 --- /dev/null +++ b/apps/docs/content/docs/en/tools/evernote.mdx @@ -0,0 +1,267 @@ +--- +title: Evernote +description: Manage notes, notebooks, and tags in Evernote +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +## Usage Instructions + +Integrate with Evernote to manage notes, notebooks, and tags. Create, read, update, copy, search, and delete notes. Create and list notebooks and tags. + + + +## Tools + +### `evernote_copy_note` + +Copy a note to another notebook in Evernote + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `noteGuid` | string | Yes | GUID of the note to copy | +| `toNotebookGuid` | string | Yes | GUID of the destination notebook | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `note` | object | The copied note metadata | +| ↳ `guid` | string | New note GUID | +| ↳ `title` | string | Note title | +| ↳ `notebookGuid` | string | GUID of the destination notebook | +| ↳ `created` | number | Creation timestamp in milliseconds | +| ↳ `updated` | number | Last updated timestamp in milliseconds | + +### `evernote_create_note` + +Create a new note in Evernote + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `title` | string | Yes | Title of the note | +| `content` | string | Yes | Content of the note \(plain text or ENML\) | +| `notebookGuid` | string | No | GUID of the notebook to create the note in \(defaults to default notebook\) | +| `tagNames` | string | No | Comma-separated list of tag names to apply | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| 
`note` | object | The created note | +| ↳ `guid` | string | Unique identifier of the note | +| ↳ `title` | string | Title of the note | +| ↳ `content` | string | ENML content of the note | +| ↳ `notebookGuid` | string | GUID of the containing notebook | +| ↳ `tagNames` | array | Tag names applied to the note | +| ↳ `created` | number | Creation timestamp in milliseconds | +| ↳ `updated` | number | Last updated timestamp in milliseconds | + +### `evernote_create_notebook` + +Create a new notebook in Evernote + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `name` | string | Yes | Name for the new notebook | +| `stack` | string | No | Stack name to group the notebook under | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `notebook` | object | The created notebook | +| ↳ `guid` | string | Notebook GUID | +| ↳ `name` | string | Notebook name | +| ↳ `defaultNotebook` | boolean | Whether this is the default notebook | +| ↳ `serviceCreated` | number | Creation timestamp in milliseconds | +| ↳ `serviceUpdated` | number | Last updated timestamp in milliseconds | +| ↳ `stack` | string | Notebook stack name | + +### `evernote_create_tag` + +Create a new tag in Evernote + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `name` | string | Yes | Name for the new tag | +| `parentGuid` | string | No | GUID of the parent tag for hierarchy | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `tag` | object | The created tag | +| ↳ `guid` | string | Tag GUID | +| ↳ `name` | string | Tag name | +| ↳ `parentGuid` | string | Parent tag GUID | +| ↳ `updateSequenceNum` | number | Update sequence number | + +### `evernote_delete_note` + +Move a note to the trash in 
Evernote + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `noteGuid` | string | Yes | GUID of the note to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the note was successfully deleted | +| `noteGuid` | string | GUID of the deleted note | + +### `evernote_get_note` + +Retrieve a note from Evernote by its GUID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `noteGuid` | string | Yes | GUID of the note to retrieve | +| `withContent` | boolean | No | Whether to include note content \(default: true\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `note` | object | The retrieved note | +| ↳ `guid` | string | Unique identifier of the note | +| ↳ `title` | string | Title of the note | +| ↳ `content` | string | ENML content of the note | +| ↳ `contentLength` | number | Length of the note content | +| ↳ `notebookGuid` | string | GUID of the containing notebook | +| ↳ `tagGuids` | array | GUIDs of tags on the note | +| ↳ `tagNames` | array | Names of tags on the note | +| ↳ `created` | number | Creation timestamp in milliseconds | +| ↳ `updated` | number | Last updated timestamp in milliseconds | +| ↳ `active` | boolean | Whether the note is active \(not in trash\) | + +### `evernote_get_notebook` + +Retrieve a notebook from Evernote by its GUID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `notebookGuid` | string | Yes | GUID of the notebook to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `notebook` | object | The retrieved 
notebook | +| ↳ `guid` | string | Notebook GUID | +| ↳ `name` | string | Notebook name | +| ↳ `defaultNotebook` | boolean | Whether this is the default notebook | +| ↳ `serviceCreated` | number | Creation timestamp in milliseconds | +| ↳ `serviceUpdated` | number | Last updated timestamp in milliseconds | +| ↳ `stack` | string | Notebook stack name | + +### `evernote_list_notebooks` + +List all notebooks in an Evernote account + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `notebooks` | array | List of notebooks | + +### `evernote_list_tags` + +List all tags in an Evernote account + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `tags` | array | List of tags | + +### `evernote_search_notes` + +Search for notes in Evernote using the Evernote search grammar + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `query` | string | Yes | Search query using Evernote search grammar \(e.g., "tag:work intitle:meeting"\) | +| `notebookGuid` | string | No | Restrict search to a specific notebook by GUID | +| `offset` | number | No | Starting index for results \(default: 0\) | +| `maxNotes` | number | No | Maximum number of notes to return \(default: 25\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `totalNotes` | number | Total number of matching notes | +| `notes` | array | List of matching note metadata | + +### `evernote_update_note` + +Update an existing note in Evernote + +#### Input + +| Parameter 
| Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Evernote developer token | +| `noteGuid` | string | Yes | GUID of the note to update | +| `title` | string | No | New title for the note | +| `content` | string | No | New content for the note \(plain text or ENML\) | +| `notebookGuid` | string | No | GUID of the notebook to move the note to | +| `tagNames` | string | No | Comma-separated list of tag names \(replaces existing tags\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `note` | object | The updated note | +| ↳ `guid` | string | Unique identifier of the note | +| ↳ `title` | string | Title of the note | +| ↳ `content` | string | ENML content of the note | +| ↳ `notebookGuid` | string | GUID of the containing notebook | +| ↳ `tagNames` | array | Tag names on the note | +| ↳ `created` | number | Creation timestamp in milliseconds | +| ↳ `updated` | number | Last updated timestamp in milliseconds | + + diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index a5de340aaa7..81dd886faba 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -35,6 +35,7 @@ "elasticsearch", "elevenlabs", "enrich", + "evernote", "exa", "file", "firecrawl", diff --git a/apps/sim/app/api/tools/evernote/copy-note/route.ts b/apps/sim/app/api/tools/evernote/copy-note/route.ts new file mode 100644 index 00000000000..1011072a750 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/copy-note/route.ts @@ -0,0 +1,38 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { copyNote } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteCopyNoteAPI') + +export async function POST(request: NextRequest) { + 
const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, noteGuid, toNotebookGuid } = body + + if (!apiKey || !noteGuid || !toNotebookGuid) { + return NextResponse.json( + { success: false, error: 'apiKey, noteGuid, and toNotebookGuid are required' }, + { status: 400 } + ) + } + + const note = await copyNote(apiKey, noteGuid, toNotebookGuid) + + return NextResponse.json({ + success: true, + output: { note }, + }) + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error' + logger.error('Failed to copy note', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/create-note/route.ts b/apps/sim/app/api/tools/evernote/create-note/route.ts new file mode 100644 index 00000000000..ef1c97f5982 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/create-note/route.ts @@ -0,0 +1,51 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createNote } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteCreateNoteAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, title, content, notebookGuid, tagNames } = body + + if (!apiKey || !title || !content) { + return NextResponse.json( + { success: false, error: 'apiKey, title, and content are required' }, + { status: 400 } + ) + } + + 
const parsedTags = tagNames + ? (() => { + const tags = + typeof tagNames === 'string' + ? tagNames + .split(',') + .map((t: string) => t.trim()) + .filter(Boolean) + : tagNames + return tags.length > 0 ? tags : undefined + })() + : undefined + + const note = await createNote(apiKey, title, content, notebookGuid || undefined, parsedTags) + + return NextResponse.json({ + success: true, + output: { note }, + }) + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error' + logger.error('Failed to create note', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/create-notebook/route.ts b/apps/sim/app/api/tools/evernote/create-notebook/route.ts new file mode 100644 index 00000000000..37ab2522d86 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/create-notebook/route.ts @@ -0,0 +1,38 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createNotebook } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteCreateNotebookAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, name, stack } = body + + if (!apiKey || !name) { + return NextResponse.json( + { success: false, error: 'apiKey and name are required' }, + { status: 400 } + ) + } + + const notebook = await createNotebook(apiKey, name, stack || undefined) + + return NextResponse.json({ + success: true, + output: { notebook }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to create notebook', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/create-tag/route.ts b/apps/sim/app/api/tools/evernote/create-tag/route.ts new file mode 100644 index 00000000000..188516cbe87 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/create-tag/route.ts @@ -0,0 +1,38 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createTag } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteCreateTagAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, name, parentGuid } = body + + if (!apiKey || !name) { + return NextResponse.json( + { success: false, error: 'apiKey and name are required' }, + { status: 400 } + ) + } + + const tag = await createTag(apiKey, name, parentGuid || undefined) + + return NextResponse.json({ + success: true, + output: { tag }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to create tag', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/delete-note/route.ts b/apps/sim/app/api/tools/evernote/delete-note/route.ts new file mode 100644 index 00000000000..e55b298496a --- /dev/null +++ b/apps/sim/app/api/tools/evernote/delete-note/route.ts @@ -0,0 +1,41 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { deleteNote } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteDeleteNoteAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, noteGuid } = body + + if (!apiKey || !noteGuid) { + return NextResponse.json( + { success: false, error: 'apiKey and noteGuid are required' }, + { status: 400 } + ) + } + + await deleteNote(apiKey, noteGuid) + + return NextResponse.json({ + success: true, + output: { + success: true, + noteGuid, + }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to delete note', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/get-note/route.ts b/apps/sim/app/api/tools/evernote/get-note/route.ts new file mode 100644 index 00000000000..f71c84aa7d5 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/get-note/route.ts @@ -0,0 +1,38 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { getNote } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteGetNoteAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, noteGuid, withContent = true } = body + + if (!apiKey || !noteGuid) { + return NextResponse.json( + { success: false, error: 'apiKey and noteGuid are required' }, + { status: 400 } + ) + } + + const note = await getNote(apiKey, noteGuid, withContent) + + return NextResponse.json({ + success: true, + output: { note }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to get note', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/get-notebook/route.ts b/apps/sim/app/api/tools/evernote/get-notebook/route.ts new file mode 100644 index 00000000000..2f0e6db5d5d --- /dev/null +++ b/apps/sim/app/api/tools/evernote/get-notebook/route.ts @@ -0,0 +1,38 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { getNotebook } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteGetNotebookAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, notebookGuid } = body + + if (!apiKey || !notebookGuid) { + return NextResponse.json( + { success: false, error: 'apiKey and notebookGuid are required' }, + { status: 400 } + ) + } + + const notebook = await getNotebook(apiKey, notebookGuid) + + return NextResponse.json({ + success: true, + output: { notebook }, + }) + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error' + logger.error('Failed to get notebook', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/lib/client.ts b/apps/sim/app/api/tools/evernote/lib/client.ts new file mode 100644 index 00000000000..05b80eb4829 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/lib/client.ts @@ -0,0 +1,799 @@ +/** + * Evernote API client using Thrift binary protocol over HTTP. 
+ * Implements only the NoteStore methods needed for the integration. + */ + +import { + ThriftReader, + ThriftWriter, + TYPE_BOOL, + TYPE_I32, + TYPE_I64, + TYPE_LIST, + TYPE_STRING, + TYPE_STRUCT, +} from './thrift' + +export interface EvernoteNotebook { + guid: string + name: string + defaultNotebook: boolean + serviceCreated: number | null + serviceUpdated: number | null + stack: string | null +} + +export interface EvernoteNote { + guid: string + title: string + content: string | null + contentLength: number | null + created: number | null + updated: number | null + deleted: number | null + active: boolean + notebookGuid: string | null + tagGuids: string[] + tagNames: string[] +} + +export interface EvernoteNoteMetadata { + guid: string + title: string | null + contentLength: number | null + created: number | null + updated: number | null + notebookGuid: string | null + tagGuids: string[] +} + +export interface EvernoteTag { + guid: string + name: string + parentGuid: string | null + updateSequenceNum: number | null +} + +export interface EvernoteSearchResult { + startIndex: number + totalNotes: number + notes: EvernoteNoteMetadata[] +} + +/** Extract shard ID from an Evernote developer token */ +function extractShardId(token: string): string { + const match = token.match(/S=s(\d+)/) + if (!match) { + throw new Error('Invalid Evernote token format: cannot extract shard ID') + } + return `s${match[1]}` +} + +/** Get the NoteStore URL for the given token */ +function getNoteStoreUrl(token: string): string { + const shardId = extractShardId(token) + const host = token.includes(':Sandbox') ? 
'sandbox.evernote.com' : 'www.evernote.com' + return `https://${host}/shard/${shardId}/notestore` +} + +/** Make a Thrift RPC call to the NoteStore */ +async function callNoteStore(token: string, writer: ThriftWriter): Promise { + const url = getNoteStoreUrl(token) + const body = writer.toBuffer() + + const response = await fetch(url, { + method: 'POST', + headers: { + 'Content-Type': 'application/x-thrift', + Accept: 'application/x-thrift', + }, + body: new Uint8Array(body), + }) + + if (!response.ok) { + throw new Error(`Evernote API HTTP error: ${response.status} ${response.statusText}`) + } + + const arrayBuffer = await response.arrayBuffer() + const reader = new ThriftReader(arrayBuffer) + const msg = reader.readMessageBegin() + + if (reader.isException(msg.type)) { + const ex = reader.readException() + throw new Error(`Evernote API error: ${ex.message}`) + } + + return reader +} + +/** Check for Evernote-specific exceptions in the response struct. Returns true if handled. */ +function checkEvernoteException(reader: ThriftReader, fieldId: number, fieldType: number): boolean { + if (fieldId === 1 && fieldType === TYPE_STRUCT) { + let message = '' + let errorCode = 0 + reader.readStruct((r, fid, ftype) => { + if (fid === 1 && ftype === TYPE_I32) { + errorCode = r.readI32() + } else if (fid === 2 && ftype === TYPE_STRING) { + message = r.readString() + } else { + r.skip(ftype) + } + }) + throw new Error(`Evernote error (${errorCode}): ${message}`) + } + if (fieldId === 2 && fieldType === TYPE_STRUCT) { + let message = '' + let errorCode = 0 + reader.readStruct((r, fid, ftype) => { + if (fid === 1 && ftype === TYPE_I32) { + errorCode = r.readI32() + } else if (fid === 2 && ftype === TYPE_STRING) { + message = r.readString() + } else { + r.skip(ftype) + } + }) + throw new Error(`Evernote system error (${errorCode}): ${message}`) + } + if (fieldId === 3 && fieldType === TYPE_STRUCT) { + let identifier = '' + let key = '' + reader.readStruct((r, fid, ftype) => { + if 
(fid === 1 && ftype === TYPE_STRING) { + identifier = r.readString() + } else if (fid === 2 && ftype === TYPE_STRING) { + key = r.readString() + } else { + r.skip(ftype) + } + }) + throw new Error(`Evernote not found: ${identifier}${key ? ` (${key})` : ''}`) + } + return false +} + +function readNotebook(reader: ThriftReader): EvernoteNotebook { + const notebook: EvernoteNotebook = { + guid: '', + name: '', + defaultNotebook: false, + serviceCreated: null, + serviceUpdated: null, + stack: null, + } + + reader.readStruct((r, fieldId, fieldType) => { + switch (fieldId) { + case 1: + if (fieldType === TYPE_STRING) notebook.guid = r.readString() + else r.skip(fieldType) + break + case 2: + if (fieldType === TYPE_STRING) notebook.name = r.readString() + else r.skip(fieldType) + break + case 4: + if (fieldType === TYPE_BOOL) notebook.defaultNotebook = r.readBool() + else r.skip(fieldType) + break + case 5: + if (fieldType === TYPE_I64) notebook.serviceCreated = Number(r.readI64()) + else r.skip(fieldType) + break + case 6: + if (fieldType === TYPE_I64) notebook.serviceUpdated = Number(r.readI64()) + else r.skip(fieldType) + break + case 9: + if (fieldType === TYPE_STRING) notebook.stack = r.readString() + else r.skip(fieldType) + break + default: + r.skip(fieldType) + } + }) + + return notebook +} + +function readNote(reader: ThriftReader): EvernoteNote { + const note: EvernoteNote = { + guid: '', + title: '', + content: null, + contentLength: null, + created: null, + updated: null, + deleted: null, + active: true, + notebookGuid: null, + tagGuids: [], + tagNames: [], + } + + reader.readStruct((r, fieldId, fieldType) => { + switch (fieldId) { + case 1: + if (fieldType === TYPE_STRING) note.guid = r.readString() + else r.skip(fieldType) + break + case 2: + if (fieldType === TYPE_STRING) note.title = r.readString() + else r.skip(fieldType) + break + case 3: + if (fieldType === TYPE_STRING) note.content = r.readString() + else r.skip(fieldType) + break + case 5: + if 
(fieldType === TYPE_I32) note.contentLength = r.readI32() + else r.skip(fieldType) + break + case 6: + if (fieldType === TYPE_I64) note.created = Number(r.readI64()) + else r.skip(fieldType) + break + case 7: + if (fieldType === TYPE_I64) note.updated = Number(r.readI64()) + else r.skip(fieldType) + break + case 8: + if (fieldType === TYPE_I64) note.deleted = Number(r.readI64()) + else r.skip(fieldType) + break + case 9: + if (fieldType === TYPE_BOOL) note.active = r.readBool() + else r.skip(fieldType) + break + case 11: + if (fieldType === TYPE_STRING) note.notebookGuid = r.readString() + else r.skip(fieldType) + break + case 12: + if (fieldType === TYPE_LIST) { + const { size } = r.readListBegin() + for (let i = 0; i < size; i++) { + note.tagGuids.push(r.readString()) + } + } else { + r.skip(fieldType) + } + break + case 15: + if (fieldType === TYPE_LIST) { + const { size } = r.readListBegin() + for (let i = 0; i < size; i++) { + note.tagNames.push(r.readString()) + } + } else { + r.skip(fieldType) + } + break + default: + r.skip(fieldType) + } + }) + + return note +} + +function readTag(reader: ThriftReader): EvernoteTag { + const tag: EvernoteTag = { + guid: '', + name: '', + parentGuid: null, + updateSequenceNum: null, + } + + reader.readStruct((r, fieldId, fieldType) => { + switch (fieldId) { + case 1: + if (fieldType === TYPE_STRING) tag.guid = r.readString() + else r.skip(fieldType) + break + case 2: + if (fieldType === TYPE_STRING) tag.name = r.readString() + else r.skip(fieldType) + break + case 3: + if (fieldType === TYPE_STRING) tag.parentGuid = r.readString() + else r.skip(fieldType) + break + case 4: + if (fieldType === TYPE_I32) tag.updateSequenceNum = r.readI32() + else r.skip(fieldType) + break + default: + r.skip(fieldType) + } + }) + + return tag +} + +function readNoteMetadata(reader: ThriftReader): EvernoteNoteMetadata { + const meta: EvernoteNoteMetadata = { + guid: '', + title: null, + contentLength: null, + created: null, + updated: null, + 
notebookGuid: null, + tagGuids: [], + } + + reader.readStruct((r, fieldId, fieldType) => { + switch (fieldId) { + case 1: + if (fieldType === TYPE_STRING) meta.guid = r.readString() + else r.skip(fieldType) + break + case 2: + if (fieldType === TYPE_STRING) meta.title = r.readString() + else r.skip(fieldType) + break + case 5: + if (fieldType === TYPE_I32) meta.contentLength = r.readI32() + else r.skip(fieldType) + break + case 6: + if (fieldType === TYPE_I64) meta.created = Number(r.readI64()) + else r.skip(fieldType) + break + case 7: + if (fieldType === TYPE_I64) meta.updated = Number(r.readI64()) + else r.skip(fieldType) + break + case 11: + if (fieldType === TYPE_STRING) meta.notebookGuid = r.readString() + else r.skip(fieldType) + break + case 12: + if (fieldType === TYPE_LIST) { + const { size } = r.readListBegin() + for (let i = 0; i < size; i++) { + meta.tagGuids.push(r.readString()) + } + } else { + r.skip(fieldType) + } + break + default: + r.skip(fieldType) + } + }) + + return meta +} + +export async function listNotebooks(token: string): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('listNotebooks', 0) + writer.writeStringField(1, token) + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + const notebooks: EvernoteNotebook[] = [] + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_LIST) { + const { size } = r.readListBegin() + for (let i = 0; i < size; i++) { + notebooks.push(readNotebook(r)) + } + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + return notebooks +} + +export async function getNote( + token: string, + guid: string, + withContent = true +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('getNote', 0) + writer.writeStringField(1, token) + writer.writeStringField(2, guid) + writer.writeBoolField(3, withContent) + writer.writeBoolField(4, false) + 
writer.writeBoolField(5, false) + writer.writeBoolField(6, false) + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let note: EvernoteNote | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + note = readNote(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!note) { + throw new Error('No note returned from Evernote API') + } + + return note +} + +/** Wrap content in ENML if it's not already */ +function wrapInEnml(content: string): string { + if (content.includes('/g, '>') + .replace(/\n/g, '
') + return `${escaped}` +} + +export async function createNote( + token: string, + title: string, + content: string, + notebookGuid?: string, + tagNames?: string[] +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('createNote', 0) + writer.writeStringField(1, token) + + writer.writeFieldBegin(TYPE_STRUCT, 2) + writer.writeStringField(2, title) + writer.writeStringField(3, wrapInEnml(content)) + if (notebookGuid) { + writer.writeStringField(11, notebookGuid) + } + if (tagNames && tagNames.length > 0) { + writer.writeStringListField(15, tagNames) + } + writer.writeFieldStop() + + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let note: EvernoteNote | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + note = readNote(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!note) { + throw new Error('No note returned from Evernote API') + } + + return note +} + +export async function updateNote( + token: string, + guid: string, + title?: string, + content?: string, + notebookGuid?: string, + tagNames?: string[] +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('updateNote', 0) + writer.writeStringField(1, token) + + writer.writeFieldBegin(TYPE_STRUCT, 2) + writer.writeStringField(1, guid) + if (title !== undefined) { + writer.writeStringField(2, title) + } + if (content !== undefined) { + writer.writeStringField(3, wrapInEnml(content)) + } + if (notebookGuid !== undefined) { + writer.writeStringField(11, notebookGuid) + } + if (tagNames !== undefined) { + writer.writeStringListField(15, tagNames) + } + writer.writeFieldStop() + + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let note: EvernoteNote | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + note = 
readNote(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!note) { + throw new Error('No note returned from Evernote API') + } + + return note +} + +export async function deleteNote(token: string, guid: string): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('deleteNote', 0) + writer.writeStringField(1, token) + writer.writeStringField(2, guid) + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let usn = 0 + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_I32) { + usn = r.readI32() + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + return usn +} + +export async function searchNotes( + token: string, + query: string, + notebookGuid?: string, + offset = 0, + maxNotes = 25 +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('findNotesMetadata', 0) + writer.writeStringField(1, token) + + // NoteFilter (field 2) + writer.writeFieldBegin(TYPE_STRUCT, 2) + if (query) { + writer.writeStringField(3, query) + } + if (notebookGuid) { + writer.writeStringField(4, notebookGuid) + } + writer.writeFieldStop() + + // offset (field 3) + writer.writeI32Field(3, offset) + // maxNotes (field 4) + writer.writeI32Field(4, maxNotes) + + // NotesMetadataResultSpec (field 5) + writer.writeFieldBegin(TYPE_STRUCT, 5) + writer.writeBoolField(2, true) // includeTitle + writer.writeBoolField(5, true) // includeContentLength + writer.writeBoolField(6, true) // includeCreated + writer.writeBoolField(7, true) // includeUpdated + writer.writeBoolField(11, true) // includeNotebookGuid + writer.writeBoolField(12, true) // includeTagGuids + writer.writeFieldStop() + + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + const result: EvernoteSearchResult = { + startIndex: 0, + totalNotes: 0, + notes: [], + } + + reader.readStruct((r, 
fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + r.readStruct((r2, fid2, ftype2) => { + switch (fid2) { + case 1: + if (ftype2 === TYPE_I32) result.startIndex = r2.readI32() + else r2.skip(ftype2) + break + case 2: + if (ftype2 === TYPE_I32) result.totalNotes = r2.readI32() + else r2.skip(ftype2) + break + case 3: + if (ftype2 === TYPE_LIST) { + const { size } = r2.readListBegin() + for (let i = 0; i < size; i++) { + result.notes.push(readNoteMetadata(r2)) + } + } else { + r2.skip(ftype2) + } + break + default: + r2.skip(ftype2) + } + }) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + return result +} + +export async function getNotebook(token: string, guid: string): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('getNotebook', 0) + writer.writeStringField(1, token) + writer.writeStringField(2, guid) + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let notebook: EvernoteNotebook | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + notebook = readNotebook(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!notebook) { + throw new Error('No notebook returned from Evernote API') + } + + return notebook +} + +export async function createNotebook( + token: string, + name: string, + stack?: string +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('createNotebook', 0) + writer.writeStringField(1, token) + + writer.writeFieldBegin(TYPE_STRUCT, 2) + writer.writeStringField(2, name) + if (stack) { + writer.writeStringField(9, stack) + } + writer.writeFieldStop() + + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let notebook: EvernoteNotebook | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === 
TYPE_STRUCT) { + notebook = readNotebook(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!notebook) { + throw new Error('No notebook returned from Evernote API') + } + + return notebook +} + +export async function listTags(token: string): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('listTags', 0) + writer.writeStringField(1, token) + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + const tags: EvernoteTag[] = [] + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_LIST) { + const { size } = r.readListBegin() + for (let i = 0; i < size; i++) { + tags.push(readTag(r)) + } + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + return tags +} + +export async function createTag( + token: string, + name: string, + parentGuid?: string +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('createTag', 0) + writer.writeStringField(1, token) + + writer.writeFieldBegin(TYPE_STRUCT, 2) + writer.writeStringField(2, name) + if (parentGuid) { + writer.writeStringField(3, parentGuid) + } + writer.writeFieldStop() + + writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let tag: EvernoteTag | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + tag = readTag(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!tag) { + throw new Error('No tag returned from Evernote API') + } + + return tag +} + +export async function copyNote( + token: string, + noteGuid: string, + toNotebookGuid: string +): Promise { + const writer = new ThriftWriter() + writer.writeMessageBegin('copyNote', 0) + writer.writeStringField(1, token) + writer.writeStringField(2, noteGuid) + writer.writeStringField(3, toNotebookGuid) + 
writer.writeFieldStop() + + const reader = await callNoteStore(token, writer) + let note: EvernoteNote | null = null + + reader.readStruct((r, fieldId, fieldType) => { + if (fieldId === 0 && fieldType === TYPE_STRUCT) { + note = readNote(r) + } else { + if (!checkEvernoteException(r, fieldId, fieldType)) { + r.skip(fieldType) + } + } + }) + + if (!note) { + throw new Error('No note returned from Evernote API') + } + + return note +} diff --git a/apps/sim/app/api/tools/evernote/lib/thrift.ts b/apps/sim/app/api/tools/evernote/lib/thrift.ts new file mode 100644 index 00000000000..3f51b6933b4 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/lib/thrift.ts @@ -0,0 +1,255 @@ +/** + * Minimal Thrift binary protocol encoder/decoder for Evernote API. + * Supports only the types needed for NoteStore operations. + */ + +const THRIFT_VERSION_1 = 0x80010000 +const MESSAGE_CALL = 1 +const MESSAGE_EXCEPTION = 3 + +const TYPE_STOP = 0 +const TYPE_BOOL = 2 +const TYPE_I32 = 8 +const TYPE_I64 = 10 +const TYPE_STRING = 11 +const TYPE_STRUCT = 12 +const TYPE_LIST = 15 + +export class ThriftWriter { + private buffer: number[] = [] + + writeMessageBegin(name: string, seqId: number): void { + this.writeI32(THRIFT_VERSION_1 | MESSAGE_CALL) + this.writeString(name) + this.writeI32(seqId) + } + + writeFieldBegin(type: number, id: number): void { + this.buffer.push(type) + this.writeI16(id) + } + + writeFieldStop(): void { + this.buffer.push(TYPE_STOP) + } + + writeString(value: string): void { + const encoded = new TextEncoder().encode(value) + this.writeI32(encoded.length) + for (const byte of encoded) { + this.buffer.push(byte) + } + } + + writeBool(value: boolean): void { + this.buffer.push(value ? 
1 : 0) + } + + writeI16(value: number): void { + this.buffer.push((value >> 8) & 0xff) + this.buffer.push(value & 0xff) + } + + writeI32(value: number): void { + this.buffer.push((value >> 24) & 0xff) + this.buffer.push((value >> 16) & 0xff) + this.buffer.push((value >> 8) & 0xff) + this.buffer.push(value & 0xff) + } + + writeI64(value: bigint): void { + const buf = new ArrayBuffer(8) + const view = new DataView(buf) + view.setBigInt64(0, value, false) + for (let i = 0; i < 8; i++) { + this.buffer.push(view.getUint8(i)) + } + } + + writeStringField(id: number, value: string): void { + this.writeFieldBegin(TYPE_STRING, id) + this.writeString(value) + } + + writeBoolField(id: number, value: boolean): void { + this.writeFieldBegin(TYPE_BOOL, id) + this.writeBool(value) + } + + writeI32Field(id: number, value: number): void { + this.writeFieldBegin(TYPE_I32, id) + this.writeI32(value) + } + + writeStringListField(id: number, values: string[]): void { + this.writeFieldBegin(TYPE_LIST, id) + this.buffer.push(TYPE_STRING) + this.writeI32(values.length) + for (const v of values) { + this.writeString(v) + } + } + + toBuffer(): Buffer { + return Buffer.from(this.buffer) + } +} + +export class ThriftReader { + private view: DataView + private pos = 0 + + constructor(buffer: ArrayBuffer) { + this.view = new DataView(buffer) + } + + readMessageBegin(): { name: string; type: number; seqId: number } { + const versionAndType = this.readI32() + const version = versionAndType & 0xffff0000 + if (version !== (THRIFT_VERSION_1 | 0)) { + throw new Error(`Unsupported Thrift version: 0x${version.toString(16)}`) + } + const type = versionAndType & 0x000000ff + const name = this.readString() + const seqId = this.readI32() + return { name, type, seqId } + } + + readFieldBegin(): { type: number; id: number } { + const type = this.view.getUint8(this.pos++) + if (type === TYPE_STOP) { + return { type: TYPE_STOP, id: 0 } + } + const id = this.view.getInt16(this.pos, false) + this.pos += 2 + 
return { type, id } + } + + readString(): string { + const length = this.readI32() + const bytes = new Uint8Array(this.view.buffer, this.pos, length) + this.pos += length + return new TextDecoder().decode(bytes) + } + + readBool(): boolean { + return this.view.getUint8(this.pos++) !== 0 + } + + readI32(): number { + const value = this.view.getInt32(this.pos, false) + this.pos += 4 + return value + } + + readI64(): bigint { + const value = this.view.getBigInt64(this.pos, false) + this.pos += 8 + return value + } + + readBinary(): Uint8Array { + const length = this.readI32() + const bytes = new Uint8Array(this.view.buffer, this.pos, length) + this.pos += length + return bytes + } + + readListBegin(): { elementType: number; size: number } { + const elementType = this.view.getUint8(this.pos++) + const size = this.readI32() + return { elementType, size } + } + + /** Skip a value of the given Thrift type */ + skip(type: number): void { + switch (type) { + case TYPE_BOOL: + this.pos += 1 + break + case 6: // I16 + this.pos += 2 + break + case 3: // BYTE + this.pos += 1 + break + case TYPE_I32: + this.pos += 4 + break + case TYPE_I64: + case 4: // DOUBLE + this.pos += 8 + break + case TYPE_STRING: { + const len = this.readI32() + this.pos += len + break + } + case TYPE_STRUCT: + this.skipStruct() + break + case TYPE_LIST: + case 14: { + // SET + const { elementType, size } = this.readListBegin() + for (let i = 0; i < size; i++) { + this.skip(elementType) + } + break + } + case 13: { + // MAP + const keyType = this.view.getUint8(this.pos++) + const valueType = this.view.getUint8(this.pos++) + const count = this.readI32() + for (let i = 0; i < count; i++) { + this.skip(keyType) + this.skip(valueType) + } + break + } + default: + throw new Error(`Cannot skip unknown Thrift type: ${type}`) + } + } + + private skipStruct(): void { + for (;;) { + const { type } = this.readFieldBegin() + if (type === TYPE_STOP) break + this.skip(type) + } + } + + /** Read struct fields, calling 
the handler for each field */ + readStruct(handler: (reader: ThriftReader, fieldId: number, fieldType: number) => void): void { + for (;;) { + const { type, id } = this.readFieldBegin() + if (type === TYPE_STOP) break + handler(this, id, type) + } + } + + /** Check if this is an exception response */ + isException(messageType: number): boolean { + return messageType === MESSAGE_EXCEPTION + } + + /** Read a Thrift application exception */ + readException(): { message: string; type: number } { + let message = '' + let type = 0 + this.readStruct((reader, fieldId, fieldType) => { + if (fieldId === 1 && fieldType === TYPE_STRING) { + message = reader.readString() + } else if (fieldId === 2 && fieldType === TYPE_I32) { + type = reader.readI32() + } else { + reader.skip(fieldType) + } + }) + return { message, type } + } +} + +export { TYPE_BOOL, TYPE_I32, TYPE_I64, TYPE_LIST, TYPE_STOP, TYPE_STRING, TYPE_STRUCT } diff --git a/apps/sim/app/api/tools/evernote/list-notebooks/route.ts b/apps/sim/app/api/tools/evernote/list-notebooks/route.ts new file mode 100644 index 00000000000..be5e3df9c5f --- /dev/null +++ b/apps/sim/app/api/tools/evernote/list-notebooks/route.ts @@ -0,0 +1,35 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { listNotebooks } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteListNotebooksAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey } = body + + if (!apiKey) { + return NextResponse.json({ success: false, error: 'apiKey is required' }, { status: 400 }) + } + + const notebooks = await 
listNotebooks(apiKey) + + return NextResponse.json({ + success: true, + output: { notebooks }, + }) + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error' + logger.error('Failed to list notebooks', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/list-tags/route.ts b/apps/sim/app/api/tools/evernote/list-tags/route.ts new file mode 100644 index 00000000000..2475d64ee49 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/list-tags/route.ts @@ -0,0 +1,35 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { listTags } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteListTagsAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey } = body + + if (!apiKey) { + return NextResponse.json({ success: false, error: 'apiKey is required' }, { status: 400 }) + } + + const tags = await listTags(apiKey) + + return NextResponse.json({ + success: true, + output: { tags }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to list tags', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/search-notes/route.ts b/apps/sim/app/api/tools/evernote/search-notes/route.ts new file mode 100644 index 00000000000..2687779e593 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/search-notes/route.ts @@ -0,0 +1,49 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { searchNotes } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteSearchNotesAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, query, notebookGuid, offset = 0, maxNotes = 25 } = body + + if (!apiKey || !query) { + return NextResponse.json( + { success: false, error: 'apiKey and query are required' }, + { status: 400 } + ) + } + + const clampedMaxNotes = Math.min(Math.max(Number(maxNotes) || 25, 1), 250) + + const result = await searchNotes( + apiKey, + query, + notebookGuid || undefined, + Number(offset), + clampedMaxNotes + ) + + return NextResponse.json({ + success: true, + output: { + totalNotes: result.totalNotes, + notes: result.notes, + }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to search notes', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/evernote/update-note/route.ts b/apps/sim/app/api/tools/evernote/update-note/route.ts new file mode 100644 index 00000000000..4a3fb884504 --- /dev/null +++ b/apps/sim/app/api/tools/evernote/update-note/route.ts @@ -0,0 +1,58 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { updateNote } from '@/app/api/tools/evernote/lib/client' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('EvernoteUpdateNoteAPI') + +export async function POST(request: NextRequest) { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success) { + return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const { apiKey, noteGuid, title, content, notebookGuid, tagNames } = body + + if (!apiKey || !noteGuid) { + return NextResponse.json( + { success: false, error: 'apiKey and noteGuid are required' }, + { status: 400 } + ) + } + + const parsedTags = tagNames + ? (() => { + const tags = + typeof tagNames === 'string' + ? tagNames + .split(',') + .map((t: string) => t.trim()) + .filter(Boolean) + : tagNames + return tags.length > 0 ? tags : undefined + })() + : undefined + + const note = await updateNote( + apiKey, + noteGuid, + title || undefined, + content || undefined, + notebookGuid || undefined, + parsedTags + ) + + return NextResponse.json({ + success: true, + output: { note }, + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error' + logger.error('Failed to update note', { error: message }) + return NextResponse.json({ success: false, error: message }, { status: 500 }) + } +} diff --git a/apps/sim/blocks/blocks/evernote.ts b/apps/sim/blocks/blocks/evernote.ts new file mode 100644 index 00000000000..acc7fde5ccb --- /dev/null +++ b/apps/sim/blocks/blocks/evernote.ts @@ -0,0 +1,308 @@ +import { EvernoteIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import { AuthMode } from '@/blocks/types' + +export const EvernoteBlock: BlockConfig = { + type: 'evernote', + name: 'Evernote', + description: 'Manage notes, notebooks, and tags in Evernote', + longDescription: + 'Integrate with Evernote to manage notes, notebooks, and tags. Create, read, update, copy, search, and delete notes. Create and list notebooks and tags.', + docsLink: 'https://docs.sim.ai/tools/evernote', + category: 'tools', + bgColor: '#E0E0E0', + icon: EvernoteIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'Create Note', id: 'create_note' }, + { label: 'Get Note', id: 'get_note' }, + { label: 'Update Note', id: 'update_note' }, + { label: 'Delete Note', id: 'delete_note' }, + { label: 'Copy Note', id: 'copy_note' }, + { label: 'Search Notes', id: 'search_notes' }, + { label: 'Get Notebook', id: 'get_notebook' }, + { label: 'Create Notebook', id: 'create_notebook' }, + { label: 'List Notebooks', id: 'list_notebooks' }, + { label: 'Create Tag', id: 'create_tag' }, + { label: 'List Tags', id: 'list_tags' }, + ], + value: () => 'create_note', + }, + { + id: 'apiKey', + title: 'Developer Token', + type: 'short-input', + password: true, + placeholder: 'Enter your Evernote developer token', + required: true, + }, + { + id: 'title', + title: 'Title', + type: 'short-input', + placeholder: 'Note title', + condition: { field: 'operation', value: 'create_note' }, + required: { field: 
'operation', value: 'create_note' }, + }, + { + id: 'content', + title: 'Content', + type: 'long-input', + placeholder: 'Note content (plain text or ENML)', + condition: { field: 'operation', value: 'create_note' }, + required: { field: 'operation', value: 'create_note' }, + }, + { + id: 'noteGuid', + title: 'Note GUID', + type: 'short-input', + placeholder: 'Enter the note GUID', + condition: { + field: 'operation', + value: ['get_note', 'update_note', 'delete_note', 'copy_note'], + }, + required: { + field: 'operation', + value: ['get_note', 'update_note', 'delete_note', 'copy_note'], + }, + }, + { + id: 'updateTitle', + title: 'New Title', + type: 'short-input', + placeholder: 'New title (leave empty to keep current)', + condition: { field: 'operation', value: 'update_note' }, + }, + { + id: 'updateContent', + title: 'New Content', + type: 'long-input', + placeholder: 'New content (leave empty to keep current)', + condition: { field: 'operation', value: 'update_note' }, + }, + { + id: 'toNotebookGuid', + title: 'Destination Notebook GUID', + type: 'short-input', + placeholder: 'GUID of the destination notebook', + condition: { field: 'operation', value: 'copy_note' }, + required: { field: 'operation', value: 'copy_note' }, + }, + { + id: 'query', + title: 'Search Query', + type: 'short-input', + placeholder: 'e.g., "tag:work intitle:meeting"', + condition: { field: 'operation', value: 'search_notes' }, + required: { field: 'operation', value: 'search_notes' }, + }, + { + id: 'notebookGuid', + title: 'Notebook GUID', + type: 'short-input', + placeholder: 'Notebook GUID', + condition: { + field: 'operation', + value: ['create_note', 'update_note', 'search_notes', 'get_notebook'], + }, + required: { field: 'operation', value: 'get_notebook' }, + }, + { + id: 'notebookName', + title: 'Notebook Name', + type: 'short-input', + placeholder: 'Name for the new notebook', + condition: { field: 'operation', value: 'create_notebook' }, + required: { field: 'operation', 
value: 'create_notebook' }, + }, + { + id: 'stack', + title: 'Stack', + type: 'short-input', + placeholder: 'Stack name (optional)', + condition: { field: 'operation', value: 'create_notebook' }, + mode: 'advanced', + }, + { + id: 'tagName', + title: 'Tag Name', + type: 'short-input', + placeholder: 'Name for the new tag', + condition: { field: 'operation', value: 'create_tag' }, + required: { field: 'operation', value: 'create_tag' }, + }, + { + id: 'parentGuid', + title: 'Parent Tag GUID', + type: 'short-input', + placeholder: 'Parent tag GUID (optional)', + condition: { field: 'operation', value: 'create_tag' }, + mode: 'advanced', + }, + { + id: 'tagNames', + title: 'Tags', + type: 'short-input', + placeholder: 'Comma-separated tags (e.g., "work, meeting, urgent")', + condition: { field: 'operation', value: ['create_note', 'update_note'] }, + mode: 'advanced', + }, + { + id: 'maxNotes', + title: 'Max Results', + type: 'short-input', + placeholder: '25', + condition: { field: 'operation', value: 'search_notes' }, + mode: 'advanced', + }, + { + id: 'offset', + title: 'Offset', + type: 'short-input', + placeholder: '0', + condition: { field: 'operation', value: 'search_notes' }, + mode: 'advanced', + }, + { + id: 'withContent', + title: 'Include Content', + type: 'dropdown', + options: [ + { label: 'Yes', id: 'true' }, + { label: 'No', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'get_note' }, + mode: 'advanced', + }, + ], + + tools: { + access: [ + 'evernote_copy_note', + 'evernote_create_note', + 'evernote_create_notebook', + 'evernote_create_tag', + 'evernote_delete_note', + 'evernote_get_note', + 'evernote_get_notebook', + 'evernote_list_notebooks', + 'evernote_list_tags', + 'evernote_search_notes', + 'evernote_update_note', + ], + config: { + tool: (params) => `evernote_${params.operation}`, + params: (params) => { + const { operation, apiKey, ...rest } = params + + switch (operation) { + case 'create_note': + return { 
+ apiKey, + title: rest.title, + content: rest.content, + notebookGuid: rest.notebookGuid || undefined, + tagNames: rest.tagNames || undefined, + } + case 'get_note': + return { + apiKey, + noteGuid: rest.noteGuid, + withContent: rest.withContent !== 'false', + } + case 'update_note': + return { + apiKey, + noteGuid: rest.noteGuid, + title: rest.updateTitle || undefined, + content: rest.updateContent || undefined, + notebookGuid: rest.notebookGuid || undefined, + tagNames: rest.tagNames || undefined, + } + case 'delete_note': + return { + apiKey, + noteGuid: rest.noteGuid, + } + case 'copy_note': + return { + apiKey, + noteGuid: rest.noteGuid, + toNotebookGuid: rest.toNotebookGuid, + } + case 'search_notes': + return { + apiKey, + query: rest.query, + notebookGuid: rest.notebookGuid || undefined, + offset: rest.offset ? Number(rest.offset) : 0, + maxNotes: rest.maxNotes ? Number(rest.maxNotes) : 25, + } + case 'get_notebook': + return { + apiKey, + notebookGuid: rest.notebookGuid, + } + case 'create_notebook': + return { + apiKey, + name: rest.notebookName, + stack: rest.stack || undefined, + } + case 'list_notebooks': + return { apiKey } + case 'create_tag': + return { + apiKey, + name: rest.tagName, + parentGuid: rest.parentGuid || undefined, + } + case 'list_tags': + return { apiKey } + default: + return { apiKey } + } + }, + }, + }, + + inputs: { + apiKey: { type: 'string', description: 'Evernote developer token' }, + operation: { type: 'string', description: 'Operation to perform' }, + title: { type: 'string', description: 'Note title' }, + content: { type: 'string', description: 'Note content' }, + noteGuid: { type: 'string', description: 'Note GUID' }, + updateTitle: { type: 'string', description: 'New note title' }, + updateContent: { type: 'string', description: 'New note content' }, + toNotebookGuid: { type: 'string', description: 'Destination notebook GUID' }, + query: { type: 'string', description: 'Search query' }, + notebookGuid: { type: 'string', 
description: 'Notebook GUID' }, + notebookName: { type: 'string', description: 'Notebook name' }, + stack: { type: 'string', description: 'Notebook stack name' }, + tagName: { type: 'string', description: 'Tag name' }, + parentGuid: { type: 'string', description: 'Parent tag GUID' }, + tagNames: { type: 'string', description: 'Comma-separated tag names' }, + maxNotes: { type: 'string', description: 'Maximum number of results' }, + offset: { type: 'string', description: 'Starting index for results' }, + withContent: { type: 'string', description: 'Whether to include note content' }, + }, + + outputs: { + note: { type: 'json', description: 'Note data' }, + notebook: { type: 'json', description: 'Notebook data' }, + notebooks: { type: 'json', description: 'List of notebooks' }, + tag: { type: 'json', description: 'Tag data' }, + tags: { type: 'json', description: 'List of tags' }, + totalNotes: { type: 'number', description: 'Total number of matching notes' }, + notes: { type: 'json', description: 'List of note metadata' }, + success: { type: 'boolean', description: 'Whether the operation succeeded' }, + noteGuid: { type: 'string', description: 'GUID of the affected note' }, + }, +} diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index 23c47186ca4..165facc910b 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -38,6 +38,7 @@ import { ElasticsearchBlock } from '@/blocks/blocks/elasticsearch' import { ElevenLabsBlock } from '@/blocks/blocks/elevenlabs' import { EnrichBlock } from '@/blocks/blocks/enrich' import { EvaluatorBlock } from '@/blocks/blocks/evaluator' +import { EvernoteBlock } from '@/blocks/blocks/evernote' import { ExaBlock } from '@/blocks/blocks/exa' import { FileBlock, FileV2Block, FileV3Block } from '@/blocks/blocks/file' import { FirecrawlBlock } from '@/blocks/blocks/firecrawl' @@ -235,6 +236,7 @@ export const registry: Record = { elasticsearch: ElasticsearchBlock, elevenlabs: ElevenLabsBlock, enrich: 
EnrichBlock, + evernote: EvernoteBlock, evaluator: EvaluatorBlock, exa: ExaBlock, file: FileBlock, diff --git a/apps/sim/components/icons.tsx b/apps/sim/components/icons.tsx index 41fa14fa22a..e77c864d184 100644 --- a/apps/sim/components/icons.tsx +++ b/apps/sim/components/icons.tsx @@ -1955,6 +1955,14 @@ export function Mem0Icon(props: SVGProps) { ) } +export function EvernoteIcon(props: SVGProps) { + return ( + + + + ) +} + export function ElevenLabsIcon(props: SVGProps) { return ( = { + id: 'evernote_copy_note', + name: 'Evernote Copy Note', + description: 'Copy a note to another notebook in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + noteGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the note to copy', + }, + toNotebookGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the destination notebook', + }, + }, + + request: { + url: '/api/tools/evernote/copy-note', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + noteGuid: params.noteGuid, + toNotebookGuid: params.toNotebookGuid, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to copy note') + } + return { + success: true, + output: { note: data.output.note }, + } + }, + + outputs: { + note: { + type: 'object', + description: 'The copied note metadata', + properties: { + guid: { type: 'string', description: 'New note GUID' }, + title: { type: 'string', description: 'Note title' }, + notebookGuid: { + type: 'string', + description: 'GUID of the destination notebook', + optional: true, + }, + created: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + updated: { + type: 
'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/create_note.ts b/apps/sim/tools/evernote/create_note.ts new file mode 100644 index 00000000000..281735f6ac1 --- /dev/null +++ b/apps/sim/tools/evernote/create_note.ts @@ -0,0 +1,101 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteCreateNoteParams, EvernoteCreateNoteResponse } from './types' + +export const evernoteCreateNoteTool: ToolConfig< + EvernoteCreateNoteParams, + EvernoteCreateNoteResponse +> = { + id: 'evernote_create_note', + name: 'Evernote Create Note', + description: 'Create a new note in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + title: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Title of the note', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Content of the note (plain text or ENML)', + }, + notebookGuid: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'GUID of the notebook to create the note in (defaults to default notebook)', + }, + tagNames: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated list of tag names to apply', + }, + }, + + request: { + url: '/api/tools/evernote/create-note', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + title: params.title, + content: params.content, + notebookGuid: params.notebookGuid || null, + tagNames: params.tagNames || null, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to create note') + } + return { + success: true, + output: { note: data.output.note }, + } 
+ }, + + outputs: { + note: { + type: 'object', + description: 'The created note', + properties: { + guid: { type: 'string', description: 'Unique identifier of the note' }, + title: { type: 'string', description: 'Title of the note' }, + content: { type: 'string', description: 'ENML content of the note', optional: true }, + notebookGuid: { + type: 'string', + description: 'GUID of the containing notebook', + optional: true, + }, + tagNames: { + type: 'array', + description: 'Tag names applied to the note', + optional: true, + }, + created: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + updated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/create_notebook.ts b/apps/sim/tools/evernote/create_notebook.ts new file mode 100644 index 00000000000..ba46e48b50b --- /dev/null +++ b/apps/sim/tools/evernote/create_notebook.ts @@ -0,0 +1,78 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteCreateNotebookParams, EvernoteCreateNotebookResponse } from './types' + +export const evernoteCreateNotebookTool: ToolConfig< + EvernoteCreateNotebookParams, + EvernoteCreateNotebookResponse +> = { + id: 'evernote_create_notebook', + name: 'Evernote Create Notebook', + description: 'Create a new notebook in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + name: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Name for the new notebook', + }, + stack: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Stack name to group the notebook under', + }, + }, + + request: { + url: '/api/tools/evernote/create-notebook', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: 
params.apiKey, + name: params.name, + stack: params.stack || null, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to create notebook') + } + return { + success: true, + output: { notebook: data.output.notebook }, + } + }, + + outputs: { + notebook: { + type: 'object', + description: 'The created notebook', + properties: { + guid: { type: 'string', description: 'Notebook GUID' }, + name: { type: 'string', description: 'Notebook name' }, + defaultNotebook: { type: 'boolean', description: 'Whether this is the default notebook' }, + serviceCreated: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + serviceUpdated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + stack: { type: 'string', description: 'Notebook stack name', optional: true }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/create_tag.ts b/apps/sim/tools/evernote/create_tag.ts new file mode 100644 index 00000000000..aeaa3d2dbf6 --- /dev/null +++ b/apps/sim/tools/evernote/create_tag.ts @@ -0,0 +1,70 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteCreateTagParams, EvernoteCreateTagResponse } from './types' + +export const evernoteCreateTagTool: ToolConfig = + { + id: 'evernote_create_tag', + name: 'Evernote Create Tag', + description: 'Create a new tag in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + name: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Name for the new tag', + }, + parentGuid: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'GUID of the parent tag for hierarchy', + }, + }, + + request: { + url: '/api/tools/evernote/create-tag', + method: 'POST', + headers: () => 
({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + name: params.name, + parentGuid: params.parentGuid || null, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to create tag') + } + return { + success: true, + output: { tag: data.output.tag }, + } + }, + + outputs: { + tag: { + type: 'object', + description: 'The created tag', + properties: { + guid: { type: 'string', description: 'Tag GUID' }, + name: { type: 'string', description: 'Tag name' }, + parentGuid: { type: 'string', description: 'Parent tag GUID', optional: true }, + updateSequenceNum: { + type: 'number', + description: 'Update sequence number', + optional: true, + }, + }, + }, + }, + } diff --git a/apps/sim/tools/evernote/delete_note.ts b/apps/sim/tools/evernote/delete_note.ts new file mode 100644 index 00000000000..6983a78d3f8 --- /dev/null +++ b/apps/sim/tools/evernote/delete_note.ts @@ -0,0 +1,62 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteDeleteNoteParams, EvernoteDeleteNoteResponse } from './types' + +export const evernoteDeleteNoteTool: ToolConfig< + EvernoteDeleteNoteParams, + EvernoteDeleteNoteResponse +> = { + id: 'evernote_delete_note', + name: 'Evernote Delete Note', + description: 'Move a note to the trash in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + noteGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the note to delete', + }, + }, + + request: { + url: '/api/tools/evernote/delete-note', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + noteGuid: params.noteGuid, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if 
(!data.success) { + throw new Error(data.error || 'Failed to delete note') + } + return { + success: true, + output: { + success: true, + noteGuid: data.output.noteGuid, + }, + } + }, + + outputs: { + success: { + type: 'boolean', + description: 'Whether the note was successfully deleted', + }, + noteGuid: { + type: 'string', + description: 'GUID of the deleted note', + }, + }, +} diff --git a/apps/sim/tools/evernote/get_note.ts b/apps/sim/tools/evernote/get_note.ts new file mode 100644 index 00000000000..4773bd23700 --- /dev/null +++ b/apps/sim/tools/evernote/get_note.ts @@ -0,0 +1,87 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteGetNoteParams, EvernoteGetNoteResponse } from './types' + +export const evernoteGetNoteTool: ToolConfig = { + id: 'evernote_get_note', + name: 'Evernote Get Note', + description: 'Retrieve a note from Evernote by its GUID', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + noteGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the note to retrieve', + }, + withContent: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether to include note content (default: true)', + }, + }, + + request: { + url: '/api/tools/evernote/get-note', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + noteGuid: params.noteGuid, + withContent: params.withContent ?? 
true, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to get note') + } + return { + success: true, + output: { note: data.output.note }, + } + }, + + outputs: { + note: { + type: 'object', + description: 'The retrieved note', + properties: { + guid: { type: 'string', description: 'Unique identifier of the note' }, + title: { type: 'string', description: 'Title of the note' }, + content: { type: 'string', description: 'ENML content of the note', optional: true }, + contentLength: { + type: 'number', + description: 'Length of the note content', + optional: true, + }, + notebookGuid: { + type: 'string', + description: 'GUID of the containing notebook', + optional: true, + }, + tagGuids: { type: 'array', description: 'GUIDs of tags on the note', optional: true }, + tagNames: { type: 'array', description: 'Names of tags on the note', optional: true }, + created: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + updated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + active: { type: 'boolean', description: 'Whether the note is active (not in trash)' }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/get_notebook.ts b/apps/sim/tools/evernote/get_notebook.ts new file mode 100644 index 00000000000..78a2fd59fa6 --- /dev/null +++ b/apps/sim/tools/evernote/get_notebook.ts @@ -0,0 +1,71 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteGetNotebookParams, EvernoteGetNotebookResponse } from './types' + +export const evernoteGetNotebookTool: ToolConfig< + EvernoteGetNotebookParams, + EvernoteGetNotebookResponse +> = { + id: 'evernote_get_notebook', + name: 'Evernote Get Notebook', + description: 'Retrieve a notebook from Evernote by its GUID', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 
'user-only', + description: 'Evernote developer token', + }, + notebookGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the notebook to retrieve', + }, + }, + + request: { + url: '/api/tools/evernote/get-notebook', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + notebookGuid: params.notebookGuid, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to get notebook') + } + return { + success: true, + output: { notebook: data.output.notebook }, + } + }, + + outputs: { + notebook: { + type: 'object', + description: 'The retrieved notebook', + properties: { + guid: { type: 'string', description: 'Notebook GUID' }, + name: { type: 'string', description: 'Notebook name' }, + defaultNotebook: { type: 'boolean', description: 'Whether this is the default notebook' }, + serviceCreated: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + serviceUpdated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + stack: { type: 'string', description: 'Notebook stack name', optional: true }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/index.ts b/apps/sim/tools/evernote/index.ts new file mode 100644 index 00000000000..08819e0baf4 --- /dev/null +++ b/apps/sim/tools/evernote/index.ts @@ -0,0 +1,12 @@ +export { evernoteCopyNoteTool } from './copy_note' +export { evernoteCreateNoteTool } from './create_note' +export { evernoteCreateNotebookTool } from './create_notebook' +export { evernoteCreateTagTool } from './create_tag' +export { evernoteDeleteNoteTool } from './delete_note' +export { evernoteGetNoteTool } from './get_note' +export { evernoteGetNotebookTool } from './get_notebook' +export { evernoteListNotebooksTool } from './list_notebooks' +export { 
evernoteListTagsTool } from './list_tags' +export { evernoteSearchNotesTool } from './search_notes' +export * from './types' +export { evernoteUpdateNoteTool } from './update_note' diff --git a/apps/sim/tools/evernote/list_notebooks.ts b/apps/sim/tools/evernote/list_notebooks.ts new file mode 100644 index 00000000000..b2b9756c7e8 --- /dev/null +++ b/apps/sim/tools/evernote/list_notebooks.ts @@ -0,0 +1,64 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteListNotebooksParams, EvernoteListNotebooksResponse } from './types' + +export const evernoteListNotebooksTool: ToolConfig< + EvernoteListNotebooksParams, + EvernoteListNotebooksResponse +> = { + id: 'evernote_list_notebooks', + name: 'Evernote List Notebooks', + description: 'List all notebooks in an Evernote account', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + }, + + request: { + url: '/api/tools/evernote/list-notebooks', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to list notebooks') + } + return { + success: true, + output: { notebooks: data.output.notebooks }, + } + }, + + outputs: { + notebooks: { + type: 'array', + description: 'List of notebooks', + properties: { + guid: { type: 'string', description: 'Notebook GUID' }, + name: { type: 'string', description: 'Notebook name' }, + defaultNotebook: { type: 'boolean', description: 'Whether this is the default notebook' }, + serviceCreated: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + serviceUpdated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + stack: { type: 'string', description: 
'Notebook stack name', optional: true }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/list_tags.ts b/apps/sim/tools/evernote/list_tags.ts new file mode 100644 index 00000000000..65cb5a04fdd --- /dev/null +++ b/apps/sim/tools/evernote/list_tags.ts @@ -0,0 +1,55 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteListTagsParams, EvernoteListTagsResponse } from './types' + +export const evernoteListTagsTool: ToolConfig = { + id: 'evernote_list_tags', + name: 'Evernote List Tags', + description: 'List all tags in an Evernote account', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + }, + + request: { + url: '/api/tools/evernote/list-tags', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to list tags') + } + return { + success: true, + output: { tags: data.output.tags }, + } + }, + + outputs: { + tags: { + type: 'array', + description: 'List of tags', + properties: { + guid: { type: 'string', description: 'Tag GUID' }, + name: { type: 'string', description: 'Tag name' }, + parentGuid: { type: 'string', description: 'Parent tag GUID', optional: true }, + updateSequenceNum: { + type: 'number', + description: 'Update sequence number', + optional: true, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/search_notes.ts b/apps/sim/tools/evernote/search_notes.ts new file mode 100644 index 00000000000..a75056434d3 --- /dev/null +++ b/apps/sim/tools/evernote/search_notes.ts @@ -0,0 +1,92 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteSearchNotesParams, EvernoteSearchNotesResponse } from './types' + +export const evernoteSearchNotesTool: ToolConfig< + 
EvernoteSearchNotesParams, + EvernoteSearchNotesResponse +> = { + id: 'evernote_search_notes', + name: 'Evernote Search Notes', + description: 'Search for notes in Evernote using the Evernote search grammar', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + query: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Search query using Evernote search grammar (e.g., "tag:work intitle:meeting")', + }, + notebookGuid: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Restrict search to a specific notebook by GUID', + }, + offset: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Starting index for results (default: 0)', + }, + maxNotes: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of notes to return (default: 25)', + }, + }, + + request: { + url: '/api/tools/evernote/search-notes', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + query: params.query, + notebookGuid: params.notebookGuid || null, + offset: params.offset ?? 0, + maxNotes: params.maxNotes ?? 
25, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to search notes') + } + return { + success: true, + output: { + totalNotes: data.output.totalNotes, + notes: data.output.notes, + }, + } + }, + + outputs: { + totalNotes: { + type: 'number', + description: 'Total number of matching notes', + }, + notes: { + type: 'array', + description: 'List of matching note metadata', + properties: { + guid: { type: 'string', description: 'Note GUID' }, + title: { type: 'string', description: 'Note title', optional: true }, + contentLength: { type: 'number', description: 'Content length in bytes', optional: true }, + created: { type: 'number', description: 'Creation timestamp', optional: true }, + updated: { type: 'number', description: 'Last updated timestamp', optional: true }, + notebookGuid: { type: 'string', description: 'Containing notebook GUID', optional: true }, + tagGuids: { type: 'array', description: 'Tag GUIDs', optional: true }, + }, + }, + }, +} diff --git a/apps/sim/tools/evernote/types.ts b/apps/sim/tools/evernote/types.ts new file mode 100644 index 00000000000..153594b3cc1 --- /dev/null +++ b/apps/sim/tools/evernote/types.ts @@ -0,0 +1,166 @@ +import type { ToolResponse } from '@/tools/types' + +export interface EvernoteBaseParams { + apiKey: string +} + +export interface EvernoteCreateNoteParams extends EvernoteBaseParams { + title: string + content: string + notebookGuid?: string + tagNames?: string +} + +export interface EvernoteGetNoteParams extends EvernoteBaseParams { + noteGuid: string + withContent?: boolean +} + +export interface EvernoteUpdateNoteParams extends EvernoteBaseParams { + noteGuid: string + title?: string + content?: string + notebookGuid?: string + tagNames?: string +} + +export interface EvernoteDeleteNoteParams extends EvernoteBaseParams { + noteGuid: string +} + +export interface EvernoteSearchNotesParams extends 
EvernoteBaseParams { + query: string + notebookGuid?: string + offset?: number + maxNotes?: number +} + +export interface EvernoteListNotebooksParams extends EvernoteBaseParams {} + +export interface EvernoteGetNotebookParams extends EvernoteBaseParams { + notebookGuid: string +} + +export interface EvernoteCreateNotebookParams extends EvernoteBaseParams { + name: string + stack?: string +} + +export interface EvernoteListTagsParams extends EvernoteBaseParams {} + +export interface EvernoteCreateTagParams extends EvernoteBaseParams { + name: string + parentGuid?: string +} + +export interface EvernoteCopyNoteParams extends EvernoteBaseParams { + noteGuid: string + toNotebookGuid: string +} + +export interface EvernoteNoteOutput { + guid: string + title: string + content: string | null + contentLength: number | null + created: number | null + updated: number | null + active: boolean + notebookGuid: string | null + tagGuids: string[] + tagNames: string[] +} + +export interface EvernoteNotebookOutput { + guid: string + name: string + defaultNotebook: boolean + serviceCreated: number | null + serviceUpdated: number | null + stack: string | null +} + +export interface EvernoteNoteMetadataOutput { + guid: string + title: string | null + contentLength: number | null + created: number | null + updated: number | null + notebookGuid: string | null + tagGuids: string[] +} + +export interface EvernoteTagOutput { + guid: string + name: string + parentGuid: string | null + updateSequenceNum: number | null +} + +export interface EvernoteCreateNoteResponse extends ToolResponse { + output: { + note: EvernoteNoteOutput + } +} + +export interface EvernoteGetNoteResponse extends ToolResponse { + output: { + note: EvernoteNoteOutput + } +} + +export interface EvernoteUpdateNoteResponse extends ToolResponse { + output: { + note: EvernoteNoteOutput + } +} + +export interface EvernoteDeleteNoteResponse extends ToolResponse { + output: { + success: boolean + noteGuid: string + } +} + 
+export interface EvernoteSearchNotesResponse extends ToolResponse { + output: { + totalNotes: number + notes: EvernoteNoteMetadataOutput[] + } +} + +export interface EvernoteListNotebooksResponse extends ToolResponse { + output: { + notebooks: EvernoteNotebookOutput[] + } +} + +export interface EvernoteGetNotebookResponse extends ToolResponse { + output: { + notebook: EvernoteNotebookOutput + } +} + +export interface EvernoteCreateNotebookResponse extends ToolResponse { + output: { + notebook: EvernoteNotebookOutput + } +} + +export interface EvernoteListTagsResponse extends ToolResponse { + output: { + tags: EvernoteTagOutput[] + } +} + +export interface EvernoteCreateTagResponse extends ToolResponse { + output: { + tag: EvernoteTagOutput + } +} + +export interface EvernoteCopyNoteResponse extends ToolResponse { + output: { + note: EvernoteNoteOutput + } +} diff --git a/apps/sim/tools/evernote/update_note.ts b/apps/sim/tools/evernote/update_note.ts new file mode 100644 index 00000000000..48872e6c6e4 --- /dev/null +++ b/apps/sim/tools/evernote/update_note.ts @@ -0,0 +1,104 @@ +import type { ToolConfig } from '@/tools/types' +import type { EvernoteUpdateNoteParams, EvernoteUpdateNoteResponse } from './types' + +export const evernoteUpdateNoteTool: ToolConfig< + EvernoteUpdateNoteParams, + EvernoteUpdateNoteResponse +> = { + id: 'evernote_update_note', + name: 'Evernote Update Note', + description: 'Update an existing note in Evernote', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Evernote developer token', + }, + noteGuid: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'GUID of the note to update', + }, + title: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New title for the note', + }, + content: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New content for the note (plain 
text or ENML)', + }, + notebookGuid: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'GUID of the notebook to move the note to', + }, + tagNames: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated list of tag names (replaces existing tags)', + }, + }, + + request: { + url: '/api/tools/evernote/update-note', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + apiKey: params.apiKey, + noteGuid: params.noteGuid, + title: params.title || null, + content: params.content || null, + notebookGuid: params.notebookGuid || null, + tagNames: params.tagNames || null, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to update note') + } + return { + success: true, + output: { note: data.output.note }, + } + }, + + outputs: { + note: { + type: 'object', + description: 'The updated note', + properties: { + guid: { type: 'string', description: 'Unique identifier of the note' }, + title: { type: 'string', description: 'Title of the note' }, + content: { type: 'string', description: 'ENML content of the note', optional: true }, + notebookGuid: { + type: 'string', + description: 'GUID of the containing notebook', + optional: true, + }, + tagNames: { type: 'array', description: 'Tag names on the note', optional: true }, + created: { + type: 'number', + description: 'Creation timestamp in milliseconds', + optional: true, + }, + updated: { + type: 'number', + description: 'Last updated timestamp in milliseconds', + optional: true, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 437c36a3777..dfc422e670b 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -426,6 +426,19 @@ import { enrichSearchSimilarCompaniesTool, enrichVerifyEmailTool, } from '@/tools/enrich' +import { + 
evernoteCopyNoteTool, + evernoteCreateNotebookTool, + evernoteCreateNoteTool, + evernoteCreateTagTool, + evernoteDeleteNoteTool, + evernoteGetNotebookTool, + evernoteGetNoteTool, + evernoteListNotebooksTool, + evernoteListTagsTool, + evernoteSearchNotesTool, + evernoteUpdateNoteTool, +} from '@/tools/evernote' import { exaAnswerTool, exaFindSimilarLinksTool, @@ -3154,6 +3167,17 @@ export const tools: Record = { elasticsearch_list_indices: elasticsearchListIndicesTool, elasticsearch_cluster_health: elasticsearchClusterHealthTool, elasticsearch_cluster_stats: elasticsearchClusterStatsTool, + evernote_copy_note: evernoteCopyNoteTool, + evernote_create_note: evernoteCreateNoteTool, + evernote_create_notebook: evernoteCreateNotebookTool, + evernote_create_tag: evernoteCreateTagTool, + evernote_delete_note: evernoteDeleteNoteTool, + evernote_get_note: evernoteGetNoteTool, + evernote_get_notebook: evernoteGetNotebookTool, + evernote_list_notebooks: evernoteListNotebooksTool, + evernote_list_tags: evernoteListTagsTool, + evernote_search_notes: evernoteSearchNotesTool, + evernote_update_note: evernoteUpdateNoteTool, enrich_check_credits: enrichCheckCreditsTool, enrich_company_funding: enrichCompanyFundingTool, enrich_company_lookup: enrichCompanyLookupTool, From 158d5236bc6adac9d62fa1ec00bdff5a181611da Mon Sep 17 00:00:00 2001 From: Theodore Li Date: Sat, 7 Mar 2026 10:06:57 -0800 Subject: [PATCH 3/6] feat(hosted key): Add exa hosted key (#3221) * feat(hosted keys): Implement serper hosted key * Handle required fields correctly for hosted keys * Add rate limiting (3 tries, exponential backoff) * Add custom pricing, switch to exa as first hosted key * Add telemetry * Consolidate byok type definitions * Add warning comment if default calculation is used * Record usage to user stats table * Fix unit tests, use cost property * Include more metadata in cost output * Fix disabled tests * Fix spacing * Fix lint * Move knowledge cost restructuring away from generic block handler * 
Migrate knowledge unit tests * Lint * Fix broken tests * Add user based hosted key throttling * Refactor hosted key handling. Add optimistic handling of throttling for custom throttle rules. * Remove research as hosted key. Recommend BYOK if throttling occurs * Make adding api keys adjustable via env vars * Remove vestigial fields from research * Make billing actor id required for throttling * Switch to round robin for api key distribution * Add helper method for adding hosted key cost * Strip leading double underscores to avoid breaking change * Lint fix * Remove falsy check in favor of explicit null check * Add more detailed metrics for different throttling types * Fix _costDollars field * Handle hosted agent tool calls * Fail loudly if cost field isn't found * Remove any type * Fix type error * Fix lint * Fix usage log double logging data * Fix test --------- Co-authored-by: Theodore Li --- .../api/workspaces/[id]/byok-keys/route.ts | 2 +- .../hooks/use-editor-subblock-layout.ts | 4 + .../workflow-block/workflow-block.tsx | 2 + .../settings-modal/components/byok/byok.tsx | 11 +- apps/sim/blocks/blocks/exa.ts | 14 +- apps/sim/blocks/types.ts | 1 + .../handlers/generic/generic-handler.test.ts | 215 ----- .../handlers/generic/generic-handler.ts | 22 +- apps/sim/hooks/queries/byok-keys.ts | 3 +- apps/sim/lib/api-key/byok.ts | 3 +- apps/sim/lib/billing/core/usage-log.ts | 14 +- .../tool-executor/integration-tools.ts | 11 +- .../hosted-key-rate-limiter.test.ts | 521 +++++++++++ .../hosted-key/hosted-key-rate-limiter.ts | 349 ++++++++ .../lib/core/rate-limiter/hosted-key/index.ts | 17 + .../lib/core/rate-limiter/hosted-key/types.ts | 108 +++ apps/sim/lib/core/rate-limiter/index.ts | 15 + .../rate-limiter/storage/db-token-bucket.ts | 2 +- apps/sim/lib/core/telemetry.ts | 49 ++ apps/sim/lib/logs/execution/logger.ts | 2 + .../sim/lib/logs/execution/logging-factory.ts | 6 + .../sim/lib/workflows/subblocks/visibility.ts | 10 + apps/sim/providers/anthropic/core.ts | 19 +-
apps/sim/providers/azure-openai/index.ts | 5 +- apps/sim/providers/bedrock/index.ts | 6 +- apps/sim/providers/cerebras/index.ts | 10 +- apps/sim/providers/deepseek/index.ts | 10 +- apps/sim/providers/gemini/core.ts | 5 +- apps/sim/providers/groq/index.ts | 10 +- apps/sim/providers/index.ts | 7 + apps/sim/providers/mistral/index.ts | 9 +- apps/sim/providers/ollama/index.ts | 10 +- apps/sim/providers/openai/core.ts | 9 +- apps/sim/providers/openrouter/index.ts | 5 +- apps/sim/providers/types.ts | 3 +- apps/sim/providers/utils.test.ts | 1 + apps/sim/providers/utils.ts | 15 + apps/sim/providers/vllm/index.ts | 9 +- apps/sim/providers/xai/index.ts | 10 +- apps/sim/serializer/index.ts | 2 + apps/sim/tools/exa/answer.ts | 20 + apps/sim/tools/exa/find_similar_links.ts | 20 + apps/sim/tools/exa/get_contents.ts | 20 + apps/sim/tools/exa/search.ts | 20 + apps/sim/tools/exa/types.ts | 9 + apps/sim/tools/index.test.ts | 824 +++++++++++++++++- apps/sim/tools/index.ts | 420 ++++++++- apps/sim/tools/knowledge/knowledge.test.ts | 202 +++++ apps/sim/tools/knowledge/search.ts | 13 +- apps/sim/tools/knowledge/upload_chunk.ts | 13 +- apps/sim/tools/params.ts | 8 + apps/sim/tools/types.ts | 80 +- 52 files changed, 2840 insertions(+), 335 deletions(-) create mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts create mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts create mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/index.ts create mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/types.ts create mode 100644 apps/sim/tools/knowledge/knowledge.test.ts diff --git a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts index ab4c9600df9..f4bddc4298b 100644 --- a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts +++ b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts @@ -13,7 +13,7 @@ import { getUserEntityPermissions, getWorkspaceById } from 
'@/lib/workspaces/per const logger = createLogger('WorkspaceBYOKKeysAPI') -const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral'] as const +const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral', 'exa'] as const const UpsertKeySchema = z.object({ providerId: z.enum(VALID_PROVIDERS), diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts index 50d3f416e43..0cf118e428e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts @@ -3,6 +3,7 @@ import { buildCanonicalIndex, evaluateSubBlockCondition, isSubBlockFeatureEnabled, + isSubBlockHiddenByHostedKey, isSubBlockVisibleForMode, } from '@/lib/workflows/subblocks/visibility' import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types' @@ -108,6 +109,9 @@ export function useEditorSubblockLayout( // Check required feature if specified - declarative feature gating if (!isSubBlockFeatureEnabled(block)) return false + // Hide tool API key fields when hosted + if (isSubBlockHiddenByHostedKey(block)) return false + // Special handling for trigger-config type (legacy trigger configuration UI) if (block.type === ('trigger-config' as SubBlockType)) { const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx index 5a559801917..10b56df0dc7 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx @@ -16,6 +16,7 @@ import { evaluateSubBlockCondition, hasAdvancedValues, isSubBlockFeatureEnabled, + isSubBlockHiddenByHostedKey, isSubBlockVisibleForMode, resolveDependencyValue, } from '@/lib/workflows/subblocks/visibility' @@ -977,6 +978,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({ if (block.hidden) return false if (block.hideFromPreview) return false if (!isSubBlockFeatureEnabled(block)) return false + if (isSubBlockHiddenByHostedKey(block)) return false const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx index b8304402b3b..39f308d9e8d 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx @@ -13,15 +13,15 @@ import { ModalFooter, ModalHeader, } from '@/components/emcn' -import { AnthropicIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons' +import { AnthropicIcon, ExaAIIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons' import { Skeleton } from '@/components/ui' import { type BYOKKey, - type BYOKProviderId, useBYOKKeys, useDeleteBYOKKey, useUpsertBYOKKey, } from '@/hooks/queries/byok-keys' +import type { BYOKProviderId } from '@/tools/types' const logger = createLogger('BYOKSettings') @@ -60,6 +60,13 @@ const PROVIDERS: { description: 'LLM calls and Knowledge Base OCR', placeholder: 'Enter your API key', }, + { + id: 'exa', + name: 'Exa', + icon: ExaAIIcon, + 
description: 'AI-powered search and research', + placeholder: 'Enter your Exa API key', + }, ] function BYOKKeySkeleton() { diff --git a/apps/sim/blocks/blocks/exa.ts b/apps/sim/blocks/blocks/exa.ts index dfdbd327952..193fe9c292d 100644 --- a/apps/sim/blocks/blocks/exa.ts +++ b/apps/sim/blocks/blocks/exa.ts @@ -309,7 +309,7 @@ export const ExaBlock: BlockConfig = { value: () => 'exa-research', condition: { field: 'operation', value: 'exa_research' }, }, - // API Key (common) + // API Key — hidden when hosted for operations with hosted key support { id: 'apiKey', title: 'API Key', @@ -317,6 +317,18 @@ export const ExaBlock: BlockConfig = { placeholder: 'Enter your Exa API key', password: true, required: true, + hideWhenHosted: true, + condition: { field: 'operation', value: 'exa_research', not: true }, + }, + // API Key — always visible for research (no hosted key support) + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + placeholder: 'Enter your Exa API key', + password: true, + required: true, + condition: { field: 'operation', value: 'exa_research' }, }, ], tools: { diff --git a/apps/sim/blocks/types.ts b/apps/sim/blocks/types.ts index fe486a8e05e..056632c0711 100644 --- a/apps/sim/blocks/types.ts +++ b/apps/sim/blocks/types.ts @@ -253,6 +253,7 @@ export interface SubBlockConfig { hidden?: boolean hideFromPreview?: boolean // Hide this subblock from the workflow block preview requiresFeature?: string // Environment variable name that must be truthy for this subblock to be visible + hideWhenHosted?: boolean // Hide this subblock when running on hosted sim description?: string tooltip?: string // Tooltip text displayed via info icon next to the title value?: (params: Record) => string diff --git a/apps/sim/executor/handlers/generic/generic-handler.test.ts b/apps/sim/executor/handlers/generic/generic-handler.test.ts index 3a107df40a0..cf18f8a254a 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.test.ts +++ 
b/apps/sim/executor/handlers/generic/generic-handler.test.ts @@ -147,219 +147,4 @@ describe('GenericBlockHandler', () => { 'Block execution of Some Custom Tool failed with no error message' ) }) - - describe('Knowledge block cost tracking', () => { - beforeEach(() => { - // Set up knowledge block mock - mockBlock = { - ...mockBlock, - config: { tool: 'knowledge_search', params: {} }, - } - - mockTool = { - ...mockTool, - id: 'knowledge_search', - name: 'Knowledge Search', - } - - mockGetTool.mockImplementation((toolId) => { - if (toolId === 'knowledge_search') { - return mockTool - } - return undefined - }) - }) - - it.concurrent( - 'should extract and restructure cost information from knowledge tools', - async () => { - const inputs = { query: 'test query' } - const mockToolResponse = { - success: true, - output: { - results: [], - query: 'test query', - totalResults: 0, - cost: { - input: 0.00001042, - output: 0, - total: 0.00001042, - tokens: { - input: 521, - output: 0, - total: 521, - }, - model: 'text-embedding-3-small', - pricing: { - input: 0.02, - output: 0, - updatedAt: '2025-07-10', - }, - }, - }, - } - - mockExecuteTool.mockResolvedValue(mockToolResponse) - - const result = await handler.execute(mockContext, mockBlock, inputs) - - // Verify cost information is restructured correctly for enhanced logging - expect(result).toEqual({ - results: [], - query: 'test query', - totalResults: 0, - cost: { - input: 0.00001042, - output: 0, - total: 0.00001042, - }, - tokens: { - input: 521, - output: 0, - total: 521, - }, - model: 'text-embedding-3-small', - }) - } - ) - - it.concurrent('should handle knowledge_upload_chunk cost information', async () => { - // Update to upload_chunk tool - mockBlock.config.tool = 'knowledge_upload_chunk' - mockTool.id = 'knowledge_upload_chunk' - mockTool.name = 'Knowledge Upload Chunk' - - mockGetTool.mockImplementation((toolId) => { - if (toolId === 'knowledge_upload_chunk') { - return mockTool - } - return undefined - }) - - 
const inputs = { content: 'test content' } - const mockToolResponse = { - success: true, - output: { - data: { - id: 'chunk-123', - content: 'test content', - chunkIndex: 0, - }, - message: 'Successfully uploaded chunk', - documentId: 'doc-123', - cost: { - input: 0.00000521, - output: 0, - total: 0.00000521, - tokens: { - input: 260, - output: 0, - total: 260, - }, - model: 'text-embedding-3-small', - pricing: { - input: 0.02, - output: 0, - updatedAt: '2025-07-10', - }, - }, - }, - } - - mockExecuteTool.mockResolvedValue(mockToolResponse) - - const result = await handler.execute(mockContext, mockBlock, inputs) - - // Verify cost information is restructured correctly - expect(result).toEqual({ - data: { - id: 'chunk-123', - content: 'test content', - chunkIndex: 0, - }, - message: 'Successfully uploaded chunk', - documentId: 'doc-123', - cost: { - input: 0.00000521, - output: 0, - total: 0.00000521, - }, - tokens: { - input: 260, - output: 0, - total: 260, - }, - model: 'text-embedding-3-small', - }) - }) - - it('should pass through output unchanged for knowledge tools without cost info', async () => { - const inputs = { query: 'test query' } - const mockToolResponse = { - success: true, - output: { - results: [], - query: 'test query', - totalResults: 0, - // No cost information - }, - } - - mockExecuteTool.mockResolvedValue(mockToolResponse) - - const result = await handler.execute(mockContext, mockBlock, inputs) - - // Should return original output without cost transformation - expect(result).toEqual({ - results: [], - query: 'test query', - totalResults: 0, - }) - }) - - it.concurrent( - 'should process cost info for all tools (universal cost extraction)', - async () => { - mockBlock.config.tool = 'some_other_tool' - mockTool.id = 'some_other_tool' - - mockGetTool.mockImplementation((toolId) => { - if (toolId === 'some_other_tool') { - return mockTool - } - return undefined - }) - - const inputs = { param: 'value' } - const mockToolResponse = { - success: 
true, - output: { - result: 'success', - cost: { - input: 0.001, - output: 0.002, - total: 0.003, - tokens: { input: 100, output: 50, total: 150 }, - model: 'some-model', - }, - }, - } - - mockExecuteTool.mockResolvedValue(mockToolResponse) - - const result = await handler.execute(mockContext, mockBlock, inputs) - - expect(result).toEqual({ - result: 'success', - cost: { - input: 0.001, - output: 0.002, - total: 0.003, - }, - tokens: { input: 100, output: 50, total: 150 }, - model: 'some-model', - }) - } - ) - }) }) diff --git a/apps/sim/executor/handlers/generic/generic-handler.ts b/apps/sim/executor/handlers/generic/generic-handler.ts index ff9cbbf440d..6c9e1bb53ac 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.ts @@ -98,27 +98,7 @@ export class GenericBlockHandler implements BlockHandler { throw error } - const output = result.output - let cost = null - - if (output?.cost) { - cost = output.cost - } - - if (cost) { - return { - ...output, - cost: { - input: cost.input, - output: cost.output, - total: cost.total, - }, - tokens: cost.tokens, - model: cost.model, - } - } - - return output + return result.output } catch (error: any) { if (!error.message || error.message === 'undefined (undefined)') { let errorMessage = `Block execution of ${tool?.name || block.config.tool} failed` diff --git a/apps/sim/hooks/queries/byok-keys.ts b/apps/sim/hooks/queries/byok-keys.ts index 26d348d5a7f..167238f4a19 100644 --- a/apps/sim/hooks/queries/byok-keys.ts +++ b/apps/sim/hooks/queries/byok-keys.ts @@ -1,11 +1,10 @@ import { createLogger } from '@sim/logger' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import { API_ENDPOINTS } from '@/stores/constants' +import type { BYOKProviderId } from '@/tools/types' const logger = createLogger('BYOKKeysQueries') -export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral' - export interface BYOKKey { 
id: string providerId: BYOKProviderId diff --git a/apps/sim/lib/api-key/byok.ts b/apps/sim/lib/api-key/byok.ts index 04a35adb426..127feb9af31 100644 --- a/apps/sim/lib/api-key/byok.ts +++ b/apps/sim/lib/api-key/byok.ts @@ -7,11 +7,10 @@ import { isHosted } from '@/lib/core/config/feature-flags' import { decryptSecret } from '@/lib/core/security/encryption' import { getHostedModels } from '@/providers/models' import { useProvidersStore } from '@/stores/providers/store' +import type { BYOKProviderId } from '@/tools/types' const logger = createLogger('BYOKKeys') -export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral' - export interface BYOKKeyResult { apiKey: string isBYOK: true diff --git a/apps/sim/lib/billing/core/usage-log.ts b/apps/sim/lib/billing/core/usage-log.ts index b21fb552f7a..90be5c16d0b 100644 --- a/apps/sim/lib/billing/core/usage-log.ts +++ b/apps/sim/lib/billing/core/usage-log.ts @@ -22,12 +22,13 @@ export type UsageLogSource = 'workflow' | 'wand' | 'copilot' | 'mcp_copilot' export interface ModelUsageMetadata { inputTokens: number outputTokens: number + toolCost?: number } /** - * Metadata for 'fixed' category charges (currently empty, extensible) + * Metadata for 'fixed' category charges (e.g., tool cost breakdown) */ -export type FixedUsageMetadata = Record +export type FixedUsageMetadata = Record /** * Union type for all metadata types @@ -44,6 +45,7 @@ export interface LogModelUsageParams { inputTokens: number outputTokens: number cost: number + toolCost?: number workspaceId?: string workflowId?: string executionId?: string @@ -60,6 +62,8 @@ export interface LogFixedUsageParams { workspaceId?: string workflowId?: string executionId?: string + /** Optional metadata (e.g., tool cost breakdown from API) */ + metadata?: FixedUsageMetadata } /** @@ -74,6 +78,7 @@ export async function logModelUsage(params: LogModelUsageParams): Promise const metadata: ModelUsageMetadata = { inputTokens: params.inputTokens, outputTokens: 
params.outputTokens, + ...(params.toolCost != null && params.toolCost > 0 && { toolCost: params.toolCost }), } await db.insert(usageLog).values({ @@ -119,7 +124,7 @@ export async function logFixedUsage(params: LogFixedUsageParams): Promise category: 'fixed', source: params.source, description: params.description, - metadata: null, + metadata: params.metadata ?? null, cost: params.cost.toString(), workspaceId: params.workspaceId ?? null, workflowId: params.workflowId ?? null, @@ -155,6 +160,7 @@ export interface LogWorkflowUsageBatchParams { { total: number tokens: { input: number; output: number } + toolCost?: number } > } @@ -207,6 +213,8 @@ export async function logWorkflowUsageBatch(params: LogWorkflowUsageBatchParams) metadata: { inputTokens: modelData.tokens.input, outputTokens: modelData.tokens.output, + ...(modelData.toolCost != null && + modelData.toolCost > 0 && { toolCost: modelData.toolCost }), }, cost: modelData.total.toString(), workspaceId: params.workspaceId ?? null, diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts index 8464e42ca57..4a81f5c46fd 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts @@ -6,19 +6,18 @@ import type { ToolCallResult, ToolCallState, } from '@/lib/copilot/orchestrator/types' +import { isHosted } from '@/lib/core/config/feature-flags' import { generateRequestId } from '@/lib/core/utils/request' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils' import { resolveEnvVarReferences } from '@/executor/utils/reference-validation' import { executeTool } from '@/tools' +import type { ToolConfig } from '@/tools/types' import { resolveToolId } from '@/tools/utils' export async function executeIntegrationToolDirect( toolCall: ToolCallState, - 
toolConfig: { - oauth?: { required?: boolean; provider?: string } - params?: { apiKey?: { required?: boolean } } - }, + toolConfig: ToolConfig, context: ExecutionContext ): Promise { const { userId, workflowId } = context @@ -74,7 +73,8 @@ export async function executeIntegrationToolDirect( executionParams.accessToken = accessToken } - if (toolConfig.params?.apiKey?.required && !executionParams.apiKey) { + const hasHostedKeySupport = isHosted && !!toolConfig.hosting + if (toolConfig.params?.apiKey?.required && !executionParams.apiKey && !hasHostedKeySupport) { return { success: false, error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`, @@ -83,6 +83,7 @@ export async function executeIntegrationToolDirect( executionParams._context = { workflowId, + workspaceId, userId, } diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts new file mode 100644 index 00000000000..be199a24cfa --- /dev/null +++ b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts @@ -0,0 +1,521 @@ +import { loggerMock } from '@sim/testing' +import { afterEach, beforeEach, describe, expect, it, type Mock, vi } from 'vitest' +import type { + ConsumeResult, + RateLimitStorageAdapter, + TokenStatus, +} from '@/lib/core/rate-limiter/storage' +import { HostedKeyRateLimiter } from './hosted-key-rate-limiter' +import type { CustomRateLimit, PerRequestRateLimit } from './types' + +vi.mock('@sim/logger', () => loggerMock) + +interface MockAdapter { + consumeTokens: Mock + getTokenStatus: Mock + resetBucket: Mock +} + +const createMockAdapter = (): MockAdapter => ({ + consumeTokens: vi.fn(), + getTokenStatus: vi.fn(), + resetBucket: vi.fn(), +}) + +describe('HostedKeyRateLimiter', () => { + const testProvider = 'exa' + const envKeyPrefix = 'EXA_API_KEY' + let mockAdapter: MockAdapter + let rateLimiter: 
HostedKeyRateLimiter + let originalEnv: NodeJS.ProcessEnv + + const perRequestRateLimit: PerRequestRateLimit = { + mode: 'per_request', + requestsPerMinute: 10, + } + + beforeEach(() => { + vi.clearAllMocks() + mockAdapter = createMockAdapter() + rateLimiter = new HostedKeyRateLimiter(mockAdapter as RateLimitStorageAdapter) + + originalEnv = { ...process.env } + process.env.EXA_API_KEY_COUNT = '3' + process.env.EXA_API_KEY_1 = 'test-key-1' + process.env.EXA_API_KEY_2 = 'test-key-2' + process.env.EXA_API_KEY_3 = 'test-key-3' + }) + + afterEach(() => { + process.env = originalEnv + }) + + describe('acquireKey', () => { + it('should return error when no keys are configured', async () => { + const allowedResult: ConsumeResult = { + allowed: true, + tokensRemaining: 9, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedResult) + + process.env.EXA_API_KEY_COUNT = undefined + process.env.EXA_API_KEY_1 = undefined + process.env.EXA_API_KEY_2 = undefined + process.env.EXA_API_KEY_3 = undefined + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-1' + ) + + expect(result.success).toBe(false) + expect(result.error).toContain('No hosted keys configured') + }) + + it('should rate limit billing actor when they exceed their limit', async () => { + const rateLimitedResult: ConsumeResult = { + allowed: false, + tokensRemaining: 0, + resetAt: new Date(Date.now() + 30000), + } + mockAdapter.consumeTokens.mockResolvedValue(rateLimitedResult) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-123' + ) + + expect(result.success).toBe(false) + expect(result.billingActorRateLimited).toBe(true) + expect(result.retryAfterMs).toBeDefined() + expect(result.error).toContain('Rate limit exceeded') + }) + + it('should allow billing actor within their rate limit', async () => { + const allowedResult: ConsumeResult = { + 
allowed: true, + tokensRemaining: 9, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedResult) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-123' + ) + + expect(result.success).toBe(true) + expect(result.billingActorRateLimited).toBeUndefined() + expect(result.key).toBe('test-key-1') + }) + + it('should distribute requests across keys round-robin style', async () => { + const allowedResult: ConsumeResult = { + allowed: true, + tokensRemaining: 9, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedResult) + + const r1 = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-1' + ) + const r2 = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-2' + ) + const r3 = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-3' + ) + const r4 = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-4' + ) + + expect(r1.keyIndex).toBe(0) + expect(r2.keyIndex).toBe(1) + expect(r3.keyIndex).toBe(2) + expect(r4.keyIndex).toBe(0) // Wraps back + }) + + it('should handle partial key availability', async () => { + const allowedResult: ConsumeResult = { + allowed: true, + tokensRemaining: 9, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedResult) + + process.env.EXA_API_KEY_2 = undefined + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-1' + ) + + expect(result.success).toBe(true) + expect(result.key).toBe('test-key-1') + expect(result.envVarName).toBe('EXA_API_KEY_1') + + const r2 = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + perRequestRateLimit, + 'workspace-2' + ) + expect(r2.keyIndex).toBe(2) // Skips 
missing key 1 + expect(r2.envVarName).toBe('EXA_API_KEY_3') + }) + }) + + describe('acquireKey with custom rate limit', () => { + const customRateLimit: CustomRateLimit = { + mode: 'custom', + requestsPerMinute: 5, + dimensions: [ + { + name: 'tokens', + limitPerMinute: 1000, + extractUsage: (_params, response) => (response.tokenCount as number) ?? 0, + }, + ], + } + + it('should enforce requestsPerMinute for custom mode', async () => { + const rateLimitedResult: ConsumeResult = { + allowed: false, + tokensRemaining: 0, + resetAt: new Date(Date.now() + 30000), + } + mockAdapter.consumeTokens.mockResolvedValue(rateLimitedResult) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + customRateLimit, + 'workspace-1' + ) + + expect(result.success).toBe(false) + expect(result.billingActorRateLimited).toBe(true) + expect(result.error).toContain('Rate limit exceeded') + }) + + it('should allow request when actor request limit and dimensions have budget', async () => { + const allowedConsume: ConsumeResult = { + allowed: true, + tokensRemaining: 4, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) + + const budgetAvailable: TokenStatus = { + tokensAvailable: 500, + maxTokens: 2000, + lastRefillAt: new Date(), + nextRefillAt: new Date(Date.now() + 60000), + } + mockAdapter.getTokenStatus.mockResolvedValue(budgetAvailable) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + customRateLimit, + 'workspace-1' + ) + + expect(result.success).toBe(true) + expect(result.key).toBe('test-key-1') + expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(1) + expect(mockAdapter.getTokenStatus).toHaveBeenCalledTimes(1) + }) + + it('should block request when a dimension is depleted', async () => { + const allowedConsume: ConsumeResult = { + allowed: true, + tokensRemaining: 4, + resetAt: new Date(Date.now() + 60000), + } + 
mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) + + const depleted: TokenStatus = { + tokensAvailable: 0, + maxTokens: 2000, + lastRefillAt: new Date(), + nextRefillAt: new Date(Date.now() + 45000), + } + mockAdapter.getTokenStatus.mockResolvedValue(depleted) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + customRateLimit, + 'workspace-1' + ) + + expect(result.success).toBe(false) + expect(result.billingActorRateLimited).toBe(true) + expect(result.error).toContain('tokens') + }) + + it('should pre-check all dimensions and block on first depleted one', async () => { + const multiDimensionConfig: CustomRateLimit = { + mode: 'custom', + requestsPerMinute: 10, + dimensions: [ + { + name: 'tokens', + limitPerMinute: 1000, + extractUsage: (_p, r) => (r.tokenCount as number) ?? 0, + }, + { + name: 'search_units', + limitPerMinute: 50, + extractUsage: (_p, r) => (r.searchUnits as number) ?? 0, + }, + ], + } + + const allowedConsume: ConsumeResult = { + allowed: true, + tokensRemaining: 9, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) + + const tokensBudget: TokenStatus = { + tokensAvailable: 500, + maxTokens: 2000, + lastRefillAt: new Date(), + nextRefillAt: new Date(Date.now() + 60000), + } + const searchUnitsDepleted: TokenStatus = { + tokensAvailable: 0, + maxTokens: 100, + lastRefillAt: new Date(), + nextRefillAt: new Date(Date.now() + 30000), + } + mockAdapter.getTokenStatus + .mockResolvedValueOnce(tokensBudget) + .mockResolvedValueOnce(searchUnitsDepleted) + + const result = await rateLimiter.acquireKey( + testProvider, + envKeyPrefix, + multiDimensionConfig, + 'workspace-1' + ) + + expect(result.success).toBe(false) + expect(result.billingActorRateLimited).toBe(true) + expect(result.error).toContain('search_units') + }) + }) + + describe('reportUsage', () => { + const customConfig: CustomRateLimit = { + mode: 'custom', + requestsPerMinute: 5, + dimensions: [ 
+ { + name: 'tokens', + limitPerMinute: 1000, + extractUsage: (_params, response) => (response.tokenCount as number) ?? 0, + }, + ], + } + + it('should consume actual tokens from dimension bucket after execution', async () => { + const consumeResult: ConsumeResult = { + allowed: true, + tokensRemaining: 850, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(consumeResult) + + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + customConfig, + {}, + { tokenCount: 150 } + ) + + expect(result.dimensions).toHaveLength(1) + expect(result.dimensions[0].name).toBe('tokens') + expect(result.dimensions[0].consumed).toBe(150) + expect(result.dimensions[0].allowed).toBe(true) + expect(result.dimensions[0].tokensRemaining).toBe(850) + + expect(mockAdapter.consumeTokens).toHaveBeenCalledWith( + 'hosted:exa:actor:workspace-1:tokens', + 150, + expect.objectContaining({ maxTokens: 2000, refillRate: 1000 }) + ) + }) + + it('should handle overdrawn bucket gracefully (optimistic concurrency)', async () => { + const overdrawnResult: ConsumeResult = { + allowed: false, + tokensRemaining: 0, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(overdrawnResult) + + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + customConfig, + {}, + { tokenCount: 500 } + ) + + expect(result.dimensions[0].allowed).toBe(false) + expect(result.dimensions[0].consumed).toBe(500) + }) + + it('should skip consumption when extractUsage returns 0', async () => { + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + customConfig, + {}, + { tokenCount: 0 } + ) + + expect(result.dimensions).toHaveLength(1) + expect(result.dimensions[0].consumed).toBe(0) + expect(mockAdapter.consumeTokens).not.toHaveBeenCalled() + }) + + it('should handle multiple dimensions independently', async () => { + const multiConfig: CustomRateLimit = { + mode: 'custom', + 
requestsPerMinute: 10, + dimensions: [ + { + name: 'tokens', + limitPerMinute: 1000, + extractUsage: (_p, r) => (r.tokenCount as number) ?? 0, + }, + { + name: 'search_units', + limitPerMinute: 50, + extractUsage: (_p, r) => (r.searchUnits as number) ?? 0, + }, + ], + } + + const tokensConsumed: ConsumeResult = { + allowed: true, + tokensRemaining: 800, + resetAt: new Date(Date.now() + 60000), + } + const searchConsumed: ConsumeResult = { + allowed: true, + tokensRemaining: 47, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens + .mockResolvedValueOnce(tokensConsumed) + .mockResolvedValueOnce(searchConsumed) + + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + multiConfig, + {}, + { tokenCount: 200, searchUnits: 3 } + ) + + expect(result.dimensions).toHaveLength(2) + expect(result.dimensions[0]).toEqual({ + name: 'tokens', + consumed: 200, + allowed: true, + tokensRemaining: 800, + }) + expect(result.dimensions[1]).toEqual({ + name: 'search_units', + consumed: 3, + allowed: true, + tokensRemaining: 47, + }) + + expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(2) + }) + + it('should continue with remaining dimensions if extractUsage throws', async () => { + const throwingConfig: CustomRateLimit = { + mode: 'custom', + requestsPerMinute: 10, + dimensions: [ + { + name: 'broken', + limitPerMinute: 100, + extractUsage: () => { + throw new Error('extraction failed') + }, + }, + { + name: 'tokens', + limitPerMinute: 1000, + extractUsage: (_p, r) => (r.tokenCount as number) ?? 
0, + }, + ], + } + + const consumeResult: ConsumeResult = { + allowed: true, + tokensRemaining: 900, + resetAt: new Date(Date.now() + 60000), + } + mockAdapter.consumeTokens.mockResolvedValue(consumeResult) + + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + throwingConfig, + {}, + { tokenCount: 100 } + ) + + expect(result.dimensions).toHaveLength(1) + expect(result.dimensions[0].name).toBe('tokens') + expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(1) + }) + + it('should handle storage errors gracefully', async () => { + mockAdapter.consumeTokens.mockRejectedValue(new Error('db connection lost')) + + const result = await rateLimiter.reportUsage( + testProvider, + 'workspace-1', + customConfig, + {}, + { tokenCount: 100 } + ) + + expect(result.dimensions).toHaveLength(0) + }) + }) +}) diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts new file mode 100644 index 00000000000..a20cf8413f3 --- /dev/null +++ b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts @@ -0,0 +1,349 @@ +import { createLogger } from '@sim/logger' +import { + createStorageAdapter, + type RateLimitStorageAdapter, + type TokenBucketConfig, +} from '@/lib/core/rate-limiter/storage' +import { + type AcquireKeyResult, + type CustomRateLimit, + DEFAULT_BURST_MULTIPLIER, + DEFAULT_WINDOW_MS, + type HostedKeyRateLimitConfig, + type ReportUsageResult, + toTokenBucketConfig, +} from './types' + +const logger = createLogger('HostedKeyRateLimiter') + +/** + * Resolves env var names for a numbered key prefix using a `{PREFIX}_COUNT` env var. + * E.g. with `EXA_API_KEY_COUNT=5`, returns `['EXA_API_KEY_1', ..., 'EXA_API_KEY_5']`. 
/**
 * Resolves env var names for a numbered key prefix using a `{PREFIX}_COUNT` env var.
 * E.g. with `EXA_API_KEY_COUNT=5`, returns `['EXA_API_KEY_1', ..., 'EXA_API_KEY_5']`.
 *
 * NOTE: a missing or non-numeric `{PREFIX}_COUNT` yields an empty list
 * (`parseInt` returns NaN, so the loop never runs); `acquireKey` then reports
 * "No hosted keys configured".
 */
function resolveEnvKeys(prefix: string): string[] {
  const count = Number.parseInt(process.env[`${prefix}_COUNT`] || '0', 10)
  const names: string[] = []
  // Key env vars are 1-indexed: PREFIX_1 .. PREFIX_COUNT.
  for (let i = 1; i <= count; i++) {
    names.push(`${prefix}_${i}`)
  }
  return names
}

/** Dimension name for per-billing-actor request rate limiting */
const ACTOR_REQUESTS_DIMENSION = 'actor_requests'

/**
 * Information about an available hosted key.
 */
interface AvailableKey {
  /** The API key value read from the environment */
  key: string
  /** Position in the resolved envKeys list (stable even when some vars are unset) */
  keyIndex: number
  /** Name of the env var the key was read from */
  envVarName: string
}

/**
 * HostedKeyRateLimiter provides:
 * 1. Per-billing-actor rate limiting (enforced - blocks actors who exceed their limit)
 * 2. Round-robin key selection (distributes requests evenly across keys)
 * 3. Post-execution dimension usage tracking for custom rate limits
 *
 * The billing actor is typically a workspace ID, meaning rate limits are shared
 * across all users within the same workspace.
 */
export class HostedKeyRateLimiter {
  private storage: RateLimitStorageAdapter
  /** Round-robin counter per provider for even key distribution */
  private roundRobinCounters = new Map<string, number>()

  constructor(storage?: RateLimitStorageAdapter) {
    // Falls back to the environment-appropriate adapter (e.g. DB-backed bucket).
    this.storage = storage ?? createStorageAdapter()
  }

  /** Storage key for the per-actor request-count bucket. */
  private buildActorStorageKey(provider: string, billingActorId: string): string {
    return `hosted:${provider}:actor:${billingActorId}:${ACTOR_REQUESTS_DIMENSION}`
  }

  /** Storage key for one custom dimension's bucket (e.g. 'tokens', 'search_units'). */
  private buildDimensionStorageKey(
    provider: string,
    billingActorId: string,
    dimensionName: string
  ): string {
    return `hosted:${provider}:actor:${billingActorId}:${dimensionName}`
  }

  /** Filters resolved env var names down to those that actually have a value set. */
  private getAvailableKeys(envKeys: string[]): AvailableKey[] {
    const keys: AvailableKey[] = []
    for (let i = 0; i < envKeys.length; i++) {
      const envVarName = envKeys[i]
      const key = process.env[envVarName]
      if (key) {
        // keyIndex is the position in the ORIGINAL envKeys list, so gaps
        // (unset vars) do not shift later keys' indices.
        keys.push({ key, keyIndex: i, envVarName })
      }
    }
    return keys
  }

  /**
   * Build a token bucket config for the per-billing-actor request rate limit.
   * Works for both `per_request` and `custom` modes since both define `requestsPerMinute`.
   */
  private getActorRateLimitConfig(config: HostedKeyRateLimitConfig): TokenBucketConfig | null {
    if (!config.requestsPerMinute) return null
    return toTokenBucketConfig(
      config.requestsPerMinute,
      config.burstMultiplier ?? DEFAULT_BURST_MULTIPLIER,
      DEFAULT_WINDOW_MS
    )
  }

  /**
   * Check and consume billing actor request rate limit. Returns null if allowed, or retry info if blocked.
   *
   * Fails OPEN: a storage error is logged and treated as "allowed" rather than
   * blocking the request on infrastructure failure.
   */
  private async checkActorRateLimit(
    provider: string,
    billingActorId: string,
    config: HostedKeyRateLimitConfig
  ): Promise<{ rateLimited: true; retryAfterMs: number } | null> {
    const bucketConfig = this.getActorRateLimitConfig(config)
    if (!bucketConfig) return null

    const storageKey = this.buildActorStorageKey(provider, billingActorId)

    try {
      // Consumes one request token up front; not refunded if a later check fails.
      const result = await this.storage.consumeTokens(storageKey, 1, bucketConfig)
      if (!result.allowed) {
        const retryAfterMs = Math.max(0, result.resetAt.getTime() - Date.now())
        logger.info(`Billing actor ${billingActorId} rate limited for ${provider}`, {
          provider,
          billingActorId,
          retryAfterMs,
          tokensRemaining: result.tokensRemaining,
        })
        return { rateLimited: true, retryAfterMs }
      }
      return null
    } catch (error) {
      logger.error(`Error checking billing actor rate limit for ${provider}`, {
        error,
        billingActorId,
      })
      return null
    }
  }

  /**
   * Pre-check that the billing actor has available budget in all custom dimensions.
   * Does NOT consume tokens -- just verifies the actor isn't already depleted.
   * Returns retry info for the most restrictive exhausted dimension, or null if all pass.
   *
   * Fails OPEN per dimension: a storage error is logged and that dimension is
   * treated as having budget.
   */
  private async preCheckDimensions(
    provider: string,
    billingActorId: string,
    config: CustomRateLimit
  ): Promise<{ rateLimited: true; retryAfterMs: number; dimension: string } | null> {
    for (const dimension of config.dimensions) {
      const storageKey = this.buildDimensionStorageKey(provider, billingActorId, dimension.name)
      const bucketConfig = toTokenBucketConfig(
        dimension.limitPerMinute,
        dimension.burstMultiplier ?? DEFAULT_BURST_MULTIPLIER,
        DEFAULT_WINDOW_MS
      )

      try {
        const status = await this.storage.getTokenStatus(storageKey, bucketConfig)
        if (status.tokensAvailable < 1) {
          const retryAfterMs = Math.max(0, status.nextRefillAt.getTime() - Date.now())
          logger.info(
            `Billing actor ${billingActorId} exhausted dimension ${dimension.name} for ${provider}`,
            {
              provider,
              billingActorId,
              dimension: dimension.name,
              tokensAvailable: status.tokensAvailable,
              retryAfterMs,
            }
          )
          return { rateLimited: true, retryAfterMs, dimension: dimension.name }
        }
      } catch (error) {
        logger.error(`Error pre-checking dimension ${dimension.name} for ${provider}`, {
          error,
          billingActorId,
        })
      }
    }
    return null
  }

  /**
   * Acquire an available key via round-robin selection.
   *
   * For both modes:
   * 1. Per-billing-actor request rate limiting (enforced): blocks actors who exceed their request limit
   * 2. Round-robin key selection: cycles through available keys for even distribution
   *
   * For `custom` mode additionally:
   * 3. Pre-checks dimension budgets: blocks if any dimension is already depleted
   *
   * NOTE(review): the actor request token is consumed BEFORE the dimension
   * pre-check and key-availability check, so a blocked request still spends
   * one request token — confirm this ordering is intentional.
   *
   * @param envKeyPrefix - Env var prefix (e.g. 'EXA_API_KEY'). Keys resolved via `{prefix}_COUNT`.
   * @param billingActorId - The billing actor (typically workspace ID) to rate limit against
   */
  async acquireKey(
    provider: string,
    envKeyPrefix: string,
    config: HostedKeyRateLimitConfig,
    billingActorId: string
  ): Promise<AcquireKeyResult> {
    if (config.requestsPerMinute) {
      const rateLimitResult = await this.checkActorRateLimit(provider, billingActorId, config)
      if (rateLimitResult) {
        return {
          success: false,
          billingActorRateLimited: true,
          retryAfterMs: rateLimitResult.retryAfterMs,
          error: `Rate limit exceeded. Please wait ${Math.ceil(rateLimitResult.retryAfterMs / 1000)} seconds. If you're getting throttled frequently, consider adding your own API key under Settings > BYOK to avoid shared rate limits.`,
        }
      }
    }

    if (config.mode === 'custom' && config.dimensions.length > 0) {
      const dimensionResult = await this.preCheckDimensions(provider, billingActorId, config)
      if (dimensionResult) {
        return {
          success: false,
          billingActorRateLimited: true,
          retryAfterMs: dimensionResult.retryAfterMs,
          error: `Rate limit exceeded for ${dimensionResult.dimension}. Please wait ${Math.ceil(dimensionResult.retryAfterMs / 1000)} seconds. If you're getting throttled frequently, consider adding your own API key under Settings > BYOK to avoid shared rate limits.`,
        }
      }
    }

    const envKeys = resolveEnvKeys(envKeyPrefix)
    const availableKeys = this.getAvailableKeys(envKeys)

    if (availableKeys.length === 0) {
      logger.warn(`No hosted keys configured for provider ${provider}`)
      return {
        success: false,
        error: `No hosted keys configured for ${provider}`,
      }
    }

    // In-memory round-robin: per-process counter, modulo the keys present NOW,
    // so key-set changes between calls are tolerated.
    const counter = this.roundRobinCounters.get(provider) ?? 0
    const selected = availableKeys[counter % availableKeys.length]
    this.roundRobinCounters.set(provider, counter + 1)

    logger.debug(`Selected hosted key for ${provider}`, {
      provider,
      keyIndex: selected.keyIndex,
      envVarName: selected.envVarName,
    })

    return {
      success: true,
      key: selected.key,
      keyIndex: selected.keyIndex,
      envVarName: selected.envVarName,
    }
  }

  /**
   * Report actual usage after successful tool execution (custom mode only).
   * Calls `extractUsage` on each dimension and consumes the actual token count.
   * This is the "post-execution" phase of the optimistic two-phase approach.
   *
   * A dimension whose `extractUsage` throws, or whose storage write fails, is
   * omitted from the result; zero/negative usage is reported as `consumed: 0`
   * without touching storage (note `tokensRemaining` is 0 in that case, not
   * the actual remaining budget).
   */
  async reportUsage(
    provider: string,
    billingActorId: string,
    config: CustomRateLimit,
    params: Record<string, unknown>,
    response: Record<string, unknown>
  ): Promise<ReportUsageResult> {
    const results: ReportUsageResult['dimensions'] = []

    for (const dimension of config.dimensions) {
      let usage: number
      try {
        usage = dimension.extractUsage(params, response)
      } catch (error) {
        // A broken extractor must not prevent tracking of other dimensions.
        logger.error(`Failed to extract usage for dimension ${dimension.name}`, {
          provider,
          billingActorId,
          error,
        })
        continue
      }

      if (usage <= 0) {
        results.push({
          name: dimension.name,
          consumed: 0,
          allowed: true,
          tokensRemaining: 0,
        })
        continue
      }

      const storageKey = this.buildDimensionStorageKey(provider, billingActorId, dimension.name)
      const bucketConfig = toTokenBucketConfig(
        dimension.limitPerMinute,
        dimension.burstMultiplier ?? DEFAULT_BURST_MULTIPLIER,
        DEFAULT_WINDOW_MS
      )

      try {
        const consumeResult = await this.storage.consumeTokens(storageKey, usage, bucketConfig)

        results.push({
          name: dimension.name,
          consumed: usage,
          allowed: consumeResult.allowed,
          tokensRemaining: consumeResult.tokensRemaining,
        })

        if (!consumeResult.allowed) {
          // Usage already happened (optimistic model) — the bucket may go
          // negative here; future acquireKey pre-checks will block the actor.
          logger.warn(
            `Dimension ${dimension.name} overdrawn for ${provider} (optimistic concurrency)`,
            { provider, billingActorId, usage, tokensRemaining: consumeResult.tokensRemaining }
          )
        }

        logger.debug(`Consumed ${usage} from dimension ${dimension.name} for ${provider}`, {
          provider,
          billingActorId,
          usage,
          allowed: consumeResult.allowed,
          tokensRemaining: consumeResult.tokensRemaining,
        })
      } catch (error) {
        logger.error(`Failed to consume tokens for dimension ${dimension.name}`, {
          provider,
          billingActorId,
          usage,
          error,
        })
      }
    }

    return { dimensions: results }
  }
}

// Lazily-created process-wide singleton.
let cachedInstance: HostedKeyRateLimiter | null = null

/**
 * Get the singleton HostedKeyRateLimiter instance
 */
export function getHostedKeyRateLimiter(): HostedKeyRateLimiter {
  if (!cachedInstance) {
    cachedInstance = new HostedKeyRateLimiter()
  }
  return cachedInstance
}

/**
 * Reset the cached rate limiter (for testing)
 */
export function resetHostedKeyRateLimiter(): void {
  cachedInstance = null
}
ReportUsageResult, + toTokenBucketConfig, +} from './types' diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/types.ts b/apps/sim/lib/core/rate-limiter/hosted-key/types.ts new file mode 100644 index 00000000000..65d2bb33877 --- /dev/null +++ b/apps/sim/lib/core/rate-limiter/hosted-key/types.ts @@ -0,0 +1,108 @@ +import type { TokenBucketConfig } from '@/lib/core/rate-limiter/storage' + +export type HostedKeyRateLimitMode = 'per_request' | 'custom' + +/** + * Simple per-request rate limit configuration. + * Enforces per-billing-actor rate limiting and distributes requests across keys. + */ +export interface PerRequestRateLimit { + mode: 'per_request' + /** Maximum requests per minute per billing actor (enforced - blocks if exceeded) */ + requestsPerMinute: number + /** Burst multiplier for token bucket max capacity. Default: 2 */ + burstMultiplier?: number +} + +/** + * Custom rate limit with multiple dimensions (e.g., tokens, search units). + * Allows tracking different usage metrics independently. + */ +export interface CustomRateLimit { + mode: 'custom' + /** Maximum requests per minute per billing actor (enforced - blocks if exceeded) */ + requestsPerMinute: number + /** Multiple dimensions to track */ + dimensions: RateLimitDimension[] + /** Burst multiplier for token bucket max capacity. Default: 2 */ + burstMultiplier?: number +} + +/** + * A single dimension for custom rate limiting. + * Each dimension has its own token bucket. + */ +export interface RateLimitDimension { + /** Dimension name (e.g., 'tokens', 'search_units') - used in storage key */ + name: string + /** Limit per minute for this dimension */ + limitPerMinute: number + /** Burst multiplier for token bucket max capacity. Default: 2 */ + burstMultiplier?: number + /** + * Extract usage amount from request params and response. + * Called after successful execution to consume the actual usage. 
+ */ + extractUsage: (params: Record, response: Record) => number +} + +/** Union of all hosted key rate limit configuration types */ +export type HostedKeyRateLimitConfig = PerRequestRateLimit | CustomRateLimit + +/** + * Result from acquiring a key from the hosted key rate limiter + */ +export interface AcquireKeyResult { + /** Whether a key was successfully acquired */ + success: boolean + /** The API key value (if success=true) */ + key?: string + /** Index of the key in the envKeys array */ + keyIndex?: number + /** Environment variable name of the selected key */ + envVarName?: string + /** Error message if no key available */ + error?: string + /** Whether the billing actor was rate limited (exceeded their limit) */ + billingActorRateLimited?: boolean + /** Milliseconds until the billing actor's rate limit resets (if billingActorRateLimited=true) */ + retryAfterMs?: number +} + +/** + * Result from reporting post-execution usage for custom dimensions + */ +export interface ReportUsageResult { + /** Per-dimension consumption results */ + dimensions: { + name: string + consumed: number + allowed: boolean + tokensRemaining: number + }[] +} + +/** + * Convert rate limit config to token bucket config for a dimension + */ +export function toTokenBucketConfig( + limitPerMinute: number, + burstMultiplier = 2, + windowMs = 60000 +): TokenBucketConfig { + return { + maxTokens: limitPerMinute * burstMultiplier, + refillRate: limitPerMinute, + refillIntervalMs: windowMs, + } +} + +/** + * Default rate limit window in milliseconds (1 minute) + */ +export const DEFAULT_WINDOW_MS = 60000 + +/** + * Default burst multiplier + */ +export const DEFAULT_BURST_MULTIPLIER = 2 diff --git a/apps/sim/lib/core/rate-limiter/index.ts b/apps/sim/lib/core/rate-limiter/index.ts index e5a0081c71f..b690f720114 100644 --- a/apps/sim/lib/core/rate-limiter/index.ts +++ b/apps/sim/lib/core/rate-limiter/index.ts @@ -1,3 +1,18 @@ +export { + type AcquireKeyResult, + type CustomRateLimit, + 
DEFAULT_BURST_MULTIPLIER, + DEFAULT_WINDOW_MS, + getHostedKeyRateLimiter, + type HostedKeyRateLimitConfig, + HostedKeyRateLimiter, + type HostedKeyRateLimitMode, + type PerRequestRateLimit, + type RateLimitDimension, + type ReportUsageResult, + resetHostedKeyRateLimiter, + toTokenBucketConfig, +} from './hosted-key' export type { RateLimitResult, RateLimitStatus } from './rate-limiter' export { RateLimiter } from './rate-limiter' export type { RateLimitStorageAdapter, TokenBucketConfig } from './storage' diff --git a/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts b/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts index cdfb8b414c3..7f756fbc902 100644 --- a/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts +++ b/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts @@ -51,7 +51,7 @@ export class DbTokenBucket implements RateLimitStorageAdapter { ) * ${config.refillRate} )::numeric ) - ${requestedTokens}::numeric - ELSE ${rateLimitBucket.tokens}::numeric + ELSE -1 END `, lastRefillAt: sql` diff --git a/apps/sim/lib/core/telemetry.ts b/apps/sim/lib/core/telemetry.ts index c12fe1303a4..8d1a08279db 100644 --- a/apps/sim/lib/core/telemetry.ts +++ b/apps/sim/lib/core/telemetry.ts @@ -934,6 +934,55 @@ export const PlatformEvents = { }) }, + /** + * Track when a rate limit error is surfaced to the end user (not retried/absorbed). + * Fires for both billing-actor limits and exhausted upstream retries. 
  /**
   * Track when a rate limit error is surfaced to the end user (not retried/absorbed).
   * Fires for both billing-actor limits and exhausted upstream retries.
   */
  userThrottled: (attrs: {
    toolId: string
    reason: 'billing_actor_limit' | 'upstream_retries_exhausted'
    provider?: string
    retryAfterMs?: number
    userId?: string
    workspaceId?: string
    workflowId?: string
  }) => {
    trackPlatformEvent('platform.user.throttled', {
      'tool.id': attrs.toolId,
      'throttle.reason': attrs.reason,
      // Optional attributes are conditionally spread so absent values are
      // omitted from the event payload entirely.
      ...(attrs.provider && { 'provider.id': attrs.provider }),
      // retryAfterMs can legitimately be 0, so check against null/undefined
      // rather than truthiness.
      ...(attrs.retryAfterMs != null && { 'rate_limit.retry_after_ms': attrs.retryAfterMs }),
      ...(attrs.userId && { 'user.id': attrs.userId }),
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
      ...(attrs.workflowId && { 'workflow.id': attrs.workflowId }),
    })
  },

  /**
   * Track hosted key rate limited by upstream provider (429 from the external API).
   * Records which key (by env var name) hit the limit and the retry schedule.
   */
  hostedKeyRateLimited: (attrs: {
    toolId: string
    envVarName: string
    attempt: number
    maxRetries: number
    delayMs: number
    userId?: string
    workspaceId?: string
    workflowId?: string
  }) => {
    trackPlatformEvent('platform.hosted_key.rate_limited', {
      'tool.id': attrs.toolId,
      'hosted_key.env_var': attrs.envVarName,
      'rate_limit.attempt': attrs.attempt,
      'rate_limit.max_retries': attrs.maxRetries,
      'rate_limit.delay_ms': attrs.delayMs,
      ...(attrs.userId && { 'user.id': attrs.userId }),
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
      ...(attrs.workflowId && { 'workflow.id': attrs.workflowId }),
    })
  },
IExecutionLoggerService { input: number output: number total: number + toolCost?: number tokens: { input: number; output: number; total: number } } > diff --git a/apps/sim/lib/logs/execution/logging-factory.ts b/apps/sim/lib/logs/execution/logging-factory.ts index be7e2d5fc5a..98c4424613d 100644 --- a/apps/sim/lib/logs/execution/logging-factory.ts +++ b/apps/sim/lib/logs/execution/logging-factory.ts @@ -95,6 +95,7 @@ export function calculateCostSummary(traceSpans: any[]): { input: number output: number total: number + toolCost?: number tokens: { input: number; output: number; total: number } } > @@ -143,6 +144,7 @@ export function calculateCostSummary(traceSpans: any[]): { input: number output: number total: number + toolCost?: number tokens: { input: number; output: number; total: number } } > = {} @@ -171,6 +173,10 @@ export function calculateCostSummary(traceSpans: any[]): { models[model].tokens.input += span.tokens?.input ?? span.tokens?.prompt ?? 0 models[model].tokens.output += span.tokens?.output ?? span.tokens?.completion ?? 0 models[model].tokens.total += span.tokens?.total || 0 + + if (span.cost.toolCost) { + models[model].toolCost = (models[model].toolCost || 0) + span.cost.toolCost + } } } diff --git a/apps/sim/lib/workflows/subblocks/visibility.ts b/apps/sim/lib/workflows/subblocks/visibility.ts index aab03ca5dba..44cddf1224d 100644 --- a/apps/sim/lib/workflows/subblocks/visibility.ts +++ b/apps/sim/lib/workflows/subblocks/visibility.ts @@ -1,4 +1,5 @@ import { getEnv, isTruthy } from '@/lib/core/config/env' +import { isHosted } from '@/lib/core/config/feature-flags' import type { SubBlockConfig } from '@/blocks/types' export type CanonicalMode = 'basic' | 'advanced' @@ -287,3 +288,12 @@ export function isSubBlockFeatureEnabled(subBlock: SubBlockConfig): boolean { if (!subBlock.requiresFeature) return true return isTruthy(getEnv(subBlock.requiresFeature)) } + +/** + * Check if a subblock should be hidden because we're running on hosted Sim. 
+ * Used for tool API key fields that should be hidden when Sim provides hosted keys. + */ +export function isSubBlockHiddenByHostedKey(subBlock: SubBlockConfig): boolean { + if (!subBlock.hideWhenHosted) return false + return isHosted +} diff --git a/apps/sim/providers/anthropic/core.ts b/apps/sim/providers/anthropic/core.ts index dbe5df92221..240e7458dd4 100644 --- a/apps/sim/providers/anthropic/core.ts +++ b/apps/sim/providers/anthropic/core.ts @@ -19,6 +19,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -490,7 +491,7 @@ export async function executeAnthropicProviderRequest( } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...messages] let iterationCount = 0 let hasUsedForcedTool = false @@ -609,7 +610,7 @@ export async function executeAnthropicProviderRequest( }) let resultContent: unknown - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -783,10 +784,12 @@ export async function executeAnthropicProviderRequest( } const streamCost = calculateCost(request.model, usage.input_tokens, usage.output_tokens) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } const streamEndTime = Date.now() @@ -829,6 +832,7 @@ export async function executeAnthropicProviderRequest( cost: { input: accumulatedCost.input, output: accumulatedCost.output, + toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, @@ -901,7 +905,7 @@ export async function executeAnthropicProviderRequest( } const toolCalls = [] - const toolResults = [] + const 
toolResults: Record[] = [] const currentMessages = [...messages] let iterationCount = 0 let hasUsedForcedTool = false @@ -1022,7 +1026,7 @@ export async function executeAnthropicProviderRequest( }) let resultContent: unknown - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -1208,10 +1212,12 @@ export async function executeAnthropicProviderRequest( } const streamCost = calculateCost(request.model, usage.input_tokens, usage.output_tokens) + const tc2 = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: cost.input + streamCost.input, output: cost.output + streamCost.output, - total: cost.total + streamCost.total, + toolCost: tc2 || undefined, + total: cost.total + streamCost.total + tc2, } const streamEndTime = Date.now() @@ -1254,6 +1260,7 @@ export async function executeAnthropicProviderRequest( cost: { input: cost.input, output: cost.output, + toolCost: undefined as number | undefined, total: cost.total, }, }, diff --git a/apps/sim/providers/azure-openai/index.ts b/apps/sim/providers/azure-openai/index.ts index b171ba9f1a2..930c31035a3 100644 --- a/apps/sim/providers/azure-openai/index.ts +++ b/apps/sim/providers/azure-openai/index.ts @@ -35,6 +35,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -499,10 +500,12 @@ async function executeChatCompletionsRequest( usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } const streamEndTime = Date.now() diff --git a/apps/sim/providers/bedrock/index.ts b/apps/sim/providers/bedrock/index.ts 
index ab7866a5440..ec0af6ab04b 100644 --- a/apps/sim/providers/bedrock/index.ts +++ b/apps/sim/providers/bedrock/index.ts @@ -33,6 +33,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -815,10 +816,12 @@ export const bedrockProvider: ProviderConfig = { } const streamCost = calculateCost(request.model, usage.inputTokens, usage.outputTokens) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: cost.input + streamCost.input, output: cost.output + streamCost.output, - total: cost.total + streamCost.total, + toolCost: tc || undefined, + total: cost.total + streamCost.total + tc, } const streamEndTime = Date.now() @@ -861,6 +864,7 @@ export const bedrockProvider: ProviderConfig = { cost: { input: cost.input, output: cost.output, + toolCost: undefined as number | undefined, total: cost.total, }, }, diff --git a/apps/sim/providers/cerebras/index.ts b/apps/sim/providers/cerebras/index.ts index 85ce7a2445e..9ef64836030 100644 --- a/apps/sim/providers/cerebras/index.ts +++ b/apps/sim/providers/cerebras/index.ts @@ -16,6 +16,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -195,7 +196,7 @@ export const cerebrasProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -313,7 +314,7 @@ export const cerebrasProvider: ProviderConfig = { duration: duration, }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -472,10 +473,12 @@ export const cerebrasProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) 
+ const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { @@ -508,6 +511,7 @@ export const cerebrasProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, + toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/deepseek/index.ts b/apps/sim/providers/deepseek/index.ts index f537e5e89cf..692fb270591 100644 --- a/apps/sim/providers/deepseek/index.ts +++ b/apps/sim/providers/deepseek/index.ts @@ -15,6 +15,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -205,7 +206,7 @@ export const deepseekProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 let hasUsedForcedTool = false @@ -325,7 +326,7 @@ export const deepseekProvider: ProviderConfig = { }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -471,10 +472,12 @@ export const deepseekProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } } ), @@ -508,6 +511,7 @@ export const deepseekProvider: 
ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, + toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/gemini/core.ts b/apps/sim/providers/gemini/core.ts index 55855b334d7..c35f38dc782 100644 --- a/apps/sim/providers/gemini/core.ts +++ b/apps/sim/providers/gemini/core.ts @@ -31,6 +31,7 @@ import { isDeepResearchModel, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' import type { ExecutionState, GeminiProviderType, GeminiUsage } from './types' @@ -1163,10 +1164,12 @@ export async function executeGeminiRequest( usage.promptTokenCount, usage.candidatesTokenCount ) + const tc = sumToolCosts(state.toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, pricing: streamCost.pricing, } diff --git a/apps/sim/providers/groq/index.ts b/apps/sim/providers/groq/index.ts index 756082b45f6..8e1ecbabf94 100644 --- a/apps/sim/providers/groq/index.ts +++ b/apps/sim/providers/groq/index.ts @@ -15,6 +15,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -201,7 +202,7 @@ export const groqProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 let modelTime = firstResponseTime @@ -303,7 +304,7 @@ export const groqProvider: ProviderConfig = { }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent 
= result.output } else { @@ -426,10 +427,12 @@ export const groqProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { @@ -462,6 +465,7 @@ export const groqProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, + toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/index.ts b/apps/sim/providers/index.ts index d99db8a6a4a..a6f03e721f6 100644 --- a/apps/sim/providers/index.ts +++ b/apps/sim/providers/index.ts @@ -8,6 +8,7 @@ import { calculateCost, generateStructuredOutputInstructions, shouldBillModelUsage, + sumToolCosts, supportsReasoningEffort, supportsTemperature, supportsThinking, @@ -162,5 +163,11 @@ export async function executeProviderRequest( } } + const toolCost = sumToolCosts(response.toolResults) + if (toolCost > 0 && response.cost) { + response.cost.toolCost = toolCost + response.cost.total += toolCost + } + return response } diff --git a/apps/sim/providers/mistral/index.ts b/apps/sim/providers/mistral/index.ts index 693885fe289..a332ae7b400 100644 --- a/apps/sim/providers/mistral/index.ts +++ b/apps/sim/providers/mistral/index.ts @@ -16,6 +16,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -258,7 +259,7 @@ export const mistralProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -366,7 +367,7 @@ 
export const mistralProvider: ProviderConfig = { }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -482,10 +483,12 @@ export const mistralProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { diff --git a/apps/sim/providers/ollama/index.ts b/apps/sim/providers/ollama/index.ts index 1495f7aace1..b6cb8dd3234 100644 --- a/apps/sim/providers/ollama/index.ts +++ b/apps/sim/providers/ollama/index.ts @@ -13,7 +13,7 @@ import type { TimeSegment, } from '@/providers/types' import { ProviderError } from '@/providers/types' -import { calculateCost, prepareToolExecution } from '@/providers/utils' +import { calculateCost, prepareToolExecution, sumToolCosts } from '@/providers/utils' import { useProvidersStore } from '@/stores/providers' import { executeTool } from '@/tools' @@ -271,7 +271,7 @@ export const ollamaProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -377,7 +377,7 @@ export const ollamaProvider: ProviderConfig = { }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -486,10 +486,12 @@ export const ollamaProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: 
accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { diff --git a/apps/sim/providers/openai/core.ts b/apps/sim/providers/openai/core.ts index 139e12eaa3d..312ac025ba9 100644 --- a/apps/sim/providers/openai/core.ts +++ b/apps/sim/providers/openai/core.ts @@ -8,6 +8,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -405,7 +406,7 @@ export async function executeResponsesProviderRequest( } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] let iterationCount = 0 let modelTime = firstResponseTime let toolsTime = 0 @@ -512,7 +513,7 @@ export async function executeResponsesProviderRequest( }) let resultContent: Record - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output as Record } else { @@ -728,10 +729,12 @@ export async function executeResponsesProviderRequest( usage?.promptTokens || 0, usage?.completionTokens || 0 ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { diff --git a/apps/sim/providers/openrouter/index.ts b/apps/sim/providers/openrouter/index.ts index 2951d56ae78..7b01fa5784a 100644 --- a/apps/sim/providers/openrouter/index.ts +++ b/apps/sim/providers/openrouter/index.ts @@ -23,6 +23,7 @@ import { generateSchemaInstructions, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -478,10 +479,12 @@ export const 
openRouterProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { diff --git a/apps/sim/providers/types.ts b/apps/sim/providers/types.ts index af5362c3c75..9dd78eb643d 100644 --- a/apps/sim/providers/types.ts +++ b/apps/sim/providers/types.ts @@ -79,7 +79,7 @@ export interface ProviderResponse { total?: number } toolCalls?: FunctionCallResponse[] - toolResults?: any[] + toolResults?: Record[] timing?: { startTime: string endTime: string @@ -93,6 +93,7 @@ export interface ProviderResponse { cost?: { input: number output: number + toolCost?: number total: number pricing: ModelPricing } diff --git a/apps/sim/providers/utils.test.ts b/apps/sim/providers/utils.test.ts index 5276c4e72ca..031c64f1a7c 100644 --- a/apps/sim/providers/utils.test.ts +++ b/apps/sim/providers/utils.test.ts @@ -1405,6 +1405,7 @@ describe('prepareToolExecution', () => { workspaceId: 'ws-456', chatId: 'chat-789', userId: 'user-abc', + skipFixedUsageLog: true, }) }) diff --git a/apps/sim/providers/utils.ts b/apps/sim/providers/utils.ts index 82ca33e1070..7c7cf253117 100644 --- a/apps/sim/providers/utils.ts +++ b/apps/sim/providers/utils.ts @@ -650,6 +650,20 @@ export function calculateCost( } } +/** + * Sums the `cost.total` from each tool result returned during a provider tool loop. + * Tool results may carry a `cost` object injected by `applyHostedKeyCostToResult`. 
+ */ +export function sumToolCosts(toolResults?: Record[]): number { + if (!toolResults?.length) return 0 + let total = 0 + for (const tr of toolResults) { + const cost = tr?.cost as Record | undefined + if (cost?.total && typeof cost.total === 'number') total += cost.total + } + return total +} + export function getModelPricing(modelId: string): any { const embeddingPricing = getEmbeddingModelPricing(modelId) if (embeddingPricing) { @@ -1140,6 +1154,7 @@ export function prepareToolExecution( ? { isDeployedContext: request.isDeployedContext } : {}), ...(request.callChain ? { callChain: request.callChain } : {}), + skipFixedUsageLog: true, }, } : {}), diff --git a/apps/sim/providers/vllm/index.ts b/apps/sim/providers/vllm/index.ts index db2f3b16597..e4f0a4c93e8 100644 --- a/apps/sim/providers/vllm/index.ts +++ b/apps/sim/providers/vllm/index.ts @@ -17,6 +17,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { createReadableStreamFromVLLMStream } from '@/providers/vllm/utils' @@ -315,7 +316,7 @@ export const vllmProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -428,7 +429,7 @@ export const vllmProvider: ProviderConfig = { }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -553,10 +554,12 @@ export const vllmProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + 
streamCost.total + tc, } }), execution: { diff --git a/apps/sim/providers/xai/index.ts b/apps/sim/providers/xai/index.ts index c5a6766fbfd..cfd2f3b784e 100644 --- a/apps/sim/providers/xai/index.ts +++ b/apps/sim/providers/xai/index.ts @@ -16,6 +16,7 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, + sumToolCosts, } from '@/providers/utils' import { checkForForcedToolUsage, @@ -215,7 +216,7 @@ export const xAIProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults = [] + const toolResults: Record[] = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -331,7 +332,7 @@ export const xAIProvider: ProviderConfig = { duration: duration, }) let resultContent: any - if (result.success) { + if (result.success && result.output) { toolResults.push(result.output) resultContent = result.output } else { @@ -509,10 +510,12 @@ export const xAIProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) + const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - total: accumulatedCost.total + streamCost.total, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, } }), execution: { @@ -545,6 +548,7 @@ export const xAIProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, + toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/serializer/index.ts b/apps/sim/serializer/index.ts index 671535ef684..9c21661deb5 100644 --- a/apps/sim/serializer/index.ts +++ b/apps/sim/serializer/index.ts @@ -9,6 +9,7 @@ import { isCanonicalPair, isNonEmptyValue, isSubBlockFeatureEnabled, + isSubBlockHiddenByHostedKey, resolveCanonicalMode, } from '@/lib/workflows/subblocks/visibility' import { getBlock } from '@/blocks' @@ -48,6 +49,7 @@ 
function shouldSerializeSubBlock( canonicalModeOverrides?: CanonicalModeOverrides ): boolean { if (!isSubBlockFeatureEnabled(subBlockConfig)) return false + if (isSubBlockHiddenByHostedKey(subBlockConfig)) return false if (subBlockConfig.mode === 'trigger') { if (!isTriggerContext && !isTriggerCategory) return false diff --git a/apps/sim/tools/exa/answer.ts b/apps/sim/tools/exa/answer.ts index 95c29e0e686..7990f57ec9f 100644 --- a/apps/sim/tools/exa/answer.ts +++ b/apps/sim/tools/exa/answer.ts @@ -27,6 +27,25 @@ export const answerTool: ToolConfig = { description: 'Exa AI API Key', }, }, + hosting: { + envKeyPrefix: 'EXA_API_KEY', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'custom', + getCost: (_params, output) => { + const costDollars = output.__costDollars as { total?: number } | undefined + if (costDollars?.total == null) { + throw new Error('Exa answer response missing costDollars field') + } + return { cost: costDollars.total, metadata: { costDollars } } + }, + }, + rateLimit: { + mode: 'per_request', + requestsPerMinute: 5, + }, + }, request: { url: 'https://api.exa.ai/answer', @@ -61,6 +80,7 @@ export const answerTool: ToolConfig = { url: citation.url, text: citation.text || '', })) || [], + __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/find_similar_links.ts b/apps/sim/tools/exa/find_similar_links.ts index 0996061a3d9..1685e601168 100644 --- a/apps/sim/tools/exa/find_similar_links.ts +++ b/apps/sim/tools/exa/find_similar_links.ts @@ -76,6 +76,25 @@ export const findSimilarLinksTool: ToolConfig< description: 'Exa AI API Key', }, }, + hosting: { + envKeyPrefix: 'EXA_API_KEY', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'custom', + getCost: (_params, output) => { + const costDollars = output.__costDollars as { total?: number } | undefined + if (costDollars?.total == null) { + throw new Error('Exa find_similar_links response missing costDollars field') + } + return { cost: 
costDollars.total, metadata: { costDollars } } + }, + }, + rateLimit: { + mode: 'per_request', + requestsPerMinute: 10, + }, + }, request: { url: 'https://api.exa.ai/findSimilar', @@ -140,6 +159,7 @@ export const findSimilarLinksTool: ToolConfig< highlights: result.highlights, score: result.score || 0, })), + __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/get_contents.ts b/apps/sim/tools/exa/get_contents.ts index be44b70222d..c1b96967bd1 100644 --- a/apps/sim/tools/exa/get_contents.ts +++ b/apps/sim/tools/exa/get_contents.ts @@ -61,6 +61,25 @@ export const getContentsTool: ToolConfig { + const costDollars = output.__costDollars as { total?: number } | undefined + if (costDollars?.total == null) { + throw new Error('Exa get_contents response missing costDollars field') + } + return { cost: costDollars.total, metadata: { costDollars } } + }, + }, + rateLimit: { + mode: 'per_request', + requestsPerMinute: 10, + }, + }, request: { url: 'https://api.exa.ai/contents', @@ -132,6 +151,7 @@ export const getContentsTool: ToolConfig = { description: 'Exa AI API Key', }, }, + hosting: { + envKeyPrefix: 'EXA_API_KEY', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'custom', + getCost: (_params, output) => { + const costDollars = output.__costDollars as { total?: number } | undefined + if (costDollars?.total == null) { + throw new Error('Exa search response missing costDollars field') + } + return { cost: costDollars.total, metadata: { costDollars } } + }, + }, + rateLimit: { + mode: 'per_request', + requestsPerMinute: 5, + }, + }, request: { url: 'https://api.exa.ai/search', @@ -167,6 +186,7 @@ export const searchTool: ToolConfig = { highlights: result.highlights, score: result.score, })), + __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/types.ts b/apps/sim/tools/exa/types.ts index bcdf63d1a2f..f633272a1af 100644 --- a/apps/sim/tools/exa/types.ts +++ b/apps/sim/tools/exa/types.ts @@ -6,6 +6,11 @@ export 
interface ExaBaseParams { apiKey: string } +/** Cost breakdown returned by Exa API responses */ +export interface ExaCostDollars { + total: number +} + // Search tool types export interface ExaSearchParams extends ExaBaseParams { query: string @@ -50,6 +55,7 @@ export interface ExaSearchResult { export interface ExaSearchResponse extends ToolResponse { output: { results: ExaSearchResult[] + __costDollars?: ExaCostDollars } } @@ -78,6 +84,7 @@ export interface ExaGetContentsResult { export interface ExaGetContentsResponse extends ToolResponse { output: { results: ExaGetContentsResult[] + __costDollars?: ExaCostDollars } } @@ -120,6 +127,7 @@ export interface ExaSimilarLink { export interface ExaFindSimilarLinksResponse extends ToolResponse { output: { similarLinks: ExaSimilarLink[] + __costDollars?: ExaCostDollars } } @@ -137,6 +145,7 @@ export interface ExaAnswerResponse extends ToolResponse { url: string text: string }[] + __costDollars?: ExaCostDollars } } diff --git a/apps/sim/tools/index.test.ts b/apps/sim/tools/index.test.ts index fe4b4469191..288893633af 100644 --- a/apps/sim/tools/index.test.ts +++ b/apps/sim/tools/index.test.ts @@ -15,52 +15,85 @@ import { } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' -// Mock custom tools query - must be hoisted before imports -vi.mock('@/hooks/queries/custom-tools', () => ({ - getCustomTool: (toolId: string) => { - if (toolId === 'custom-tool-123') { - return { - id: 'custom-tool-123', - title: 'Custom Weather Tool', - code: 'return { result: "Weather data" }', - schema: { - function: { - description: 'Get weather information', - parameters: { - type: 'object', - properties: { - location: { type: 'string', description: 'City name' }, - unit: { type: 'string', description: 'Unit (metric/imperial)' }, - }, - required: ['location'], - }, - }, - }, - } - } - return undefined +// Hoisted mock state - these are available to vi.mock factories +const { mockIsHosted, mockEnv, 
mockGetBYOKKey, mockLogFixedUsage, mockRateLimiterFns } = vi.hoisted( + () => ({ + mockIsHosted: { value: false }, + mockEnv: { NEXT_PUBLIC_APP_URL: 'http://localhost:3000' } as Record, + mockGetBYOKKey: vi.fn(), + mockLogFixedUsage: vi.fn(), + mockRateLimiterFns: { + acquireKey: vi.fn(), + preConsumeCapacity: vi.fn(), + consumeCapacity: vi.fn(), + }, + }) +) + +// Mock feature flags +vi.mock('@/lib/core/config/feature-flags', () => ({ + get isHosted() { + return mockIsHosted.value }, - getCustomTools: () => [ - { - id: 'custom-tool-123', - title: 'Custom Weather Tool', - code: 'return { result: "Weather data" }', - schema: { - function: { - description: 'Get weather information', - parameters: { - type: 'object', - properties: { - location: { type: 'string', description: 'City name' }, - unit: { type: 'string', description: 'Unit (metric/imperial)' }, - }, - required: ['location'], + isProd: false, + isDev: true, + isTest: true, +})) + +// Mock env config to control hosted key availability +vi.mock('@/lib/core/config/env', () => ({ + env: new Proxy({} as Record, { + get: (_target, prop: string) => mockEnv[prop], + }), + getEnv: (key: string) => mockEnv[key], + isTruthy: (val: unknown) => val === true || val === 'true' || val === '1', + isFalsy: (val: unknown) => val === false || val === 'false' || val === '0', +})) + +// Mock getBYOKKey +vi.mock('@/lib/api-key/byok', () => ({ + getBYOKKey: (...args: unknown[]) => mockGetBYOKKey(...args), +})) + +// Mock logFixedUsage for billing +vi.mock('@/lib/billing/core/usage-log', () => ({ + logFixedUsage: (...args: unknown[]) => mockLogFixedUsage(...args), +})) + +vi.mock('@/lib/core/rate-limiter/hosted-key', () => ({ + getHostedKeyRateLimiter: () => mockRateLimiterFns, +})) + +// Mock custom tools - define mock data inside factory function +vi.mock('@/hooks/queries/custom-tools', () => { + const mockCustomTool = { + id: 'custom-tool-123', + title: 'Custom Weather Tool', + code: 'return { result: "Weather data" }', + schema: 
{ + function: { + description: 'Get weather information', + parameters: { + type: 'object', + properties: { + location: { type: 'string', description: 'City name' }, + unit: { type: 'string', description: 'Unit (metric/imperial)' }, }, + required: ['location'], }, }, }, - ], -})) + } + return { + getCustomTool: (toolId: string) => { + if (toolId === 'custom-tool-123') { + return mockCustomTool + } + return undefined + }, + getCustomTools: () => [mockCustomTool], + } +}) import { executeTool } from '@/tools/index' import { tools } from '@/tools/registry' @@ -1186,3 +1219,712 @@ describe('MCP Tool Execution', () => { }) }) }) + +describe('Hosted Key Injection', () => { + let cleanupEnvVars: () => void + + beforeEach(() => { + process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' + cleanupEnvVars = setupEnvVars({ NEXT_PUBLIC_APP_URL: 'http://localhost:3000' }) + vi.clearAllMocks() + mockGetBYOKKey.mockReset() + mockLogFixedUsage.mockReset() + }) + + afterEach(() => { + vi.resetAllMocks() + cleanupEnvVars() + }) + + it('should not inject hosted key when tool has no hosting config', async () => { + const mockTool = { + id: 'test_no_hosting', + name: 'Test No Hosting', + description: 'A test tool without hosting config', + version: '1.0.0', + params: {}, + request: { + url: '/api/test/endpoint', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_no_hosting = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => ({ + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + })), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + await executeTool('test_no_hosting', {}, false, mockContext) + + // BYOK should not be called since there's no 
hosting config + expect(mockGetBYOKKey).not.toHaveBeenCalled() + + Object.assign(tools, originalTools) + }) + + it('should check BYOK key first when tool has hosting config', async () => { + // Note: isHosted is mocked to false by default, so hosted key injection won't happen + // This test verifies the flow when isHosted would be true + const mockTool = { + id: 'test_with_hosting', + name: 'Test With Hosting', + description: 'A test tool with hosting config', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: true }, + }, + hosting: { + envKeyPrefix: 'TEST_API', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'per_request' as const, + cost: 0.005, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/endpoint', + method: 'POST' as const, + headers: (params: any) => ({ + 'Content-Type': 'application/json', + 'x-api-key': params.apiKey, + }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_with_hosting = mockTool + + // Mock BYOK returning a key + mockGetBYOKKey.mockResolvedValue({ apiKey: 'byok-test-key', isBYOK: true }) + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => ({ + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + })), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + await executeTool('test_with_hosting', {}, false, mockContext) + + // With isHosted=false, BYOK won't be called - this is expected behavior + // The test documents the current behavior + Object.assign(tools, originalTools) + }) + + it('should use per_request pricing model correctly', async () => { + const mockTool = { + id: 'test_per_request_pricing', + name: 'Test Per Request Pricing', + description: 'A test tool with per_request 
pricing', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: true }, + }, + hosting: { + envKeyPrefix: 'TEST_API', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'per_request' as const, + cost: 0.005, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/endpoint', + method: 'POST' as const, + headers: (params: any) => ({ + 'Content-Type': 'application/json', + 'x-api-key': params.apiKey, + }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + // Verify pricing config structure + expect(mockTool.hosting.pricing.type).toBe('per_request') + expect(mockTool.hosting.pricing.cost).toBe(0.005) + }) + + it('should use custom pricing model correctly', async () => { + const mockGetCost = vi.fn().mockReturnValue({ cost: 0.01, metadata: { breakdown: 'test' } }) + + const mockTool = { + id: 'test_custom_pricing', + name: 'Test Custom Pricing', + description: 'A test tool with custom pricing', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: true }, + }, + hosting: { + envKeyPrefix: 'TEST_API', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'custom' as const, + getCost: mockGetCost, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/endpoint', + method: 'POST' as const, + headers: (params: any) => ({ + 'Content-Type': 'application/json', + 'x-api-key': params.apiKey, + }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success', costDollars: { total: 0.01 } }, + }), + } + + // Verify pricing config structure + expect(mockTool.hosting.pricing.type).toBe('custom') + expect(typeof mockTool.hosting.pricing.getCost).toBe('function') + + // Test getCost returns expected value + const result = mockTool.hosting.pricing.getCost({}, { costDollars: { total: 
0.01 } }) + expect(result).toEqual({ cost: 0.01, metadata: { breakdown: 'test' } }) + }) + + it('should handle custom pricing returning a number', async () => { + const mockGetCost = vi.fn().mockReturnValue(0.005) + + const mockTool = { + id: 'test_custom_pricing_number', + name: 'Test Custom Pricing Number', + description: 'A test tool with custom pricing returning number', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: true }, + }, + hosting: { + envKeyPrefix: 'TEST_API', + apiKeyParam: 'apiKey', + byokProviderId: 'exa', + pricing: { + type: 'custom' as const, + getCost: mockGetCost, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/endpoint', + method: 'POST' as const, + headers: (params: any) => ({ + 'Content-Type': 'application/json', + 'x-api-key': params.apiKey, + }), + }, + } + + // Test getCost returns a number + const result = mockTool.hosting.pricing.getCost({}, {}) + expect(result).toBe(0.005) + }) +}) + +describe('Rate Limiting and Retry Logic', () => { + let cleanupEnvVars: () => void + + beforeEach(() => { + process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' + cleanupEnvVars = setupEnvVars({ + NEXT_PUBLIC_APP_URL: 'http://localhost:3000', + }) + vi.clearAllMocks() + mockIsHosted.value = true + mockEnv.TEST_HOSTED_KEY = 'test-hosted-api-key' + mockGetBYOKKey.mockResolvedValue(null) + // Set up throttler mock defaults + mockRateLimiterFns.acquireKey.mockResolvedValue({ + success: true, + key: 'mock-hosted-key', + keyIndex: 0, + envVarName: 'TEST_HOSTED_KEY', + }) + mockRateLimiterFns.preConsumeCapacity.mockResolvedValue(true) + mockRateLimiterFns.consumeCapacity.mockResolvedValue(undefined) + }) + + afterEach(() => { + vi.resetAllMocks() + cleanupEnvVars() + mockIsHosted.value = false + mockEnv.TEST_HOSTED_KEY = undefined + }) + + it('should retry on 429 rate limit errors with exponential backoff', async () => { + let attemptCount = 0 + + const mockTool = 
{ + id: 'test_rate_limit', + name: 'Test Rate Limit', + description: 'A test tool for rate limiting', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: false }, + }, + hosting: { + envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'per_request' as const, + cost: 0.001, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/rate-limit', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_rate_limit = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => { + attemptCount++ + if (attemptCount < 3) { + // Return a proper 429 response - the code extracts error, attaches status, and throws + return { + ok: false, + status: 429, + statusText: 'Too Many Requests', + headers: new Headers(), + json: () => Promise.resolve({ error: 'Rate limited' }), + text: () => Promise.resolve('Rate limited'), + } + } + return { + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + } + }), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + const result = await executeTool('test_rate_limit', {}, false, mockContext) + + // Should succeed after retries + expect(result.success).toBe(true) + // Should have made 3 attempts (2 failures + 1 success) + expect(attemptCount).toBe(3) + + Object.assign(tools, originalTools) + }) + + it('should fail after max retries on persistent rate limiting', async () => { + const mockTool = { + id: 'test_persistent_rate_limit', + name: 'Test Persistent Rate Limit', + description: 'A test tool for persistent rate limiting', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: false }, + }, + hosting: { + 
envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'per_request' as const, + cost: 0.001, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/persistent-rate-limit', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + } + + const originalTools = { ...tools } + ;(tools as any).test_persistent_rate_limit = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => { + // Always return 429 to test max retries exhaustion + return { + ok: false, + status: 429, + statusText: 'Too Many Requests', + headers: new Headers(), + json: () => Promise.resolve({ error: 'Rate limited' }), + text: () => Promise.resolve('Rate limited'), + } + }), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + const result = await executeTool('test_persistent_rate_limit', {}, false, mockContext) + + // Should fail after all retries exhausted + expect(result.success).toBe(false) + expect(result.error).toContain('Rate limited') + + Object.assign(tools, originalTools) + }) + + it('should not retry on non-rate-limit errors', async () => { + let attemptCount = 0 + + const mockTool = { + id: 'test_no_retry', + name: 'Test No Retry', + description: 'A test tool that should not retry', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: false }, + }, + hosting: { + envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'per_request' as const, + cost: 0.001, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/no-retry', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + } + + const originalTools = { ...tools } + ;(tools as any).test_no_retry = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => { + attemptCount++ + // 
Return a 400 response - should not trigger retry logic + return { + ok: false, + status: 400, + statusText: 'Bad Request', + headers: new Headers(), + json: () => Promise.resolve({ error: 'Bad request' }), + text: () => Promise.resolve('Bad request'), + } + }), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + const result = await executeTool('test_no_retry', {}, false, mockContext) + + // Should fail immediately without retries + expect(result.success).toBe(false) + expect(attemptCount).toBe(1) + + Object.assign(tools, originalTools) + }) +}) + +describe('Cost Field Handling', () => { + let cleanupEnvVars: () => void + + beforeEach(() => { + process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' + cleanupEnvVars = setupEnvVars({ + NEXT_PUBLIC_APP_URL: 'http://localhost:3000', + }) + vi.clearAllMocks() + mockIsHosted.value = true + mockEnv.TEST_HOSTED_KEY = 'test-hosted-api-key' + mockGetBYOKKey.mockResolvedValue(null) + mockLogFixedUsage.mockResolvedValue(undefined) + // Set up throttler mock defaults + mockRateLimiterFns.acquireKey.mockResolvedValue({ + success: true, + key: 'mock-hosted-key', + keyIndex: 0, + envVarName: 'TEST_HOSTED_KEY', + }) + mockRateLimiterFns.preConsumeCapacity.mockResolvedValue(true) + mockRateLimiterFns.consumeCapacity.mockResolvedValue(undefined) + }) + + afterEach(() => { + vi.resetAllMocks() + cleanupEnvVars() + mockIsHosted.value = false + mockEnv.TEST_HOSTED_KEY = undefined + }) + + it('should add cost to output when using hosted key with per_request pricing', async () => { + const mockTool = { + id: 'test_cost_per_request', + name: 'Test Cost Per Request', + description: 'A test tool with per_request pricing', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: false }, + }, + hosting: { + envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'per_request' as const, + cost: 0.005, + }, + rateLimit: { + mode: 'per_request' as const, + 
requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/cost', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_cost_per_request = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => ({ + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + })), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext({ + userId: 'user-123', + } as any) + const result = await executeTool('test_cost_per_request', {}, false, mockContext) + + expect(result.success).toBe(true) + // Note: In test environment, hosted key injection may not work due to env mocking complexity. + // The cost calculation logic is tested via the pricing model tests above. + // This test verifies the tool execution flow when hosted key IS available (by checking output structure). 
+ if (result.output.cost) { + expect(result.output.cost.total).toBe(0.005) + // Should have logged usage + expect(mockLogFixedUsage).toHaveBeenCalledWith( + expect.objectContaining({ + userId: 'user-123', + cost: 0.005, + description: 'tool:test_cost_per_request', + }) + ) + } + + Object.assign(tools, originalTools) + }) + + it('should not add cost when not using hosted key', async () => { + mockIsHosted.value = false + + const mockTool = { + id: 'test_no_hosted_cost', + name: 'Test No Hosted Cost', + description: 'A test tool without hosted key', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: true }, + }, + hosting: { + envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'per_request' as const, + cost: 0.005, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/no-hosted', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success' }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_no_hosted_cost = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => ({ + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + })), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext() + // Pass user's own API key + const result = await executeTool( + 'test_no_hosted_cost', + { apiKey: 'user-api-key' }, + false, + mockContext + ) + + expect(result.success).toBe(true) + // Should not have cost since user provided their own key + expect(result.output.cost).toBeUndefined() + // Should not have logged usage + expect(mockLogFixedUsage).not.toHaveBeenCalled() + + Object.assign(tools, originalTools) + }) + + it('should use custom pricing getCost function', async () => { + const mockGetCost = 
vi.fn().mockReturnValue({ + cost: 0.015, + metadata: { mode: 'advanced', results: 10 }, + }) + + const mockTool = { + id: 'test_custom_pricing_cost', + name: 'Test Custom Pricing Cost', + description: 'A test tool with custom pricing', + version: '1.0.0', + params: { + apiKey: { type: 'string', required: false }, + mode: { type: 'string', required: false }, + }, + hosting: { + envKeyPrefix: 'TEST_HOSTED_KEY', + apiKeyParam: 'apiKey', + pricing: { + type: 'custom' as const, + getCost: mockGetCost, + }, + rateLimit: { + mode: 'per_request' as const, + requestsPerMinute: 100, + }, + }, + request: { + url: '/api/test/custom-pricing', + method: 'POST' as const, + headers: () => ({ 'Content-Type': 'application/json' }), + }, + transformResponse: vi.fn().mockResolvedValue({ + success: true, + output: { result: 'success', results: 10 }, + }), + } + + const originalTools = { ...tools } + ;(tools as any).test_custom_pricing_cost = mockTool + + global.fetch = Object.assign( + vi.fn().mockImplementation(async () => ({ + ok: true, + status: 200, + headers: new Headers(), + json: () => Promise.resolve({ success: true }), + })), + { preconnect: vi.fn() } + ) as typeof fetch + + const mockContext = createToolExecutionContext({ + userId: 'user-123', + } as any) + const result = await executeTool( + 'test_custom_pricing_cost', + { mode: 'advanced' }, + false, + mockContext + ) + + expect(result.success).toBe(true) + expect(result.output.cost).toBeDefined() + expect(result.output.cost.total).toBe(0.015) + + // getCost should have been called with params and output + expect(mockGetCost).toHaveBeenCalled() + + // Should have logged usage with metadata + expect(mockLogFixedUsage).toHaveBeenCalledWith( + expect.objectContaining({ + cost: 0.015, + metadata: { mode: 'advanced', results: 10 }, + }) + ) + + Object.assign(tools, originalTools) + }) +}) diff --git a/apps/sim/tools/index.ts b/apps/sim/tools/index.ts index 8184cf70643..7722e2c630b 100644 --- a/apps/sim/tools/index.ts +++ 
b/apps/sim/tools/index.ts @@ -1,10 +1,15 @@ import { createLogger } from '@sim/logger' +import { getBYOKKey } from '@/lib/api-key/byok' import { generateInternalToken } from '@/lib/auth/internal' +import { logFixedUsage } from '@/lib/billing/core/usage-log' +import { isHosted } from '@/lib/core/config/feature-flags' import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits' +import { getHostedKeyRateLimiter } from '@/lib/core/rate-limiter' import { secureFetchWithPinnedIP, validateUrlWithDNS, } from '@/lib/core/security/input-validation.server' +import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls' import { SIM_VIA_HEADER, serializeCallChain } from '@/lib/execution/call-chain' @@ -14,7 +19,14 @@ import { resolveSkillContent } from '@/executor/handlers/agent/skills-resolver' import type { ExecutionContext } from '@/executor/types' import type { ErrorInfo } from '@/tools/error-extractors' import { extractErrorMessage } from '@/tools/error-extractors' -import type { OAuthTokenPayload, ToolConfig, ToolResponse, ToolRetryConfig } from '@/tools/types' +import type { + BYOKProviderId, + OAuthTokenPayload, + ToolConfig, + ToolHostingPricing, + ToolResponse, + ToolRetryConfig, +} from '@/tools/types' import { formatRequestParams, getTool, @@ -24,6 +36,365 @@ import { const logger = createLogger('Tools') +/** Result from hosted key injection */ +interface HostedKeyInjectionResult { + isUsingHostedKey: boolean + envVarName?: string +} + +/** + * Inject hosted API key if tool supports it and user didn't provide one. + * Checks BYOK workspace keys first, then uses the HostedKeyRateLimiter for round-robin key selection. + * Returns whether a hosted (billable) key was injected and which env var it came from. 
+ */ +async function injectHostedKeyIfNeeded( + tool: ToolConfig, + params: Record, + executionContext: ExecutionContext | undefined, + requestId: string +): Promise { + if (!tool.hosting) return { isUsingHostedKey: false } + if (!isHosted) return { isUsingHostedKey: false } + + const { envKeyPrefix, apiKeyParam, byokProviderId, rateLimit } = tool.hosting + + // Derive workspace/user/workflow IDs from executionContext or params._context + const ctx = params._context as Record | undefined + const workspaceId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) + const userId = executionContext?.userId || (ctx?.userId as string | undefined) + const workflowId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) + + // Check BYOK workspace key first + if (byokProviderId && workspaceId) { + try { + const byokResult = await getBYOKKey(workspaceId, byokProviderId as BYOKProviderId) + if (byokResult) { + params[apiKeyParam] = byokResult.apiKey + logger.info(`[${requestId}] Using BYOK key for ${tool.id}`) + return { isUsingHostedKey: false } // Don't bill - user's own key + } + } catch (error) { + logger.error(`[${requestId}] Failed to get BYOK key for ${tool.id}:`, error) + // Fall through to hosted key + } + } + + const rateLimiter = getHostedKeyRateLimiter() + const provider = byokProviderId || tool.id + const billingActorId = workspaceId + + if (!billingActorId) { + logger.error(`[${requestId}] No workspace ID available for hosted key rate limiting`) + return { isUsingHostedKey: false } + } + + const acquireResult = await rateLimiter.acquireKey( + provider, + envKeyPrefix, + rateLimit, + billingActorId + ) + + if (!acquireResult.success && acquireResult.billingActorRateLimited) { + logger.warn(`[${requestId}] Billing actor ${billingActorId} rate limited for ${tool.id}`, { + provider, + retryAfterMs: acquireResult.retryAfterMs, + }) + + PlatformEvents.userThrottled({ + toolId: tool.id, + reason: 'billing_actor_limit', + 
provider, + retryAfterMs: acquireResult.retryAfterMs ?? 0, + userId, + workspaceId, + workflowId, + }) + + const error = new Error(acquireResult.error || `Rate limit exceeded for ${tool.id}`) + ;(error as any).status = 429 + ;(error as any).retryAfterMs = acquireResult.retryAfterMs + throw error + } + + // Handle no keys configured (503) + if (!acquireResult.success) { + logger.error(`[${requestId}] No hosted keys configured for ${tool.id}: ${acquireResult.error}`) + const error = new Error(acquireResult.error || `No hosted keys configured for ${tool.id}`) + ;(error as any).status = 503 + throw error + } + + params[apiKeyParam] = acquireResult.key + logger.info(`[${requestId}] Using hosted key for ${tool.id} (${acquireResult.envVarName})`, { + keyIndex: acquireResult.keyIndex, + provider, + }) + + return { + isUsingHostedKey: true, + envVarName: acquireResult.envVarName, + } +} + +/** + * Check if an error is a rate limit (throttling) error + */ +function isRateLimitError(error: unknown): boolean { + if (error && typeof error === 'object') { + const status = (error as { status?: number }).status + // 429 = Too Many Requests, 503 = Service Unavailable (sometimes used for rate limiting) + if (status === 429 || status === 503) return true + } + return false +} + +/** Context for retry with rate limit tracking */ +interface RetryContext { + requestId: string + toolId: string + envVarName: string + executionContext?: ExecutionContext +} + +/** + * Execute a function with exponential backoff retry for rate limiting errors. + * Only used for hosted key requests. Tracks rate limit events via telemetry. 
+ */ +async function executeWithRetry( + fn: () => Promise, + context: RetryContext, + maxRetries = 3, + baseDelayMs = 1000 +): Promise { + const { requestId, toolId, envVarName, executionContext } = context + let lastError: unknown + + for (let attempt = 0; attempt <= maxRetries; attempt++) { + try { + return await fn() + } catch (error) { + lastError = error + + if (!isRateLimitError(error) || attempt === maxRetries) { + if (isRateLimitError(error) && attempt === maxRetries) { + PlatformEvents.userThrottled({ + toolId, + reason: 'upstream_retries_exhausted', + userId: executionContext?.userId, + workspaceId: executionContext?.workspaceId, + workflowId: executionContext?.workflowId, + }) + } + throw error + } + + const delayMs = baseDelayMs * 2 ** attempt + + // Track throttling event via telemetry + PlatformEvents.hostedKeyRateLimited({ + toolId, + envVarName, + attempt: attempt + 1, + maxRetries, + delayMs, + userId: executionContext?.userId, + workspaceId: executionContext?.workspaceId, + workflowId: executionContext?.workflowId, + }) + + logger.warn( + `[${requestId}] Rate limited for ${toolId} (${envVarName}), retrying in ${delayMs}ms (attempt ${attempt + 1}/${maxRetries})` + ) + await new Promise((resolve) => setTimeout(resolve, delayMs)) + } + } + + throw lastError +} + +/** Result from cost calculation */ +interface ToolCostResult { + cost: number + metadata?: Record +} + +/** + * Calculate cost based on pricing model + */ +function calculateToolCost( + pricing: ToolHostingPricing, + params: Record, + response: Record +): ToolCostResult { + switch (pricing.type) { + case 'per_request': + return { cost: pricing.cost } + + case 'custom': { + const result = pricing.getCost(params, response) + if (typeof result === 'number') { + return { cost: result } + } + return result + } + + default: { + const exhaustiveCheck: never = pricing + throw new Error(`Unknown pricing type: ${(exhaustiveCheck as ToolHostingPricing).type}`) + } + } +} + +interface 
HostedKeyCostResult { + cost: number + metadata?: Record +} + +/** + * Calculate and log hosted key cost for a tool execution. + * Logs to usageLog for audit trail and returns cost + metadata for output. + */ +async function processHostedKeyCost( + tool: ToolConfig, + params: Record, + response: Record, + executionContext: ExecutionContext | undefined, + requestId: string +): Promise { + if (!tool.hosting?.pricing) { + return { cost: 0 } + } + + const { cost, metadata } = calculateToolCost(tool.hosting.pricing, params, response) + + if (cost <= 0) return { cost: 0 } + + const ctx = params._context as Record | undefined + const userId = executionContext?.userId || (ctx?.userId as string | undefined) + const wsId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) + const wfId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) + + if (!userId) return { cost, metadata } + + const skipLog = !!ctx?.skipFixedUsageLog + if (!skipLog) { + try { + await logFixedUsage({ + userId, + source: 'workflow', + description: `tool:${tool.id}`, + cost, + workspaceId: wsId, + workflowId: wfId, + executionId: executionContext?.executionId, + metadata, + }) + logger.debug( + `[${requestId}] Logged hosted key cost for ${tool.id}: $${cost}`, + metadata ? { metadata } : {} + ) + } catch (error) { + logger.error(`[${requestId}] Failed to log hosted key usage for ${tool.id}:`, error) + } + } else { + logger.debug( + `[${requestId}] Skipping fixed usage log for ${tool.id} (cost will be tracked via provider tool loop)` + ) + } + + return { cost, metadata } +} + +/** + * Report custom dimension usage after successful hosted-key tool execution. + * Only applies to tools with `custom` rate limit mode. Fires and logs; + * failures here do not block the response since execution already succeeded. 
+ */ +async function reportCustomDimensionUsage( + tool: ToolConfig, + params: Record, + response: Record, + executionContext: ExecutionContext | undefined, + requestId: string +): Promise { + if (tool.hosting?.rateLimit.mode !== 'custom') return + const ctx = params._context as Record | undefined + const billingActorId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) + if (!billingActorId) return + + const rateLimiter = getHostedKeyRateLimiter() + const provider = tool.hosting.byokProviderId || tool.id + + try { + const result = await rateLimiter.reportUsage( + provider, + billingActorId, + tool.hosting.rateLimit, + params, + response + ) + + for (const dim of result.dimensions) { + if (!dim.allowed) { + logger.warn(`[${requestId}] Dimension ${dim.name} overdrawn after ${tool.id} execution`, { + consumed: dim.consumed, + tokensRemaining: dim.tokensRemaining, + }) + } + } + } catch (error) { + logger.error(`[${requestId}] Failed to report custom dimension usage for ${tool.id}:`, error) + } +} + +/** + * Strips internal fields (keys starting with `__`) from tool output before + * returning to users. The double-underscore prefix is reserved for transient + * data (e.g. `__costDollars`) and will never collide with legitimate API + * fields like `_id`. + */ +function stripInternalFields(output: Record): Record { + const result: Record = {} + for (const [key, value] of Object.entries(output)) { + if (!key.startsWith('__')) { + result[key] = value + } + } + return result +} + +/** + * Apply post-execution hosted-key cost tracking to a successful tool result. + * Reports custom dimension usage, calculates cost, and merges it into the output. 
+ */ +async function applyHostedKeyCostToResult( + finalResult: ToolResponse, + tool: ToolConfig, + params: Record, + executionContext: ExecutionContext | undefined, + requestId: string +): Promise { + await reportCustomDimensionUsage(tool, params, finalResult.output, executionContext, requestId) + + const { cost: hostedKeyCost, metadata } = await processHostedKeyCost( + tool, + params, + finalResult.output, + executionContext, + requestId + ) + if (hostedKeyCost > 0) { + finalResult.output = { + ...finalResult.output, + cost: { + ...metadata, + total: hostedKeyCost, + }, + } + } +} + /** * Normalizes a tool ID by stripping resource ID suffix (UUID/tableId). * Workflow tools: 'workflow_executor_' -> 'workflow_executor' @@ -299,6 +670,15 @@ export async function executeTool( throw new Error(`Tool not found: ${toolId}`) } + // Inject hosted API key if tool supports it and user didn't provide one + const hostedKeyInfo = await injectHostedKeyIfNeeded( + tool, + contextParams, + executionContext, + requestId + ) + + // If we have a credential parameter, fetch the access token if (contextParams.oauthCredential) { contextParams.credential = contextParams.oauthCredential } @@ -419,8 +799,22 @@ export async function executeTool( const endTime = new Date() const endTimeISO = endTime.toISOString() const duration = endTime.getTime() - startTime.getTime() + + if (hostedKeyInfo.isUsingHostedKey && finalResult.success) { + await applyHostedKeyCostToResult( + finalResult, + tool, + contextParams, + executionContext, + requestId + ) + } + + const strippedOutput = stripInternalFields(finalResult.output || {}) + return { ...finalResult, + output: strippedOutput, timing: { startTime: startTimeISO, endTime: endTimeISO, @@ -430,7 +824,15 @@ export async function executeTool( } // Execute the tool request directly (internal routes use regular fetch, external use SSRF-protected fetch) - const result = await executeToolRequest(toolId, tool, contextParams) + // Wrap with retry logic for 
hosted keys to handle rate limiting due to higher usage + const result = hostedKeyInfo.isUsingHostedKey + ? await executeWithRetry(() => executeToolRequest(toolId, tool, contextParams), { + requestId, + toolId, + envVarName: hostedKeyInfo.envVarName!, + executionContext, + }) + : await executeToolRequest(toolId, tool, contextParams) // Apply post-processing if available and not skipped let finalResult = result @@ -452,8 +854,22 @@ export async function executeTool( const endTime = new Date() const endTimeISO = endTime.toISOString() const duration = endTime.getTime() - startTime.getTime() + + if (hostedKeyInfo.isUsingHostedKey && finalResult.success) { + await applyHostedKeyCostToResult( + finalResult, + tool, + contextParams, + executionContext, + requestId + ) + } + + const strippedOutput = stripInternalFields(finalResult.output || {}) + return { ...finalResult, + output: strippedOutput, timing: { startTime: startTimeISO, endTime: endTimeISO, diff --git a/apps/sim/tools/knowledge/knowledge.test.ts b/apps/sim/tools/knowledge/knowledge.test.ts new file mode 100644 index 00000000000..1dd0f287711 --- /dev/null +++ b/apps/sim/tools/knowledge/knowledge.test.ts @@ -0,0 +1,202 @@ +/** + * @vitest-environment node + * + * Knowledge Tools Unit Tests + * + * Tests for knowledge_search and knowledge_upload_chunk tools, + * specifically the cost restructuring in transformResponse. 
+ */ + +import { describe, expect, it } from 'vitest' +import { knowledgeSearchTool } from '@/tools/knowledge/search' +import { knowledgeUploadChunkTool } from '@/tools/knowledge/upload_chunk' + +/** + * Creates a mock Response object for testing transformResponse + */ +function createMockResponse(data: unknown): Response { + return { + json: async () => data, + ok: true, + status: 200, + } as Response +} + +describe('Knowledge Tools', () => { + describe('knowledgeSearchTool', () => { + describe('transformResponse', () => { + it('should restructure cost information for logging', async () => { + const apiResponse = { + data: { + results: [{ content: 'test result', similarity: 0.95 }], + query: 'test query', + totalResults: 1, + cost: { + input: 0.00001042, + output: 0, + total: 0.00001042, + tokens: { + prompt: 521, + completion: 0, + total: 521, + }, + model: 'text-embedding-3-small', + pricing: { + input: 0.02, + output: 0, + updatedAt: '2025-07-10', + }, + }, + }, + } + + const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) + + expect(result.success).toBe(true) + expect(result.output).toEqual({ + results: [{ content: 'test result', similarity: 0.95 }], + query: 'test query', + totalResults: 1, + cost: { + input: 0.00001042, + output: 0, + total: 0.00001042, + }, + tokens: { + prompt: 521, + completion: 0, + total: 521, + }, + model: 'text-embedding-3-small', + }) + }) + + it('should handle response without cost information', async () => { + const apiResponse = { + data: { + results: [], + query: 'test query', + totalResults: 0, + }, + } + + const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) + + expect(result.success).toBe(true) + expect(result.output).toEqual({ + results: [], + query: 'test query', + totalResults: 0, + }) + expect(result.output.cost).toBeUndefined() + expect(result.output.tokens).toBeUndefined() + expect(result.output.model).toBeUndefined() + }) + + it('should 
handle response with partial cost information', async () => { + const apiResponse = { + data: { + results: [], + query: 'test query', + totalResults: 0, + cost: { + input: 0.001, + output: 0, + total: 0.001, + // No tokens or model + }, + }, + } + + const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) + + expect(result.success).toBe(true) + expect(result.output.cost).toEqual({ + input: 0.001, + output: 0, + total: 0.001, + }) + expect(result.output.tokens).toBeUndefined() + expect(result.output.model).toBeUndefined() + }) + }) + }) + + describe('knowledgeUploadChunkTool', () => { + describe('transformResponse', () => { + it('should restructure cost information for logging', async () => { + const apiResponse = { + data: { + id: 'chunk-123', + chunkIndex: 0, + content: 'test content', + contentLength: 12, + tokenCount: 3, + enabled: true, + documentId: 'doc-456', + documentName: 'Test Document', + createdAt: '2025-01-01T00:00:00Z', + updatedAt: '2025-01-01T00:00:00Z', + cost: { + input: 0.00000521, + output: 0, + total: 0.00000521, + tokens: { + prompt: 260, + completion: 0, + total: 260, + }, + model: 'text-embedding-3-small', + pricing: { + input: 0.02, + output: 0, + updatedAt: '2025-07-10', + }, + }, + }, + } + + const result = await knowledgeUploadChunkTool.transformResponse!( + createMockResponse(apiResponse) + ) + + expect(result.success).toBe(true) + expect(result.output.cost).toEqual({ + input: 0.00000521, + output: 0, + total: 0.00000521, + }) + expect(result.output.tokens).toEqual({ + prompt: 260, + completion: 0, + total: 260, + }) + expect(result.output.model).toBe('text-embedding-3-small') + expect(result.output.data.chunkId).toBe('chunk-123') + expect(result.output.documentId).toBe('doc-456') + }) + + it('should handle response without cost information', async () => { + const apiResponse = { + data: { + id: 'chunk-123', + chunkIndex: 0, + content: 'test content', + documentId: 'doc-456', + documentName: 'Test 
Document', + }, + } + + const result = await knowledgeUploadChunkTool.transformResponse!( + createMockResponse(apiResponse) + ) + + expect(result.success).toBe(true) + expect(result.output.cost).toBeUndefined() + expect(result.output.tokens).toBeUndefined() + expect(result.output.model).toBeUndefined() + expect(result.output.data.chunkId).toBe('chunk-123') + }) + }) + }) +}) diff --git a/apps/sim/tools/knowledge/search.ts b/apps/sim/tools/knowledge/search.ts index 574017d0831..af82111adc8 100644 --- a/apps/sim/tools/knowledge/search.ts +++ b/apps/sim/tools/knowledge/search.ts @@ -80,13 +80,24 @@ export const knowledgeSearchTool: ToolConfig = { const result = await response.json() const data = result.data || result + // Restructure cost: extract tokens/model to top level for logging + let costFields: Record = {} + if (data.cost && typeof data.cost === 'object') { + const { tokens, model, input, output: outputCost, total } = data.cost + costFields = { + cost: { input, output: outputCost, total }, + ...(tokens && { tokens }), + ...(model && { model }), + } + } + return { success: true, output: { results: data.results || [], query: data.query, totalResults: data.totalResults || 0, - cost: data.cost, + ...costFields, }, } }, diff --git a/apps/sim/tools/knowledge/upload_chunk.ts b/apps/sim/tools/knowledge/upload_chunk.ts index 24e07ee24a8..d7ad0fd93ba 100644 --- a/apps/sim/tools/knowledge/upload_chunk.ts +++ b/apps/sim/tools/knowledge/upload_chunk.ts @@ -52,6 +52,17 @@ export const knowledgeUploadChunkTool: ToolConfig = {} + if (data.cost && typeof data.cost === 'object') { + const { tokens, model, input, output: outputCost, total } = data.cost + costFields = { + cost: { input, output: outputCost, total }, + ...(tokens && { tokens }), + ...(model && { model }), + } + } + return { success: true, output: { @@ -68,7 +79,7 @@ export const knowledgeUploadChunkTool: ToolConfig { * Maps param IDs to their enrichment configuration. 
*/ schemaEnrichment?: Record - /** * Optional tool-level enrichment that modifies description and all parameters. * Use when multiple params depend on a single runtime value. */ toolEnrichment?: ToolEnrichmentConfig + + /** + * Hosted API key configuration for this tool. + * When configured, the tool can use Sim's hosted API keys if user doesn't provide their own. + * Usage is billed according to the pricing config. + */ + hosting?: ToolHostingConfig

} export interface TableRow { @@ -222,3 +231,72 @@ export interface ToolEnrichmentConfig { } } | null> } + +/** + * Pricing models for hosted API key usage + */ +/** Flat fee per API call (e.g., Serper search) */ +export interface PerRequestPricing { + type: 'per_request' + /** Cost per request in dollars */ + cost: number +} + +/** Result from custom pricing calculation */ +export interface CustomPricingResult { + /** Cost in dollars */ + cost: number + /** Optional metadata about the cost calculation (e.g., breakdown from API) */ + metadata?: Record +} + +/** Custom pricing calculated from params and response (e.g., Exa with different modes/result counts) */ +export interface CustomPricing

> { + type: 'custom' + /** Calculate cost based on request params and response output. Fields starting with _ are internal. */ + getCost: (params: P, output: Record) => number | CustomPricingResult +} + +/** Union of all pricing models */ +export type ToolHostingPricing

> = PerRequestPricing | CustomPricing

+ +/** + * Configuration for hosted API key support. + * When configured, the tool can use Sim's hosted API keys if user doesn't provide their own. + * + * ### Hosted key env var convention + * + * Keys follow a numbered naming convention driven by a count env var: + * + * 1. Set `{envKeyPrefix}_COUNT` to the number of keys available. + * 2. Provide each key as `{envKeyPrefix}_1`, `{envKeyPrefix}_2`, ..., `{envKeyPrefix}_N`. + * + * **Example** — for `envKeyPrefix: 'EXA_API_KEY'` with 5 keys: + * ``` + * EXA_API_KEY_COUNT=5 + * EXA_API_KEY_1=sk-... + * EXA_API_KEY_2=sk-... + * EXA_API_KEY_3=sk-... + * EXA_API_KEY_4=sk-... + * EXA_API_KEY_5=sk-... + * ``` + * + * Adding more keys only requires updating the count and adding the new env var — + * no code changes needed. + */ +export interface ToolHostingConfig

> { + /** + * Env var name prefix for hosted keys. + * At runtime, `{envKeyPrefix}_COUNT` is read to determine how many keys exist, + * then `{envKeyPrefix}_1` through `{envKeyPrefix}_N` are resolved. + */ + envKeyPrefix: string + /** The parameter name that receives the API key */ + apiKeyParam: string + /** BYOK provider ID for workspace key lookup */ + byokProviderId?: BYOKProviderId + /** Pricing when using hosted key */ + pricing: ToolHostingPricing

+ /** Hosted key rate limit configuration (required for hosted key distribution) */ + rateLimit: HostedKeyRateLimitConfig +} From 690b47a0bfb0b90b966faa0d2f73aaf5a129505c Mon Sep 17 00:00:00 2001 From: Waleed Date: Sun, 8 Mar 2026 17:27:05 -0700 Subject: [PATCH 4/6] chore(monitoring): remove SSE connection tracking and Bun.gc debug instrumentation (#3472) --- apps/sim/app/api/a2a/serve/[agentId]/route.ts | 20 +------------- apps/sim/app/api/mcp/events/route.ts | 4 --- apps/sim/app/api/wand/route.ts | 16 +---------- .../app/api/workflows/[id]/execute/route.ts | 11 -------- .../executions/[executionId]/stream/route.ts | 12 --------- apps/sim/lib/monitoring/memory-telemetry.ts | 22 ++------------- apps/sim/lib/monitoring/sse-connections.ts | 27 ------------------- 7 files changed, 4 insertions(+), 108 deletions(-) delete mode 100644 apps/sim/lib/monitoring/sse-connections.ts diff --git a/apps/sim/app/api/a2a/serve/[agentId]/route.ts b/apps/sim/app/api/a2a/serve/[agentId]/route.ts index c46dc4f51ee..20cb4879e39 100644 --- a/apps/sim/app/api/a2a/serve/[agentId]/route.ts +++ b/apps/sim/app/api/a2a/serve/[agentId]/route.ts @@ -19,7 +19,6 @@ import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server' import { SSE_HEADERS } from '@/lib/core/utils/sse' import { getBaseUrl } from '@/lib/core/utils/urls' import { markExecutionCancelled } from '@/lib/execution/cancellation' -import { decrementSSEConnections, incrementSSEConnections } from '@/lib/monitoring/sse-connections' import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils' import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils' import { @@ -631,11 +630,9 @@ async function handleMessageStream( } const encoder = new TextEncoder() - let messageStreamDecremented = false const stream = new ReadableStream({ async start(controller) { - incrementSSEConnections('a2a-message') const sendEvent = (event: string, data: unknown) => { try { const jsonRpcResponse = { @@ -845,19 
+842,10 @@ async function handleMessageStream( }) } finally { await releaseLock(lockKey, lockValue) - if (!messageStreamDecremented) { - messageStreamDecremented = true - decrementSSEConnections('a2a-message') - } controller.close() } }, - cancel() { - if (!messageStreamDecremented) { - messageStreamDecremented = true - decrementSSEConnections('a2a-message') - } - }, + cancel() {}, }) return new NextResponse(stream, { @@ -1042,22 +1030,16 @@ async function handleTaskResubscribe( { once: true } ) - let sseDecremented = false const cleanup = () => { isCancelled = true if (pollTimeoutId) { clearTimeout(pollTimeoutId) pollTimeoutId = null } - if (!sseDecremented) { - sseDecremented = true - decrementSSEConnections('a2a-resubscribe') - } } const stream = new ReadableStream({ async start(controller) { - incrementSSEConnections('a2a-resubscribe') const sendEvent = (event: string, data: unknown): boolean => { if (isCancelled || abortSignal.aborted) return false try { diff --git a/apps/sim/app/api/mcp/events/route.ts b/apps/sim/app/api/mcp/events/route.ts index 7def26b345e..fee4ca65fb5 100644 --- a/apps/sim/app/api/mcp/events/route.ts +++ b/apps/sim/app/api/mcp/events/route.ts @@ -14,7 +14,6 @@ import { getSession } from '@/lib/auth' import { SSE_HEADERS } from '@/lib/core/utils/sse' import { mcpConnectionManager } from '@/lib/mcp/connection-manager' import { mcpPubSub } from '@/lib/mcp/pubsub' -import { decrementSSEConnections, incrementSSEConnections } from '@/lib/monitoring/sse-connections' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('McpEventsSSE') @@ -50,14 +49,11 @@ export async function GET(request: NextRequest) { for (const unsub of unsubscribers) { unsub() } - decrementSSEConnections('mcp-events') logger.info(`SSE connection closed for workspace ${workspaceId}`) } const stream = new ReadableStream({ start(controller) { - incrementSSEConnections('mcp-events') - const send = (eventName: string, data: 
Record) => { if (cleaned) return try { diff --git a/apps/sim/app/api/wand/route.ts b/apps/sim/app/api/wand/route.ts index abebcc18948..fffc3b08e70 100644 --- a/apps/sim/app/api/wand/route.ts +++ b/apps/sim/app/api/wand/route.ts @@ -10,7 +10,6 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing' import { env } from '@/lib/core/config/env' import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags' import { generateRequestId } from '@/lib/core/utils/request' -import { decrementSSEConnections, incrementSSEConnections } from '@/lib/monitoring/sse-connections' import { enrichTableSchema } from '@/lib/table/llm/wand' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' import { extractResponseText, parseResponsesUsage } from '@/providers/openai/utils' @@ -331,14 +330,10 @@ export async function POST(req: NextRequest) { const encoder = new TextEncoder() const decoder = new TextDecoder() - let wandStreamClosed = false const readable = new ReadableStream({ async start(controller) { - incrementSSEConnections('wand') const reader = response.body?.getReader() if (!reader) { - wandStreamClosed = true - decrementSSEConnections('wand') controller.close() return } @@ -483,18 +478,9 @@ export async function POST(req: NextRequest) { controller.close() } finally { reader.releaseLock() - if (!wandStreamClosed) { - wandStreamClosed = true - decrementSSEConnections('wand') - } - } - }, - cancel() { - if (!wandStreamClosed) { - wandStreamClosed = true - decrementSSEConnections('wand') } }, + cancel() {}, }) return new Response(readable, { diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 5207f77c019..3c1e27080e5 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -22,7 +22,6 @@ import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/ev import { 
processInputFileFields } from '@/lib/execution/files' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' -import { decrementSSEConnections, incrementSSEConnections } from '@/lib/monitoring/sse-connections' import { cleanupExecutionBase64Cache, hydrateUserFilesWithBase64, @@ -764,7 +763,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const encoder = new TextEncoder() const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync) let isStreamClosed = false - let sseDecremented = false const eventWriter = createExecutionEventWriter(executionId) setExecutionMeta(executionId, { @@ -775,7 +773,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const stream = new ReadableStream({ async start(controller) { - incrementSSEConnections('workflow-execute') let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null const sendEvent = (event: ExecutionEvent) => { @@ -1159,10 +1156,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: if (executionId) { await cleanupExecutionBase64Cache(executionId) } - if (!sseDecremented) { - sseDecremented = true - decrementSSEConnections('workflow-execute') - } if (!isStreamClosed) { try { controller.enqueue(encoder.encode('data: [DONE]\n\n')) @@ -1174,10 +1167,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: cancel() { isStreamClosed = true logger.info(`[${requestId}] Client disconnected from SSE stream`) - if (!sseDecremented) { - sseDecremented = true - decrementSSEConnections('workflow-execute') - } }, }) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index 88e3c874470..1f77ff391d6 100644 --- 
a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -7,7 +7,6 @@ import { getExecutionMeta, readExecutionEvents, } from '@/lib/execution/event-buffer' -import { decrementSSEConnections, incrementSSEConnections } from '@/lib/monitoring/sse-connections' import { formatSSEEvent } from '@/lib/workflows/executor/execution-events' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' @@ -74,10 +73,8 @@ export async function GET( let closed = false - let sseDecremented = false const stream = new ReadableStream({ async start(controller) { - incrementSSEConnections('execution-stream-reconnect') let lastEventId = fromEventId const pollDeadline = Date.now() + MAX_POLL_DURATION_MS @@ -145,20 +142,11 @@ export async function GET( controller.close() } catch {} } - } finally { - if (!sseDecremented) { - sseDecremented = true - decrementSSEConnections('execution-stream-reconnect') - } } }, cancel() { closed = true logger.info('Client disconnected from reconnection stream', { executionId }) - if (!sseDecremented) { - sseDecremented = true - decrementSSEConnections('execution-stream-reconnect') - } }, }) diff --git a/apps/sim/lib/monitoring/memory-telemetry.ts b/apps/sim/lib/monitoring/memory-telemetry.ts index ef7867ce84b..2845ee1def2 100644 --- a/apps/sim/lib/monitoring/memory-telemetry.ts +++ b/apps/sim/lib/monitoring/memory-telemetry.ts @@ -1,16 +1,10 @@ /** - * Periodic memory telemetry for diagnosing heap growth in production. - * Logs process.memoryUsage(), V8 heap stats, and active SSE connection - * counts every 60s, enabling correlation between connection leaks and - * memory spikes. + * Periodic memory telemetry for monitoring heap growth in production. + * Logs process.memoryUsage() and V8 heap stats every 60s. 
*/ import v8 from 'node:v8' import { createLogger } from '@sim/logger' -import { - getActiveSSEConnectionCount, - getActiveSSEConnectionsByRoute, -} from '@/lib/monitoring/sse-connections' const logger = createLogger('MemoryTelemetry', { logLevel: 'INFO' }) @@ -23,16 +17,6 @@ export function startMemoryTelemetry(intervalMs = 60_000) { started = true const timer = setInterval(() => { - // Trigger opportunistic (non-blocking) garbage collection if running on Bun. - // This signals JSC GC + mimalloc page purge without blocking the event loop, - // helping reclaim RSS that mimalloc otherwise retains under sustained load. - const bunGlobal = (globalThis as Record).Bun as - | { gc?: (force: boolean) => void } - | undefined - if (typeof bunGlobal?.gc === 'function') { - bunGlobal.gc(false) - } - const mem = process.memoryUsage() const heap = v8.getHeapStatistics() @@ -49,8 +33,6 @@ export function startMemoryTelemetry(intervalMs = 60_000) { ? process.getActiveResourcesInfo().length : -1, uptimeMin: Math.round(process.uptime() / 60), - activeSSEConnections: getActiveSSEConnectionCount(), - sseByRoute: getActiveSSEConnectionsByRoute(), }) }, intervalMs) timer.unref() diff --git a/apps/sim/lib/monitoring/sse-connections.ts b/apps/sim/lib/monitoring/sse-connections.ts deleted file mode 100644 index b6394ddff6e..00000000000 --- a/apps/sim/lib/monitoring/sse-connections.ts +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Tracks active SSE connections by route for memory leak diagnostics. - * Logged alongside periodic memory telemetry to correlate connection - * counts with heap growth. - */ - -const connections = new Map() - -export function incrementSSEConnections(route: string) { - connections.set(route, (connections.get(route) ?? 0) + 1) -} - -export function decrementSSEConnections(route: string) { - const count = (connections.get(route) ?? 
0) - 1 - if (count <= 0) connections.delete(route) - else connections.set(route, count) -} - -export function getActiveSSEConnectionCount(): number { - let total = 0 - for (const count of connections.values()) total += count - return total -} - -export function getActiveSSEConnectionsByRoute(): Record { - return Object.fromEntries(connections) -} From f88926a6a8675e52cf7594139ddcc61748ba2163 Mon Sep 17 00:00:00 2001 From: Waleed Date: Mon, 9 Mar 2026 10:11:36 -0700 Subject: [PATCH 5/6] fix(webhooks): return empty 200 for Slack to close modals cleanly (#3492) * fix(webhooks): return empty 200 for Slack to close modals cleanly * fix(webhooks): add clarifying comment on Slack error path trade-off --- apps/sim/lib/webhooks/processor.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/apps/sim/lib/webhooks/processor.ts b/apps/sim/lib/webhooks/processor.ts index c3af3843518..fc1de215771 100644 --- a/apps/sim/lib/webhooks/processor.ts +++ b/apps/sim/lib/webhooks/processor.ts @@ -1166,6 +1166,12 @@ export async function queueWebhookExecution( }) } + // Slack requires an empty 200 for interactive payloads (view_submission, block_actions, etc.) + // A JSON body like {"message":"..."} is not a recognized response format and causes modal errors + if (foundWebhook.provider === 'slack') { + return new NextResponse(null, { status: 200 }) + } + // Twilio Voice requires TwiML XML response if (foundWebhook.provider === 'twilio_voice') { const providerConfig = (foundWebhook.providerConfig as Record) || {} @@ -1211,6 +1217,12 @@ export async function queueWebhookExecution( ) } + if (foundWebhook.provider === 'slack') { + // Return empty 200 to avoid Slack showing an error dialog to the user, + // even though processing failed. The error is already logged above. 
+ return new NextResponse(null, { status: 200 }) + } + if (foundWebhook.provider === 'twilio_voice') { const errorTwiml = ` From 635179d696a1b29c441c5b01dc7c3fb163730543 Mon Sep 17 00:00:00 2001 From: Theodore Li Date: Mon, 9 Mar 2026 10:31:54 -0700 Subject: [PATCH 6/6] Revert "feat(hosted key): Add exa hosted key (#3221)" (#3495) This reverts commit 158d5236bc6adac9d62fa1ec00bdff5a181611da. Co-authored-by: Theodore Li --- .../api/workspaces/[id]/byok-keys/route.ts | 2 +- .../hooks/use-editor-subblock-layout.ts | 4 - .../workflow-block/workflow-block.tsx | 2 - .../settings-modal/components/byok/byok.tsx | 11 +- apps/sim/blocks/blocks/exa.ts | 14 +- apps/sim/blocks/types.ts | 1 - .../handlers/generic/generic-handler.test.ts | 215 +++++ .../handlers/generic/generic-handler.ts | 22 +- apps/sim/hooks/queries/byok-keys.ts | 3 +- apps/sim/lib/api-key/byok.ts | 3 +- apps/sim/lib/billing/core/usage-log.ts | 14 +- .../tool-executor/integration-tools.ts | 11 +- .../hosted-key-rate-limiter.test.ts | 521 ----------- .../hosted-key/hosted-key-rate-limiter.ts | 349 -------- .../lib/core/rate-limiter/hosted-key/index.ts | 17 - .../lib/core/rate-limiter/hosted-key/types.ts | 108 --- apps/sim/lib/core/rate-limiter/index.ts | 15 - .../rate-limiter/storage/db-token-bucket.ts | 2 +- apps/sim/lib/core/telemetry.ts | 49 -- apps/sim/lib/logs/execution/logger.ts | 2 - .../sim/lib/logs/execution/logging-factory.ts | 6 - .../sim/lib/workflows/subblocks/visibility.ts | 10 - apps/sim/providers/anthropic/core.ts | 19 +- apps/sim/providers/azure-openai/index.ts | 5 +- apps/sim/providers/bedrock/index.ts | 6 +- apps/sim/providers/cerebras/index.ts | 10 +- apps/sim/providers/deepseek/index.ts | 10 +- apps/sim/providers/gemini/core.ts | 5 +- apps/sim/providers/groq/index.ts | 10 +- apps/sim/providers/index.ts | 7 - apps/sim/providers/mistral/index.ts | 9 +- apps/sim/providers/ollama/index.ts | 10 +- apps/sim/providers/openai/core.ts | 9 +- apps/sim/providers/openrouter/index.ts | 5 +- 
apps/sim/providers/types.ts | 3 +- apps/sim/providers/utils.test.ts | 1 - apps/sim/providers/utils.ts | 15 - apps/sim/providers/vllm/index.ts | 9 +- apps/sim/providers/xai/index.ts | 10 +- apps/sim/serializer/index.ts | 2 - apps/sim/tools/exa/answer.ts | 20 - apps/sim/tools/exa/find_similar_links.ts | 20 - apps/sim/tools/exa/get_contents.ts | 20 - apps/sim/tools/exa/search.ts | 20 - apps/sim/tools/exa/types.ts | 9 - apps/sim/tools/index.test.ts | 824 +----------------- apps/sim/tools/index.ts | 420 +-------- apps/sim/tools/knowledge/knowledge.test.ts | 202 ----- apps/sim/tools/knowledge/search.ts | 13 +- apps/sim/tools/knowledge/upload_chunk.ts | 13 +- apps/sim/tools/params.ts | 8 - apps/sim/tools/types.ts | 80 +- 52 files changed, 335 insertions(+), 2840 deletions(-) delete mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts delete mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts delete mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/index.ts delete mode 100644 apps/sim/lib/core/rate-limiter/hosted-key/types.ts delete mode 100644 apps/sim/tools/knowledge/knowledge.test.ts diff --git a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts index f4bddc4298b..ab4c9600df9 100644 --- a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts +++ b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts @@ -13,7 +13,7 @@ import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/per const logger = createLogger('WorkspaceBYOKKeysAPI') -const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral', 'exa'] as const +const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral'] as const const UpsertKeySchema = z.object({ providerId: z.enum(VALID_PROVIDERS), diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts index 0cf118e428e..50d3f416e43 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-editor-subblock-layout.ts @@ -3,7 +3,6 @@ import { buildCanonicalIndex, evaluateSubBlockCondition, isSubBlockFeatureEnabled, - isSubBlockHiddenByHostedKey, isSubBlockVisibleForMode, } from '@/lib/workflows/subblocks/visibility' import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types' @@ -109,9 +108,6 @@ export function useEditorSubblockLayout( // Check required feature if specified - declarative feature gating if (!isSubBlockFeatureEnabled(block)) return false - // Hide tool API key fields when hosted - if (isSubBlockHiddenByHostedKey(block)) return false - // Special handling for trigger-config type (legacy trigger configuration UI) if (block.type === ('trigger-config' as SubBlockType)) { const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx index 10b56df0dc7..5a559801917 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx @@ -16,7 +16,6 @@ import { evaluateSubBlockCondition, hasAdvancedValues, isSubBlockFeatureEnabled, - isSubBlockHiddenByHostedKey, isSubBlockVisibleForMode, resolveDependencyValue, } from '@/lib/workflows/subblocks/visibility' @@ -978,7 +977,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({ if (block.hidden) return false if 
(block.hideFromPreview) return false if (!isSubBlockFeatureEnabled(block)) return false - if (isSubBlockHiddenByHostedKey(block)) return false const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx index 39f308d9e8d..b8304402b3b 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/byok/byok.tsx @@ -13,15 +13,15 @@ import { ModalFooter, ModalHeader, } from '@/components/emcn' -import { AnthropicIcon, ExaAIIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons' +import { AnthropicIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons' import { Skeleton } from '@/components/ui' import { type BYOKKey, + type BYOKProviderId, useBYOKKeys, useDeleteBYOKKey, useUpsertBYOKKey, } from '@/hooks/queries/byok-keys' -import type { BYOKProviderId } from '@/tools/types' const logger = createLogger('BYOKSettings') @@ -60,13 +60,6 @@ const PROVIDERS: { description: 'LLM calls and Knowledge Base OCR', placeholder: 'Enter your API key', }, - { - id: 'exa', - name: 'Exa', - icon: ExaAIIcon, - description: 'AI-powered search and research', - placeholder: 'Enter your Exa API key', - }, ] function BYOKKeySkeleton() { diff --git a/apps/sim/blocks/blocks/exa.ts b/apps/sim/blocks/blocks/exa.ts index 193fe9c292d..dfdbd327952 100644 --- a/apps/sim/blocks/blocks/exa.ts +++ b/apps/sim/blocks/blocks/exa.ts @@ -309,7 +309,7 @@ export const ExaBlock: BlockConfig = { value: () => 'exa-research', condition: { field: 'operation', value: 'exa_research' }, }, - // API Key — hidden when hosted for operations with hosted key support + // API Key 
(common) { id: 'apiKey', title: 'API Key', @@ -317,18 +317,6 @@ export const ExaBlock: BlockConfig = { placeholder: 'Enter your Exa API key', password: true, required: true, - hideWhenHosted: true, - condition: { field: 'operation', value: 'exa_research', not: true }, - }, - // API Key — always visible for research (no hosted key support) - { - id: 'apiKey', - title: 'API Key', - type: 'short-input', - placeholder: 'Enter your Exa API key', - password: true, - required: true, - condition: { field: 'operation', value: 'exa_research' }, }, ], tools: { diff --git a/apps/sim/blocks/types.ts b/apps/sim/blocks/types.ts index 056632c0711..fe486a8e05e 100644 --- a/apps/sim/blocks/types.ts +++ b/apps/sim/blocks/types.ts @@ -253,7 +253,6 @@ export interface SubBlockConfig { hidden?: boolean hideFromPreview?: boolean // Hide this subblock from the workflow block preview requiresFeature?: string // Environment variable name that must be truthy for this subblock to be visible - hideWhenHosted?: boolean // Hide this subblock when running on hosted sim description?: string tooltip?: string // Tooltip text displayed via info icon next to the title value?: (params: Record) => string diff --git a/apps/sim/executor/handlers/generic/generic-handler.test.ts b/apps/sim/executor/handlers/generic/generic-handler.test.ts index cf18f8a254a..3a107df40a0 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.test.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.test.ts @@ -147,4 +147,219 @@ describe('GenericBlockHandler', () => { 'Block execution of Some Custom Tool failed with no error message' ) }) + + describe('Knowledge block cost tracking', () => { + beforeEach(() => { + // Set up knowledge block mock + mockBlock = { + ...mockBlock, + config: { tool: 'knowledge_search', params: {} }, + } + + mockTool = { + ...mockTool, + id: 'knowledge_search', + name: 'Knowledge Search', + } + + mockGetTool.mockImplementation((toolId) => { + if (toolId === 'knowledge_search') { + 
return mockTool + } + return undefined + }) + }) + + it.concurrent( + 'should extract and restructure cost information from knowledge tools', + async () => { + const inputs = { query: 'test query' } + const mockToolResponse = { + success: true, + output: { + results: [], + query: 'test query', + totalResults: 0, + cost: { + input: 0.00001042, + output: 0, + total: 0.00001042, + tokens: { + input: 521, + output: 0, + total: 521, + }, + model: 'text-embedding-3-small', + pricing: { + input: 0.02, + output: 0, + updatedAt: '2025-07-10', + }, + }, + }, + } + + mockExecuteTool.mockResolvedValue(mockToolResponse) + + const result = await handler.execute(mockContext, mockBlock, inputs) + + // Verify cost information is restructured correctly for enhanced logging + expect(result).toEqual({ + results: [], + query: 'test query', + totalResults: 0, + cost: { + input: 0.00001042, + output: 0, + total: 0.00001042, + }, + tokens: { + input: 521, + output: 0, + total: 521, + }, + model: 'text-embedding-3-small', + }) + } + ) + + it.concurrent('should handle knowledge_upload_chunk cost information', async () => { + // Update to upload_chunk tool + mockBlock.config.tool = 'knowledge_upload_chunk' + mockTool.id = 'knowledge_upload_chunk' + mockTool.name = 'Knowledge Upload Chunk' + + mockGetTool.mockImplementation((toolId) => { + if (toolId === 'knowledge_upload_chunk') { + return mockTool + } + return undefined + }) + + const inputs = { content: 'test content' } + const mockToolResponse = { + success: true, + output: { + data: { + id: 'chunk-123', + content: 'test content', + chunkIndex: 0, + }, + message: 'Successfully uploaded chunk', + documentId: 'doc-123', + cost: { + input: 0.00000521, + output: 0, + total: 0.00000521, + tokens: { + input: 260, + output: 0, + total: 260, + }, + model: 'text-embedding-3-small', + pricing: { + input: 0.02, + output: 0, + updatedAt: '2025-07-10', + }, + }, + }, + } + + mockExecuteTool.mockResolvedValue(mockToolResponse) + + const result = await 
handler.execute(mockContext, mockBlock, inputs) + + // Verify cost information is restructured correctly + expect(result).toEqual({ + data: { + id: 'chunk-123', + content: 'test content', + chunkIndex: 0, + }, + message: 'Successfully uploaded chunk', + documentId: 'doc-123', + cost: { + input: 0.00000521, + output: 0, + total: 0.00000521, + }, + tokens: { + input: 260, + output: 0, + total: 260, + }, + model: 'text-embedding-3-small', + }) + }) + + it('should pass through output unchanged for knowledge tools without cost info', async () => { + const inputs = { query: 'test query' } + const mockToolResponse = { + success: true, + output: { + results: [], + query: 'test query', + totalResults: 0, + // No cost information + }, + } + + mockExecuteTool.mockResolvedValue(mockToolResponse) + + const result = await handler.execute(mockContext, mockBlock, inputs) + + // Should return original output without cost transformation + expect(result).toEqual({ + results: [], + query: 'test query', + totalResults: 0, + }) + }) + + it.concurrent( + 'should process cost info for all tools (universal cost extraction)', + async () => { + mockBlock.config.tool = 'some_other_tool' + mockTool.id = 'some_other_tool' + + mockGetTool.mockImplementation((toolId) => { + if (toolId === 'some_other_tool') { + return mockTool + } + return undefined + }) + + const inputs = { param: 'value' } + const mockToolResponse = { + success: true, + output: { + result: 'success', + cost: { + input: 0.001, + output: 0.002, + total: 0.003, + tokens: { input: 100, output: 50, total: 150 }, + model: 'some-model', + }, + }, + } + + mockExecuteTool.mockResolvedValue(mockToolResponse) + + const result = await handler.execute(mockContext, mockBlock, inputs) + + expect(result).toEqual({ + result: 'success', + cost: { + input: 0.001, + output: 0.002, + total: 0.003, + }, + tokens: { input: 100, output: 50, total: 150 }, + model: 'some-model', + }) + } + ) + }) }) diff --git 
a/apps/sim/executor/handlers/generic/generic-handler.ts b/apps/sim/executor/handlers/generic/generic-handler.ts index 6c9e1bb53ac..ff9cbbf440d 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.ts @@ -98,7 +98,27 @@ export class GenericBlockHandler implements BlockHandler { throw error } - return result.output + const output = result.output + let cost = null + + if (output?.cost) { + cost = output.cost + } + + if (cost) { + return { + ...output, + cost: { + input: cost.input, + output: cost.output, + total: cost.total, + }, + tokens: cost.tokens, + model: cost.model, + } + } + + return output } catch (error: any) { if (!error.message || error.message === 'undefined (undefined)') { let errorMessage = `Block execution of ${tool?.name || block.config.tool} failed` diff --git a/apps/sim/hooks/queries/byok-keys.ts b/apps/sim/hooks/queries/byok-keys.ts index 167238f4a19..26d348d5a7f 100644 --- a/apps/sim/hooks/queries/byok-keys.ts +++ b/apps/sim/hooks/queries/byok-keys.ts @@ -1,10 +1,11 @@ import { createLogger } from '@sim/logger' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import { API_ENDPOINTS } from '@/stores/constants' -import type { BYOKProviderId } from '@/tools/types' const logger = createLogger('BYOKKeysQueries') +export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral' + export interface BYOKKey { id: string providerId: BYOKProviderId diff --git a/apps/sim/lib/api-key/byok.ts b/apps/sim/lib/api-key/byok.ts index 127feb9af31..04a35adb426 100644 --- a/apps/sim/lib/api-key/byok.ts +++ b/apps/sim/lib/api-key/byok.ts @@ -7,10 +7,11 @@ import { isHosted } from '@/lib/core/config/feature-flags' import { decryptSecret } from '@/lib/core/security/encryption' import { getHostedModels } from '@/providers/models' import { useProvidersStore } from '@/stores/providers/store' -import type { BYOKProviderId } from '@/tools/types' const 
logger = createLogger('BYOKKeys') +export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral' + export interface BYOKKeyResult { apiKey: string isBYOK: true diff --git a/apps/sim/lib/billing/core/usage-log.ts b/apps/sim/lib/billing/core/usage-log.ts index 90be5c16d0b..b21fb552f7a 100644 --- a/apps/sim/lib/billing/core/usage-log.ts +++ b/apps/sim/lib/billing/core/usage-log.ts @@ -22,13 +22,12 @@ export type UsageLogSource = 'workflow' | 'wand' | 'copilot' | 'mcp_copilot' export interface ModelUsageMetadata { inputTokens: number outputTokens: number - toolCost?: number } /** - * Metadata for 'fixed' category charges (e.g., tool cost breakdown) + * Metadata for 'fixed' category charges (currently empty, extensible) */ -export type FixedUsageMetadata = Record +export type FixedUsageMetadata = Record /** * Union type for all metadata types @@ -45,7 +44,6 @@ export interface LogModelUsageParams { inputTokens: number outputTokens: number cost: number - toolCost?: number workspaceId?: string workflowId?: string executionId?: string @@ -62,8 +60,6 @@ export interface LogFixedUsageParams { workspaceId?: string workflowId?: string executionId?: string - /** Optional metadata (e.g., tool cost breakdown from API) */ - metadata?: FixedUsageMetadata } /** @@ -78,7 +74,6 @@ export async function logModelUsage(params: LogModelUsageParams): Promise const metadata: ModelUsageMetadata = { inputTokens: params.inputTokens, outputTokens: params.outputTokens, - ...(params.toolCost != null && params.toolCost > 0 && { toolCost: params.toolCost }), } await db.insert(usageLog).values({ @@ -124,7 +119,7 @@ export async function logFixedUsage(params: LogFixedUsageParams): Promise category: 'fixed', source: params.source, description: params.description, - metadata: params.metadata ?? null, + metadata: null, cost: params.cost.toString(), workspaceId: params.workspaceId ?? null, workflowId: params.workflowId ?? 
null, @@ -160,7 +155,6 @@ export interface LogWorkflowUsageBatchParams { { total: number tokens: { input: number; output: number } - toolCost?: number } > } @@ -213,8 +207,6 @@ export async function logWorkflowUsageBatch(params: LogWorkflowUsageBatchParams) metadata: { inputTokens: modelData.tokens.input, outputTokens: modelData.tokens.output, - ...(modelData.toolCost != null && - modelData.toolCost > 0 && { toolCost: modelData.toolCost }), }, cost: modelData.total.toString(), workspaceId: params.workspaceId ?? null, diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts index 4a81f5c46fd..8464e42ca57 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts @@ -6,18 +6,19 @@ import type { ToolCallResult, ToolCallState, } from '@/lib/copilot/orchestrator/types' -import { isHosted } from '@/lib/core/config/feature-flags' import { generateRequestId } from '@/lib/core/utils/request' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils' import { resolveEnvVarReferences } from '@/executor/utils/reference-validation' import { executeTool } from '@/tools' -import type { ToolConfig } from '@/tools/types' import { resolveToolId } from '@/tools/utils' export async function executeIntegrationToolDirect( toolCall: ToolCallState, - toolConfig: ToolConfig, + toolConfig: { + oauth?: { required?: boolean; provider?: string } + params?: { apiKey?: { required?: boolean } } + }, context: ExecutionContext ): Promise { const { userId, workflowId } = context @@ -73,8 +74,7 @@ export async function executeIntegrationToolDirect( executionParams.accessToken = accessToken } - const hasHostedKeySupport = isHosted && !!toolConfig.hosting - if (toolConfig.params?.apiKey?.required && !executionParams.apiKey && 
!hasHostedKeySupport) { + if (toolConfig.params?.apiKey?.required && !executionParams.apiKey) { return { success: false, error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`, @@ -83,7 +83,6 @@ export async function executeIntegrationToolDirect( executionParams._context = { workflowId, - workspaceId, userId, } diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts deleted file mode 100644 index be199a24cfa..00000000000 --- a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.test.ts +++ /dev/null @@ -1,521 +0,0 @@ -import { loggerMock } from '@sim/testing' -import { afterEach, beforeEach, describe, expect, it, type Mock, vi } from 'vitest' -import type { - ConsumeResult, - RateLimitStorageAdapter, - TokenStatus, -} from '@/lib/core/rate-limiter/storage' -import { HostedKeyRateLimiter } from './hosted-key-rate-limiter' -import type { CustomRateLimit, PerRequestRateLimit } from './types' - -vi.mock('@sim/logger', () => loggerMock) - -interface MockAdapter { - consumeTokens: Mock - getTokenStatus: Mock - resetBucket: Mock -} - -const createMockAdapter = (): MockAdapter => ({ - consumeTokens: vi.fn(), - getTokenStatus: vi.fn(), - resetBucket: vi.fn(), -}) - -describe('HostedKeyRateLimiter', () => { - const testProvider = 'exa' - const envKeyPrefix = 'EXA_API_KEY' - let mockAdapter: MockAdapter - let rateLimiter: HostedKeyRateLimiter - let originalEnv: NodeJS.ProcessEnv - - const perRequestRateLimit: PerRequestRateLimit = { - mode: 'per_request', - requestsPerMinute: 10, - } - - beforeEach(() => { - vi.clearAllMocks() - mockAdapter = createMockAdapter() - rateLimiter = new HostedKeyRateLimiter(mockAdapter as RateLimitStorageAdapter) - - originalEnv = { ...process.env } - process.env.EXA_API_KEY_COUNT = '3' - process.env.EXA_API_KEY_1 = 'test-key-1' - process.env.EXA_API_KEY_2 = 
'test-key-2' - process.env.EXA_API_KEY_3 = 'test-key-3' - }) - - afterEach(() => { - process.env = originalEnv - }) - - describe('acquireKey', () => { - it('should return error when no keys are configured', async () => { - const allowedResult: ConsumeResult = { - allowed: true, - tokensRemaining: 9, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedResult) - - process.env.EXA_API_KEY_COUNT = undefined - process.env.EXA_API_KEY_1 = undefined - process.env.EXA_API_KEY_2 = undefined - process.env.EXA_API_KEY_3 = undefined - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-1' - ) - - expect(result.success).toBe(false) - expect(result.error).toContain('No hosted keys configured') - }) - - it('should rate limit billing actor when they exceed their limit', async () => { - const rateLimitedResult: ConsumeResult = { - allowed: false, - tokensRemaining: 0, - resetAt: new Date(Date.now() + 30000), - } - mockAdapter.consumeTokens.mockResolvedValue(rateLimitedResult) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-123' - ) - - expect(result.success).toBe(false) - expect(result.billingActorRateLimited).toBe(true) - expect(result.retryAfterMs).toBeDefined() - expect(result.error).toContain('Rate limit exceeded') - }) - - it('should allow billing actor within their rate limit', async () => { - const allowedResult: ConsumeResult = { - allowed: true, - tokensRemaining: 9, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedResult) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-123' - ) - - expect(result.success).toBe(true) - expect(result.billingActorRateLimited).toBeUndefined() - expect(result.key).toBe('test-key-1') - }) - - it('should distribute requests across keys round-robin style', 
async () => { - const allowedResult: ConsumeResult = { - allowed: true, - tokensRemaining: 9, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedResult) - - const r1 = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-1' - ) - const r2 = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-2' - ) - const r3 = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-3' - ) - const r4 = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-4' - ) - - expect(r1.keyIndex).toBe(0) - expect(r2.keyIndex).toBe(1) - expect(r3.keyIndex).toBe(2) - expect(r4.keyIndex).toBe(0) // Wraps back - }) - - it('should handle partial key availability', async () => { - const allowedResult: ConsumeResult = { - allowed: true, - tokensRemaining: 9, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedResult) - - process.env.EXA_API_KEY_2 = undefined - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-1' - ) - - expect(result.success).toBe(true) - expect(result.key).toBe('test-key-1') - expect(result.envVarName).toBe('EXA_API_KEY_1') - - const r2 = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - perRequestRateLimit, - 'workspace-2' - ) - expect(r2.keyIndex).toBe(2) // Skips missing key 1 - expect(r2.envVarName).toBe('EXA_API_KEY_3') - }) - }) - - describe('acquireKey with custom rate limit', () => { - const customRateLimit: CustomRateLimit = { - mode: 'custom', - requestsPerMinute: 5, - dimensions: [ - { - name: 'tokens', - limitPerMinute: 1000, - extractUsage: (_params, response) => (response.tokenCount as number) ?? 
0, - }, - ], - } - - it('should enforce requestsPerMinute for custom mode', async () => { - const rateLimitedResult: ConsumeResult = { - allowed: false, - tokensRemaining: 0, - resetAt: new Date(Date.now() + 30000), - } - mockAdapter.consumeTokens.mockResolvedValue(rateLimitedResult) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - customRateLimit, - 'workspace-1' - ) - - expect(result.success).toBe(false) - expect(result.billingActorRateLimited).toBe(true) - expect(result.error).toContain('Rate limit exceeded') - }) - - it('should allow request when actor request limit and dimensions have budget', async () => { - const allowedConsume: ConsumeResult = { - allowed: true, - tokensRemaining: 4, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) - - const budgetAvailable: TokenStatus = { - tokensAvailable: 500, - maxTokens: 2000, - lastRefillAt: new Date(), - nextRefillAt: new Date(Date.now() + 60000), - } - mockAdapter.getTokenStatus.mockResolvedValue(budgetAvailable) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - customRateLimit, - 'workspace-1' - ) - - expect(result.success).toBe(true) - expect(result.key).toBe('test-key-1') - expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(1) - expect(mockAdapter.getTokenStatus).toHaveBeenCalledTimes(1) - }) - - it('should block request when a dimension is depleted', async () => { - const allowedConsume: ConsumeResult = { - allowed: true, - tokensRemaining: 4, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) - - const depleted: TokenStatus = { - tokensAvailable: 0, - maxTokens: 2000, - lastRefillAt: new Date(), - nextRefillAt: new Date(Date.now() + 45000), - } - mockAdapter.getTokenStatus.mockResolvedValue(depleted) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - customRateLimit, - 'workspace-1' - ) - - 
expect(result.success).toBe(false) - expect(result.billingActorRateLimited).toBe(true) - expect(result.error).toContain('tokens') - }) - - it('should pre-check all dimensions and block on first depleted one', async () => { - const multiDimensionConfig: CustomRateLimit = { - mode: 'custom', - requestsPerMinute: 10, - dimensions: [ - { - name: 'tokens', - limitPerMinute: 1000, - extractUsage: (_p, r) => (r.tokenCount as number) ?? 0, - }, - { - name: 'search_units', - limitPerMinute: 50, - extractUsage: (_p, r) => (r.searchUnits as number) ?? 0, - }, - ], - } - - const allowedConsume: ConsumeResult = { - allowed: true, - tokensRemaining: 9, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(allowedConsume) - - const tokensBudget: TokenStatus = { - tokensAvailable: 500, - maxTokens: 2000, - lastRefillAt: new Date(), - nextRefillAt: new Date(Date.now() + 60000), - } - const searchUnitsDepleted: TokenStatus = { - tokensAvailable: 0, - maxTokens: 100, - lastRefillAt: new Date(), - nextRefillAt: new Date(Date.now() + 30000), - } - mockAdapter.getTokenStatus - .mockResolvedValueOnce(tokensBudget) - .mockResolvedValueOnce(searchUnitsDepleted) - - const result = await rateLimiter.acquireKey( - testProvider, - envKeyPrefix, - multiDimensionConfig, - 'workspace-1' - ) - - expect(result.success).toBe(false) - expect(result.billingActorRateLimited).toBe(true) - expect(result.error).toContain('search_units') - }) - }) - - describe('reportUsage', () => { - const customConfig: CustomRateLimit = { - mode: 'custom', - requestsPerMinute: 5, - dimensions: [ - { - name: 'tokens', - limitPerMinute: 1000, - extractUsage: (_params, response) => (response.tokenCount as number) ?? 
0, - }, - ], - } - - it('should consume actual tokens from dimension bucket after execution', async () => { - const consumeResult: ConsumeResult = { - allowed: true, - tokensRemaining: 850, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(consumeResult) - - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - customConfig, - {}, - { tokenCount: 150 } - ) - - expect(result.dimensions).toHaveLength(1) - expect(result.dimensions[0].name).toBe('tokens') - expect(result.dimensions[0].consumed).toBe(150) - expect(result.dimensions[0].allowed).toBe(true) - expect(result.dimensions[0].tokensRemaining).toBe(850) - - expect(mockAdapter.consumeTokens).toHaveBeenCalledWith( - 'hosted:exa:actor:workspace-1:tokens', - 150, - expect.objectContaining({ maxTokens: 2000, refillRate: 1000 }) - ) - }) - - it('should handle overdrawn bucket gracefully (optimistic concurrency)', async () => { - const overdrawnResult: ConsumeResult = { - allowed: false, - tokensRemaining: 0, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(overdrawnResult) - - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - customConfig, - {}, - { tokenCount: 500 } - ) - - expect(result.dimensions[0].allowed).toBe(false) - expect(result.dimensions[0].consumed).toBe(500) - }) - - it('should skip consumption when extractUsage returns 0', async () => { - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - customConfig, - {}, - { tokenCount: 0 } - ) - - expect(result.dimensions).toHaveLength(1) - expect(result.dimensions[0].consumed).toBe(0) - expect(mockAdapter.consumeTokens).not.toHaveBeenCalled() - }) - - it('should handle multiple dimensions independently', async () => { - const multiConfig: CustomRateLimit = { - mode: 'custom', - requestsPerMinute: 10, - dimensions: [ - { - name: 'tokens', - limitPerMinute: 1000, - extractUsage: (_p, r) => 
(r.tokenCount as number) ?? 0, - }, - { - name: 'search_units', - limitPerMinute: 50, - extractUsage: (_p, r) => (r.searchUnits as number) ?? 0, - }, - ], - } - - const tokensConsumed: ConsumeResult = { - allowed: true, - tokensRemaining: 800, - resetAt: new Date(Date.now() + 60000), - } - const searchConsumed: ConsumeResult = { - allowed: true, - tokensRemaining: 47, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens - .mockResolvedValueOnce(tokensConsumed) - .mockResolvedValueOnce(searchConsumed) - - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - multiConfig, - {}, - { tokenCount: 200, searchUnits: 3 } - ) - - expect(result.dimensions).toHaveLength(2) - expect(result.dimensions[0]).toEqual({ - name: 'tokens', - consumed: 200, - allowed: true, - tokensRemaining: 800, - }) - expect(result.dimensions[1]).toEqual({ - name: 'search_units', - consumed: 3, - allowed: true, - tokensRemaining: 47, - }) - - expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(2) - }) - - it('should continue with remaining dimensions if extractUsage throws', async () => { - const throwingConfig: CustomRateLimit = { - mode: 'custom', - requestsPerMinute: 10, - dimensions: [ - { - name: 'broken', - limitPerMinute: 100, - extractUsage: () => { - throw new Error('extraction failed') - }, - }, - { - name: 'tokens', - limitPerMinute: 1000, - extractUsage: (_p, r) => (r.tokenCount as number) ?? 
0, - }, - ], - } - - const consumeResult: ConsumeResult = { - allowed: true, - tokensRemaining: 900, - resetAt: new Date(Date.now() + 60000), - } - mockAdapter.consumeTokens.mockResolvedValue(consumeResult) - - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - throwingConfig, - {}, - { tokenCount: 100 } - ) - - expect(result.dimensions).toHaveLength(1) - expect(result.dimensions[0].name).toBe('tokens') - expect(mockAdapter.consumeTokens).toHaveBeenCalledTimes(1) - }) - - it('should handle storage errors gracefully', async () => { - mockAdapter.consumeTokens.mockRejectedValue(new Error('db connection lost')) - - const result = await rateLimiter.reportUsage( - testProvider, - 'workspace-1', - customConfig, - {}, - { tokenCount: 100 } - ) - - expect(result.dimensions).toHaveLength(0) - }) - }) -}) diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts b/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts deleted file mode 100644 index a20cf8413f3..00000000000 --- a/apps/sim/lib/core/rate-limiter/hosted-key/hosted-key-rate-limiter.ts +++ /dev/null @@ -1,349 +0,0 @@ -import { createLogger } from '@sim/logger' -import { - createStorageAdapter, - type RateLimitStorageAdapter, - type TokenBucketConfig, -} from '@/lib/core/rate-limiter/storage' -import { - type AcquireKeyResult, - type CustomRateLimit, - DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS, - type HostedKeyRateLimitConfig, - type ReportUsageResult, - toTokenBucketConfig, -} from './types' - -const logger = createLogger('HostedKeyRateLimiter') - -/** - * Resolves env var names for a numbered key prefix using a `{PREFIX}_COUNT` env var. - * E.g. with `EXA_API_KEY_COUNT=5`, returns `['EXA_API_KEY_1', ..., 'EXA_API_KEY_5']`. 
- */ -function resolveEnvKeys(prefix: string): string[] { - const count = Number.parseInt(process.env[`${prefix}_COUNT`] || '0', 10) - const names: string[] = [] - for (let i = 1; i <= count; i++) { - names.push(`${prefix}_${i}`) - } - return names -} - -/** Dimension name for per-billing-actor request rate limiting */ -const ACTOR_REQUESTS_DIMENSION = 'actor_requests' - -/** - * Information about an available hosted key - */ -interface AvailableKey { - key: string - keyIndex: number - envVarName: string -} - -/** - * HostedKeyRateLimiter provides: - * 1. Per-billing-actor rate limiting (enforced - blocks actors who exceed their limit) - * 2. Round-robin key selection (distributes requests evenly across keys) - * 3. Post-execution dimension usage tracking for custom rate limits - * - * The billing actor is typically a workspace ID, meaning rate limits are shared - * across all users within the same workspace. - */ -export class HostedKeyRateLimiter { - private storage: RateLimitStorageAdapter - /** Round-robin counter per provider for even key distribution */ - private roundRobinCounters = new Map() - - constructor(storage?: RateLimitStorageAdapter) { - this.storage = storage ?? createStorageAdapter() - } - - private buildActorStorageKey(provider: string, billingActorId: string): string { - return `hosted:${provider}:actor:${billingActorId}:${ACTOR_REQUESTS_DIMENSION}` - } - - private buildDimensionStorageKey( - provider: string, - billingActorId: string, - dimensionName: string - ): string { - return `hosted:${provider}:actor:${billingActorId}:${dimensionName}` - } - - private getAvailableKeys(envKeys: string[]): AvailableKey[] { - const keys: AvailableKey[] = [] - for (let i = 0; i < envKeys.length; i++) { - const envVarName = envKeys[i] - const key = process.env[envVarName] - if (key) { - keys.push({ key, keyIndex: i, envVarName }) - } - } - return keys - } - - /** - * Build a token bucket config for the per-billing-actor request rate limit. 
- * Works for both `per_request` and `custom` modes since both define `requestsPerMinute`. - */ - private getActorRateLimitConfig(config: HostedKeyRateLimitConfig): TokenBucketConfig | null { - if (!config.requestsPerMinute) return null - return toTokenBucketConfig( - config.requestsPerMinute, - config.burstMultiplier ?? DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS - ) - } - - /** - * Check and consume billing actor request rate limit. Returns null if allowed, or retry info if blocked. - */ - private async checkActorRateLimit( - provider: string, - billingActorId: string, - config: HostedKeyRateLimitConfig - ): Promise<{ rateLimited: true; retryAfterMs: number } | null> { - const bucketConfig = this.getActorRateLimitConfig(config) - if (!bucketConfig) return null - - const storageKey = this.buildActorStorageKey(provider, billingActorId) - - try { - const result = await this.storage.consumeTokens(storageKey, 1, bucketConfig) - if (!result.allowed) { - const retryAfterMs = Math.max(0, result.resetAt.getTime() - Date.now()) - logger.info(`Billing actor ${billingActorId} rate limited for ${provider}`, { - provider, - billingActorId, - retryAfterMs, - tokensRemaining: result.tokensRemaining, - }) - return { rateLimited: true, retryAfterMs } - } - return null - } catch (error) { - logger.error(`Error checking billing actor rate limit for ${provider}`, { - error, - billingActorId, - }) - return null - } - } - - /** - * Pre-check that the billing actor has available budget in all custom dimensions. - * Does NOT consume tokens -- just verifies the actor isn't already depleted. - * Returns retry info for the most restrictive exhausted dimension, or null if all pass. 
- */ - private async preCheckDimensions( - provider: string, - billingActorId: string, - config: CustomRateLimit - ): Promise<{ rateLimited: true; retryAfterMs: number; dimension: string } | null> { - for (const dimension of config.dimensions) { - const storageKey = this.buildDimensionStorageKey(provider, billingActorId, dimension.name) - const bucketConfig = toTokenBucketConfig( - dimension.limitPerMinute, - dimension.burstMultiplier ?? DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS - ) - - try { - const status = await this.storage.getTokenStatus(storageKey, bucketConfig) - if (status.tokensAvailable < 1) { - const retryAfterMs = Math.max(0, status.nextRefillAt.getTime() - Date.now()) - logger.info( - `Billing actor ${billingActorId} exhausted dimension ${dimension.name} for ${provider}`, - { - provider, - billingActorId, - dimension: dimension.name, - tokensAvailable: status.tokensAvailable, - retryAfterMs, - } - ) - return { rateLimited: true, retryAfterMs, dimension: dimension.name } - } - } catch (error) { - logger.error(`Error pre-checking dimension ${dimension.name} for ${provider}`, { - error, - billingActorId, - }) - } - } - return null - } - - /** - * Acquire an available key via round-robin selection. - * - * For both modes: - * 1. Per-billing-actor request rate limiting (enforced): blocks actors who exceed their request limit - * 2. Round-robin key selection: cycles through available keys for even distribution - * - * For `custom` mode additionally: - * 3. Pre-checks dimension budgets: blocks if any dimension is already depleted - * - * @param envKeyPrefix - Env var prefix (e.g. 'EXA_API_KEY'). Keys resolved via `{prefix}_COUNT`. 
- * @param billingActorId - The billing actor (typically workspace ID) to rate limit against - */ - async acquireKey( - provider: string, - envKeyPrefix: string, - config: HostedKeyRateLimitConfig, - billingActorId: string - ): Promise { - if (config.requestsPerMinute) { - const rateLimitResult = await this.checkActorRateLimit(provider, billingActorId, config) - if (rateLimitResult) { - return { - success: false, - billingActorRateLimited: true, - retryAfterMs: rateLimitResult.retryAfterMs, - error: `Rate limit exceeded. Please wait ${Math.ceil(rateLimitResult.retryAfterMs / 1000)} seconds. If you're getting throttled frequently, consider adding your own API key under Settings > BYOK to avoid shared rate limits.`, - } - } - } - - if (config.mode === 'custom' && config.dimensions.length > 0) { - const dimensionResult = await this.preCheckDimensions(provider, billingActorId, config) - if (dimensionResult) { - return { - success: false, - billingActorRateLimited: true, - retryAfterMs: dimensionResult.retryAfterMs, - error: `Rate limit exceeded for ${dimensionResult.dimension}. Please wait ${Math.ceil(dimensionResult.retryAfterMs / 1000)} seconds. If you're getting throttled frequently, consider adding your own API key under Settings > BYOK to avoid shared rate limits.`, - } - } - } - - const envKeys = resolveEnvKeys(envKeyPrefix) - const availableKeys = this.getAvailableKeys(envKeys) - - if (availableKeys.length === 0) { - logger.warn(`No hosted keys configured for provider ${provider}`) - return { - success: false, - error: `No hosted keys configured for ${provider}`, - } - } - - const counter = this.roundRobinCounters.get(provider) ?? 
0 - const selected = availableKeys[counter % availableKeys.length] - this.roundRobinCounters.set(provider, counter + 1) - - logger.debug(`Selected hosted key for ${provider}`, { - provider, - keyIndex: selected.keyIndex, - envVarName: selected.envVarName, - }) - - return { - success: true, - key: selected.key, - keyIndex: selected.keyIndex, - envVarName: selected.envVarName, - } - } - - /** - * Report actual usage after successful tool execution (custom mode only). - * Calls `extractUsage` on each dimension and consumes the actual token count. - * This is the "post-execution" phase of the optimistic two-phase approach. - */ - async reportUsage( - provider: string, - billingActorId: string, - config: CustomRateLimit, - params: Record, - response: Record - ): Promise { - const results: ReportUsageResult['dimensions'] = [] - - for (const dimension of config.dimensions) { - let usage: number - try { - usage = dimension.extractUsage(params, response) - } catch (error) { - logger.error(`Failed to extract usage for dimension ${dimension.name}`, { - provider, - billingActorId, - error, - }) - continue - } - - if (usage <= 0) { - results.push({ - name: dimension.name, - consumed: 0, - allowed: true, - tokensRemaining: 0, - }) - continue - } - - const storageKey = this.buildDimensionStorageKey(provider, billingActorId, dimension.name) - const bucketConfig = toTokenBucketConfig( - dimension.limitPerMinute, - dimension.burstMultiplier ?? 
DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS - ) - - try { - const consumeResult = await this.storage.consumeTokens(storageKey, usage, bucketConfig) - - results.push({ - name: dimension.name, - consumed: usage, - allowed: consumeResult.allowed, - tokensRemaining: consumeResult.tokensRemaining, - }) - - if (!consumeResult.allowed) { - logger.warn( - `Dimension ${dimension.name} overdrawn for ${provider} (optimistic concurrency)`, - { provider, billingActorId, usage, tokensRemaining: consumeResult.tokensRemaining } - ) - } - - logger.debug(`Consumed ${usage} from dimension ${dimension.name} for ${provider}`, { - provider, - billingActorId, - usage, - allowed: consumeResult.allowed, - tokensRemaining: consumeResult.tokensRemaining, - }) - } catch (error) { - logger.error(`Failed to consume tokens for dimension ${dimension.name}`, { - provider, - billingActorId, - usage, - error, - }) - } - } - - return { dimensions: results } - } -} - -let cachedInstance: HostedKeyRateLimiter | null = null - -/** - * Get the singleton HostedKeyRateLimiter instance - */ -export function getHostedKeyRateLimiter(): HostedKeyRateLimiter { - if (!cachedInstance) { - cachedInstance = new HostedKeyRateLimiter() - } - return cachedInstance -} - -/** - * Reset the cached rate limiter (for testing) - */ -export function resetHostedKeyRateLimiter(): void { - cachedInstance = null -} diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/index.ts b/apps/sim/lib/core/rate-limiter/hosted-key/index.ts deleted file mode 100644 index 8454618b9e6..00000000000 --- a/apps/sim/lib/core/rate-limiter/hosted-key/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -export { - getHostedKeyRateLimiter, - HostedKeyRateLimiter, - resetHostedKeyRateLimiter, -} from './hosted-key-rate-limiter' -export { - type AcquireKeyResult, - type CustomRateLimit, - DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS, - type HostedKeyRateLimitConfig, - type HostedKeyRateLimitMode, - type PerRequestRateLimit, - type RateLimitDimension, - type 
ReportUsageResult, - toTokenBucketConfig, -} from './types' diff --git a/apps/sim/lib/core/rate-limiter/hosted-key/types.ts b/apps/sim/lib/core/rate-limiter/hosted-key/types.ts deleted file mode 100644 index 65d2bb33877..00000000000 --- a/apps/sim/lib/core/rate-limiter/hosted-key/types.ts +++ /dev/null @@ -1,108 +0,0 @@ -import type { TokenBucketConfig } from '@/lib/core/rate-limiter/storage' - -export type HostedKeyRateLimitMode = 'per_request' | 'custom' - -/** - * Simple per-request rate limit configuration. - * Enforces per-billing-actor rate limiting and distributes requests across keys. - */ -export interface PerRequestRateLimit { - mode: 'per_request' - /** Maximum requests per minute per billing actor (enforced - blocks if exceeded) */ - requestsPerMinute: number - /** Burst multiplier for token bucket max capacity. Default: 2 */ - burstMultiplier?: number -} - -/** - * Custom rate limit with multiple dimensions (e.g., tokens, search units). - * Allows tracking different usage metrics independently. - */ -export interface CustomRateLimit { - mode: 'custom' - /** Maximum requests per minute per billing actor (enforced - blocks if exceeded) */ - requestsPerMinute: number - /** Multiple dimensions to track */ - dimensions: RateLimitDimension[] - /** Burst multiplier for token bucket max capacity. Default: 2 */ - burstMultiplier?: number -} - -/** - * A single dimension for custom rate limiting. - * Each dimension has its own token bucket. - */ -export interface RateLimitDimension { - /** Dimension name (e.g., 'tokens', 'search_units') - used in storage key */ - name: string - /** Limit per minute for this dimension */ - limitPerMinute: number - /** Burst multiplier for token bucket max capacity. Default: 2 */ - burstMultiplier?: number - /** - * Extract usage amount from request params and response. - * Called after successful execution to consume the actual usage. 
- */ - extractUsage: (params: Record, response: Record) => number -} - -/** Union of all hosted key rate limit configuration types */ -export type HostedKeyRateLimitConfig = PerRequestRateLimit | CustomRateLimit - -/** - * Result from acquiring a key from the hosted key rate limiter - */ -export interface AcquireKeyResult { - /** Whether a key was successfully acquired */ - success: boolean - /** The API key value (if success=true) */ - key?: string - /** Index of the key in the envKeys array */ - keyIndex?: number - /** Environment variable name of the selected key */ - envVarName?: string - /** Error message if no key available */ - error?: string - /** Whether the billing actor was rate limited (exceeded their limit) */ - billingActorRateLimited?: boolean - /** Milliseconds until the billing actor's rate limit resets (if billingActorRateLimited=true) */ - retryAfterMs?: number -} - -/** - * Result from reporting post-execution usage for custom dimensions - */ -export interface ReportUsageResult { - /** Per-dimension consumption results */ - dimensions: { - name: string - consumed: number - allowed: boolean - tokensRemaining: number - }[] -} - -/** - * Convert rate limit config to token bucket config for a dimension - */ -export function toTokenBucketConfig( - limitPerMinute: number, - burstMultiplier = 2, - windowMs = 60000 -): TokenBucketConfig { - return { - maxTokens: limitPerMinute * burstMultiplier, - refillRate: limitPerMinute, - refillIntervalMs: windowMs, - } -} - -/** - * Default rate limit window in milliseconds (1 minute) - */ -export const DEFAULT_WINDOW_MS = 60000 - -/** - * Default burst multiplier - */ -export const DEFAULT_BURST_MULTIPLIER = 2 diff --git a/apps/sim/lib/core/rate-limiter/index.ts b/apps/sim/lib/core/rate-limiter/index.ts index b690f720114..e5a0081c71f 100644 --- a/apps/sim/lib/core/rate-limiter/index.ts +++ b/apps/sim/lib/core/rate-limiter/index.ts @@ -1,18 +1,3 @@ -export { - type AcquireKeyResult, - type CustomRateLimit, - 
DEFAULT_BURST_MULTIPLIER, - DEFAULT_WINDOW_MS, - getHostedKeyRateLimiter, - type HostedKeyRateLimitConfig, - HostedKeyRateLimiter, - type HostedKeyRateLimitMode, - type PerRequestRateLimit, - type RateLimitDimension, - type ReportUsageResult, - resetHostedKeyRateLimiter, - toTokenBucketConfig, -} from './hosted-key' export type { RateLimitResult, RateLimitStatus } from './rate-limiter' export { RateLimiter } from './rate-limiter' export type { RateLimitStorageAdapter, TokenBucketConfig } from './storage' diff --git a/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts b/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts index 7f756fbc902..cdfb8b414c3 100644 --- a/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts +++ b/apps/sim/lib/core/rate-limiter/storage/db-token-bucket.ts @@ -51,7 +51,7 @@ export class DbTokenBucket implements RateLimitStorageAdapter { ) * ${config.refillRate} )::numeric ) - ${requestedTokens}::numeric - ELSE -1 + ELSE ${rateLimitBucket.tokens}::numeric END `, lastRefillAt: sql` diff --git a/apps/sim/lib/core/telemetry.ts b/apps/sim/lib/core/telemetry.ts index 8d1a08279db..c12fe1303a4 100644 --- a/apps/sim/lib/core/telemetry.ts +++ b/apps/sim/lib/core/telemetry.ts @@ -934,55 +934,6 @@ export const PlatformEvents = { }) }, - /** - * Track when a rate limit error is surfaced to the end user (not retried/absorbed). - * Fires for both billing-actor limits and exhausted upstream retries. 
- */ - userThrottled: (attrs: { - toolId: string - reason: 'billing_actor_limit' | 'upstream_retries_exhausted' - provider?: string - retryAfterMs?: number - userId?: string - workspaceId?: string - workflowId?: string - }) => { - trackPlatformEvent('platform.user.throttled', { - 'tool.id': attrs.toolId, - 'throttle.reason': attrs.reason, - ...(attrs.provider && { 'provider.id': attrs.provider }), - ...(attrs.retryAfterMs != null && { 'rate_limit.retry_after_ms': attrs.retryAfterMs }), - ...(attrs.userId && { 'user.id': attrs.userId }), - ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }), - ...(attrs.workflowId && { 'workflow.id': attrs.workflowId }), - }) - }, - - /** - * Track hosted key rate limited by upstream provider (429 from the external API) - */ - hostedKeyRateLimited: (attrs: { - toolId: string - envVarName: string - attempt: number - maxRetries: number - delayMs: number - userId?: string - workspaceId?: string - workflowId?: string - }) => { - trackPlatformEvent('platform.hosted_key.rate_limited', { - 'tool.id': attrs.toolId, - 'hosted_key.env_var': attrs.envVarName, - 'rate_limit.attempt': attrs.attempt, - 'rate_limit.max_retries': attrs.maxRetries, - 'rate_limit.delay_ms': attrs.delayMs, - ...(attrs.userId && { 'user.id': attrs.userId }), - ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }), - ...(attrs.workflowId && { 'workflow.id': attrs.workflowId }), - }) - }, - /** * Track chat deployed (workflow deployed as chat interface) */ diff --git a/apps/sim/lib/logs/execution/logger.ts b/apps/sim/lib/logs/execution/logger.ts index 7f0beca31d9..c9e2fb8d65d 100644 --- a/apps/sim/lib/logs/execution/logger.ts +++ b/apps/sim/lib/logs/execution/logger.ts @@ -181,7 +181,6 @@ export class ExecutionLogger implements IExecutionLoggerService { input: number output: number total: number - toolCost?: number tokens: { input: number; output: number; total: number } } > @@ -508,7 +507,6 @@ export class ExecutionLogger implements 
IExecutionLoggerService { input: number output: number total: number - toolCost?: number tokens: { input: number; output: number; total: number } } > diff --git a/apps/sim/lib/logs/execution/logging-factory.ts b/apps/sim/lib/logs/execution/logging-factory.ts index 98c4424613d..be7e2d5fc5a 100644 --- a/apps/sim/lib/logs/execution/logging-factory.ts +++ b/apps/sim/lib/logs/execution/logging-factory.ts @@ -95,7 +95,6 @@ export function calculateCostSummary(traceSpans: any[]): { input: number output: number total: number - toolCost?: number tokens: { input: number; output: number; total: number } } > @@ -144,7 +143,6 @@ export function calculateCostSummary(traceSpans: any[]): { input: number output: number total: number - toolCost?: number tokens: { input: number; output: number; total: number } } > = {} @@ -173,10 +171,6 @@ export function calculateCostSummary(traceSpans: any[]): { models[model].tokens.input += span.tokens?.input ?? span.tokens?.prompt ?? 0 models[model].tokens.output += span.tokens?.output ?? span.tokens?.completion ?? 0 models[model].tokens.total += span.tokens?.total || 0 - - if (span.cost.toolCost) { - models[model].toolCost = (models[model].toolCost || 0) + span.cost.toolCost - } } } diff --git a/apps/sim/lib/workflows/subblocks/visibility.ts b/apps/sim/lib/workflows/subblocks/visibility.ts index 44cddf1224d..aab03ca5dba 100644 --- a/apps/sim/lib/workflows/subblocks/visibility.ts +++ b/apps/sim/lib/workflows/subblocks/visibility.ts @@ -1,5 +1,4 @@ import { getEnv, isTruthy } from '@/lib/core/config/env' -import { isHosted } from '@/lib/core/config/feature-flags' import type { SubBlockConfig } from '@/blocks/types' export type CanonicalMode = 'basic' | 'advanced' @@ -288,12 +287,3 @@ export function isSubBlockFeatureEnabled(subBlock: SubBlockConfig): boolean { if (!subBlock.requiresFeature) return true return isTruthy(getEnv(subBlock.requiresFeature)) } - -/** - * Check if a subblock should be hidden because we're running on hosted Sim. 
- * Used for tool API key fields that should be hidden when Sim provides hosted keys. - */ -export function isSubBlockHiddenByHostedKey(subBlock: SubBlockConfig): boolean { - if (!subBlock.hideWhenHosted) return false - return isHosted -} diff --git a/apps/sim/providers/anthropic/core.ts b/apps/sim/providers/anthropic/core.ts index 240e7458dd4..dbe5df92221 100644 --- a/apps/sim/providers/anthropic/core.ts +++ b/apps/sim/providers/anthropic/core.ts @@ -19,7 +19,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -491,7 +490,7 @@ export async function executeAnthropicProviderRequest( } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...messages] let iterationCount = 0 let hasUsedForcedTool = false @@ -610,7 +609,7 @@ export async function executeAnthropicProviderRequest( }) let resultContent: unknown - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -784,12 +783,10 @@ export async function executeAnthropicProviderRequest( } const streamCost = calculateCost(request.model, usage.input_tokens, usage.output_tokens) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } const streamEndTime = Date.now() @@ -832,7 +829,6 @@ export async function executeAnthropicProviderRequest( cost: { input: accumulatedCost.input, output: accumulatedCost.output, - toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, @@ -905,7 +901,7 @@ export async function executeAnthropicProviderRequest( } const toolCalls = [] - const toolResults: Record[] = [] + 
const toolResults = [] const currentMessages = [...messages] let iterationCount = 0 let hasUsedForcedTool = false @@ -1026,7 +1022,7 @@ export async function executeAnthropicProviderRequest( }) let resultContent: unknown - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -1212,12 +1208,10 @@ export async function executeAnthropicProviderRequest( } const streamCost = calculateCost(request.model, usage.input_tokens, usage.output_tokens) - const tc2 = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: cost.input + streamCost.input, output: cost.output + streamCost.output, - toolCost: tc2 || undefined, - total: cost.total + streamCost.total + tc2, + total: cost.total + streamCost.total, } const streamEndTime = Date.now() @@ -1260,7 +1254,6 @@ export async function executeAnthropicProviderRequest( cost: { input: cost.input, output: cost.output, - toolCost: undefined as number | undefined, total: cost.total, }, }, diff --git a/apps/sim/providers/azure-openai/index.ts b/apps/sim/providers/azure-openai/index.ts index 930c31035a3..b171ba9f1a2 100644 --- a/apps/sim/providers/azure-openai/index.ts +++ b/apps/sim/providers/azure-openai/index.ts @@ -35,7 +35,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -500,12 +499,10 @@ async function executeChatCompletionsRequest( usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } const streamEndTime = Date.now() diff --git a/apps/sim/providers/bedrock/index.ts b/apps/sim/providers/bedrock/index.ts index 
ec0af6ab04b..ab7866a5440 100644 --- a/apps/sim/providers/bedrock/index.ts +++ b/apps/sim/providers/bedrock/index.ts @@ -33,7 +33,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -816,12 +815,10 @@ export const bedrockProvider: ProviderConfig = { } const streamCost = calculateCost(request.model, usage.inputTokens, usage.outputTokens) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: cost.input + streamCost.input, output: cost.output + streamCost.output, - toolCost: tc || undefined, - total: cost.total + streamCost.total + tc, + total: cost.total + streamCost.total, } const streamEndTime = Date.now() @@ -864,7 +861,6 @@ export const bedrockProvider: ProviderConfig = { cost: { input: cost.input, output: cost.output, - toolCost: undefined as number | undefined, total: cost.total, }, }, diff --git a/apps/sim/providers/cerebras/index.ts b/apps/sim/providers/cerebras/index.ts index 9ef64836030..85ce7a2445e 100644 --- a/apps/sim/providers/cerebras/index.ts +++ b/apps/sim/providers/cerebras/index.ts @@ -16,7 +16,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -196,7 +195,7 @@ export const cerebrasProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -314,7 +313,7 @@ export const cerebrasProvider: ProviderConfig = { duration: duration, }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -473,12 +472,10 @@ export const cerebrasProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - 
const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { @@ -511,7 +508,6 @@ export const cerebrasProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, - toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/deepseek/index.ts b/apps/sim/providers/deepseek/index.ts index 692fb270591..f537e5e89cf 100644 --- a/apps/sim/providers/deepseek/index.ts +++ b/apps/sim/providers/deepseek/index.ts @@ -15,7 +15,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -206,7 +205,7 @@ export const deepseekProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 let hasUsedForcedTool = false @@ -326,7 +325,7 @@ export const deepseekProvider: ProviderConfig = { }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -472,12 +471,10 @@ export const deepseekProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } } ), @@ -511,7 +508,6 @@ export const deepseekProvider: 
ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, - toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/gemini/core.ts b/apps/sim/providers/gemini/core.ts index c35f38dc782..55855b334d7 100644 --- a/apps/sim/providers/gemini/core.ts +++ b/apps/sim/providers/gemini/core.ts @@ -31,7 +31,6 @@ import { isDeepResearchModel, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' import type { ExecutionState, GeminiProviderType, GeminiUsage } from './types' @@ -1164,12 +1163,10 @@ export async function executeGeminiRequest( usage.promptTokenCount, usage.candidatesTokenCount ) - const tc = sumToolCosts(state.toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, pricing: streamCost.pricing, } diff --git a/apps/sim/providers/groq/index.ts b/apps/sim/providers/groq/index.ts index 8e1ecbabf94..756082b45f6 100644 --- a/apps/sim/providers/groq/index.ts +++ b/apps/sim/providers/groq/index.ts @@ -15,7 +15,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -202,7 +201,7 @@ export const groqProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 let modelTime = firstResponseTime @@ -304,7 +303,7 @@ export const groqProvider: ProviderConfig = { }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent 
= result.output } else { @@ -427,12 +426,10 @@ export const groqProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { @@ -465,7 +462,6 @@ export const groqProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, - toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/providers/index.ts b/apps/sim/providers/index.ts index a6f03e721f6..d99db8a6a4a 100644 --- a/apps/sim/providers/index.ts +++ b/apps/sim/providers/index.ts @@ -8,7 +8,6 @@ import { calculateCost, generateStructuredOutputInstructions, shouldBillModelUsage, - sumToolCosts, supportsReasoningEffort, supportsTemperature, supportsThinking, @@ -163,11 +162,5 @@ export async function executeProviderRequest( } } - const toolCost = sumToolCosts(response.toolResults) - if (toolCost > 0 && response.cost) { - response.cost.toolCost = toolCost - response.cost.total += toolCost - } - return response } diff --git a/apps/sim/providers/mistral/index.ts b/apps/sim/providers/mistral/index.ts index a332ae7b400..693885fe289 100644 --- a/apps/sim/providers/mistral/index.ts +++ b/apps/sim/providers/mistral/index.ts @@ -16,7 +16,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -259,7 +258,7 @@ export const mistralProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -367,7 +366,7 @@ 
export const mistralProvider: ProviderConfig = { }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -483,12 +482,10 @@ export const mistralProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { diff --git a/apps/sim/providers/ollama/index.ts b/apps/sim/providers/ollama/index.ts index b6cb8dd3234..1495f7aace1 100644 --- a/apps/sim/providers/ollama/index.ts +++ b/apps/sim/providers/ollama/index.ts @@ -13,7 +13,7 @@ import type { TimeSegment, } from '@/providers/types' import { ProviderError } from '@/providers/types' -import { calculateCost, prepareToolExecution, sumToolCosts } from '@/providers/utils' +import { calculateCost, prepareToolExecution } from '@/providers/utils' import { useProvidersStore } from '@/stores/providers' import { executeTool } from '@/tools' @@ -271,7 +271,7 @@ export const ollamaProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -377,7 +377,7 @@ export const ollamaProvider: ProviderConfig = { }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -486,12 +486,10 @@ export const ollamaProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: 
accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { diff --git a/apps/sim/providers/openai/core.ts b/apps/sim/providers/openai/core.ts index 312ac025ba9..139e12eaa3d 100644 --- a/apps/sim/providers/openai/core.ts +++ b/apps/sim/providers/openai/core.ts @@ -8,7 +8,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -406,7 +405,7 @@ export async function executeResponsesProviderRequest( } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] let iterationCount = 0 let modelTime = firstResponseTime let toolsTime = 0 @@ -513,7 +512,7 @@ export async function executeResponsesProviderRequest( }) let resultContent: Record - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output as Record } else { @@ -729,12 +728,10 @@ export async function executeResponsesProviderRequest( usage?.promptTokens || 0, usage?.completionTokens || 0 ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { diff --git a/apps/sim/providers/openrouter/index.ts b/apps/sim/providers/openrouter/index.ts index 7b01fa5784a..2951d56ae78 100644 --- a/apps/sim/providers/openrouter/index.ts +++ b/apps/sim/providers/openrouter/index.ts @@ -23,7 +23,6 @@ import { generateSchemaInstructions, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { executeTool } from '@/tools' @@ -479,12 +478,10 @@ export const 
openRouterProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { diff --git a/apps/sim/providers/types.ts b/apps/sim/providers/types.ts index 9dd78eb643d..af5362c3c75 100644 --- a/apps/sim/providers/types.ts +++ b/apps/sim/providers/types.ts @@ -79,7 +79,7 @@ export interface ProviderResponse { total?: number } toolCalls?: FunctionCallResponse[] - toolResults?: Record[] + toolResults?: any[] timing?: { startTime: string endTime: string @@ -93,7 +93,6 @@ export interface ProviderResponse { cost?: { input: number output: number - toolCost?: number total: number pricing: ModelPricing } diff --git a/apps/sim/providers/utils.test.ts b/apps/sim/providers/utils.test.ts index 031c64f1a7c..5276c4e72ca 100644 --- a/apps/sim/providers/utils.test.ts +++ b/apps/sim/providers/utils.test.ts @@ -1405,7 +1405,6 @@ describe('prepareToolExecution', () => { workspaceId: 'ws-456', chatId: 'chat-789', userId: 'user-abc', - skipFixedUsageLog: true, }) }) diff --git a/apps/sim/providers/utils.ts b/apps/sim/providers/utils.ts index 7c7cf253117..82ca33e1070 100644 --- a/apps/sim/providers/utils.ts +++ b/apps/sim/providers/utils.ts @@ -650,20 +650,6 @@ export function calculateCost( } } -/** - * Sums the `cost.total` from each tool result returned during a provider tool loop. - * Tool results may carry a `cost` object injected by `applyHostedKeyCostToResult`. 
- */ -export function sumToolCosts(toolResults?: Record[]): number { - if (!toolResults?.length) return 0 - let total = 0 - for (const tr of toolResults) { - const cost = tr?.cost as Record | undefined - if (cost?.total && typeof cost.total === 'number') total += cost.total - } - return total -} - export function getModelPricing(modelId: string): any { const embeddingPricing = getEmbeddingModelPricing(modelId) if (embeddingPricing) { @@ -1154,7 +1140,6 @@ export function prepareToolExecution( ? { isDeployedContext: request.isDeployedContext } : {}), ...(request.callChain ? { callChain: request.callChain } : {}), - skipFixedUsageLog: true, }, } : {}), diff --git a/apps/sim/providers/vllm/index.ts b/apps/sim/providers/vllm/index.ts index e4f0a4c93e8..db2f3b16597 100644 --- a/apps/sim/providers/vllm/index.ts +++ b/apps/sim/providers/vllm/index.ts @@ -17,7 +17,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, trackForcedToolUsage, } from '@/providers/utils' import { createReadableStreamFromVLLMStream } from '@/providers/vllm/utils' @@ -316,7 +315,7 @@ export const vllmProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -429,7 +428,7 @@ export const vllmProvider: ProviderConfig = { }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -554,12 +553,10 @@ export const vllmProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + 
streamCost.total, } }), execution: { diff --git a/apps/sim/providers/xai/index.ts b/apps/sim/providers/xai/index.ts index cfd2f3b784e..c5a6766fbfd 100644 --- a/apps/sim/providers/xai/index.ts +++ b/apps/sim/providers/xai/index.ts @@ -16,7 +16,6 @@ import { calculateCost, prepareToolExecution, prepareToolsWithUsageControl, - sumToolCosts, } from '@/providers/utils' import { checkForForcedToolUsage, @@ -216,7 +215,7 @@ export const xAIProvider: ProviderConfig = { total: currentResponse.usage?.total_tokens || 0, } const toolCalls = [] - const toolResults: Record[] = [] + const toolResults = [] const currentMessages = [...allMessages] let iterationCount = 0 @@ -332,7 +331,7 @@ export const xAIProvider: ProviderConfig = { duration: duration, }) let resultContent: any - if (result.success && result.output) { + if (result.success) { toolResults.push(result.output) resultContent = result.output } else { @@ -510,12 +509,10 @@ export const xAIProvider: ProviderConfig = { usage.prompt_tokens, usage.completion_tokens ) - const tc = sumToolCosts(toolResults) streamingResult.execution.output.cost = { input: accumulatedCost.input + streamCost.input, output: accumulatedCost.output + streamCost.output, - toolCost: tc || undefined, - total: accumulatedCost.total + streamCost.total + tc, + total: accumulatedCost.total + streamCost.total, } }), execution: { @@ -548,7 +545,6 @@ export const xAIProvider: ProviderConfig = { cost: { input: accumulatedCost.input, output: accumulatedCost.output, - toolCost: undefined as number | undefined, total: accumulatedCost.total, }, }, diff --git a/apps/sim/serializer/index.ts b/apps/sim/serializer/index.ts index 9c21661deb5..671535ef684 100644 --- a/apps/sim/serializer/index.ts +++ b/apps/sim/serializer/index.ts @@ -9,7 +9,6 @@ import { isCanonicalPair, isNonEmptyValue, isSubBlockFeatureEnabled, - isSubBlockHiddenByHostedKey, resolveCanonicalMode, } from '@/lib/workflows/subblocks/visibility' import { getBlock } from '@/blocks' @@ -49,7 +48,6 @@ 
function shouldSerializeSubBlock( canonicalModeOverrides?: CanonicalModeOverrides ): boolean { if (!isSubBlockFeatureEnabled(subBlockConfig)) return false - if (isSubBlockHiddenByHostedKey(subBlockConfig)) return false if (subBlockConfig.mode === 'trigger') { if (!isTriggerContext && !isTriggerCategory) return false diff --git a/apps/sim/tools/exa/answer.ts b/apps/sim/tools/exa/answer.ts index 7990f57ec9f..95c29e0e686 100644 --- a/apps/sim/tools/exa/answer.ts +++ b/apps/sim/tools/exa/answer.ts @@ -27,25 +27,6 @@ export const answerTool: ToolConfig = { description: 'Exa AI API Key', }, }, - hosting: { - envKeyPrefix: 'EXA_API_KEY', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'custom', - getCost: (_params, output) => { - const costDollars = output.__costDollars as { total?: number } | undefined - if (costDollars?.total == null) { - throw new Error('Exa answer response missing costDollars field') - } - return { cost: costDollars.total, metadata: { costDollars } } - }, - }, - rateLimit: { - mode: 'per_request', - requestsPerMinute: 5, - }, - }, request: { url: 'https://api.exa.ai/answer', @@ -80,7 +61,6 @@ export const answerTool: ToolConfig = { url: citation.url, text: citation.text || '', })) || [], - __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/find_similar_links.ts b/apps/sim/tools/exa/find_similar_links.ts index 1685e601168..0996061a3d9 100644 --- a/apps/sim/tools/exa/find_similar_links.ts +++ b/apps/sim/tools/exa/find_similar_links.ts @@ -76,25 +76,6 @@ export const findSimilarLinksTool: ToolConfig< description: 'Exa AI API Key', }, }, - hosting: { - envKeyPrefix: 'EXA_API_KEY', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'custom', - getCost: (_params, output) => { - const costDollars = output.__costDollars as { total?: number } | undefined - if (costDollars?.total == null) { - throw new Error('Exa find_similar_links response missing costDollars field') - } - return { cost: 
costDollars.total, metadata: { costDollars } } - }, - }, - rateLimit: { - mode: 'per_request', - requestsPerMinute: 10, - }, - }, request: { url: 'https://api.exa.ai/findSimilar', @@ -159,7 +140,6 @@ export const findSimilarLinksTool: ToolConfig< highlights: result.highlights, score: result.score || 0, })), - __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/get_contents.ts b/apps/sim/tools/exa/get_contents.ts index c1b96967bd1..be44b70222d 100644 --- a/apps/sim/tools/exa/get_contents.ts +++ b/apps/sim/tools/exa/get_contents.ts @@ -61,25 +61,6 @@ export const getContentsTool: ToolConfig { - const costDollars = output.__costDollars as { total?: number } | undefined - if (costDollars?.total == null) { - throw new Error('Exa get_contents response missing costDollars field') - } - return { cost: costDollars.total, metadata: { costDollars } } - }, - }, - rateLimit: { - mode: 'per_request', - requestsPerMinute: 10, - }, - }, request: { url: 'https://api.exa.ai/contents', @@ -151,7 +132,6 @@ export const getContentsTool: ToolConfig = { description: 'Exa AI API Key', }, }, - hosting: { - envKeyPrefix: 'EXA_API_KEY', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'custom', - getCost: (_params, output) => { - const costDollars = output.__costDollars as { total?: number } | undefined - if (costDollars?.total == null) { - throw new Error('Exa search response missing costDollars field') - } - return { cost: costDollars.total, metadata: { costDollars } } - }, - }, - rateLimit: { - mode: 'per_request', - requestsPerMinute: 5, - }, - }, request: { url: 'https://api.exa.ai/search', @@ -186,7 +167,6 @@ export const searchTool: ToolConfig = { highlights: result.highlights, score: result.score, })), - __costDollars: data.costDollars, }, } }, diff --git a/apps/sim/tools/exa/types.ts b/apps/sim/tools/exa/types.ts index f633272a1af..bcdf63d1a2f 100644 --- a/apps/sim/tools/exa/types.ts +++ b/apps/sim/tools/exa/types.ts @@ -6,11 +6,6 @@ export 
interface ExaBaseParams { apiKey: string } -/** Cost breakdown returned by Exa API responses */ -export interface ExaCostDollars { - total: number -} - // Search tool types export interface ExaSearchParams extends ExaBaseParams { query: string @@ -55,7 +50,6 @@ export interface ExaSearchResult { export interface ExaSearchResponse extends ToolResponse { output: { results: ExaSearchResult[] - __costDollars?: ExaCostDollars } } @@ -84,7 +78,6 @@ export interface ExaGetContentsResult { export interface ExaGetContentsResponse extends ToolResponse { output: { results: ExaGetContentsResult[] - __costDollars?: ExaCostDollars } } @@ -127,7 +120,6 @@ export interface ExaSimilarLink { export interface ExaFindSimilarLinksResponse extends ToolResponse { output: { similarLinks: ExaSimilarLink[] - __costDollars?: ExaCostDollars } } @@ -145,7 +137,6 @@ export interface ExaAnswerResponse extends ToolResponse { url: string text: string }[] - __costDollars?: ExaCostDollars } } diff --git a/apps/sim/tools/index.test.ts b/apps/sim/tools/index.test.ts index 288893633af..fe4b4469191 100644 --- a/apps/sim/tools/index.test.ts +++ b/apps/sim/tools/index.test.ts @@ -15,85 +15,52 @@ import { } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' -// Hoisted mock state - these are available to vi.mock factories -const { mockIsHosted, mockEnv, mockGetBYOKKey, mockLogFixedUsage, mockRateLimiterFns } = vi.hoisted( - () => ({ - mockIsHosted: { value: false }, - mockEnv: { NEXT_PUBLIC_APP_URL: 'http://localhost:3000' } as Record, - mockGetBYOKKey: vi.fn(), - mockLogFixedUsage: vi.fn(), - mockRateLimiterFns: { - acquireKey: vi.fn(), - preConsumeCapacity: vi.fn(), - consumeCapacity: vi.fn(), - }, - }) -) - -// Mock feature flags -vi.mock('@/lib/core/config/feature-flags', () => ({ - get isHosted() { - return mockIsHosted.value +// Mock custom tools query - must be hoisted before imports +vi.mock('@/hooks/queries/custom-tools', () => ({ + getCustomTool: (toolId: 
string) => { + if (toolId === 'custom-tool-123') { + return { + id: 'custom-tool-123', + title: 'Custom Weather Tool', + code: 'return { result: "Weather data" }', + schema: { + function: { + description: 'Get weather information', + parameters: { + type: 'object', + properties: { + location: { type: 'string', description: 'City name' }, + unit: { type: 'string', description: 'Unit (metric/imperial)' }, + }, + required: ['location'], + }, + }, + }, + } + } + return undefined }, - isProd: false, - isDev: true, - isTest: true, -})) - -// Mock env config to control hosted key availability -vi.mock('@/lib/core/config/env', () => ({ - env: new Proxy({} as Record, { - get: (_target, prop: string) => mockEnv[prop], - }), - getEnv: (key: string) => mockEnv[key], - isTruthy: (val: unknown) => val === true || val === 'true' || val === '1', - isFalsy: (val: unknown) => val === false || val === 'false' || val === '0', -})) - -// Mock getBYOKKey -vi.mock('@/lib/api-key/byok', () => ({ - getBYOKKey: (...args: unknown[]) => mockGetBYOKKey(...args), -})) - -// Mock logFixedUsage for billing -vi.mock('@/lib/billing/core/usage-log', () => ({ - logFixedUsage: (...args: unknown[]) => mockLogFixedUsage(...args), -})) - -vi.mock('@/lib/core/rate-limiter/hosted-key', () => ({ - getHostedKeyRateLimiter: () => mockRateLimiterFns, -})) - -// Mock custom tools - define mock data inside factory function -vi.mock('@/hooks/queries/custom-tools', () => { - const mockCustomTool = { - id: 'custom-tool-123', - title: 'Custom Weather Tool', - code: 'return { result: "Weather data" }', - schema: { - function: { - description: 'Get weather information', - parameters: { - type: 'object', - properties: { - location: { type: 'string', description: 'City name' }, - unit: { type: 'string', description: 'Unit (metric/imperial)' }, + getCustomTools: () => [ + { + id: 'custom-tool-123', + title: 'Custom Weather Tool', + code: 'return { result: "Weather data" }', + schema: { + function: { + description: 'Get 
weather information', + parameters: { + type: 'object', + properties: { + location: { type: 'string', description: 'City name' }, + unit: { type: 'string', description: 'Unit (metric/imperial)' }, + }, + required: ['location'], }, - required: ['location'], }, }, }, - } - return { - getCustomTool: (toolId: string) => { - if (toolId === 'custom-tool-123') { - return mockCustomTool - } - return undefined - }, - getCustomTools: () => [mockCustomTool], - } -}) + ], +})) import { executeTool } from '@/tools/index' import { tools } from '@/tools/registry' @@ -1219,712 +1186,3 @@ describe('MCP Tool Execution', () => { }) }) }) - -describe('Hosted Key Injection', () => { - let cleanupEnvVars: () => void - - beforeEach(() => { - process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' - cleanupEnvVars = setupEnvVars({ NEXT_PUBLIC_APP_URL: 'http://localhost:3000' }) - vi.clearAllMocks() - mockGetBYOKKey.mockReset() - mockLogFixedUsage.mockReset() - }) - - afterEach(() => { - vi.resetAllMocks() - cleanupEnvVars() - }) - - it('should not inject hosted key when tool has no hosting config', async () => { - const mockTool = { - id: 'test_no_hosting', - name: 'Test No Hosting', - description: 'A test tool without hosting config', - version: '1.0.0', - params: {}, - request: { - url: '/api/test/endpoint', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_no_hosting = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - await executeTool('test_no_hosting', {}, false, mockContext) - - // BYOK should not be called since there's no 
hosting config - expect(mockGetBYOKKey).not.toHaveBeenCalled() - - Object.assign(tools, originalTools) - }) - - it('should check BYOK key first when tool has hosting config', async () => { - // Note: isHosted is mocked to false by default, so hosted key injection won't happen - // This test verifies the flow when isHosted would be true - const mockTool = { - id: 'test_with_hosting', - name: 'Test With Hosting', - description: 'A test tool with hosting config', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: true }, - }, - hosting: { - envKeyPrefix: 'TEST_API', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'per_request' as const, - cost: 0.005, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/endpoint', - method: 'POST' as const, - headers: (params: any) => ({ - 'Content-Type': 'application/json', - 'x-api-key': params.apiKey, - }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_with_hosting = mockTool - - // Mock BYOK returning a key - mockGetBYOKKey.mockResolvedValue({ apiKey: 'byok-test-key', isBYOK: true }) - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - await executeTool('test_with_hosting', {}, false, mockContext) - - // With isHosted=false, BYOK won't be called - this is expected behavior - // The test documents the current behavior - Object.assign(tools, originalTools) - }) - - it('should use per_request pricing model correctly', async () => { - const mockTool = { - id: 'test_per_request_pricing', - name: 'Test Per Request Pricing', - description: 'A test tool with per_request 
pricing', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: true }, - }, - hosting: { - envKeyPrefix: 'TEST_API', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'per_request' as const, - cost: 0.005, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/endpoint', - method: 'POST' as const, - headers: (params: any) => ({ - 'Content-Type': 'application/json', - 'x-api-key': params.apiKey, - }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - // Verify pricing config structure - expect(mockTool.hosting.pricing.type).toBe('per_request') - expect(mockTool.hosting.pricing.cost).toBe(0.005) - }) - - it('should use custom pricing model correctly', async () => { - const mockGetCost = vi.fn().mockReturnValue({ cost: 0.01, metadata: { breakdown: 'test' } }) - - const mockTool = { - id: 'test_custom_pricing', - name: 'Test Custom Pricing', - description: 'A test tool with custom pricing', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: true }, - }, - hosting: { - envKeyPrefix: 'TEST_API', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'custom' as const, - getCost: mockGetCost, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/endpoint', - method: 'POST' as const, - headers: (params: any) => ({ - 'Content-Type': 'application/json', - 'x-api-key': params.apiKey, - }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success', costDollars: { total: 0.01 } }, - }), - } - - // Verify pricing config structure - expect(mockTool.hosting.pricing.type).toBe('custom') - expect(typeof mockTool.hosting.pricing.getCost).toBe('function') - - // Test getCost returns expected value - const result = mockTool.hosting.pricing.getCost({}, { costDollars: { total: 
0.01 } }) - expect(result).toEqual({ cost: 0.01, metadata: { breakdown: 'test' } }) - }) - - it('should handle custom pricing returning a number', async () => { - const mockGetCost = vi.fn().mockReturnValue(0.005) - - const mockTool = { - id: 'test_custom_pricing_number', - name: 'Test Custom Pricing Number', - description: 'A test tool with custom pricing returning number', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: true }, - }, - hosting: { - envKeyPrefix: 'TEST_API', - apiKeyParam: 'apiKey', - byokProviderId: 'exa', - pricing: { - type: 'custom' as const, - getCost: mockGetCost, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/endpoint', - method: 'POST' as const, - headers: (params: any) => ({ - 'Content-Type': 'application/json', - 'x-api-key': params.apiKey, - }), - }, - } - - // Test getCost returns a number - const result = mockTool.hosting.pricing.getCost({}, {}) - expect(result).toBe(0.005) - }) -}) - -describe('Rate Limiting and Retry Logic', () => { - let cleanupEnvVars: () => void - - beforeEach(() => { - process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' - cleanupEnvVars = setupEnvVars({ - NEXT_PUBLIC_APP_URL: 'http://localhost:3000', - }) - vi.clearAllMocks() - mockIsHosted.value = true - mockEnv.TEST_HOSTED_KEY = 'test-hosted-api-key' - mockGetBYOKKey.mockResolvedValue(null) - // Set up throttler mock defaults - mockRateLimiterFns.acquireKey.mockResolvedValue({ - success: true, - key: 'mock-hosted-key', - keyIndex: 0, - envVarName: 'TEST_HOSTED_KEY', - }) - mockRateLimiterFns.preConsumeCapacity.mockResolvedValue(true) - mockRateLimiterFns.consumeCapacity.mockResolvedValue(undefined) - }) - - afterEach(() => { - vi.resetAllMocks() - cleanupEnvVars() - mockIsHosted.value = false - mockEnv.TEST_HOSTED_KEY = undefined - }) - - it('should retry on 429 rate limit errors with exponential backoff', async () => { - let attemptCount = 0 - - const mockTool = 
{ - id: 'test_rate_limit', - name: 'Test Rate Limit', - description: 'A test tool for rate limiting', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: false }, - }, - hosting: { - envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'per_request' as const, - cost: 0.001, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/rate-limit', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_rate_limit = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => { - attemptCount++ - if (attemptCount < 3) { - // Return a proper 429 response - the code extracts error, attaches status, and throws - return { - ok: false, - status: 429, - statusText: 'Too Many Requests', - headers: new Headers(), - json: () => Promise.resolve({ error: 'Rate limited' }), - text: () => Promise.resolve('Rate limited'), - } - } - return { - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - } - }), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - const result = await executeTool('test_rate_limit', {}, false, mockContext) - - // Should succeed after retries - expect(result.success).toBe(true) - // Should have made 3 attempts (2 failures + 1 success) - expect(attemptCount).toBe(3) - - Object.assign(tools, originalTools) - }) - - it('should fail after max retries on persistent rate limiting', async () => { - const mockTool = { - id: 'test_persistent_rate_limit', - name: 'Test Persistent Rate Limit', - description: 'A test tool for persistent rate limiting', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: false }, - }, - hosting: { - 
envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'per_request' as const, - cost: 0.001, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/persistent-rate-limit', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - } - - const originalTools = { ...tools } - ;(tools as any).test_persistent_rate_limit = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => { - // Always return 429 to test max retries exhaustion - return { - ok: false, - status: 429, - statusText: 'Too Many Requests', - headers: new Headers(), - json: () => Promise.resolve({ error: 'Rate limited' }), - text: () => Promise.resolve('Rate limited'), - } - }), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - const result = await executeTool('test_persistent_rate_limit', {}, false, mockContext) - - // Should fail after all retries exhausted - expect(result.success).toBe(false) - expect(result.error).toContain('Rate limited') - - Object.assign(tools, originalTools) - }) - - it('should not retry on non-rate-limit errors', async () => { - let attemptCount = 0 - - const mockTool = { - id: 'test_no_retry', - name: 'Test No Retry', - description: 'A test tool that should not retry', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: false }, - }, - hosting: { - envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'per_request' as const, - cost: 0.001, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/no-retry', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - } - - const originalTools = { ...tools } - ;(tools as any).test_no_retry = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => { - attemptCount++ - // 
Return a 400 response - should not trigger retry logic - return { - ok: false, - status: 400, - statusText: 'Bad Request', - headers: new Headers(), - json: () => Promise.resolve({ error: 'Bad request' }), - text: () => Promise.resolve('Bad request'), - } - }), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - const result = await executeTool('test_no_retry', {}, false, mockContext) - - // Should fail immediately without retries - expect(result.success).toBe(false) - expect(attemptCount).toBe(1) - - Object.assign(tools, originalTools) - }) -}) - -describe('Cost Field Handling', () => { - let cleanupEnvVars: () => void - - beforeEach(() => { - process.env.NEXT_PUBLIC_APP_URL = 'http://localhost:3000' - cleanupEnvVars = setupEnvVars({ - NEXT_PUBLIC_APP_URL: 'http://localhost:3000', - }) - vi.clearAllMocks() - mockIsHosted.value = true - mockEnv.TEST_HOSTED_KEY = 'test-hosted-api-key' - mockGetBYOKKey.mockResolvedValue(null) - mockLogFixedUsage.mockResolvedValue(undefined) - // Set up throttler mock defaults - mockRateLimiterFns.acquireKey.mockResolvedValue({ - success: true, - key: 'mock-hosted-key', - keyIndex: 0, - envVarName: 'TEST_HOSTED_KEY', - }) - mockRateLimiterFns.preConsumeCapacity.mockResolvedValue(true) - mockRateLimiterFns.consumeCapacity.mockResolvedValue(undefined) - }) - - afterEach(() => { - vi.resetAllMocks() - cleanupEnvVars() - mockIsHosted.value = false - mockEnv.TEST_HOSTED_KEY = undefined - }) - - it('should add cost to output when using hosted key with per_request pricing', async () => { - const mockTool = { - id: 'test_cost_per_request', - name: 'Test Cost Per Request', - description: 'A test tool with per_request pricing', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: false }, - }, - hosting: { - envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'per_request' as const, - cost: 0.005, - }, - rateLimit: { - mode: 'per_request' as const, - 
requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/cost', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_cost_per_request = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext({ - userId: 'user-123', - } as any) - const result = await executeTool('test_cost_per_request', {}, false, mockContext) - - expect(result.success).toBe(true) - // Note: In test environment, hosted key injection may not work due to env mocking complexity. - // The cost calculation logic is tested via the pricing model tests above. - // This test verifies the tool execution flow when hosted key IS available (by checking output structure). 
- if (result.output.cost) { - expect(result.output.cost.total).toBe(0.005) - // Should have logged usage - expect(mockLogFixedUsage).toHaveBeenCalledWith( - expect.objectContaining({ - userId: 'user-123', - cost: 0.005, - description: 'tool:test_cost_per_request', - }) - ) - } - - Object.assign(tools, originalTools) - }) - - it('should not add cost when not using hosted key', async () => { - mockIsHosted.value = false - - const mockTool = { - id: 'test_no_hosted_cost', - name: 'Test No Hosted Cost', - description: 'A test tool without hosted key', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: true }, - }, - hosting: { - envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'per_request' as const, - cost: 0.005, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/no-hosted', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_no_hosted_cost = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext() - // Pass user's own API key - const result = await executeTool( - 'test_no_hosted_cost', - { apiKey: 'user-api-key' }, - false, - mockContext - ) - - expect(result.success).toBe(true) - // Should not have cost since user provided their own key - expect(result.output.cost).toBeUndefined() - // Should not have logged usage - expect(mockLogFixedUsage).not.toHaveBeenCalled() - - Object.assign(tools, originalTools) - }) - - it('should use custom pricing getCost function', async () => { - const mockGetCost = 
vi.fn().mockReturnValue({ - cost: 0.015, - metadata: { mode: 'advanced', results: 10 }, - }) - - const mockTool = { - id: 'test_custom_pricing_cost', - name: 'Test Custom Pricing Cost', - description: 'A test tool with custom pricing', - version: '1.0.0', - params: { - apiKey: { type: 'string', required: false }, - mode: { type: 'string', required: false }, - }, - hosting: { - envKeyPrefix: 'TEST_HOSTED_KEY', - apiKeyParam: 'apiKey', - pricing: { - type: 'custom' as const, - getCost: mockGetCost, - }, - rateLimit: { - mode: 'per_request' as const, - requestsPerMinute: 100, - }, - }, - request: { - url: '/api/test/custom-pricing', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'success', results: 10 }, - }), - } - - const originalTools = { ...tools } - ;(tools as any).test_custom_pricing_cost = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const mockContext = createToolExecutionContext({ - userId: 'user-123', - } as any) - const result = await executeTool( - 'test_custom_pricing_cost', - { mode: 'advanced' }, - false, - mockContext - ) - - expect(result.success).toBe(true) - expect(result.output.cost).toBeDefined() - expect(result.output.cost.total).toBe(0.015) - - // getCost should have been called with params and output - expect(mockGetCost).toHaveBeenCalled() - - // Should have logged usage with metadata - expect(mockLogFixedUsage).toHaveBeenCalledWith( - expect.objectContaining({ - cost: 0.015, - metadata: { mode: 'advanced', results: 10 }, - }) - ) - - Object.assign(tools, originalTools) - }) -}) diff --git a/apps/sim/tools/index.ts b/apps/sim/tools/index.ts index 7722e2c630b..8184cf70643 100644 --- a/apps/sim/tools/index.ts +++ 
b/apps/sim/tools/index.ts @@ -1,15 +1,10 @@ import { createLogger } from '@sim/logger' -import { getBYOKKey } from '@/lib/api-key/byok' import { generateInternalToken } from '@/lib/auth/internal' -import { logFixedUsage } from '@/lib/billing/core/usage-log' -import { isHosted } from '@/lib/core/config/feature-flags' import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits' -import { getHostedKeyRateLimiter } from '@/lib/core/rate-limiter' import { secureFetchWithPinnedIP, validateUrlWithDNS, } from '@/lib/core/security/input-validation.server' -import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls' import { SIM_VIA_HEADER, serializeCallChain } from '@/lib/execution/call-chain' @@ -19,14 +14,7 @@ import { resolveSkillContent } from '@/executor/handlers/agent/skills-resolver' import type { ExecutionContext } from '@/executor/types' import type { ErrorInfo } from '@/tools/error-extractors' import { extractErrorMessage } from '@/tools/error-extractors' -import type { - BYOKProviderId, - OAuthTokenPayload, - ToolConfig, - ToolHostingPricing, - ToolResponse, - ToolRetryConfig, -} from '@/tools/types' +import type { OAuthTokenPayload, ToolConfig, ToolResponse, ToolRetryConfig } from '@/tools/types' import { formatRequestParams, getTool, @@ -36,365 +24,6 @@ import { const logger = createLogger('Tools') -/** Result from hosted key injection */ -interface HostedKeyInjectionResult { - isUsingHostedKey: boolean - envVarName?: string -} - -/** - * Inject hosted API key if tool supports it and user didn't provide one. - * Checks BYOK workspace keys first, then uses the HostedKeyRateLimiter for round-robin key selection. - * Returns whether a hosted (billable) key was injected and which env var it came from. 
- */ -async function injectHostedKeyIfNeeded( - tool: ToolConfig, - params: Record, - executionContext: ExecutionContext | undefined, - requestId: string -): Promise { - if (!tool.hosting) return { isUsingHostedKey: false } - if (!isHosted) return { isUsingHostedKey: false } - - const { envKeyPrefix, apiKeyParam, byokProviderId, rateLimit } = tool.hosting - - // Derive workspace/user/workflow IDs from executionContext or params._context - const ctx = params._context as Record | undefined - const workspaceId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) - const userId = executionContext?.userId || (ctx?.userId as string | undefined) - const workflowId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) - - // Check BYOK workspace key first - if (byokProviderId && workspaceId) { - try { - const byokResult = await getBYOKKey(workspaceId, byokProviderId as BYOKProviderId) - if (byokResult) { - params[apiKeyParam] = byokResult.apiKey - logger.info(`[${requestId}] Using BYOK key for ${tool.id}`) - return { isUsingHostedKey: false } // Don't bill - user's own key - } - } catch (error) { - logger.error(`[${requestId}] Failed to get BYOK key for ${tool.id}:`, error) - // Fall through to hosted key - } - } - - const rateLimiter = getHostedKeyRateLimiter() - const provider = byokProviderId || tool.id - const billingActorId = workspaceId - - if (!billingActorId) { - logger.error(`[${requestId}] No workspace ID available for hosted key rate limiting`) - return { isUsingHostedKey: false } - } - - const acquireResult = await rateLimiter.acquireKey( - provider, - envKeyPrefix, - rateLimit, - billingActorId - ) - - if (!acquireResult.success && acquireResult.billingActorRateLimited) { - logger.warn(`[${requestId}] Billing actor ${billingActorId} rate limited for ${tool.id}`, { - provider, - retryAfterMs: acquireResult.retryAfterMs, - }) - - PlatformEvents.userThrottled({ - toolId: tool.id, - reason: 'billing_actor_limit', - 
provider, - retryAfterMs: acquireResult.retryAfterMs ?? 0, - userId, - workspaceId, - workflowId, - }) - - const error = new Error(acquireResult.error || `Rate limit exceeded for ${tool.id}`) - ;(error as any).status = 429 - ;(error as any).retryAfterMs = acquireResult.retryAfterMs - throw error - } - - // Handle no keys configured (503) - if (!acquireResult.success) { - logger.error(`[${requestId}] No hosted keys configured for ${tool.id}: ${acquireResult.error}`) - const error = new Error(acquireResult.error || `No hosted keys configured for ${tool.id}`) - ;(error as any).status = 503 - throw error - } - - params[apiKeyParam] = acquireResult.key - logger.info(`[${requestId}] Using hosted key for ${tool.id} (${acquireResult.envVarName})`, { - keyIndex: acquireResult.keyIndex, - provider, - }) - - return { - isUsingHostedKey: true, - envVarName: acquireResult.envVarName, - } -} - -/** - * Check if an error is a rate limit (throttling) error - */ -function isRateLimitError(error: unknown): boolean { - if (error && typeof error === 'object') { - const status = (error as { status?: number }).status - // 429 = Too Many Requests, 503 = Service Unavailable (sometimes used for rate limiting) - if (status === 429 || status === 503) return true - } - return false -} - -/** Context for retry with rate limit tracking */ -interface RetryContext { - requestId: string - toolId: string - envVarName: string - executionContext?: ExecutionContext -} - -/** - * Execute a function with exponential backoff retry for rate limiting errors. - * Only used for hosted key requests. Tracks rate limit events via telemetry. 
- */ -async function executeWithRetry( - fn: () => Promise, - context: RetryContext, - maxRetries = 3, - baseDelayMs = 1000 -): Promise { - const { requestId, toolId, envVarName, executionContext } = context - let lastError: unknown - - for (let attempt = 0; attempt <= maxRetries; attempt++) { - try { - return await fn() - } catch (error) { - lastError = error - - if (!isRateLimitError(error) || attempt === maxRetries) { - if (isRateLimitError(error) && attempt === maxRetries) { - PlatformEvents.userThrottled({ - toolId, - reason: 'upstream_retries_exhausted', - userId: executionContext?.userId, - workspaceId: executionContext?.workspaceId, - workflowId: executionContext?.workflowId, - }) - } - throw error - } - - const delayMs = baseDelayMs * 2 ** attempt - - // Track throttling event via telemetry - PlatformEvents.hostedKeyRateLimited({ - toolId, - envVarName, - attempt: attempt + 1, - maxRetries, - delayMs, - userId: executionContext?.userId, - workspaceId: executionContext?.workspaceId, - workflowId: executionContext?.workflowId, - }) - - logger.warn( - `[${requestId}] Rate limited for ${toolId} (${envVarName}), retrying in ${delayMs}ms (attempt ${attempt + 1}/${maxRetries})` - ) - await new Promise((resolve) => setTimeout(resolve, delayMs)) - } - } - - throw lastError -} - -/** Result from cost calculation */ -interface ToolCostResult { - cost: number - metadata?: Record -} - -/** - * Calculate cost based on pricing model - */ -function calculateToolCost( - pricing: ToolHostingPricing, - params: Record, - response: Record -): ToolCostResult { - switch (pricing.type) { - case 'per_request': - return { cost: pricing.cost } - - case 'custom': { - const result = pricing.getCost(params, response) - if (typeof result === 'number') { - return { cost: result } - } - return result - } - - default: { - const exhaustiveCheck: never = pricing - throw new Error(`Unknown pricing type: ${(exhaustiveCheck as ToolHostingPricing).type}`) - } - } -} - -interface 
HostedKeyCostResult { - cost: number - metadata?: Record -} - -/** - * Calculate and log hosted key cost for a tool execution. - * Logs to usageLog for audit trail and returns cost + metadata for output. - */ -async function processHostedKeyCost( - tool: ToolConfig, - params: Record, - response: Record, - executionContext: ExecutionContext | undefined, - requestId: string -): Promise { - if (!tool.hosting?.pricing) { - return { cost: 0 } - } - - const { cost, metadata } = calculateToolCost(tool.hosting.pricing, params, response) - - if (cost <= 0) return { cost: 0 } - - const ctx = params._context as Record | undefined - const userId = executionContext?.userId || (ctx?.userId as string | undefined) - const wsId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) - const wfId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) - - if (!userId) return { cost, metadata } - - const skipLog = !!ctx?.skipFixedUsageLog - if (!skipLog) { - try { - await logFixedUsage({ - userId, - source: 'workflow', - description: `tool:${tool.id}`, - cost, - workspaceId: wsId, - workflowId: wfId, - executionId: executionContext?.executionId, - metadata, - }) - logger.debug( - `[${requestId}] Logged hosted key cost for ${tool.id}: $${cost}`, - metadata ? { metadata } : {} - ) - } catch (error) { - logger.error(`[${requestId}] Failed to log hosted key usage for ${tool.id}:`, error) - } - } else { - logger.debug( - `[${requestId}] Skipping fixed usage log for ${tool.id} (cost will be tracked via provider tool loop)` - ) - } - - return { cost, metadata } -} - -/** - * Report custom dimension usage after successful hosted-key tool execution. - * Only applies to tools with `custom` rate limit mode. Fires and logs; - * failures here do not block the response since execution already succeeded. 
- */ -async function reportCustomDimensionUsage( - tool: ToolConfig, - params: Record, - response: Record, - executionContext: ExecutionContext | undefined, - requestId: string -): Promise { - if (tool.hosting?.rateLimit.mode !== 'custom') return - const ctx = params._context as Record | undefined - const billingActorId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) - if (!billingActorId) return - - const rateLimiter = getHostedKeyRateLimiter() - const provider = tool.hosting.byokProviderId || tool.id - - try { - const result = await rateLimiter.reportUsage( - provider, - billingActorId, - tool.hosting.rateLimit, - params, - response - ) - - for (const dim of result.dimensions) { - if (!dim.allowed) { - logger.warn(`[${requestId}] Dimension ${dim.name} overdrawn after ${tool.id} execution`, { - consumed: dim.consumed, - tokensRemaining: dim.tokensRemaining, - }) - } - } - } catch (error) { - logger.error(`[${requestId}] Failed to report custom dimension usage for ${tool.id}:`, error) - } -} - -/** - * Strips internal fields (keys starting with `__`) from tool output before - * returning to users. The double-underscore prefix is reserved for transient - * data (e.g. `__costDollars`) and will never collide with legitimate API - * fields like `_id`. - */ -function stripInternalFields(output: Record): Record { - const result: Record = {} - for (const [key, value] of Object.entries(output)) { - if (!key.startsWith('__')) { - result[key] = value - } - } - return result -} - -/** - * Apply post-execution hosted-key cost tracking to a successful tool result. - * Reports custom dimension usage, calculates cost, and merges it into the output. 
- */ -async function applyHostedKeyCostToResult( - finalResult: ToolResponse, - tool: ToolConfig, - params: Record, - executionContext: ExecutionContext | undefined, - requestId: string -): Promise { - await reportCustomDimensionUsage(tool, params, finalResult.output, executionContext, requestId) - - const { cost: hostedKeyCost, metadata } = await processHostedKeyCost( - tool, - params, - finalResult.output, - executionContext, - requestId - ) - if (hostedKeyCost > 0) { - finalResult.output = { - ...finalResult.output, - cost: { - ...metadata, - total: hostedKeyCost, - }, - } - } -} - /** * Normalizes a tool ID by stripping resource ID suffix (UUID/tableId). * Workflow tools: 'workflow_executor_' -> 'workflow_executor' @@ -670,15 +299,6 @@ export async function executeTool( throw new Error(`Tool not found: ${toolId}`) } - // Inject hosted API key if tool supports it and user didn't provide one - const hostedKeyInfo = await injectHostedKeyIfNeeded( - tool, - contextParams, - executionContext, - requestId - ) - - // If we have a credential parameter, fetch the access token if (contextParams.oauthCredential) { contextParams.credential = contextParams.oauthCredential } @@ -799,22 +419,8 @@ export async function executeTool( const endTime = new Date() const endTimeISO = endTime.toISOString() const duration = endTime.getTime() - startTime.getTime() - - if (hostedKeyInfo.isUsingHostedKey && finalResult.success) { - await applyHostedKeyCostToResult( - finalResult, - tool, - contextParams, - executionContext, - requestId - ) - } - - const strippedOutput = stripInternalFields(finalResult.output || {}) - return { ...finalResult, - output: strippedOutput, timing: { startTime: startTimeISO, endTime: endTimeISO, @@ -824,15 +430,7 @@ export async function executeTool( } // Execute the tool request directly (internal routes use regular fetch, external use SSRF-protected fetch) - // Wrap with retry logic for hosted keys to handle rate limiting due to higher usage - const result = 
hostedKeyInfo.isUsingHostedKey - ? await executeWithRetry(() => executeToolRequest(toolId, tool, contextParams), { - requestId, - toolId, - envVarName: hostedKeyInfo.envVarName!, - executionContext, - }) - : await executeToolRequest(toolId, tool, contextParams) + const result = await executeToolRequest(toolId, tool, contextParams) // Apply post-processing if available and not skipped let finalResult = result @@ -854,22 +452,8 @@ export async function executeTool( const endTime = new Date() const endTimeISO = endTime.toISOString() const duration = endTime.getTime() - startTime.getTime() - - if (hostedKeyInfo.isUsingHostedKey && finalResult.success) { - await applyHostedKeyCostToResult( - finalResult, - tool, - contextParams, - executionContext, - requestId - ) - } - - const strippedOutput = stripInternalFields(finalResult.output || {}) - return { ...finalResult, - output: strippedOutput, timing: { startTime: startTimeISO, endTime: endTimeISO, diff --git a/apps/sim/tools/knowledge/knowledge.test.ts b/apps/sim/tools/knowledge/knowledge.test.ts deleted file mode 100644 index 1dd0f287711..00000000000 --- a/apps/sim/tools/knowledge/knowledge.test.ts +++ /dev/null @@ -1,202 +0,0 @@ -/** - * @vitest-environment node - * - * Knowledge Tools Unit Tests - * - * Tests for knowledge_search and knowledge_upload_chunk tools, - * specifically the cost restructuring in transformResponse. 
- */ - -import { describe, expect, it } from 'vitest' -import { knowledgeSearchTool } from '@/tools/knowledge/search' -import { knowledgeUploadChunkTool } from '@/tools/knowledge/upload_chunk' - -/** - * Creates a mock Response object for testing transformResponse - */ -function createMockResponse(data: unknown): Response { - return { - json: async () => data, - ok: true, - status: 200, - } as Response -} - -describe('Knowledge Tools', () => { - describe('knowledgeSearchTool', () => { - describe('transformResponse', () => { - it('should restructure cost information for logging', async () => { - const apiResponse = { - data: { - results: [{ content: 'test result', similarity: 0.95 }], - query: 'test query', - totalResults: 1, - cost: { - input: 0.00001042, - output: 0, - total: 0.00001042, - tokens: { - prompt: 521, - completion: 0, - total: 521, - }, - model: 'text-embedding-3-small', - pricing: { - input: 0.02, - output: 0, - updatedAt: '2025-07-10', - }, - }, - }, - } - - const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) - - expect(result.success).toBe(true) - expect(result.output).toEqual({ - results: [{ content: 'test result', similarity: 0.95 }], - query: 'test query', - totalResults: 1, - cost: { - input: 0.00001042, - output: 0, - total: 0.00001042, - }, - tokens: { - prompt: 521, - completion: 0, - total: 521, - }, - model: 'text-embedding-3-small', - }) - }) - - it('should handle response without cost information', async () => { - const apiResponse = { - data: { - results: [], - query: 'test query', - totalResults: 0, - }, - } - - const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) - - expect(result.success).toBe(true) - expect(result.output).toEqual({ - results: [], - query: 'test query', - totalResults: 0, - }) - expect(result.output.cost).toBeUndefined() - expect(result.output.tokens).toBeUndefined() - expect(result.output.model).toBeUndefined() - }) - - it('should 
handle response with partial cost information', async () => { - const apiResponse = { - data: { - results: [], - query: 'test query', - totalResults: 0, - cost: { - input: 0.001, - output: 0, - total: 0.001, - // No tokens or model - }, - }, - } - - const result = await knowledgeSearchTool.transformResponse!(createMockResponse(apiResponse)) - - expect(result.success).toBe(true) - expect(result.output.cost).toEqual({ - input: 0.001, - output: 0, - total: 0.001, - }) - expect(result.output.tokens).toBeUndefined() - expect(result.output.model).toBeUndefined() - }) - }) - }) - - describe('knowledgeUploadChunkTool', () => { - describe('transformResponse', () => { - it('should restructure cost information for logging', async () => { - const apiResponse = { - data: { - id: 'chunk-123', - chunkIndex: 0, - content: 'test content', - contentLength: 12, - tokenCount: 3, - enabled: true, - documentId: 'doc-456', - documentName: 'Test Document', - createdAt: '2025-01-01T00:00:00Z', - updatedAt: '2025-01-01T00:00:00Z', - cost: { - input: 0.00000521, - output: 0, - total: 0.00000521, - tokens: { - prompt: 260, - completion: 0, - total: 260, - }, - model: 'text-embedding-3-small', - pricing: { - input: 0.02, - output: 0, - updatedAt: '2025-07-10', - }, - }, - }, - } - - const result = await knowledgeUploadChunkTool.transformResponse!( - createMockResponse(apiResponse) - ) - - expect(result.success).toBe(true) - expect(result.output.cost).toEqual({ - input: 0.00000521, - output: 0, - total: 0.00000521, - }) - expect(result.output.tokens).toEqual({ - prompt: 260, - completion: 0, - total: 260, - }) - expect(result.output.model).toBe('text-embedding-3-small') - expect(result.output.data.chunkId).toBe('chunk-123') - expect(result.output.documentId).toBe('doc-456') - }) - - it('should handle response without cost information', async () => { - const apiResponse = { - data: { - id: 'chunk-123', - chunkIndex: 0, - content: 'test content', - documentId: 'doc-456', - documentName: 'Test 
Document', - }, - } - - const result = await knowledgeUploadChunkTool.transformResponse!( - createMockResponse(apiResponse) - ) - - expect(result.success).toBe(true) - expect(result.output.cost).toBeUndefined() - expect(result.output.tokens).toBeUndefined() - expect(result.output.model).toBeUndefined() - expect(result.output.data.chunkId).toBe('chunk-123') - }) - }) - }) -}) diff --git a/apps/sim/tools/knowledge/search.ts b/apps/sim/tools/knowledge/search.ts index af82111adc8..574017d0831 100644 --- a/apps/sim/tools/knowledge/search.ts +++ b/apps/sim/tools/knowledge/search.ts @@ -80,24 +80,13 @@ export const knowledgeSearchTool: ToolConfig = { const result = await response.json() const data = result.data || result - // Restructure cost: extract tokens/model to top level for logging - let costFields: Record = {} - if (data.cost && typeof data.cost === 'object') { - const { tokens, model, input, output: outputCost, total } = data.cost - costFields = { - cost: { input, output: outputCost, total }, - ...(tokens && { tokens }), - ...(model && { model }), - } - } - return { success: true, output: { results: data.results || [], query: data.query, totalResults: data.totalResults || 0, - ...costFields, + cost: data.cost, }, } }, diff --git a/apps/sim/tools/knowledge/upload_chunk.ts b/apps/sim/tools/knowledge/upload_chunk.ts index d7ad0fd93ba..24e07ee24a8 100644 --- a/apps/sim/tools/knowledge/upload_chunk.ts +++ b/apps/sim/tools/knowledge/upload_chunk.ts @@ -52,17 +52,6 @@ export const knowledgeUploadChunkTool: ToolConfig = {} - if (data.cost && typeof data.cost === 'object') { - const { tokens, model, input, output: outputCost, total } = data.cost - costFields = { - cost: { input, output: outputCost, total }, - ...(tokens && { tokens }), - ...(model && { model }), - } - } - return { success: true, output: { @@ -79,7 +68,7 @@ export const knowledgeUploadChunkTool: ToolConfig { * Maps param IDs to their enrichment configuration. 
*/ schemaEnrichment?: Record + /** * Optional tool-level enrichment that modifies description and all parameters. * Use when multiple params depend on a single runtime value. */ toolEnrichment?: ToolEnrichmentConfig - - /** - * Hosted API key configuration for this tool. - * When configured, the tool can use Sim's hosted API keys if user doesn't provide their own. - * Usage is billed according to the pricing config. - */ - hosting?: ToolHostingConfig

} export interface TableRow { @@ -231,72 +222,3 @@ export interface ToolEnrichmentConfig { } } | null> } - -/** - * Pricing models for hosted API key usage - */ -/** Flat fee per API call (e.g., Serper search) */ -export interface PerRequestPricing { - type: 'per_request' - /** Cost per request in dollars */ - cost: number -} - -/** Result from custom pricing calculation */ -export interface CustomPricingResult { - /** Cost in dollars */ - cost: number - /** Optional metadata about the cost calculation (e.g., breakdown from API) */ - metadata?: Record -} - -/** Custom pricing calculated from params and response (e.g., Exa with different modes/result counts) */ -export interface CustomPricing

> { - type: 'custom' - /** Calculate cost based on request params and response output. Fields starting with _ are internal. */ - getCost: (params: P, output: Record) => number | CustomPricingResult -} - -/** Union of all pricing models */ -export type ToolHostingPricing

> = PerRequestPricing | CustomPricing

- -/** - * Configuration for hosted API key support. - * When configured, the tool can use Sim's hosted API keys if user doesn't provide their own. - * - * ### Hosted key env var convention - * - * Keys follow a numbered naming convention driven by a count env var: - * - * 1. Set `{envKeyPrefix}_COUNT` to the number of keys available. - * 2. Provide each key as `{envKeyPrefix}_1`, `{envKeyPrefix}_2`, ..., `{envKeyPrefix}_N`. - * - * **Example** — for `envKeyPrefix: 'EXA_API_KEY'` with 5 keys: - * ``` - * EXA_API_KEY_COUNT=5 - * EXA_API_KEY_1=sk-... - * EXA_API_KEY_2=sk-... - * EXA_API_KEY_3=sk-... - * EXA_API_KEY_4=sk-... - * EXA_API_KEY_5=sk-... - * ``` - * - * Adding more keys only requires updating the count and adding the new env var — - * no code changes needed. - */ -export interface ToolHostingConfig

> { - /** - * Env var name prefix for hosted keys. - * At runtime, `{envKeyPrefix}_COUNT` is read to determine how many keys exist, - * then `{envKeyPrefix}_1` through `{envKeyPrefix}_N` are resolved. - */ - envKeyPrefix: string - /** The parameter name that receives the API key */ - apiKeyParam: string - /** BYOK provider ID for workspace key lookup */ - byokProviderId?: BYOKProviderId - /** Pricing when using hosted key */ - pricing: ToolHostingPricing

- /** Hosted key rate limit configuration (required for hosted key distribution) */ - rateLimit: HostedKeyRateLimitConfig -}