diff --git a/README.md b/README.md
index 23c10de618..f25393ba18 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@
-
+
diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx
index d8ebc1641e..2c1bcb62bf 100644
--- a/apps/docs/components/icons.tsx
+++ b/apps/docs/components/icons.tsx
@@ -4678,3 +4678,349 @@ export function BedrockIcon(props: SVGProps) {
)
}
+
+export function ReductoIcon(props: SVGProps) {
+ return (
+
+ )
+}
+
+export function PulseIcon(props: SVGProps) {
+ return (
+
+ )
+}
diff --git a/apps/docs/components/ui/icon-mapping.ts b/apps/docs/components/ui/icon-mapping.ts
index d3a284093f..7927a555e9 100644
--- a/apps/docs/components/ui/icon-mapping.ts
+++ b/apps/docs/components/ui/icon-mapping.ts
@@ -84,9 +84,11 @@ import {
PolymarketIcon,
PostgresIcon,
PosthogIcon,
+ PulseIcon,
QdrantIcon,
RDSIcon,
RedditIcon,
+ ReductoIcon,
ResendIcon,
S3Icon,
SalesforceIcon,
@@ -208,9 +210,11 @@ export const blockTypeToIconMap: Record = {
polymarket: PolymarketIcon,
postgresql: PostgresIcon,
posthog: PosthogIcon,
+ pulse: PulseIcon,
qdrant: QdrantIcon,
rds: RDSIcon,
reddit: RedditIcon,
+ reducto: ReductoIcon,
resend: ResendIcon,
s3: S3Icon,
salesforce: SalesforceIcon,
diff --git a/apps/docs/content/docs/en/execution/meta.json b/apps/docs/content/docs/en/execution/meta.json
index 02f2c537db..37cac68f5a 100644
--- a/apps/docs/content/docs/en/execution/meta.json
+++ b/apps/docs/content/docs/en/execution/meta.json
@@ -1,3 +1,3 @@
{
- "pages": ["index", "basics", "api", "form", "logging", "costs"]
+ "pages": ["index", "basics", "api", "logging", "costs"]
}
diff --git a/apps/docs/content/docs/en/tools/google_vault.mdx b/apps/docs/content/docs/en/tools/google_vault.mdx
index 1dde5f9e48..d6e39c51ec 100644
--- a/apps/docs/content/docs/en/tools/google_vault.mdx
+++ b/apps/docs/content/docs/en/tools/google_vault.mdx
@@ -36,43 +36,47 @@ Connect Google Vault to create exports, list exports, and manage holds within ma
### `google_vault_create_matters_export`
+Create an export in a matter
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `matterId` | string | Yes | The matter ID |
+| `exportName` | string | Yes | Name for the export \(avoid special characters\) |
+| `corpus` | string | Yes | Data corpus to export \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
+| `accountEmails` | string | No | Comma-separated list of user emails to scope export |
+| `orgUnitId` | string | No | Organization unit ID to scope export \(alternative to emails\) |
+| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, e.g., 2024-01-01T00:00:00Z\) |
+| `endTime` | string | No | End time for date filtering \(ISO 8601 format, e.g., 2024-12-31T23:59:59Z\) |
+| `terms` | string | No | Search query terms to filter exported content |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `export` | json | Created export object |
### `google_vault_list_matters_export`
+List exports for a matter
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `matterId` | string | Yes | The matter ID |
+| `pageSize` | number | No | Number of exports to return per page |
+| `pageToken` | string | No | Token for pagination |
+| `exportId` | string | No | Optional export ID to fetch a specific export |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `exports` | json | Array of export objects |
+| `export` | json | Single export object \(when exportId is provided\) |
+| `nextPageToken` | string | Token for fetching next page of results |
### `google_vault_download_export_file`
@@ -82,10 +86,10 @@ Download a single file from a Google Vault export (GCS object)
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `matterId` | string | Yes | No description |
-| `bucketName` | string | Yes | No description |
-| `objectName` | string | Yes | No description |
-| `fileName` | string | No | No description |
+| `matterId` | string | Yes | The matter ID |
+| `bucketName` | string | Yes | GCS bucket name from cloudStorageSink.files.bucketName |
+| `objectName` | string | Yes | GCS object name from cloudStorageSink.files.objectName |
+| `fileName` | string | No | Optional filename override for the downloaded file |
#### Output
@@ -95,82 +99,84 @@ Download a single file from a Google Vault export (GCS object)
### `google_vault_create_matters_holds`
+Create a hold in a matter
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `matterId` | string | Yes | The matter ID |
+| `holdName` | string | Yes | Name for the hold |
+| `corpus` | string | Yes | Data corpus to hold \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
+| `accountEmails` | string | No | Comma-separated list of user emails to put on hold |
+| `orgUnitId` | string | No | Organization unit ID to put on hold \(alternative to accounts\) |
+| `terms` | string | No | Search terms to filter held content \(for MAIL and GROUPS corpus\) |
+| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
+| `endTime` | string | No | End time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
+| `includeSharedDrives` | boolean | No | Include files in shared drives \(for DRIVE corpus\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `hold` | json | Created hold object |
### `google_vault_list_matters_holds`
+List holds for a matter
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `matterId` | string | Yes | The matter ID |
+| `pageSize` | number | No | Number of holds to return per page |
+| `pageToken` | string | No | Token for pagination |
+| `holdId` | string | No | Optional hold ID to fetch a specific hold |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `holds` | json | Array of hold objects |
+| `hold` | json | Single hold object \(when holdId is provided\) |
+| `nextPageToken` | string | Token for fetching next page of results |
### `google_vault_create_matters`
+Create a new matter in Google Vault
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `name` | string | Yes | Name for the new matter |
+| `description` | string | No | Optional description for the matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `matter` | json | Created matter object |
### `google_vault_list_matters`
+List matters, or get a specific matter if matterId is provided
+
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
+| `pageSize` | number | No | Number of matters to return per page |
+| `pageToken` | string | No | Token for pagination |
+| `matterId` | string | No | Optional matter ID to fetch a specific matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `matters` | json | Array of matter objects \(for list_matters\) |
-| `exports` | json | Array of export objects \(for list_matters_export\) |
-| `holds` | json | Array of hold objects \(for list_matters_holds\) |
-| `matter` | json | Created matter object \(for create_matters\) |
-| `export` | json | Created export object \(for create_matters_export\) |
-| `hold` | json | Created hold object \(for create_matters_holds\) |
-| `file` | json | Downloaded export file \(UserFile\) from execution files |
-| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
+| `matters` | json | Array of matter objects |
+| `matter` | json | Single matter object \(when matterId is provided\) |
+| `nextPageToken` | string | Token for fetching next page of results |
diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json
index d1d88a5116..ec3178013b 100644
--- a/apps/docs/content/docs/en/tools/meta.json
+++ b/apps/docs/content/docs/en/tools/meta.json
@@ -79,9 +79,11 @@
"polymarket",
"postgresql",
"posthog",
+ "pulse",
"qdrant",
"rds",
"reddit",
+ "reducto",
"resend",
"s3",
"salesforce",
diff --git a/apps/docs/content/docs/en/tools/pulse.mdx b/apps/docs/content/docs/en/tools/pulse.mdx
new file mode 100644
index 0000000000..92d2319e00
--- /dev/null
+++ b/apps/docs/content/docs/en/tools/pulse.mdx
@@ -0,0 +1,72 @@
+---
+title: Pulse
+description: Extract text from documents using Pulse OCR
+---
+
+import { BlockInfoCard } from "@/components/ui/block-info-card"
+
+
+
+{/* MANUAL-CONTENT-START:intro */}
+The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
+
+With Pulse, you can:
+
+- **Extract text from documents**: Quickly convert scanned PDFs, images, and Office documents to usable text, markdown, or JSON.
+- **Process documents by URL or upload**: Simply provide a file URL or use upload to extract text from local documents or remote resources.
+- **Flexible output formats**: Choose between markdown, plain text, or JSON representations of the extracted content for downstream processing.
+- **Selective page processing**: Specify a range of pages to process, reducing processing time and cost when you only need part of a document.
+- **Figure and table extraction**: Optionally extract figures and tables, with automatic caption and description generation to populate context.
+- **Get processing insights**: Receive detailed metadata on each job, including file type, page count, processing time, and more.
+- **Integration-ready responses**: Incorporate extracted content into research, workflow automation, or data analysis pipelines.
+
+Ideal for automating tedious document review, enabling content summarization, research, and more, Pulse Parser brings real-world documents into the digital workflow era.
+
+If you need accurate, scalable, and developer-friendly document parsing capabilities—across formats, languages, and layouts—Pulse empowers your agents to read the world.
+{/* MANUAL-CONTENT-END */}
+
+
+## Usage Instructions
+
+Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.
+
+
+
+## Tools
+
+### `pulse_parser`
+
+Parse documents (PDF, images, Office docs) using Pulse OCR API
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `filePath` | string | Yes | URL to a document to be processed |
+| `fileUpload` | object | No | File upload data from file-upload component |
+| `pages` | string | No | Page range to process \(1-indexed, e.g., "1-2,5"\) |
+| `extractFigure` | boolean | No | Enable figure extraction from the document |
+| `figureDescription` | boolean | No | Generate descriptions/captions for extracted figures |
+| `returnHtml` | boolean | No | Include HTML in the response |
+| `chunking` | string | No | Chunking strategies \(comma-separated: semantic, header, page, recursive\) |
+| `chunkSize` | number | No | Maximum characters per chunk when chunking is enabled |
+| `apiKey` | string | Yes | Pulse API key |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `markdown` | string | Extracted content in markdown format |
+| `page_count` | number | Number of pages in the document |
+| `job_id` | string | Unique job identifier |
+| `bounding_boxes` | json | Bounding box layout information |
+| `extraction_url` | string | URL for extraction results \(for large documents\) |
+| `html` | string | HTML content if requested |
+| `structured_output` | json | Structured output if schema was provided |
+| `chunks` | json | Chunked content if chunking was enabled |
+| `figures` | json | Extracted figures if figure extraction was enabled |
+
+
diff --git a/apps/docs/content/docs/en/tools/reducto.mdx b/apps/docs/content/docs/en/tools/reducto.mdx
new file mode 100644
index 0000000000..ef004cf898
--- /dev/null
+++ b/apps/docs/content/docs/en/tools/reducto.mdx
@@ -0,0 +1,63 @@
+---
+title: Reducto
+description: Extract text from PDF documents
+---
+
+import { BlockInfoCard } from "@/components/ui/block-info-card"
+
+
+
+{/* MANUAL-CONTENT-START:intro */}
+The [Reducto](https://reducto.ai/) tool enables fast and accurate extraction of text and data from PDF documents via OCR (Optical Character Recognition). Reducto is designed for agent workflows, making it easy to process uploaded or linked PDFs and transform their contents into ready-to-use information.
+
+With the Reducto tool, you can:
+
+- **Extract text and tables from PDFs**: Quickly convert scanned or digital PDFs to text, markdown, or structured JSON.
+- **Parse PDFs from uploads or URLs**: Process documents either by uploading a PDF or specifying a direct URL.
+- **Customize output formatting**: Choose your preferred output format—markdown, plain text, or JSON—and specify table formats as markdown or HTML.
+- **Select specific pages**: Optionally extract content from particular pages to optimize processing and focus on what matters.
+- **Receive detailed processing metadata**: Alongside extracted content, get job details, processing times, source file info, page counts, and OCR usage stats for audit and automation.
+
+Whether you’re automating workflow steps, extracting business-critical information, or unlocking archival documents for search and analysis, Reducto’s OCR parser gives you structured, actionable data from even the most complex PDFs.
+
+Looking for reliable and scalable PDF parsing? Reducto is optimized for developer and agent use—providing accuracy, speed, and flexibility for modern document understanding.
+{/* MANUAL-CONTENT-END */}
+
+
+## Usage Instructions
+
+Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.
+
+
+
+## Tools
+
+### `reducto_parser`
+
+Parse PDF documents using Reducto OCR API
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `filePath` | string | Yes | URL to a PDF document to be processed |
+| `fileUpload` | object | No | File upload data from file-upload component |
+| `pages` | array | No | Specific pages to process \(1-indexed page numbers\) |
+| `tableOutputFormat` | string | No | Table output format \(html or markdown\). Defaults to markdown. |
+| `apiKey` | string | Yes | Reducto API key \(REDUCTO_API_KEY\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `job_id` | string | Unique identifier for the processing job |
+| `duration` | number | Processing time in seconds |
+| `usage` | json | Resource consumption data |
+| `result` | json | Parsed document content with chunks and blocks |
+| `pdf_url` | string | Storage URL of converted PDF |
+| `studio_link` | string | Link to Reducto studio interface |
+
+
diff --git a/apps/docs/content/docs/en/tools/slack.mdx b/apps/docs/content/docs/en/tools/slack.mdx
index a1e847cd63..4462adba61 100644
--- a/apps/docs/content/docs/en/tools/slack.mdx
+++ b/apps/docs/content/docs/en/tools/slack.mdx
@@ -43,6 +43,27 @@ In Sim, the Slack integration enables your agents to programmatically interact w
- **Download files**: Retrieve files shared in Slack channels for processing or archival
This allows for powerful automation scenarios such as sending notifications with dynamic updates, managing conversational flows with editable status messages, acknowledging important messages with reactions, and maintaining clean channels by removing outdated bot messages. Your agents can deliver timely information, update messages as workflows progress, create collaborative documents, or alert team members when attention is needed. This integration bridges the gap between your AI workflows and your team's communication, ensuring everyone stays informed with accurate, up-to-date information. By connecting Sim with Slack, you can create agents that keep your team updated with relevant information at the right time, enhance collaboration by sharing and updating insights automatically, and reduce the need for manual status updates—all while leveraging your existing Slack workspace where your team already communicates.
+
+## Getting Started
+
+To connect Slack to your Sim workflows:
+
+1. Sign up or log in at [sim.ai](https://sim.ai)
+2. Create a new workflow or open an existing one
+3. Drag a **Slack** block onto your canvas
+4. Click the credential selector and choose **Connect**
+5. Authorize Sim to access your Slack workspace
+6. Select your target channel or user
+
+Once connected, you can use any of the Slack operations listed below.
+
+## AI-Generated Content
+
+Sim workflows may use AI models to generate messages and responses sent to Slack. AI-generated content may be inaccurate or contain errors. Always review automated outputs, especially for critical communications.
+
+## Need Help?
+
+If you encounter issues with the Slack integration, contact us at [help@sim.ai](mailto:help@sim.ai)
{/* MANUAL-CONTENT-END */}
diff --git a/apps/docs/tsconfig.json b/apps/docs/tsconfig.json
index deb9fba9d8..1a45ee6471 100644
--- a/apps/docs/tsconfig.json
+++ b/apps/docs/tsconfig.json
@@ -11,10 +11,8 @@
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",
- ".next/types/**/*.ts",
"content/docs/execution/index.mdx",
- "content/docs/connections/index.mdx",
- ".next/dev/types/**/*.ts"
+ "content/docs/connections/index.mdx"
],
- "exclude": ["node_modules"]
+ "exclude": ["node_modules", ".next"]
}
diff --git a/apps/sim/app/_shell/providers/posthog-provider.tsx b/apps/sim/app/_shell/providers/posthog-provider.tsx
index 6e7ae038eb..8d1fb0dda7 100644
--- a/apps/sim/app/_shell/providers/posthog-provider.tsx
+++ b/apps/sim/app/_shell/providers/posthog-provider.tsx
@@ -16,9 +16,12 @@ export function PostHogProvider({ children }: { children: React.ReactNode }) {
ui_host: 'https://us.posthog.com',
defaults: '2025-05-24',
person_profiles: 'identified_only',
- capture_pageview: true,
+ autocapture: false,
+ capture_pageview: false,
capture_pageleave: false,
capture_performance: false,
+ capture_dead_clicks: false,
+ enable_heatmaps: false,
session_recording: {
maskAllInputs: false,
maskInputOptions: {
@@ -29,13 +32,7 @@ export function PostHogProvider({ children }: { children: React.ReactNode }) {
recordHeaders: false,
recordBody: false,
},
- autocapture: {
- dom_event_allowlist: ['click', 'submit', 'change'],
- element_allowlist: ['button', 'a', 'input'],
- },
- capture_dead_clicks: false,
persistence: 'localStorage+cookie',
- enable_heatmaps: false,
})
}
}, [])
diff --git a/apps/sim/app/_shell/providers/tooltip-provider.tsx b/apps/sim/app/_shell/providers/tooltip-provider.tsx
new file mode 100644
index 0000000000..84274ddb8c
--- /dev/null
+++ b/apps/sim/app/_shell/providers/tooltip-provider.tsx
@@ -0,0 +1,11 @@
+'use client'
+
+import { Tooltip } from '@/components/emcn'
+
+interface TooltipProviderProps {
+ children: React.ReactNode
+}
+
+export function TooltipProvider({ children }: TooltipProviderProps) {
+ return {children}
+}
diff --git a/apps/sim/app/_styles/globals.css b/apps/sim/app/_styles/globals.css
index b9dcb8c71c..a177876575 100644
--- a/apps/sim/app/_styles/globals.css
+++ b/apps/sim/app/_styles/globals.css
@@ -11,7 +11,7 @@
*/
:root {
--sidebar-width: 232px; /* SIDEBAR_WIDTH.DEFAULT */
- --panel-width: 290px; /* PANEL_WIDTH.DEFAULT */
+ --panel-width: 320px; /* PANEL_WIDTH.DEFAULT */
--toolbar-triggers-height: 300px; /* TOOLBAR_TRIGGERS_HEIGHT.DEFAULT */
--editor-connections-height: 172px; /* EDITOR_CONNECTIONS_HEIGHT.DEFAULT */
--terminal-height: 155px; /* TERMINAL_HEIGHT.DEFAULT */
@@ -59,21 +59,22 @@
}
/**
- * Selected node ring indicator
- * Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
+ * Workflow canvas cursor styles
+ * Override React Flow's default selection cursor based on canvas mode
*/
-.react-flow__node.selected > div > div {
- position: relative;
+.workflow-container.canvas-mode-cursor .react-flow__pane,
+.workflow-container.canvas-mode-cursor .react-flow__selectionpane {
+ cursor: default !important;
}
-.react-flow__node.selected > div > div::after {
- content: "";
- position: absolute;
- inset: 0;
- z-index: 40;
- border-radius: 8px;
- box-shadow: 0 0 0 1.75px var(--brand-secondary);
- pointer-events: none;
+.workflow-container.canvas-mode-hand .react-flow__pane,
+.workflow-container.canvas-mode-hand .react-flow__selectionpane {
+ cursor: grab !important;
+}
+
+.workflow-container.canvas-mode-hand .react-flow__pane:active,
+.workflow-container.canvas-mode-hand .react-flow__selectionpane:active {
+ cursor: grabbing !important;
}
/**
@@ -557,32 +558,6 @@ input[type="search"]::-ms-clear {
transition-duration: 300ms;
}
- .streaming-effect {
- @apply relative overflow-hidden;
- }
-
- .streaming-effect::after {
- content: "";
- @apply pointer-events-none absolute left-0 top-0 h-full w-full;
- background: linear-gradient(
- 90deg,
- rgba(128, 128, 128, 0) 0%,
- rgba(128, 128, 128, 0.1) 50%,
- rgba(128, 128, 128, 0) 100%
- );
- animation: code-shimmer 1.5s infinite;
- z-index: 10;
- }
-
- .dark .streaming-effect::after {
- background: linear-gradient(
- 90deg,
- rgba(180, 180, 180, 0) 0%,
- rgba(180, 180, 180, 0.1) 50%,
- rgba(180, 180, 180, 0) 100%
- );
- }
-
.loading-placeholder::placeholder {
animation: placeholder-pulse 1.5s ease-in-out infinite;
}
@@ -657,6 +632,20 @@ input[type="search"]::-ms-clear {
}
}
+/**
+ * Notification toast enter animation
+ */
+@keyframes notification-enter {
+ from {
+ opacity: 0;
+ transform: translateX(-16px);
+ }
+ to {
+ opacity: 1;
+ transform: translateX(var(--stack-offset, 0px));
+ }
+}
+
/**
* @depricated
* Legacy globals (light/dark) kept for backward-compat with old classes.
diff --git a/apps/sim/app/api/__test-utils__/utils.ts b/apps/sim/app/api/__test-utils__/utils.ts
deleted file mode 100644
index 3ecefb443c..0000000000
--- a/apps/sim/app/api/__test-utils__/utils.ts
+++ /dev/null
@@ -1,1565 +0,0 @@
-import { createMockLogger as createSimTestingMockLogger } from '@sim/testing'
-import { NextRequest } from 'next/server'
-import { vi } from 'vitest'
-
-export { createMockLogger } from '@sim/testing'
-
-export interface MockUser {
- id: string
- email: string
- name?: string
-}
-
-export interface MockAuthResult {
- mockGetSession: ReturnType
- mockAuthenticatedUser: (user?: MockUser) => void
- mockUnauthenticated: () => void
- setAuthenticated: (user?: MockUser) => void
- setUnauthenticated: () => void
-}
-
-export interface DatabaseSelectResult {
- id: string
- [key: string]: any
-}
-
-export interface DatabaseInsertResult {
- id: string
- [key: string]: any
-}
-
-export interface DatabaseUpdateResult {
- id: string
- updatedAt?: Date
- [key: string]: any
-}
-
-export interface DatabaseDeleteResult {
- id: string
- [key: string]: any
-}
-
-export interface MockDatabaseOptions {
- select?: {
- results?: any[][]
- throwError?: boolean
- errorMessage?: string
- }
- insert?: {
- results?: any[]
- throwError?: boolean
- errorMessage?: string
- }
- update?: {
- results?: any[]
- throwError?: boolean
- errorMessage?: string
- }
- delete?: {
- results?: any[]
- throwError?: boolean
- errorMessage?: string
- }
- transaction?: {
- throwError?: boolean
- errorMessage?: string
- }
-}
-
-export interface CapturedFolderValues {
- name?: string
- color?: string
- parentId?: string | null
- isExpanded?: boolean
- sortOrder?: number
- updatedAt?: Date
-}
-
-export interface CapturedWorkflowValues {
- name?: string
- description?: string
- color?: string
- folderId?: string | null
- state?: any
- updatedAt?: Date
-}
-
-export const sampleWorkflowState = {
- blocks: {
- 'starter-id': {
- id: 'starter-id',
- type: 'starter',
- name: 'Start',
- position: { x: 100, y: 100 },
- subBlocks: {
- startWorkflow: { id: 'startWorkflow', type: 'dropdown', value: 'manual' },
- webhookPath: { id: 'webhookPath', type: 'short-input', value: '' },
- },
- outputs: {
- input: 'any',
- },
- enabled: true,
- horizontalHandles: true,
- advancedMode: false,
- triggerMode: false,
- height: 95,
- },
- 'agent-id': {
- id: 'agent-id',
- type: 'agent',
- name: 'Agent 1',
- position: { x: 634, y: -167 },
- subBlocks: {
- systemPrompt: {
- id: 'systemPrompt',
- type: 'long-input',
- value: 'You are a helpful assistant',
- },
- context: { id: 'context', type: 'short-input', value: '' },
- model: { id: 'model', type: 'dropdown', value: 'gpt-4o' },
- apiKey: { id: 'apiKey', type: 'short-input', value: '{{OPENAI_API_KEY}}' },
- },
- outputs: {
- response: {
- content: 'string',
- model: 'string',
- tokens: 'any',
- },
- },
- enabled: true,
- horizontalHandles: true,
- advancedMode: false,
- triggerMode: false,
- height: 680,
- },
- },
- edges: [
- {
- id: 'edge-id',
- source: 'starter-id',
- target: 'agent-id',
- sourceHandle: 'source',
- targetHandle: 'target',
- },
- ],
- loops: {},
- parallels: {},
- lastSaved: Date.now(),
- isDeployed: false,
-}
-
-// Global mock data that can be configured by tests
-export const globalMockData = {
- webhooks: [] as any[],
- workflows: [] as any[],
- schedules: [] as any[],
- shouldThrowError: false,
- errorMessage: 'Database error',
-}
-
-export const mockDb = {
- select: vi.fn().mockImplementation(() => {
- if (globalMockData.shouldThrowError) {
- throw new Error(globalMockData.errorMessage)
- }
- return {
- from: vi.fn().mockImplementation(() => ({
- innerJoin: vi.fn().mockImplementation(() => ({
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => {
- // Return webhook/workflow join data if available
- if (globalMockData.webhooks.length > 0) {
- return [
- {
- webhook: globalMockData.webhooks[0],
- workflow: globalMockData.workflows[0] || {
- id: 'test-workflow',
- userId: 'test-user',
- },
- },
- ]
- }
- return []
- }),
- })),
- })),
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => {
- // Return schedules if available
- if (globalMockData.schedules.length > 0) {
- return globalMockData.schedules
- }
- // Return simple workflow data
- if (globalMockData.workflows.length > 0) {
- return globalMockData.workflows
- }
- return [
- {
- id: 'workflow-id',
- userId: 'user-id',
- state: sampleWorkflowState,
- },
- ]
- }),
- })),
- })),
- }
- }),
- update: vi.fn().mockImplementation(() => ({
- set: vi.fn().mockImplementation(() => ({
- where: vi.fn().mockResolvedValue([]),
- })),
- })),
- eq: vi.fn().mockImplementation((field, value) => ({ field, value, type: 'eq' })),
- and: vi.fn().mockImplementation((...conditions) => ({
- conditions,
- type: 'and',
- })),
-}
-
-/**
- * Mock logger using @sim/testing createMockLogger.
- * This provides a consistent mock logger across all API tests.
- */
-export const mockLogger = createSimTestingMockLogger()
-
-export const mockUser = {
- id: 'user-123',
- email: 'test@example.com',
-}
-
-export const mockSubscription = {
- id: 'sub-123',
- plan: 'enterprise',
- status: 'active',
- seats: 5,
- referenceId: 'user-123',
- metadata: {
- perSeatAllowance: 100,
- totalAllowance: 500,
- updatedAt: '2023-01-01T00:00:00.000Z',
- },
-}
-
-export const mockOrganization = {
- id: 'org-456',
- name: 'Test Organization',
- slug: 'test-org',
-}
-
-export const mockAdminMember = {
- id: 'member-123',
- userId: 'user-123',
- organizationId: 'org-456',
- role: 'admin',
-}
-
-export const mockRegularMember = {
- id: 'member-456',
- userId: 'user-123',
- organizationId: 'org-456',
- role: 'member',
-}
-
-export const mockTeamSubscription = {
- id: 'sub-456',
- plan: 'team',
- status: 'active',
- seats: 5,
- referenceId: 'org-123',
-}
-
-export const mockPersonalSubscription = {
- id: 'sub-789',
- plan: 'enterprise',
- status: 'active',
- seats: 5,
- referenceId: 'user-123',
- metadata: {
- perSeatAllowance: 100,
- totalAllowance: 500,
- updatedAt: '2023-01-01T00:00:00.000Z',
- },
-}
-
-export const mockEnvironmentVars = {
- OPENAI_API_KEY: 'encrypted:openai-api-key',
- SERPER_API_KEY: 'encrypted:serper-api-key',
-}
-
-export const mockDecryptedEnvVars = {
- OPENAI_API_KEY: 'sk-test123',
- SERPER_API_KEY: 'serper-test123',
-}
-
-export function createMockRequest(
- method = 'GET',
- body?: any,
- headers: Record = {}
-): NextRequest {
- const url = 'http://localhost:3000/api/test'
-
- return new NextRequest(new URL(url), {
- method,
- headers: new Headers(headers),
- body: body ? JSON.stringify(body) : undefined,
- })
-}
-
-export function mockExecutionDependencies() {
- vi.mock('@/lib/core/security/encryption', () => ({
- decryptSecret: vi.fn().mockImplementation((encrypted: string) => {
- const entries = Object.entries(mockEnvironmentVars)
- const found = entries.find(([_, val]) => val === encrypted)
- const key = found ? found[0] : null
-
- return Promise.resolve({
- decrypted:
- key && key in mockDecryptedEnvVars
- ? mockDecryptedEnvVars[key as keyof typeof mockDecryptedEnvVars]
- : 'decrypted-value',
- })
- }),
- }))
-
- vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
- buildTraceSpans: vi.fn().mockReturnValue({
- traceSpans: [],
- totalDuration: 100,
- }),
- }))
-
- vi.mock('@/lib/workflows/utils', () => ({
- updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
- }))
-
- vi.mock('@/serializer', () => ({
- Serializer: vi.fn().mockImplementation(() => ({
- serializeWorkflow: vi.fn().mockReturnValue({
- version: '1.0',
- blocks: [
- {
- id: 'starter-id',
- metadata: { id: 'starter', name: 'Start' },
- config: {},
- inputs: {},
- outputs: {},
- position: { x: 100, y: 100 },
- enabled: true,
- },
- {
- id: 'agent-id',
- metadata: { id: 'agent', name: 'Agent 1' },
- config: {},
- inputs: {},
- outputs: {},
- position: { x: 634, y: -167 },
- enabled: true,
- },
- ],
- connections: [
- {
- source: 'starter-id',
- target: 'agent-id',
- },
- ],
- loops: {},
- }),
- })),
- }))
-
- vi.mock('@/executor', () => ({
- Executor: vi.fn().mockImplementation(() => ({
- execute: vi.fn().mockResolvedValue({
- success: true,
- output: {
- response: {
- content: 'This is a test response',
- model: 'gpt-4o',
- },
- },
- logs: [],
- metadata: {
- duration: 1000,
- startTime: new Date().toISOString(),
- endTime: new Date().toISOString(),
- },
- }),
- })),
- }))
-
- vi.mock('@sim/db', () => ({
- db: mockDb,
- // Add common schema exports that tests might need
- webhook: {
- id: 'id',
- path: 'path',
- workflowId: 'workflowId',
- isActive: 'isActive',
- provider: 'provider',
- providerConfig: 'providerConfig',
- },
- workflow: {
- id: 'id',
- userId: 'userId',
- },
- workflowSchedule: {
- id: 'id',
- workflowId: 'workflowId',
- nextRunAt: 'nextRunAt',
- status: 'status',
- },
- userStats: {
- userId: 'userId',
- totalScheduledExecutions: 'totalScheduledExecutions',
- lastActive: 'lastActive',
- },
- }))
-}
-
-/**
- * Mock Trigger.dev SDK (tasks.trigger and task factory) for tests that import background modules
- */
-export function mockTriggerDevSdk() {
- vi.mock('@trigger.dev/sdk', () => ({
- tasks: {
- trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
- },
- task: vi.fn().mockReturnValue({}),
- }))
-}
-
-export function mockWorkflowAccessValidation(shouldSucceed = true) {
- if (shouldSucceed) {
- vi.mock('@/app/api/workflows/middleware', () => ({
- validateWorkflowAccess: vi.fn().mockResolvedValue({
- workflow: {
- id: 'workflow-id',
- userId: 'user-id',
- state: sampleWorkflowState,
- },
- }),
- }))
- } else {
- vi.mock('@/app/api/workflows/middleware', () => ({
- validateWorkflowAccess: vi.fn().mockResolvedValue({
- error: {
- message: 'Access denied',
- status: 403,
- },
- }),
- }))
- }
-}
-
-export async function getMockedDependencies() {
- const encryptionModule = await import('@/lib/core/security/encryption')
- const traceSpansModule = await import('@/lib/logs/execution/trace-spans/trace-spans')
- const workflowUtilsModule = await import('@/lib/workflows/utils')
- const executorModule = await import('@/executor')
- const serializerModule = await import('@/serializer')
- const dbModule = await import('@sim/db')
-
- return {
- decryptSecret: encryptionModule.decryptSecret,
- buildTraceSpans: traceSpansModule.buildTraceSpans,
- updateWorkflowRunCounts: workflowUtilsModule.updateWorkflowRunCounts,
- Executor: executorModule.Executor,
- Serializer: serializerModule.Serializer,
- db: dbModule.db,
- }
-}
-
-export function mockScheduleStatusDb({
- schedule = [
- {
- id: 'schedule-id',
- workflowId: 'workflow-id',
- status: 'active',
- failedCount: 0,
- lastRanAt: new Date('2024-01-01T00:00:00.000Z'),
- lastFailedAt: null,
- nextRunAt: new Date('2024-01-02T00:00:00.000Z'),
- },
- ],
- workflow = [
- {
- userId: 'user-id',
- },
- ],
-}: {
- schedule?: any[]
- workflow?: any[]
-} = {}) {
- vi.doMock('@sim/db', () => {
- let callCount = 0
-
- const select = vi.fn().mockImplementation(() => ({
- from: vi.fn().mockImplementation(() => ({
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => {
- callCount += 1
- if (callCount === 1) return schedule
- if (callCount === 2) return workflow
- return []
- }),
- })),
- })),
- }))
-
- return {
- db: { select },
- }
- })
-}
-
-export function mockScheduleExecuteDb({
- schedules = [] as any[],
- workflowRecord = {
- id: 'workflow-id',
- userId: 'user-id',
- state: sampleWorkflowState,
- },
- envRecord = {
- userId: 'user-id',
- variables: {
- OPENAI_API_KEY: 'encrypted:openai-api-key',
- SERPER_API_KEY: 'encrypted:serper-api-key',
- },
- },
-}: {
- schedules?: any[]
- workflowRecord?: any
- envRecord?: any
-}): void {
- vi.doMock('@sim/db', () => {
- const select = vi.fn().mockImplementation(() => ({
- from: vi.fn().mockImplementation((table: any) => {
- const tbl = String(table)
- if (tbl === 'workflow_schedule' || tbl === 'schedule') {
- return {
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => schedules),
- })),
- }
- }
-
- if (tbl === 'workflow') {
- return {
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => [workflowRecord]),
- })),
- }
- }
-
- if (tbl === 'environment') {
- return {
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => [envRecord]),
- })),
- }
- }
-
- return {
- where: vi.fn().mockImplementation(() => ({
- limit: vi.fn().mockImplementation(() => []),
- })),
- }
- }),
- }))
-
- const update = vi.fn().mockImplementation(() => ({
- set: vi.fn().mockImplementation(() => ({
- where: vi.fn().mockResolvedValue([]),
- })),
- }))
-
- return { db: { select, update } }
- })
-}
-
-/**
- * Mock authentication for API tests
- * @param user - Optional user object to use for authenticated requests
- * @returns Object with authentication helper functions
- */
-export function mockAuth(user: MockUser = mockUser): MockAuthResult {
- const mockGetSession = vi.fn()
-
- vi.doMock('@/lib/auth', () => ({
- getSession: mockGetSession,
- }))
-
- const setAuthenticated = (customUser?: MockUser) =>
- mockGetSession.mockResolvedValue({ user: customUser || user })
- const setUnauthenticated = () => mockGetSession.mockResolvedValue(null)
-
- return {
- mockGetSession,
- mockAuthenticatedUser: setAuthenticated,
- mockUnauthenticated: setUnauthenticated,
- setAuthenticated,
- setUnauthenticated,
- }
-}
-
-/**
- * Mock common schema patterns
- */
-export function mockCommonSchemas() {
- vi.doMock('@sim/db/schema', () => ({
- workflowFolder: {
- id: 'id',
- userId: 'userId',
- parentId: 'parentId',
- updatedAt: 'updatedAt',
- workspaceId: 'workspaceId',
- sortOrder: 'sortOrder',
- createdAt: 'createdAt',
- },
- workflow: {
- id: 'id',
- folderId: 'folderId',
- userId: 'userId',
- updatedAt: 'updatedAt',
- },
- account: {
- userId: 'userId',
- providerId: 'providerId',
- },
- user: {
- email: 'email',
- id: 'id',
- },
- }))
-}
-
-/**
- * Mock drizzle-orm operators
- */
-export function mockDrizzleOrm() {
- vi.doMock('drizzle-orm', () => ({
- and: vi.fn((...conditions) => ({ conditions, type: 'and' })),
- eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
- or: vi.fn((...conditions) => ({ type: 'or', conditions })),
- gte: vi.fn((field, value) => ({ type: 'gte', field, value })),
- lte: vi.fn((field, value) => ({ type: 'lte', field, value })),
- asc: vi.fn((field) => ({ field, type: 'asc' })),
- desc: vi.fn((field) => ({ field, type: 'desc' })),
- isNull: vi.fn((field) => ({ field, type: 'isNull' })),
- count: vi.fn((field) => ({ field, type: 'count' })),
- sql: vi.fn((strings, ...values) => ({
- type: 'sql',
- sql: strings,
- values,
- })),
- }))
-}
-
-/**
- * Mock knowledge-related database schemas
- */
-export function mockKnowledgeSchemas() {
- vi.doMock('@sim/db/schema', () => ({
- knowledgeBase: {
- id: 'kb_id',
- userId: 'user_id',
- name: 'kb_name',
- description: 'description',
- tokenCount: 'token_count',
- embeddingModel: 'embedding_model',
- embeddingDimension: 'embedding_dimension',
- chunkingConfig: 'chunking_config',
- workspaceId: 'workspace_id',
- createdAt: 'created_at',
- updatedAt: 'updated_at',
- deletedAt: 'deleted_at',
- },
- document: {
- id: 'doc_id',
- knowledgeBaseId: 'kb_id',
- filename: 'filename',
- fileUrl: 'file_url',
- fileSize: 'file_size',
- mimeType: 'mime_type',
- chunkCount: 'chunk_count',
- tokenCount: 'token_count',
- characterCount: 'character_count',
- processingStatus: 'processing_status',
- processingStartedAt: 'processing_started_at',
- processingCompletedAt: 'processing_completed_at',
- processingError: 'processing_error',
- enabled: 'enabled',
- tag1: 'tag1',
- tag2: 'tag2',
- tag3: 'tag3',
- tag4: 'tag4',
- tag5: 'tag5',
- tag6: 'tag6',
- tag7: 'tag7',
- uploadedAt: 'uploaded_at',
- deletedAt: 'deleted_at',
- },
- embedding: {
- id: 'embedding_id',
- documentId: 'doc_id',
- knowledgeBaseId: 'kb_id',
- chunkIndex: 'chunk_index',
- content: 'content',
- embedding: 'embedding',
- tokenCount: 'token_count',
- characterCount: 'character_count',
- tag1: 'tag1',
- tag2: 'tag2',
- tag3: 'tag3',
- tag4: 'tag4',
- tag5: 'tag5',
- tag6: 'tag6',
- tag7: 'tag7',
- createdAt: 'created_at',
- },
- permissions: {
- id: 'permission_id',
- userId: 'user_id',
- entityType: 'entity_type',
- entityId: 'entity_id',
- permissionType: 'permission_type',
- createdAt: 'created_at',
- updatedAt: 'updated_at',
- },
- }))
-}
-
-/**
- * Mock console logger using the shared mockLogger instance.
- * This ensures tests can assert on the same mockLogger instance exported from this module.
- */
-export function mockConsoleLogger() {
- vi.doMock('@sim/logger', () => ({
- createLogger: vi.fn().mockReturnValue(mockLogger),
- }))
-}
-
-/**
- * Setup common API test mocks (auth, logger, schema, drizzle)
- */
-export function setupCommonApiMocks() {
- mockCommonSchemas()
- mockDrizzleOrm()
- mockConsoleLogger()
-}
-
-/**
- * Mock UUID generation for consistent test results
- */
-export function mockUuid(mockValue = 'test-uuid') {
- vi.doMock('uuid', () => ({
- v4: vi.fn().mockReturnValue(mockValue),
- }))
-}
-
-/**
- * Mock crypto.randomUUID for tests
- */
-export function mockCryptoUuid(mockValue = 'mock-uuid-1234-5678') {
- vi.stubGlobal('crypto', {
- randomUUID: vi.fn().mockReturnValue(mockValue),
- })
-}
-
-/**
- * Mock file system operations
- */
-export function mockFileSystem(
- options: { writeFileSuccess?: boolean; readFileContent?: string; existsResult?: boolean } = {}
-) {
- const { writeFileSuccess = true, readFileContent = 'test content', existsResult = true } = options
-
- vi.doMock('fs/promises', () => ({
- writeFile: vi.fn().mockImplementation(() => {
- if (writeFileSuccess) {
- return Promise.resolve()
- }
- return Promise.reject(new Error('Write failed'))
- }),
- readFile: vi.fn().mockResolvedValue(readFileContent),
- stat: vi.fn().mockResolvedValue({ size: 100, isFile: () => true }),
- access: vi.fn().mockImplementation(() => {
- if (existsResult) {
- return Promise.resolve()
- }
- return Promise.reject(new Error('File not found'))
- }),
- mkdir: vi.fn().mockResolvedValue(undefined),
- }))
-}
-
-/**
- * Mock encryption utilities
- */
-export function mockEncryption(options: { encryptedValue?: string; decryptedValue?: string } = {}) {
- const { encryptedValue = 'encrypted-value', decryptedValue = 'decrypted-value' } = options
-
- vi.doMock('@/lib/core/security/encryption', () => ({
- encryptSecret: vi.fn().mockResolvedValue({ encrypted: encryptedValue }),
- decryptSecret: vi.fn().mockResolvedValue({ decrypted: decryptedValue }),
- }))
-}
-
-/**
- * Interface for storage provider mock configuration
- */
-export interface StorageProviderMockOptions {
- provider?: 's3' | 'blob' | 'local'
- isCloudEnabled?: boolean
- throwError?: boolean
- errorMessage?: string
- presignedUrl?: string
- uploadHeaders?: Record
-}
-
-/**
- * Create storage provider mocks (S3, Blob, Local)
- */
-export function createStorageProviderMocks(options: StorageProviderMockOptions = {}) {
- const {
- provider = 's3',
- isCloudEnabled = true,
- throwError = false,
- errorMessage = 'Storage error',
- presignedUrl = 'https://example.com/presigned-url',
- uploadHeaders = {},
- } = options
-
- mockUuid('mock-uuid-1234')
- mockCryptoUuid('mock-uuid-1234-5678')
-
- const uploadFileMock = vi.fn().mockResolvedValue({
- path: '/api/files/serve/test-key.txt',
- key: 'test-key.txt',
- name: 'test.txt',
- size: 100,
- type: 'text/plain',
- })
- const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
- const deleteFileMock = vi.fn().mockResolvedValue(undefined)
- const hasCloudStorageMock = vi.fn().mockReturnValue(isCloudEnabled)
-
- const generatePresignedUploadUrlMock = vi.fn().mockImplementation((params: any) => {
- const { fileName, context } = params
- const timestamp = Date.now()
- const random = Math.random().toString(36).substring(2, 9)
-
- let key = ''
- if (context === 'knowledge-base') {
- key = `kb/${timestamp}-${random}-${fileName}`
- } else if (context === 'chat') {
- key = `chat/${timestamp}-${random}-${fileName}`
- } else if (context === 'copilot') {
- key = `copilot/${timestamp}-${random}-${fileName}`
- } else if (context === 'workspace') {
- key = `workspace/${timestamp}-${random}-${fileName}`
- } else {
- key = `${timestamp}-${random}-${fileName}`
- }
-
- return Promise.resolve({
- url: presignedUrl,
- key,
- uploadHeaders: uploadHeaders,
- })
- })
-
- const generatePresignedDownloadUrlMock = vi.fn().mockResolvedValue(presignedUrl)
-
- vi.doMock('@/lib/uploads', () => ({
- getStorageProvider: vi.fn().mockReturnValue(provider),
- isUsingCloudStorage: vi.fn().mockReturnValue(isCloudEnabled),
- StorageService: {
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- hasCloudStorage: hasCloudStorageMock,
- generatePresignedUploadUrl: generatePresignedUploadUrlMock,
- generatePresignedDownloadUrl: generatePresignedDownloadUrlMock,
- },
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- getPresignedUrl: vi.fn().mockResolvedValue(presignedUrl),
- hasCloudStorage: hasCloudStorageMock,
- generatePresignedDownloadUrl: generatePresignedDownloadUrlMock,
- }))
-
- vi.doMock('@/lib/uploads/core/storage-service', () => ({
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- hasCloudStorage: hasCloudStorageMock,
- generatePresignedUploadUrl: generatePresignedUploadUrlMock,
- generatePresignedDownloadUrl: generatePresignedDownloadUrlMock,
- StorageService: {
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- hasCloudStorage: hasCloudStorageMock,
- generatePresignedUploadUrl: generatePresignedUploadUrlMock,
- generatePresignedDownloadUrl: generatePresignedDownloadUrlMock,
- },
- }))
-
- vi.doMock('@/lib/uploads/config', () => ({
- USE_S3_STORAGE: provider === 's3',
- USE_BLOB_STORAGE: provider === 'blob',
- USE_LOCAL_STORAGE: provider === 'local',
- getStorageProvider: vi.fn().mockReturnValue(provider),
- S3_CONFIG: {
- bucket: 'test-s3-bucket',
- region: 'us-east-1',
- },
- S3_KB_CONFIG: {
- bucket: 'test-s3-kb-bucket',
- region: 'us-east-1',
- },
- S3_CHAT_CONFIG: {
- bucket: 'test-s3-chat-bucket',
- region: 'us-east-1',
- },
- BLOB_CONFIG: {
- accountName: 'testaccount',
- accountKey: 'testkey',
- containerName: 'test-container',
- },
- BLOB_KB_CONFIG: {
- accountName: 'testaccount',
- accountKey: 'testkey',
- containerName: 'test-kb-container',
- },
- BLOB_CHAT_CONFIG: {
- accountName: 'testaccount',
- accountKey: 'testkey',
- containerName: 'test-chat-container',
- },
- }))
-
- if (provider === 's3') {
- vi.doMock('@/lib/uploads/providers/s3/client', () => ({
- getS3Client: vi.fn().mockReturnValue({}),
- }))
- vi.doMock('@aws-sdk/client-s3', () => ({
- PutObjectCommand: vi.fn(),
- }))
-
- vi.doMock('@aws-sdk/s3-request-presigner', () => ({
- getSignedUrl: vi.fn().mockImplementation(() => {
- if (throwError) {
- return Promise.reject(new Error(errorMessage))
- }
- return Promise.resolve(presignedUrl)
- }),
- }))
- } else if (provider === 'blob') {
- const baseUrl = 'https://testaccount.blob.core.windows.net/test-container'
- const mockBlockBlobClient = {
- url: baseUrl,
- }
- const mockContainerClient = {
- getBlockBlobClient: vi.fn(() => mockBlockBlobClient),
- }
- const mockBlobServiceClient = {
- getContainerClient: vi.fn(() => {
- if (throwError) {
- throw new Error(errorMessage)
- }
- return mockContainerClient
- }),
- }
-
- vi.doMock('@/lib/uploads/providers/blob/client', () => ({
- getBlobServiceClient: vi.fn().mockReturnValue(mockBlobServiceClient),
- }))
- vi.doMock('@azure/storage-blob', () => ({
- BlobSASPermissions: {
- parse: vi.fn(() => 'w'),
- },
- generateBlobSASQueryParameters: vi.fn(() => ({
- toString: () => 'sas-token-string',
- })),
- StorageSharedKeyCredential: vi.fn(),
- }))
- }
-
- return {
- provider,
- isCloudEnabled,
- mockBlobClient: provider === 'blob' ? vi.fn() : undefined,
- mockS3Client: provider === 's3' ? vi.fn() : undefined,
- }
-}
-
-/**
- * Interface for auth API mock configuration with all auth operations
- */
-export interface AuthApiMockOptions {
- operations?: {
- forgetPassword?: {
- success?: boolean
- error?: string
- }
- resetPassword?: {
- success?: boolean
- error?: string
- }
- signIn?: {
- success?: boolean
- error?: string
- }
- signUp?: {
- success?: boolean
- error?: string
- }
- }
-}
-
-/**
- * Interface for comprehensive test setup options
- */
-export interface TestSetupOptions {
- auth?: {
- authenticated?: boolean
- user?: MockUser
- }
- database?: MockDatabaseOptions
- storage?: StorageProviderMockOptions
- authApi?: AuthApiMockOptions
- features?: {
- workflowUtils?: boolean
- fileSystem?: boolean
- uploadUtils?: boolean
- encryption?: boolean
- }
-}
-
-/**
- * Master setup function for comprehensive test mocking
- * This is the preferred setup function for new tests
- */
-export function setupComprehensiveTestMocks(options: TestSetupOptions = {}) {
- const { auth = { authenticated: true }, database = {}, storage, authApi, features = {} } = options
-
- setupCommonApiMocks()
- mockUuid()
- mockCryptoUuid()
-
- const authMocks = mockAuth(auth.user)
- if (auth.authenticated) {
- authMocks.setAuthenticated(auth.user)
- } else {
- authMocks.setUnauthenticated()
- }
-
- const dbMocks = createMockDatabase(database)
-
- let storageMocks
- if (storage) {
- storageMocks = createStorageProviderMocks(storage)
- }
-
- let authApiMocks
- if (authApi) {
- authApiMocks = createAuthApiMocks(authApi)
- }
-
- const featureMocks: any = {}
- if (features.workflowUtils) {
- featureMocks.workflowUtils = mockWorkflowUtils()
- }
- if (features.fileSystem) {
- featureMocks.fileSystem = mockFileSystem()
- }
- if (features.uploadUtils) {
- featureMocks.uploadUtils = mockUploadUtils()
- }
- if (features.encryption) {
- featureMocks.encryption = mockEncryption()
- }
-
- return {
- auth: authMocks,
- database: dbMocks,
- storage: storageMocks,
- authApi: authApiMocks,
- features: featureMocks,
- }
-}
-
-/**
- * Create a more focused and composable database mock
- */
-export function createMockDatabase(options: MockDatabaseOptions = {}) {
- const selectOptions = options.select || { results: [[]], throwError: false }
- const insertOptions = options.insert || { results: [{ id: 'mock-id' }], throwError: false }
- const updateOptions = options.update || { results: [{ id: 'mock-id' }], throwError: false }
- const deleteOptions = options.delete || { results: [{ id: 'mock-id' }], throwError: false }
- const transactionOptions = options.transaction || { throwError: false }
-
- let selectCallCount = 0
-
- const createDbError = (operation: string, message?: string) => {
- return new Error(message || `Database ${operation} error`)
- }
-
- const createSelectChain = () => ({
- from: vi.fn().mockReturnThis(),
- leftJoin: vi.fn().mockReturnThis(),
- innerJoin: vi.fn().mockReturnThis(),
- where: vi.fn().mockReturnThis(),
- groupBy: vi.fn().mockReturnThis(),
- orderBy: vi.fn().mockImplementation(() => {
- if (selectOptions.throwError) {
- return Promise.reject(createDbError('select', selectOptions.errorMessage))
- }
- const result = selectOptions.results?.[selectCallCount] || selectOptions.results?.[0] || []
- selectCallCount++
- return Promise.resolve(result)
- }),
- limit: vi.fn().mockImplementation(() => {
- if (selectOptions.throwError) {
- return Promise.reject(createDbError('select', selectOptions.errorMessage))
- }
- const result = selectOptions.results?.[selectCallCount] || selectOptions.results?.[0] || []
- selectCallCount++
- return Promise.resolve(result)
- }),
- })
-
- const createInsertChain = () => ({
- values: vi.fn().mockImplementation(() => ({
- returning: vi.fn().mockImplementation(() => {
- if (insertOptions.throwError) {
- return Promise.reject(createDbError('insert', insertOptions.errorMessage))
- }
- return Promise.resolve(insertOptions.results)
- }),
- onConflictDoUpdate: vi.fn().mockImplementation(() => {
- if (insertOptions.throwError) {
- return Promise.reject(createDbError('insert', insertOptions.errorMessage))
- }
- return Promise.resolve(insertOptions.results)
- }),
- })),
- })
-
- const createUpdateChain = () => ({
- set: vi.fn().mockImplementation(() => ({
- where: vi.fn().mockImplementation(() => ({
- returning: vi.fn().mockImplementation(() => {
- if (updateOptions.throwError) {
- return Promise.reject(createDbError('update', updateOptions.errorMessage))
- }
- return Promise.resolve(updateOptions.results)
- }),
- then: vi.fn().mockImplementation((resolve) => {
- if (updateOptions.throwError) {
- return Promise.reject(createDbError('update', updateOptions.errorMessage))
- }
- return Promise.resolve(updateOptions.results).then(resolve)
- }),
- })),
- })),
- })
-
- const createDeleteChain = () => ({
- where: vi.fn().mockImplementation(() => {
- if (deleteOptions.throwError) {
- return Promise.reject(createDbError('delete', deleteOptions.errorMessage))
- }
- return Promise.resolve(deleteOptions.results)
- }),
- })
-
- const createTransactionMock = () => {
- return vi.fn().mockImplementation(async (callback: any) => {
- if (transactionOptions.throwError) {
- throw createDbError('transaction', transactionOptions.errorMessage)
- }
-
- const tx = {
- select: vi.fn().mockImplementation(() => createSelectChain()),
- insert: vi.fn().mockImplementation(() => createInsertChain()),
- update: vi.fn().mockImplementation(() => createUpdateChain()),
- delete: vi.fn().mockImplementation(() => createDeleteChain()),
- }
- return await callback(tx)
- })
- }
-
- const mockDb = {
- select: vi.fn().mockImplementation(() => createSelectChain()),
- insert: vi.fn().mockImplementation(() => createInsertChain()),
- update: vi.fn().mockImplementation(() => createUpdateChain()),
- delete: vi.fn().mockImplementation(() => createDeleteChain()),
- transaction: createTransactionMock(),
- }
-
- vi.doMock('@sim/db', () => ({ db: mockDb }))
-
- return {
- mockDb,
- resetSelectCallCount: () => {
- selectCallCount = 0
- },
- }
-}
-
-/**
- * Create comprehensive auth API mocks
- */
-export function createAuthApiMocks(options: AuthApiMockOptions = {}) {
- const { operations = {} } = options
-
- const defaultOperations = {
- forgetPassword: { success: true, error: 'Forget password error' },
- resetPassword: { success: true, error: 'Reset password error' },
- signIn: { success: true, error: 'Sign in error' },
- signUp: { success: true, error: 'Sign up error' },
- ...operations,
- }
-
- const createAuthMethod = (operation: string, config: { success?: boolean; error?: string }) => {
- return vi.fn().mockImplementation(() => {
- if (config.success) {
- return Promise.resolve()
- }
- return Promise.reject(new Error(config.error))
- })
- }
-
- vi.doMock('@/lib/auth', () => ({
- auth: {
- api: {
- forgetPassword: createAuthMethod('forgetPassword', defaultOperations.forgetPassword),
- resetPassword: createAuthMethod('resetPassword', defaultOperations.resetPassword),
- signIn: createAuthMethod('signIn', defaultOperations.signIn),
- signUp: createAuthMethod('signUp', defaultOperations.signUp),
- },
- },
- }))
-
- return {
- operations: defaultOperations,
- }
-}
-
-/**
- * Mock workflow utilities and response helpers
- */
-export function mockWorkflowUtils() {
- vi.doMock('@/app/api/workflows/utils', () => ({
- createSuccessResponse: vi.fn().mockImplementation((data) => {
- return new Response(JSON.stringify(data), {
- status: 200,
- headers: { 'Content-Type': 'application/json' },
- })
- }),
- createErrorResponse: vi.fn().mockImplementation((message, status = 500) => {
- return new Response(JSON.stringify({ error: message }), {
- status,
- headers: { 'Content-Type': 'application/json' },
- })
- }),
- }))
-}
-
-/**
- * Setup grouped mocks for knowledge base operations
- */
-export function setupKnowledgeMocks(
- options: {
- withDocumentProcessing?: boolean
- withEmbedding?: boolean
- accessCheckResult?: boolean
- } = {}
-) {
- const {
- withDocumentProcessing = false,
- withEmbedding = false,
- accessCheckResult = true,
- } = options
-
- const mocks: any = {
- checkKnowledgeBaseAccess: vi.fn().mockResolvedValue(accessCheckResult),
- }
-
- if (withDocumentProcessing) {
- mocks.processDocumentAsync = vi.fn().mockResolvedValue(undefined)
- }
-
- if (withEmbedding) {
- mocks.generateEmbedding = vi.fn().mockResolvedValue([0.1, 0.2, 0.3])
- }
-
- vi.doMock('@/app/api/knowledge/utils', () => mocks)
-
- return mocks
-}
-
-/**
- * Setup for file-related API routes
- */
-export function setupFileApiMocks(
- options: {
- authenticated?: boolean
- storageProvider?: 's3' | 'blob' | 'local'
- cloudEnabled?: boolean
- } = {}
-) {
- const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
-
- setupCommonApiMocks()
- mockUuid()
- mockCryptoUuid()
-
- const authMocks = mockAuth()
- if (authenticated) {
- authMocks.setAuthenticated()
- } else {
- authMocks.setUnauthenticated()
- }
-
- vi.doMock('@/lib/auth/hybrid', () => ({
- checkHybridAuth: vi.fn().mockResolvedValue({
- success: authenticated,
- userId: authenticated ? 'test-user-id' : undefined,
- error: authenticated ? undefined : 'Unauthorized',
- }),
- }))
-
- vi.doMock('@/app/api/files/authorization', () => ({
- verifyFileAccess: vi.fn().mockResolvedValue(true),
- verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
- verifyKBFileAccess: vi.fn().mockResolvedValue(true),
- verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
- lookupWorkspaceFileByKey: vi.fn().mockResolvedValue({
- workspaceId: 'test-workspace-id',
- uploadedBy: 'test-user-id',
- }),
- }))
-
- vi.doMock('@/lib/uploads/contexts/workspace', () => ({
- uploadWorkspaceFile: vi.fn().mockResolvedValue({
- id: 'test-file-id',
- name: 'test.txt',
- url: '/api/files/serve/workspace/test-workspace-id/test-file.txt',
- size: 100,
- type: 'text/plain',
- key: 'workspace/test-workspace-id/1234567890-test.txt',
- uploadedAt: new Date().toISOString(),
- expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
- }),
- }))
-
- mockFileSystem({
- writeFileSuccess: true,
- readFileContent: 'test content',
- existsResult: true,
- })
-
- let storageMocks
- if (storageProvider) {
- storageMocks = createStorageProviderMocks({
- provider: storageProvider,
- isCloudEnabled: cloudEnabled,
- })
- } else {
- const uploadFileMock = vi.fn().mockResolvedValue({
- path: '/api/files/serve/test-key.txt',
- key: 'test-key.txt',
- name: 'test.txt',
- size: 100,
- type: 'text/plain',
- })
- const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
- const deleteFileMock = vi.fn().mockResolvedValue(undefined)
- const hasCloudStorageMock = vi.fn().mockReturnValue(cloudEnabled)
-
- vi.doMock('@/lib/uploads', () => ({
- getStorageProvider: vi.fn().mockReturnValue('local'),
- isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
- StorageService: {
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- hasCloudStorage: hasCloudStorageMock,
- generatePresignedUploadUrl: vi.fn().mockResolvedValue({
- presignedUrl: 'https://example.com/presigned-url',
- key: 'test-key.txt',
- }),
- generatePresignedDownloadUrl: vi
- .fn()
- .mockResolvedValue('https://example.com/presigned-url'),
- },
- uploadFile: uploadFileMock,
- downloadFile: downloadFileMock,
- deleteFile: deleteFileMock,
- getPresignedUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'),
- hasCloudStorage: hasCloudStorageMock,
- }))
- }
-
- return {
- auth: authMocks,
- storage: storageMocks,
- }
-}
-
-/**
- * Setup for auth-related API routes
- */
-export function setupAuthApiMocks(options: { operations?: AuthApiMockOptions['operations'] } = {}) {
- return setupComprehensiveTestMocks({
- auth: { authenticated: false }, // Auth routes typically don't require authentication
- authApi: { operations: options.operations },
- })
-}
-
-/**
- * Setup for knowledge base API routes
- */
-export function setupKnowledgeApiMocks(
- options: {
- authenticated?: boolean
- withDocumentProcessing?: boolean
- withEmbedding?: boolean
- } = {}
-) {
- const mocks = setupComprehensiveTestMocks({
- auth: { authenticated: options.authenticated ?? true },
- database: {
- select: { results: [[]] },
- },
- })
-
- const knowledgeMocks = setupKnowledgeMocks({
- withDocumentProcessing: options.withDocumentProcessing,
- withEmbedding: options.withEmbedding,
- })
-
- return {
- ...mocks,
- knowledge: knowledgeMocks,
- }
-}
-
-export function setupApiTestMocks(
- options: {
- authenticated?: boolean
- user?: MockUser
- dbResults?: any[][]
- withWorkflowUtils?: boolean
- withFileSystem?: boolean
- withUploadUtils?: boolean
- } = {}
-) {
- const {
- authenticated = true,
- user = mockUser,
- dbResults = [[]],
- withWorkflowUtils = false,
- withFileSystem = false,
- withUploadUtils = false,
- } = options
-
- return setupComprehensiveTestMocks({
- auth: { authenticated, user },
- database: { select: { results: dbResults } },
- features: {
- workflowUtils: withWorkflowUtils,
- fileSystem: withFileSystem,
- uploadUtils: withUploadUtils,
- },
- })
-}
-
-export function mockUploadUtils(
- options: { isCloudStorage?: boolean; uploadResult?: any; uploadError?: boolean } = {}
-) {
- const {
- isCloudStorage = false,
- uploadResult = {
- path: '/api/files/serve/test-key.txt',
- key: 'test-key.txt',
- name: 'test.txt',
- size: 100,
- type: 'text/plain',
- },
- uploadError = false,
- } = options
-
- const uploadFileMock = vi.fn().mockImplementation(() => {
- if (uploadError) {
- return Promise.reject(new Error('Upload failed'))
- }
- return Promise.resolve(uploadResult)
- })
-
- vi.doMock('@/lib/uploads', () => ({
- StorageService: {
- uploadFile: uploadFileMock,
- downloadFile: vi.fn().mockResolvedValue(Buffer.from('test content')),
- deleteFile: vi.fn().mockResolvedValue(undefined),
- hasCloudStorage: vi.fn().mockReturnValue(isCloudStorage),
- },
- uploadFile: uploadFileMock,
- isUsingCloudStorage: vi.fn().mockReturnValue(isCloudStorage),
- }))
-
- vi.doMock('@/lib/uploads/config', () => ({
- UPLOAD_DIR: '/test/uploads',
- USE_S3_STORAGE: isCloudStorage,
- USE_BLOB_STORAGE: false,
- S3_CONFIG: {
- bucket: 'test-bucket',
- region: 'test-region',
- },
- }))
-}
-
-export function createMockTransaction(
- mockData: {
- selectData?: DatabaseSelectResult[]
- insertResult?: DatabaseInsertResult[]
- updateResult?: DatabaseUpdateResult[]
- deleteResult?: DatabaseDeleteResult[]
- } = {}
-) {
- const { selectData = [], insertResult = [], updateResult = [], deleteResult = [] } = mockData
-
- return vi.fn().mockImplementation(async (callback: any) => {
- const tx = {
- select: vi.fn().mockReturnValue({
- from: vi.fn().mockReturnValue({
- where: vi.fn().mockReturnValue({
- orderBy: vi.fn().mockReturnValue({
- limit: vi.fn().mockReturnValue(selectData),
- }),
- }),
- }),
- }),
- insert: vi.fn().mockReturnValue({
- values: vi.fn().mockReturnValue({
- returning: vi.fn().mockReturnValue(insertResult),
- }),
- }),
- update: vi.fn().mockReturnValue({
- set: vi.fn().mockReturnValue({
- where: vi.fn().mockReturnValue(updateResult),
- }),
- }),
- delete: vi.fn().mockReturnValue({
- where: vi.fn().mockReturnValue(deleteResult),
- }),
- }
- return await callback(tx)
- })
-}
diff --git a/apps/sim/app/api/auth/forget-password/route.test.ts b/apps/sim/app/api/auth/forget-password/route.test.ts
index 36cbb3e0e8..7f08c76e3e 100644
--- a/apps/sim/app/api/auth/forget-password/route.test.ts
+++ b/apps/sim/app/api/auth/forget-password/route.test.ts
@@ -3,13 +3,60 @@
*
* @vitest-environment node
*/
+import {
+ createMockRequest,
+ mockConsoleLogger,
+ mockCryptoUuid,
+ mockDrizzleOrm,
+ mockUuid,
+ setupCommonApiMocks,
+} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/core/utils/urls', () => ({
getBaseUrl: vi.fn(() => 'https://app.example.com'),
}))
+/** Setup auth API mocks for testing authentication routes */
+function setupAuthApiMocks(
+ options: {
+ operations?: {
+ forgetPassword?: { success?: boolean; error?: string }
+ resetPassword?: { success?: boolean; error?: string }
+ }
+ } = {}
+) {
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+ mockConsoleLogger()
+ mockDrizzleOrm()
+
+ const { operations = {} } = options
+ const defaultOperations = {
+ forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
+ resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
+ }
+
+ const createAuthMethod = (config: { success?: boolean; error?: string }) => {
+ return vi.fn().mockImplementation(() => {
+ if (config.success) {
+ return Promise.resolve()
+ }
+ return Promise.reject(new Error(config.error))
+ })
+ }
+
+ vi.doMock('@/lib/auth', () => ({
+ auth: {
+ api: {
+ forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
+ resetPassword: createAuthMethod(defaultOperations.resetPassword),
+ },
+ },
+ }))
+}
+
describe('Forget Password API Route', () => {
beforeEach(() => {
vi.resetModules()
diff --git a/apps/sim/app/api/auth/oauth/connections/route.test.ts b/apps/sim/app/api/auth/oauth/connections/route.test.ts
index 35bdcbc152..688f72edc7 100644
--- a/apps/sim/app/api/auth/oauth/connections/route.test.ts
+++ b/apps/sim/app/api/auth/oauth/connections/route.test.ts
@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
+import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Connections API Route', () => {
const mockGetSession = vi.fn()
diff --git a/apps/sim/app/api/auth/oauth/credentials/route.test.ts b/apps/sim/app/api/auth/oauth/credentials/route.test.ts
index 93aceaccc1..c83ed6625a 100644
--- a/apps/sim/app/api/auth/oauth/credentials/route.test.ts
+++ b/apps/sim/app/api/auth/oauth/credentials/route.test.ts
@@ -4,9 +4,9 @@
* @vitest-environment node
*/
+import { createMockLogger } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockLogger } from '@/app/api/__test-utils__/utils'
describe('OAuth Credentials API Route', () => {
const mockGetSession = vi.fn()
diff --git a/apps/sim/app/api/auth/oauth/disconnect/route.test.ts b/apps/sim/app/api/auth/oauth/disconnect/route.test.ts
index 7f625d2539..9a504982af 100644
--- a/apps/sim/app/api/auth/oauth/disconnect/route.test.ts
+++ b/apps/sim/app/api/auth/oauth/disconnect/route.test.ts
@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
+import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Disconnect API Route', () => {
const mockGetSession = vi.fn()
diff --git a/apps/sim/app/api/auth/oauth/token/route.test.ts b/apps/sim/app/api/auth/oauth/token/route.test.ts
index 7359361a40..c5032fc326 100644
--- a/apps/sim/app/api/auth/oauth/token/route.test.ts
+++ b/apps/sim/app/api/auth/oauth/token/route.test.ts
@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
+import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Token API Routes', () => {
const mockGetUserId = vi.fn()
diff --git a/apps/sim/app/api/auth/reset-password/route.test.ts b/apps/sim/app/api/auth/reset-password/route.test.ts
index 9c9f2df5f9..18c4404440 100644
--- a/apps/sim/app/api/auth/reset-password/route.test.ts
+++ b/apps/sim/app/api/auth/reset-password/route.test.ts
@@ -3,8 +3,55 @@
*
* @vitest-environment node
*/
+import {
+ createMockRequest,
+ mockConsoleLogger,
+ mockCryptoUuid,
+ mockDrizzleOrm,
+ mockUuid,
+ setupCommonApiMocks,
+} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
+
+/** Setup auth API mocks for testing authentication routes */
+function setupAuthApiMocks(
+ options: {
+ operations?: {
+ forgetPassword?: { success?: boolean; error?: string }
+ resetPassword?: { success?: boolean; error?: string }
+ }
+ } = {}
+) {
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+ mockConsoleLogger()
+ mockDrizzleOrm()
+
+ const { operations = {} } = options
+ const defaultOperations = {
+ forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
+ resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
+ }
+
+ const createAuthMethod = (config: { success?: boolean; error?: string }) => {
+ return vi.fn().mockImplementation(() => {
+ if (config.success) {
+ return Promise.resolve()
+ }
+ return Promise.reject(new Error(config.error))
+ })
+ }
+
+ vi.doMock('@/lib/auth', () => ({
+ auth: {
+ api: {
+ forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
+ resetPassword: createAuthMethod(defaultOperations.resetPassword),
+ },
+ },
+ }))
+}
describe('Reset Password API Route', () => {
beforeEach(() => {
diff --git a/apps/sim/app/api/chat/[identifier]/route.test.ts b/apps/sim/app/api/chat/[identifier]/route.test.ts
index efc89bc0f4..5a753fd4d9 100644
--- a/apps/sim/app/api/chat/[identifier]/route.test.ts
+++ b/apps/sim/app/api/chat/[identifier]/route.test.ts
@@ -5,7 +5,34 @@
*/
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest } from '@/app/api/__test-utils__/utils'
+
+/**
+ * Creates a mock NextRequest with cookies support for testing.
+ */
+function createMockNextRequest(
+ method = 'GET',
+ body?: unknown,
+ headers: Record = {},
+ url = 'http://localhost:3000/api/test'
+): any {
+ const headersObj = new Headers({
+ 'Content-Type': 'application/json',
+ ...headers,
+ })
+
+ return {
+ method,
+ headers: headersObj,
+ cookies: {
+ get: vi.fn().mockReturnValue(undefined),
+ },
+ json:
+ body !== undefined
+ ? vi.fn().mockResolvedValue(body)
+ : vi.fn().mockRejectedValue(new Error('No body')),
+ url,
+ }
+}
const createMockStream = () => {
return new ReadableStream({
@@ -71,10 +98,15 @@ vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
}))
+vi.mock('@/lib/core/security/encryption', () => ({
+ decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
+}))
+
describe('Chat Identifier API Route', () => {
const mockAddCorsHeaders = vi.fn().mockImplementation((response) => response)
const mockValidateChatAuth = vi.fn().mockResolvedValue({ authorized: true })
const mockSetChatAuthCookie = vi.fn()
+ const mockValidateAuthToken = vi.fn().mockReturnValue(false)
const mockChatResult = [
{
@@ -114,11 +146,16 @@ describe('Chat Identifier API Route', () => {
beforeEach(() => {
vi.resetModules()
- vi.doMock('@/app/api/chat/utils', () => ({
+ vi.doMock('@/lib/core/security/deployment', () => ({
addCorsHeaders: mockAddCorsHeaders,
+ validateAuthToken: mockValidateAuthToken,
+ setDeploymentAuthCookie: vi.fn(),
+ isEmailAllowed: vi.fn().mockReturnValue(false),
+ }))
+
+ vi.doMock('@/app/api/chat/utils', () => ({
validateChatAuth: mockValidateChatAuth,
setChatAuthCookie: mockSetChatAuthCookie,
- validateAuthToken: vi.fn().mockReturnValue(true),
}))
// Mock logger - use loggerMock from @sim/testing
@@ -175,7 +212,7 @@ describe('Chat Identifier API Route', () => {
describe('GET endpoint', () => {
it('should return chat info for a valid identifier', async () => {
- const req = createMockRequest('GET')
+ const req = createMockNextRequest('GET')
const params = Promise.resolve({ identifier: 'test-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -206,7 +243,7 @@ describe('Chat Identifier API Route', () => {
}
})
- const req = createMockRequest('GET')
+ const req = createMockNextRequest('GET')
const params = Promise.resolve({ identifier: 'nonexistent' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -240,7 +277,7 @@ describe('Chat Identifier API Route', () => {
}
})
- const req = createMockRequest('GET')
+ const req = createMockNextRequest('GET')
const params = Promise.resolve({ identifier: 'inactive-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -261,7 +298,7 @@ describe('Chat Identifier API Route', () => {
error: 'auth_required_password',
}))
- const req = createMockRequest('GET')
+ const req = createMockNextRequest('GET')
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -282,7 +319,7 @@ describe('Chat Identifier API Route', () => {
describe('POST endpoint', () => {
it('should handle authentication requests without input', async () => {
- const req = createMockRequest('POST', { password: 'test-password' })
+ const req = createMockNextRequest('POST', { password: 'test-password' })
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -298,7 +335,7 @@ describe('Chat Identifier API Route', () => {
})
it('should return 400 for requests without input', async () => {
- const req = createMockRequest('POST', {})
+ const req = createMockNextRequest('POST', {})
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -319,7 +356,7 @@ describe('Chat Identifier API Route', () => {
error: 'Authentication required',
}))
- const req = createMockRequest('POST', { input: 'Hello' })
+ const req = createMockNextRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -350,7 +387,7 @@ describe('Chat Identifier API Route', () => {
},
})
- const req = createMockRequest('POST', { input: 'Hello' })
+ const req = createMockNextRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -369,7 +406,10 @@ describe('Chat Identifier API Route', () => {
})
it('should return streaming response for valid chat messages', async () => {
- const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
+ const req = createMockNextRequest('POST', {
+ input: 'Hello world',
+ conversationId: 'conv-123',
+ })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -401,7 +441,7 @@ describe('Chat Identifier API Route', () => {
}, 10000)
it('should handle streaming response body correctly', async () => {
- const req = createMockRequest('POST', { input: 'Hello world' })
+ const req = createMockNextRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -431,7 +471,7 @@ describe('Chat Identifier API Route', () => {
throw new Error('Execution failed')
})
- const req = createMockRequest('POST', { input: 'Trigger error' })
+ const req = createMockNextRequest('POST', { input: 'Trigger error' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -470,7 +510,7 @@ describe('Chat Identifier API Route', () => {
})
it('should pass conversationId to streaming execution when provided', async () => {
- const req = createMockRequest('POST', {
+ const req = createMockNextRequest('POST', {
input: 'Hello world',
conversationId: 'test-conversation-123',
})
@@ -492,7 +532,7 @@ describe('Chat Identifier API Route', () => {
})
it('should handle missing conversationId gracefully', async () => {
- const req = createMockRequest('POST', { input: 'Hello world' })
+ const req = createMockNextRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
diff --git a/apps/sim/app/api/copilot/api-keys/route.test.ts b/apps/sim/app/api/copilot/api-keys/route.test.ts
index b5d27be6e1..8b8f630a09 100644
--- a/apps/sim/app/api/copilot/api-keys/route.test.ts
+++ b/apps/sim/app/api/copilot/api-keys/route.test.ts
@@ -3,9 +3,9 @@
*
* @vitest-environment node
*/
+import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot API Keys API Route', () => {
const mockFetch = vi.fn()
diff --git a/apps/sim/app/api/copilot/chat/delete/route.test.ts b/apps/sim/app/api/copilot/chat/delete/route.test.ts
index af36cfb5e0..3b19bc262e 100644
--- a/apps/sim/app/api/copilot/chat/delete/route.test.ts
+++ b/apps/sim/app/api/copilot/chat/delete/route.test.ts
@@ -3,14 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockAuth,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Delete API Route', () => {
const mockDelete = vi.fn()
diff --git a/apps/sim/app/api/copilot/chat/route.ts b/apps/sim/app/api/copilot/chat/route.ts
index c29d149e08..9d31bf5c36 100644
--- a/apps/sim/app/api/copilot/chat/route.ts
+++ b/apps/sim/app/api/copilot/chat/route.ts
@@ -8,6 +8,7 @@ import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
+import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
@@ -40,34 +41,8 @@ const ChatMessageSchema = z.object({
userMessageId: z.string().optional(), // ID from frontend for the user message
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
- model: z
- .enum([
- 'gpt-5-fast',
- 'gpt-5',
- 'gpt-5-medium',
- 'gpt-5-high',
- 'gpt-5.1-fast',
- 'gpt-5.1',
- 'gpt-5.1-medium',
- 'gpt-5.1-high',
- 'gpt-5-codex',
- 'gpt-5.1-codex',
- 'gpt-5.2',
- 'gpt-5.2-codex',
- 'gpt-5.2-pro',
- 'gpt-4o',
- 'gpt-4.1',
- 'o3',
- 'claude-4-sonnet',
- 'claude-4.5-haiku',
- 'claude-4.5-sonnet',
- 'claude-4.5-opus',
- 'claude-4.1-opus',
- 'gemini-3-pro',
- ])
- .optional()
- .default('claude-4.5-opus'),
- mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
+ model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
+ mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
@@ -295,7 +270,8 @@ export async function POST(req: NextRequest) {
}
const defaults = getCopilotModel('chat')
- const modelToUse = env.COPILOT_MODEL || defaults.model
+ const selectedModel = model || defaults.model
+ const envModel = env.COPILOT_MODEL || defaults.model
let providerConfig: CopilotProviderConfig | undefined
const providerEnv = env.COPILOT_PROVIDER as any
@@ -304,7 +280,7 @@ export async function POST(req: NextRequest) {
if (providerEnv === 'azure-openai') {
providerConfig = {
provider: 'azure-openai',
- model: modelToUse,
+ model: envModel,
apiKey: env.AZURE_OPENAI_API_KEY,
apiVersion: 'preview',
endpoint: env.AZURE_OPENAI_ENDPOINT,
@@ -312,7 +288,7 @@ export async function POST(req: NextRequest) {
} else if (providerEnv === 'vertex') {
providerConfig = {
provider: 'vertex',
- model: modelToUse,
+ model: envModel,
apiKey: env.COPILOT_API_KEY,
vertexProject: env.VERTEX_PROJECT,
vertexLocation: env.VERTEX_LOCATION,
@@ -320,12 +296,15 @@ export async function POST(req: NextRequest) {
} else {
providerConfig = {
provider: providerEnv,
- model: modelToUse,
+ model: selectedModel,
apiKey: env.COPILOT_API_KEY,
}
}
}
+ const effectiveMode = mode === 'agent' ? 'build' : mode
+ const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
+
// Determine conversationId to use for this request
const effectiveConversationId =
(currentChat?.conversationId as string | undefined) || conversationId
@@ -345,7 +324,7 @@ export async function POST(req: NextRequest) {
}
} | null = null
- if (mode === 'agent') {
+ if (effectiveMode === 'build') {
// Build base tools (executed locally, not deferred)
// Include function_execute for code execution capability
baseTools = [
@@ -452,8 +431,8 @@ export async function POST(req: NextRequest) {
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
- model: model,
- mode: mode,
+ model: selectedModel,
+ mode: transportMode,
messageId: userMessageIdToUse,
version: SIM_AGENT_VERSION,
...(providerConfig ? { provider: providerConfig } : {}),
@@ -477,7 +456,7 @@ export async function POST(req: NextRequest) {
hasConversationId: !!effectiveConversationId,
hasFileAttachments: processedFileContents.length > 0,
messageLength: message.length,
- mode,
+ mode: effectiveMode,
hasTools: integrationTools.length > 0,
toolCount: integrationTools.length,
hasBaseTools: baseTools.length > 0,
diff --git a/apps/sim/app/api/copilot/chat/update-messages/route.test.ts b/apps/sim/app/api/copilot/chat/update-messages/route.test.ts
index 4ab1e654b9..a196215307 100644
--- a/apps/sim/app/api/copilot/chat/update-messages/route.test.ts
+++ b/apps/sim/app/api/copilot/chat/update-messages/route.test.ts
@@ -3,14 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockAuth,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Update Messages API Route', () => {
const mockSelect = vi.fn()
diff --git a/apps/sim/app/api/copilot/chat/update-messages/route.ts b/apps/sim/app/api/copilot/chat/update-messages/route.ts
index 217ba0b058..4eceb7ea4b 100644
--- a/apps/sim/app/api/copilot/chat/update-messages/route.ts
+++ b/apps/sim/app/api/copilot/chat/update-messages/route.ts
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { COPILOT_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createInternalServerErrorResponse,
@@ -45,7 +46,7 @@ const UpdateMessagesSchema = z.object({
planArtifact: z.string().nullable().optional(),
config: z
.object({
- mode: z.enum(['ask', 'build', 'plan']).optional(),
+ mode: z.enum(COPILOT_MODES).optional(),
model: z.string().optional(),
})
.nullable()
diff --git a/apps/sim/app/api/copilot/chats/route.test.ts b/apps/sim/app/api/copilot/chats/route.test.ts
index 8cc3bb04e5..71e74e053b 100644
--- a/apps/sim/app/api/copilot/chats/route.test.ts
+++ b/apps/sim/app/api/copilot/chats/route.test.ts
@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
+import { mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot Chats List API Route', () => {
const mockSelect = vi.fn()
diff --git a/apps/sim/app/api/copilot/checkpoints/revert/route.test.ts b/apps/sim/app/api/copilot/checkpoints/revert/route.test.ts
index 9725413985..cd5c46d9e1 100644
--- a/apps/sim/app/api/copilot/checkpoints/revert/route.test.ts
+++ b/apps/sim/app/api/copilot/checkpoints/revert/route.test.ts
@@ -3,14 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockAuth,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints Revert API Route', () => {
const mockSelect = vi.fn()
diff --git a/apps/sim/app/api/copilot/checkpoints/route.test.ts b/apps/sim/app/api/copilot/checkpoints/route.test.ts
index a344573398..5a15e37b13 100644
--- a/apps/sim/app/api/copilot/checkpoints/route.test.ts
+++ b/apps/sim/app/api/copilot/checkpoints/route.test.ts
@@ -3,14 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockAuth,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints API Route', () => {
const mockSelect = vi.fn()
diff --git a/apps/sim/app/api/copilot/confirm/route.test.ts b/apps/sim/app/api/copilot/confirm/route.test.ts
index 6fc1bfa7e8..5bb9efd684 100644
--- a/apps/sim/app/api/copilot/confirm/route.test.ts
+++ b/apps/sim/app/api/copilot/confirm/route.test.ts
@@ -3,14 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockAuth,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Confirm API Route', () => {
const mockRedisExists = vi.fn()
diff --git a/apps/sim/app/api/copilot/execute-tool/route.ts b/apps/sim/app/api/copilot/execute-tool/route.ts
index b737b196de..c8205821fb 100644
--- a/apps/sim/app/api/copilot/execute-tool/route.ts
+++ b/apps/sim/app/api/copilot/execute-tool/route.ts
@@ -14,8 +14,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
-import { REFERENCE } from '@/executor/constants'
-import { createEnvVarPattern } from '@/executor/utils/reference-validation'
+import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { getTool, resolveToolId } from '@/tools/utils'
@@ -28,45 +27,6 @@ const ExecuteToolSchema = z.object({
workflowId: z.string().optional(),
})
-/**
- * Resolves all {{ENV_VAR}} references in a value recursively
- * Works with strings, arrays, and objects
- */
-function resolveEnvVarReferences(value: any, envVars: Record): any {
- if (typeof value === 'string') {
- // Check for exact match: entire string is "{{VAR_NAME}}"
- const exactMatchPattern = new RegExp(
- `^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
- )
- const exactMatch = exactMatchPattern.exec(value)
- if (exactMatch) {
- const envVarName = exactMatch[1].trim()
- return envVars[envVarName] ?? value
- }
-
- // Check for embedded references: "prefix {{VAR}} suffix"
- const envVarPattern = createEnvVarPattern()
- return value.replace(envVarPattern, (match, varName) => {
- const trimmedName = varName.trim()
- return envVars[trimmedName] ?? match
- })
- }
-
- if (Array.isArray(value)) {
- return value.map((item) => resolveEnvVarReferences(item, envVars))
- }
-
- if (value !== null && typeof value === 'object') {
- const resolved: Record = {}
- for (const [key, val] of Object.entries(value)) {
- resolved[key] = resolveEnvVarReferences(val, envVars)
- }
- return resolved
- }
-
- return value
-}
-
export async function POST(req: NextRequest) {
const tracker = createRequestTracker()
@@ -145,7 +105,17 @@ export async function POST(req: NextRequest) {
// Build execution params starting with LLM-provided arguments
// Resolve all {{ENV_VAR}} references in the arguments
- const executionParams: Record = resolveEnvVarReferences(toolArgs, decryptedEnvVars)
+ const executionParams: Record = resolveEnvVarReferences(
+ toolArgs,
+ decryptedEnvVars,
+ {
+ resolveExactMatch: true,
+ allowEmbedded: true,
+ trimKeys: true,
+ onMissing: 'keep',
+ deep: true,
+ }
+ ) as Record
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
toolName,
diff --git a/apps/sim/app/api/copilot/feedback/route.test.ts b/apps/sim/app/api/copilot/feedback/route.test.ts
index 547d5cd3b9..5752d7a5af 100644
--- a/apps/sim/app/api/copilot/feedback/route.test.ts
+++ b/apps/sim/app/api/copilot/feedback/route.test.ts
@@ -3,13 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Feedback API Route', () => {
const mockInsert = vi.fn()
diff --git a/apps/sim/app/api/copilot/stats/route.test.ts b/apps/sim/app/api/copilot/stats/route.test.ts
index 0d06c5edd9..35a0ad1dfc 100644
--- a/apps/sim/app/api/copilot/stats/route.test.ts
+++ b/apps/sim/app/api/copilot/stats/route.test.ts
@@ -3,13 +3,9 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- mockCryptoUuid,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
describe('Copilot Stats API Route', () => {
const mockFetch = vi.fn()
diff --git a/apps/sim/app/api/copilot/user-models/route.ts b/apps/sim/app/api/copilot/user-models/route.ts
index 5e2f22f13d..ead14a5e9d 100644
--- a/apps/sim/app/api/copilot/user-models/route.ts
+++ b/apps/sim/app/api/copilot/user-models/route.ts
@@ -2,12 +2,13 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
+import type { CopilotModelId } from '@/lib/copilot/models'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'
const logger = createLogger('CopilotUserModelsAPI')
-const DEFAULT_ENABLED_MODELS: Record = {
+const DEFAULT_ENABLED_MODELS: Record = {
'gpt-4o': false,
'gpt-4.1': false,
'gpt-5-fast': false,
@@ -28,7 +29,7 @@ const DEFAULT_ENABLED_MODELS: Record = {
'claude-4.5-haiku': true,
'claude-4.5-sonnet': true,
'claude-4.5-opus': true,
- // 'claude-4.1-opus': true,
+ 'claude-4.1-opus': false,
'gemini-3-pro': true,
}
@@ -54,7 +55,9 @@ export async function GET(request: NextRequest) {
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
- mergedModels[modelId] = enabled
+ if (modelId in mergedModels) {
+ mergedModels[modelId as CopilotModelId] = enabled
+ }
}
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(
diff --git a/apps/sim/app/api/files/delete/route.test.ts b/apps/sim/app/api/files/delete/route.test.ts
index 150358c4d2..669ea86ad4 100644
--- a/apps/sim/app/api/files/delete/route.test.ts
+++ b/apps/sim/app/api/files/delete/route.test.ts
@@ -1,5 +1,87 @@
+import {
+ createMockRequest,
+ mockAuth,
+ mockCryptoUuid,
+ mockUuid,
+ setupCommonApiMocks,
+} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
+
+/** Setup file API mocks for file delete tests */
+function setupFileApiMocks(
+ options: {
+ authenticated?: boolean
+ storageProvider?: 's3' | 'blob' | 'local'
+ cloudEnabled?: boolean
+ } = {}
+) {
+ const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
+
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+
+ const authMocks = mockAuth()
+ if (authenticated) {
+ authMocks.setAuthenticated()
+ } else {
+ authMocks.setUnauthenticated()
+ }
+
+ vi.doMock('@/lib/auth/hybrid', () => ({
+ checkHybridAuth: vi.fn().mockResolvedValue({
+ success: authenticated,
+ userId: authenticated ? 'test-user-id' : undefined,
+ error: authenticated ? undefined : 'Unauthorized',
+ }),
+ }))
+
+ vi.doMock('@/app/api/files/authorization', () => ({
+ verifyFileAccess: vi.fn().mockResolvedValue(true),
+ verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
+ }))
+
+ const uploadFileMock = vi.fn().mockResolvedValue({
+ path: '/api/files/serve/test-key.txt',
+ key: 'test-key.txt',
+ name: 'test.txt',
+ size: 100,
+ type: 'text/plain',
+ })
+ const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
+ const deleteFileMock = vi.fn().mockResolvedValue(undefined)
+ const hasCloudStorageMock = vi.fn().mockReturnValue(cloudEnabled)
+
+ vi.doMock('@/lib/uploads', () => ({
+ getStorageProvider: vi.fn().mockReturnValue(storageProvider),
+ isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ StorageService: {
+ uploadFile: uploadFileMock,
+ downloadFile: downloadFileMock,
+ deleteFile: deleteFileMock,
+ hasCloudStorage: hasCloudStorageMock,
+ },
+ uploadFile: uploadFileMock,
+ downloadFile: downloadFileMock,
+ deleteFile: deleteFileMock,
+ hasCloudStorage: hasCloudStorageMock,
+ }))
+
+ vi.doMock('@/lib/uploads/core/storage-service', () => ({
+ uploadFile: uploadFileMock,
+ downloadFile: downloadFileMock,
+ deleteFile: deleteFileMock,
+ hasCloudStorage: hasCloudStorageMock,
+ }))
+
+ vi.doMock('fs/promises', () => ({
+ unlink: vi.fn().mockResolvedValue(undefined),
+ access: vi.fn().mockResolvedValue(undefined),
+ stat: vi.fn().mockResolvedValue({ isFile: () => true }),
+ }))
+
+ return { auth: authMocks }
+}
describe('File Delete API Route', () => {
beforeEach(() => {
diff --git a/apps/sim/app/api/files/parse/route.test.ts b/apps/sim/app/api/files/parse/route.test.ts
index fa0793648d..801795570a 100644
--- a/apps/sim/app/api/files/parse/route.test.ts
+++ b/apps/sim/app/api/files/parse/route.test.ts
@@ -1,12 +1,59 @@
import path from 'path'
-import { NextRequest } from 'next/server'
/**
* Tests for file parse API route
*
* @vitest-environment node
*/
+import {
+ createMockRequest,
+ mockAuth,
+ mockCryptoUuid,
+ mockUuid,
+ setupCommonApiMocks,
+} from '@sim/testing'
+import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
+
+function setupFileApiMocks(
+ options: {
+ authenticated?: boolean
+ storageProvider?: 's3' | 'blob' | 'local'
+ cloudEnabled?: boolean
+ } = {}
+) {
+ const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
+
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+
+ const authMocks = mockAuth()
+ if (authenticated) {
+ authMocks.setAuthenticated()
+ } else {
+ authMocks.setUnauthenticated()
+ }
+
+ vi.doMock('@/lib/auth/hybrid', () => ({
+ checkHybridAuth: vi.fn().mockResolvedValue({
+ success: authenticated,
+ userId: authenticated ? 'test-user-id' : undefined,
+ error: authenticated ? undefined : 'Unauthorized',
+ }),
+ }))
+
+ vi.doMock('@/app/api/files/authorization', () => ({
+ verifyFileAccess: vi.fn().mockResolvedValue(true),
+ verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
+ }))
+
+ vi.doMock('@/lib/uploads', () => ({
+ getStorageProvider: vi.fn().mockReturnValue(storageProvider),
+ isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ }))
+
+ return { auth: authMocks }
+}
const mockJoin = vi.fn((...args: string[]): string => {
if (args[0] === '/test/uploads') {
diff --git a/apps/sim/app/api/files/presigned/route.test.ts b/apps/sim/app/api/files/presigned/route.test.ts
index 6dcac5c62b..0721269382 100644
--- a/apps/sim/app/api/files/presigned/route.test.ts
+++ b/apps/sim/app/api/files/presigned/route.test.ts
@@ -1,6 +1,6 @@
+import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
/**
* Tests for file presigned API route
@@ -8,6 +8,106 @@ import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
* @vitest-environment node
*/
+function setupFileApiMocks(
+ options: {
+ authenticated?: boolean
+ storageProvider?: 's3' | 'blob' | 'local'
+ cloudEnabled?: boolean
+ } = {}
+) {
+ const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
+
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+
+ const authMocks = mockAuth()
+ if (authenticated) {
+ authMocks.setAuthenticated()
+ } else {
+ authMocks.setUnauthenticated()
+ }
+
+ vi.doMock('@/lib/auth/hybrid', () => ({
+ checkHybridAuth: vi.fn().mockResolvedValue({
+ success: authenticated,
+ userId: authenticated ? 'test-user-id' : undefined,
+ error: authenticated ? undefined : 'Unauthorized',
+ }),
+ }))
+
+ vi.doMock('@/app/api/files/authorization', () => ({
+ verifyFileAccess: vi.fn().mockResolvedValue(true),
+ verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
+ }))
+
+ const useBlobStorage = storageProvider === 'blob' && cloudEnabled
+ const useS3Storage = storageProvider === 's3' && cloudEnabled
+
+ vi.doMock('@/lib/uploads/config', () => ({
+ USE_BLOB_STORAGE: useBlobStorage,
+ USE_S3_STORAGE: useS3Storage,
+ UPLOAD_DIR: '/uploads',
+ getStorageConfig: vi.fn().mockReturnValue(
+ useBlobStorage
+ ? {
+ accountName: 'testaccount',
+ accountKey: 'testkey',
+ connectionString: 'testconnection',
+ containerName: 'testcontainer',
+ }
+ : {
+ bucket: 'test-bucket',
+ region: 'us-east-1',
+ }
+ ),
+ isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ getStorageProvider: vi
+ .fn()
+ .mockReturnValue(
+ storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
+ ),
+ }))
+
+ const mockGeneratePresignedUploadUrl = vi.fn().mockImplementation(async (opts) => {
+ const timestamp = Date.now()
+ const safeFileName = opts.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
+ const key = `${opts.context}/${timestamp}-ik3a6w4-${safeFileName}`
+ return {
+ url: 'https://example.com/presigned-url',
+ key,
+ }
+ })
+
+ vi.doMock('@/lib/uploads/core/storage-service', () => ({
+ hasCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ generatePresignedUploadUrl: mockGeneratePresignedUploadUrl,
+ generatePresignedDownloadUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'),
+ }))
+
+ vi.doMock('@/lib/uploads/utils/validation', () => ({
+ validateFileType: vi.fn().mockReturnValue(null),
+ }))
+
+ vi.doMock('@/lib/uploads', () => ({
+ CopilotFiles: {
+ generateCopilotUploadUrl: vi.fn().mockResolvedValue({
+ url: 'https://example.com/presigned-url',
+ key: 'copilot/test-key.txt',
+ }),
+ isImageFileType: vi.fn().mockReturnValue(true),
+ },
+ getStorageProvider: vi
+ .fn()
+ .mockReturnValue(
+ storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
+ ),
+ isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ }))
+
+ return { auth: authMocks }
+}
+
describe('/api/files/presigned', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -210,7 +310,7 @@ describe('/api/files/presigned', () => {
const data = await response.json()
expect(response.status).toBe(200)
- expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
+ expect(data.fileInfo.key).toMatch(/^knowledge-base\/.*knowledge-doc\.pdf$/)
expect(data.directUploadSupported).toBe(true)
})
diff --git a/apps/sim/app/api/files/serve/[...path]/route.test.ts b/apps/sim/app/api/files/serve/[...path]/route.test.ts
index e5ce18bb8b..fe833f3aa3 100644
--- a/apps/sim/app/api/files/serve/[...path]/route.test.ts
+++ b/apps/sim/app/api/files/serve/[...path]/route.test.ts
@@ -1,11 +1,49 @@
-import { NextRequest } from 'next/server'
/**
* Tests for file serve API route
*
* @vitest-environment node
*/
+import {
+ defaultMockUser,
+ mockAuth,
+ mockCryptoUuid,
+ mockUuid,
+ setupCommonApiMocks,
+} from '@sim/testing'
+import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { setupApiTestMocks } from '@/app/api/__test-utils__/utils'
+
+function setupApiTestMocks(
+ options: {
+ authenticated?: boolean
+ user?: { id: string; email: string }
+ withFileSystem?: boolean
+ withUploadUtils?: boolean
+ } = {}
+) {
+ const { authenticated = true, user = defaultMockUser, withFileSystem = false } = options
+
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+
+ const authMocks = mockAuth(user)
+ if (authenticated) {
+ authMocks.setAuthenticated(user)
+ } else {
+ authMocks.setUnauthenticated()
+ }
+
+ if (withFileSystem) {
+ vi.doMock('fs/promises', () => ({
+ readFile: vi.fn().mockResolvedValue(Buffer.from('test content')),
+ access: vi.fn().mockResolvedValue(undefined),
+ stat: vi.fn().mockResolvedValue({ isFile: () => true, size: 100 }),
+ }))
+ }
+
+ return { auth: authMocks }
+}
describe('File Serve API Route', () => {
beforeEach(() => {
@@ -31,6 +69,17 @@ describe('File Serve API Route', () => {
existsSync: vi.fn().mockReturnValue(true),
}))
+ vi.doMock('@/lib/uploads', () => ({
+ CopilotFiles: {
+ downloadCopilotFile: vi.fn(),
+ },
+ isUsingCloudStorage: vi.fn().mockReturnValue(false),
+ }))
+
+ vi.doMock('@/lib/uploads/utils/file-utils', () => ({
+ inferContextFromKey: vi.fn().mockReturnValue('workspace'),
+ }))
+
vi.doMock('@/app/api/files/utils', () => ({
FileNotFoundError: class FileNotFoundError extends Error {
constructor(message: string) {
@@ -126,6 +175,17 @@ describe('File Serve API Route', () => {
verifyFileAccess: vi.fn().mockResolvedValue(true),
}))
+ vi.doMock('@/lib/uploads', () => ({
+ CopilotFiles: {
+ downloadCopilotFile: vi.fn(),
+ },
+ isUsingCloudStorage: vi.fn().mockReturnValue(false),
+ }))
+
+ vi.doMock('@/lib/uploads/utils/file-utils', () => ({
+ inferContextFromKey: vi.fn().mockReturnValue('workspace'),
+ }))
+
const req = new NextRequest(
'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nested-path-file.txt'
)
diff --git a/apps/sim/app/api/files/upload/route.test.ts b/apps/sim/app/api/files/upload/route.test.ts
index 35f580abd8..a5ecc030b8 100644
--- a/apps/sim/app/api/files/upload/route.test.ts
+++ b/apps/sim/app/api/files/upload/route.test.ts
@@ -1,11 +1,76 @@
-import { NextRequest } from 'next/server'
/**
* Tests for file upload API route
*
* @vitest-environment node
*/
+import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
+import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
+
+function setupFileApiMocks(
+ options: {
+ authenticated?: boolean
+ storageProvider?: 's3' | 'blob' | 'local'
+ cloudEnabled?: boolean
+ } = {}
+) {
+ const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
+
+ setupCommonApiMocks()
+ mockUuid()
+ mockCryptoUuid()
+
+ const authMocks = mockAuth()
+ if (authenticated) {
+ authMocks.setAuthenticated()
+ } else {
+ authMocks.setUnauthenticated()
+ }
+
+ vi.doMock('@/lib/auth/hybrid', () => ({
+ checkHybridAuth: vi.fn().mockResolvedValue({
+ success: authenticated,
+ userId: authenticated ? 'test-user-id' : undefined,
+ error: authenticated ? undefined : 'Unauthorized',
+ }),
+ }))
+
+ vi.doMock('@/app/api/files/authorization', () => ({
+ verifyFileAccess: vi.fn().mockResolvedValue(true),
+ verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
+ verifyKBFileAccess: vi.fn().mockResolvedValue(true),
+ verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
+ }))
+
+ vi.doMock('@/lib/uploads/contexts/workspace', () => ({
+ uploadWorkspaceFile: vi.fn().mockResolvedValue({
+ id: 'test-file-id',
+ name: 'test.txt',
+ url: '/api/files/serve/workspace/test-workspace-id/test-file.txt',
+ size: 100,
+ type: 'text/plain',
+ key: 'workspace/test-workspace-id/1234567890-test.txt',
+ uploadedAt: new Date().toISOString(),
+ expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
+ }),
+ }))
+
+ const uploadFileMock = vi.fn().mockResolvedValue({
+ path: '/api/files/serve/test-key.txt',
+ key: 'test-key.txt',
+ name: 'test.txt',
+ size: 100,
+ type: 'text/plain',
+ })
+
+ vi.doMock('@/lib/uploads', () => ({
+ getStorageProvider: vi.fn().mockReturnValue(storageProvider),
+ isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
+ uploadFile: uploadFileMock,
+ }))
+
+ return { auth: authMocks }
+}
describe('File Upload API Route', () => {
const createMockFormData = (files: File[], context = 'workspace'): FormData => {
diff --git a/apps/sim/app/api/folders/[id]/route.test.ts b/apps/sim/app/api/folders/[id]/route.test.ts
index 5b5f3c8c28..ce25228802 100644
--- a/apps/sim/app/api/folders/[id]/route.test.ts
+++ b/apps/sim/app/api/folders/[id]/route.test.ts
@@ -3,15 +3,24 @@
*
* @vitest-environment node
*/
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
- type CapturedFolderValues,
createMockRequest,
type MockUser,
mockAuth,
- mockLogger,
+ mockConsoleLogger,
setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
+
+/** Type for captured folder values in tests */
+interface CapturedFolderValues {
+ name?: string
+ color?: string
+ parentId?: string | null
+ isExpanded?: boolean
+ sortOrder?: number
+ updatedAt?: Date
+}
interface FolderDbMockOptions {
folderLookupResult?: any
@@ -21,6 +30,8 @@ interface FolderDbMockOptions {
}
describe('Individual Folder API Route', () => {
+ let mockLogger: ReturnType
+
const TEST_USER: MockUser = {
id: 'user-123',
email: 'test@example.com',
@@ -39,7 +50,8 @@ describe('Individual Folder API Route', () => {
updatedAt: new Date('2024-01-01T00:00:00Z'),
}
- const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
+ let mockAuthenticatedUser: (user?: MockUser) => void
+ let mockUnauthenticated: () => void
const mockGetUserEntityPermissions = vi.fn()
function createFolderDbMock(options: FolderDbMockOptions = {}) {
@@ -110,6 +122,10 @@ describe('Individual Folder API Route', () => {
vi.resetModules()
vi.clearAllMocks()
setupCommonApiMocks()
+ mockLogger = mockConsoleLogger()
+ const auth = mockAuth(TEST_USER)
+ mockAuthenticatedUser = auth.mockAuthenticatedUser
+ mockUnauthenticated = auth.mockUnauthenticated
mockGetUserEntityPermissions.mockResolvedValue('admin')
diff --git a/apps/sim/app/api/folders/route.test.ts b/apps/sim/app/api/folders/route.test.ts
index d7da4f779c..6ad39d75ec 100644
--- a/apps/sim/app/api/folders/route.test.ts
+++ b/apps/sim/app/api/folders/route.test.ts
@@ -3,17 +3,46 @@
*
* @vitest-environment node
*/
+import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- type CapturedFolderValues,
- createMockRequest,
- createMockTransaction,
- mockAuth,
- mockLogger,
- setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
+
+interface CapturedFolderValues {
+ name?: string
+ color?: string
+ parentId?: string | null
+ isExpanded?: boolean
+ sortOrder?: number
+ updatedAt?: Date
+}
+
+function createMockTransaction(mockData: {
+ selectData?: Array<{ id: string; [key: string]: unknown }>
+ insertResult?: Array<{ id: string; [key: string]: unknown }>
+}) {
+ const { selectData = [], insertResult = [] } = mockData
+ return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise) => {
+ const tx = {
+ select: vi.fn().mockReturnValue({
+ from: vi.fn().mockReturnValue({
+ where: vi.fn().mockReturnValue({
+ orderBy: vi.fn().mockReturnValue({
+ limit: vi.fn().mockReturnValue(selectData),
+ }),
+ }),
+ }),
+ }),
+ insert: vi.fn().mockReturnValue({
+ values: vi.fn().mockReturnValue({
+ returning: vi.fn().mockReturnValue(insertResult),
+ }),
+ }),
+ }
+ return await callback(tx)
+ })
+}
describe('Folders API Route', () => {
+ let mockLogger: ReturnType
const mockFolders = [
{
id: 'folder-1',
@@ -41,7 +70,8 @@ describe('Folders API Route', () => {
},
]
- const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth()
+ let mockAuthenticatedUser: () => void
+ let mockUnauthenticated: () => void
const mockUUID = 'mock-uuid-12345678-90ab-cdef-1234-567890abcdef'
const mockSelect = vi.fn()
@@ -63,6 +93,10 @@ describe('Folders API Route', () => {
})
setupCommonApiMocks()
+ mockLogger = mockConsoleLogger()
+ const auth = mockAuth()
+ mockAuthenticatedUser = auth.mockAuthenticatedUser
+ mockUnauthenticated = auth.mockUnauthenticated
mockSelect.mockReturnValue({ from: mockFrom })
mockFrom.mockReturnValue({ where: mockWhere })
diff --git a/apps/sim/app/api/form/[identifier]/route.ts b/apps/sim/app/api/form/[identifier]/route.ts
index bfae3e36e0..e75dd236c6 100644
--- a/apps/sim/app/api/form/[identifier]/route.ts
+++ b/apps/sim/app/api/form/[identifier]/route.ts
@@ -9,7 +9,9 @@ import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deploymen
import { generateRequestId } from '@/lib/core/utils/request'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
+import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
+import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -34,22 +36,14 @@ async function getWorkflowInputSchema(workflowId: string): Promise {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
- // Find the start block (starter or start_trigger type)
- const startBlock = blocks.find(
- (block) => block.type === 'starter' || block.type === 'start_trigger'
- )
+ const startBlock = blocks.find((block) => isValidStartBlockType(block.type))
if (!startBlock) {
return []
}
- // Extract inputFormat from subBlocks
const subBlocks = startBlock.subBlocks as Record | null
- if (!subBlocks?.inputFormat?.value) {
- return []
- }
-
- return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
+ return normalizeInputFormatValue(subBlocks?.inputFormat?.value)
} catch (error) {
logger.error('Error fetching workflow input schema:', error)
return []
diff --git a/apps/sim/app/api/function/execute/route.test.ts b/apps/sim/app/api/function/execute/route.test.ts
index 783b89d1b2..45abbb3212 100644
--- a/apps/sim/app/api/function/execute/route.test.ts
+++ b/apps/sim/app/api/function/execute/route.test.ts
@@ -3,10 +3,9 @@
*
* @vitest-environment node
*/
-import { loggerMock } from '@sim/testing'
+import { createMockRequest, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/execution/isolated-vm', () => ({
executeInIsolatedVM: vi.fn().mockImplementation(async (req) => {
diff --git a/apps/sim/app/api/function/execute/route.ts b/apps/sim/app/api/function/execute/route.ts
index cb1da555af..4412cf9667 100644
--- a/apps/sim/app/api/function/execute/route.ts
+++ b/apps/sim/app/api/function/execute/route.ts
@@ -9,6 +9,7 @@ import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import {
createEnvVarPattern,
createWorkflowVariablePattern,
+ resolveEnvVarReferences,
} from '@/executor/utils/reference-validation'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -479,9 +480,29 @@ function resolveEnvironmentVariables(
const replacements: Array<{ match: string; index: number; varName: string; varValue: string }> =
[]
+ const resolverVars: Record = {}
+ Object.entries(params).forEach(([key, value]) => {
+ if (value) {
+ resolverVars[key] = String(value)
+ }
+ })
+ Object.entries(envVars).forEach(([key, value]) => {
+ if (value) {
+ resolverVars[key] = value
+ }
+ })
+
while ((match = regex.exec(code)) !== null) {
const varName = match[1].trim()
- const varValue = envVars[varName] || params[varName] || ''
+ const resolved = resolveEnvVarReferences(match[0], resolverVars, {
+ allowEmbedded: true,
+ resolveExactMatch: true,
+ trimKeys: true,
+ onMissing: 'empty',
+ deep: false,
+ })
+ const varValue =
+ typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)
replacements.push({
match: match[0],
index: match.index,
diff --git a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts
index 710d9eea83..6b63ac13fc 100644
--- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts
+++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts
@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
mockKnowledgeSchemas()
diff --git a/apps/sim/app/api/knowledge/[id]/documents/route.test.ts b/apps/sim/app/api/knowledge/[id]/documents/route.test.ts
index 2b22613f6e..e826de12d7 100644
--- a/apps/sim/app/api/knowledge/[id]/documents/route.test.ts
+++ b/apps/sim/app/api/knowledge/[id]/documents/route.test.ts
@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
mockKnowledgeSchemas()
diff --git a/apps/sim/app/api/knowledge/[id]/route.test.ts b/apps/sim/app/api/knowledge/[id]/route.test.ts
index 9d64bf5caf..20bbc710f9 100644
--- a/apps/sim/app/api/knowledge/[id]/route.test.ts
+++ b/apps/sim/app/api/knowledge/[id]/route.test.ts
@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
mockKnowledgeSchemas()
mockDrizzleOrm()
diff --git a/apps/sim/app/api/knowledge/route.test.ts b/apps/sim/app/api/knowledge/route.test.ts
index e72e7671a3..2a59f45409 100644
--- a/apps/sim/app/api/knowledge/route.test.ts
+++ b/apps/sim/app/api/knowledge/route.test.ts
@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
mockKnowledgeSchemas()
mockDrizzleOrm()
diff --git a/apps/sim/app/api/knowledge/search/route.test.ts b/apps/sim/app/api/knowledge/search/route.test.ts
index 04259062e7..d5748b1063 100644
--- a/apps/sim/app/api/knowledge/search/route.test.ts
+++ b/apps/sim/app/api/knowledge/search/route.test.ts
@@ -5,13 +5,13 @@
*
* @vitest-environment node
*/
-import { createEnvMock } from '@sim/testing'
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
+ createEnvMock,
createMockRequest,
mockConsoleLogger,
mockKnowledgeSchemas,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('drizzle-orm', () => ({
and: vi.fn().mockImplementation((...args) => ({ and: args })),
diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.ts
index cc9ec0272f..baa33e205f 100644
--- a/apps/sim/app/api/mcp/serve/[serverId]/route.ts
+++ b/apps/sim/app/api/mcp/serve/[serverId]/route.ts
@@ -20,6 +20,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('WorkflowMcpServeAPI')
@@ -52,6 +53,8 @@ async function getServer(serverId: string) {
id: workflowMcpServer.id,
name: workflowMcpServer.name,
workspaceId: workflowMcpServer.workspaceId,
+ isPublic: workflowMcpServer.isPublic,
+ createdBy: workflowMcpServer.createdBy,
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.id, serverId))
@@ -90,9 +93,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise },
- apiKey
+ apiKey,
+ server.isPublic ? server.createdBy : undefined
)
default:
@@ -200,7 +206,8 @@ async function handleToolsCall(
id: RequestId,
serverId: string,
params: { name: string; arguments?: Record } | undefined,
- apiKey?: string | null
+ apiKey?: string | null,
+ publicServerOwnerId?: string
): Promise {
try {
if (!params?.name) {
@@ -243,7 +250,13 @@ async function handleToolsCall(
const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
const headers: Record = { 'Content-Type': 'application/json' }
- if (apiKey) headers['X-API-Key'] = apiKey
+
+ if (publicServerOwnerId) {
+ const internalToken = await generateInternalToken(publicServerOwnerId)
+ headers.Authorization = `Bearer ${internalToken}`
+ } else if (apiKey) {
+ headers['X-API-Key'] = apiKey
+ }
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
diff --git a/apps/sim/app/api/mcp/servers/test-connection/route.ts b/apps/sim/app/api/mcp/servers/test-connection/route.ts
index 3332397535..d91691d2f6 100644
--- a/apps/sim/app/api/mcp/servers/test-connection/route.ts
+++ b/apps/sim/app/api/mcp/servers/test-connection/route.ts
@@ -5,8 +5,7 @@ import { McpClient } from '@/lib/mcp/client'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
-import { REFERENCE } from '@/executor/constants'
-import { createEnvVarPattern } from '@/executor/utils/reference-validation'
+import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('McpServerTestAPI')
@@ -24,22 +23,23 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record): string {
- const envVarPattern = createEnvVarPattern()
- const envMatches = value.match(envVarPattern)
- if (!envMatches) return value
-
- let resolvedValue = value
- for (const match of envMatches) {
- const envKey = match.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length).trim()
- const envValue = envVars[envKey]
-
- if (envValue === undefined) {
+ const missingVars: string[] = []
+ const resolvedValue = resolveEnvVarReferences(value, envVars, {
+ allowEmbedded: true,
+ resolveExactMatch: true,
+ trimKeys: true,
+ onMissing: 'keep',
+ deep: false,
+ missingKeys: missingVars,
+ }) as string
+
+ if (missingVars.length > 0) {
+ const uniqueMissing = Array.from(new Set(missingVars))
+ uniqueMissing.forEach((envKey) => {
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
- continue
- }
-
- resolvedValue = resolvedValue.replace(match, envValue)
+ })
}
+
return resolvedValue
}
diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts
index 62266b817a..3ce0e00455 100644
--- a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts
+++ b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts
@@ -31,6 +31,7 @@ export const GET = withMcpAuth('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
+ isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
})
@@ -98,6 +99,9 @@ export const PATCH = withMcpAuth('write')(
if (body.description !== undefined) {
updateData.description = body.description?.trim() || null
}
+ if (body.isPublic !== undefined) {
+ updateData.isPublic = body.isPublic
+ }
const [updatedServer] = await db
.update(workflowMcpServer)
diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts
index 4398bd4e53..d7fd532590 100644
--- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts
+++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts
@@ -26,7 +26,6 @@ export const GET = withMcpAuth('read')(
logger.info(`[${requestId}] Getting tool ${toolId} from server ${serverId}`)
- // Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -72,7 +71,6 @@ export const PATCH = withMcpAuth('write')(
logger.info(`[${requestId}] Updating tool ${toolId} in server ${serverId}`)
- // Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -139,7 +137,6 @@ export const DELETE = withMcpAuth('write')(
logger.info(`[${requestId}] Deleting tool ${toolId} from server ${serverId}`)
- // Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts
index 5c39098b0f..b2cef8ee5b 100644
--- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts
+++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts
@@ -6,24 +6,10 @@ import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
-import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
-import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
+import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
const logger = createLogger('WorkflowMcpToolsAPI')
-/**
- * Check if a workflow has a valid start block by loading from database
- */
-async function hasValidStartBlock(workflowId: string): Promise {
- try {
- const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
- return hasValidStartBlockInState(normalizedData)
- } catch (error) {
- logger.warn('Error checking for start block:', error)
- return false
- }
-}
-
export const dynamic = 'force-dynamic'
interface RouteParams {
@@ -40,7 +26,6 @@ export const GET = withMcpAuth('read')(
logger.info(`[${requestId}] Listing tools for workflow MCP server: ${serverId}`)
- // Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -53,7 +38,6 @@ export const GET = withMcpAuth('read')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
- // Get tools with workflow details
const tools = await db
.select({
id: workflowMcpTool.id,
@@ -107,7 +91,6 @@ export const POST = withMcpAuth('write')(
)
}
- // Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -120,7 +103,6 @@ export const POST = withMcpAuth('write')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
- // Verify workflow exists and is deployed
const [workflowRecord] = await db
.select({
id: workflow.id,
@@ -137,7 +119,6 @@ export const POST = withMcpAuth('write')(
return createMcpErrorResponse(new Error('Workflow not found'), 'Workflow not found', 404)
}
- // Verify workflow belongs to the same workspace
if (workflowRecord.workspaceId !== workspaceId) {
return createMcpErrorResponse(
new Error('Workflow does not belong to this workspace'),
@@ -154,7 +135,6 @@ export const POST = withMcpAuth('write')(
)
}
- // Verify workflow has a valid start block
const hasStartBlock = await hasValidStartBlock(body.workflowId)
if (!hasStartBlock) {
return createMcpErrorResponse(
@@ -164,7 +144,6 @@ export const POST = withMcpAuth('write')(
)
}
- // Check if tool already exists for this workflow
const [existingTool] = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
@@ -190,7 +169,6 @@ export const POST = withMcpAuth('write')(
workflowRecord.description ||
`Execute ${workflowRecord.name} workflow`
- // Create the tool
const toolId = crypto.randomUUID()
const [tool] = await db
.insert(workflowMcpTool)
diff --git a/apps/sim/app/api/mcp/workflow-servers/route.ts b/apps/sim/app/api/mcp/workflow-servers/route.ts
index 25258e0b21..e2900f5a88 100644
--- a/apps/sim/app/api/mcp/workflow-servers/route.ts
+++ b/apps/sim/app/api/mcp/workflow-servers/route.ts
@@ -1,10 +1,12 @@
import { db } from '@sim/db'
-import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
+import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
+import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
+import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
const logger = createLogger('WorkflowMcpServersAPI')
@@ -25,18 +27,18 @@ export const GET = withMcpAuth('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
+ isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
toolCount: sql<number>`(
- SELECT COUNT(*)::int
- FROM "workflow_mcp_tool"
+ SELECT COUNT(*)::int
+ FROM "workflow_mcp_tool"
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
)`.as('tool_count'),
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.workspaceId, workspaceId))
- // Fetch all tools for these servers
const serverIds = servers.map((s) => s.id)
const tools =
serverIds.length > 0
@@ -49,7 +51,6 @@ export const GET = withMcpAuth('read')(
.where(inArray(workflowMcpTool.serverId, serverIds))
: []
- // Group tool names by server
const toolNamesByServer: Record<string, string[]> = {}
for (const tool of tools) {
if (!toolNamesByServer[tool.serverId]) {
@@ -58,7 +59,6 @@ export const GET = withMcpAuth('read')(
toolNamesByServer[tool.serverId].push(tool.toolName)
}
- // Attach tool names to servers
const serversWithToolNames = servers.map((server) => ({
...server,
toolNames: toolNamesByServer[server.id] || [],
@@ -90,6 +90,7 @@ export const POST = withMcpAuth('write')(
logger.info(`[${requestId}] Creating workflow MCP server:`, {
name: body.name,
workspaceId,
+ workflowIds: body.workflowIds,
})
if (!body.name) {
@@ -110,16 +111,76 @@ export const POST = withMcpAuth('write')(
createdBy: userId,
name: body.name.trim(),
description: body.description?.trim() || null,
+ isPublic: body.isPublic ?? false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
+ const workflowIds: string[] = body.workflowIds || []
+ const addedTools: Array<{ workflowId: string; toolName: string }> = []
+
+ if (workflowIds.length > 0) {
+ const workflows = await db
+ .select({
+ id: workflow.id,
+ name: workflow.name,
+ description: workflow.description,
+ isDeployed: workflow.isDeployed,
+ workspaceId: workflow.workspaceId,
+ })
+ .from(workflow)
+ .where(inArray(workflow.id, workflowIds))
+
+ for (const workflowRecord of workflows) {
+ if (workflowRecord.workspaceId !== workspaceId) {
+ logger.warn(
+ `[${requestId}] Skipping workflow ${workflowRecord.id} - does not belong to workspace`
+ )
+ continue
+ }
+
+ if (!workflowRecord.isDeployed) {
+ logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - not deployed`)
+ continue
+ }
+
+ const hasStartBlock = await hasValidStartBlock(workflowRecord.id)
+ if (!hasStartBlock) {
+ logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - no start block`)
+ continue
+ }
+
+ const toolName = sanitizeToolName(workflowRecord.name)
+ const toolDescription =
+ workflowRecord.description || `Execute ${workflowRecord.name} workflow`
+
+ const toolId = crypto.randomUUID()
+ await db.insert(workflowMcpTool).values({
+ id: toolId,
+ serverId,
+ workflowId: workflowRecord.id,
+ toolName,
+ toolDescription,
+ parameterSchema: {},
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ })
+
+ addedTools.push({ workflowId: workflowRecord.id, toolName })
+ }
+
+ logger.info(
+ `[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
+ addedTools.map((t) => t.toolName)
+ )
+ }
+
logger.info(
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
)
- return createMcpSuccessResponse({ server }, 201)
+ return createMcpSuccessResponse({ server, addedTools }, 201)
} catch (error) {
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)
return createMcpErrorResponse(
diff --git a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts
index 143a924cc3..0c98a52bf8 100644
--- a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts
+++ b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts
@@ -4,6 +4,8 @@ import {
invitation,
member,
organization,
+ permissionGroup,
+ permissionGroupMember,
permissions,
subscription as subscriptionTable,
user,
@@ -17,6 +19,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
+import { hasAccessControlAccess } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -382,6 +385,47 @@ export async function PUT(
// Don't fail the whole invitation acceptance due to this
}
+ // Auto-assign to permission group if one has autoAddNewMembers enabled
+ try {
+ const hasAccessControl = await hasAccessControlAccess(session.user.id)
+ if (hasAccessControl) {
+ const [autoAddGroup] = await tx
+ .select({ id: permissionGroup.id, name: permissionGroup.name })
+ .from(permissionGroup)
+ .where(
+ and(
+ eq(permissionGroup.organizationId, organizationId),
+ eq(permissionGroup.autoAddNewMembers, true)
+ )
+ )
+ .limit(1)
+
+ if (autoAddGroup) {
+ await tx.insert(permissionGroupMember).values({
+ id: randomUUID(),
+ permissionGroupId: autoAddGroup.id,
+ userId: session.user.id,
+ assignedBy: null,
+ assignedAt: new Date(),
+ })
+
+ logger.info('Auto-assigned new member to permission group', {
+ userId: session.user.id,
+ organizationId,
+ permissionGroupId: autoAddGroup.id,
+ permissionGroupName: autoAddGroup.name,
+ })
+ }
+ }
+ } catch (error) {
+ logger.error('Failed to auto-assign user to permission group', {
+ userId: session.user.id,
+ organizationId,
+ error,
+ })
+ // Don't fail the whole invitation acceptance due to this
+ }
+
const linkedWorkspaceInvitations = await tx
.select()
.from(workspaceInvitation)
diff --git a/apps/sim/app/api/permission-groups/[id]/route.ts b/apps/sim/app/api/permission-groups/[id]/route.ts
index 5e1486ff26..977cb1bbfe 100644
--- a/apps/sim/app/api/permission-groups/[id]/route.ts
+++ b/apps/sim/app/api/permission-groups/[id]/route.ts
@@ -25,12 +25,19 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
+ disableInvitations: z.boolean().optional(),
+ hideDeployApi: z.boolean().optional(),
+ hideDeployMcp: z.boolean().optional(),
+ hideDeployA2a: z.boolean().optional(),
+ hideDeployChatbot: z.boolean().optional(),
+ hideDeployTemplate: z.boolean().optional(),
})
const updateSchema = z.object({
name: z.string().trim().min(1).max(100).optional(),
description: z.string().max(500).nullable().optional(),
config: configSchema.optional(),
+ autoAddNewMembers: z.boolean().optional(),
})
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
@@ -44,6 +51,7 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
+ autoAddNewMembers: permissionGroup.autoAddNewMembers,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
@@ -140,11 +148,27 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
? { ...currentConfig, ...updates.config }
: currentConfig
+ // If setting autoAddNewMembers to true, unset it on other groups in the org first
+ if (updates.autoAddNewMembers === true) {
+ await db
+ .update(permissionGroup)
+ .set({ autoAddNewMembers: false, updatedAt: new Date() })
+ .where(
+ and(
+ eq(permissionGroup.organizationId, result.group.organizationId),
+ eq(permissionGroup.autoAddNewMembers, true)
+ )
+ )
+ }
+
await db
.update(permissionGroup)
.set({
...(updates.name !== undefined && { name: updates.name }),
...(updates.description !== undefined && { description: updates.description }),
+ ...(updates.autoAddNewMembers !== undefined && {
+ autoAddNewMembers: updates.autoAddNewMembers,
+ }),
config: newConfig,
updatedAt: new Date(),
})
diff --git a/apps/sim/app/api/permission-groups/route.ts b/apps/sim/app/api/permission-groups/route.ts
index a3c3a7512b..a72726c5a9 100644
--- a/apps/sim/app/api/permission-groups/route.ts
+++ b/apps/sim/app/api/permission-groups/route.ts
@@ -26,6 +26,12 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
+ disableInvitations: z.boolean().optional(),
+ hideDeployApi: z.boolean().optional(),
+ hideDeployMcp: z.boolean().optional(),
+ hideDeployA2a: z.boolean().optional(),
+ hideDeployChatbot: z.boolean().optional(),
+ hideDeployTemplate: z.boolean().optional(),
})
const createSchema = z.object({
@@ -33,6 +39,7 @@ const createSchema = z.object({
name: z.string().trim().min(1).max(100),
description: z.string().max(500).optional(),
config: configSchema.optional(),
+ autoAddNewMembers: z.boolean().optional(),
})
export async function GET(req: Request) {
@@ -68,6 +75,7 @@ export async function GET(req: Request) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
+ autoAddNewMembers: permissionGroup.autoAddNewMembers,
creatorName: user.name,
creatorEmail: user.email,
})
@@ -111,7 +119,8 @@ export async function POST(req: Request) {
}
const body = await req.json()
- const { organizationId, name, description, config } = createSchema.parse(body)
+ const { organizationId, name, description, config, autoAddNewMembers } =
+ createSchema.parse(body)
const membership = await db
.select({ id: member.id, role: member.role })
@@ -154,6 +163,19 @@ export async function POST(req: Request) {
...config,
}
+ // If autoAddNewMembers is true, unset it on any existing groups first
+ if (autoAddNewMembers) {
+ await db
+ .update(permissionGroup)
+ .set({ autoAddNewMembers: false, updatedAt: new Date() })
+ .where(
+ and(
+ eq(permissionGroup.organizationId, organizationId),
+ eq(permissionGroup.autoAddNewMembers, true)
+ )
+ )
+ }
+
const now = new Date()
const newGroup = {
id: crypto.randomUUID(),
@@ -164,6 +186,7 @@ export async function POST(req: Request) {
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
+ autoAddNewMembers: autoAddNewMembers || false,
}
await db.insert(permissionGroup).values(newGroup)
diff --git a/apps/sim/app/api/schedules/execute/route.test.ts b/apps/sim/app/api/schedules/execute/route.test.ts
index 6feddfe7a2..0d44e1ccd5 100644
--- a/apps/sim/app/api/schedules/execute/route.test.ts
+++ b/apps/sim/app/api/schedules/execute/route.test.ts
@@ -57,6 +57,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
+ sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -92,6 +93,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
+ deploymentVersionId: 'deploymentVersionId',
+ },
+ workflowDeploymentVersion: {
+ id: 'id',
+ workflowId: 'workflowId',
+ isActive: 'isActive',
+ },
+ workflow: {
+ id: 'id',
+ userId: 'userId',
+ workspaceId: 'workspaceId',
},
}
})
@@ -134,6 +146,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
+ sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -169,6 +182,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
+ deploymentVersionId: 'deploymentVersionId',
+ },
+ workflowDeploymentVersion: {
+ id: 'id',
+ workflowId: 'workflowId',
+ isActive: 'isActive',
+ },
+ workflow: {
+ id: 'id',
+ userId: 'userId',
+ workspaceId: 'workspaceId',
},
}
})
@@ -206,6 +230,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
+ sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -228,6 +253,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
+ deploymentVersionId: 'deploymentVersionId',
+ },
+ workflowDeploymentVersion: {
+ id: 'id',
+ workflowId: 'workflowId',
+ isActive: 'isActive',
+ },
+ workflow: {
+ id: 'id',
+ userId: 'userId',
+ workspaceId: 'workspaceId',
},
}
})
@@ -265,6 +301,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
+ sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -310,6 +347,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
+ deploymentVersionId: 'deploymentVersionId',
+ },
+ workflowDeploymentVersion: {
+ id: 'id',
+ workflowId: 'workflowId',
+ isActive: 'isActive',
+ },
+ workflow: {
+ id: 'id',
+ userId: 'userId',
+ workspaceId: 'workspaceId',
},
}
})
diff --git a/apps/sim/app/api/schedules/execute/route.ts b/apps/sim/app/api/schedules/execute/route.ts
index cadad529f5..d401b085a3 100644
--- a/apps/sim/app/api/schedules/execute/route.ts
+++ b/apps/sim/app/api/schedules/execute/route.ts
@@ -1,7 +1,7 @@
-import { db, workflowSchedule } from '@sim/db'
+import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
import { createLogger } from '@sim/logger'
import { tasks } from '@trigger.dev/sdk'
-import { and, eq, isNull, lt, lte, not, or } from 'drizzle-orm'
+import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
@@ -37,7 +37,8 @@ export async function GET(request: NextRequest) {
or(
isNull(workflowSchedule.lastQueuedAt),
lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
- )
+ ),
+ sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)`
)
)
.returning({
diff --git a/apps/sim/app/api/schedules/route.test.ts b/apps/sim/app/api/schedules/route.test.ts
index 608a1eb068..a7df3c9529 100644
--- a/apps/sim/app/api/schedules/route.test.ts
+++ b/apps/sim/app/api/schedules/route.test.ts
@@ -29,12 +29,23 @@ vi.mock('@sim/db', () => ({
vi.mock('@sim/db/schema', () => ({
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
- workflowSchedule: { workflowId: 'workflowId', blockId: 'blockId' },
+ workflowSchedule: {
+ workflowId: 'workflowId',
+ blockId: 'blockId',
+ deploymentVersionId: 'deploymentVersionId',
+ },
+ workflowDeploymentVersion: {
+ id: 'id',
+ workflowId: 'workflowId',
+ isActive: 'isActive',
+ },
}))
vi.mock('drizzle-orm', () => ({
eq: vi.fn(),
and: vi.fn(),
+ or: vi.fn(),
+ isNull: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
@@ -56,6 +67,11 @@ function mockDbChain(results: any[]) {
where: () => ({
limit: () => results[callIndex++] || [],
}),
+ leftJoin: () => ({
+ where: () => ({
+ limit: () => results[callIndex++] || [],
+ }),
+ }),
}),
}))
}
@@ -74,7 +90,16 @@ describe('Schedule GET API', () => {
it('returns schedule data for authorized user', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
- [{ id: 'sched-1', cronExpression: '0 9 * * *', status: 'active', failedCount: 0 }],
+ [
+ {
+ schedule: {
+ id: 'sched-1',
+ cronExpression: '0 9 * * *',
+ status: 'active',
+ failedCount: 0,
+ },
+ },
+ ],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -128,7 +153,7 @@ describe('Schedule GET API', () => {
it('allows workspace members to view', async () => {
mockDbChain([
[{ userId: 'other-user', workspaceId: 'ws-1' }],
- [{ id: 'sched-1', status: 'active', failedCount: 0 }],
+ [{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -139,7 +164,7 @@ describe('Schedule GET API', () => {
it('indicates disabled schedule with failures', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
- [{ id: 'sched-1', status: 'disabled', failedCount: 100 }],
+ [{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
diff --git a/apps/sim/app/api/schedules/route.ts b/apps/sim/app/api/schedules/route.ts
index 3b6ba81864..50cf346065 100644
--- a/apps/sim/app/api/schedules/route.ts
+++ b/apps/sim/app/api/schedules/route.ts
@@ -1,7 +1,7 @@
import { db } from '@sim/db'
-import { workflow, workflowSchedule } from '@sim/db/schema'
+import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, eq } from 'drizzle-orm'
+import { and, eq, isNull, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -62,9 +62,24 @@ export async function GET(req: NextRequest) {
}
const schedule = await db
- .select()
+ .select({ schedule: workflowSchedule })
.from(workflowSchedule)
- .where(conditions.length > 1 ? and(...conditions) : conditions[0])
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ ...conditions,
+ or(
+ eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId))
+ )
+ )
+ )
.limit(1)
const headers = new Headers()
@@ -74,7 +89,7 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ schedule: null }, { headers })
}
- const scheduleData = schedule[0]
+ const scheduleData = schedule[0].schedule
const isDisabled = scheduleData.status === 'disabled'
const hasFailures = scheduleData.failedCount > 0
diff --git a/apps/sim/app/api/tools/custom/route.test.ts b/apps/sim/app/api/tools/custom/route.test.ts
index da83f66153..1d990546c4 100644
--- a/apps/sim/app/api/tools/custom/route.test.ts
+++ b/apps/sim/app/api/tools/custom/route.test.ts
@@ -3,10 +3,9 @@
*
* @vitest-environment node
*/
-import { loggerMock } from '@sim/testing'
+import { createMockRequest, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest } from '@/app/api/__test-utils__/utils'
describe('Custom Tools API Routes', () => {
const sampleTools = [
@@ -364,7 +363,7 @@ describe('Custom Tools API Routes', () => {
})
it('should reject requests missing tool ID', async () => {
- const req = createMockRequest('DELETE')
+ const req = new NextRequest('http://localhost:3000/api/tools/custom')
const { DELETE } = await import('@/app/api/tools/custom/route')
diff --git a/apps/sim/app/api/tools/pulse/parse/route.ts b/apps/sim/app/api/tools/pulse/parse/route.ts
new file mode 100644
index 0000000000..7c2f340b1b
--- /dev/null
+++ b/apps/sim/app/api/tools/pulse/parse/route.ts
@@ -0,0 +1,169 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+import { StorageService } from '@/lib/uploads'
+import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('PulseParseAPI')
+
+const PulseParseSchema = z.object({
+ apiKey: z.string().min(1, 'API key is required'),
+ filePath: z.string().min(1, 'File path is required'),
+ pages: z.string().optional(),
+ extractFigure: z.boolean().optional(),
+ figureDescription: z.boolean().optional(),
+ returnHtml: z.boolean().optional(),
+ chunking: z.string().optional(),
+ chunkSize: z.number().optional(),
+})
+
+export async function POST(request: NextRequest) {
+ const requestId = generateRequestId()
+
+ try {
+ const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+
+ if (!authResult.success || !authResult.userId) {
+ logger.warn(`[${requestId}] Unauthorized Pulse parse attempt`, {
+ error: authResult.error || 'Missing userId',
+ })
+ return NextResponse.json(
+ {
+ success: false,
+ error: authResult.error || 'Unauthorized',
+ },
+ { status: 401 }
+ )
+ }
+
+ const userId = authResult.userId
+ const body = await request.json()
+ const validatedData = PulseParseSchema.parse(body)
+
+ logger.info(`[${requestId}] Pulse parse request`, {
+ filePath: validatedData.filePath,
+ isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
+ userId,
+ })
+
+ let fileUrl = validatedData.filePath
+
+ if (validatedData.filePath?.includes('/api/files/serve/')) {
+ try {
+ const storageKey = extractStorageKey(validatedData.filePath)
+ const context = inferContextFromKey(storageKey)
+
+ const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
+
+ if (!hasAccess) {
+ logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
+ userId,
+ key: storageKey,
+ context,
+ })
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'File not found',
+ },
+ { status: 404 }
+ )
+ }
+
+ fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
+ logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
+ } catch (error) {
+ logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Failed to generate file access URL',
+ },
+ { status: 500 }
+ )
+ }
+ } else if (validatedData.filePath?.startsWith('/')) {
+ const baseUrl = getBaseUrl()
+ fileUrl = `${baseUrl}${validatedData.filePath}`
+ }
+
+ const formData = new FormData()
+ formData.append('file_url', fileUrl)
+
+ if (validatedData.pages) {
+ formData.append('pages', validatedData.pages)
+ }
+ if (validatedData.extractFigure !== undefined) {
+ formData.append('extract_figure', String(validatedData.extractFigure))
+ }
+ if (validatedData.figureDescription !== undefined) {
+ formData.append('figure_description', String(validatedData.figureDescription))
+ }
+ if (validatedData.returnHtml !== undefined) {
+ formData.append('return_html', String(validatedData.returnHtml))
+ }
+ if (validatedData.chunking) {
+ formData.append('chunking', validatedData.chunking)
+ }
+ if (validatedData.chunkSize !== undefined) {
+ formData.append('chunk_size', String(validatedData.chunkSize))
+ }
+
+ const pulseResponse = await fetch('https://api.runpulse.com/extract', {
+ method: 'POST',
+ headers: {
+ 'x-api-key': validatedData.apiKey,
+ },
+ body: formData,
+ })
+
+ if (!pulseResponse.ok) {
+ const errorText = await pulseResponse.text()
+ logger.error(`[${requestId}] Pulse API error:`, errorText)
+ return NextResponse.json(
+ {
+ success: false,
+ error: `Pulse API error: ${pulseResponse.statusText}`,
+ },
+ { status: pulseResponse.status }
+ )
+ }
+
+ const pulseData = await pulseResponse.json()
+
+ logger.info(`[${requestId}] Pulse parse successful`)
+
+ return NextResponse.json({
+ success: true,
+ output: pulseData,
+ })
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Invalid request data',
+ details: error.errors,
+ },
+ { status: 400 }
+ )
+ }
+
+ logger.error(`[${requestId}] Error in Pulse parse:`, error)
+
+ return NextResponse.json(
+ {
+ success: false,
+ error: error instanceof Error ? error.message : 'Internal server error',
+ },
+ { status: 500 }
+ )
+ }
+}
diff --git a/apps/sim/app/api/tools/reducto/parse/route.ts b/apps/sim/app/api/tools/reducto/parse/route.ts
new file mode 100644
index 0000000000..fa96ac46b0
--- /dev/null
+++ b/apps/sim/app/api/tools/reducto/parse/route.ts
@@ -0,0 +1,167 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+import { StorageService } from '@/lib/uploads'
+import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('ReductoParseAPI')
+
+const ReductoParseSchema = z.object({
+ apiKey: z.string().min(1, 'API key is required'),
+ filePath: z.string().min(1, 'File path is required'),
+ pages: z.array(z.number()).optional(),
+ tableOutputFormat: z.enum(['html', 'md']).optional(),
+})
+
+export async function POST(request: NextRequest) {
+ const requestId = generateRequestId()
+
+ try {
+ const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+
+ if (!authResult.success || !authResult.userId) {
+ logger.warn(`[${requestId}] Unauthorized Reducto parse attempt`, {
+ error: authResult.error || 'Missing userId',
+ })
+ return NextResponse.json(
+ {
+ success: false,
+ error: authResult.error || 'Unauthorized',
+ },
+ { status: 401 }
+ )
+ }
+
+ const userId = authResult.userId
+ const body = await request.json()
+ const validatedData = ReductoParseSchema.parse(body)
+
+ logger.info(`[${requestId}] Reducto parse request`, {
+ filePath: validatedData.filePath,
+ isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
+ userId,
+ })
+
+ let fileUrl = validatedData.filePath
+
+ if (validatedData.filePath?.includes('/api/files/serve/')) {
+ try {
+ const storageKey = extractStorageKey(validatedData.filePath)
+ const context = inferContextFromKey(storageKey)
+
+ const hasAccess = await verifyFileAccess(
+ storageKey,
+ userId,
+ undefined, // customConfig
+ context, // context
+ false // isLocal
+ )
+
+ if (!hasAccess) {
+ logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
+ userId,
+ key: storageKey,
+ context,
+ })
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'File not found',
+ },
+ { status: 404 }
+ )
+ }
+
+ fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
+ logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
+ } catch (error) {
+ logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Failed to generate file access URL',
+ },
+ { status: 500 }
+ )
+ }
+ } else if (validatedData.filePath?.startsWith('/')) {
+ const baseUrl = getBaseUrl()
+ fileUrl = `${baseUrl}${validatedData.filePath}`
+ }
+
+ const reductoBody: Record<string, unknown> = {
+ input: fileUrl,
+ }
+
+ if (validatedData.pages && validatedData.pages.length > 0) {
+ reductoBody.settings = {
+ page_range: validatedData.pages,
+ }
+ }
+
+ if (validatedData.tableOutputFormat) {
+ reductoBody.formatting = {
+ table_output_format: validatedData.tableOutputFormat,
+ }
+ }
+
+ const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Accept: 'application/json',
+ Authorization: `Bearer ${validatedData.apiKey}`,
+ },
+ body: JSON.stringify(reductoBody),
+ })
+
+ if (!reductoResponse.ok) {
+ const errorText = await reductoResponse.text()
+ logger.error(`[${requestId}] Reducto API error:`, errorText)
+ return NextResponse.json(
+ {
+ success: false,
+ error: `Reducto API error: ${reductoResponse.statusText}`,
+ },
+ { status: reductoResponse.status }
+ )
+ }
+
+ const reductoData = await reductoResponse.json()
+
+ logger.info(`[${requestId}] Reducto parse successful`)
+
+ return NextResponse.json({
+ success: true,
+ output: reductoData,
+ })
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Invalid request data',
+ details: error.errors,
+ },
+ { status: 400 }
+ )
+ }
+
+ logger.error(`[${requestId}] Error in Reducto parse:`, error)
+
+ return NextResponse.json(
+ {
+ success: false,
+ error: error instanceof Error ? error.message : 'Internal server error',
+ },
+ { status: 500 }
+ )
+ }
+}
diff --git a/apps/sim/app/api/users/me/settings/route.ts b/apps/sim/app/api/users/me/settings/route.ts
index 6f6094558f..c8de2b0568 100644
--- a/apps/sim/app/api/users/me/settings/route.ts
+++ b/apps/sim/app/api/users/me/settings/route.ts
@@ -27,10 +27,11 @@ const SettingsSchema = z.object({
superUserModeEnabled: z.boolean().optional(),
errorNotificationsEnabled: z.boolean().optional(),
snapToGridSize: z.number().min(0).max(50).optional(),
+ showActionBar: z.boolean().optional(),
})
const defaultSettings = {
- theme: 'system',
+ theme: 'dark',
autoConnect: true,
telemetryEnabled: true,
emailPreferences: {},
@@ -39,6 +40,7 @@ const defaultSettings = {
superUserModeEnabled: false,
errorNotificationsEnabled: true,
snapToGridSize: 0,
+ showActionBar: true,
}
export async function GET() {
@@ -73,6 +75,7 @@ export async function GET() {
superUserModeEnabled: userSettings.superUserModeEnabled ?? true,
errorNotificationsEnabled: userSettings.errorNotificationsEnabled ?? true,
snapToGridSize: userSettings.snapToGridSize ?? 0,
+ showActionBar: userSettings.showActionBar ?? true,
},
},
{ status: 200 }
diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts
index a868313c0f..4f9f517aeb 100644
--- a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts
+++ b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts
@@ -1,6 +1,8 @@
import { db, workflow } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -58,7 +60,17 @@ export const POST = withAdminAuthParams(async (request, context) =>
return internalErrorResponse(deployResult.error || 'Failed to deploy workflow')
}
- const scheduleResult = await createSchedulesForDeploy(workflowId, normalizedData.blocks, db)
+ if (!deployResult.deploymentVersionId) {
+ await undeployWorkflow({ workflowId })
+ return internalErrorResponse('Failed to resolve deployment version')
+ }
+
+ const scheduleResult = await createSchedulesForDeploy(
+ workflowId,
+ normalizedData.blocks,
+ db,
+ deployResult.deploymentVersionId
+ )
if (!scheduleResult.success) {
logger.warn(`Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`)
}
@@ -80,10 +92,11 @@ export const POST = withAdminAuthParams(async (request, context) =>
export const DELETE = withAdminAuthParams(async (request, context) => {
const { id: workflowId } = await context.params
+ const requestId = generateRequestId()
try {
const [workflowRecord] = await db
- .select({ id: workflow.id })
+ .select()
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
@@ -92,6 +105,13 @@ export const DELETE = withAdminAuthParams(async (request, context)
return notFoundResponse('Workflow')
}
+ // Clean up external webhook subscriptions before undeploying
+ await cleanupWebhooksForWorkflow(
+ workflowId,
+ workflowRecord as Record,
+ requestId
+ )
+
const result = await undeployWorkflow({ workflowId })
if (!result.success) {
return internalErrorResponse(result.error || 'Failed to undeploy workflow')
diff --git a/apps/sim/app/api/webhooks/[id]/route.ts b/apps/sim/app/api/webhooks/[id]/route.ts
index 0cd31402df..7f10feefb5 100644
--- a/apps/sim/app/api/webhooks/[id]/route.ts
+++ b/apps/sim/app/api/webhooks/[id]/route.ts
@@ -7,6 +7,11 @@ import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
+import {
+ cleanupExternalWebhook,
+ createExternalWebhookSubscription,
+ shouldRecreateExternalWebhookSubscription,
+} from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WebhookAPI')
@@ -177,6 +182,46 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
+ const existingProviderConfig =
+ (webhookData.webhook.providerConfig as Record) || {}
+ let nextProviderConfig =
+ providerConfig !== undefined &&
+ resolvedProviderConfig &&
+ typeof resolvedProviderConfig === 'object'
+ ? (resolvedProviderConfig as Record)
+ : existingProviderConfig
+ const nextProvider = (provider ?? webhookData.webhook.provider) as string
+
+ if (
+ providerConfig !== undefined &&
+ shouldRecreateExternalWebhookSubscription({
+ previousProvider: webhookData.webhook.provider as string,
+ nextProvider,
+ previousConfig: existingProviderConfig,
+ nextConfig: nextProviderConfig,
+ })
+ ) {
+ await cleanupExternalWebhook(
+ { ...webhookData.webhook, providerConfig: existingProviderConfig },
+ webhookData.workflow,
+ requestId
+ )
+
+ const result = await createExternalWebhookSubscription(
+ request,
+ {
+ ...webhookData.webhook,
+ provider: nextProvider,
+ providerConfig: nextProviderConfig,
+ },
+ webhookData.workflow,
+ session.user.id,
+ requestId
+ )
+
+ nextProviderConfig = result.updatedProviderConfig as Record
+ }
+
logger.debug(`[${requestId}] Updating webhook properties`, {
hasPathUpdate: path !== undefined,
hasProviderUpdate: provider !== undefined,
@@ -188,16 +233,16 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
// Merge providerConfig to preserve credential-related fields
let finalProviderConfig = webhooks[0].webhook.providerConfig
if (providerConfig !== undefined) {
- const existingConfig = (webhooks[0].webhook.providerConfig as Record) || {}
+ const existingConfig = existingProviderConfig
finalProviderConfig = {
- ...resolvedProviderConfig,
+ ...nextProviderConfig,
credentialId: existingConfig.credentialId,
credentialSetId: existingConfig.credentialSetId,
userId: existingConfig.userId,
historyId: existingConfig.historyId,
lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
setupCompleted: existingConfig.setupCompleted,
- externalId: existingConfig.externalId,
+ externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
}
}
diff --git a/apps/sim/app/api/webhooks/route.ts b/apps/sim/app/api/webhooks/route.ts
index 4e980646b9..da1412acf5 100644
--- a/apps/sim/app/api/webhooks/route.ts
+++ b/apps/sim/app/api/webhooks/route.ts
@@ -1,15 +1,14 @@
import { db } from '@sim/db'
-import { webhook, workflow } from '@sim/db/schema'
+import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, desc, eq } from 'drizzle-orm'
+import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
-import { getOAuthToken } from '@/app/api/auth/oauth/utils'
const logger = createLogger('WebhooksAPI')
@@ -72,7 +71,23 @@ export async function GET(request: NextRequest) {
})
.from(webhook)
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflow.id),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.orderBy(desc(webhook.updatedAt))
logger.info(
@@ -150,7 +165,23 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id, path: webhook.path })
.from(webhook)
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflowId),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.limit(1)
if (existingForBlock.length > 0) {
@@ -226,7 +257,23 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id })
.from(webhook)
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflowId),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.limit(1)
if (existingForBlock.length > 0) {
targetWebhookId = existingForBlock[0].id
@@ -257,7 +304,7 @@ export async function POST(request: NextRequest) {
const finalProviderConfig = providerConfig || {}
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
- const resolvedProviderConfig = await resolveEnvVarsInObject(
+ let resolvedProviderConfig = await resolveEnvVarsInObject(
finalProviderConfig,
userId,
workflowRecord.workspaceId || undefined
@@ -414,149 +461,33 @@ export async function POST(request: NextRequest) {
}
// --- End Credential Set Handling ---
- // Create external subscriptions before saving to DB to prevent orphaned records
- let externalSubscriptionId: string | undefined
let externalSubscriptionCreated = false
-
- const createTempWebhookData = () => ({
+ const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
id: targetWebhookId || nanoid(),
path: finalPath,
- providerConfig: resolvedProviderConfig,
+ provider,
+ providerConfig: providerConfigOverride,
})
- if (provider === 'airtable') {
- logger.info(`[${requestId}] Creating Airtable subscription before saving to database`)
- try {
- externalSubscriptionId = await createAirtableWebhookSubscription(
- request,
- userId,
- createTempWebhookData(),
- requestId
- )
- if (externalSubscriptionId) {
- resolvedProviderConfig.externalId = externalSubscriptionId
- externalSubscriptionCreated = true
- }
- } catch (err) {
- logger.error(`[${requestId}] Error creating Airtable webhook subscription`, err)
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Airtable',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
-
- if (provider === 'calendly') {
- logger.info(`[${requestId}] Creating Calendly subscription before saving to database`)
- try {
- externalSubscriptionId = await createCalendlyWebhookSubscription(
- request,
- userId,
- createTempWebhookData(),
- requestId
- )
- if (externalSubscriptionId) {
- resolvedProviderConfig.externalId = externalSubscriptionId
- externalSubscriptionCreated = true
- }
- } catch (err) {
- logger.error(`[${requestId}] Error creating Calendly webhook subscription`, err)
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Calendly',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
-
- if (provider === 'microsoft-teams') {
- const { createTeamsSubscription } = await import('@/lib/webhooks/provider-subscriptions')
- logger.info(`[${requestId}] Creating Teams subscription before saving to database`)
- try {
- await createTeamsSubscription(request, createTempWebhookData(), workflowRecord, requestId)
- externalSubscriptionCreated = true
- } catch (err) {
- logger.error(`[${requestId}] Error creating Teams subscription`, err)
- return NextResponse.json(
- {
- error: 'Failed to create Teams subscription',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
-
- if (provider === 'telegram') {
- const { createTelegramWebhook } = await import('@/lib/webhooks/provider-subscriptions')
- logger.info(`[${requestId}] Creating Telegram webhook before saving to database`)
- try {
- await createTelegramWebhook(request, createTempWebhookData(), requestId)
- externalSubscriptionCreated = true
- } catch (err) {
- logger.error(`[${requestId}] Error creating Telegram webhook`, err)
- return NextResponse.json(
- {
- error: 'Failed to create Telegram webhook',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
-
- if (provider === 'webflow') {
- logger.info(`[${requestId}] Creating Webflow subscription before saving to database`)
- try {
- externalSubscriptionId = await createWebflowWebhookSubscription(
- request,
- userId,
- createTempWebhookData(),
- requestId
- )
- if (externalSubscriptionId) {
- resolvedProviderConfig.externalId = externalSubscriptionId
- externalSubscriptionCreated = true
- }
- } catch (err) {
- logger.error(`[${requestId}] Error creating Webflow webhook subscription`, err)
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Webflow',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
-
- if (provider === 'typeform') {
- const { createTypeformWebhook } = await import('@/lib/webhooks/provider-subscriptions')
- logger.info(`[${requestId}] Creating Typeform webhook before saving to database`)
- try {
- const usedTag = await createTypeformWebhook(request, createTempWebhookData(), requestId)
-
- if (!resolvedProviderConfig.webhookTag) {
- resolvedProviderConfig.webhookTag = usedTag
- logger.info(`[${requestId}] Stored auto-generated webhook tag: ${usedTag}`)
- }
-
- externalSubscriptionCreated = true
- } catch (err) {
- logger.error(`[${requestId}] Error creating Typeform webhook`, err)
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Typeform',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
+ try {
+ const result = await createExternalWebhookSubscription(
+ request,
+ createTempWebhookData(),
+ workflowRecord,
+ userId,
+ requestId
+ )
+ resolvedProviderConfig = result.updatedProviderConfig as Record
+ externalSubscriptionCreated = result.externalSubscriptionCreated
+ } catch (err) {
+ logger.error(`[${requestId}] Error creating external webhook subscription`, err)
+ return NextResponse.json(
+ {
+ error: 'Failed to create external webhook subscription',
+ details: err instanceof Error ? err.message : 'Unknown error',
+ },
+ { status: 500 }
+ )
}
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
@@ -617,7 +548,11 @@ export async function POST(request: NextRequest) {
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
try {
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
- await cleanupExternalWebhook(createTempWebhookData(), workflowRecord, requestId)
+ await cleanupExternalWebhook(
+ createTempWebhookData(resolvedProviderConfig),
+ workflowRecord,
+ requestId
+ )
} catch (cleanupError) {
logger.error(
`[${requestId}] Failed to cleanup external subscription after DB save failure`,
@@ -741,110 +676,6 @@ export async function POST(request: NextRequest) {
}
// --- End RSS specific logic ---
- if (savedWebhook && provider === 'grain') {
- logger.info(`[${requestId}] Grain provider detected. Creating Grain webhook subscription.`)
- try {
- const grainResult = await createGrainWebhookSubscription(
- request,
- {
- id: savedWebhook.id,
- path: savedWebhook.path,
- providerConfig: savedWebhook.providerConfig,
- },
- requestId
- )
-
- if (grainResult) {
- // Update the webhook record with the external Grain hook ID and event types for filtering
- const updatedConfig = {
- ...(savedWebhook.providerConfig as Record),
- externalId: grainResult.id,
- eventTypes: grainResult.eventTypes,
- }
- await db
- .update(webhook)
- .set({
- providerConfig: updatedConfig,
- updatedAt: new Date(),
- })
- .where(eq(webhook.id, savedWebhook.id))
-
- savedWebhook.providerConfig = updatedConfig
- logger.info(`[${requestId}] Successfully created Grain webhook`, {
- grainHookId: grainResult.id,
- eventTypes: grainResult.eventTypes,
- webhookId: savedWebhook.id,
- })
- }
- } catch (err) {
- logger.error(
- `[${requestId}] Error creating Grain webhook subscription, rolling back webhook`,
- err
- )
- await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Grain',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
- // --- End Grain specific logic ---
-
- // --- Lemlist specific logic ---
- if (savedWebhook && provider === 'lemlist') {
- logger.info(
- `[${requestId}] Lemlist provider detected. Creating Lemlist webhook subscription.`
- )
- try {
- const lemlistResult = await createLemlistWebhookSubscription(
- {
- id: savedWebhook.id,
- path: savedWebhook.path,
- providerConfig: savedWebhook.providerConfig,
- },
- requestId
- )
-
- if (lemlistResult) {
- // Update the webhook record with the external Lemlist hook ID
- const updatedConfig = {
- ...(savedWebhook.providerConfig as Record),
- externalId: lemlistResult.id,
- }
- await db
- .update(webhook)
- .set({
- providerConfig: updatedConfig,
- updatedAt: new Date(),
- })
- .where(eq(webhook.id, savedWebhook.id))
-
- savedWebhook.providerConfig = updatedConfig
- logger.info(`[${requestId}] Successfully created Lemlist webhook`, {
- lemlistHookId: lemlistResult.id,
- webhookId: savedWebhook.id,
- })
- }
- } catch (err) {
- logger.error(
- `[${requestId}] Error creating Lemlist webhook subscription, rolling back webhook`,
- err
- )
- await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
- return NextResponse.json(
- {
- error: 'Failed to create webhook in Lemlist',
- details: err instanceof Error ? err.message : 'Unknown error',
- },
- { status: 500 }
- )
- }
- }
- // --- End Lemlist specific logic ---
-
if (!targetWebhookId && savedWebhook) {
try {
PlatformEvents.webhookCreated({
@@ -868,616 +699,3 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
-
-// Helper function to create the webhook subscription in Airtable
-async function createAirtableWebhookSubscription(
- request: NextRequest,
- userId: string,
- webhookData: any,
- requestId: string
-): Promise {
- try {
- const { path, providerConfig } = webhookData
- const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}
-
- if (!baseId || !tableId) {
- logger.warn(`[${requestId}] Missing baseId or tableId for Airtable webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error(
- 'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
- )
- }
-
- const accessToken = await getOAuthToken(userId, 'airtable')
- if (!accessToken) {
- logger.warn(
- `[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
- )
- throw new Error(
- 'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
- )
- }
-
- const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
-
- const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
-
- const specification: any = {
- options: {
- filters: {
- dataTypes: ['tableData'], // Watch table data changes
- recordChangeScope: tableId, // Watch only the specified table
- },
- },
- }
-
- // Conditionally add the 'includes' field based on the config
- if (includeCellValuesInFieldIds === 'all') {
- specification.options.includes = {
- includeCellValuesInFieldIds: 'all',
- }
- }
-
- const requestBody: any = {
- notificationUrl: notificationUrl,
- specification: specification,
- }
-
- const airtableResponse = await fetch(airtableApiUrl, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${accessToken}`,
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(requestBody),
- })
-
- // Airtable often returns 200 OK even for errors in the body, check payload
- const responseBody = await airtableResponse.json()
-
- if (!airtableResponse.ok || responseBody.error) {
- const errorMessage =
- responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
- const errorType = responseBody.error?.type
- logger.error(
- `[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
- { type: errorType, message: errorMessage, response: responseBody }
- )
-
- let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
- if (airtableResponse.status === 404) {
- userFriendlyMessage =
- 'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
- } else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
- userFriendlyMessage = `Airtable error: ${errorMessage}`
- }
-
- throw new Error(userFriendlyMessage)
- }
- logger.info(
- `[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
- {
- airtableWebhookId: responseBody.id,
- }
- )
- return responseBody.id
- } catch (error: any) {
- logger.error(
- `[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
- {
- message: error.message,
- stack: error.stack,
- }
- )
- // Re-throw the error so it can be caught by the outer try-catch
- throw error
- }
-}
-
-// Helper function to create the webhook subscription in Calendly
-async function createCalendlyWebhookSubscription(
- request: NextRequest,
- userId: string,
- webhookData: any,
- requestId: string
-): Promise {
- try {
- const { path, providerConfig } = webhookData
- const { apiKey, organization, triggerId } = providerConfig || {}
-
- if (!apiKey) {
- logger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error(
- 'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
- )
- }
-
- if (!organization) {
- logger.warn(`[${requestId}] Missing organization URI for Calendly webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error(
- 'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
- )
- }
-
- if (!triggerId) {
- logger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error('Trigger ID is required to create Calendly webhook')
- }
-
- const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
-
- // Map trigger IDs to Calendly event types
- const eventTypeMap: Record = {
- calendly_invitee_created: ['invitee.created'],
- calendly_invitee_canceled: ['invitee.canceled'],
- calendly_routing_form_submitted: ['routing_form_submission.created'],
- calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
- }
-
- const events = eventTypeMap[triggerId] || ['invitee.created']
-
- const calendlyApiUrl = 'https://api.calendly.com/webhook_subscriptions'
-
- const requestBody = {
- url: notificationUrl,
- events,
- organization,
- scope: 'organization',
- }
-
- const calendlyResponse = await fetch(calendlyApiUrl, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${apiKey}`,
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(requestBody),
- })
-
- if (!calendlyResponse.ok) {
- const errorBody = await calendlyResponse.json().catch(() => ({}))
- const errorMessage = errorBody.message || errorBody.title || 'Unknown Calendly API error'
- logger.error(
- `[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
- { response: errorBody }
- )
-
- let userFriendlyMessage = 'Failed to create webhook subscription in Calendly'
- if (calendlyResponse.status === 401) {
- userFriendlyMessage =
- 'Calendly authentication failed. Please verify your Personal Access Token is correct.'
- } else if (calendlyResponse.status === 403) {
- userFriendlyMessage =
- 'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.'
- } else if (calendlyResponse.status === 404) {
- userFriendlyMessage =
- 'Calendly organization not found. Please verify the Organization URI is correct.'
- } else if (errorMessage && errorMessage !== 'Unknown Calendly API error') {
- userFriendlyMessage = `Calendly error: ${errorMessage}`
- }
-
- throw new Error(userFriendlyMessage)
- }
-
- const responseBody = await calendlyResponse.json()
- const webhookUri = responseBody.resource?.uri
-
- if (!webhookUri) {
- logger.error(
- `[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
- { response: responseBody }
- )
- throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
- }
-
- // Extract the webhook ID from the URI (e.g., https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID)
- const webhookId = webhookUri.split('/').pop()
-
- if (!webhookId) {
- logger.error(`[${requestId}] Could not extract webhook ID from Calendly URI: ${webhookUri}`, {
- response: responseBody,
- })
- throw new Error('Failed to extract webhook ID from Calendly response')
- }
-
- logger.info(
- `[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
- {
- calendlyWebhookUri: webhookUri,
- calendlyWebhookId: webhookId,
- }
- )
- return webhookId
- } catch (error: any) {
- logger.error(
- `[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
- {
- message: error.message,
- stack: error.stack,
- }
- )
- // Re-throw the error so it can be caught by the outer try-catch
- throw error
- }
-}
-
-// Helper function to create the webhook subscription in Webflow
-async function createWebflowWebhookSubscription(
- request: NextRequest,
- userId: string,
- webhookData: any,
- requestId: string
-): Promise {
- try {
- const { path, providerConfig } = webhookData
- const { siteId, triggerId, collectionId, formId } = providerConfig || {}
-
- if (!siteId) {
- logger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error('Site ID is required to create Webflow webhook')
- }
-
- if (!triggerId) {
- logger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error('Trigger type is required to create Webflow webhook')
- }
-
- const accessToken = await getOAuthToken(userId, 'webflow')
- if (!accessToken) {
- logger.warn(
- `[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
- )
- throw new Error(
- 'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
- )
- }
-
- const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
-
- // Map trigger IDs to Webflow trigger types
- const triggerTypeMap: Record = {
- webflow_collection_item_created: 'collection_item_created',
- webflow_collection_item_changed: 'collection_item_changed',
- webflow_collection_item_deleted: 'collection_item_deleted',
- webflow_form_submission: 'form_submission',
- }
-
- const webflowTriggerType = triggerTypeMap[triggerId]
- if (!webflowTriggerType) {
- logger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
- webhookId: webhookData.id,
- })
- throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
- }
-
- const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`
-
- const requestBody: any = {
- triggerType: webflowTriggerType,
- url: notificationUrl,
- }
-
- // Add filter for collection-based triggers
- if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
- requestBody.filter = {
- resource_type: 'collection',
- resource_id: collectionId,
- }
- }
-
- // Add filter for form submissions
- if (formId && webflowTriggerType === 'form_submission') {
- requestBody.filter = {
- resource_type: 'form',
- resource_id: formId,
- }
- }
-
- const webflowResponse = await fetch(webflowApiUrl, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${accessToken}`,
- 'Content-Type': 'application/json',
- accept: 'application/json',
- },
- body: JSON.stringify(requestBody),
- })
-
- const responseBody = await webflowResponse.json()
-
- if (!webflowResponse.ok || responseBody.error) {
- const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
- logger.error(
- `[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
- { message: errorMessage, response: responseBody }
- )
- throw new Error(errorMessage)
- }
-
- logger.info(
- `[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
- {
- webflowWebhookId: responseBody.id || responseBody._id,
- }
- )
-
- return responseBody.id || responseBody._id
- } catch (error: any) {
- logger.error(
- `[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
- {
- message: error.message,
- stack: error.stack,
- }
- )
- throw error
- }
-}
-
-// Helper function to create the webhook subscription in Grain
-async function createGrainWebhookSubscription(
- request: NextRequest,
- webhookData: any,
- requestId: string
-): Promise<{ id: string; eventTypes: string[] } | undefined> {
- try {
- const { path, providerConfig } = webhookData
- const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
- providerConfig || {}
-
- if (!apiKey) {
- logger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error(
- 'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
- )
- }
-
- // Map trigger IDs to Grain API hook_type (only 2 options: recording_added, upload_status)
- const hookTypeMap: Record = {
- grain_webhook: 'recording_added',
- grain_recording_created: 'recording_added',
- grain_recording_updated: 'recording_added',
- grain_highlight_created: 'recording_added',
- grain_highlight_updated: 'recording_added',
- grain_story_created: 'recording_added',
- grain_upload_status: 'upload_status',
- }
-
- const eventTypeMap: Record = {
- grain_webhook: [],
- grain_recording_created: ['recording_added'],
- grain_recording_updated: ['recording_updated'],
- grain_highlight_created: ['highlight_created'],
- grain_highlight_updated: ['highlight_updated'],
- grain_story_created: ['story_created'],
- grain_upload_status: ['upload_status'],
- }
-
- const hookType = hookTypeMap[triggerId] ?? 'recording_added'
- const eventTypes = eventTypeMap[triggerId] ?? []
-
- if (!hookTypeMap[triggerId]) {
- logger.warn(
- `[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
- {
- webhookId: webhookData.id,
- }
- )
- }
-
- logger.info(`[${requestId}] Creating Grain webhook`, {
- triggerId,
- hookType,
- eventTypes,
- webhookId: webhookData.id,
- })
-
- const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
-
- const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'
-
- const requestBody: Record = {
- hook_url: notificationUrl,
- hook_type: hookType,
- }
-
- // Build include object based on configuration
- const include: Record = {}
- if (includeHighlights) {
- include.highlights = true
- }
- if (includeParticipants) {
- include.participants = true
- }
- if (includeAiSummary) {
- include.ai_summary = true
- }
- if (Object.keys(include).length > 0) {
- requestBody.include = include
- }
-
- const grainResponse = await fetch(grainApiUrl, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${apiKey}`,
- 'Content-Type': 'application/json',
- 'Public-Api-Version': '2025-10-31',
- },
- body: JSON.stringify(requestBody),
- })
-
- const responseBody = await grainResponse.json()
-
- if (!grainResponse.ok || responseBody.error || responseBody.errors) {
- logger.warn('[App] Grain response body:', responseBody)
- const errorMessage =
- responseBody.errors?.detail ||
- responseBody.error?.message ||
- responseBody.error ||
- responseBody.message ||
- 'Unknown Grain API error'
- logger.error(
- `[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
- { message: errorMessage, response: responseBody }
- )
-
- let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
- if (grainResponse.status === 401) {
- userFriendlyMessage =
- 'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
- } else if (grainResponse.status === 403) {
- userFriendlyMessage =
- 'Access denied. Please ensure your Grain API Key has appropriate permissions.'
- } else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
- userFriendlyMessage = `Grain error: ${errorMessage}`
- }
-
- throw new Error(userFriendlyMessage)
- }
-
- logger.info(
- `[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
- {
- grainWebhookId: responseBody.id,
- eventTypes,
- }
- )
-
- return { id: responseBody.id, eventTypes }
- } catch (error: any) {
- logger.error(
- `[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
- {
- message: error.message,
- stack: error.stack,
- }
- )
- throw error
- }
-}
-
-// Helper function to create the webhook subscription in Lemlist
-async function createLemlistWebhookSubscription(
- webhookData: any,
- requestId: string
-): Promise<{ id: string } | undefined> {
- try {
- const { path, providerConfig } = webhookData
- const { apiKey, triggerId, campaignId } = providerConfig || {}
-
- if (!apiKey) {
- logger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
- webhookId: webhookData.id,
- })
- throw new Error(
- 'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
- )
- }
-
- // Map trigger IDs to Lemlist event types
- const eventTypeMap: Record = {
- lemlist_email_replied: 'emailsReplied',
- lemlist_linkedin_replied: 'linkedinReplied',
- lemlist_interested: 'interested',
- lemlist_not_interested: 'notInterested',
- lemlist_email_opened: 'emailsOpened',
- lemlist_email_clicked: 'emailsClicked',
- lemlist_email_bounced: 'emailsBounced',
- lemlist_email_sent: 'emailsSent',
- lemlist_webhook: undefined, // Generic webhook - no type filter
- }
-
- const eventType = eventTypeMap[triggerId]
-
- logger.info(`[${requestId}] Creating Lemlist webhook`, {
- triggerId,
- eventType,
- hasCampaignId: !!campaignId,
- webhookId: webhookData.id,
- })
-
- const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
-
- const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'
-
- // Build request body
- const requestBody: Record = {
- targetUrl: notificationUrl,
- }
-
- // Add event type if specified (omit for generic webhook to receive all events)
- if (eventType) {
- requestBody.type = eventType
- }
-
- // Add campaign filter if specified
- if (campaignId) {
- requestBody.campaignId = campaignId
- }
-
- // Lemlist uses Basic Auth with empty username and API key as password
- const authString = Buffer.from(`:${apiKey}`).toString('base64')
-
- const lemlistResponse = await fetch(lemlistApiUrl, {
- method: 'POST',
- headers: {
- Authorization: `Basic ${authString}`,
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(requestBody),
- })
-
- const responseBody = await lemlistResponse.json()
-
- if (!lemlistResponse.ok || responseBody.error) {
- const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
- logger.error(
- `[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
- { message: errorMessage, response: responseBody }
- )
-
- let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
- if (lemlistResponse.status === 401) {
- userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
- } else if (lemlistResponse.status === 403) {
- userFriendlyMessage =
- 'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
- } else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
- userFriendlyMessage = `Lemlist error: ${errorMessage}`
- }
-
- throw new Error(userFriendlyMessage)
- }
-
- logger.info(
- `[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
- {
- lemlistWebhookId: responseBody._id,
- }
- )
-
- return { id: responseBody._id }
- } catch (error: any) {
- logger.error(
- `[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
- {
- message: error.message,
- stack: error.stack,
- }
- )
- throw error
- }
-}
diff --git a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts
index fff521ca8f..737e5ac48b 100644
--- a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts
+++ b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts
@@ -3,15 +3,92 @@
*
* @vitest-environment node
*/
-
-import { loggerMock } from '@sim/testing'
+import { createMockRequest, loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import {
- createMockRequest,
- globalMockData,
- mockExecutionDependencies,
- mockTriggerDevSdk,
-} from '@/app/api/__test-utils__/utils'
+
+/** Mock execution dependencies for webhook tests */
+function mockExecutionDependencies() {
+ vi.mock('@/lib/core/security/encryption', () => ({
+ decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'decrypted-value' }),
+ }))
+
+ vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
+ buildTraceSpans: vi.fn().mockReturnValue({ traceSpans: [], totalDuration: 100 }),
+ }))
+
+ vi.mock('@/lib/workflows/utils', () => ({
+ updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
+ }))
+
+ vi.mock('@/serializer', () => ({
+ Serializer: vi.fn().mockImplementation(() => ({
+ serializeWorkflow: vi.fn().mockReturnValue({
+ version: '1.0',
+ blocks: [
+ {
+ id: 'starter-id',
+ metadata: { id: 'starter', name: 'Start' },
+ config: {},
+ inputs: {},
+ outputs: {},
+ position: { x: 100, y: 100 },
+ enabled: true,
+ },
+ {
+ id: 'agent-id',
+ metadata: { id: 'agent', name: 'Agent 1' },
+ config: {},
+ inputs: {},
+ outputs: {},
+ position: { x: 634, y: -167 },
+ enabled: true,
+ },
+ ],
+ edges: [
+ {
+ id: 'edge-1',
+ source: 'starter-id',
+ target: 'agent-id',
+ sourceHandle: 'source',
+ targetHandle: 'target',
+ },
+ ],
+ loops: {},
+ parallels: {},
+ }),
+ })),
+ }))
+}
+
+/** Mock Trigger.dev SDK */
+function mockTriggerDevSdk() {
+ vi.mock('@trigger.dev/sdk', () => ({
+ tasks: { trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }) },
+ task: vi.fn().mockReturnValue({}),
+ }))
+}
+
+/**
+ * Test data store - isolated per test via beforeEach reset
+ * This replaces the global mutable state pattern with local test data
+ */
+const testData = {
+ webhooks: [] as Array<{
+ id: string
+ provider: string
+ path: string
+ isActive: boolean
+ providerConfig?: Record<string, unknown>
+ workflowId: string
+ rateLimitCount?: number
+ rateLimitPeriod?: number
+ }>,
+ workflows: [] as Array<{
+ id: string
+ userId: string
+ workspaceId?: string
+ }>,
+}
const {
generateRequestHashMock,
@@ -159,8 +236,8 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
vi.mock('@/lib/webhooks/processor', () => ({
findAllWebhooksForPath: vi.fn().mockImplementation(async (options: { path: string }) => {
- // Filter webhooks by path from globalMockData
- const matchingWebhooks = globalMockData.webhooks.filter(
+ // Filter webhooks by path from testData
+ const matchingWebhooks = testData.webhooks.filter(
(wh) => wh.path === options.path && wh.isActive
)
@@ -170,7 +247,7 @@ vi.mock('@/lib/webhooks/processor', () => ({
// Return array of {webhook, workflow} objects
return matchingWebhooks.map((wh) => {
- const matchingWorkflow = globalMockData.workflows.find((w) => w.id === wh.workflowId) || {
+ const matchingWorkflow = testData.workflows.find((w) => w.id === wh.workflowId) || {
id: wh.workflowId || 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -283,14 +360,15 @@ describe('Webhook Trigger API Route', () => {
beforeEach(() => {
vi.clearAllMocks()
- globalMockData.webhooks.length = 0
- globalMockData.workflows.length = 0
- globalMockData.schedules.length = 0
+ // Reset test data arrays
+ testData.webhooks.length = 0
+ testData.workflows.length = 0
mockExecutionDependencies()
mockTriggerDevSdk()
- globalMockData.workflows.push({
+ // Set up default workflow for tests
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -326,7 +404,7 @@ describe('Webhook Trigger API Route', () => {
describe('Generic Webhook Authentication', () => {
it('should process generic webhook without authentication', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -336,7 +414,7 @@ describe('Webhook Trigger API Route', () => {
rateLimitCount: 100,
rateLimitPeriod: 60,
})
- globalMockData.workflows.push({
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -354,7 +432,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with Bearer token when no custom header is configured', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -362,7 +440,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'test-token-123' },
workflowId: 'test-workflow-id',
})
- globalMockData.workflows.push({
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -381,7 +459,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with custom header when configured', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -393,7 +471,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
- globalMockData.workflows.push({
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -412,7 +490,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive Bearer token authentication', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -420,7 +498,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'case-test-token' },
workflowId: 'test-workflow-id',
})
- globalMockData.workflows.push({
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -454,7 +532,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive custom header authentication', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -466,7 +544,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
- globalMockData.workflows.push({
+ testData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -495,7 +573,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong Bearer token', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -519,7 +597,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header token', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -547,7 +625,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject missing authentication when required', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -567,7 +645,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject Bearer token when custom header is configured', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -595,7 +673,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header name', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -623,7 +701,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject when auth is required but no token is configured', async () => {
- globalMockData.webhooks.push({
+ testData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -631,7 +709,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true },
workflowId: 'test-workflow-id',
})
- globalMockData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
+ testData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
const headers = {
'Content-Type': 'application/json',
diff --git a/apps/sim/app/api/webhooks/trigger/[path]/route.ts b/apps/sim/app/api/webhooks/trigger/[path]/route.ts
index ae11e476cf..ba08df3907 100644
--- a/apps/sim/app/api/webhooks/trigger/[path]/route.ts
+++ b/apps/sim/app/api/webhooks/trigger/[path]/route.ts
@@ -152,7 +152,6 @@ export async function POST(
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
requestId,
path,
- executionTarget: 'deployed',
})
responses.push(response)
}
diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.ts
index f7733e1407..1bd930b7ef 100644
--- a/apps/sim/app/api/workflows/[id]/chat/status/route.ts
+++ b/apps/sim/app/api/workflows/[id]/chat/status/route.ts
@@ -22,6 +22,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
.select({
id: chat.id,
identifier: chat.identifier,
+ title: chat.title,
+ description: chat.description,
+ customizations: chat.customizations,
+ authType: chat.authType,
+ allowedEmails: chat.allowedEmails,
+ outputConfigs: chat.outputConfigs,
+ password: chat.password,
isActive: chat.isActive,
})
.from(chat)
@@ -34,6 +41,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
? {
id: deploymentResults[0].id,
identifier: deploymentResults[0].identifier,
+ title: deploymentResults[0].title,
+ description: deploymentResults[0].description,
+ customizations: deploymentResults[0].customizations,
+ authType: deploymentResults[0].authType,
+ allowedEmails: deploymentResults[0].allowedEmails,
+ outputConfigs: deploymentResults[0].outputConfigs,
+ hasPassword: Boolean(deploymentResults[0].password),
}
: null
diff --git a/apps/sim/app/api/workflows/[id]/deploy/route.ts b/apps/sim/app/api/workflows/[id]/deploy/route.ts
index 1ba7647955..6e1172c049 100644
--- a/apps/sim/app/api/workflows/[id]/deploy/route.ts
+++ b/apps/sim/app/api/workflows/[id]/deploy/route.ts
@@ -4,12 +4,17 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
+import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
undeployWorkflow,
} from '@/lib/workflows/persistence/utils'
-import { createSchedulesForDeploy, validateWorkflowSchedules } from '@/lib/workflows/schedules'
+import {
+ cleanupDeploymentVersion,
+ createSchedulesForDeploy,
+ validateWorkflowSchedules,
+} from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -141,14 +146,58 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const deployedAt = deployResult.deployedAt!
+ const deploymentVersionId = deployResult.deploymentVersionId
+
+ if (!deploymentVersionId) {
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse('Failed to resolve deployment version', 500)
+ }
+
+ const triggerSaveResult = await saveTriggerWebhooksForDeploy({
+ request,
+ workflowId: id,
+ workflow: workflowData,
+ userId: actorUserId,
+ blocks: normalizedData.blocks,
+ requestId,
+ deploymentVersionId,
+ })
+
+ if (!triggerSaveResult.success) {
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId,
+ })
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse(
+ triggerSaveResult.error?.message || 'Failed to save trigger configuration',
+ triggerSaveResult.error?.status || 500
+ )
+ }
let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {}
- const scheduleResult = await createSchedulesForDeploy(id, normalizedData.blocks, db)
+ const scheduleResult = await createSchedulesForDeploy(
+ id,
+ normalizedData.blocks,
+ db,
+ deploymentVersionId
+ )
if (!scheduleResult.success) {
logger.error(
`[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}`
)
- } else if (scheduleResult.scheduleId) {
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId,
+ })
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500)
+ }
+ if (scheduleResult.scheduleId) {
scheduleInfo = {
scheduleId: scheduleResult.scheduleId,
cronExpression: scheduleResult.cronExpression,
@@ -202,11 +251,18 @@ export async function DELETE(
try {
logger.debug(`[${requestId}] Undeploying workflow: ${id}`)
- const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
+ const { error, workflow: workflowData } = await validateWorkflowPermissions(
+ id,
+ requestId,
+ 'admin'
+ )
if (error) {
return createErrorResponse(error.message, error.status)
}
+ // Clean up external webhook subscriptions before undeploying
+ await cleanupWebhooksForWorkflow(id, workflowData as Record<string, unknown>, requestId)
+
const result = await undeployWorkflow({ workflowId: id })
if (!result.success) {
return createErrorResponse(result.error || 'Failed to undeploy workflow', 500)
diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/activate/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/activate/route.ts
index 76126ee86c..d3e5abb555 100644
--- a/apps/sim/app/api/workflows/[id]/deployments/[version]/activate/route.ts
+++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/activate/route.ts
@@ -1,10 +1,19 @@
+import { db, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger'
+import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
+import { saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
+import {
+ cleanupDeploymentVersion,
+ createSchedulesForDeploy,
+ validateWorkflowSchedules,
+} from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
+import type { BlockState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowActivateDeploymentAPI')
@@ -19,30 +28,135 @@ export async function POST(
const { id, version } = await params
try {
- const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
+ const {
+ error,
+ session,
+ workflow: workflowData,
+ } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
+ const actorUserId = session?.user?.id
+ if (!actorUserId) {
+ logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
+ return createErrorResponse('Unable to determine activating user', 400)
+ }
+
const versionNum = Number(version)
if (!Number.isFinite(versionNum)) {
return createErrorResponse('Invalid version number', 400)
}
- const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
- if (!result.success) {
- return createErrorResponse(result.error || 'Failed to activate deployment', 400)
+ const [versionRow] = await db
+ .select({
+ id: workflowDeploymentVersion.id,
+ state: workflowDeploymentVersion.state,
+ })
+ .from(workflowDeploymentVersion)
+ .where(
+ and(
+ eq(workflowDeploymentVersion.workflowId, id),
+ eq(workflowDeploymentVersion.version, versionNum)
+ )
+ )
+ .limit(1)
+
+ if (!versionRow?.state) {
+ return createErrorResponse('Deployment version not found', 404)
+ }
+
+ const [currentActiveVersion] = await db
+ .select({ id: workflowDeploymentVersion.id })
+ .from(workflowDeploymentVersion)
+ .where(
+ and(
+ eq(workflowDeploymentVersion.workflowId, id),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .limit(1)
+
+ const previousVersionId = currentActiveVersion?.id
+
+ const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
+ const blocks = deployedState.blocks
+ if (!blocks || typeof blocks !== 'object') {
+ return createErrorResponse('Invalid deployed state structure', 500)
+ }
+
+ const triggerSaveResult = await saveTriggerWebhooksForDeploy({
+ request,
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ userId: actorUserId,
+ blocks,
+ requestId,
+ deploymentVersionId: versionRow.id,
+ })
+
+ if (!triggerSaveResult.success) {
+ return createErrorResponse(
+ triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
+ triggerSaveResult.error?.status || 500
+ )
}
- if (result.state) {
- await syncMcpToolsForWorkflow({
+ const scheduleValidation = validateWorkflowSchedules(blocks)
+ if (!scheduleValidation.isValid) {
+ return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
+ }
+
+ const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
+
+ if (!scheduleResult.success) {
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId: versionRow.id,
+ })
+ return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
+ }
+
+ const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
+ if (!result.success) {
+ await cleanupDeploymentVersion({
workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
requestId,
- state: result.state,
- context: 'activate',
+ deploymentVersionId: versionRow.id,
})
+ return createErrorResponse(result.error || 'Failed to activate deployment', 400)
}
+ if (previousVersionId && previousVersionId !== versionRow.id) {
+ try {
+ logger.info(
+ `[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
+ )
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId: previousVersionId,
+ })
+ logger.info(`[${requestId}] Previous version cleanup completed`)
+ } catch (cleanupError) {
+ logger.error(
+ `[${requestId}] Failed to clean up previous version ${previousVersionId}`,
+ cleanupError
+ )
+ }
+ }
+
+ await syncMcpToolsForWorkflow({
+ workflowId: id,
+ requestId,
+ state: versionRow.state,
+ context: 'activate',
+ })
+
return createSuccessResponse({ success: true, deployedAt: result.deployedAt })
} catch (error: any) {
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts
index 3a9b04dfba..df988f26a7 100644
--- a/apps/sim/app/api/workflows/[id]/execute/route.ts
+++ b/apps/sim/app/api/workflows/[id]/execute/route.ts
@@ -110,6 +110,7 @@ type AsyncExecutionParams = {
userId: string
input: any
triggerType: CoreTriggerType
+ preflighted?: boolean
}
/**
@@ -132,6 +133,7 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise(block: BlockState, subBlockId: string): T | undefined {
- const value = block.subBlocks?.[subBlockId]?.value
- if (value === undefined || value === null) {
- return undefined
- }
- return value as T
-}
-
-async function syncWorkflowWebhooks(
- workflowId: string,
- blocks: Record
-): Promise {
- await syncBlockResources(workflowId, blocks, {
- resourceName: 'webhook',
- subBlockId: 'webhookId',
- buildMetadata: buildWebhookMetadata,
- applyMetadata: upsertWebhookRecord,
- })
-}
-
-interface WebhookMetadata {
- triggerPath: string
- provider: string | null
- providerConfig: Record
-}
-
-const CREDENTIAL_SET_PREFIX = 'credentialSet:'
-
-function buildWebhookMetadata(block: BlockState): WebhookMetadata | null {
- const triggerId =
- getSubBlockValue(block, 'triggerId') ||
- getSubBlockValue(block, 'selectedTriggerId')
- const triggerConfig = getSubBlockValue>(block, 'triggerConfig') || {}
- const triggerCredentials = getSubBlockValue(block, 'triggerCredentials')
- const triggerPath = getSubBlockValue(block, 'triggerPath') || block.id
-
- const triggerDef = triggerId ? getTrigger(triggerId) : undefined
- const provider = triggerDef?.provider || null
-
- // Handle credential sets vs individual credentials
- const isCredentialSet = triggerCredentials?.startsWith(CREDENTIAL_SET_PREFIX)
- const credentialSetId = isCredentialSet
- ? triggerCredentials!.slice(CREDENTIAL_SET_PREFIX.length)
- : undefined
- const credentialId = isCredentialSet ? undefined : triggerCredentials
-
- const providerConfig = {
- ...(typeof triggerConfig === 'object' ? triggerConfig : {}),
- ...(credentialId ? { credentialId } : {}),
- ...(credentialSetId ? { credentialSetId } : {}),
- ...(triggerId ? { triggerId } : {}),
- }
-
- return {
- triggerPath,
- provider,
- providerConfig,
- }
-}
-
-async function upsertWebhookRecord(
- workflowId: string,
- block: BlockState,
- webhookId: string,
- metadata: WebhookMetadata
-): Promise {
- const providerConfig = metadata.providerConfig as Record
- const credentialSetId = providerConfig?.credentialSetId as string | undefined
-
- // For credential sets, delegate to the sync function which handles fan-out
- if (credentialSetId && metadata.provider) {
- const { syncWebhooksForCredentialSet } = await import('@/lib/webhooks/utils.server')
- const { getProviderIdFromServiceId } = await import('@/lib/oauth')
-
- const oauthProviderId = getProviderIdFromServiceId(metadata.provider)
- const requestId = crypto.randomUUID().slice(0, 8)
-
- // Extract base config (without credential-specific fields)
- const {
- credentialId: _cId,
- credentialSetId: _csId,
- userId: _uId,
- ...baseConfig
- } = providerConfig
-
- try {
- await syncWebhooksForCredentialSet({
- workflowId,
- blockId: block.id,
- provider: metadata.provider,
- basePath: metadata.triggerPath,
- credentialSetId,
- oauthProviderId,
- providerConfig: baseConfig as Record,
- requestId,
- })
-
- logger.info('Synced credential set webhooks during workflow save', {
- workflowId,
- blockId: block.id,
- credentialSetId,
- })
- } catch (error) {
- logger.error('Failed to sync credential set webhooks during workflow save', {
- workflowId,
- blockId: block.id,
- credentialSetId,
- error,
- })
- }
- return
- }
-
- // For individual credentials, use the existing single webhook logic
- const [existing] = await db.select().from(webhook).where(eq(webhook.id, webhookId)).limit(1)
-
- if (existing) {
- const needsUpdate =
- existing.blockId !== block.id ||
- existing.workflowId !== workflowId ||
- existing.path !== metadata.triggerPath
-
- if (needsUpdate) {
- await db
- .update(webhook)
- .set({
- workflowId,
- blockId: block.id,
- path: metadata.triggerPath,
- provider: metadata.provider || existing.provider,
- providerConfig: Object.keys(metadata.providerConfig).length
- ? metadata.providerConfig
- : existing.providerConfig,
- isActive: true,
- updatedAt: new Date(),
- })
- .where(eq(webhook.id, webhookId))
- }
- return
- }
-
- await db.insert(webhook).values({
- id: webhookId,
- workflowId,
- blockId: block.id,
- path: metadata.triggerPath,
- provider: metadata.provider,
- providerConfig: metadata.providerConfig,
- credentialSetId: null,
- isActive: true,
- createdAt: new Date(),
- updatedAt: new Date(),
- })
-
- logger.info('Recreated missing webhook after workflow save', {
- workflowId,
- blockId: block.id,
- webhookId,
- })
-}
-
-interface BlockResourceSyncConfig {
- resourceName: string
- subBlockId: string
- buildMetadata: (block: BlockState, resourceId: string) => T | null
- applyMetadata: (
- workflowId: string,
- block: BlockState,
- resourceId: string,
- metadata: T
- ) => Promise
-}
-
-async function syncBlockResources(
- workflowId: string,
- blocks: Record,
- config: BlockResourceSyncConfig
-): Promise {
- const blockEntries = Object.values(blocks || {}).filter(Boolean) as BlockState[]
- if (blockEntries.length === 0) return
-
- for (const block of blockEntries) {
- const resourceId = getSubBlockValue(block, config.subBlockId)
- if (!resourceId) continue
-
- const metadata = config.buildMetadata(block, resourceId)
- if (!metadata) {
- logger.warn(`Skipping ${config.resourceName} sync due to invalid configuration`, {
- workflowId,
- blockId: block.id,
- resourceId,
- resourceName: config.resourceName,
- })
- continue
- }
-
- try {
- await config.applyMetadata(workflowId, block, resourceId, metadata)
- } catch (error) {
- logger.error(`Failed to sync ${config.resourceName}`, {
- workflowId,
- blockId: block.id,
- resourceId,
- resourceName: config.resourceName,
- error,
- })
- }
- }
-}
diff --git a/apps/sim/app/api/workflows/[id]/variables/route.test.ts b/apps/sim/app/api/workflows/[id]/variables/route.test.ts
index b2485fa408..949b52ebc4 100644
--- a/apps/sim/app/api/workflows/[id]/variables/route.test.ts
+++ b/apps/sim/app/api/workflows/[id]/variables/route.test.ts
@@ -4,29 +4,29 @@
*
* @vitest-environment node
*/
-
-import { NextRequest } from 'next/server'
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
- createMockDatabase,
+ databaseMock,
+ defaultMockUser,
mockAuth,
mockCryptoUuid,
- mockUser,
setupCommonApiMocks,
-} from '@/app/api/__test-utils__/utils'
+} from '@sim/testing'
+import { NextRequest } from 'next/server'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
describe('Workflow Variables API Route', () => {
let authMocks: ReturnType
- let databaseMocks: ReturnType
const mockGetWorkflowAccessContext = vi.fn()
beforeEach(() => {
vi.resetModules()
setupCommonApiMocks()
mockCryptoUuid('mock-request-id-12345678')
- authMocks = mockAuth(mockUser)
+ authMocks = mockAuth(defaultMockUser)
mockGetWorkflowAccessContext.mockReset()
+ vi.doMock('@sim/db', () => databaseMock)
+
vi.doMock('@/lib/workflows/utils', () => ({
getWorkflowAccessContext: mockGetWorkflowAccessContext,
}))
@@ -203,10 +203,6 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})
- databaseMocks = createMockDatabase({
- update: { results: [{}] },
- })
-
const variables = {
'var-1': {
id: 'var-1',
diff --git a/apps/sim/app/api/workspaces/invitations/route.test.ts b/apps/sim/app/api/workspaces/invitations/route.test.ts
index f56e9d0120..202559142a 100644
--- a/apps/sim/app/api/workspaces/invitations/route.test.ts
+++ b/apps/sim/app/api/workspaces/invitations/route.test.ts
@@ -1,5 +1,5 @@
+import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest, mockAuth, mockConsoleLogger } from '@/app/api/__test-utils__/utils'
describe('Workspace Invitations API Route', () => {
const mockWorkspace = { id: 'workspace-1', name: 'Test Workspace' }
diff --git a/apps/sim/app/layout.tsx b/apps/sim/app/layout.tsx
index 327a519291..166b260af8 100644
--- a/apps/sim/app/layout.tsx
+++ b/apps/sim/app/layout.tsx
@@ -12,6 +12,7 @@ import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
import { QueryProvider } from '@/app/_shell/providers/query-provider'
import { SessionProvider } from '@/app/_shell/providers/session-provider'
import { ThemeProvider } from '@/app/_shell/providers/theme-provider'
+import { TooltipProvider } from '@/app/_shell/providers/tooltip-provider'
import { season } from '@/app/_styles/fonts/season/season'
export const viewport: Viewport = {
@@ -208,7 +209,9 @@ export default function RootLayout({ children }: { children: React.ReactNode })
- {children}
+
+ {children}
+
diff --git a/apps/sim/app/playground/page.tsx b/apps/sim/app/playground/page.tsx
index 4670b805e0..d380256a21 100644
--- a/apps/sim/app/playground/page.tsx
+++ b/apps/sim/app/playground/page.tsx
@@ -21,12 +21,15 @@ import {
Combobox,
Connections,
Copy,
+ Cursor,
DatePicker,
DocumentAttachment,
Duplicate,
+ Expand,
Eye,
FolderCode,
FolderPlus,
+ Hand,
HexSimple,
Input,
Key as KeyIcon,
@@ -991,11 +994,14 @@ export default function PlaygroundPage() {
{ Icon: ChevronDown, name: 'ChevronDown' },
{ Icon: Connections, name: 'Connections' },
{ Icon: Copy, name: 'Copy' },
+ { Icon: Cursor, name: 'Cursor' },
{ Icon: DocumentAttachment, name: 'DocumentAttachment' },
{ Icon: Duplicate, name: 'Duplicate' },
+ { Icon: Expand, name: 'Expand' },
{ Icon: Eye, name: 'Eye' },
{ Icon: FolderCode, name: 'FolderCode' },
{ Icon: FolderPlus, name: 'FolderPlus' },
+ { Icon: Hand, name: 'Hand' },
{ Icon: HexSimple, name: 'HexSimple' },
{ Icon: KeyIcon, name: 'Key' },
{ Icon: Layout, name: 'Layout' },
diff --git a/apps/sim/app/templates/layout-client.tsx b/apps/sim/app/templates/layout-client.tsx
index d886b6c379..f49b81c6c6 100644
--- a/apps/sim/app/templates/layout-client.tsx
+++ b/apps/sim/app/templates/layout-client.tsx
@@ -1,15 +1,12 @@
'use client'
-import { Tooltip } from '@/components/emcn'
import { season } from '@/app/_styles/fonts/season/season'
export default function TemplatesLayoutClient({ children }: { children: React.ReactNode }) {
return (
-
-
-
+
)
}
diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
index 7d69f5e146..ff841ddec9 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
@@ -77,7 +77,7 @@ export function DeleteChunkModal({
-
{getTooltipMessage('Delete Block')}
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx
similarity index 81%
rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx
rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx
index 8945b13dc8..5d6af16f24 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx
@@ -1,5 +1,6 @@
'use client'
+import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -7,14 +8,49 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
-import type { BlockContextMenuProps } from './types'
+import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
+
+/**
+ * Block information for context menu actions
+ */
+export interface BlockInfo {
+ id: string
+ type: string
+ enabled: boolean
+ horizontalHandles: boolean
+ parentId?: string
+ parentType?: string
+}
+
+/**
+ * Props for BlockMenu component
+ */
+export interface BlockMenuProps {
+ isOpen: boolean
+ position: { x: number; y: number }
+ menuRef: RefObject<HTMLDivElement>
+ onClose: () => void
+ selectedBlocks: BlockInfo[]
+ onCopy: () => void
+ onPaste: () => void
+ onDuplicate: () => void
+ onDelete: () => void
+ onToggleEnabled: () => void
+ onToggleHandles: () => void
+ onRemoveFromSubflow: () => void
+ onOpenEditor: () => void
+ onRename: () => void
+ hasClipboard?: boolean
+ showRemoveFromSubflow?: boolean
+ disableEdit?: boolean
+}
/**
* Context menu for workflow block(s).
* Displays block-specific actions in a popover at right-click position.
* Supports multi-selection - actions apply to all selected blocks.
*/
-export function BlockContextMenu({
+export function BlockMenu({
isOpen,
position,
menuRef,
@@ -32,15 +68,13 @@ export function BlockContextMenu({
hasClipboard = false,
showRemoveFromSubflow = false,
disableEdit = false,
-}: BlockContextMenuProps) {
+}: BlockMenuProps) {
const isSingleBlock = selectedBlocks.length === 1
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)
- const hasStarterBlock = selectedBlocks.some(
- (b) => b.type === 'starter' || b.type === 'start_trigger'
- )
+ const hasStarterBlock = selectedBlocks.some((b) => isValidStartBlockType(b.type))
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
const isSubflow =
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/index.ts
new file mode 100644
index 0000000000..5016029014
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/index.ts
@@ -0,0 +1,2 @@
+export type { BlockInfo, BlockMenuProps } from './block-menu'
+export { BlockMenu } from './block-menu'
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/canvas-menu.tsx
similarity index 80%
rename from apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx
rename to apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/canvas-menu.tsx
index a5bba68b46..7cd5294f32 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/canvas-menu.tsx
@@ -1,5 +1,6 @@
'use client'
+import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -7,13 +8,40 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
-import type { PaneContextMenuProps } from './types'
/**
- * Context menu for workflow canvas pane.
+ * Props for CanvasMenu component
+ */
+export interface CanvasMenuProps {
+ isOpen: boolean
+ position: { x: number; y: number }
+ menuRef: RefObject
+ onClose: () => void
+ onUndo: () => void
+ onRedo: () => void
+ onPaste: () => void
+ onAddBlock: () => void
+ onAutoLayout: () => void
+ onFitToView: () => void
+ onOpenLogs: () => void
+ onToggleVariables: () => void
+ onToggleChat: () => void
+ onInvite: () => void
+ isVariablesOpen?: boolean
+ isChatOpen?: boolean
+ hasClipboard?: boolean
+ disableEdit?: boolean
+ disableAdmin?: boolean
+ canUndo?: boolean
+ canRedo?: boolean
+ isInvitationsDisabled?: boolean
+}
+
+/**
+ * Context menu for workflow canvas.
* Displays canvas-level actions when right-clicking empty space.
*/
-export function PaneContextMenu({
+export function CanvasMenu({
isOpen,
position,
menuRef,
@@ -23,6 +51,7 @@ export function PaneContextMenu({
onPaste,
onAddBlock,
onAutoLayout,
+ onFitToView,
onOpenLogs,
onToggleVariables,
onToggleChat,
@@ -35,7 +64,7 @@ export function PaneContextMenu({
canUndo = false,
canRedo = false,
isInvitationsDisabled = false,
-}: PaneContextMenuProps) {
+}: CanvasMenuProps) {
return (
Auto-layout
⇧L
+ {
+ onFitToView()
+ onClose()
+ }}
+ >
+ Fit to View
+
{/* Navigation actions */}
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/index.ts
new file mode 100644
index 0000000000..ac5ef3e74f
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/canvas-menu/index.ts
@@ -0,0 +1,2 @@
+export type { CanvasMenuProps } from './canvas-menu'
+export { CanvasMenu } from './canvas-menu'
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx
index 7518a35c4d..24d609f505 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx
@@ -20,6 +20,7 @@ import {
PopoverItem,
PopoverScrollArea,
PopoverTrigger,
+ Tooltip,
Trash,
} from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
@@ -29,7 +30,7 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/core/utils/response-format'
-import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
+import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { START_BLOCK_RESERVED_FIELDS } from '@/lib/workflows/types'
import {
@@ -93,6 +94,9 @@ interface ProcessedAttachment {
dataUrl: string
}
+/** Timeout for FileReader operations in milliseconds */
+const FILE_READ_TIMEOUT_MS = 60000
+
/**
* Reads files and converts them to data URLs for image display
* @param chatFiles - Array of chat files to process
@@ -106,8 +110,37 @@ const processFileAttachments = async (chatFiles: ChatFile[]): Promise((resolve, reject) => {
const reader = new FileReader()
- reader.onload = () => resolve(reader.result as string)
- reader.onerror = reject
+ let settled = false
+
+ const timeoutId = setTimeout(() => {
+ if (!settled) {
+ settled = true
+ reader.abort()
+ reject(new Error(`File read timed out after ${FILE_READ_TIMEOUT_MS}ms`))
+ }
+ }, FILE_READ_TIMEOUT_MS)
+
+ reader.onload = () => {
+ if (!settled) {
+ settled = true
+ clearTimeout(timeoutId)
+ resolve(reader.result as string)
+ }
+ }
+ reader.onerror = () => {
+ if (!settled) {
+ settled = true
+ clearTimeout(timeoutId)
+ reject(reader.error)
+ }
+ }
+ reader.onabort = () => {
+ if (!settled) {
+ settled = true
+ clearTimeout(timeoutId)
+ reject(new Error('File read aborted'))
+ }
+ }
reader.readAsDataURL(file.file)
})
} catch (error) {
@@ -201,7 +234,6 @@ export function Chat() {
const triggerWorkflowUpdate = useWorkflowStore((state) => state.triggerUpdate)
const setSubBlockValue = useSubBlockStore((state) => state.setValue)
- // Chat state (UI and messages from unified store)
const {
isChatOpen,
chatPosition,
@@ -229,19 +261,16 @@ export function Chat() {
const { data: session } = useSession()
const { addToQueue } = useOperationQueue()
- // Local state
const [chatMessage, setChatMessage] = useState('')
const [promptHistory, setPromptHistory] = useState([])
const [historyIndex, setHistoryIndex] = useState(-1)
const [moreMenuOpen, setMoreMenuOpen] = useState(false)
- // Refs
const inputRef = useRef(null)
const timeoutRef = useRef(null)
const streamReaderRef = useRef | null>(null)
const preventZoomRef = usePreventZoom()
- // File upload hook
const {
chatFiles,
uploadErrors,
@@ -256,6 +285,38 @@ export function Chat() {
handleDrop,
} = useChatFileUpload()
+ const filePreviewUrls = useRef