Skip to content

Commit 229bfc9

Browse files
committed
Add support for optionally validating UI messages
1 parent 8dd0dc5 commit 229bfc9

File tree

2 files changed

+81
-1
lines changed
  • packages/trigger-sdk/src/v3
  • references/ai-chat/src/trigger

2 files changed

+81
-1
lines changed

packages/trigger-sdk/src/v3/ai.ts

Lines changed: 70 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1919,6 +1919,20 @@ export type ChatStartEvent<TClientData = unknown> = {
19191919
writer: ChatWriter;
19201920
};
19211921

1922+
/**
1923+
* Event passed to the `onValidateMessages` callback.
1924+
*/
1925+
export type ValidateMessagesEvent<TUIM extends UIMessage = UIMessage> = {
1926+
/** The incoming UI messages for this turn (after cleanup of aborted tool parts). */
1927+
messages: TUIM[];
1928+
/** The unique identifier for the chat session. */
1929+
chatId: string;
1930+
/** The turn number (0-indexed). */
1931+
turn: number;
1932+
/** The trigger type for this turn. */
1933+
trigger: "submit-message" | "regenerate-message" | "preload" | "close";
1934+
};
1935+
19221936
/**
19231937
* Event passed to the `onTurnStart` callback.
19241938
*/
@@ -2169,6 +2183,35 @@ export type ChatAgentOptions<
21692183
*/
21702184
onChatStart?: (event: ChatStartEvent<inferSchemaOut<TClientDataSchema>>) => Promise<void> | void;
21712185

2186+
/**
2187+
* Validate or transform incoming UI messages before they are converted to model
2188+
* messages and accumulated. Fires once per turn with the raw `UIMessage[]` from
2189+
* the wire payload (after cleanup of aborted tool parts).
2190+
*
2191+
* Return the validated messages array. Throw to abort the turn with an error.
2192+
*
2193+
* This is the right place to call the AI SDK's `validateUIMessages` to catch
2194+
* malformed messages from storage or untrusted input before they reach the model.
2195+
*
2196+
* @example
2197+
* ```ts
2198+
* import { validateUIMessages } from "ai";
2199+
*
2200+
* chat.agent({
2201+
* id: "my-chat",
2202+
* onValidateMessages: async ({ messages }) => {
2203+
* return validateUIMessages({ messages, tools: chatTools });
2204+
* },
2205+
* run: async ({ messages }) => {
2206+
* return streamText({ model, messages, tools: chatTools });
2207+
* },
2208+
* });
2209+
* ```
2210+
*/
2211+
onValidateMessages?: (
2212+
event: ValidateMessagesEvent<TUIMessage>
2213+
) => TUIMessage[] | Promise<TUIMessage[]>;
2214+
21722215
/**
21732216
* Called at the start of every turn, after message accumulation and `onChatStart` (turn 0),
21742217
* but before the `run` function executes.
@@ -2550,6 +2593,7 @@ function chatAgent<
25502593
clientDataSchema,
25512594
onPreload,
25522595
onChatStart,
2596+
onValidateMessages,
25532597
onTurnStart,
25542598
onBeforeTurnComplete,
25552599
onCompacted,
@@ -2908,10 +2952,35 @@ function chatAgent<
29082952
// useChat state may still contain assistant messages with tool parts
29092953
// in partial/input-available state. These cause API errors (e.g.
29102954
// Anthropic requires every tool_use to have a matching tool_result).
2911-
const cleanedUIMessages = uiMessages.map((msg) =>
2955+
let cleanedUIMessages = uiMessages.map((msg) =>
29122956
msg.role === "assistant" ? cleanupAbortedParts(msg) : msg
29132957
);
29142958

2959+
// Validate/transform UIMessages before conversion — catches malformed
2960+
// messages from storage or untrusted input before they reach the model.
2961+
if (onValidateMessages) {
2962+
cleanedUIMessages = await tracer.startActiveSpan(
2963+
"onValidateMessages()",
2964+
async () => {
2965+
return onValidateMessages({
2966+
messages: cleanedUIMessages as TUIMessage[],
2967+
chatId: currentWirePayload.chatId,
2968+
turn,
2969+
trigger: currentWirePayload.trigger,
2970+
});
2971+
},
2972+
{
2973+
attributes: {
2974+
[SemanticInternalAttributes.STYLE_ICON]: "task-hook-onStart",
2975+
[SemanticInternalAttributes.COLLAPSED]: true,
2976+
"chat.id": currentWirePayload.chatId,
2977+
"chat.turn": turn + 1,
2978+
"chat.messages.count": cleanedUIMessages.length,
2979+
},
2980+
}
2981+
);
2982+
}
2983+
29152984
// Convert the incoming UIMessages to model messages and update the accumulator.
29162985
// Turn 1: full history from the frontend → replaces the accumulator.
29172986
// Turn 2+: only the new message(s) → appended to the accumulator.

references/ai-chat/src/trigger/chat.ts

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import {
77
stepCountIs,
88
generateId,
99
createProviderRegistry,
10+
validateUIMessages,
1011
} from "ai";
1112
import type { LanguageModel, LanguageModelUsage, UIMessage } from "ai";
1213
import { openai } from "@ai-sdk/openai";
@@ -219,6 +220,16 @@ export const aiChat = chat
219220
},
220221
// #endregion
221222

223+
// #region onValidateMessages — validate UIMessages before model conversion
224+
onValidateMessages: async ({ messages, turn }) => {
225+
logger.info("Validating UI messages", {
226+
turn,
227+
count: messages.length,
228+
});
229+
return validateUIMessages({ messages, tools: chatTools });
230+
},
231+
// #endregion
232+
222233
// #region prepareMessages — runs before every LLM call
223234
prepareMessages: ({ messages, reason }) => {
224235
// Add Anthropic cache breaks to the last message for prompt caching.

0 commit comments

Comments (0)