Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,10 @@
"sharp",
"simple-git-hooks",
"workerd"
]
],
"overrides": {
"@modelcontextprotocol/sdk": "^1.25.3"
}
},
"devDependencies": {
"@types/json-schema": "^7.0.15"
Expand Down
5 changes: 2 additions & 3 deletions packages/mcp-cloudflare/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,7 @@
"private": true,
"type": "module",
"license": "FSL-1.1-ALv2",
"files": [
"./dist/*"
],
"files": ["./dist/*"],
"exports": {
".": {
"types": "./dist/index.ts",
Expand Down Expand Up @@ -45,6 +43,7 @@
"wrangler": "^4.45.0"
},
"dependencies": {
"@ai-sdk/mcp": "catalog:",
"@ai-sdk/openai": "catalog:",
"@ai-sdk/react": "catalog:",
"@cloudflare/workers-oauth-provider": "catalog:",
Expand Down
124 changes: 101 additions & 23 deletions packages/mcp-cloudflare/src/client/components/chat/chat-message.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,52 @@ const ToolPart = memo(function ToolPart({
);
});

// Type guard for AI SDK 6 tool parts, whose `type` is "tool-<toolName>".
// The legacy "tool-invocation" type is explicitly excluded so the two
// formats can be told apart.
const isToolPart = (part: { type: string }): part is {
  type: `tool-${string}`;
} & ChatToolInvocation => {
  const { type } = part;
  return type !== "tool-invocation" && type.startsWith("tool-");
};

// Type guard for legacy tool-invocation parts (AI SDK 4/5 format from
// persisted messages): { type: "tool-invocation", toolInvocation: {...} }.
// Note: `typeof null === "object"`, so we must also reject a null
// `toolInvocation` — otherwise a null invocation would be forwarded to
// ToolPart and crash on property access.
const isLegacyToolInvocation = (part: { type: string }): part is {
  type: "tool-invocation";
  toolInvocation: ChatToolInvocation;
} => {
  if (part.type !== "tool-invocation") return false;
  const candidate = (part as { toolInvocation?: unknown }).toolInvocation;
  return typeof candidate === "object" && candidate !== null;
};

// Normalize a tool's raw output into the MCP-style content shape that
// ToolPart renders. Returns undefined when there is nothing to show.
const convertToolOutput = (
  output: unknown,
): { content: Array<{ type: "text"; text: string }> } | undefined => {
  // Nothing to render for null/undefined.
  if (output == null) {
    return undefined;
  }

  // Plain strings are wrapped in a single text block.
  if (typeof output === "string") {
    return { content: [{ type: "text", text: output }] };
  }

  // Already MCP-shaped ({ content: [...] }): pass through untouched.
  const maybeMcp = output as { content?: unknown };
  if (typeof output === "object" && Array.isArray(maybeMcp.content)) {
    return output as { content: Array<{ type: "text"; text: string }> };
  }

  // Anything else: pretty-print as JSON inside a text block.
  return { content: [{ type: "text", text: JSON.stringify(output, null, 2) }] };
};

// Main component for rendering individual message parts
const MessagePart = memo(function MessagePart({
part,
Expand All @@ -137,30 +183,62 @@ const MessagePart = memo(function MessagePart({
messageData,
onSlashCommand,
}: MessagePartProps) {
switch (part.type) {
case "text":
return (
<TextPart
text={part.text}
role={messageRole}
messageId={messageId}
isStreaming={isStreaming}
messageData={messageData}
onSlashCommand={onSlashCommand}
/>
);
case "tool-invocation":
return (
<ToolPart
toolInvocation={part.toolInvocation as ChatToolInvocation}
messageId={messageId}
partIndex={partIndex}
/>
);
default:
// Fallback for unknown part types
return null;
// Handle text parts
if (part.type === "text") {
return (
<TextPart
text={part.text}
role={messageRole}
messageId={messageId}
isStreaming={isStreaming}
messageData={messageData}
onSlashCommand={onSlashCommand}
/>
);
}

// Handle legacy tool-invocation parts (AI SDK 4/5 format from persisted messages)
// Legacy format: { type: "tool-invocation", toolInvocation: {...} }
if (isLegacyToolInvocation(part)) {
return (
<ToolPart
toolInvocation={part.toolInvocation}
messageId={messageId}
partIndex={partIndex}
/>
);
}

// Handle tool parts (AI SDK 6 format: type is "tool-${toolName}")
if (isToolPart(part)) {
// Map AI SDK 6 state to our ChatToolInvocation state
const partState = (part as any).state;
const mappedState: "partial-call" | "call" | "result" =
partState === "result"
? "result"
: partState === "partial-call"
? "partial-call"
: "call";

// Convert AI SDK 6 tool part to our ChatToolInvocation format
const toolInvocation: ChatToolInvocation = {
toolCallId: part.toolCallId,
toolName: part.type.replace(/^tool-/, ""),
args: (part as any).input ?? {},
state: mappedState,
result: convertToolOutput((part as any).output),
};
return (
<ToolPart
toolInvocation={toolInvocation}
messageId={messageId}
partIndex={partIndex}
/>
);
}

// Fallback for unknown part types
return null;
});

// Export the memoized components
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@ import { Loader2, AlertCircle } from "lucide-react";
import { Button } from "../ui/button";
import { MessagePart } from ".";
import { ToolActions } from "../ui/tool-actions";
import type { Message, ProcessedMessagePart, ChatMessagesProps } from "./types";
import type { ProcessedMessagePart, ChatMessagesProps } from "./types";
import { isAuthError, getErrorMessage } from "../../utils/chat-error-handler";
import { useAuth } from "../../contexts/auth-context";

// Cache for stable part objects to avoid recreating them
const partCache = new WeakMap<Message, { type: "text"; text: string }>();
// Import UIMessage from our types (re-exported as Message for compatibility)
import type { UIMessage } from "@ai-sdk/react";

function processMessages(
messages: Message[],
messages: UIMessage[],
isChatLoading: boolean,
isLocalStreaming?: boolean,
isMessageStreaming?: (messageId: string) => boolean,
Expand All @@ -28,42 +28,25 @@ function processMessages(
if (message.parts && message.parts.length > 0) {
const lastPartIndex = message.parts.length - 1;

message.parts.forEach((part, partIndex) => {
const isLastPartOfLastMessage =
isLastMessage && partIndex === lastPartIndex;

allParts.push({
part,
messageId: message.id,
messageRole: message.role,
partIndex,
// Stream if it's AI response OR local streaming simulation
isStreaming:
(isLastPartOfLastMessage &&
isChatLoading &&
part.type === "text") ||
(part.type === "text" && !!isMessageStreaming?.(message.id)),
});
});
} else if (message.content) {
// Use cached part object to maintain stable references
let part = partCache.get(message);
if (!part) {
part = { type: "text", text: message.content };
partCache.set(message, part);
}

allParts.push({
part,
messageId: message.id,
messageRole: message.role,
partIndex: 0,
// Stream if it's AI response OR local streaming simulation
isStreaming:
(isLastMessage && isChatLoading) ||
isMessageStreaming?.(message.id) ||
false,
});
message.parts.forEach(
(part: UIMessage["parts"][number], partIndex: number) => {
const isLastPartOfLastMessage =
isLastMessage && partIndex === lastPartIndex;

allParts.push({
part,
messageId: message.id,
messageRole: message.role,
partIndex,
// Stream if it's AI response OR local streaming simulation
isStreaming:
(isLastPartOfLastMessage &&
isChatLoading &&
part.type === "text") ||
(part.type === "text" && !!isMessageStreaming?.(message.id)),
});
},
);
}
});

Expand Down Expand Up @@ -132,7 +115,7 @@ export function ChatMessages({
const originalMessage = messages.find(
(m) => m.id === item.messageId,
);
const messageData = originalMessage?.data as any;
const messageData = originalMessage?.metadata as any;
const hasToolActions =
messageData?.type === "tools-list" &&
messageData?.toolsDetailed &&
Expand All @@ -146,7 +129,7 @@ export function ChatMessages({
messageRole={item.messageRole}
partIndex={item.partIndex}
isStreaming={item.isStreaming}
messageData={originalMessage?.data}
messageData={originalMessage?.metadata}
onSlashCommand={onSlashCommand}
/>
{/* Show tool actions list for tools-list messages */}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import ScrollToBottom from "react-scroll-to-bottom";
import { Button } from "../ui/button";
import { ChatInput, ChatMessages } from ".";
import type { Message } from "ai/react";
import type { UIMessage } from "@ai-sdk/react";

// Constant empty function to avoid creating new instances on every render
const EMPTY_FUNCTION = () => {};
Expand All @@ -28,7 +28,7 @@ const SAMPLE_PROMPTS = [
] as const;

interface ChatUIProps {
messages: Message[];
messages: UIMessage[];
input: string;
error?: Error | null;
isChatLoading: boolean;
Expand Down
Loading
Loading