Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/gentle-horses-sniff.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@ai-sdk/langchain': patch
---

fix(langchain): ensure message id consistency across reasoning and text stream parts when the provider chunk id changes mid-stream
32 changes: 32 additions & 0 deletions examples/next-langchain/app/api/reasoning/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import { toBaseMessages, toUIMessageStream } from '@ai-sdk/langchain';
import { ChatOpenAI } from '@langchain/openai';
import { createUIMessageStreamResponse, UIMessage } from 'ai';
import { NextResponse } from 'next/server';

export const maxDuration = 30;

/**
 * ChatOpenAI configured against the OpenAI Responses API with reasoning
 * summaries enabled. With `summary: 'concise'` the provider streams
 * reasoning-summary chunks before the final text response — the scenario
 * that previously exposed the message-id inconsistency this example
 * exercises (reasoning and text chunks may arrive with different chunk ids).
 */
const model = new ChatOpenAI({
  model: 'gpt-5',
  useResponsesApi: true,
  reasoning: { effort: 'medium', summary: 'concise' },
});

export async function POST(req: Request) {
try {
const { messages }: { messages: UIMessage[] } = await req.json();

const langchainMessages = await toBaseMessages(messages);
const stream = await model.stream(langchainMessages as never);

return createUIMessageStreamResponse({
stream: toUIMessageStream(stream),
});
} catch (error) {
const message =
error instanceof Error ? error.message : 'An unknown error occurred';
return NextResponse.json({ error: message }, { status: 500 });
}
}
42 changes: 42 additions & 0 deletions examples/next-langchain/app/reasoning/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
'use client';

import { useChat } from '@ai-sdk/react';
import { DefaultChatTransport } from 'ai';
import { useMemo } from 'react';
import { ChatContainer } from '../../components/chat-container';
import { type CustomDataMessage } from '../types';

/**
 * Demo page for the reasoning adapter: chats against /api/reasoning, where
 * ChatOpenAI streams reasoning summaries ahead of the final answer.
 */
export default function ReasoningChat() {
  // Memoize so the transport is constructed once per mount.
  const chatTransport = useMemo(
    () => new DefaultChatTransport({ api: '/api/reasoning' }),
    [],
  );

  const { messages, sendMessage, status, error } = useChat<CustomDataMessage>({
    transport: chatTransport,
  });

  const suggestedPrompts = [
    'How many rs are in strawberry?',
    'What is 15% of 80?',
    'If I have 3 apples and give away half, how many do I have?',
  ];

  return (
    <ChatContainer
      title="Reasoning Example"
      description={
        <>
          Uses <code>ChatOpenAI</code> with OpenAI Responses API and reasoning
          enabled. This streams reasoning summaries before the final response,
          demonstrating the <code>@ai-sdk/langchain</code> adapter&apos;s
          support for reasoning content.
        </>
      }
      messages={messages}
      onSend={text => sendMessage({ text })}
      status={status}
      error={error}
      placeholder="Ask a question that requires reasoning..."
      suggestions={suggestedPrompts}
    />
  );
}
96 changes: 96 additions & 0 deletions packages/langchain/src/utils.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -652,6 +652,102 @@ describe('processModelChunk', () => {
expect(state.reasoningStarted).toBe(false);
expect(state.textStarted).toBe(true);
});

it('should maintain consistent IDs when chunk.id changes during reasoning-to-text transition', () => {
  // Regression test: if emitted part ids followed chunk.id as it changed
  // mid-stream, the client could not match *-delta/*-end events to the
  // *-start event that opened the part.
  const state = {
    started: false,
    messageId: 'default',
    reasoningStarted: false,
    textStarted: false,
    reasoningMessageId: null as string | null,
    textMessageId: null as string | null,
  };
  const emitted: unknown[] = [];
  const controller = createMockController(emitted);

  // Helper: an AIMessageChunk whose contentBlocks expose a reasoning block.
  const makeReasoningChunk = (id: string, reasoning: string) => {
    const chunk = new AIMessageChunk({ content: '', id });
    Object.defineProperty(chunk, 'contentBlocks', {
      get: () => [{ type: 'reasoning', reasoning }],
    });
    return chunk;
  };

  // Reasoning opens under id "run-abc123", then the provider switches to
  // "msg-xyz789" while the reasoning part is still open.
  processModelChunk(
    makeReasoningChunk('run-abc123', 'Let me think...'),
    state,
    controller,
  );
  processModelChunk(
    makeReasoningChunk('msg-xyz789', ' about this.'),
    state,
    controller,
  );

  // Plain text arrives under the new id.
  processModelChunk(
    new AIMessageChunk({ content: 'Here is my answer.', id: 'msg-xyz789' }),
    state,
    controller,
  );

  // All reasoning events keep the id from reasoning-start; all text events
  // keep the id from text-start.
  expect(emitted).toEqual([
    { type: 'reasoning-start', id: 'run-abc123' },
    { type: 'reasoning-delta', delta: 'Let me think...', id: 'run-abc123' },
    { type: 'reasoning-delta', delta: ' about this.', id: 'run-abc123' },
    { type: 'reasoning-end', id: 'run-abc123' },
    { type: 'text-start', id: 'msg-xyz789' },
    { type: 'text-delta', delta: 'Here is my answer.', id: 'msg-xyz789' },
  ]);

  expect(state.reasoningMessageId).toBe('run-abc123');
  expect(state.textMessageId).toBe('msg-xyz789');
  expect(state.messageId).toBe('msg-xyz789');
});

it('should maintain consistent text IDs when chunk.id changes during text streaming', () => {
  // Same regression for text-only streams: an id change between text chunks
  // must not leak into text-delta events after text-start has been emitted.
  const state = {
    started: false,
    messageId: 'default',
    reasoningStarted: false,
    textStarted: false,
    reasoningMessageId: null as string | null,
    textMessageId: null as string | null,
  };
  const emitted: unknown[] = [];
  const controller = createMockController(emitted);

  // First chunk opens the text part under "run-abc123"...
  processModelChunk(
    new AIMessageChunk({ content: 'Hello', id: 'run-abc123' }),
    state,
    controller,
  );
  // ...the second chunk arrives with a different id.
  processModelChunk(
    new AIMessageChunk({ content: ' world!', id: 'msg-xyz789' }),
    state,
    controller,
  );

  // Every text event keeps the id that text-start was emitted with.
  expect(emitted).toEqual([
    { type: 'text-start', id: 'run-abc123' },
    { type: 'text-delta', delta: 'Hello', id: 'run-abc123' },
    { type: 'text-delta', delta: ' world!', id: 'run-abc123' },
  ]);

  expect(state.textMessageId).toBe('run-abc123');
  expect(state.messageId).toBe('msg-xyz789');
});
});

describe('isPlainMessageObject', () => {
Expand Down
17 changes: 14 additions & 3 deletions packages/langchain/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,10 @@ export function processModelChunk(
messageId: string;
reasoningStarted?: boolean;
textStarted?: boolean;
/** Track the ID used for reasoning-start to ensure reasoning-end uses the same ID */
reasoningMessageId?: string | null;
/** Track the ID used for text-start to ensure text-end uses the same ID */
textMessageId?: string | null;
emittedImages?: Set<string>;
},
controller: ReadableStreamDefaultController<UIMessageChunk>,
Expand Down Expand Up @@ -432,14 +436,16 @@ export function processModelChunk(
extractReasoningFromValuesMessage(chunk);
if (reasoning) {
if (!state.reasoningStarted) {
// Track the ID used for reasoning-start to ensure subsequent chunks use the same ID
state.reasoningMessageId = state.messageId;
controller.enqueue({ type: 'reasoning-start', id: state.messageId });
state.reasoningStarted = true;
state.started = true;
}
controller.enqueue({
type: 'reasoning-delta',
delta: reasoning,
id: state.messageId,
id: state.reasoningMessageId ?? state.messageId,
});
}

Expand Down Expand Up @@ -467,19 +473,24 @@ export function processModelChunk(
* If reasoning was streamed before text, close reasoning first
*/
if (state.reasoningStarted && !state.textStarted) {
controller.enqueue({ type: 'reasoning-end', id: state.messageId });
controller.enqueue({
type: 'reasoning-end',
id: state.reasoningMessageId ?? state.messageId,
});
state.reasoningStarted = false;
}

if (!state.textStarted) {
// Track the ID used for text-start to ensure subsequent chunks use the same ID
state.textMessageId = state.messageId;
controller.enqueue({ type: 'text-start', id: state.messageId });
state.textStarted = true;
state.started = true;
}
controller.enqueue({
type: 'text-delta',
delta: text,
id: state.messageId,
id: state.textMessageId ?? state.messageId,
});
}
}
Expand Down
Loading