Skip to content

Commit 925af1c

Browse files
author
erangi-ar
committed
production inference display logic change
1 parent 6830670 commit 925af1c

File tree

1 file changed

+108
-87
lines changed

1 file changed

+108
-87
lines changed
Lines changed: 108 additions & 87 deletions
Original file line number · Diff line number · Diff line change
@@ -1,9 +1,8 @@
1-
import { FC, useState, useRef, useEffect, useMemo } from 'react';
1+
import { FC, useState, useRef, useEffect } from 'react';
22
import { useTranslation } from 'react-i18next';
3-
import { Button, FormTextarea } from 'components';
3+
import { Button, FormTextarea, Section } from 'components';
4+
import { productionInference, ProductionInferenceRequest } from 'services/inference';
45
import { useToast } from 'hooks/useToast';
5-
import { useStreamingResponse } from 'hooks/useStreamingResponse';
6-
import MessageContent from 'components/MessageContent';
76
import './TestProductionLLM.scss';
87

98
interface Message {
@@ -16,115 +15,139 @@ interface Message {
1615
const TestProductionLLM: FC = () => {
1716
const { t } = useTranslation();
1817
const toast = useToast();
19-
const [inputMessage, setInputMessage] = useState<string>('');
18+
const [message, setMessage] = useState<string>('');
2019
const [messages, setMessages] = useState<Message[]>([]);
2120
const [isLoading, setIsLoading] = useState<boolean>(false);
2221
const messagesEndRef = useRef<HTMLDivElement>(null);
2322

24-
// Generate a unique channel ID for this session
25-
const channelId = useMemo(() => `channel-${Math.random().toString(36).substring(2, 15)}`, []);
26-
const { startStreaming, stopStreaming, isStreaming } = useStreamingResponse(channelId);
23+
const scrollToBottom = () => {
24+
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
25+
};
2726

28-
// Auto-scroll to bottom
2927
useEffect(() => {
30-
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
28+
scrollToBottom();
3129
}, [messages]);
3230

3331
const handleSendMessage = async () => {
34-
if (!inputMessage.trim()) {
32+
if (!message.trim()) {
3533
toast.open({
3634
type: 'warning',
37-
title: 'Warning',
38-
message: 'Please enter a message',
35+
title: t('warningTitle'),
36+
message: t('emptyMessageWarning'),
3937
});
4038
return;
4139
}
4240

43-
const userMessageText = inputMessage.trim();
44-
45-
// Add user message
4641
const userMessage: Message = {
4742
id: `user-${Date.now()}`,
48-
content: userMessageText,
43+
content: message.trim(),
4944
isUser: true,
5045
timestamp: new Date().toISOString(),
5146
};
5247

48+
// Add user message to chat
5349
setMessages(prev => [...prev, userMessage]);
54-
setInputMessage('');
50+
setMessage('');
5551
setIsLoading(true);
5652

57-
// Create bot message ID
58-
const botMessageId = `bot-${Date.now()}`;
53+
try {
54+
// Hardcoded values as requested
55+
const request: ProductionInferenceRequest = {
56+
chatId: 'test-chat-001',
57+
message: userMessage.content,
58+
authorId: 'test-author-001',
59+
conversationHistory: messages.map(msg => ({
60+
authorRole: msg.isUser ? 'user' : 'bot',
61+
message: msg.content,
62+
timestamp: msg.timestamp,
63+
})),
64+
url: 'https://test-url.example.com',
65+
};
66+
67+
let response;
68+
let attemptCount = 0;
69+
const maxAttempts = 2;
70+
71+
// Retry logic
72+
while (attemptCount < maxAttempts) {
73+
try {
74+
attemptCount++;
75+
console.log(`Production Inference Attempt ${attemptCount}/${maxAttempts}`);
76+
response = await productionInference(request);
77+
78+
// If we get a successful response, break out of retry loop
79+
if (!response.status || response.status < 400) {
80+
break;
81+
}
82+
83+
// If first attempt failed with error status, retry once more
84+
if (attemptCount < maxAttempts && response.status >= 400) {
85+
console.log('Retrying due to error status...');
86+
continue;
87+
}
88+
} catch (err) {
89+
// If first attempt threw an error, retry once more
90+
if (attemptCount < maxAttempts) {
91+
console.log('Retrying due to exception...');
92+
continue;
93+
}
94+
throw err; // Re-throw on final attempt
95+
}
96+
}
5997

60-
// Prepare conversation history (exclude the current user message)
61-
const conversationHistory = messages.map(msg => ({
62-
authorRole: msg.isUser ? 'user' : 'bot',
63-
message: msg.content,
64-
timestamp: msg.timestamp,
65-
}));
98+
console.log('Production Inference Response:', response);
6699

67-
const streamingOptions = {
68-
authorId: 'test-user-456',
69-
conversationHistory,
70-
url: 'opensearch-dashboard-test',
71-
};
100+
// Create bot response message
101+
let botContent = '';
102+
let botMessageType: 'success' | 'error' = 'success';
72103

73-
// Callbacks for streaming
74-
const onToken = (token: string) => {
75-
console.log('[Component] Received token:', token);
76-
77-
setMessages(prev => {
78-
// Find the bot message
79-
const botMsgIndex = prev.findIndex(msg => msg.id === botMessageId);
104+
if (response.status && response.status >= 400) {
105+
// Error response
106+
botContent = response.content || 'An error occurred while processing your request.';
107+
botMessageType = 'error';
108+
} else {
109+
// Success response
110+
botContent = response?.response?.content || 'Response received successfully.';
80111

81-
if (botMsgIndex === -1) {
82-
// First token - add the bot message
83-
console.log('[Component] Adding bot message with first token');
84-
setIsLoading(false);
85-
return [
86-
...prev,
87-
{
88-
id: botMessageId,
89-
content: token,
90-
isUser: false,
91-
timestamp: new Date().toISOString(),
92-
}
93-
];
94-
} else {
95-
// Append token to existing message
96-
console.log('[Component] Appending token to existing message');
97-
const updated = [...prev];
98-
updated[botMsgIndex] = {
99-
...updated[botMsgIndex],
100-
content: updated[botMsgIndex].content + token,
101-
};
102-
return updated;
112+
if (response.questionOutOfLlmScope) {
113+
botContent += ' (Note: This question appears to be outside the LLM scope)';
103114
}
104-
});
105-
};
115+
}
106116

107-
const onComplete = () => {
108-
console.log('[Component] Stream completed');
109-
setIsLoading(false);
110-
};
117+
const botMessage: Message = {
118+
id: `bot-${Date.now()}`,
119+
content: botContent,
120+
isUser: false,
121+
timestamp: new Date().toISOString(),
122+
};
111123

112-
const onError = (error: string) => {
113-
console.error('[Component] Stream error:', error);
114-
setIsLoading(false);
124+
setMessages(prev => [...prev, botMessage]);
125+
126+
// Show toast notification
127+
// toast.open({
128+
// type: botMessageType,
129+
// title: t('errorOccurred'),
130+
// message: t('errorMessage'),
131+
// });
132+
133+
} catch (error) {
134+
console.error('Error sending message:', error);
115135

136+
const errorMessage: Message = {
137+
id: `error-${Date.now()}`,
138+
content: 'Failed to send message. Please check your connection and try again.',
139+
isUser: false,
140+
timestamp: new Date().toISOString(),
141+
};
142+
143+
setMessages(prev => [...prev, errorMessage]);
144+
116145
toast.open({
117146
type: 'error',
118-
title: 'Streaming Error',
119-
message: error,
147+
title: 'Connection Error',
148+
message: 'Unable to connect to the production LLM service.',
120149
});
121-
};
122-
123-
// Start streaming
124-
try {
125-
await startStreaming(userMessageText, streamingOptions, onToken, onComplete, onError);
126-
} catch (error) {
127-
console.error('[Component] Failed to start streaming:', error);
150+
} finally {
128151
setIsLoading(false);
129152
}
130153
};
@@ -138,7 +161,6 @@ const TestProductionLLM: FC = () => {
138161

139162
const clearChat = () => {
140163
setMessages([]);
141-
stopStreaming();
142164
toast.open({
143165
type: 'info',
144166
title: 'Chat Cleared',
@@ -173,8 +195,7 @@ const TestProductionLLM: FC = () => {
173195
}`}
174196
>
175197
<div className="test-production-llm__message-content">
176-
<MessageContent content={msg.content} />
177-
{/* {msg.content} */}
198+
{msg.content}
178199
</div>
179200
<div className="test-production-llm__message-timestamp">
180201
{new Date(msg.timestamp).toLocaleTimeString()}
@@ -201,20 +222,20 @@ const TestProductionLLM: FC = () => {
201222
<FormTextarea
202223
label="Message"
203224
name="message"
204-
value={inputMessage}
205-
onChange={(e) => setInputMessage(e.target.value)}
225+
value={message}
226+
onChange={(e) => setMessage(e.target.value)}
206227
onKeyDown={handleKeyPress}
207228
placeholder="Type your message here... (Press Enter to send, Shift+Enter for new line)"
208229
hideLabel
209230
maxRows={4}
210-
disabled={isLoading || isStreaming}
231+
disabled={isLoading}
211232
/>
212233
<Button
213234
onClick={handleSendMessage}
214-
disabled={isLoading || isStreaming || !inputMessage.trim()}
235+
disabled={isLoading || !message.trim()}
215236
className="test-production-llm__send-button"
216237
>
217-
{isLoading || isStreaming ? 'Sending...' : 'Send'}
238+
{isLoading ? 'Sending...' : 'Send'}
218239
</Button>
219240
</div>
220241
</div>
@@ -223,4 +244,4 @@ const TestProductionLLM: FC = () => {
223244
);
224245
};
225246

226-
export default TestProductionLLM;
247+
export default TestProductionLLM;

0 commit comments

Comments
 (0)