refactor: replace SSE streaming with simple JSON chat endpoint
Replace the Server-Sent Events streaming implementation with a straightforward JSON request/response pattern to simplify debugging and reduce complexity.

Changes:
- server.js: Convert /api/chat/stream to /api/chat with a JSON response
- src/lib/api.ts: Remove SSE parsing logic, add a ChatResponse interface
- src/hooks/useChat.ts: Handle the JSON response instead of stream chunks

This change makes the request flow easier to debug and troubleshoot while maintaining the same conversation functionality.
This commit is contained in:
parent
a4f9df6906
commit
0aaa393a6d
103
server.js
103
server.js
|
|
@@ -3,103 +3,50 @@ const path = require('path');
|
|||
const { GoogleAuth } = require('google-auth-library');
|
||||
|
||||
const app = express();
|
||||
const port = process.env.PORT || 8080;
|
||||
const BACKEND_URL = process.env.BACKEND_URL;
|
||||
app.use(express.json({ limit: '1mb' }));
|
||||
|
||||
// Validate required environment variable at startup
|
||||
const BACKEND_URL = process.env.BACKEND_URL;
|
||||
if (!BACKEND_URL) {
|
||||
console.error('ERROR: BACKEND_URL environment variable is required');
|
||||
console.error('FATAL: BACKEND_URL env var is required');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Initialize Google Auth client (auto-detects credentials on Cloud Run)
|
||||
const auth = new GoogleAuth();
|
||||
|
||||
app.use(express.json());
|
||||
|
||||
// Serve static React build
|
||||
app.use(express.static(path.join(__dirname, 'dist')));
|
||||
|
||||
// Proxy endpoint - generates ID token and forwards to backend
|
||||
app.post('/api/chat/stream', async (req, res) => {
|
||||
console.log('Proxy request received for /api/chat/stream');
|
||||
app.get('/health', (_req, res) => {
|
||||
res.status(200).json({ status: 'ok' });
|
||||
});
|
||||
|
||||
app.post('/api/chat', async (req, res) => {
|
||||
try {
|
||||
// Get ID token client with backend URL as audience
|
||||
const client = await auth.getIdTokenClient(BACKEND_URL);
|
||||
const headers = await client.getRequestHeaders();
|
||||
// Create an ID-token authenticated client for the backend (audience = BACKEND_URL)
|
||||
const idTokenClient = await auth.getIdTokenClient(BACKEND_URL);
|
||||
|
||||
console.log('Generated ID token, forwarding to backend...');
|
||||
|
||||
// Forward request to backend with auth
|
||||
const backendResponse = await fetch(`${BACKEND_URL}/chat/stream`, {
|
||||
// Forward request to backend
|
||||
const backendResp = await idTokenClient.request({
|
||||
url: `${BACKEND_URL}/chat`,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...headers,
|
||||
},
|
||||
body: JSON.stringify(req.body),
|
||||
data: req.body,
|
||||
});
|
||||
|
||||
// Check if backend returned an error
|
||||
if (!backendResponse.ok) {
|
||||
const errorText = await backendResponse.text();
|
||||
console.error(`Backend error: ${backendResponse.status} - ${errorText}`);
|
||||
res.status(backendResponse.status).json({
|
||||
error: 'Backend error',
|
||||
status: backendResponse.status,
|
||||
message: errorText,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Set SSE headers and flush immediately
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
res.flushHeaders?.();
|
||||
|
||||
// Pipe raw SSE stream from backend to browser (no parsing)
|
||||
const reader = backendResponse.body.getReader();
|
||||
|
||||
// Handle client disconnect
|
||||
req.on('close', () => {
|
||||
console.log('Client disconnected');
|
||||
reader.cancel();
|
||||
});
|
||||
|
||||
// Stream loop
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
console.log('Stream complete');
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
res.write(value);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Proxy error:', error);
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({ error: 'Proxy error', message: error.message });
|
||||
} else {
|
||||
res.end();
|
||||
}
|
||||
res.status(backendResp.status).json(backendResp.data);
|
||||
} catch (err) {
|
||||
console.error('Proxy error:', err.message);
|
||||
const status = err.response?.status || 500;
|
||||
const message = err.response?.data || { error: err.message };
|
||||
res.status(status).json(message);
|
||||
}
|
||||
});
|
||||
|
||||
// Health check endpoint for Cloud Run
|
||||
app.get('/health', (req, res) => {
|
||||
res.status(200).json({ status: 'healthy' });
|
||||
});
|
||||
// Serve static assets
|
||||
app.use(express.static(path.join(__dirname, 'dist')));
|
||||
|
||||
// SPA fallback - all other routes serve index.html
|
||||
app.get('*', (req, res) => {
|
||||
// SPA fallback
|
||||
app.get('*', (_req, res) => {
|
||||
res.sendFile(path.join(__dirname, 'dist', 'index.html'));
|
||||
});
|
||||
|
||||
// Listen on 0.0.0.0 for Cloud Run
|
||||
const port = parseInt(process.env.PORT || '8080', 10);
|
||||
app.listen(port, '0.0.0.0', () => {
|
||||
console.log(`Server listening on ${port}`);
|
||||
console.log(`Backend URL: ${BACKEND_URL}`);
|
||||
console.log(`Frontend proxy listening on port ${port}`);
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -24,47 +24,21 @@ export const useChat = (mode: ChatMode) => {
|
|||
setIsLoading(true)
|
||||
|
||||
try {
|
||||
// Create placeholder assistant message
|
||||
const assistantMessageId = (Date.now() + 1).toString()
|
||||
let assistantContent = ''
|
||||
// Call the chat API and get the response
|
||||
const response = await apiClient.chat(question, mode)
|
||||
|
||||
setMessages((prev) => [
|
||||
...prev,
|
||||
{
|
||||
id: assistantMessageId,
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
},
|
||||
])
|
||||
|
||||
// Process SSE stream
|
||||
const streamGenerator = apiClient.chatStream(question, mode)
|
||||
|
||||
for await (const chunk of streamGenerator) {
|
||||
if (chunk.type === 'chunk') {
|
||||
// Append chunk content
|
||||
assistantContent += chunk.data
|
||||
|
||||
// Update assistant message
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessageId
|
||||
? { ...msg, content: assistantContent }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
} else if (chunk.type === 'error') {
|
||||
toast.error(chunk.data)
|
||||
console.error('Stream error:', chunk.data)
|
||||
} else if (chunk.type === 'done') {
|
||||
setIsLoading(false)
|
||||
}
|
||||
// Add assistant message with the response
|
||||
const assistantMessage: Message = {
|
||||
id: (Date.now() + 1).toString(),
|
||||
role: 'assistant',
|
||||
content: response.response,
|
||||
}
|
||||
|
||||
setIsLoading(false)
|
||||
setMessages((prev) => [...prev, assistantMessage])
|
||||
} catch (error) {
|
||||
console.error('Error sending message:', error)
|
||||
toast.error('Failed to send message. Please try again.')
|
||||
toast.error(error instanceof Error ? error.message : 'Failed to send message. Please try again.')
|
||||
} finally {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
134
src/lib/api.ts
134
src/lib/api.ts
|
|
@@ -1,9 +1,7 @@
|
|||
import type { ChatMode } from '@/types/chat'
|
||||
|
||||
// In production, use relative URL to route through the proxy server
|
||||
// For local development, set VITE_API_URL=http://localhost:8000 in .env
|
||||
const API_BASE_URL = '/api'
|
||||
const API_ENDPOINT = import.meta.env.VITE_API_ENDPOINT || '/chat/stream'
|
||||
const USE_MOCK_DATA = false // Set to true to use mock data for testing
|
||||
|
||||
// Session management
|
||||
|
|
@@ -35,39 +33,34 @@ const MOCK_RESPONSES = {
|
|||
],
|
||||
}
|
||||
|
||||
// Mock streaming function
|
||||
async function* mockChatStream(
|
||||
// Chat response type from backend
|
||||
export interface ChatResponse {
|
||||
conversation_id: string
|
||||
response: string
|
||||
mode: string
|
||||
sources: string[]
|
||||
}
|
||||
|
||||
// Mock chat function
|
||||
async function mockChat(
|
||||
_question: string,
|
||||
mode: ChatMode
|
||||
): AsyncGenerator<StreamEvent> {
|
||||
const sessionId = crypto.randomUUID()
|
||||
yield { type: 'session_id', data: sessionId }
|
||||
|
||||
): Promise<ChatResponse> {
|
||||
// Select a random response based on mode
|
||||
const responses = MOCK_RESPONSES[mode]
|
||||
const response = responses[Math.floor(Math.random() * responses.length)]
|
||||
|
||||
// Simulate streaming by yielding words with delays
|
||||
const words = response.split(' ')
|
||||
for (let i = 0; i < words.length; i++) {
|
||||
// Add space before word (except first word)
|
||||
const chunk = i === 0 ? words[i] : ' ' + words[i]
|
||||
yield { type: 'chunk', data: chunk }
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 500))
|
||||
|
||||
// Random delay between 30-80ms to simulate typing
|
||||
await new Promise(resolve => setTimeout(resolve, Math.random() * 50 + 30))
|
||||
return {
|
||||
conversation_id: crypto.randomUUID(),
|
||||
response,
|
||||
mode,
|
||||
sources: [],
|
||||
}
|
||||
|
||||
yield { type: 'done', data: null }
|
||||
}
|
||||
|
||||
// SSE stream event types
|
||||
export type StreamEvent =
|
||||
| { type: 'chunk'; data: string }
|
||||
| { type: 'done'; data: null }
|
||||
| { type: 'error'; data: string }
|
||||
| { type: 'session_id'; data: string }
|
||||
|
||||
class ApiClient {
|
||||
private sessionId: string | null = null
|
||||
|
||||
|
|
@@ -85,89 +78,40 @@ class ApiClient {
|
|||
clearChatSessionId()
|
||||
}
|
||||
|
||||
// Streaming chat endpoint (SSE)
|
||||
async *chatStream(
|
||||
// Simple JSON chat endpoint
|
||||
async chat(
|
||||
question: string,
|
||||
mode: ChatMode,
|
||||
sessionId?: string
|
||||
): AsyncGenerator<StreamEvent> {
|
||||
): Promise<ChatResponse> {
|
||||
// Use mock data if enabled
|
||||
if (USE_MOCK_DATA) {
|
||||
yield* mockChatStream(question, mode)
|
||||
return
|
||||
return mockChat(question, mode)
|
||||
}
|
||||
|
||||
// Generate or reuse session ID
|
||||
const actualSessionId = sessionId || this.getSessionId() || crypto.randomUUID()
|
||||
this.setSessionId(actualSessionId)
|
||||
|
||||
// Yield session ID first
|
||||
yield { type: 'session_id', data: actualSessionId }
|
||||
const response = await fetch(`${API_BASE_URL}/chat`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
message: question,
|
||||
conversation_id: actualSessionId,
|
||||
}),
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await fetch(`${API_BASE_URL}${API_ENDPOINT}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
message: question,
|
||||
conversation_id: actualSessionId,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
yield { type: 'error', data: `HTTP error! status: ${response.status}` }
|
||||
return
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
yield { type: 'error', data: 'Response body is null' }
|
||||
return
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
const lines = buffer.split('\n')
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith('data: ')) {
|
||||
const data = line.slice(6)
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(data)
|
||||
if (parsed.type === 'chunk' && parsed.content) {
|
||||
yield { type: 'chunk', data: parsed.content }
|
||||
} else if (parsed.type === 'done') {
|
||||
yield { type: 'done', data: null }
|
||||
return
|
||||
} else if (parsed.type === 'error') {
|
||||
yield { type: 'error', data: parsed.message || 'Unknown error' }
|
||||
return
|
||||
}
|
||||
} catch {
|
||||
// Skip non-JSON lines
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
yield { type: 'done', data: null }
|
||||
} catch (error) {
|
||||
yield {
|
||||
type: 'error',
|
||||
data: error instanceof Error ? error.message : 'Network error',
|
||||
}
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(errorData.detail || `HTTP error! status: ${response.status}`)
|
||||
}
|
||||
|
||||
const data: ChatResponse = await response.json()
|
||||
this.setSessionId(data.conversation_id)
|
||||
return data
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue