refactor: replace SSE streaming with simple JSON chat endpoint

Replace Server-Sent Events streaming implementation with straightforward
JSON request/response pattern to simplify debugging and reduce complexity.

Changes:
- server.js: Convert /api/chat/stream to /api/chat with JSON response
- src/lib/api.ts: Remove SSE parsing logic, add ChatResponse interface
- src/hooks/useChat.ts: Handle JSON response instead of stream chunks

This change makes the request flow easier to debug and troubleshoot
while maintaining the same conversation functionality.
This commit is contained in:
Danny 2026-01-20 13:49:44 -06:00
parent a4f9df6906
commit 0aaa393a6d
3 changed files with 74 additions and 209 deletions

117
server.js
View File

@@ -3,103 +3,50 @@ const path = require('path');
const { GoogleAuth } = require('google-auth-library'); const { GoogleAuth } = require('google-auth-library');
const app = express(); const app = express();
const port = process.env.PORT || 8080; app.use(express.json({ limit: '1mb' }));
const BACKEND_URL = process.env.BACKEND_URL;
// Validate required environment variable at startup const BACKEND_URL = process.env.BACKEND_URL;
if (!BACKEND_URL) { if (!BACKEND_URL) {
console.error('ERROR: BACKEND_URL environment variable is required'); console.error('FATAL: BACKEND_URL env var is required');
process.exit(1); process.exit(1);
} }
// Initialize Google Auth client (auto-detects credentials on Cloud Run)
const auth = new GoogleAuth(); const auth = new GoogleAuth();
app.use(express.json()); app.get('/health', (_req, res) => {
res.status(200).json({ status: 'ok' });
});
// Serve static React build app.post('/api/chat', async (req, res) => {
try {
// Create an ID-token authenticated client for the backend (audience = BACKEND_URL)
const idTokenClient = await auth.getIdTokenClient(BACKEND_URL);
// Forward request to backend
const backendResp = await idTokenClient.request({
url: `${BACKEND_URL}/chat`,
method: 'POST',
data: req.body,
});
res.status(backendResp.status).json(backendResp.data);
} catch (err) {
console.error('Proxy error:', err.message);
const status = err.response?.status || 500;
const message = err.response?.data || { error: err.message };
res.status(status).json(message);
}
});
// Serve static assets
app.use(express.static(path.join(__dirname, 'dist'))); app.use(express.static(path.join(__dirname, 'dist')));
// Proxy endpoint - generates ID token and forwards to backend // SPA fallback
app.post('/api/chat/stream', async (req, res) => { app.get('*', (_req, res) => {
console.log('Proxy request received for /api/chat/stream');
try {
// Get ID token client with backend URL as audience
const client = await auth.getIdTokenClient(BACKEND_URL);
const headers = await client.getRequestHeaders();
console.log('Generated ID token, forwarding to backend...');
// Forward request to backend with auth
const backendResponse = await fetch(`${BACKEND_URL}/chat/stream`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...headers,
},
body: JSON.stringify(req.body),
});
// Check if backend returned an error
if (!backendResponse.ok) {
const errorText = await backendResponse.text();
console.error(`Backend error: ${backendResponse.status} - ${errorText}`);
res.status(backendResponse.status).json({
error: 'Backend error',
status: backendResponse.status,
message: errorText,
});
return;
}
// Set SSE headers and flush immediately
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache');
res.setHeader('Connection', 'keep-alive');
res.flushHeaders?.();
// Pipe raw SSE stream from backend to browser (no parsing)
const reader = backendResponse.body.getReader();
// Handle client disconnect
req.on('close', () => {
console.log('Client disconnected');
reader.cancel();
});
// Stream loop
while (true) {
const { done, value } = await reader.read();
if (done) {
console.log('Stream complete');
res.end();
return;
}
res.write(value);
}
} catch (error) {
console.error('Proxy error:', error);
if (!res.headersSent) {
res.status(500).json({ error: 'Proxy error', message: error.message });
} else {
res.end();
}
}
});
// Health check endpoint for Cloud Run
app.get('/health', (req, res) => {
res.status(200).json({ status: 'healthy' });
});
// SPA fallback - all other routes serve index.html
app.get('*', (req, res) => {
res.sendFile(path.join(__dirname, 'dist', 'index.html')); res.sendFile(path.join(__dirname, 'dist', 'index.html'));
}); });
// Listen on 0.0.0.0 for Cloud Run const port = parseInt(process.env.PORT || '8080', 10);
app.listen(port, '0.0.0.0', () => { app.listen(port, '0.0.0.0', () => {
console.log(`Server listening on ${port}`); console.log(`Frontend proxy listening on port ${port}`);
console.log(`Backend URL: ${BACKEND_URL}`);
}); });

View File

@@ -24,47 +24,21 @@ export const useChat = (mode: ChatMode) => {
setIsLoading(true) setIsLoading(true)
try { try {
// Create placeholder assistant message // Call the chat API and get the response
const assistantMessageId = (Date.now() + 1).toString() const response = await apiClient.chat(question, mode)
let assistantContent = ''
setMessages((prev) => [ // Add assistant message with the response
...prev, const assistantMessage: Message = {
{ id: (Date.now() + 1).toString(),
id: assistantMessageId,
role: 'assistant', role: 'assistant',
content: '', content: response.response,
},
])
// Process SSE stream
const streamGenerator = apiClient.chatStream(question, mode)
for await (const chunk of streamGenerator) {
if (chunk.type === 'chunk') {
// Append chunk content
assistantContent += chunk.data
// Update assistant message
setMessages((prev) =>
prev.map((msg) =>
msg.id === assistantMessageId
? { ...msg, content: assistantContent }
: msg
)
)
} else if (chunk.type === 'error') {
toast.error(chunk.data)
console.error('Stream error:', chunk.data)
} else if (chunk.type === 'done') {
setIsLoading(false)
}
} }
setIsLoading(false) setMessages((prev) => [...prev, assistantMessage])
} catch (error) { } catch (error) {
console.error('Error sending message:', error) console.error('Error sending message:', error)
toast.error('Failed to send message. Please try again.') toast.error(error instanceof Error ? error.message : 'Failed to send message. Please try again.')
} finally {
setIsLoading(false) setIsLoading(false)
} }
} }

View File

@@ -1,9 +1,7 @@
import type { ChatMode } from '@/types/chat' import type { ChatMode } from '@/types/chat'
// In production, use relative URL to route through the proxy server // In production, use relative URL to route through the proxy server
// For local development, set VITE_API_URL=http://localhost:8000 in .env
const API_BASE_URL = '/api' const API_BASE_URL = '/api'
const API_ENDPOINT = import.meta.env.VITE_API_ENDPOINT || '/chat/stream'
const USE_MOCK_DATA = false // Set to true to use mock data for testing const USE_MOCK_DATA = false // Set to true to use mock data for testing
// Session management // Session management
@@ -35,39 +33,34 @@ const MOCK_RESPONSES = {
], ],
} }
// Mock streaming function // Chat response type from backend
async function* mockChatStream( export interface ChatResponse {
conversation_id: string
response: string
mode: string
sources: string[]
}
// Mock chat function
async function mockChat(
_question: string, _question: string,
mode: ChatMode mode: ChatMode
): AsyncGenerator<StreamEvent> { ): Promise<ChatResponse> {
const sessionId = crypto.randomUUID()
yield { type: 'session_id', data: sessionId }
// Select a random response based on mode // Select a random response based on mode
const responses = MOCK_RESPONSES[mode] const responses = MOCK_RESPONSES[mode]
const response = responses[Math.floor(Math.random() * responses.length)] const response = responses[Math.floor(Math.random() * responses.length)]
// Simulate streaming by yielding words with delays // Simulate network delay
const words = response.split(' ') await new Promise(resolve => setTimeout(resolve, 500))
for (let i = 0; i < words.length; i++) {
// Add space before word (except first word)
const chunk = i === 0 ? words[i] : ' ' + words[i]
yield { type: 'chunk', data: chunk }
// Random delay between 30-80ms to simulate typing return {
await new Promise(resolve => setTimeout(resolve, Math.random() * 50 + 30)) conversation_id: crypto.randomUUID(),
response,
mode,
sources: [],
} }
yield { type: 'done', data: null }
} }
// SSE stream event types
export type StreamEvent =
| { type: 'chunk'; data: string }
| { type: 'done'; data: null }
| { type: 'error'; data: string }
| { type: 'session_id'; data: string }
class ApiClient { class ApiClient {
private sessionId: string | null = null private sessionId: string | null = null
@@ -85,27 +78,22 @@ class ApiClient {
clearChatSessionId() clearChatSessionId()
} }
// Streaming chat endpoint (SSE) // Simple JSON chat endpoint
async *chatStream( async chat(
question: string, question: string,
mode: ChatMode, mode: ChatMode,
sessionId?: string sessionId?: string
): AsyncGenerator<StreamEvent> { ): Promise<ChatResponse> {
// Use mock data if enabled // Use mock data if enabled
if (USE_MOCK_DATA) { if (USE_MOCK_DATA) {
yield* mockChatStream(question, mode) return mockChat(question, mode)
return
} }
// Generate or reuse session ID // Generate or reuse session ID
const actualSessionId = sessionId || this.getSessionId() || crypto.randomUUID() const actualSessionId = sessionId || this.getSessionId() || crypto.randomUUID()
this.setSessionId(actualSessionId) this.setSessionId(actualSessionId)
// Yield session ID first const response = await fetch(`${API_BASE_URL}/chat`, {
yield { type: 'session_id', data: actualSessionId }
try {
const response = await fetch(`${API_BASE_URL}${API_ENDPOINT}`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -117,57 +105,13 @@ class ApiClient {
}) })
if (!response.ok) { if (!response.ok) {
yield { type: 'error', data: `HTTP error! status: ${response.status}` } const errorData = await response.json().catch(() => ({}))
return throw new Error(errorData.detail || `HTTP error! status: ${response.status}`)
} }
if (!response.body) { const data: ChatResponse = await response.json()
yield { type: 'error', data: 'Response body is null' } this.setSessionId(data.conversation_id)
return return data
}
const reader = response.body.getReader()
const decoder = new TextDecoder()
let buffer = ''
while (true) {
const { done, value } = await reader.read()
if (done) break
buffer += decoder.decode(value, { stream: true })
const lines = buffer.split('\n')
buffer = lines.pop() || ''
for (const line of lines) {
if (line.startsWith('data: ')) {
const data = line.slice(6)
try {
const parsed = JSON.parse(data)
if (parsed.type === 'chunk' && parsed.content) {
yield { type: 'chunk', data: parsed.content }
} else if (parsed.type === 'done') {
yield { type: 'done', data: null }
return
} else if (parsed.type === 'error') {
yield { type: 'error', data: parsed.message || 'Unknown error' }
return
}
} catch {
// Skip non-JSON lines
}
}
}
}
yield { type: 'done', data: null }
} catch (error) {
yield {
type: 'error',
data: error instanceof Error ? error.message : 'Network error',
}
}
} }
} }