Fixed 502s for chatbots
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/manual/woodpecker Pipeline was successful

This commit is contained in:
Josh 2025-08-21 12:09:13 +00:00
parent 5da3de3152
commit a8c5ed828b
6 changed files with 258 additions and 143 deletions

View File

@ -1 +1 @@
e43b26fea335b87cb7d2747d85540177b96c7847-803b2c2ecad09a0fbca070296808a53489de891a-e9eccd451b778829eb2f2c9752c670b707e1268b
e0de79c21e9b87f23a4da67149cea4e0e979e9e0-803b2c2ecad09a0fbca070296808a53489de891a-e9eccd451b778829eb2f2c9752c670b707e1268b

View File

@ -39,7 +39,6 @@ const CIP_TO_SOC_PATH = path.join(PUBLIC_DIR, 'CIP_to_ONET_SOC.xlsx');
const INSTITUTION_DATA_PATH= path.join(PUBLIC_DIR, 'Institution_data.json');
const SALARY_DB_PATH = path.join(ROOT_DIR, 'salary_info.db');
const USER_PROFILE_DB_PATH = path.join(ROOT_DIR, 'user_profile.db');
const API_BASE = (process.env.APTIVA_API_BASE || 'http://server1:5000').replace(/\/+$/,'');
for (const p of [CIP_TO_SOC_PATH, INSTITUTION_DATA_PATH, SALARY_DB_PATH, USER_PROFILE_DB_PATH]) {
if (!fs.existsSync(p)) {
@ -1253,11 +1252,13 @@ ${body}`;
}
);
/* ----------------- Support bot chat (server2) ----------------- */
/* CREATE thread — inserts a "support" thread for the caller and returns { id, title }.
   (Scraped diff held both old and new versions interleaved — duplicate `const id`/
   `const title` declarations and the stale `/api/support/chat/threads` route;
   reconstructed as the post-commit code.) */
app.post('/api/chat/threads', authenticateUser, async (req, res) => {
  const userId = req.user.id;
  const id = uuid();
  // Cap client-supplied titles at 200 chars; fall back to a default.
  const title = (req.body?.title || 'Support chat').slice(0, 200);
  await pool.query(
    'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "support", ?)',
    [id, userId, title]
  );
  res.json({ id, title });
});
/* LIST threads — newest-first "support" threads for the caller, capped at 50.
   (Old `/api/support/chat/threads` route line removed; diff artifact cleaned.) */
app.get('/api/chat/threads', authenticateUser, async (req, res) => {
  const [rows] = await pool.query(
    'SELECT id,title,updated_at FROM ai_chat_threads WHERE user_id=? AND bot_type="support" ORDER BY updated_at DESC LIMIT 50',
    [req.user.id]
  );
  res.json({ threads: rows });
});
/* GET one thread + messages — 404 unless the thread belongs to the caller
   and is a "support" thread; returns up to 200 messages in insertion order.
   (Old `/api/support/...` route line removed; diff artifact cleaned.) */
app.get('/api/chat/threads/:id', authenticateUser, async (req, res) => {
  const { id } = req.params;
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="support"',
    [id, req.user.id]
  );
  if (!t) return res.status(404).json({ error: 'not_found' });
  const [msgs] = await pool.query(
    'SELECT role,content,created_at FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 200',
    [id]
  );
  res.json({ messages: msgs });
});
/* STREAM reply via local /api/chat/free.
   Saves the user message, proxies the upstream SSE-ish newline stream to the
   client, then persists the buffered assistant reply.
   (Scraped diff interleaved old/new bodies — duplicate `const reader`, two
   header-setting styles, two flush helpers; reconstructed as the post-commit
   version: direct localhost:5001 call with Cookie forwarding, writeHead with
   X-Accel-Buffering, synchronous `push`, and try/catch around the read loop.) */
app.post('/api/chat/threads/:id/stream', authenticateUser, async (req, res) => {
  const { id } = req.params;
  const userId = req.user.id;
  const { prompt = '', pageContext = '', snapshot = null } = req.body || {};

  // Ownership check. NOTE(review): the SELECT text was elided between diff
  // hunks; inferred from the sibling GET handler — confirm against the repo.
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="support"',
    [id, userId]
  );
  if (!t) return res.status(404).json({ error: 'not_found' });

  // save user msg
  await pool.query(
    'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "user", ?)',
    [id, userId, prompt]
  );

  // small history for context
  const [history] = await pool.query(
    'SELECT role,content FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 40',
    [id]
  );

  // call local free-chat (server2 hosts /api/chat/free); forward auth + cookie
  // so the internal endpoint sees the same identity as this request.
  const internal = await fetch('http://localhost:5001/api/chat/free', {
    method: 'POST',
    headers: {
      'Content-Type' : 'application/json',
      'Accept'       : 'text/event-stream',
      'Authorization': req.headers.authorization || '',
      'Cookie'       : req.headers.cookie || ''
    },
    body: JSON.stringify({ prompt, pageContext, snapshot, chatHistory: history })
  });
  if (!internal.ok || !internal.body) {
    return res.status(502).json({ error: 'upstream_failed' });
  }

  // SSE-ish newline stream (matches the ChatDrawer reader);
  // X-Accel-Buffering disables proxy buffering so chunks flush immediately.
  res.writeHead(200, {
    'Content-Type'     : 'text/event-stream; charset=utf-8',
    'Cache-Control'    : 'no-cache',
    'Connection'       : 'keep-alive',
    'X-Accel-Buffering': 'no'
  });
  res.flushHeaders?.();

  const reader = internal.body.getReader();
  const decoder = new TextDecoder();
  let buf = '';
  let assistant = '';

  // Forward one line to the client while buffering it for persistence.
  const push = (line) => {
    assistant += line + '\n';
    res.write(line + '\n'); // write strings, no await
  };

  try {
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      if (!value) continue;
      buf += decoder.decode(value, { stream: true });
      let nl;
      while ((nl = buf.indexOf('\n')) !== -1) {
        const line = buf.slice(0, nl).trim();
        buf = buf.slice(nl + 1);
        if (line) push(line);
      }
    }
    if (buf.trim()) push(buf.trim()); // trailing partial line
  } catch (err) {
    console.error('[support stream]', err);
    res.write('Sorry — error occurred\n');
  }

  // persist assistant (skip empty replies so we don't store blank rows)
  if (assistant.trim()) {
    await pool.query(
      'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "assistant", ?)',
      [id, userId, assistant.trim()]
    );
    await pool.query('UPDATE ai_chat_threads SET updated_at=CURRENT_TIMESTAMP WHERE id=?', [id]);
  }
  res.end();
});
/**************************************************
* Start the Express server
**************************************************/

View File

@ -156,6 +156,7 @@ function internalFetch(req, urlPath, opts = {}) {
headers: {
"Content-Type": "application/json",
Authorization: req.headers?.authorization || "", // tolerate undefined
Cookie: req.headers?.cookie || "",
...(opts.headers || {})
}
});
@ -1723,16 +1724,19 @@ Always end with: “AptivaAI is an educational tool not advice.”
);
/* ------------- Retirement chat threads ------------- */
/* CREATE a Retirement thread — returns { id, title }.
   Bug fixed by this commit: the old params array was [req.id, title] — it
   omitted `id`, giving the 3-placeholder INSERT only 2 values (the 502). */
app.post('/api/premium/retire/chat/threads', authenticatePremiumUser, async (req, res) => {
  const id = uuid();
  const title = (req.body?.title || 'Retirement chat').slice(0, 200);
  await pool.query(
    'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "retire", ?)',
    [id, req.id, title]
  );
  res.json({ id, title });
});
/* LIST Retirement threads — newest-first, capped at 50.
   NOTE(review): the query-params line was elided by a diff hunk header;
   `[req.id]` inferred from sibling premium handlers — confirm. */
app.get('/api/premium/retire/chat/threads', authenticatePremiumUser, async (req, res) => {
  const [rows] = await pool.query(
    'SELECT id,title,updated_at FROM ai_chat_threads WHERE user_id=? AND bot_type="retire" ORDER BY updated_at DESC LIMIT 50',
    [req.id]
  );
  res.json({ threads: rows });
});
/* GET one Retirement thread + messages — 404 unless owned by the caller.
   NOTE(review): the ownership SELECT text was elided by a diff hunk header;
   inferred from sibling handlers (bot_type="retire") — confirm. */
app.get('/api/premium/retire/chat/threads/:id', authenticatePremiumUser, async (req, res) => {
  const { id } = req.params;
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="retire"',
    [id, req.id]
  );
  if (!t) return res.status(404).json({ error: 'not_found' });
  const [msgs] = await pool.query(
    'SELECT role,content,created_at FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 200',
    [id]
  );
  res.json({ messages: msgs });
});
/* POST a message (auto-create thread if missing).
   Flow: validate → ensure thread → save user msg → load ≤40-message history →
   call the existing retirement AI endpoint → persist reply → return the full
   upstream JSON (keeps scenarioPatch passthrough).
   (Scraped diff interleaved old/new bodies — e.g. the removed
   `if (!t) return 404` next to the new auto-create branch, and a stray
   non-returning `res.json(json)`; reconstructed as the post-commit version.) */
app.post('/api/premium/retire/chat/threads/:id/messages', authenticatePremiumUser, async (req, res) => {
  const { id } = req.params;
  const { content = '', context = {} } = req.body || {};
  if (!content.trim()) return res.status(400).json({ error: 'empty' });

  // ensure thread exists (auto-create if missing — client may send a fresh uuid)
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="retire"',
    [id, req.id]
  );
  if (!t) {
    await pool.query(
      'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "retire", ?)',
      [id, req.id, 'Retirement chat']
    );
  }

  // save user msg
  await pool.query(
    'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "user", ?)',
    [id, req.id, content]
  );

  // history (≤40) for context
  const [history] = await pool.query(
    'SELECT role,content FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 40',
    [id]
  );

  // call AI — reuses the existing retirement logic (keeps safety/patch behavior)
  const resp = await internalFetch(req, '/premium/retirement/aichat', {
    method : 'POST',
    headers: { 'Content-Type': 'application/json' },
    body   : JSON.stringify({ prompt: content, scenario_id: context?.scenario_id, chatHistory: history })
  });

  if (!resp.ok) {
    // Upstream failure: user msg is already saved; surface a 502 to the client.
    return res.status(502).json({ error: 'upstream_failed' });
  }

  const json = await resp.json();
  const reply = (json?.reply || '').trim() || 'Sorry, please try again.';

  // save AI reply and bump the thread's updated_at
  await pool.query(
    'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "assistant", ?)',
    [id, req.id, reply]
  );
  await pool.query('UPDATE ai_chat_threads SET updated_at=CURRENT_TIMESTAMP WHERE id=?', [id]);
  return res.json(json); // keep scenarioPatch passthrough
});
/* ------------------ Coach chat threads ------------------ */
/* CREATE a Coach thread — returns { id, title }.
   Same commit fix as the retire route: old params [req.id, title] omitted
   `id`, starving the 3-placeholder INSERT (the 502). */
app.post('/api/premium/coach/chat/threads', authenticatePremiumUser, async (req, res) => {
  const id = uuid();
  const title = (req.body?.title || 'CareerCoach chat').slice(0, 200);
  await pool.query(
    'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "coach", ?)',
    [id, req.id, title]
  );
  res.json({ id, title });
});
/* LIST Coach threads — newest-first, capped at 50.
   NOTE(review): the query-params line was elided by a diff hunk header;
   `[req.id]` inferred from sibling premium handlers — confirm. */
app.get('/api/premium/coach/chat/threads', authenticatePremiumUser, async (req, res) => {
  const [rows] = await pool.query(
    'SELECT id,title,updated_at FROM ai_chat_threads WHERE user_id=? AND bot_type="coach" ORDER BY updated_at DESC LIMIT 50',
    [req.id]
  );
  res.json({ threads: rows });
});
/* GET one Coach thread + messages — 404 unless owned by the caller.
   NOTE(review): the ownership SELECT text was elided by a diff hunk header;
   inferred from sibling handlers (bot_type="coach") — confirm. */
app.get('/api/premium/coach/chat/threads/:id', authenticatePremiumUser, async (req, res) => {
  const { id } = req.params;
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="coach"',
    [id, req.id]
  );
  if (!t) return res.status(404).json({ error: 'not_found' });
  const [msgs] = await pool.query(
    'SELECT role,content,created_at FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 200',
    [id]
  );
  res.json({ messages: msgs });
});
/* POST a message (auto-create thread if missing).
   Flow: validate → ensure thread → save user msg → load ≤40-message history →
   call the existing /premium/ai/chat prompt builder (context carries
   userProfile, scenarioRow, etc.) → persist reply → return { reply }.
   (Scraped diff interleaved old/new bodies — the removed `if (!t) return 404`
   next to the new auto-create branch, and a stray non-returning
   `res.json({ reply })`; reconstructed as the post-commit version.) */
app.post('/api/premium/coach/chat/threads/:id/messages', authenticatePremiumUser, async (req, res) => {
  const { id } = req.params;
  const { content = '', context = {} } = req.body || {};
  if (!content.trim()) return res.status(400).json({ error: 'empty' });

  // ensure thread exists (auto-create if missing — client may send a fresh uuid)
  const [[t]] = await pool.query(
    'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="coach"',
    [id, req.id]
  );
  if (!t) {
    await pool.query(
      'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "coach", ?)',
      [id, req.id, 'CareerCoach chat']
    );
  }

  // save user msg
  await pool.query(
    'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "user", ?)',
    [id, req.id, content]
  );

  // history (≤40) for context
  const [history] = await pool.query(
    'SELECT role,content FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 40',
    [id]
  );

  // call AI — spread the client context into the existing prompt builder
  const resp = await internalFetch(req, '/premium/ai/chat', {
    method : 'POST',
    headers: { 'Content-Type': 'application/json' },
    body   : JSON.stringify({ ...context, chatHistory: history })
  });

  if (!resp.ok) {
    // Upstream failure: user msg is already saved; surface a 502 to the client.
    return res.status(502).json({ error: 'upstream_failed' });
  }

  const json = await resp.json();
  const reply = (json?.reply || '').trim() || 'Sorry, please try again.';

  // save AI reply and bump the thread's updated_at
  await pool.query(
    'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "assistant", ?)',
    [id, req.id, reply]
  );
  await pool.query('UPDATE ai_chat_threads SET updated_at=CURRENT_TIMESTAMP WHERE id=?', [id]);
  return res.json({ reply });
});
app.post('/api/premium/career-profile/clone', authenticatePremiumUser, async (req,res) => {

View File

@ -217,3 +217,20 @@ CREATE TABLE IF NOT EXISTS ai_chat_messages (
FOREIGN KEY (thread_id) REFERENCES ai_chat_threads(id)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Orphan message thread_ids (no matching thread row) — inspection query.
SELECT DISTINCT m.thread_id
FROM ai_chat_messages m
LEFT JOIN ai_chat_threads t ON t.id = m.thread_id
WHERE t.id IS NULL;

-- Backfill a thread row for each orphaned thread_id so the FK below can be added.
-- DISTINCT is required: without it, a thread_id that has several orphan messages
-- is selected once per message, and the second INSERT of the same id fails on
-- the primary key (or silently duplicates, depending on engine settings).
-- NOTE(review): user_id 58 and bot_type 'coach' are hard-coded — confirm all
-- orphans actually belong to that user/bot before running in production.
INSERT INTO ai_chat_threads (id, user_id, bot_type, title)
SELECT DISTINCT m.thread_id, 58, 'coach', 'CareerCoach chat'
FROM ai_chat_messages m
LEFT JOIN ai_chat_threads t ON t.id = m.thread_id
WHERE t.id IS NULL;

-- Re-add the cascade FK now that every message points at an existing thread.
ALTER TABLE ai_chat_messages
  ADD CONSTRAINT fk_messages_thread
  FOREIGN KEY (thread_id) REFERENCES ai_chat_threads(id)
  ON DELETE CASCADE;

View File

@ -3,6 +3,24 @@ import authFetch from "../utils/authFetch.js";
const isoToday = new Date().toISOString().slice(0,10); // top-level helper
/**
 * Return the id of an existing CareerCoach thread, creating one when none exists.
 * Reuses the most recent thread from the list endpoint; otherwise POSTs a new one.
 * @returns {Promise<string>} thread id
 * @throws {Error} when the create request fails
 */
async function ensureCoachThread() {
  const isJson = (resp) => (resp.headers.get('content-type') || '').includes('application/json');

  // Prefer reusing an existing thread.
  const listResp = await authFetch('/api/premium/coach/chat/threads');
  if (listResp.ok && isJson(listResp)) {
    const { threads = [] } = await listResp.json();
    if (threads.length > 0) return threads[0].id;
  }

  // Nothing to reuse — create a fresh thread.
  const createResp = await authFetch('/api/premium/coach/chat/threads', {
    method : 'POST',
    headers: { 'Content-Type': 'application/json' },
    body   : JSON.stringify({ title: 'CareerCoach chat' })
  });
  if (!createResp.ok) throw new Error('failed to create coach thread');

  const { id } = await createResp.json();
  return id;
}
function buildInterviewPrompt(careerName, jobDescription = "") {
return `
You are an expert interviewer for the role **${careerName}**.
@ -126,41 +144,50 @@ export default function CareerCoach({
if (chatRef.current) chatRef.current.scrollTop = chatRef.current.scrollHeight;
}, [messages]);
// Ensure/create the coach thread on mount (per careerProfileId) and preload
// its history. NOTE(review): the scraped diff interleaved the old effect and
// two candidate replacements (duplicate `useEffect(() => {` openings);
// reconstructed as the single cancellation-aware version — confirm against
// the repo that only one effect survives.
useEffect(() => {
  let cancelled = false;
  (async () => {
    if (!careerProfileId) return;
    try {
      // ensure or create a thread
      const newId = await ensureCoachThread();
      if (cancelled) return;
      setThreadId(newId);
      // preload history (best-effort; failures leave the chat empty but usable)
      const r3 = await authFetch(`/api/premium/coach/chat/threads/${newId}`);
      if (cancelled) return;
      if (r3.ok && (r3.headers.get('content-type') || '').includes('application/json')) {
        const { messages: msgs = [] } = await r3.json();
        if (!cancelled) setMessages(msgs);
      }
    } catch (e) {
      if (!cancelled) setThreadId(null); // UI stays usable; callAi will create on first send
    }
  })();
  return () => { cancelled = true; }; // guard against setState after unmount
}, [careerProfileId]);
/* -------------- intro ---------------- */
useEffect(() => {
@ -235,28 +262,37 @@ I'm here to support you with personalized coaching. What would you like to focus
/**
 * Send the latest user message to the coach endpoint and append the reply.
 * Lazily creates the thread on first send (or after a mount race) — this is
 * the commit's fix for the old `if (!threadId) throw` dead end.
 * (Scraped diff interleaved old `${threadId}` and new `${id}` fetch calls;
 * reconstructed as the post-commit version.)
 * @param {Array<{role:string,content:string}>} updatedHistory - chat so far; last entry is the outgoing user message
 * @param {object} [opts] - reserved for future options (unused here)
 */
async function callAi(updatedHistory, opts = {}) {
  setLoading(true);
  try {
    let id = threadId;
    if (!id) {
      // first send or race: create/reuse a thread before posting
      id = await ensureCoachThread();
      setThreadId(id);
    }
    const context = { userProfile, financialProfile, scenarioRow, collegeProfile };
    const r = await authFetch(`/api/premium/coach/chat/threads/${id}/messages`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        content: updatedHistory.at(-1)?.content || '',
        context
      })
    });
    let reply = 'Sorry, something went wrong.';
    if (r.ok && (r.headers.get('content-type') || '').includes('application/json')) {
      const data = await r.json();
      reply = (data?.reply || '').trim() || reply;
    }
    setMessages(prev => [...prev, { role: 'assistant', content: reply }]);
  } catch (e) {
    console.error(e);
    setMessages(prev => [...prev, { role: 'assistant', content: 'Sorry, something went wrong.' }]);
  } finally {
    setLoading(false);
  }
}
/* ------------ normal send ------------- */
function handleSubmit(e) {
e.preventDefault();

View File

@ -8,15 +8,20 @@ import { MessageCircle } from 'lucide-react';
import RetirementChatBar from './RetirementChatBar.js';
/**
 * Return the id of an existing support thread, creating one when none exists.
 * (Scraped diff held both old `/api/support/chat/threads` and new
 * `/api/chat/threads` calls; reconstructed as the post-commit version.)
 * @returns {Promise<string>} thread id
 * @throws {Error} when listing or creation fails
 */
async function ensureSupportThread() {
  // list existing
  const r = await fetch('/api/chat/threads', { credentials: 'include' });
  if (!r.ok) throw new Error(`threads list failed: ${r.status}`);
  const { threads } = await r.json();
  if (threads?.length) return threads[0].id;
  // create new
  const r2 = await fetch('/api/chat/threads', {
    method: 'POST',
    credentials: 'include',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ title: 'Support chat' })
  });
  if (!r2.ok) throw new Error(`thread create failed: ${r2.status}`);
  const { id } = await r2.json();
  return id;
}
@ -62,14 +67,24 @@ export default function ChatDrawer({
useEffect(() => {
(async () => {
const id = await ensureSupportThread();
setSupportThreadId(id);
// preload messages if you want:
const r = await fetch(`/api/support/chat/threads/${id}`, { credentials:'include' });
const { messages: msgs } = await r.json();
setMessages(msgs || []);
})();
}, []);
try {
const id = await ensureSupportThread();
setSupportThreadId(id);
// preload messages
const r = await fetch(`/api/chat/threads/${id}`, { credentials:'include' });
if (r.ok) {
const { messages: msgs } = await r.json();
setMessages(msgs || []);
} else {
// dont crash UI on preload failure
setMessages([]);
}
} catch (e) {
console.error('[Support preload]', e);
setMessages([]);
}
})();
}, []);
/* helper: merge chunks while streaming */
const pushAssistant = (chunk) =>
@ -102,7 +117,7 @@ export default function ChatDrawer({
setPrompt('');
try {
const resp = await fetch(`/api/support/chat/threads/${supportThreadId}/stream`, {
const resp = await fetch(`/api/chat/threads/${supportThreadId}/stream`, {
method: 'POST',
credentials: 'include',
headers: { 'Content-Type':'application/json', Accept:'text/event-stream' },