// ─── server3.js ────────────────────────────────────────────────────────────
import dotenv from 'dotenv';
import path from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
import express from 'express';
import helmet from 'helmet';
import { readFile, unlink } from 'fs/promises';
import fs from 'fs';
import multer from 'multer';
import fetch from 'node-fetch';
import mammoth from 'mammoth';
import jwt from 'jsonwebtoken';
import { v4 as uuidv4, v4 as uuid } from 'uuid';
import pkg from 'pdfjs-dist';
import pool from './config/mysqlPool.js';
import { decrypt } from './shared/crypto/encryption.js';
import crypto from 'crypto';
import OpenAI from 'openai';
import Fuse from 'fuse.js';
import Stripe from 'stripe';
import { createReminder } from './utils/smsService.js';
import rateLimit from 'express-rate-limit'; // already used elsewhere; ok to keep
import { initEncryption, verifyCanary, hashForLookup } from './shared/crypto/encryption.js';
import cookieParser from 'cookie-parser';
import './jobs/reminderCron.js';
import { cacheSummary } from "./utils/ctxCache.js";
const rootPath = path.resolve(__dirname, '..');
const env = (process.env.ENV_NAME || 'prod');
const envPath = path.resolve(rootPath, `.env.${env}`);
if (!process.env.FROM_SECRETS_MANAGER) {
dotenv.config({ path: envPath, override: false });
}
const PORT = process.env.SERVER3_PORT || 5002;
// internal self-call base; works in all envs via Docker DNS
const INTERNAL_SELF_BASE = (process.env.INTERNAL_SELF_BASE || 'http://server3:5002').replace(/\/+$/, '');
const API_BASE = `${INTERNAL_SELF_BASE}/api`;
const DATA_DIR = path.join(__dirname, 'data');
/* ─── helper: canonical public origin ─────────────────────────── */
const PUBLIC_BASE = (process.env.APTIVA_API_BASE || '').replace(/\/+$/, '');
const ALLOWED_REDIRECT_HOSTS = new Set([
new URL(PUBLIC_BASE || 'http://localhost').host
]);
// ── RUNTIME PROTECTION: outbound host allowlist (server3) ──
const OUTBOUND_ALLOW = new Set([
'server2', // compose DNS (server2:5001)
'server3', // self-calls (server3:5002)
'api.openai.com', // OpenAI SDK traffic
'api.stripe.com', // Stripe SDK traffic
'api.twilio.com' // smsService may hit Twilio from this proc
]);
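// Throw if an outbound URL targets a host that isn't on the allowlist above.
// Relative URLs resolve against http://localhost and are therefore rejected too.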
function assertAllowed(url) {
const u = new URL(url, 'http://localhost');
const host = u.hostname;
if (!OUTBOUND_ALLOW.has(host)) {
throw new Error(`blocked_outbound_host:${host}`);
}
}
// Wrap fetch for this file (don’t reassign the imported binding)
const rawFetch = fetch;
async function guardedFetch(input, init) {
const url = typeof input === 'string' ? input : input?.url;
assertAllowed(url);
return rawFetch(input, init);
}
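// Only allow redirects to our own public host, and only over https.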
function isSafeRedirect(url) {
try {
const u = new URL(url);
return ALLOWED_REDIRECT_HOSTS.has(u.host) && u.protocol === 'https:';
} catch { return false; }
}
const app = express();
app.use(cookieParser());
app.disable('x-powered-by');
app.set('trust proxy', 1);
app.use(helmet({ contentSecurityPolicy: false, crossOriginEmbedderPolicy: false }));
// --- Request ID + minimal audit log for /api/* (redacted, one JSON line per request) ---
function getRequestId(req, res) {
  const hdr = req.headers['x-request-id'];
  if (typeof hdr === 'string' && hdr) return hdr; // from Nginx
  const rid = crypto?.randomUUID?.() || `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`;
  res.setHeader('X-Request-ID', rid);
  return rid;
}
function pickIp(req) {
  // trust proxy is already set on this app
  return req.ip || req.headers['x-forwarded-for'] || req.socket?.remoteAddress || '';
}
function redactHeaders(h) {
  const out = { ...h };
  delete out.authorization;
  delete out.cookie;
  delete out['x-forwarded-for'];
  return out;
}
function sampleBody(b) {
  if (!b || typeof b !== 'object') return undefined;
  // avoid logging PII: show keys + a small snippet
  const preview = {};
  for (const k of Object.keys(b).slice(0, 12)) {
    const v = b[k];
    preview[k] = typeof v === 'string'
      ? (v.length > 80 ? v.slice(0, 80) + '…' : v)
      : (Array.isArray(v) ? `[array:${v.length}]` : typeof v);
  }
  return preview;
}
app.use((req, res, next) => {
  if (!req.path.startsWith('/api/')) return next();
  // correlation id (honors X-Request-ID set by the proxy)
  const rid = getRequestId(req, res);
  res.setHeader('X-Request-ID', rid);
  const t0 = process.hrtime.bigint();
  res.on('finish', () => {
    const durMs = Number((process.hrtime.bigint() - t0) / 1_000_000n);
    const out = {
      ts: new Date().toISOString(),
      rid,
      ip: pickIp(req),
      method: req.method,
      path: req.path,
      status: res.statusCode,
      dur_ms: durMs,
      bytes_sent: Number(res.getHeader('Content-Length') || 0),
      userId: req.userId || req.id || null, // populated by the auth middleware on most routes
      ua: req.headers['user-agent'] || '',
      hdr: redactHeaders(req.headers),
      body: sampleBody(req.body) // sampled at finish time, after body parsing
    };
    // one JSON line per request
    try { console.log(JSON.stringify(out)); } catch {}
  });
  next();
});
// ---- RUNTIME: never cache API responses ----
app.use((req, res, next) => {
if (req.path.startsWith('/api/')) {
res.set('Cache-Control', 'no-store');
res.set('Pragma', 'no-cache');
res.set('Expires', '0');
}
next();
});
process.on('unhandledRejection', (e) => console.error('[unhandledRejection]', e));
process.on('uncaughtException', (e) => console.error('[uncaughtException]', e));
// ---- RUNTIME: enforce JSON on API writes (with narrow exceptions) ----
const MUST_JSON = new Set(['POST','PUT','PATCH']);
const EXEMPT_PATHS = [
// server3
/^\/api\/premium\/resume\/optimize$/, // multer (multipart/form-data)
/^\/api\/premium\/stripe\/webhook$/, // Stripe (express.raw)
// Twilio webhooks (form-encoded)
/^\/api\/auth\/sms\/inbound$/,
/^\/api\/auth\/sms\/status$/
// add others if truly needed
];
// ---- RUNTIME: last-resort error sanitizer ----
app.use((err, req, res, _next) => {
// don’t double-send
if (res.headersSent) return;
// map a few known errors cleanly
if (err?.code === 'LIMIT_FILE_SIZE') {
return res.status(413).json({ error: 'file_too_large', limit_mb: 10 });
}
if (err?.message && String(err.message).startsWith('blocked_outbound_host:')) {
return res.status(400).json({ error: 'blocked_outbound_host' });
}
if (err?.message === 'unsupported_type') {
return res.status(415).json({ error: 'unsupported_type' });
}
// default: generic 500 without internals
console.error('[unhandled]', err?.message || err); // logs to stderr only
return res.status(500).json({ error: 'Server error' });
});
app.use((req, res, next) => {
if (!req.path.startsWith('/api/')) return next();
if (!MUST_JSON.has(req.method)) return next();
if (EXEMPT_PATHS.some(rx => rx.test(req.path))) return next();
const ct = req.headers['content-type'] || '';
if (!ct.toLowerCase().includes('application/json')) {
return res.status(415).json({ error: 'unsupported_media_type' });
}
next();
});
// ---- RUNTIME PROTECTION: HPP guard (dedupe + cap arrays) ----
app.use((req, _res, next) => {
const MAX_ARRAY = 20; // sane cap; adjust if you truly need more
const sanitize = (obj) => {
if (!obj || typeof obj !== 'object') return;
for (const k of Object.keys(obj)) {
const v = obj[k];
if (Array.isArray(v)) {
// keep first value semantics + bound array size
obj[k] = v.slice(0, MAX_ARRAY).filter(x => x !== '' && x != null);
if (obj[k].length === 1) obj[k] = obj[k][0]; // collapse singletons
}
}
};
sanitize(req.query);
sanitize(req.body);
next();
});
// ---- RUNTIME: reject request bodies on GET/HEAD ----
app.use((req, res, next) => {
if ((req.method === 'GET' || req.method === 'HEAD') && Number(req.headers['content-length'] || 0) > 0) {
return res.status(400).json({ error: 'no_body_allowed' });
}
next();
});
const { getDocument } = pkg;
const stripe = new Stripe(process.env.STRIPE_SECRET_KEY, { apiVersion: '2024-04-10' });
// ── Use raw pool for canary/db checks (avoid DAO wrapper noise) ──
const db = pool.raw || pool;
const DB_POOL_SIZE = 12;
// Bootstrap: unwrap DEK, check DB, verify canary
try {
await initEncryption();
await db.query('SELECT 1');
await verifyCanary(db);
} catch (e) {
console.error('FATAL during crypto/DB bootstrap:', e?.message || e);
process.exit(1);
}
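// Locate the DEK fingerprint file (dek.fpr) that lives next to the key referenced by DEK_PATH, if configured.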
function fprPathFromEnv() {
const p = (process.env.DEK_PATH || '').trim();
return p ? path.join(path.dirname(p), 'dek.fpr') : null;
}
// 1) Liveness: process is up and event loop responsive
app.get('/livez', (_req, res) => res.type('text').send('OK'));
// 2) Readiness: crypto + canary are good
app.get('/readyz', async (_req, res) => {
try {
await initEncryption();
await verifyCanary(db);
return res.type('text').send('OK');
} catch (e) {
console.error('[READYZ]', e.message);
return res.status(500).type('text').send('FAIL');
}
});
// 3) Health: detailed JSON you can curl
app.get('/healthz', async (_req, res) => {
const out = {
service: process.env.npm_package_name || 'server3',
version: process.env.IMG_TAG || null,
uptime_s: Math.floor(process.uptime()),
now: new Date().toISOString(),
checks: {
live: { ok: true },
crypto: { ok: false, fp: null },
db: { ok: false, ping_ms: null },
canary: { ok: false }
}
};
// crypto / DEK
try {
await initEncryption();
out.checks.crypto.ok = true;
const p = fprPathFromEnv();
if (p) {
try { out.checks.crypto.fp = (await readFile(p, 'utf8')).trim(); } catch {}
}
} catch (e) {
out.checks.crypto.error = e.message;
}
// DB ping
const t0 = Date.now();
try {
await db.query('SELECT 1');
out.checks.db.ok = true;
out.checks.db.ping_ms = Date.now() - t0;
} catch (e) {
out.checks.db.error = e.message;
}
// canary
try {
await verifyCanary(db);
out.checks.canary.ok = true;
} catch (e) {
out.checks.canary.error = e.message;
}
const ready = out.checks.crypto.ok && out.checks.db.ok && out.checks.canary.ok;
return res.status(ready ? 200 : 503).json(out);
});
// ── Tier config (env-overridable) ─────────────────────────────
const CHAT_BURST_WINDOW_SEC = Number(process.env.CHAT_BURST_WINDOW_SEC || 300); // 5 min
const CHAT_CONCURRENCY_PER_USER = Number(process.env.CHAT_CONCURRENCY_PER_USER || 1);
// “coach” chat (general)
const CHAT_BURST = {
basic: Number(process.env.CHAT_BURST_BASIC || 3), // per 5 min
premium: Number(process.env.CHAT_BURST_PREMIUM || 6),
pro: Number(process.env.CHAT_BURST_PRO || 12),
};
const CHAT_DAILY = {
basic: Number(process.env.CHAT_DAILY_BASIC || 20), // per 24h
premium: Number(process.env.CHAT_DAILY_PREMIUM || 60),
pro: Number(process.env.CHAT_DAILY_PRO || 120),
};
// “retire” beta (stricter)
const RET_BURST = {
premium: Number(process.env.RET_BURST_PREMIUM || 2), // per 5 min
pro: Number(process.env.RET_BURST_PRO || 4),
};
const RET_DAILY = {
premium: Number(process.env.RET_DAILY_PREMIUM || 5), // per 24h
pro: Number(process.env.RET_DAILY_PRO || 10),
};
const tierCache = new Map(); // userId -> { tier, exp }
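// Look up the user's subscription tier from user_profile, with a 60-second in-memory cache.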
async function resolveTier(userId) {
const c = tierCache.get(userId);
if (c && c.exp > Date.now()) return c.tier;
const [[row]] = await pool.query('SELECT is_premium, is_pro_premium FROM user_profile WHERE id=? LIMIT 1', [userId]);
const t = row?.is_pro_premium ? 'pro' : row?.is_premium ? 'premium' : 'basic';
tierCache.set(userId, { tier: t, exp: Date.now() + 60_000 }); // cache 60s
return t;
}
// in-memory sliding window + daily + concurrency
const usage = new Map(); // userId -> { win: number[], dayStart: number, dayCount: number, inflight: number }
function getU(id) {
let u = usage.get(id);
if (!u) { u = { win: [], dayStart: Date.now(), dayCount: 0, inflight: 0 }; usage.set(id, u); }
return u;
}
function resetDayIfNeeded(u) {
const DAY = 24*60*60*1000;
if (Date.now() - u.dayStart >= DAY) { u.dayStart = Date.now(); u.dayCount = 0; u.win.length = 0; }
}
function cleanWindow(u, windowMs) {
const cutoff = Date.now() - windowMs;
while (u.win.length && u.win[0] < cutoff) u.win.shift();
}
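// Per-user gate for the AI chat endpoints: a concurrency cap (default 1 in-flight request),
// a sliding-window burst cap, and a daily cap — all chosen by subscription tier.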
function chatGate(kind /* 'coach' | 'retire' */) {
return async (req, res, next) => {
const userId = req.id;
if (!userId) return res.status(401).json({ error: 'auth_required' });
const tier = await resolveTier(userId);
const u = getU(userId);
resetDayIfNeeded(u);
cleanWindow(u, CHAT_BURST_WINDOW_SEC*1000);
// choose caps
const burstCap = (kind === 'retire')
? (tier === 'pro' ? RET_BURST.pro : RET_BURST.premium)
: (tier === 'pro' ? CHAT_BURST.pro : tier === 'premium' ? CHAT_BURST.premium : CHAT_BURST.basic);
const dayCap = (kind === 'retire')
? (tier === 'pro' ? RET_DAILY.pro : RET_DAILY.premium) // no 'basic' for retire
: (tier === 'pro' ? CHAT_DAILY.pro : tier === 'premium' ? CHAT_DAILY.premium : CHAT_DAILY.basic);
// concurrency guard
if (u.inflight >= CHAT_CONCURRENCY_PER_USER) {
res.set('Retry-After', '3');
return res.status(429).json({ error: 'chat_in_progress' });
}
// daily cap
if (u.dayCount >= dayCap) {
return res.status(429).json({ error: 'daily_limit_reached' });
}
// burst cap (sliding window)
if (u.win.length >= burstCap) {
const retryMs = Math.max(0, (u.win[0] + CHAT_BURST_WINDOW_SEC*1000) - Date.now());
res.set('Retry-After', String(Math.ceil(retryMs/1000)));
return res.status(429).json({ error: 'slow_down' });
}
// admit; book slots
u.inflight += 1;
u.dayCount += 1;
u.win.push(Date.now());
res.on('finish', () => { u.inflight = Math.max(0, u.inflight - 1); });
next();
};
}
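// Call this service's own API on the caller's behalf, forwarding their
// Authorization header and cookies so downstream auth checks still pass.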
function internalFetch(req, urlPath, opts = {}) {
return guardedFetch(`${API_BASE}${urlPath}`, {
...opts,
headers: {
"Content-Type": "application/json",
Authorization: req.headers?.authorization || "", // tolerate undefined
Cookie: req.headers?.cookie || "",
...(opts.headers || {})
}
});
}
const auth = (req, urlPath, opts = {}) => internalFetch(req, urlPath, opts);
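// Best-effort JSON extraction from an LLM reply: try a fenced ```json block first,
// then the outermost {...} / [...] span, then a bare parse with backticks stripped.
// Returns null if nothing parses.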
function parseJSONLoose(text) {
if (!text || typeof text !== 'string') return null;
// fenced ```json ... ```
const fence = text.match(/```json\s*([\s\S]+?)```/i) || text.match(/```\s*([\s\S]+?)```/i);
if (fence) { try { return JSON.parse(fence[1].trim()); } catch {} }
// slice between first {/[ and last }/]
const start = text.search(/[{\[]/);
const end = Math.max(text.lastIndexOf('}'), text.lastIndexOf(']'));
if (start !== -1 && end > start) {
try { return JSON.parse(text.slice(start, end + 1).trim()); } catch {}
}
// last chance: strip backticks
try { return JSON.parse(text.trim().replace(/^`+|`+$/g, '')); } catch {}
return null;
}
// AI Risk Analysis Helper Functions
async function getRiskAnalysisFromDB(socCode) {
const [rows] = await pool.query(
'SELECT * FROM ai_risk_analysis WHERE soc_code = ?',
[socCode]
);
return rows.length > 0 ? rows[0] : null;
}
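// safeMilestoneRow / safeTaskRow return only the client-facing columns of a
// milestone / task row (dropping user_id and any other internal fields).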
function safeMilestoneRow(m) {
return {
id: m.id,
career_profile_id: m.career_profile_id,
title: m.title,
description: m.description,
date: m.date,
progress: m.progress,
status: m.status,
is_universal: m.is_universal ? 1 : 0,
origin_milestone_id: m.origin_milestone_id || null,
created_at: m.created_at,
updated_at: m.updated_at
};
}
function safeTaskRow(t) {
return {
id: t.id,
milestone_id: t.milestone_id,
title: t.title,
description: t.description,
due_date: t.due_date,
status: t.status,
created_at: t.created_at,
updated_at: t.updated_at
};
}
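// Upsert the ai_risk_analysis row for a SOC code; any blank incoming field
// falls back to whatever is already stored for that code.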
async function storeRiskAnalysisInDB({
socCode,
careerName,
jobDescription,
tasks,
riskLevel,
reasoning
}) {
const [existingRows] = await pool.query(
'SELECT * FROM ai_risk_analysis WHERE soc_code = ?',
[socCode]
);
const existing = existingRows[0];
const finalJobDesc = jobDescription?.trim() || existing?.job_description || '';
const finalTasks =
typeof tasks === 'string'
? tasks.trim()
: Array.isArray(tasks)
? tasks.map(t => (typeof t === 'string' ? t.trim() : '')).join('; ')
: existing?.tasks || '';
const finalCareerName = careerName || existing?.career_name || '';
const finalRiskLevel = riskLevel || existing?.risk_level || '';
const finalReasoning = reasoning || existing?.reasoning || '';
await pool.query(
`REPLACE INTO ai_risk_analysis (
soc_code,
career_name,
job_description,
tasks,
risk_level,
reasoning,
created_at
) VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`,
[
socCode,
finalCareerName,
finalJobDesc,
finalTasks,
finalRiskLevel,
finalReasoning
]
);
}
const COOKIE_NAME = process.env.COOKIE_NAME || 'aptiva_session';
//Stripe webhook endpoint (raw body)
// 1) Raw body parser (must be before express.json)
app.post(
'/api/premium/stripe/webhook',
express.raw({ type: 'application/json' }),
async (req, res) => {
let event;
try {
event = stripe.webhooks.constructEvent(
req.body,
req.headers['stripe-signature'],
process.env.STRIPE_WH_SECRET
);
} catch (err) {
console.error('⚠️ Bad Stripe signature', err.message);
return res.status(400).end();
}
// Env guard: only handle events matching our env
const isProd = (process.env.ENV_NAME === 'prod');
if (Boolean(event.livemode) !== isProd) {
console.warn('[Stripe] Ignoring webhook due to livemode mismatch', { livemode: event.livemode, isProd });
return res.sendStatus(200);
}
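// Set the premium flags on the user whose stripe_customer_id_hash matches this customer.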
const upFlags = async (customerId, premium, pro) => {
const h = hashForLookup(customerId);
await pool.query(
`UPDATE user_profile
SET is_premium = ?, is_pro_premium = ?
WHERE stripe_customer_id_hash = ?`,
[premium ? 1 : 0, pro ? 1 : 0, h]
);
};
// Recompute flags from Stripe (source of truth)
const recomputeFlagsFromStripe = async (customerId) => {
const subs = await stripe.subscriptions.list({
customer: customerId,
status: 'all',
limit: 100
});
// Consider only “active-like” states
const ACTIVE_STATES = new Set(['active']);
let hasPremium = false;
let hasPro = false;
// after computing hasPremium/hasPro
const activeCount = subs.data.filter(s => ACTIVE_STATES.has(s.status)).length;
if (activeCount > 1) {
console.warn('[Stripe] multiple active subs for customer', { customerId, activeCount });
}
for (const s of subs.data) {
if (!ACTIVE_STATES.has(s.status)) continue;
for (const item of s.items.data) {
const pid = item.price.id;
if (pid === process.env.STRIPE_PRICE_PRO_MONTH || pid === process.env.STRIPE_PRICE_PRO_YEAR) {
hasPro = true;
}
if (pid === process.env.STRIPE_PRICE_PREMIUM_MONTH || pid === process.env.STRIPE_PRICE_PREMIUM_YEAR) {
hasPremium = true;
}
}
}
// If any Pro sub exists, Pro wins; otherwise Premium if any premium exists
// Pro implies premium access; premium only if no pro
const finalIsPro = hasPro ? 1 : 0;
const finalIsPremium = hasPro ? 1 : (hasPremium ? 1 : 0);
await upFlags(customerId, finalIsPremium, finalIsPro);
};
try {
  switch (event.type) {
    case 'customer.subscription.created':
    case 'customer.subscription.updated':
    case 'customer.subscription.deleted': {
      const sub = event.data.object;
      await recomputeFlagsFromStripe(sub.customer);
      break;
    }
    case 'checkout.session.completed': {
      // extra safety for some Stripe flows that rely on this event
      const ses = event.data.object;
      if (ses.customer) {
        await recomputeFlagsFromStripe(ses.customer);
      }
      break;
    }
    default:
      // Ignore everything else
      break;
  }
} catch (e) {
  // Non-2xx tells Stripe to retry this event later
  console.error('[Stripe webhook] handler error:', e?.message || e);
  return res.sendStatus(500);
}
res.sendStatus(200);
}
);
// 2) Basic middlewares
// JSON body parser — registered after the Stripe webhook, which needs the raw body
app.use(express.json({ limit: '5mb' }));
// --- Twilio webhooks ---
const twilioForm = express.urlencoded({ extended: false });
app.post('/api/auth/sms/inbound', twilioForm, async (req, res) => {
  const body = String(req.body?.Body || '').trim().toUpperCase();
  if (body === 'HELP') {
    // Twilio expects TwiML, so wrap the reply in <Response><Message>
    const twiml = `<?xml version="1.0" encoding="UTF-8"?><Response><Message>AptivaAI: Help with SMS. Email support@aptivaai.com. Msg&amp;Data rates may apply. Reply STOP to cancel.</Message></Response>`;
    return res.type('text/xml').send(twiml);
  }
  // No reply for anything else — return an empty TwiML document
  return res.type('text/xml').send('<?xml version="1.0" encoding="UTF-8"?><Response/>');
});
app.post('/api/auth/sms/status', twilioForm, async (req, res) => {
try {
if (String(req.body?.ErrorCode || '') === '21610' && req.body?.To) {
await (pool.raw || pool).query('UPDATE user_profile SET sms_opt_in=0 WHERE phone_e164=?', [req.body.To]);
}
} catch (e) {
console.error('[sms/status]', e?.message || e);
}
res.sendStatus(204);
});
// ── Premium onboarding drafts ──
// GET current user's draft
app.get('/api/premium/onboarding/draft', authenticatePremiumUser, async (req, res) => {
const [[row]] = await pool.query(
`SELECT id, step, data
FROM onboarding_drafts
WHERE user_id=?
ORDER BY updated_at DESC, id DESC
LIMIT 1`,
[req.id]
);
return res.json(row || null);
});
// POST upsert draft (ID-agnostic, partial merge, 1 draft per user)
app.post('/api/premium/onboarding/draft', authenticatePremiumUser, async (req, res) => {
try {
// ---- 0) Harden req.body and incoming shapes (accept object *or* JSON string)
let body = {};
if (req && req.body != null) {
if (typeof req.body === 'object') {
body = req.body;
} else if (typeof req.body === 'string') {
try { body = JSON.parse(req.body); } catch { body = {}; }
}
}
let { id, step } = body;
// Accept either {data:{careerData/financialData/collegeData}} or section keys at top level
let incoming = {};
if (body.data != null) {
if (typeof body.data === 'string') {
try { incoming = JSON.parse(body.data); } catch { incoming = {}; }
} else if (typeof body.data === 'object') {
incoming = body.data;
}
}
// If callers provided sections directly (EducationalProgramsPage),
// lift them into the data envelope without crashing if body is blank.
['careerData','financialData','collegeData'].forEach(k => {
if (Object.prototype.hasOwnProperty.call(body, k)) {
if (!incoming || typeof incoming !== 'object') incoming = {};
incoming[k] = body[k];
}
});
// ---- 1) Base draft: by id (if provided) else latest for this user
let base = null;
if (id) {
const [[row]] = await pool.query(
`SELECT id, step, data FROM onboarding_drafts WHERE user_id=? AND id=? LIMIT 1`,
[req.id, id]
);
base = row || null;
} else {
const [[row]] = await pool.query(
`SELECT id, step, data
FROM onboarding_drafts
WHERE user_id=?
ORDER BY updated_at DESC, id DESC
LIMIT 1`,
[req.id]
);
base = row || null;
}
// ---- 2) Parse prior JSON safely
let prev = {};
if (base?.data != null) {
try {
if (typeof base.data === 'string') prev = JSON.parse(base.data);
else if (Buffer.isBuffer(base.data)) prev = JSON.parse(base.data.toString('utf8'));
else if (typeof base.data === 'object') prev = base.data;
} catch { prev = {}; }
}
// ---- 3) Section-wise shallow merge (prev + incoming)
const merged = mergeDraft(prev, (incoming && typeof incoming === 'object') ? incoming : {});
// ---- 3.5) Only reject when there's truly no incoming content AND no prior draft
const isPlainObj = (o) => o && typeof o === 'object' && !Array.isArray(o);
const isEmptyObj = (o) => isPlainObj(o) && Object.keys(o).length === 0;
const hasIncoming = isPlainObj(incoming) && !isEmptyObj(incoming);
const hasPrior = !!base && isPlainObj(prev) && !isEmptyObj(prev);
if (!hasIncoming && !hasPrior) {
return res.status(400).json({ error: 'empty_draft' });
}
// ---- 4) Final id/step and upsert
const draftId = base?.id || id || uuidv4();
const finalStep = Number.isInteger(step) ? step : (parseInt(step,10) || base?.step || 0);
console.log('[draft-upsert]', {
userId : req.id,
draftId : draftId,
step : finalStep,
incoming : Object.keys(incoming || {}).sort(),
mergedKeys: Object.keys(merged || {}).sort(),
});
await pool.query(
`INSERT INTO onboarding_drafts (user_id, id, step, data)
VALUES (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
step = VALUES(step),
data = VALUES(data),
updated_at = CURRENT_TIMESTAMP`,
[req.id, draftId, finalStep, JSON.stringify(merged)]
);
return res.json({ id: draftId, step: finalStep });
} catch (e) {
console.error('draft upsert failed:', e?.message || e);
return res.status(500).json({ error: 'draft_upsert_failed' });
}
});
// Section-wise shallow merge: plain-object values merge key-by-key; everything else is overwritten by the incoming value
function mergeDraft(a = {}, b = {}) {
const out = { ...a };
for (const k of Object.keys(b || {})) {
const left = a[k];
const right = b[k];
if (
left && typeof left === 'object' && !Array.isArray(left) &&
right && typeof right === 'object' && !Array.isArray(right)
) {
out[k] = { ...left, ...right };
} else {
out[k] = right;
}
}
return out;
}
// DELETE draft (after finishing / cancelling)
app.delete('/api/premium/onboarding/draft', authenticatePremiumUser, async (req, res) => {
await pool.query('DELETE FROM onboarding_drafts WHERE user_id=?', [req.id]);
res.json({ ok: true });
});
/* ─── Require critical env vars ─────────────────────────────── */
if (!process.env.CORS_ALLOWED_ORIGINS) {
console.error('FATAL CORS_ALLOWED_ORIGINS is not set');
process.exit(1);
}
/* ─── Allowed origins for CORS (comma-separated in env) ─────── */
const allowedOrigins = process.env.CORS_ALLOWED_ORIGINS
.split(',')
.map(o => o.trim())
.filter(Boolean);
/* ─── Strict CORS (exact matches only); allow no-Origin requests ───────── */
app.use((req, res, next) => {
const origin = req.headers.origin || '';
res.setHeader('Vary', 'Origin');
// No Origin header (same-origin, curl, server→server) ⇒ skip CORS
if (!origin) return next();
if (!allowedOrigins.includes(origin)) return res.status(403).end();
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
res.setHeader(
'Access-Control-Allow-Headers',
'Authorization, Content-Type, Accept, Origin, X-Requested-With, Access-Control-Allow-Methods'
);
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE, OPTIONS');
if (req.method === 'OPTIONS') return res.status(204).end();
next();
});
// 3) Authentication middleware
function authenticatePremiumUser(req, res, next) {
let token = (req.headers.authorization || '').replace(/^Bearer\s+/i, '').trim();
if (!token) token = req.cookies?.[COOKIE_NAME] || req.cookies?.token || '';
if (!token) return res.status(401).json({ error: 'Premium authorization required' });
try {
const { id } = jwt.verify(token, process.env.JWT_SECRET);
req.id = id;
next();
} catch {
return res.status(403).json({ error: 'Invalid or expired token' });
}
};
/** ------------------------------------------------------------------
* Returns the user’s Stripe customer‑id (decrypted) given req.id.
* If the user has no customer, it creates one, saves BOTH the
* encrypted id and its deterministic hash, then returns the id.
* ----------------------------------------------------------------- */
async function getOrCreateStripeCustomerId(req) {
/* 1 ── look up existing row (wrapped pool auto‑decrypts) */
const [[row]] = await pool.query(
`SELECT stripe_customer_id
FROM user_profile
WHERE id = ?`,
[req.id]
);
if (row?.stripe_customer_id) {
return row.stripe_customer_id; // already have it
}
/* 2 ── create customer in Stripe */
const customer = await stripe.customers.create({
metadata: { userId: String(req.id) }
});
/* 3 ── store encrypted id **and** deterministic hash */
const h = hashForLookup(customer.id);
await pool.query(
`UPDATE user_profile
SET stripe_customer_id = ?,
stripe_customer_id_hash = ?
WHERE id = ?`,
[customer.id, h, req.id]
);
return customer.id;
}
// ── Stripe: detect if customer already has an active (or pending) sub ─────────
async function customerHasActiveSub(customerId) {
// keep it small; we only need to know if ≥1 exists
const list = await stripe.subscriptions.list({
customer: customerId,
status: 'all',
limit: 5
});
return list.data.some(s => {
// treat cancel_at_period_end as still-active for gating
if (s.cancel_at_period_end) return true;
return ['active'].includes(s.status);
});
}
/* ------------------------------------------------------------------ */
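// Stripe price IDs per tier/interval, injected via environment variables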
const priceMap = {
premium: {
monthly: process.env.STRIPE_PRICE_PREMIUM_MONTH,
annual : process.env.STRIPE_PRICE_PREMIUM_YEAR
},
pro: {
monthly: process.env.STRIPE_PRICE_PRO_MONTH,
annual : process.env.STRIPE_PRICE_PRO_YEAR
}
};
app.get('/api/premium/subscription/status', authenticatePremiumUser, async (req, res) => {
try {
const [[row]] = await pool.query(
'SELECT is_premium, is_pro_premium FROM user_profile WHERE id = ?',
[req.id]
);
if (!row) return res.status(404).json({ error: 'User not found' });
return res.json({
is_premium : !!row.is_premium,
is_pro_premium : !!row.is_pro_premium
});
} catch (err) {
console.error('subscription/status error:', err);
return res.status(500).json({ error: 'DB error' });
}
});
/* ========================================================================
* applyOps – executes the “milestones” array inside a fenced ```ops block
* and returns an array of confirmation strings
* ===================================================================== */
async function applyOps(opsObj, req) {
if (!opsObj?.milestones || !Array.isArray(opsObj.milestones)) return [];
const confirmations = [];
// helper for authenticated fetches that keep headers
const auth = (path, opts = {}) => internalFetch(req, path, opts);
for (const m of opsObj.milestones) {
const { op } = m || {};
try {
/* ---------- DELETE ---------- */
if (op === "DELETE" && m.id) {
const cleanId = m.id.trim();
const res = await auth(`/premium/milestones/${cleanId}`, { method:"DELETE" });
if (res.ok) confirmations.push(`Deleted milestone ${cleanId}`);
}
/* ---------- UPDATE ---------- */
if (op === "UPDATE" && m.id && m.patch) {
const res = await auth(`/premium/milestones/${m.id}`, {
method : "PUT",
headers: { "Content-Type": "application/json" },
body : JSON.stringify(m.patch)
});
if (res.ok) confirmations.push(`Updated milestone ${m.id}`);
else console.warn("[applyOps] UPDATE failed", m.id, res.status);
}
/* ---------- CREATE ---------- */
if (op === "CREATE" && m.data) {
const res = await auth("/premium/milestone", {
method : "POST",
headers: { "Content-Type": "application/json" },
body : JSON.stringify(m.data)
});
if (res.ok) {
const json = await res.json();
const newId = Array.isArray(json) ? json[0]?.id : json.id;
confirmations.push(`Created milestone ${newId || "(new)"}`);
} else console.warn("[applyOps] CREATE failed", res.status);
}
} catch (err) {
console.error("[applyOps] Error handling op", m, err);
}
}
/* After any mutations, reload the milestone list so the UI refreshes
   (only if a fetchMilestones helper is actually defined in this scope) */
if (confirmations.length && typeof fetchMilestones === 'function') {
  try {
    await fetchMilestones();
  } catch (e) {
    console.warn("Could not refresh milestones after ops", e);
  }
}
return confirmations;
}
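// Fill in a missing job description and/or task list for a SOC code by asking
// server2's O*NET proxy; on failure, return whatever was passed in.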
async function ensureDescriptionAndTasks({ socCode, jobDescription, tasks }) {
let desc = (jobDescription || '').trim();
let t = Array.isArray(tasks) ? tasks : (typeof tasks === 'string' ? [tasks] : []);
if (desc && t.length) return { jobDescription: desc, tasks: t };
try {
// hit server2 directly on the compose network
const r = await guardedFetch(`http://server2:5001/api/onet/career-description/${encodeURIComponent(socCode)}`, {
headers: { Accept: 'application/json' }
});
if (r.ok) {
const j = await r.json();
if (!desc && j?.description) desc = String(j.description).trim();
if (!t.length && Array.isArray(j?.tasks)) t = j.tasks.map(x => String(x));
}
} catch (e) {
// best effort — keep whatever we had
console.warn('[ai-risk] enrich fetch failed:', e?.message || e);
}
return { jobDescription: desc, tasks: t };
}
/* ------------------------------------------------------------------
CAREER PROFILE ENDPOINTS
------------------------------------------------------------------ */
// GET the latest selected career profile
app.get('/api/premium/career-profile/latest', authenticatePremiumUser, async (req, res) => {
try {
const [rows] = await pool.query(
`SELECT * FROM career_profiles
WHERE user_id = ?
ORDER BY start_date DESC
LIMIT 1`,
[req.id]
);
const row = rows[0] ? { ...rows[0] } : null;
if (row) delete row.user_id;
return res.json(row || {});
} catch (error) {
console.error('Error fetching latest career profile:', error);
res.status(500).json({ error: 'Failed to fetch latest career profile' });
}
});
// GET all career profiles for the user
app.get('/api/premium/career-profile/all', authenticatePremiumUser, async (req, res) => {
try {
const sql = `
SELECT
id,
scenario_title,
career_name,
status,
DATE_FORMAT(start_date, '%Y-%m-%d') AS start_date,
DATE_FORMAT(created_at, '%Y-%m-%d %H:%i:%s') AS created_at
FROM career_profiles
WHERE user_id = ?
ORDER BY start_date ASC
`;
const [rows] = await pool.query(sql, [req.id]);
res.json({ careerProfiles: rows });
} catch (error) {
console.error('Error fetching career profiles:', error);
res.status(500).json({ error: 'Failed to fetch career profiles' });
}
});
// GET a single career profile (scenario) by ID
app.get('/api/premium/career-profile/:careerProfileId', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.params;
try {
const sql = `
SELECT
*,
start_date AS start_date
FROM career_profiles
WHERE id = ?
AND user_id = ?
LIMIT 1
`;
const [rows] = await pool.query(sql, [careerProfileId, req.id]);
if (!rows[0]) {
return res.status(404).json({ error: 'Career profile not found or not yours.' });
}
const row = { ...rows[0] };
delete row.user_id; // do not ship user_id
return res.json(row);
} catch (error) {
console.error('Error fetching single career profile:', error);
res.status(500).json({ error: 'Failed to fetch career profile by ID.' });
}
});
// POST a new career profile (upsert)
app.post('/api/premium/career-profile', authenticatePremiumUser, async (req, res) => {
const {
scenario_title,
career_name,
status,
start_date,
college_enrollment_status,
currently_working,
career_goals,
retirement_start_date,
desired_retirement_income_monthly,
// planned fields
planned_monthly_expenses,
planned_monthly_debt_payments,
planned_monthly_retirement_contribution,
planned_monthly_emergency_contribution,
planned_surplus_emergency_pct,
planned_surplus_retirement_pct,
planned_additional_income
} = req.body;
if (!career_name) {
return res.status(400).json({ error: 'career_name is required.' });
}
try {
const finalId = req.body.id || uuidv4();
// 1) Insert includes career_goals
const sql = `
INSERT INTO career_profiles (
id,
user_id,
scenario_title,
career_name,
status,
start_date,
college_enrollment_status,
currently_working,
career_goals,
retirement_start_date,
desired_retirement_income_monthly,
planned_monthly_expenses,
planned_monthly_debt_payments,
planned_monthly_retirement_contribution,
planned_monthly_emergency_contribution,
planned_surplus_emergency_pct,
planned_surplus_retirement_pct,
planned_additional_income
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
status = VALUES(status),
start_date = VALUES(start_date),
college_enrollment_status = VALUES(college_enrollment_status),
currently_working = VALUES(currently_working),
career_goals = VALUES(career_goals),
retirement_start_date = VALUES(retirement_start_date),
desired_retirement_income_monthly = VALUES(desired_retirement_income_monthly),
planned_monthly_expenses = VALUES(planned_monthly_expenses),
planned_monthly_debt_payments = VALUES(planned_monthly_debt_payments),
planned_monthly_retirement_contribution = VALUES(planned_monthly_retirement_contribution),
planned_monthly_emergency_contribution = VALUES(planned_monthly_emergency_contribution),
planned_surplus_emergency_pct = VALUES(planned_surplus_emergency_pct),
planned_surplus_retirement_pct = VALUES(planned_surplus_retirement_pct),
planned_additional_income = VALUES(planned_additional_income),
updated_at = CURRENT_TIMESTAMP
`;
await pool.query(sql, [
finalId,
req.id,
scenario_title || null,
career_name,
status || 'planned',
start_date || null,
college_enrollment_status || null,
currently_working || null,
career_goals || null,
retirement_start_date || null,
desired_retirement_income_monthly || null,
planned_monthly_expenses ?? null,
planned_monthly_debt_payments ?? null,
planned_monthly_retirement_contribution ?? null,
planned_monthly_emergency_contribution ?? null,
planned_surplus_emergency_pct ?? null,
planned_surplus_retirement_pct ?? null,
planned_additional_income ?? null
]);
await pool.query(
`DELETE FROM context_cache
WHERE user_id = ? AND career_profile_id = ?`,
[req.id, finalId]
);
// re-fetch to confirm ID
const [rows] = await pool.query(
`SELECT id
FROM career_profiles
WHERE id = ?`,
[finalId]
);
return res.status(200).json({
message: 'Career profile upserted.',
career_profile_id: finalId
});
} catch (error) {
console.error('Error upserting career profile:', error);
res.status(500).json({ error: 'Failed to upsert career profile.' });
}
});
// PUT career goals for a scenario
app.put('/api/premium/career-profile/:id/goals', authenticatePremiumUser, async (req, res) => {
  const { id } = req.params;
  const { career_goals } = req.body;
  try {
    // simple ownership check
    const [rows] = await pool.query('SELECT user_id FROM career_profiles WHERE id=?', [id]);
    if (!rows[0] || rows[0].user_id !== req.id) {
      return res.status(403).json({ error: 'Not your profile.' });
    }
    await pool.query('UPDATE career_profiles SET career_goals=? WHERE id=?', [career_goals, id]);
    await pool.query(
      "DELETE FROM context_cache WHERE user_id=? AND career_profile_id=?",
      [req.id, id]
    );
    res.json({ career_goals });
  } catch (error) {
    console.error('Error updating career goals:', error);
    res.status(500).json({ error: 'Failed to update career goals.' });
  }
});
// DELETE a career profile (scenario) by ID
app.delete('/api/premium/career-profile/:careerProfileId', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.params;
try {
// confirm ownership
const [rows] = await pool.query(`
SELECT id
FROM career_profiles
WHERE id = ?
AND user_id = ?
`, [careerProfileId, req.id]);
if (!rows[0]) {
return res.status(404).json({ error: 'Career profile not found or not yours.' });
}
// delete college_profiles
await pool.query(`
DELETE FROM college_profiles
WHERE user_id = ?
AND career_profile_id = ?
`, [req.id, careerProfileId]);
// delete scenario’s milestones + tasks + impacts
const [mils] = await pool.query(`
SELECT id
FROM milestones
WHERE user_id = ?
AND career_profile_id = ?
`, [req.id, careerProfileId]);
const milestoneIds = mils.map(m => m.id);
if (milestoneIds.length > 0) {
const placeholders = milestoneIds.map(() => '?').join(',');
// tasks
await pool.query(`
DELETE FROM tasks
WHERE milestone_id IN (${placeholders})
`, milestoneIds);
// impacts
await pool.query(`
DELETE FROM milestone_impacts
WHERE milestone_id IN (${placeholders})
`, milestoneIds);
// milestones
await pool.query(`
DELETE FROM milestones
WHERE id IN (${placeholders})
`, milestoneIds);
}
// delete the career_profiles row
await pool.query(`
DELETE FROM career_profiles
WHERE id = ?
AND user_id = ?
`, [careerProfileId, req.id]);
res.json({ message: 'Career profile and related data successfully deleted.' });
} catch (error) {
console.error('Error deleting career profile:', error);
res.status(500).json({ error: 'Failed to delete career profile.' });
}
});
app.post('/api/premium/ai/chat', authenticatePremiumUser, chatGate('coach'), async (req, res) => {
try {
const {
userProfile = {},
scenarioRow = {},
financialProfile = {},
collegeProfile = {},
chatHistory = [],
forceContext = false
} = req.body;
let existingTitles = [];
let miniGrid = "-none-"; // slim grid
try {
const [rows] = await pool.query(
`SELECT id, DATE_FORMAT(date,'%Y-%m-%d') AS d, title
FROM milestones
WHERE user_id = ? AND career_profile_id = ?`,
[req.id, scenarioRow.id]
);
existingTitles = rows.map(r => `${r.title.trim()}|${r.d}`);
if (rows.length) {
miniGrid = rows
.map(r => `${r.id}|${r.title.trim()}|${r.d}`)
.join("\n");
}
} catch (e) {
console.error("Could not fetch existing milestones ⇒", e);
}
// ------------------------------------------------
// 1. Helper Functions
// ------------------------------------------------
// A. Build a "where you are now" vs. "where you want to go" message
// with phrasing that works for plural career names.
function buildStatusSituationMessage(status, situation, careerName) {
// For example: careerName = "Blockchain Engineers"
const sStatus = (status || "").toLowerCase();
const sSituation = (situation || "").toLowerCase();
// Intro / "Now" part
let nowPart = "";
switch (sStatus) {
case "planned":
nowPart = `Hi! It sounds like you're looking ahead to potential opportunities in ${careerName}.`;
break;
case "current":
nowPart = `Hi! It looks like you're currently involved in ${careerName}.`;
break;
case "exploring":
nowPart = `Hi! You're exploring how ${careerName} might fit your plans.`;
break;
default:
nowPart = `Hi! I'm not fully sure about your current involvement with ${careerName}, but I'd love to learn more.`;
break;
}
// Next / "Where you're going" part
let nextPart = "";
switch (sSituation) {
case "planning":
nextPart = `You're aiming to clarify your strategy for moving into a role within ${careerName}.`;
break;
case "preparing":
nextPart = `You're actively developing the skills you need for future opportunities in ${careerName}.`;
break;
case "enhancing":
nextPart = `You'd like to deepen or broaden your responsibilities within ${careerName}.`;
break;
case "retirement":
nextPart = `You're considering how to transition toward retirement from ${careerName}.`;
break;
default:
nextPart = `I'm not entirely sure what your next steps might be regarding ${careerName}, but we'll figure it out together.`;
break;
}
const combinedDescription = `${nowPart} ${nextPart}`.trim();
// Friendly note - feel free to tweak the wording
const friendlyNote = `
Feel free to use AptivaAI however it best suits you—there’s no "wrong" answer.
It doesn’t matter so much where you've been; it's about where you want to go from here.
We can refine details any time or jump straight to what you’re most eager to explore right now.
If you complete the Interest Inventory, I’ll be able to offer more targeted suggestions based on your interests.
I'm here to support you with personalized coaching—what would you like to focus on next?
`.trim();
return `${combinedDescription}\n\n${friendlyNote}`;
}
// B. Build a user summary that references all available info (unchanged from your code)
function buildUserSummary({
userProfile = {},
scenarioRow = {},
financialProfile = {},
collegeProfile = {},
aiRisk = null,
salaryAnalysis = null,
economicProjections = null
}) {
const _userProfile = userProfile || {};
const _scenarioRow = scenarioRow || {};
const _financialProfile = financialProfile || {};
const _collegeProfile = collegeProfile || {};
// 1) USER PROFILE
const firstName = _userProfile.firstname || "N/A";
const lastName = _userProfile.lastname || "N/A";
const fullName = `${firstName} ${lastName}`;
const username = _userProfile.username || "N/A";
const location = _userProfile.area || _userProfile.state || "Unknown Region";
const careerSituation = _userProfile.career_situation || "Not provided";
// RIASEC
let riasecText = "None";
if (_userProfile.riasec_scores) {
try {
const rScores = JSON.parse(_userProfile.riasec_scores);
// { "R":23,"I":25,"A":23,"S":16,"E":15,"C":22 }
riasecText = `
(R) Realistic: ${rScores.R}
(I) Investigative: ${rScores.I}
(A) Artistic: ${rScores.A}
(S) Social: ${rScores.S}
(E) Enterprising: ${rScores.E}
(C) Conventional: ${rScores.C}
`.trim();
} catch(e) {
console.error("Error parsing RIASEC JSON =>", e);
}
}
// Possibly parse "career_priorities" if you need them
let careerPriorities = "Not provided";
if (_userProfile.career_priorities) {
// e.g. "career_priorities": "{\"interests\":\"Somewhat important\",\"meaning\":\"Somewhat important\",\"stability\":\"Very important\", ...}"
try {
const cP = JSON.parse(_userProfile.career_priorities);
// Build a bullet string
careerPriorities = Object.entries(cP).map(([k,v]) => `- ${k}: ${v}`).join("\n");
} catch(e) {
console.error("Error parsing career_priorities =>", e);
}
}
// 2) CAREER SCENARIO
// scenarioRow might have career_name, job_description, tasks
// but you said sometimes you store them in scenarioRow or pass them in a separate param
const careerName = _scenarioRow.career_name || "No career selected";
const socCode = _scenarioRow.soc_code || "N/A";
const jobDescription = _scenarioRow.job_description || "No jobDescription info";
const tasksList = Array.isArray(_scenarioRow.tasks) && _scenarioRow.tasks.length
? _scenarioRow.tasks.join(", ")
: "No tasks info";
// 3) FINANCIAL PROFILE
// your actual JSON uses e.g. "current_salary", "additional_income"
const currentSalary = _financialProfile.current_salary || 0;
const additionalIncome = _financialProfile.additional_income || 0;
const monthlyExpenses = _financialProfile.monthly_expenses || 0;
const monthlyDebt = _financialProfile.monthly_debt_payments || 0;
const retirementSavings = _financialProfile.retirement_savings || 0;
const emergencyFund = _financialProfile.emergency_fund || 0;
// 4) COLLEGE PROFILE
// from your JSON:
const selectedProgram = _collegeProfile?.selected_program ?? "N/A";
const enrollmentStatus = _collegeProfile?.college_enrollment_status ?? "Not enrolled";
const creditHoursCompleted = parseFloat(_collegeProfile?.hours_completed ?? 0) || 0;
const programLength = parseFloat(_collegeProfile?.program_length ?? 0) || 0;
const expectedGraduation = _collegeProfile?.expected_graduation ?? "Unknown";
// 5) AI RISK
// from aiRisk object
let riskText = "No AI risk info provided.";
if (aiRisk?.riskLevel) {
riskText = `Risk Level: ${aiRisk.riskLevel}
Reasoning: ${aiRisk.reasoning}`;
}
// 6) SALARY ANALYSIS
// e.g. { "regional": { ... }, "national": { ... } }
let salaryText = "No salary analysis provided.";
if (salaryAnalysis && salaryAnalysis.regional && salaryAnalysis.national) {
salaryText = `
[Regional Salary Range]
10th Percentile: $${salaryAnalysis.regional.regional_PCT10}
25th Percentile: $${salaryAnalysis.regional.regional_PCT25}
Median: $${salaryAnalysis.regional.regional_MEDIAN}
75th: $${salaryAnalysis.regional.regional_PCT75}
90th: $${salaryAnalysis.regional.regional_PCT90}
[National Salary Range]
10th Percentile: $${salaryAnalysis.national.national_PCT10}
25th Percentile: $${salaryAnalysis.national.national_PCT25}
Median: $${salaryAnalysis.national.national_MEDIAN}
75th: $${salaryAnalysis.national.national_PCT75}
90th: $${salaryAnalysis.national.national_PCT90}
`.trim();
}
// 7) ECONOMIC PROJECTIONS
// e.g. { "state": { ... }, "national": { ... } }
let econText = "No economic projections provided.";
if (economicProjections?.state && economicProjections.national) {
econText = `
[State Projections]
Area: ${economicProjections.state.area}
Base Year: ${economicProjections.state.baseYear}
Base Employment: ${economicProjections.state.base}
Projected Year: ${economicProjections.state.projectedYear}
Projected Employment: ${economicProjections.state.projection}
Change: ${economicProjections.state.change}
Percent Change: ${economicProjections.state.percentChange}%
Annual Openings: ${economicProjections.state.annualOpenings}
Occupation: ${economicProjections.state.occupationName}
[National Projections]
Area: ${economicProjections.national.area}
Base Year: ${economicProjections.national.baseYear}
Base Employment: ${economicProjections.national.base}
Projected Year: ${economicProjections.national.projectedYear}
Projected Employment: ${economicProjections.national.projection}
Change: ${economicProjections.national.change}
Percent Change: ${economicProjections.national.percentChange}%
Annual Openings: ${economicProjections.national.annualOpenings}
Occupation: ${economicProjections.national.occupationName}
`.trim();
}
// 8) BUILD THE FINAL TEXT
return `
[USER PROFILE]
- Full Name: ${fullName}
- Username: ${username}
- Location: ${location}
- Career Situation: ${careerSituation}
- RIASEC:
${riasecText}
Career Priorities:
${careerPriorities}
[TARGET CAREER]
- Career Name: ${careerName} (SOC: ${socCode})
- Job Description: ${jobDescription}
- Typical Tasks: ${tasksList}
[FINANCIAL PROFILE]
- Current Salary: $${currentSalary}
- Additional Income: $${additionalIncome}
- Monthly Expenses: $${monthlyExpenses}
- Monthly Debt: $${monthlyDebt}
- Retirement Savings: $${retirementSavings}
- Emergency Fund: $${emergencyFund}
[COLLEGE / EDUCATION]
- Program: ${selectedProgram} (Status: ${enrollmentStatus})
- Credits Completed: ${creditHoursCompleted}
- Program Length: ${programLength}
- Expected Graduation: ${expectedGraduation}
[AI RISK ANALYSIS]
${riskText}
[SALARY ANALYSIS]
${salaryText}
[ECONOMIC PROJECTIONS]
${econText}
`.trim();
}
// (No changes to your environment configs)
// ------------------------------------------------
// 2. AI Risk Fetch
// ------------------------------------------------
const apiBase = process.env.APTIVA_INTERNAL_API || "http://localhost:5002/api";
let aiRisk = null;
try {
const aiRiskRes = await auth(
req,
'/premium/ai-risk-analysis',
{
method: "POST",
body: JSON.stringify({
socCode: scenarioRow?.soc_code,
careerName: scenarioRow?.career_name,
jobDescription: scenarioRow?.job_description,
tasks: scenarioRow?.tasks || []
})
}
);
if (aiRiskRes.ok) {
aiRisk = await aiRiskRes.json();
} else {
console.warn("AI risk fetch failed with status:", aiRiskRes.status);
}
} catch (err) {
console.error("Error fetching AI risk analysis:", err);
}
// ------------------------------------------------
// 3. Build Status + Situation text
// ------------------------------------------------
const { status: userStatus } = scenarioRow;
const { career_situation: userSituation } = userProfile;
const careerName = scenarioRow?.career_name || "this career";
/* How many past exchanges to keep */
const MAX_CHAT_TURNS = 6;
const combinedStatusSituation = buildStatusSituationMessage(
userStatus,
userSituation,
careerName
);
// 4. Build / fetch the cached summary (auto-rebuild if missing)
let summaryText = buildUserSummary({
userId : req.id,
scenarioRow,
userProfile,
financialProfile,
collegeProfile: collegeProfile || {},
aiRisk
});
summaryText = await cacheSummary(req.id, scenarioRow.id, summaryText);
// ------------------------------------------------
// 5. Construct System-Level Prompts
// ------------------------------------------------
const systemPromptIntro = `
You are **Jess**, a professional career coach inside AptivaAI.
Your mandate: turn the user’s real data into clear, empathetic, *actionable* guidance.
────────────────────────────────────────────────────────
What Jess can do directly in Aptiva
────────────────────────────────────────────────────────
• **Create** new milestones (with tasks & financial impacts)
• **Update** any field on an existing milestone
• **Delete** milestones that are no longer relevant
• **Add / edit / remove** tasks inside a milestone
• Run salary benchmarks, AI-risk checks, and financial projections
────────────────────────────────────────────────────────
🔹 Milestone-Specificity Directive (do not remove) 🔹
Focus on providing detailed, actionable milestones with exact resources, courses, or events tailored to the user's interests and career goals. Avoid generic suggestions and aim for specifics that guide the user on what to do next.
────────────────────────────────────────────────────────
────────────────────────────────────────────────────────
Mission & Tone
────────────────────────────────────────────────────────
Our mission is to help people grow *with* AI rather than be displaced by it.
Speak in a warm, encouraging tone, but prioritize *specific next steps* over generic motivation.
Validate ambitions, break big goals into realistic milestones, and show how AI can be a collaborator.
Finish every reply with **one concrete suggestion or question** that moves the plan forward.
Never ask for info you already have unless you truly need clarification.
`.trim();
const systemPromptOpsCheatSheet = `
────────────────────────────────────────────────────────
🛠 APTIVA OPS YOU CAN USE ANY TIME
────────────────────────────────────────────────────────
1. **CREATE** a milestone (optionally with tasks + impacts)
2. **UPDATE** any field on an existing milestone
3. **DELETE** a milestone *in the current scenario only*
4. **DELETEALL** a milestone *from EVERY scenario*
5. **COPY** an existing milestone to one or more other scenarios
6. **CREATE** a task inside a milestone
7. **UPDATE** a task
8. **DELETE** a task
9. **CREATE** an impact on a milestone
10. **UPDATE** an impact
11. **DELETE** an impact
12. **CREATE** a new career *scenario* (career-profile row)
13. **UPDATE** any field on an existing scenario
14. **DELETE** a scenario
15. **CLONE** a scenario (duplicate it, then optionally override fields)
16. **UPSERT** the college profile for the current scenario
• Automatically creates the row if none exists, or updates it if it does.
────────────────────────────────────────────────────────
WHEN you perform an op:
• Write ONE short confirmation line for the user
(e.g. “✅ Deleted the July 2025 milestone.”).
• THEN add a fenced \`\`\`ops\`\`\` JSON block on a new line.
• Put **no other text after** that block.
If you are **not** performing an op, skip the block entirely.
────────────────────────────────────────────────────────
Tag the fenced block \`\`\`ops\`\`\` exactly like this:
\`\`\`ops
{
"milestones":[
{ "op":"DELETE", "id":"1234-uuid" },
{ "op":"UPDATE", "id":"5678-uuid",
"patch":{ "date":"2026-02-01", "title":"New title" } },
{ "op":"CREATE",
"data":{
"title":"Finish AWS Solutions Architect cert",
"type":"Career",
"date":"2026-06-01",
"description":"Study + exam",
"tasks":[
{ "title":"Book exam", "due_date":"2026-03-15" }
],
"impacts":[
{ "impact_type":"cost", "direction":"subtract",
"amount":350, "start_date":"2026-03-15" }
]
}
}
]
}
\`\`\`
⚠️ When you DELETE or UPDATE, the "id" **must be the UUID from column 1 of the grid**—never the title.
⚠️ Whenever you *say* you changed a milestone, include the ops block.
⚠️ Omitting the block means **no changes will be executed**.
`.trim();
const systemPromptStatusSituation = `
[CURRENT AND NEXT STEP OVERVIEW]
${combinedStatusSituation}
`.trim();
const systemPromptDetailedContext = `
[DETAILED USER PROFILE & CONTEXT]
${summaryText}
`.trim();
const dynMilestonePrompt = `
[CURRENT MILESTONES]
Use **exactly** the UUID at the start of each line when you refer to a milestone
(you can DELETE, UPDATE, or COPY any of them).
(id | title | date )
${miniGrid}
You may UPDATE or DELETE any of these.
`.trim();
const systemPromptMilestoneFormat = `
WHEN the user wants a plan with milestones, tasks, and financial impacts:
RESPOND ONLY with valid JSON in this shape:
{
"milestones": [
{
"title": "string",
"date": "YYYY-MM-DD",
"description": "1 or 2 sentences",
"impacts": [
{
"impact_type": "cost" or "salary" or ...,
"direction": "add" or "subtract",
"amount": 100.00,
"start_date": "YYYY-MM-DD" (optional),
"end_date": "YYYY-MM-DD" (optional)
}
],
"tasks": [
{
"title": "string",
"description": "string",
"due_date": "YYYY-MM-DD"
}
]
},
...
]
}
* ── QUALITY RULES (hard) ────────────────────────────────
• Do **NOT** create a milestone if its title already exists (case-insensitive).
✓ Every milestone must cite at least ONE concrete datum taken
verbatim from the context blocks above AND
✓ must include a clearly-named real-world noun
(company, organisation, conference, certificate, platform, city…).
BAD » “Attend a networking event”
GOOD » “Attend IEEE Atlanta Nanotechnology Meetup”
If you can’t meet the rule, ASK the user a clarifying question instead
of returning an invalid milestone.
NO extra text or disclaimers if returning a plan—only that JSON.
Otherwise, answer normally.
`.trim();
/* ─── date guard ─────────────────────────────────────────────── */
const todayISO = new Date().toISOString().slice(0, 10);
const systemPromptDateGuard = `
────────────────────────────────────────────────────────
📅 DATE GUARD
────────────────────────────────────────────────────────
Every milestone “date” must be **on or after** ${todayISO}.
If you’re asked for short-term dates, they still must be ≥ ${todayISO}.
Reject or re-ask if the user insists on a past date.
`.trim();
const avoidBlock = existingTitles.length
? "\nAVOID any milestone whose title matches REGEXP /" +
existingTitles.map(t => `(?:${t.split("|")[0].replace(/[.*+?^${}()|[\]\\]/g,"\\$&")})`)
.join("|") + "/i"
: "";
const recentHistory = chatHistory.slice(-MAX_CHAT_TURNS);
const firstTurn = chatHistory.length === 0;
const STATIC_SYSTEM_CARD = `
${systemPromptIntro}
${systemPromptOpsCheatSheet}
/* Milestone JSON spec, date guard, and avoid-list */
${systemPromptMilestoneFormat}
${systemPromptDateGuard}
${avoidBlock}
`.trim();
// Detect whether the static cards are already in the conversation.
// The marker strings below must match text that actually appears in the cards we send.
const NEEDS_OPS_CARD = !chatHistory.some(
  m => m.role === "system" && m.content.includes("APTIVA OPS YOU CAN USE ANY TIME")
);
const NEEDS_CTX_CARD = !chatHistory.some(
  m => m.role === "system" && m.content.startsWith("[DETAILED USER PROFILE")
);
const SEND_CTX_CARD = forceContext || NEEDS_CTX_CARD;
const messagesToSend = [];
// ① Large, unchanging card – once per conversation
if (NEEDS_OPS_CARD) {
messagesToSend.push({ role: "system", content: STATIC_SYSTEM_CARD });
}
if (SEND_CTX_CARD)
  messagesToSend.push({ role: "system", content: systemPromptDetailedContext });
// ② Per-turn contextual helpers (small!)
messagesToSend.push(
{ role: "system", content: systemPromptStatusSituation },
{ role: "system", content: dynMilestonePrompt } // <-- grid replaces two old lines
);
// ③ Recent conversational context
messagesToSend.push(...recentHistory); // last MAX_CHAT_TURNS turns (computed above)
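// Final order of messagesToSend:
//   [static ops/format card?] [detailed context card?] [status card] [milestone grid] [...recent turns]
// (the first two are included only when the flags above say they are missing from chatHistory)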
// ------------------------------------------------
// 6. Call GPT
// ------------------------------------------------
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const completion = await openai.chat.completions.create({
model: "gpt-4o-mini",
messages: messagesToSend,
temperature: 0.3,
max_tokens: 1000
});
// 7) Grab the response text
const rawReply = completion?.choices?.[0]?.message?.content?.trim() || "";
console.log("[GPT raw]", rawReply); // ← TEMP-LOG
if (!rawReply) {
return res.json({
reply: "Sorry, I didn't get a response. Could you please try again?"
});
}
/* Detect a fenced ```ops``` JSON block and apply it */
let opsConfirmations = [];
const opsMatch = rawReply.match(/```ops\s*([\s\S]*?)```/i);
if (opsMatch) {
try {
const opsObj = JSON.parse(opsMatch[1]);
opsConfirmations = await applyOps(opsObj, req);
} catch (e) {
console.error("Could not parse ops JSON:", e);
}
}
/* 🔹 Strip the ops block from what the user sees */
let visibleReply = rawReply.replace(/```ops[\s\S]*?```/i, "").trim();
if (!visibleReply) visibleReply = "Done!";
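// Illustrative flow (the exact ops schema is whatever applyOps() accepts):
//   rawReply     = "Done, I removed it.\n```ops\n{ ...operations JSON... }\n```"
//   applyOps()   runs the operations and returns confirmation strings;
//   visibleReply ends up as "Done, I removed it."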
/* If we executed any ops, append a quick summary */
if (opsConfirmations.length) {
visibleReply +=
"\n\n" +
opsConfirmations.map(t => "• " + t).join("\n");
await pool.query(
"DELETE FROM context_cache WHERE user_id=? AND career_profile_id=?",
[req.id, scenarioRow.id]
);
}
// 8) Default: return the visible reply to the front-end
let replyToClient = visibleReply;
let createdMilestonesData = [];
// ── Pull out the first JSON object/array even if text precedes it ──
const firstBrace = rawReply.search(/[{\[]/); // first “{” or “[”
const lastBrace = rawReply.lastIndexOf("}");
const lastBracket = rawReply.lastIndexOf("]");
const lastJsonEdge = Math.max(lastBrace, lastBracket);
let embeddedJson = null;
if (firstBrace !== -1 && lastJsonEdge > firstBrace) {
embeddedJson = rawReply.slice(firstBrace, lastJsonEdge + 1).trim();
}
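// e.g. rawReply = 'Here is your plan: {"milestones":[ ... ]} Let me know!'
//      → embeddedJson = '{"milestones":[ ... ]}' (everything from the first "{" to the last "}" / "]")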
if (embeddedJson) { // parse whatever JSON block we managed to extract
try {
const planObj = JSON.parse(embeddedJson);
// The AI plan is expected to have: planObj.milestones[]
if (planObj && Array.isArray(planObj.milestones)) {
for (const milestone of planObj.milestones) {
const dupKey = `${(milestone.title || "").trim()}|${milestone.date}`;
if (existingTitles.includes(dupKey)) {
console.log("Skipping duplicate milestone:", dupKey);
continue; // do NOT insert
}
// Create the milestone
const milestoneBody = {
title: milestone.title,
description: milestone.description || "",
date: milestone.date,
career_profile_id: scenarioRow.id, // or scenarioRow.career_profile_id
status: "planned",
progress: 0,
is_universal: false
};
// Call your existing milestone endpoint
const msRes = await auth(req, '/premium/milestone', {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(milestoneBody)
});
const createdMs = await msRes.json();
// Figure out the new milestone ID
let newMilestoneId = null;
if (Array.isArray(createdMs) && createdMs[0]) {
newMilestoneId = createdMs[0].id;
} else if (createdMs.id) {
newMilestoneId = createdMs.id;
}
// If we have a milestoneId, create tasks & impacts
if (newMilestoneId) {
/* ---------- TASKS ---------- */
if (Array.isArray(milestone.tasks)) {
for (const t of milestone.tasks) {
// tolerate plain-string tasks → convert to minimal object
const taskObj =
typeof t === "string"
? { title: t, description: "", due_date: null }
: t;
if (!taskObj.title) continue; // skip invalid
const taskBody = {
milestone_id: newMilestoneId,
title: taskObj.title,
description: taskObj.description || "",
due_date: taskObj.due_date || null
};
await auth(req, '/premium/tasks', {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(taskBody)
});
}
}
/* ---------- IMPACTS ---------- */
if (Array.isArray(milestone.impacts)) {
for (const imp of milestone.impacts) {
// tolerate plain-string impacts
const impObj =
typeof imp === "string"
? {
impact_type: "note",
direction: "add",
amount: 0,
start_date: null,
end_date: null
}
: imp;
if (!impObj.impact_type) continue; // skip invalid
const impactBody = {
milestone_id: newMilestoneId,
impact_type: impObj.impact_type,
direction: impObj.direction,
amount: impObj.amount,
start_date: impObj.start_date || null,
end_date: impObj.end_date || null
};
await auth(req, '/premium/milestone-impacts', {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(impactBody)
});
}
}
/* ---------- Track the new milestone ---------- */
createdMilestonesData.push({
milestoneId: newMilestoneId,
title: milestone.title
});
}
}
// If we successfully created at least 1 milestone,
// override the reply with a success message
if (createdMilestonesData.length > 0) {
replyToClient = `
I've created ${createdMilestonesData.length} milestones (with tasks & impacts) for you in this scenario.
Check your Milestones tab. Let me know if you want any changes!
`.trim();
}
}
} catch (parseErr) {
console.error("Error parsing AI JSON =>", parseErr);
// We'll just keep the raw AI text if parsing fails
}
}
// 9) Finally, respond to front-end
return res.json({
reply: replyToClient,
createdMilestones: createdMilestonesData
});
} catch (err) {
console.error("Error in /api/premium/ai/chat =>", err);
return res.status(500).json({ error: "Failed to process AI chat." });
}
});
/* ──────────────────────────────────────────────
RETIREMENT AI-CHAT ENDPOINT (clone + patch)
─────────────────────────────────────────── */
app.post('/api/premium/retirement/aichat', authenticatePremiumUser, chatGate('retire'), async (req, res) => {
try {
/* 0️⃣ pull + sanity-check inputs */
const {
prompt = '',
scenario_id = '',
chatHistory = []
} = req.body || {};
if (!prompt.trim()) return res.status(400).json({ error: 'Prompt is required.' });
if (!scenario_id) return res.status(400).json({ error: 'scenario_id is required.' });
/* 1️⃣ ownership guard */
const [[scenario]] = await pool.query(
'SELECT * FROM career_profiles WHERE id = ? AND user_id = ?',
[scenario_id, req.id]
);
if (!scenario) return res.status(404).json({ error: 'Scenario not found.' });
/* 2️⃣ locate *text* of the last user turn */
let userMsgStr = prompt.trim();
if (Array.isArray(chatHistory)) {
for (let i = chatHistory.length - 1; i >= 0; i--) {
const m = chatHistory[i];
if (m?.role === 'user' && typeof m.content === 'string') {
userMsgStr = m.content;
break;
}
}
}
/* helper ⇒ force every .content to a plain string */
const toStr = v =>
v === null || v === undefined
? ''
: typeof v === 'string'
? v
: JSON.stringify(v);
const sanitizedHistory =
(Array.isArray(chatHistory) ? chatHistory : [])
.map(({ role = 'user', content = '' }) => ({ role, content: toStr(content) }));
/* 3️⃣ system instructions */
const systemMsg = `
You are AptivaAI's retirement-planning coach.
Rules:
• Educational guidance only — **NO** personalised investment advice.
• Never recommend specific securities or products.
• Friendly tone; ≤ 180 words.
If you need to change the plan, append ONE of:
• PATCH the current scenario
\`\`\`json
{ "annual_spend": 42000, "roi_real": 0.045 }
\`\`\`
• CLONE the current scenario, then override fields
\`\`\`json
{
"cloneScenario": {
"sourceId": "${scenario_id}",
"overrides": { "retirement_start_date": "2050-01-01", "annual_spend": 38000 }
}
}
\`\`\`
If nothing changes, return \`{"noop":true}\`.
Always end with: “AptivaAI is an educational tool – not advice.”
`.trim();
/* 4️⃣ call OpenAI */
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const chatRes = await openai.chat.completions.create({
model : 'gpt-4o-mini',
temperature : 0.6,
max_tokens : 600,
messages : [
{ role: 'system', content: systemMsg },
...sanitizedHistory,
{ role: 'user', content: userMsgStr }
]
});
const raw = (chatRes.choices?.[0]?.message?.content || '').trim();
res.set({
'X-OpenAI-Prompt-Tokens' : chatRes.usage?.prompt_tokens ?? 0,
'X-OpenAI-Completion-Tokens': chatRes.usage?.completion_tokens ?? 0
});
/* 5️⃣ extract JSON payload (patch OR cloneScenario) */
let visibleReply = raw;
let payloadObj = null;
// A. fenced ```json``` block
console.log('[GPT raw]', raw);
let match = raw.match(/```json\s*([\s\S]+?)```/i);
// B. or a “loose” top-level {...} / [...]
if (!match) {
const start = raw.search(/[{\[]/);
if (start !== -1) {
const end = Math.max(raw.lastIndexOf('}'), raw.lastIndexOf(']'));
if (end > start) match = [ , raw.slice(start, end + 1) ];
}
}
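// Illustrative: raw = 'Lowering spend helps. {"annual_spend":42000} AptivaAI is an educational tool – not advice.'
//   → match[1] = '{"annual_spend":42000}' (the sparse array mimics a RegExp match: [full, capture])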
if (match) {
try { payloadObj = JSON.parse(match[1]); } catch {/* bad JSON ⇒ ignore */}
visibleReply = raw.replace(match[0] || match[1], '').trim();
}
/* ignore noop / empty */
const realKeys = payloadObj ? Object.keys(payloadObj).filter(k => k !== 'noop') : [];
if (!realKeys.length) payloadObj = null;
/* 6️⃣ persist changes */
if (payloadObj?.cloneScenario) {
/* ------ CLONE ------ */
await auth(req, '/premium/career-profile/clone', {
method: 'POST',
body : JSON.stringify(payloadObj.cloneScenario),
headers: { 'Content-Type': 'application/json' }
});
visibleReply = visibleReply || 'I cloned your scenario and applied the new settings.';
} else if (payloadObj) {
/* ------ PATCH ------ */
// Only identifier-safe keys (and never "noop") may reach the SET clause;
// this guards against SQL injection via AI-supplied object keys.
const fields = Object.keys(payloadObj)
.filter(f => f !== 'noop' && /^[a-z][a-z0-9_]*$/i.test(f));
if (fields.length) {
const setters = fields.map(f => `${f} = ?`).join(', ');
const values = fields.map(f => payloadObj[f]);
await pool.query(
`UPDATE career_profiles
SET ${setters},
updated_at = CURRENT_TIMESTAMP
WHERE id = ? AND user_id = ?`,
[...values, scenario_id, req.id]
);
}
/* sync retirement milestone if needed */
if (payloadObj.retirement_start_date) {
await pool.query(
`UPDATE milestones
SET date = ?, updated_at = CURRENT_TIMESTAMP
WHERE career_profile_id = ?
AND user_id = ?
AND LOWER(title) LIKE 'retirement%'`,
[payloadObj.retirement_start_date, scenario_id, req.id]
);
}
}
/* 7️⃣ send to client */
return res.json({
reply: visibleReply || 'Sorry, no response – please try again.',
...(payloadObj ? { scenarioPatch: payloadObj } : {})
});
} catch (err) {
console.error('retirement/aichat error:', err);
return res.status(500).json({ error: 'Internal error – please try again later.' });
}
}
);
/* ------------- Retirement chat threads ------------- */
/* CREATE a Retirement thread */
app.post('/api/premium/retire/chat/threads', authenticatePremiumUser, async (req, res) => {
const id = uuid();
const title = (req.body?.title || 'Retirement chat').slice(0, 200);
await pool.query(
'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "retire", ?)',
[id, req.id, title]
);
res.json({ id, title });
});
/* LIST Retirement threads */
app.get('/api/premium/retire/chat/threads', authenticatePremiumUser, async (req, res) => {
const [rows] = await pool.query(
'SELECT id,title,updated_at FROM ai_chat_threads WHERE user_id=? AND bot_type="retire" ORDER BY updated_at DESC LIMIT 50',
[req.id]
);
res.json({ threads: rows });
});
/* GET one Retirement thread + messages */
app.get('/api/premium/retire/chat/threads/:id', authenticatePremiumUser, async (req, res) => {
const { id } = req.params;
const [[t]] = await pool.query(
'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="retire"',
[id, req.id]
);
if (!t) return res.status(404).json({ error: 'not_found' });
const [msgs] = await pool.query(
'SELECT role,content,created_at FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 200',
[id]
);
res.json({ messages: msgs });
});
/* POST a message (auto-create thread if missing) */
app.post('/api/premium/retire/chat/threads/:id/messages', authenticatePremiumUser, async (req, res) => {
const { id } = req.params;
const { content = '', context = {} } = req.body || {};
if (!content.trim()) return res.status(400).json({ error: 'empty' });
// ensure thread exists (auto-create if missing)
const [[t]] = await pool.query(
'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="retire"',
[id, req.id]
);
if (!t) {
await pool.query(
'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "retire", ?)',
[id, req.id, 'Retirement chat']
);
}
// save user msg
await pool.query(
'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "user", ?)',
[id, req.id, content]
);
// history (≤40)
const [history] = await pool.query(
'SELECT role,content FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 40',
[id]
);
// call AI
const resp = await internalFetch(req, '/premium/retirement/aichat', {
method : 'POST',
headers: { 'Content-Type':'application/json' },
body : JSON.stringify({ prompt: content, scenario_id: context?.scenario_id, chatHistory: history })
});
let reply = 'Sorry, please try again.';
if (resp.ok) {
const json = await resp.json();
reply = (json?.reply || '').trim() || reply;
// save AI reply
await pool.query(
'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "assistant", ?)',
[id, req.id, reply]
);
await pool.query('UPDATE ai_chat_threads SET updated_at=CURRENT_TIMESTAMP WHERE id=?', [id]);
return res.json(json); // keep scenarioPatch passthrough
} else {
return res.status(502).json({ error: 'upstream_failed' });
}
});
/* ------------------ Coach chat threads ------------------ */
/* CREATE a Coach thread */
app.post('/api/premium/coach/chat/threads', authenticatePremiumUser, async (req, res) => {
const id = uuid();
const title = (req.body?.title || 'CareerCoach chat').slice(0, 200);
await pool.query(
'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "coach", ?)',
[id, req.id, title]
);
res.json({ id, title });
});
/* LIST Coach threads */
app.get('/api/premium/coach/chat/threads', authenticatePremiumUser, async (req, res) => {
const [rows] = await pool.query(
'SELECT id,title,updated_at FROM ai_chat_threads WHERE user_id=? AND bot_type="coach" ORDER BY updated_at DESC LIMIT 50',
[req.id]
);
res.json({ threads: rows });
});
/* GET one Coach thread + messages */
app.get('/api/premium/coach/chat/threads/:id', authenticatePremiumUser, async (req, res) => {
const { id } = req.params;
const [[t]] = await pool.query(
'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="coach"',
[id, req.id]
);
if (!t) return res.status(404).json({ error: 'not_found' });
const [msgs] = await pool.query(
'SELECT role,content,created_at FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 200',
[id]
);
res.json({ messages: msgs });
});
/* POST a message (auto-create thread if missing) */
app.post('/api/premium/coach/chat/threads/:id/messages', authenticatePremiumUser, async (req, res) => {
const { id } = req.params;
const { content = '', context = {} } = req.body || {};
if (!content.trim()) return res.status(400).json({ error: 'empty' });
// ensure thread exists (auto-create if missing)
const [[t]] = await pool.query(
'SELECT id FROM ai_chat_threads WHERE id=? AND user_id=? AND bot_type="coach"',
[id, req.id]
);
if (!t) {
await pool.query(
'INSERT INTO ai_chat_threads (id,user_id,bot_type,title) VALUES (?,?, "coach", ?)',
[id, req.id, 'CareerCoach chat']
);
}
// save user msg
await pool.query(
'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "user", ?)',
[id, req.id, content]
);
// history (≤40)
const [history] = await pool.query(
'SELECT role,content FROM ai_chat_messages WHERE thread_id=? ORDER BY id ASC LIMIT 40',
[id]
);
// call AI
const resp = await internalFetch(req, '/premium/ai/chat', {
method : 'POST',
headers: { 'Content-Type':'application/json' },
body : JSON.stringify({ ...context, chatHistory: history })
});
let reply = 'Sorry, please try again.';
if (resp.ok) {
const json = await resp.json();
reply = (json?.reply || '').trim() || reply;
// save AI reply
await pool.query(
'INSERT INTO ai_chat_messages (thread_id,user_id,role,content) VALUES (?,?, "assistant", ?)',
[id, req.id, reply]
);
await pool.query('UPDATE ai_chat_threads SET updated_at=CURRENT_TIMESTAMP WHERE id=?', [id]);
return res.json({ reply });
} else {
return res.status(502).json({ error: 'upstream_failed' });
}
});
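/* CLONE a scenario (used by the retirement bot and the UI wizard).
   Illustrative request; override keys are simply career_profiles columns the caller wants changed:
   POST /api/premium/career-profile/clone
   { "sourceId": "<scenario uuid>", "overrides": { "annual_spend": 38000 } }
*/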
app.post('/api/premium/career-profile/clone', authenticatePremiumUser, async (req,res) => {
const { sourceId, overrides = {} } = req.body || {};
if (!sourceId) return res.status(400).json({ error: 'sourceId required' });
// 1) fetch & ownership check
const [[src]] = await pool.query(
'SELECT * FROM career_profiles WHERE id=? AND user_id=?',
[sourceId, req.id]
);
if (!src) return res.status(404).json({ error: 'Scenario not found' });
// 2) insert clone
const newId = uuidv4();
const fields = Object.keys(src).filter(k => !['id','created_at','updated_at'].includes(k));
const values = fields.map(f =>
f === 'user_id' ? req.id : // never allow overrides to re-home the clone to another user
(overrides[f] ?? src[f])
);
await pool.query(
`INSERT INTO career_profiles (id, ${fields.join(',')})
VALUES (?, ${fields.map(()=>'?').join(',')})`,
[newId, ...values]
);
// 2.5) copy ALL college_profiles tied to the source scenario
const [cprows] = await pool.query(
'SELECT * FROM college_profiles WHERE career_profile_id=? AND user_id=?',
[sourceId, req.id]
);
for (const cp of cprows) {
const newCpId = uuidv4();
const cols = Object.keys(cp).filter(k => !['id','created_at','updated_at'].includes(k));
const vals = cols.map(k =>
k === 'career_profile_id' ? newId :
k === 'user_id' ? req.id :
cp[k]
);
await pool.query(
`INSERT INTO college_profiles (id, ${cols.join(',')})
VALUES (?, ${cols.map(() => '?').join(',')})`,
[newCpId, ...vals]
);
}
// 3) copy milestones/tasks/impacts (optional – mirrors UI wizard)
const [mils] = await pool.query(
'SELECT * FROM milestones WHERE career_profile_id=? AND user_id=?',
[sourceId, req.id]
);
for (const m of mils) {
const newMilId = uuidv4();
await pool.query(
`INSERT INTO milestones (id,user_id,career_profile_id,title,description,date,progress,status,is_universal)
VALUES (?,?,?,?,?,?,?,?,?)`,
[newMilId, req.id, newId, m.title, m.description, m.date, m.progress, m.status, m.is_universal]
);
// copy tasks
const [tasks] = await pool.query('SELECT * FROM tasks WHERE milestone_id=?',[m.id]);
for (const t of tasks) {
await pool.query(
`INSERT INTO tasks (id,milestone_id,user_id,title,description,due_date,status)
VALUES (?,?,?,?,?,?,?)`,
[uuidv4(), newMilId, req.id, t.title, t.description, t.due_date, t.status]
);
}
// copy impacts
const [imps] = await pool.query('SELECT * FROM milestone_impacts WHERE milestone_id=?',[m.id]);
for (const imp of imps) {
await pool.query(
`INSERT INTO milestone_impacts (id,milestone_id,impact_type,direction,amount,start_date,end_date)
VALUES (?,?,?,?,?,?,?)`,
[uuidv4(), newMilId, imp.impact_type, imp.direction, imp.amount, imp.start_date, imp.end_date]
);
}
}
return res.json({ newScenarioId: newId });
});
/***************************************************
AI MILESTONE CONVERSION ENDPOINT
****************************************************/
app.post('/api/premium/milestone/convert-ai', authenticatePremiumUser, async (req, res) => {
try {
// The client passes us an array of milestones, e.g.:
// { milestones: [ { title, date, description, tasks, impacts }, ... ] }
const { milestones } = req.body;
const { careerProfileId } = req.query;
// or from body, if you prefer:
// const { careerProfileId } = req.body;
if (!careerProfileId) {
return res.status(400).json({ error: 'careerProfileId is required.' });
}
if (!Array.isArray(milestones)) {
return res.status(400).json({ error: 'Expected milestones array in body.' });
}
const newMilestones = [];
for (const m of milestones) {
// Required fields for your DB:
// title, date, career_profile_id
if (!m.title || !m.date) {
return res.status(400).json({
error: 'Missing required milestone fields (title/date).',
details: m
});
}
// create the milestone row
const id = uuidv4();
await pool.query(`
INSERT INTO milestones (
id,
user_id,
career_profile_id,
title,
description,
date,
progress,
status,
is_universal
) VALUES (?, ?, ?, ?, ?, ?, 0, 'planned', 0)
`, [
id,
req.id,
careerProfileId,
m.title,
m.description || '',
m.date
]);
// If the user also sent tasks in m.tasks:
if (Array.isArray(m.tasks)) {
for (const t of m.tasks) {
const taskId = uuidv4();
await pool.query(`
INSERT INTO tasks (
id,
milestone_id,
user_id,
title,
description,
due_date,
status
) VALUES (?, ?, ?, ?, ?, ?, 'not_started')
`, [
taskId,
id,
req.id,
t.title || 'Task',
t.description || '',
t.due_date || null
]);
}
}
// If the user also sent impacts in m.impacts:
if (Array.isArray(m.impacts)) {
for (const imp of m.impacts) {
const impactId = uuidv4();
await pool.query(`
INSERT INTO milestone_impacts (
id,
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date
) VALUES (?, ?, ?, ?, ?, ?, ?)
`, [
impactId,
id,
imp.impact_type || 'none',
imp.direction || 'add',
imp.amount || 0,
imp.start_date || null,
imp.end_date || null
]);
}
}
newMilestones.push({
id,
title: m.title,
description: m.description || '',
date: m.date,
tasks: m.tasks || [],
impacts: m.impacts || []
});
}
return res.json({ createdMilestones: newMilestones });
} catch (err) {
console.error('Error converting AI milestones:', err);
return res.status(500).json({ error: 'Failed to convert AI milestones.' });
}
});
/***************************************************
AI CAREER RISK ANALYSIS ENDPOINTS
****************************************************/
// server3.js
app.post('/api/premium/ai-risk-analysis', authenticatePremiumUser, async (req, res) => {
try {
let { socCode, careerName, jobDescription, tasks = [] } = req.body;
if (!socCode) {
return res.status(400).json({ error: 'socCode is required.' });
}
({ jobDescription, tasks } = await ensureDescriptionAndTasks({ socCode, jobDescription, tasks }));
// 1) Check if we already have it
const cached = await getRiskAnalysisFromDB(socCode);
if (cached) {
return res.json({
socCode: cached.soc_code,
careerName: cached.career_name,
jobDescription: cached.job_description,
tasks: Array.isArray(cached.tasks) ? cached.tasks : (cached.tasks ? String(cached.tasks).split(';') : []),
riskLevel: cached.risk_level,
reasoning: cached.reasoning
});
}
// 2) If missing, call GPT-4 to generate analysis
const prompt = `
The user has a career named: ${careerName}
Description: ${jobDescription}
Tasks: ${tasks.join('; ')}
Provide AI automation risk analysis for the next 10 years.
Return ONLY a JSON object (no markdown/code fences), exactly in this format:
{
"riskLevel": "Low|Moderate|High",
"reasoning": "Short explanation (< 50 words)."
}
`;
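// Expected completion shape (illustrative reasoning text):
//   {"riskLevel":"Moderate","reasoning":"Routine reporting is automatable; client-facing judgment is not."}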
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const completion = await openai.chat.completions.create({
model: "gpt-4o-mini",
messages: [{ role: 'user', content: prompt }],
temperature: 0.3,
max_tokens: 200,
});
const aiText = completion?.choices?.[0]?.message?.content?.trim() || '';
const parsed = parseJSONLoose(aiText);
if (!parsed) {
console.error('Error parsing AI JSON (loose):', aiText);
return res.status(500).json({ error: 'Invalid AI JSON response.' });
}
const { riskLevel, reasoning } = parsed;
// 3) Store in DB
const tasksStr =
Array.isArray(tasks) ? tasks.map(t => (typeof t === 'string' ? t : String(t))).join('; ')
: (typeof tasks === 'string' ? tasks : '');
await storeRiskAnalysisInDB({
socCode, careerName, jobDescription, tasks: tasksStr,
riskLevel: parsed.riskLevel, reasoning: parsed.reasoning
});
// 4) Return the new analysis
res.json({
socCode,
careerName,
jobDescription,
tasks,
riskLevel,
reasoning
});
} catch (err) {
console.error('Error in /api/premium/ai-risk-analysis:', err);
res.status(500).json({ error: 'Failed to generate AI risk analysis.' });
}
});
app.post('/api/public/ai-risk-analysis', async (req, res) => {
try {
let { socCode, careerName, jobDescription, tasks = [] } = req.body;
if (!socCode || !careerName) {
return res.status(400).json({ error: 'socCode and careerName are required.' });
}
({ jobDescription, tasks } = await ensureDescriptionAndTasks({ socCode, jobDescription, tasks }));
const prompt = `
The user has a career named: ${careerName}
Description: ${jobDescription}
Tasks: ${tasks.join('; ')}
Provide AI automation risk analysis for the next 10 years.
Return JSON exactly in this format:
{
"riskLevel": "Low|Moderate|High",
"reasoning": "Short explanation (< 50 words)."
}
`;
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const completion = await openai.chat.completions.create({
model: 'gpt-4o-mini',
messages: [{ role: 'user', content: prompt }],
temperature: 0.3,
max_tokens: 200,
});
const aiText = completion?.choices?.[0]?.message?.content?.trim() || '';
const parsed = parseJSONLoose(aiText);
if (!parsed) {
console.error('Error parsing AI JSON (loose):', aiText);
return res.status(500).json({ error: 'Invalid AI JSON response.' });
}
const { riskLevel, reasoning } = parsed;
res.json({
socCode,
careerName,
jobDescription,
tasks,
riskLevel,
reasoning
});
} catch (err) {
console.error('Error in public AI risk analysis:', err);
res.status(500).json({ error: 'AI risk analysis failed.' });
}
});
/* ------------------------------------------------------------------
MILESTONE ENDPOINTS
------------------------------------------------------------------ */
// CREATE one or more milestones
app.post('/api/premium/milestone', authenticatePremiumUser, async (req, res) => {
try {
const body = req.body;
if (Array.isArray(body.milestones)) {
// Bulk insert
const createdMilestones = [];
for (const m of body.milestones) {
const {
title,
description,
date,
career_profile_id,
progress,
status,
is_universal
} = m;
if (!title || !date || !career_profile_id) {
return res.status(400).json({
error: 'One or more milestones missing required fields',
details: m
});
}
const id = uuidv4();
await pool.query(`
INSERT INTO milestones (
id,
user_id,
career_profile_id,
title,
description,
date,
progress,
status,
is_universal
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`, [
id,
req.id,
career_profile_id,
title,
description || '',
date,
progress || 0,
status || 'planned',
is_universal ? 1 : 0
]);
createdMilestones.push({
id,
career_profile_id,
title,
description: description || '',
date,
progress: progress || 0,
status: status || 'planned',
is_universal: is_universal ? 1 : 0,
tasks: []
});
}
return res.status(201).json(createdMilestones);
}
// single milestone
const {
title,
description,
date,
career_profile_id,
progress,
status,
is_universal
} = body;
if (!title || !date || !career_profile_id) {
return res.status(400).json({
error: 'Missing required fields',
details: { title, date, career_profile_id }
});
}
const id = uuidv4();
await pool.query(`
INSERT INTO milestones (
id,
user_id,
career_profile_id,
title,
description,
date,
progress,
status,
is_universal
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`, [
id,
req.id,
career_profile_id,
title,
description || '',
date,
progress || 0,
status || 'planned',
is_universal ? 1 : 0
]);
return res.status(201).json({
id,
career_profile_id,
title,
description: description || '',
date,
progress: progress || 0,
status: status || 'planned',
is_universal: is_universal ? 1 : 0,
tasks: []
});
} catch (err) {
console.error('Error creating milestone(s):', err);
res.status(500).json({ error: 'Failed to create milestone(s).' });
}
});
// UPDATE an existing milestone
app.put('/api/premium/milestones/:milestoneId', authenticatePremiumUser, async (req, res) => {
try {
const { milestoneId } = req.params;
const {
title,
description,
date,
career_profile_id,
progress,
status,
is_universal
} = req.body;
const [existing] = await pool.query(`
SELECT *
FROM milestones
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
if (!existing[0]) {
return res.status(404).json({ error: 'Milestone not found or not yours.' });
}
const row = existing[0];
const finalTitle = title || row.title;
const finalDesc = description || row.description;
const finalDate = date || row.date;
const finalCareerProfileId = career_profile_id || row.career_profile_id;
const finalProgress = progress != null ? progress : row.progress;
const finalStatus = status || row.status;
const finalIsUniversal = is_universal != null ? (is_universal ? 1 : 0) : row.is_universal;
await pool.query(`
UPDATE milestones
SET
title = ?,
description = ?,
date = ?,
career_profile_id = ?,
progress = ?,
status = ?,
is_universal = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
AND user_id = ?
`, [
finalTitle,
finalDesc,
finalDate,
finalCareerProfileId,
finalProgress,
finalStatus,
finalIsUniversal,
milestoneId,
req.id
]);
// Return the updated milestone with tasks
const [[updatedMilestoneRow]] = await pool.query(`
SELECT *
FROM milestones
WHERE id = ?
`, [milestoneId]);
const [tasks] = await pool.query(`
SELECT *
FROM tasks
WHERE milestone_id = ?
`, [milestoneId]);
res.json({
...safeMilestoneRow(updatedMilestoneRow),
tasks: (tasks || []).map(safeTaskRow)
});
} catch (err) {
console.error('Error updating milestone:', err);
res.status(500).json({ error: 'Failed to update milestone.' });
}
});
// GET all milestones for a given careerProfileId
app.get('/api/premium/milestones', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.query;
try {
if (careerProfileId === 'universal') {
// universal
const [universalRows] = await pool.query(`
SELECT *
FROM milestones
WHERE user_id = ?
AND is_universal = 1
`, [req.id]);
const milestoneIds = universalRows.map(m => m.id);
let tasksByMilestone = {};
if (milestoneIds.length > 0) {
const placeholders = milestoneIds.map(() => '?').join(',');
const [taskRows] = await pool.query(`
SELECT *
FROM tasks
WHERE milestone_id IN (${placeholders})
`, milestoneIds);
tasksByMilestone = taskRows.reduce((acc, t) => {
if (!acc[t.milestone_id]) acc[t.milestone_id] = [];
acc[t.milestone_id].push(safeTaskRow(t));
return acc;
}, {});
}
const uniMils = universalRows.map(m => ({
...safeMilestoneRow(m),
tasks: tasksByMilestone[m.id] || []
}));
return res.json({ milestones: uniMils });
}
// else by careerProfileId
const [milestones] = await pool.query(`
SELECT *
FROM milestones
WHERE user_id = ?
AND career_profile_id = ?
`, [req.id, careerProfileId]);
const milestoneIds = milestones.map(m => m.id);
let tasksByMilestone = {};
if (milestoneIds.length > 0) {
const placeholders = milestoneIds.map(() => '?').join(',');
const [taskRows] = await pool.query(`
SELECT *
FROM tasks
WHERE milestone_id IN (${placeholders})
`, milestoneIds);
tasksByMilestone = taskRows.reduce((acc, t) => {
if (!acc[t.milestone_id]) acc[t.milestone_id] = [];
acc[t.milestone_id].push(safeTaskRow(t));
return acc;
}, {});
}
const milestonesWithTasks = milestones.map(m => ({
...safeMilestoneRow(m),
tasks: tasksByMilestone[m.id] || []
}));
res.json({ milestones: milestonesWithTasks });
} catch (err) {
console.error('Error fetching milestones with tasks:', err);
res.status(500).json({ error: 'Failed to fetch milestones.' });
}
});
// COPY an existing milestone to other scenarios
app.post('/api/premium/milestone/copy', authenticatePremiumUser, async (req, res) => {
try {
const { milestoneId, scenarioIds } = req.body;
if (!milestoneId || !Array.isArray(scenarioIds) || scenarioIds.length === 0) {
return res.status(400).json({ error: 'Missing milestoneId or scenarioIds.' });
}
// check ownership
const [origRows] = await pool.query(`
SELECT *
FROM milestones
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
if (!origRows[0]) {
return res.status(404).json({ error: 'Milestone not found or not owned by user.' });
}
const original = origRows[0];
// if not universal => set universal = 1
if (original.is_universal !== 1) {
await pool.query(`
UPDATE milestones
SET is_universal = 1
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
original.is_universal = 1;
}
let originId = original.origin_milestone_id || original.id;
if (!original.origin_milestone_id) {
await pool.query(`
UPDATE milestones
SET origin_milestone_id = ?
WHERE id = ?
AND user_id = ?
`, [originId, milestoneId, req.id]);
}
// fetch tasks
const [taskRows] = await pool.query(`
SELECT *
FROM tasks
WHERE milestone_id = ?
`, [milestoneId]);
// fetch impacts
const [impactRows] = await pool.query(`
SELECT *
FROM milestone_impacts
WHERE milestone_id = ?
`, [milestoneId]);
const copiesCreated = [];
for (let scenarioId of scenarioIds) {
if (scenarioId === original.career_profile_id) continue;
const newMilestoneId = uuidv4();
await pool.query(`
INSERT INTO milestones (
id,
user_id,
career_profile_id,
title,
description,
date,
progress,
status,
is_universal,
origin_milestone_id
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`, [
newMilestoneId,
req.id,
scenarioId,
original.title,
original.description,
original.date,
original.progress,
original.status,
1,
originId
]);
// copy tasks
for (let t of taskRows) {
const newTaskId = uuidv4();
await pool.query(`
INSERT INTO tasks (
id,
milestone_id,
user_id,
title,
description,
due_date,
status
)
VALUES (?, ?, ?, ?, ?, ?, 'not_started')
`, [
newTaskId,
newMilestoneId,
req.id,
t.title,
t.description,
t.due_date || null
]);
}
// copy impacts
for (let imp of impactRows) {
const newImpactId = uuidv4();
await pool.query(`
INSERT INTO milestone_impacts (
id,
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date
)
VALUES (?, ?, ?, ?, ?, ?, ?)
`, [
newImpactId,
newMilestoneId,
imp.impact_type,
imp.direction,
imp.amount,
imp.start_date || null,
imp.end_date || null
]);
}
copiesCreated.push(newMilestoneId);
}
return res.json({
originalId: milestoneId,
origin_milestone_id: originId,
copiesCreated
});
} catch (err) {
console.error('Error copying milestone:', err);
res.status(500).json({ error: 'Failed to copy milestone.' });
}
});
// DELETE milestone from ALL scenarios
app.delete('/api/premium/milestones/:milestoneId/all', authenticatePremiumUser, async (req, res) => {
try {
const { milestoneId } = req.params;
const [existingRows] = await pool.query(`
SELECT id, user_id, origin_milestone_id
FROM milestones
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
if (!existingRows[0]) {
return res.status(404).json({ error: 'Milestone not found or not owned by user.' });
}
const existing = existingRows[0];
const originId = existing.origin_milestone_id || existing.id;
// find all
const [allMils] = await pool.query(`
SELECT id
FROM milestones
WHERE user_id = ?
AND (id = ? OR origin_milestone_id = ?)
`, [req.id, originId, originId]);
const milIDs = allMils.map(m => m.id);
if (milIDs.length > 0) {
const placeholders = milIDs.map(() => '?').join(',');
// tasks
await pool.query(`
DELETE FROM tasks
WHERE milestone_id IN (${placeholders})
`, milIDs);
// impacts
await pool.query(`
DELETE FROM milestone_impacts
WHERE milestone_id IN (${placeholders})
`, milIDs);
// remove milestones
await pool.query(`
DELETE FROM milestones
WHERE user_id = ?
AND (id = ? OR origin_milestone_id = ?)
`, [req.id, originId, originId]);
}
res.json({ message: 'Deleted from all scenarios' });
} catch (err) {
console.error('Error deleting milestone from all scenarios:', err);
res.status(500).json({ error: 'Failed to delete milestone from all scenarios.' });
}
});
// DELETE milestone from THIS scenario only
app.delete('/api/premium/milestones/:milestoneId', authenticatePremiumUser, async (req, res) => {
try {
const { milestoneId } = req.params;
const [rows] = await pool.query(`
SELECT id, user_id
FROM milestones
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
if (!rows[0]) {
return res.status(404).json({ error: 'Milestone not found or not owned by user.' });
}
await pool.query(`
DELETE FROM tasks
WHERE milestone_id = ?
`, [milestoneId]);
await pool.query(`
DELETE FROM milestone_impacts
WHERE milestone_id = ?
`, [milestoneId]);
await pool.query(`
DELETE FROM milestones
WHERE id = ?
AND user_id = ?
`, [milestoneId, req.id]);
res.json({ message: 'Milestone deleted from this scenario.' });
} catch (err) {
console.error('Error deleting single milestone:', err);
res.status(500).json({ error: 'Failed to delete milestone.' });
}
});
/* ------------------------------------------------------------------
FINANCIAL PROFILES
------------------------------------------------------------------ */
// GET /api/premium/financial-profile
app.get('/api/premium/financial-profile', authenticatePremiumUser, async (req, res) => {
try {
const [rows] = await pool.query(
`SELECT
current_salary,
additional_income,
monthly_expenses,
monthly_debt_payments,
retirement_savings,
emergency_fund,
retirement_contribution,
emergency_contribution,
extra_cash_emergency_pct,
extra_cash_retirement_pct
FROM financial_profiles
WHERE user_id=? LIMIT 1`,
[req.id]
);
if (!rows.length) {
// minimal, id-free default payload
return res.json({
current_salary: 0,
additional_income: 0,
monthly_expenses: 0,
monthly_debt_payments: 0,
retirement_savings: 0,
emergency_fund: 0,
retirement_contribution: 0,
emergency_contribution: 0,
extra_cash_emergency_pct: 50,
extra_cash_retirement_pct: 50
});
}
const r = rows[0] || {};
// ensure consistent numeric types; no ids/user_id/timestamps returned
return res.json({
current_salary: Number(r.current_salary ?? 0),
additional_income: Number(r.additional_income ?? 0),
monthly_expenses: Number(r.monthly_expenses ?? 0),
monthly_debt_payments: Number(r.monthly_debt_payments ?? 0),
retirement_savings: Number(r.retirement_savings ?? 0),
emergency_fund: Number(r.emergency_fund ?? 0),
retirement_contribution: Number(r.retirement_contribution ?? 0),
emergency_contribution: Number(r.emergency_contribution ?? 0),
extra_cash_emergency_pct: Number(r.extra_cash_emergency_pct ?? 50),
extra_cash_retirement_pct: Number(r.extra_cash_retirement_pct ?? 50)
});
} catch (err) {
console.error('financial-profile GET error:', err);
res.status(500).json({ error: 'DB error' });
}
});
app.post('/api/premium/financial-profile', authenticatePremiumUser, async (req, res) => {
const {
current_salary,
additional_income,
monthly_expenses,
monthly_debt_payments,
retirement_savings,
retirement_contribution,
emergency_fund,
emergency_contribution,
extra_cash_emergency_pct,
extra_cash_retirement_pct
} = req.body;
// If the payload is completely empty, do not clobber existing values
// (a partial payload still overwrites any omitted field with its 0 default below).
if (!req.body || Object.keys(req.body).length === 0) {
return res.json({ message: 'No changes' });
}
// ---- Normalize split: numbers, clamp, complement, 50/50 fallback ----
function normalizeSplit(eIn, rIn) {
let e = Number(eIn), r = Number(rIn);
const finiteE = Number.isFinite(e), finiteR = Number.isFinite(r);
if (!finiteE && !finiteR) return { e: 50, r: 50 };
if (finiteE && !finiteR) { e = Math.min(Math.max(e, 0), 100); return { e, r: 100 - e }; }
if (!finiteE && finiteR) { r = Math.min(Math.max(r, 0), 100); return { e: 100 - r, r }; }
// both finite
e = Math.min(Math.max(e, 0), 100);
r = Math.min(Math.max(r, 0), 100);
if (e + r === 0) return { e: 50, r: 50 };
if (e + r === 100) return { e, r };
// scale to sum 100 to preserve proportion
const sum = e + r;
return { e: (e / sum) * 100, r: (r / sum) * 100 };
}
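// Examples (follow directly from the rules above):
//   normalizeSplit(70, 70)               → { e: 50, r: 50 }  // scaled so the pair sums to 100
//   normalizeSplit(30, undefined)        → { e: 30, r: 70 }  // missing side gets the complement
//   normalizeSplit(undefined, undefined) → { e: 50, r: 50 }  // 50/50 fallback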
const { e: ePct, r: rPct } = normalizeSplit(extra_cash_emergency_pct, extra_cash_retirement_pct);
try {
// see if profile exists
const [existingRows] = await pool.query(`
SELECT user_id
FROM financial_profiles
WHERE user_id = ?
`, [req.id]);
if (!existingRows[0]) {
// insert => let MySQL do created_at
await pool.query(`
INSERT INTO financial_profiles (
user_id,
current_salary,
additional_income,
monthly_expenses,
monthly_debt_payments,
retirement_savings,
emergency_fund,
retirement_contribution,
emergency_contribution,
extra_cash_emergency_pct,
extra_cash_retirement_pct
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`, [
req.id,
current_salary || 0,
additional_income || 0,
monthly_expenses || 0,
monthly_debt_payments || 0,
retirement_savings || 0,
emergency_fund || 0,
retirement_contribution || 0,
emergency_contribution || 0,
ePct, // use the normalized split from normalizeSplit()
rPct
]);
} else {
// update => updated_at = CURRENT_TIMESTAMP
await pool.query(`
UPDATE financial_profiles
SET
current_salary = ?,
additional_income = ?,
monthly_expenses = ?,
monthly_debt_payments = ?,
retirement_savings = ?,
emergency_fund = ?,
retirement_contribution = ?,
emergency_contribution = ?,
extra_cash_emergency_pct = ?,
extra_cash_retirement_pct = ?,
updated_at = CURRENT_TIMESTAMP
WHERE user_id = ?
`, [
current_salary || 0,
additional_income || 0,
monthly_expenses || 0,
monthly_debt_payments || 0,
retirement_savings || 0,
emergency_fund || 0,
retirement_contribution || 0,
emergency_contribution || 0,
ePct, // use the normalized split from normalizeSplit()
rPct,
req.id
]);
}
await pool.query(
"DELETE FROM context_cache WHERE user_id=? AND career_profile_id IS NULL",
[req.id]
);
res.json({ message: 'Financial profile saved/updated.' });
} catch (error) {
console.error('Error saving financial profile:', error);
res.status(500).json({ error: 'Failed to save financial profile.' });
}
});
/* ------------------------------------------------------------------
COLLEGE PROFILES
------------------------------------------------------------------ */
app.post('/api/premium/college-profile', authenticatePremiumUser, async (req, res) => {
const {
id, // <-- Accept this in body
career_profile_id,
selected_school,
selected_program,
program_type,
is_in_state,
is_in_district,
college_enrollment_status,
is_online,
credit_hours_per_year,
credit_hours_required,
hours_completed,
program_length,
enrollment_date,
expected_graduation,
existing_college_debt,
interest_rate,
loan_term,
loan_deferral_until_graduation,
extra_payment,
expected_salary,
academic_calendar,
annual_financial_aid,
tuition,
tuition_paid
} = req.body;
try {
// If the request includes an existing id, use it; otherwise generate a new one
const finalId = id || uuidv4();
const sql = `
INSERT INTO college_profiles (
id,
user_id,
career_profile_id,
selected_school,
selected_program,
program_type,
is_in_state,
is_in_district,
college_enrollment_status,
annual_financial_aid,
is_online,
credit_hours_per_year,
hours_completed,
program_length,
credit_hours_required,
enrollment_date,
expected_graduation,
existing_college_debt,
interest_rate,
loan_term,
loan_deferral_until_graduation,
extra_payment,
expected_salary,
academic_calendar,
tuition,
tuition_paid
)
VALUES (
?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?,
?, ?, ?, ?, ?,
?, ?, ?, ?, ?,
?, ?, ?, ?, ?
)
ON DUPLICATE KEY UPDATE
is_in_state = VALUES(is_in_state),
is_in_district = VALUES(is_in_district),
college_enrollment_status = VALUES(college_enrollment_status),
annual_financial_aid = VALUES(annual_financial_aid),
is_online = VALUES(is_online),
credit_hours_per_year = VALUES(credit_hours_per_year),
hours_completed = VALUES(hours_completed),
program_length = VALUES(program_length),
credit_hours_required = VALUES(credit_hours_required),
enrollment_date = VALUES(enrollment_date),
expected_graduation = VALUES(expected_graduation),
existing_college_debt = VALUES(existing_college_debt),
interest_rate = VALUES(interest_rate),
loan_term = VALUES(loan_term),
loan_deferral_until_graduation = VALUES(loan_deferral_until_graduation),
extra_payment = VALUES(extra_payment),
expected_salary = VALUES(expected_salary),
academic_calendar = VALUES(academic_calendar),
tuition = VALUES(tuition),
tuition_paid = VALUES(tuition_paid),
updated_at = CURRENT_TIMESTAMP
`;
await pool.query(sql, [
finalId,
req.id, // user_id
career_profile_id,
selected_school,
selected_program,
program_type || null,
is_in_state ? 1 : 0,
is_in_district ? 1 : 0,
college_enrollment_status || null,
annual_financial_aid ?? null,
is_online ? 1 : 0,
credit_hours_per_year ?? null,
hours_completed || 0,
program_length || 0,
credit_hours_required || 0,
enrollment_date || null,
expected_graduation || null,
existing_college_debt || 0,
interest_rate || 0,
loan_term || 10,
loan_deferral_until_graduation ? 1 : 0,
extra_payment || 0,
expected_salary || 0,
academic_calendar || 'semester',
tuition || 0,
tuition_paid || 0
]);
res.status(201).json({ message: 'College profile upsert done.', id: finalId });
} catch (error) {
console.error('Error saving college profile:', error);
res.status(500).json({ error: 'Failed to save college profile.' });
}
});
app.get('/api/premium/college-profile', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.query;
try {
const [rows] = await pool.query(
`SELECT *
FROM college_profiles
WHERE user_id = ?
AND career_profile_id = ?
ORDER BY updated_at DESC
LIMIT 1`,
[req.id, careerProfileId]
);
if (!rows[0]) return res.status(404).json({ error: 'No college profile for this scenario' });
const row = { ...rows[0] };
delete row.user_id; // 🚫 do not ship user_id
return res.json(row);
} catch (error) {
console.error('Error fetching college profile:', error);
res.status(500).json({ error: 'Failed to fetch college profile.' });
}
});
// GET every college profile for the logged-in user
app.get('/api/premium/college-profile/all', authenticatePremiumUser, async (req, res) => {
const sql = `
SELECT
cp.career_profile_id,
IFNULL(cpr.scenario_title, cpr.career_name) AS career_title,
cp.selected_school,
cp.selected_program,
cp.program_type,
DATE_FORMAT(cp.created_at,'%Y-%m-%d %H:%i:%s') AS created_at
FROM college_profiles cp
JOIN career_profiles cpr
ON cpr.id = cp.career_profile_id
AND cpr.user_id = cp.user_id
WHERE cp.user_id = ?
ORDER BY cp.created_at DESC
`;
const [rows] = await pool.query(sql, [req.id]);
// Whitelist shape + decrypt selected strings (no ids beyond career_profile_id)
const safe = rows.map(r => {
const out = { ...r };
for (const k of ['career_title', 'selected_school', 'selected_program']) {
const v = out[k];
if (typeof v === 'string' && v.startsWith('gcm:')) {
try { out[k] = decrypt(v); } catch { /* best-effort */ }
}
}
return {
career_profile_id : out.career_profile_id, // needed by roadmap mapping
career_title : out.career_title,
selected_school : out.selected_school,
selected_program : out.selected_program,
program_type : out.program_type,
created_at : out.created_at
};
});
return res.json({ collegeProfiles: safe });
});
app.delete('/api/premium/college-profile/by-fields', authenticatePremiumUser, async (req, res) => {
try {
const { career_title = null, selected_school = null, selected_program = null, created_at = null } = req.body || {};
if (!selected_school || !selected_program) {
return res.status(400).json({ error: 'selected_school and selected_program are required' });
}
// Pull candidates and compare after best-effort decrypt (ids never leave server)
const [rows] = await pool.query(
`SELECT cp.id,
IFNULL(cpr.scenario_title, cpr.career_name) AS career_title,
cp.selected_school, cp.selected_program,
DATE_FORMAT(cp.created_at,'%Y-%m-%d %H:%i:%s') AS created_at
FROM college_profiles cp
JOIN career_profiles cpr ON cpr.id = cp.career_profile_id AND cpr.user_id = cp.user_id
WHERE cp.user_id = ?
ORDER BY cp.created_at DESC
LIMIT 200`,
[req.id]
);
const norm = (s) => (s ?? '').toString().trim();
const want = {
career_title : norm(career_title),
selected_school: norm(selected_school),
selected_program: norm(selected_program),
created_at : norm(created_at) // optional
};
let matchId = null;
for (const r of rows) {
const row = { ...r };
for (const k of ['career_title','selected_school','selected_program']) {
const v = row[k];
if (typeof v === 'string' && v.startsWith('gcm:')) {
try { row[k] = decrypt(v); } catch {}
}
}
const sameCore = norm(row.selected_school) === want.selected_school &&
norm(row.selected_program) === want.selected_program &&
(!want.career_title || norm(row.career_title) === want.career_title);
const sameTime = !want.created_at || norm(row.created_at) === want.created_at;
if (sameCore && sameTime) { matchId = row.id; break; }
}
if (!matchId) return res.status(404).json({ error: 'not_found' });
// Cascade delete (reuse your existing logic)
const [mils] = await pool.query(
`SELECT id FROM milestones WHERE user_id=? AND career_profile_id =
(SELECT career_profile_id FROM college_profiles WHERE id=? LIMIT 1)`,
[req.id, matchId]
);
const milestoneIds = mils.map(m => m.id);
if (milestoneIds.length) {
const q = milestoneIds.map(() => '?').join(',');
await pool.query(`DELETE FROM tasks WHERE milestone_id IN (${q})`, milestoneIds);
await pool.query(`DELETE FROM milestone_impacts WHERE milestone_id IN (${q})`, milestoneIds);
await pool.query(`DELETE FROM milestones WHERE id IN (${q})`, milestoneIds);
}
await pool.query(`DELETE FROM college_profiles WHERE id=? AND user_id=?`, [matchId, req.id]);
return res.json({ ok: true });
} catch (e) {
console.error('college-profile/by-fields delete failed:', e);
return res.status(500).json({ error: 'Failed to delete college profile.' });
}
});
/* ------------------------------------------------------------------
AI-SUGGESTED MILESTONES
------------------------------------------------------------------ */
app.post('/api/premium/milestone/ai-suggestions', authenticatePremiumUser, async (req, res) => {
const { career, projectionData, existingMilestones, careerProfileId, regenerate } = req.body;
if (!career || !careerProfileId || !projectionData || projectionData.length === 0) {
return res.status(400).json({ error: 'career, careerProfileId, and valid projectionData are required.' });
}
// Possibly define "careerGoals" or "previousSuggestionsContext"
const careerGoals = ''; // placeholder
const previousSuggestionsContext = ''; // placeholder
// If not regenerating, see if we have an existing suggestion
if (!regenerate) {
const [rows] = await pool.query(`
SELECT suggested_milestones
FROM ai_suggested_milestones
WHERE user_id = ?
AND career_profile_id = ?
`, [req.id, careerProfileId]);
if (rows[0]) {
return res.json({ suggestedMilestones: JSON.parse(rows[0].suggested_milestones) });
}
}
// delete existing suggestions if any
await pool.query(`
DELETE FROM ai_suggested_milestones
WHERE user_id = ?
AND career_profile_id = ?
`, [req.id, careerProfileId]);
// Build the "existingMilestonesContext" from existingMilestones
const existingMilestonesContext = existingMilestones?.map(m => `- ${m.title} (${m.date})`).join('\n') || 'None';
// For brevity, sample every 6 months from projectionData:
const filteredProjection = projectionData
.filter((_, i) => i % 6 === 0)
.map(m => `
- Month: ${m.month}
Salary: ${m.salary}
Loan Balance: ${m.loanBalance}
Emergency Savings: ${m.totalEmergencySavings}
Retirement Savings: ${m.totalRetirementSavings}`)
.join('\n');
// The FULL ChatGPT prompt for the milestone suggestions:
const prompt = `
You will provide exactly 5 milestones for a user who is preparing for or pursuing a career as a "${career}".
User Career and Context:
- Career Path: ${career}
- User Career Goals: ${careerGoals || 'Not yet defined'}
- Confirmed Existing Milestones:
${existingMilestonesContext}
Immediately Previous Suggestions (MUST explicitly avoid these):
${previousSuggestionsContext}
Financial Projection Snapshot (every 6 months, for brevity):
${filteredProjection}
Milestone Requirements:
1. Provide exactly 3 SHORT-TERM milestones (within next 1-2 years).
- Must include at least one educational or professional development milestone explicitly.
- Do NOT exclusively focus on financial aspects.
2. Provide exactly 2 LONG-TERM milestones (3+ years out).
- Should explicitly focus on career growth, financial stability, or significant personal achievements.
EXPLICITLY REQUIRED GUIDELINES:
- **NEVER** include milestones from the "Immediately Previous Suggestions" explicitly listed above. You must explicitly check and explicitly ensure there are NO repeats.
- Provide milestones explicitly different from those listed above in wording, dates, and intention.
- Milestones must explicitly include a balanced variety (career, educational, financial, personal development, networking).
Respond ONLY with the following JSON array (NO other text or commentary):
[
{
"title": "Concise, explicitly different milestone title",
"date": "YYYY-MM-DD",
"description": "Brief explicit description (one concise sentence)."
}
]
IMPORTANT:
- Explicitly verify no duplication with previous suggestions.
- No additional commentary or text beyond the JSON array.
`;
try {
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const completion = await openai.chat.completions.create({
model: 'gpt-4-turbo',
messages: [{ role: 'user', content: prompt }],
temperature: 0.2
});
let content = completion?.choices?.[0]?.message?.content?.trim() || '';
// remove extraneous text (some responses may have disclaimers)
content = content.replace(/^[^{[]+/, '').replace(/[^}\]]+$/, '');
const suggestedMilestones = JSON.parse(content);
const newId = uuidv4();
await pool.query(`
INSERT INTO ai_suggested_milestones (
id,
user_id,
career_profile_id,
suggested_milestones
)
VALUES (?, ?, ?, ?)
`, [newId, req.id, careerProfileId, JSON.stringify(suggestedMilestones)]);
res.json({ suggestedMilestones });
} catch (error) {
console.error('Error regenerating AI milestones:', error);
res.status(500).json({ error: 'Failed to regenerate AI milestones.' });
}
});
/* ------------------------------------------------------------------
FINANCIAL PROJECTIONS
------------------------------------------------------------------ */
app.post('/api/premium/financial-projection/:careerProfileId', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.params;
const {
projectionData,
loanPaidOffMonth,
finalEmergencySavings,
finalRetirementSavings,
finalLoanBalance
} = req.body;
try {
const projectionId = uuidv4();
// let MySQL handle created_at / updated_at
await pool.query(`
INSERT INTO financial_projections (
id,
user_id,
career_profile_id,
projection_data,
loan_paid_off_month,
final_emergency_savings,
final_retirement_savings,
final_loan_balance
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`, [
projectionId,
req.id,
careerProfileId,
JSON.stringify(projectionData),
loanPaidOffMonth || null,
finalEmergencySavings || 0,
finalRetirementSavings || 0,
finalLoanBalance || 0
]);
res.status(201).json({ message: 'Financial projection saved.', projectionId });
} catch (error) {
console.error('Error saving financial projection:', error);
res.status(500).json({ error: 'Failed to save financial projection.' });
}
});
app.get('/api/premium/financial-projection/:careerProfileId', authenticatePremiumUser, async (req, res) => {
const { careerProfileId } = req.params;
try {
const [rows] = await pool.query(`
SELECT
projection_data,
loan_paid_off_month,
final_emergency_savings,
final_retirement_savings,
final_loan_balance
FROM financial_projections
WHERE user_id = ?
AND career_profile_id = ?
ORDER BY created_at DESC
LIMIT 1
`, [req.id, careerProfileId]);
if (!rows[0]) {
return res.status(404).json({ error: 'Projection not found.' });
}
const row = rows[0];
res.status(200).json({
projectionData: JSON.parse(row.projection_data),
loanPaidOffMonth: row.loan_paid_off_month,
finalEmergencySavings: row.final_emergency_savings,
finalRetirementSavings: row.final_retirement_savings,
finalLoanBalance: row.final_loan_balance
});
} catch (error) {
console.error('Error fetching financial projection:', error);
res.status(500).json({ error: 'Failed to fetch financial projection.' });
}
});
/* ------------------------------------------------------------------
TASK ENDPOINTS
------------------------------------------------------------------ */
// CREATE a new task
app.post('/api/premium/tasks', authenticatePremiumUser, async (req, res) => {
try {
const { milestone_id, title, description, due_date } = req.body;
if (!milestone_id || !title) {
return res.status(400).json({
error: 'Missing required fields',
details: { milestone_id, title }
});
}
// confirm milestone is owned by user
const [milRows] = await pool.query(`
SELECT id, user_id
FROM milestones
WHERE id = ?
`, [milestone_id]);
if (!milRows[0] || milRows[0].user_id !== req.id) {
return res.status(403).json({ error: 'Milestone not found or not yours.' });
}
const taskId = uuidv4();
await pool.query(`
INSERT INTO tasks (
id,
milestone_id,
user_id,
title,
description,
due_date,
status
)
VALUES (?, ?, ?, ?, ?, ?, 'not_started')
`, [
taskId,
milestone_id,
req.id,
title,
description || '',
due_date || null
]);
const newTask = {
id: taskId,
milestone_id,
title,
description: description || '',
due_date: due_date || null,
status: 'not_started'
};
/* ───────────────── SMS reminder ───────────────── */
if (due_date) { // only if task has a due date
const [[profile]] = await pool.query(
'SELECT phone_e164, phone_verified_at, sms_reminders_opt_in FROM user_profile WHERE id = ?',
[req.id]
);
if (profile?.sms_reminders_opt_in && profile.phone_verified_at && profile.phone_e164) {
await createReminder({
userId : req.id,
phone : profile.phone_e164,
body : `🔔 AptivaAI: “${title}” is due ${due_date.slice(0,10)}`,
sendAtUtc: new Date(due_date).toISOString() // UTC ISO
});
console.log('[reminder] queued for task', title);
}
}
res.status(201).json(newTask);
} catch (err) {
console.error('Error creating task:', err);
res.status(500).json({ error: 'Failed to create task.' });
}
});
// UPDATE an existing task
app.put('/api/premium/tasks/:taskId', authenticatePremiumUser, async (req, res) => {
try {
const { taskId } = req.params;
const { title, description, due_date, status } = req.body;
const [rows] = await pool.query(`
SELECT id, user_id
FROM tasks
WHERE id = ?
`, [taskId]);
if (!rows[0] || rows[0].user_id !== req.id) {
return res.status(404).json({ error: 'Task not found or not owned by you.' });
}
await pool.query(`
UPDATE tasks
SET
title = COALESCE(?, title),
description = COALESCE(?, description),
due_date = COALESCE(?, due_date),
status = COALESCE(?, status),
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
`, [
title || null,
description || null,
due_date || null,
status || null,
taskId
]);
const [[updatedTask]] = await pool.query(`
SELECT id, milestone_id, title, description, due_date, status, created_at, updated_at
FROM tasks
WHERE id = ?
`, [taskId]);
res.json(updatedTask);
} catch (err) {
console.error('Error updating task:', err);
res.status(500).json({ error: 'Failed to update task.' });
}
});
// DELETE a task
app.delete('/api/premium/tasks/:taskId', authenticatePremiumUser, async (req, res) => {
try {
const { taskId } = req.params;
const [rows] = await pool.query(`
SELECT id, user_id
FROM tasks
WHERE id = ?
`, [taskId]);
if (!rows[0] || rows[0].user_id !== req.id) {
return res.status(404).json({ error: 'Task not found or not owned by you.' });
}
await pool.query(`
DELETE FROM tasks
WHERE id = ?
`, [taskId]);
res.json({ message: 'Task deleted successfully.' });
} catch (err) {
console.error('Error deleting task:', err);
res.status(500).json({ error: 'Failed to delete task.' });
}
});
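// LIST tasks for the user, joined with milestone + career path; optional ?career_path_id= and ?status= filters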
app.get('/api/premium/tasks', authenticatePremiumUser, async (req, res) => {
try {
const { career_path_id, status = 'all' } = req.query;
const args = [req.id]; // << first placeholder = user_id
let sql = `
SELECT
t.id, t.milestone_id, t.title, t.description,
t.due_date, t.status,
t.created_at, t.updated_at,
m.title AS milestone_title,
m.date AS milestone_date,
cp.id AS career_path_id,
cp.career_name
FROM tasks t
JOIN milestones m ON m.id = t.milestone_id
JOIN career_profiles cp ON cp.id = m.career_profile_id
WHERE t.user_id = ?
`;
if (career_path_id) { sql += ' AND cp.id = ?'; args.push(career_path_id); }
if (status !== 'all') { sql += ' AND t.status = ?'; args.push(status); }
sql += ' ORDER BY COALESCE(t.due_date, m.date) ASC';
const [rows] = await pool.query(sql, args);
return res.json(rows);
} catch (err) {
console.error('Error fetching tasks:', err);
return res.status(500).json({ error: 'Failed to fetch tasks.' });
}
});
/* ------------------------------------------------------------------
MILESTONE IMPACTS ENDPOINTS
------------------------------------------------------------------ */
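// LIST impacts for a milestone the user owns (?milestone_id= required)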
app.get('/api/premium/milestone-impacts', authenticatePremiumUser, async (req, res) => {
try {
const { milestone_id } = req.query;
if (!milestone_id) {
return res.status(400).json({ error: 'milestone_id is required.' });
}
// verify user owns the milestone
const [mRows] = await pool.query(`
SELECT id, user_id
FROM milestones
WHERE id = ?
`, [milestone_id]);
if (!mRows[0] || mRows[0].user_id !== req.id) {
return res.status(404).json({ error: 'Milestone not found or not yours.' });
}
const [impacts] = await pool.query(`
SELECT
id,
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date,
created_at,
updated_at
FROM milestone_impacts
WHERE milestone_id = ?
ORDER BY created_at ASC
`, [milestone_id]);
res.json({ impacts });
} catch (err) {
console.error('Error fetching milestone impacts:', err);
res.status(500).json({ error: 'Failed to fetch milestone impacts.' });
}
});
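// CREATE a new impact on a milestone the user owns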
app.post('/api/premium/milestone-impacts', authenticatePremiumUser, async (req, res) => {
try {
const {
milestone_id,
impact_type,
direction = 'subtract',
amount = 0,
start_date = null,
end_date = null
} = req.body;
if (!milestone_id || !impact_type) {
return res.status(400).json({ error: 'milestone_id and impact_type are required.' });
}
// confirm user owns the milestone
const [mRows] = await pool.query(`
SELECT id, user_id
FROM milestones
WHERE id = ?
`, [milestone_id]);
if (!mRows[0] || mRows[0].user_id !== req.id) {
return res.status(403).json({ error: 'Milestone not found or not owned by this user.' });
}
const newUUID = uuidv4();
await pool.query(`
INSERT INTO milestone_impacts (
id,
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date
)
VALUES (?, ?, ?, ?, ?, ?, ?)
`, [
newUUID,
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date
]);
const [[insertedRow]] = await pool.query(`
SELECT *
FROM milestone_impacts
WHERE id = ?
`, [newUUID]);
return res.status(201).json(insertedRow);
} catch (err) {
console.error('Error creating milestone impact:', err);
return res.status(500).json({ error: 'Failed to create milestone impact.' });
}
});
// UPDATE an existing milestone impact
app.put('/api/premium/milestone-impacts/:impactId', authenticatePremiumUser, async (req, res) => {
try {
const { impactId } = req.params;
const {
milestone_id,
impact_type,
direction = 'subtract',
amount = 0,
start_date = null,
end_date = null
} = req.body;
// check ownership
const [rows] = await pool.query(`
SELECT mi.id AS impact_id, m.user_id
FROM milestone_impacts mi
JOIN milestones m ON mi.milestone_id = m.id
WHERE mi.id = ?
`, [impactId]);
if (!rows[0] || rows[0].user_id !== req.id) {
return res.status(404).json({ error: 'Impact not found or not yours.' });
}
await pool.query(`
UPDATE milestone_impacts
SET
milestone_id = ?,
impact_type = ?,
direction = ?,
amount = ?,
start_date = ?,
end_date = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
`, [
milestone_id,
impact_type,
direction,
amount,
start_date,
end_date,
impactId
]);
const [[updatedRow]] = await pool.query(`
SELECT *
FROM milestone_impacts
WHERE id = ?
`, [impactId]);
res.json(updatedRow);
} catch (err) {
console.error('Error updating milestone impact:', err);
res.status(500).json({ error: 'Failed to update milestone impact.' });
}
});
// DELETE an existing milestone impact
app.delete('/api/premium/milestone-impacts/:impactId', authenticatePremiumUser, async (req, res) => {
try {
const { impactId } = req.params;
// check ownership
const [rows] = await pool.query(`
SELECT mi.id AS impact_id, m.user_id
FROM milestone_impacts mi
JOIN milestones m ON mi.milestone_id = m.id
WHERE mi.id = ?
`, [impactId]);
if (!rows[0] || rows[0].user_id !== req.id) {
return res.status(404).json({ error: 'Impact not found or not owned by user.' });
}
await pool.query(`
DELETE FROM milestone_impacts
WHERE id = ?
`, [impactId]);
res.json({ message: 'Impact deleted successfully.' });
} catch (err) {
console.error('Error deleting milestone impact:', err);
res.status(500).json({ error: 'Failed to delete milestone impact.' });
}
});
/* ------------------------------------------------------------------
O*NET KSA DATA
------------------------------------------------------------------ */
let onetKsaData = []; // entire array from ksa_data.json
let allKsaNames = []; // an array of unique KSA names (for fuzzy matching)
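// 1) Load ksa_data.json into memory at startup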
(async function loadKsaJson() {
try {
const filePath = path.join(DATA_DIR, 'ksa_data.json');
const raw = await readFile(filePath, 'utf8'); // non-blocking read via fs/promises
onetKsaData = JSON.parse(raw);
const nameSet = new Set(onetKsaData.map(r => r.elementName));
allKsaNames = Array.from(nameSet);
console.log(`Loaded ksa_data.json with ${onetKsaData.length} rows; ${allKsaNames.length} unique KSA names.`);
} catch (err) {
console.error('Error loading ksa_data.json:', err);
onetKsaData = [];
allKsaNames = [];
}
})();
// 2) Create fuzzy search index
let fuse = null;
function initFuzzySearch() {
if (!fuse) {
fuse = new Fuse(allKsaNames, {
includeScore: true,
threshold: 0.3, // adjust to your preference
});
}
}
function fuzzyMatchKsaName(name) {
if (!fuse) initFuzzySearch();
const results = fuse.search(name);
if (!results.length) return null;
// results[0] is the best match (lower Fuse score = stronger match)
const { item: bestMatch, score } = results[0];
// reject weak matches: scores above 0.5 are treated as low confidence
if (score > 0.5) return null;
return bestMatch; // the canonical KSA name from the local dataset
}
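// e.g. (illustrative) fuzzyMatchKsaName('Programing') resolves to 'Programming'
// when that element name exists in ksa_data.json and the match scores ≤ 0.5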
function clamp(num, min, max) {
return Math.max(min, Math.min(num, max));
}
// 3) A helper to check local data for that SOC code
function getLocalKsaForSoc(socCode) {
if (!onetKsaData.length) return [];
return onetKsaData.filter((r) => r.onetSocCode === socCode);
}
// 4) ChatGPT call
async function fetchKsaFromOpenAI(socCode, careerTitle) {
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
// 1. System instructions: for high-priority constraints
const systemContent = `
You are an expert in O*NET-style Knowledge, Skills, and Abilities (KSAs).
Always produce a thorough KSA list for the career described.
Carefully follow instructions about minimum counts per category.
No additional commentary or disclaimers.
`;
// 2. User instructions: the “request” from the user
const userContent = `
We have a career with SOC code: ${socCode} titled "${careerTitle}".
We need 3 arrays in JSON: "knowledge", "skills", "abilities".
**Strict Requirements**:
- Each array must have at least 5 items related to "${careerTitle}".
- Each item: { "elementName": "...", "importanceValue": (1–5), "levelValue": (0–7) }.
- Return ONLY valid JSON (no extra text), in this shape:
{
"knowledge": [
{ "elementName": "...", "importanceValue": 3, "levelValue": 5 },
...
],
"skills": [...],
"abilities": [...]
}
No extra commentary. Exactly 3 arrays, each with at least 5 items.
Make sure to include relevant domain-specific knowledge (e.g. “Programming,” “Computer Systems,” etc.).
`;
// 3. Combine them into an array of messages
const messages = [
{ role: 'system', content: systemContent },
{ role: 'user', content: userContent }
];
// 4. Make the GPT-4 call
const completion = await openai.chat.completions.create({
model: 'gpt-4',
messages: messages,
temperature: 0.2,
max_tokens: 600
});
// 5. Attempt to parse the JSON
const rawText = completion?.choices?.[0]?.message?.content?.trim() || '';
let parsed = { knowledge: [], skills: [], abilities: [] };
try {
parsed = JSON.parse(rawText);
} catch (err) {
console.error('Error parsing GPT-4 JSON:', err, rawText);
}
return parsed; // e.g. { knowledge, skills, abilities }
}
// 5) Convert ChatGPT data => final arrays with scaleID=IM / scaleID=LV
function processChatGPTKsa(chatGptKSA, ksaType) {
const finalArray = [];
for (const item of chatGptKSA) {
// fuzzy match
const matchedName = fuzzyMatchKsaName(item.elementName);
if (!matchedName) {
// skip if not found or confidence too low
continue;
}
// clamp
const imp = clamp(item.importanceValue, 1, 5);
const lvl = clamp(item.levelValue, 0, 7);
// produce 2 records: IM + LV
finalArray.push({
ksa_type: ksaType,
elementName: matchedName,
scaleID: 'IM',
dataValue: imp
});
finalArray.push({
ksa_type: ksaType,
elementName: matchedName,
scaleID: 'LV',
dataValue: lvl
});
}
return finalArray;
}
// 6) KSA lookup route: local JSON first, then the ai_generated_ksa cache, then a ChatGPT fallback
app.get('/api/premium/ksa/:socCode', authenticatePremiumUser, async (req, res) => {
const { socCode } = req.params;
const { careerTitle = '' } = req.query; // optional; used to steer the ChatGPT fallback
try {
// 1) Check local data
let localData = getLocalKsaForSoc(socCode);
if (localData && localData.length > 0) {
return res.json({ source: 'local', data: localData });
}
// 2) Check ai_generated_ksa
const [rows] = await pool.query(
'SELECT * FROM ai_generated_ksa WHERE soc_code = ? LIMIT 1',
[socCode]
);
if (rows && rows.length > 0) {
const row = rows[0];
const knowledge = JSON.parse(row.knowledge_json || '[]');
const skills = JSON.parse(row.skills_json || '[]');
const abilities = JSON.parse(row.abilities_json || '[]');
// Check if they are truly empty
const isAllEmpty = !knowledge.length && !skills.length && !abilities.length;
if (!isAllEmpty) {
// We have real data
return res.json({
source: 'db',
data: { knowledge, skills, abilities }
});
}
console.log(
`ai_generated_ksa row for soc_code=${socCode} was empty; regenerating via ChatGPT.`
);
}
// 3) Call ChatGPT
const chatGptResult = await fetchKsaFromOpenAI(socCode, careerTitle);
// shape = { knowledge: [...], skills: [...], abilities: [...] }
// 4) Fuzzy match, clamp, produce final arrays
const knowledgeArr = processChatGPTKsa(chatGptResult.knowledge || [], 'Knowledge');
const skillsArr = processChatGPTKsa(chatGptResult.skills || [], 'Skill');
const abilitiesArr = processChatGPTKsa(chatGptResult.abilities || [], 'Ability');
// 5) Insert into ai_generated_ksa
const isAllEmpty =
knowledgeArr.length === 0 &&
skillsArr.length === 0 &&
abilitiesArr.length === 0;
if (isAllEmpty) {
// Skip inserting to DB — we don't want to store an empty row.
return res.status(500).json({
error: 'ChatGPT returned no KSA data. Please try again later.',
data: { knowledge: [], skills: [], abilities: [] }
});
}
// Otherwise, insert into DB as normal:
await pool.query(`
INSERT INTO ai_generated_ksa (
soc_code,
career_title,
knowledge_json,
skills_json,
abilities_json
)
VALUES (?, ?, ?, ?, ?)
`, [
socCode,
careerTitle,
JSON.stringify(knowledgeArr),
JSON.stringify(skillsArr),
JSON.stringify(abilitiesArr)
]);
return res.json({
source: 'chatgpt',
data: {
knowledge: knowledgeArr,
skills: skillsArr,
abilities: abilitiesArr
}
});
} catch (err) {
console.error('Error retrieving KSA fallback data:', err);
return res.status(500).json({ error: err.message || 'Failed to fetch KSA data.' });
}
});
/* ------------------------------------------------------------------
RESUME OPTIMIZATION ENDPOINT
------------------------------------------------------------------ */
// Setup file upload via multer
// Writable data path (mounted at runtime)
const UPLOAD_DIR = process.env.UPLOAD_DIR || '/data/uploads';
fs.mkdirSync(UPLOAD_DIR, { recursive: true }); // ensure the upload directory exists at startup
const storage = multer.diskStorage({
destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
filename: (_req, file, cb) => {
const base = path.basename(file.originalname).replace(/[^\w.\-]+/g, '_');
cb(null, `${Date.now()}-${base}`);
}
});
const upload = multer({
storage,
limits: { fileSize: 10 * 1024 * 1024 },
fileFilter: (_req, file, cb) => {
const ok = [
'application/pdf',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/msword'
].includes(file.mimetype);
cb(ok ? null : new Error('unsupported_type'), ok);
}
});
function buildResumePrompt(resumeText, jobTitle, jobDescription) {
// Full ChatGPT prompt for resume optimization:
return `
You are an expert resume writer specialized in precisely tailoring existing resumes for optimal ATS compatibility and explicit alignment with provided job descriptions.
STRICT GUIDELINES:
1. DO NOT invent any new job titles, employers, dates, locations, compensation details, or roles not explicitly stated in the user's original resume.
2. Creatively but realistically reframe, reposition, and explicitly recontextualize the user's existing professional experiences and skills to clearly demonstrate alignment with the provided job description.
3. Emphasize transferable skills, tasks, and responsibilities from the user's provided resume content that directly match the requirements and responsibilities listed in the job description.
4. Clearly and explicitly incorporate exact keywords, responsibilities, skills, and competencies directly from the provided job description.
5. Minimize or entirely remove irrelevant technical jargon or specific software names not directly aligned with the job description.
6. Avoid generic résumé clichés (e.g., "results-driven," "experienced professional," "dedicated leader," "dynamic professional," etc.).
7. NEVER directly reuse specific details such as salary information, compensation, or other company-specific information from the provided job description.
Target Job Title:
${jobTitle}
Provided Job Description:
${jobDescription}
User's Original Resume:
${resumeText}
Precisely Tailored, ATS-Optimized Resume:
`;
}
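// Extract plain text from an uploaded PDF with pdfjs-dist, concatenating text items page by page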
async function extractTextFromPDF(filePath) {
const fileBuffer = await readFile(filePath);
const uint8Array = new Uint8Array(fileBuffer);
const pdfDoc = await getDocument({ data: uint8Array }).promise;
let text = '';
for (let pageNum = 1; pageNum <= pdfDoc.numPages; pageNum++) {
const page = await pdfDoc.getPage(pageNum);
const pageText = await page.getTextContent();
text += pageText.items.map(item => item.str).join(' ');
}
return text;
}
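// Per-IP throttle for resume optimization: 20 requests per 5-minute window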
const resumeLimiter = rateLimit({
windowMs: 5 * 60 * 1000,
max: 20,
standardHeaders: true,
legacyHeaders: false,
});
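// OPTIMIZE an uploaded resume (PDF/DOC/DOCX) against a job description via OpenAI,
// subject to the per-plan weekly limit plus any purchased boosters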
app.post(
'/api/premium/resume/optimize',
resumeLimiter,
authenticatePremiumUser, // authenticate before multer writes anything to disk
upload.single('resumeFile'),
async (req, res) => {
try {
const { jobTitle, jobDescription } = req.body;
if (!jobTitle || !jobDescription || !req.file) {
return res.status(400).json({ error: 'Missing required fields.' });
}
const id = req.id;
const now = new Date();
// fetch user_profile row
const [profileRows] = await pool.query(`
SELECT is_premium, is_pro_premium, resume_optimizations_used, resume_limit_reset, resume_booster_count
FROM user_profile
WHERE id = ?
`, [id]);
const userProfile = profileRows[0];
if (!userProfile) {
return res.status(404).json({ error: 'User not found.' });
}
// figure out usage limit
let userPlan = 'premium';
if (userProfile.is_pro_premium) {
userPlan = 'pro';
} else if (userProfile.is_premium) {
userPlan = 'premium';
}
const weeklyLimits = { premium: 3, pro: 5 };
const userWeeklyLimit = weeklyLimits[userPlan] || 0;
let resetDate = new Date(userProfile.resume_limit_reset);
if (!userProfile.resume_limit_reset || now > resetDate) {
resetDate = new Date(now);
resetDate.setDate(now.getDate() + 7);
await pool.query(`
UPDATE user_profile
SET resume_optimizations_used = 0,
resume_limit_reset = ?
WHERE id = ?
`, [resetDate.toISOString().slice(0, 10), id]);
userProfile.resume_optimizations_used = 0;
}
const totalLimit = userWeeklyLimit + (userProfile.resume_booster_count || 0);
if (userProfile.resume_optimizations_used >= totalLimit) {
return res.status(403).json({ error: 'Weekly resume optimization limit reached.' });
}
// parse file
const filePath = req.file.path;
const mimeType = req.file.mimetype;
let resumeText = '';
if (mimeType === 'application/pdf') {
resumeText = await extractTextFromPDF(filePath);
} else if (
mimeType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' ||
mimeType === 'application/msword'
) {
const result = await mammoth.extractRawText({ path: filePath });
resumeText = result.value;
} else {
await unlink(filePath);
return res.status(400).json({ error: 'Unsupported or corrupted file upload.' });
}
// Build GPT prompt
const prompt = buildResumePrompt(resumeText, jobTitle, jobDescription);
// Call OpenAI
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const completion = await openai.chat.completions.create({
model: 'gpt-4-turbo',
messages: [{ role: 'user', content: prompt }],
temperature: 0.7
});
const optimizedResume = completion?.choices?.[0]?.message?.content?.trim() || '';
// increment usage
await pool.query(`
UPDATE user_profile
SET resume_optimizations_used = resume_optimizations_used + 1
WHERE id = ?
`, [id]);
const remainingOptimizations = totalLimit - (userProfile.resume_optimizations_used + 1);
// remove uploaded file
await unlink(filePath);
res.json({
optimizedResume,
remainingOptimizations,
resetDate: resetDate.toISOString().slice(0, 10)
});
} catch (err) {
console.error('Error optimizing resume:', err);
// best-effort cleanup if the uploaded file is still on disk after a failure
if (req.file?.path) { await unlink(req.file.path).catch(() => {}); }
res.status(500).json({ error: 'Failed to optimize resume.' });
}
}
);
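// Report how many resume optimizations remain in the current weekly window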
app.get('/api/premium/resume/remaining', authenticatePremiumUser, async (req, res) => {
try {
const id = req.id;
const now = new Date();
const [rows] = await pool.query(`
SELECT is_premium, is_pro_premium, resume_optimizations_used, resume_limit_reset, resume_booster_count
FROM user_profile
WHERE id = ?
`, [id]);
const userProfile = rows[0];
if (!userProfile) {
return res.status(404).json({ error: 'User not found.' });
}
let userPlan = 'basic';
if (userProfile.is_pro_premium) {
userPlan = 'pro';
} else if (userProfile.is_premium) {
userPlan = 'premium';
}
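// weekly limits by plan (note: the optimize route above currently uses { premium: 3, pro: 5 })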
const weeklyLimits = { basic: 1, premium: 2, pro: 5 };
const userWeeklyLimit = weeklyLimits[userPlan] || 0;
let resetDate = new Date(userProfile.resume_limit_reset);
if (!userProfile.resume_limit_reset || now > resetDate) {
resetDate = new Date(now);
resetDate.setDate(now.getDate() + 7);
await pool.query(`
UPDATE user_profile
SET resume_optimizations_used = 0,
resume_limit_reset = ?
WHERE id = ?
`, [resetDate.toISOString().slice(0, 10), id]);
userProfile.resume_optimizations_used = 0;
}
const totalLimit = userWeeklyLimit + (userProfile.resume_booster_count || 0);
const remainingOptimizations = totalLimit - userProfile.resume_optimizations_used;
res.json({ remainingOptimizations, resetDate });
} catch (err) {
console.error('Error fetching remaining optimizations:', err);
res.status(500).json({ error: 'Failed to fetch remaining optimizations.' });
}
});
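// Schedule a one-off SMS reminder; the row is written now and sent later by reminderCron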
app.post('/api/premium/reminders', authenticatePremiumUser, async (req, res) => {
const { phoneE164, messageBody, sendAtUtc } = req.body;
if (!phoneE164 || !messageBody || !sendAtUtc) {
return res.status(400).json({
error: 'phoneE164, messageBody, and sendAtUtc are required.'
});
}
try {
// helper writes the row; cron will pick it up
const id = await createReminder({
userId: req.id,
phone: phoneE164,
body: messageBody.slice(0, 320), // SMS segment limit
sendAtUtc
});
return res.json({ id });
} catch (err) {
console.error('Reminder create failed:', err);
return res.status(500).json({ error: 'Failed to schedule reminder.' });
}
});
// Debounce map for parallel checkout taps (key = `${userId}:${priceId}`)
const pendingCheckout = new Map();
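// Create a Stripe Checkout session for a new subscription; existing subscribers are
// redirected to the Billing Portal instead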
app.post('/api/premium/stripe/create-checkout-session',
authenticatePremiumUser,
async (req, res) => {
try {
const { tier = 'premium', cycle = 'monthly', success_url, cancel_url } = req.body || {};
const priceId = priceMap?.[tier]?.[cycle];
if (!priceId) return res.status(400).json({ error: 'bad_tier_or_cycle' });
const customerId = await getOrCreateStripeCustomerId(req);
const base = PUBLIC_BASE || `https://${req.headers.host}`;
const defaultSuccess = `${base}/billing?ck=success`;
const defaultCancel = `${base}/billing?ck=cancel`;
const safeSuccess = success_url && isSafeRedirect(success_url) ? success_url : defaultSuccess;
const safeCancel = cancel_url && isSafeRedirect(cancel_url) ? cancel_url : defaultCancel;
// 👇 Gate: if already subscribed, send to Billing Portal instead of Checkout
if (await customerHasActiveSub(customerId)) {
const portal = await stripe.billingPortal.sessions.create({
customer : customerId,
return_url : `${base}/billing?ck=portal`
});
return res.json({ url: portal.url });
}
// Otherwise, first-time subscription → Checkout (race-proof)
const key = `${req.id}:${priceId}`;
if (pendingCheckout.has(key)) {
const sess = await pendingCheckout.get(key);
return res.json({ url: sess.url });
}
const p = stripe.checkout.sessions.create({
mode : 'subscription',
customer : customerId,
line_items : [{ price: priceId, quantity: 1 }],
allow_promotion_codes : false,
success_url : `${safeSuccess}`,
cancel_url : `${safeCancel}`
}, {
// reduce duplicate creation on rapid retries
idempotencyKey: `sub:${req.id}:${priceId}`
});
pendingCheckout.set(key, p);
const session = await p.finally(() => pendingCheckout.delete(key));
return res.json({ url: session.url });
} catch (err) {
console.error('create-checkout-session failed:', err?.raw?.message || err);
return res
.status(err?.statusCode || 500)
.json({ error: 'checkout_failed', message: err?.raw?.message || 'Internal error' });
}
}
);
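// Open the Stripe Billing Portal for the current customer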
app.get('/api/premium/stripe/customer-portal',
authenticatePremiumUser,
async (req, res) => {
try {
const base = PUBLIC_BASE || `https://${req.headers.host}`;
const { return_url } = req.query;
const safeReturn = return_url && isSafeRedirect(return_url) ? return_url : `${base}/billing?ck=portal`;
const cid = await getOrCreateStripeCustomerId(req);
const portal = await stripe.billingPortal.sessions.create({
customer : cid,
return_url : safeReturn
});
return res.json({ url: portal.url });
} catch (err) {
console.error('customer-portal failed:', err?.raw?.message || err);
return res
.status(err?.statusCode || 500)
.json({ error: 'portal_failed', message: err?.raw?.message || 'Internal error' });
}
}
);
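// READ a stored AI-risk analysis by SOC code (no premium auth required)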
app.get('/api/ai-risk/:socCode', async (req, res) => {
const { socCode } = req.params;
try {
const row = await getRiskAnalysisFromDB(socCode);
if (!row) {
return res.status(404).json({ error: 'Not found' });
}
res.json({
socCode: row.soc_code,
careerName: row.career_name,
jobDescription: row.job_description,
tasks: row.tasks,
riskLevel: row.risk_level,
reasoning: row.reasoning,
created_at: row.created_at,
});
} catch (err) {
console.error('Error fetching AI risk:', err);
res.status(500).json({ error: 'Internal server error' });
}
});
// AI Risk migration from server2
app.post('/api/ai-risk', async (req, res) => {
try {
const {
socCode,
careerName,
jobDescription,
tasks,
riskLevel,
reasoning
} = req.body;
if (!socCode) {
return res.status(400).json({ error: 'socCode is required' });
}
await storeRiskAnalysisInDB({
socCode,
careerName,
jobDescription,
tasks,
riskLevel,
reasoning
});
res.status(201).json({ message: 'AI Risk Analysis stored successfully' });
} catch (err) {
console.error('Error storing AI risk data:', err);
res.status(500).json({ error: 'Failed to store AI risk data.' });
}
});
// ---- upload error mapper (multer) ----
app.use((err, _req, res, next) => {
if (!err) return next();
if (err.code === 'LIMIT_FILE_SIZE') {
return res.status(413).json({ error: 'file_too_large', limit_mb: 10 });
}
if (err.code === 'LIMIT_UNEXPECTED_FILE' || err.code === 'LIMIT_PART_COUNT') {
return res.status(400).json({ error: 'bad_upload' });
}
if (err.message === 'unsupported_type') { // raised by the multer fileFilter above
return res.status(415).json({ error: 'unsupported_type' });
}
return next(err);
});
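// ---- final error handler: log with request id, return a generic 500 ----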
app.use((err, req, res, _next) => {
if (res.headersSent) return;
const rid = req.headers['x-request-id'] || res.get('X-Request-ID') || getRequestId(req, res);
console.error(`[ref ${rid}]`, err?.message || err);
// map known cases if you have them; otherwise generic:
return res.status(500).json({ error: 'Server error', ref: rid });
});
// Start server
app.listen(PORT, () => {
console.log(`Premium server (MySQL) listening on port ${PORT}`);
});