CORS ALLOWED ORIGINS fix/alignment.
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
This commit is contained in:
parent 888bdd2939
commit a736e1d4d1
.woodpecker.yml (128 changed lines)
@ -168,3 +168,131 @@ when:
branch:
- master
- dev-master

---
kind: pipeline
type: docker
name: prod-promotion

steps:
- name: security-scan
image: google/cloud-sdk:latest
entrypoint:
- bash
- -c
- |
set -euo pipefail
# Guard so this file doesn't run unless you explicitly set PROMOTE=prod in the UI
[[ "${PROMOTE:-}" == "prod" ]] || { echo "⏭ Skipping (PROMOTE=$PROMOTE)"; exit 0; }

# Scan the images that WILL be deployed: pull IMG_TAG from PROD
IMG_TAG="$(gcloud secrets versions access latest --secret=IMG_TAG --project=aptivaai-dev)"
REG="us-central1-docker.pkg.dev/aptivaai-prod/aptiva-repo"

apt-get update -qq
apt-get install -y -qq gnupg apt-transport-https curl ca-certificates docker.io

curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | bash
export PATH="$PATH:$(pwd)/bin"

gcloud auth configure-docker us-central1-docker.pkg.dev -q

trivy image --scanners vuln --ignore-unfixed --ignorefile .trivyignore --exit-code 1 --severity CRITICAL "$REG/server1:$IMG_TAG"
trivy image --scanners vuln --ignore-unfixed --ignorefile .trivyignore --exit-code 1 --severity CRITICAL "$REG/server2:$IMG_TAG"
trivy image --scanners vuln --ignore-unfixed --ignorefile .trivyignore --exit-code 1 --severity CRITICAL "$REG/server3:$IMG_TAG"
trivy image --scanners vuln --ignore-unfixed --ignorefile .trivyignore --exit-code 1 --severity CRITICAL "$REG/nginx:$IMG_TAG"

- name: prod-deploy
depends_on: [security-scan]
image: google/cloud-sdk:latest
entrypoint:
- bash
- -c
- |
set -euo pipefail
[[ "${PROMOTE:-}" == "prod" ]] || { echo "⏭ Skipping (PROMOTE=$PROMOTE)"; exit 0; }

mkdir -p ~/.ssh

# Pull SSH materials for PROD from aptivaai-dev SM (same pattern as staging)

gcloud secrets versions access latest \
--secret=PROD_SSH_KEY --project=aptivaai-dev \
| base64 -d > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519

PROD_SSH_TARGET="$(gcloud secrets versions access latest --secret=PROD_SSH_TARGET --project=aptivaai-dev)"

# Use the IMG_TAG stored in PROD (the exact one mirrored there)
IMG_TAG="$(gcloud secrets versions access latest --secret=IMG_TAG --project=aptivaai-prod)"

echo "🔑 SSH prerequisites installed"

# ── SSH into PROD and deploy (NO DEK SYNC) ────────────────────────
ssh -o ProxyCommand="gcloud compute start-iap-tunnel aptiva-prod-vm 22 \
--project=aptivaai-prod --zone=us-central1-a \
--listen-on-stdin --verbosity=error" \
-o StrictHostKeyChecking=accept-new -i ~/.ssh/id_ed25519 \
"$PROD_SSH_TARGET" \
'set -euo pipefail; \
PROJECT=aptivaai-prod; \
ENV=prod; \
export IMG_TAG='"$IMG_TAG"'; \
\
# Pull all runtime secrets from aptivaai-prod
JWT_SECRET=$(gcloud secrets versions access latest --secret=JWT_SECRET_$ENV --project=$PROJECT); export JWT_SECRET; \
OPENAI_API_KEY=$(gcloud secrets versions access latest --secret=OPENAI_API_KEY_$ENV --project=$PROJECT); export OPENAI_API_KEY; \
ONET_USERNAME=$(gcloud secrets versions access latest --secret=ONET_USERNAME_$ENV --project=$PROJECT); export ONET_USERNAME; \
ONET_PASSWORD=$(gcloud secrets versions access latest --secret=ONET_PASSWORD_$ENV --project=$PROJECT); export ONET_PASSWORD; \
STRIPE_SECRET_KEY=$(gcloud secrets versions access latest --secret=STRIPE_SECRET_KEY_$ENV --project=$PROJECT); export STRIPE_SECRET_KEY; \
STRIPE_PUBLISHABLE_KEY=$(gcloud secrets versions access latest --secret=STRIPE_PUBLISHABLE_KEY_$ENV --project=$PROJECT); export STRIPE_PUBLISHABLE_KEY; \
STRIPE_WH_SECRET=$(gcloud secrets versions access latest --secret=STRIPE_WH_SECRET_$ENV --project=$PROJECT); export STRIPE_WH_SECRET; \
STRIPE_PRICE_PREMIUM_MONTH=$(gcloud secrets versions access latest --secret=STRIPE_PRICE_PREMIUM_MONTH_$ENV --project=$PROJECT); export STRIPE_PRICE_PREMIUM_MONTH; \
STRIPE_PRICE_PREMIUM_YEAR=$(gcloud secrets versions access latest --secret=STRIPE_PRICE_PREMIUM_YEAR_$ENV --project=$PROJECT); export STRIPE_PRICE_PREMIUM_YEAR; \
STRIPE_PRICE_PRO_MONTH=$(gcloud secrets versions access latest --secret=STRIPE_PRICE_PRO_MONTH_$ENV --project=$PROJECT); export STRIPE_PRICE_PRO_MONTH; \
STRIPE_PRICE_PRO_YEAR=$(gcloud secrets versions access latest --secret=STRIPE_PRICE_PRO_YEAR_$ENV --project=$PROJECT); export STRIPE_PRICE_PRO_YEAR; \
DB_NAME=$(gcloud secrets versions access latest --secret=DB_NAME_$ENV --project=$PROJECT); export DB_NAME; \
DB_HOST=$(gcloud secrets versions access latest --secret=DB_HOST_$ENV --project=$PROJECT); export DB_HOST; \
DB_PORT=$(gcloud secrets versions access latest --secret=DB_PORT_$ENV --project=$PROJECT); export DB_PORT; \
DB_USER=$(gcloud secrets versions access latest --secret=DB_USER_$ENV --project=$PROJECT); export DB_USER; \
DB_PASSWORD=$(gcloud secrets versions access latest --secret=DB_PASSWORD_$ENV --project=$PROJECT); export DB_PASSWORD; \
DB_SSL_CA=$(gcloud secrets versions access latest --secret=DB_SSL_CA_$ENV --project=$PROJECT); export DB_SSL_CA; \
DB_SSL_CERT=$(gcloud secrets versions access latest --secret=DB_SSL_CERT_$ENV --project=$PROJECT); export DB_SSL_CERT; \
DB_SSL_KEY=$(gcloud secrets versions access latest --secret=DB_SSL_KEY_$ENV --project=$PROJECT); export DB_SSL_KEY; \
TWILIO_ACCOUNT_SID=$(gcloud secrets versions access latest --secret=TWILIO_ACCOUNT_SID_$ENV --project=$PROJECT); export TWILIO_ACCOUNT_SID; \
TWILIO_AUTH_TOKEN=$(gcloud secrets versions access latest --secret=TWILIO_AUTH_TOKEN_$ENV --project=$PROJECT); export TWILIO_AUTH_TOKEN; \
TWILIO_MESSAGING_SERVICE_SID=$(gcloud secrets versions access latest --secret=TWILIO_MESSAGING_SERVICE_SID_$ENV --project=$PROJECT); export TWILIO_MESSAGING_SERVICE_SID; \
KMS_KEY_NAME=$(gcloud secrets versions access latest --secret=KMS_KEY_NAME_$ENV --project=$PROJECT); export KMS_KEY_NAME; \
DEK_PATH=$(gcloud secrets versions access latest --secret=DEK_PATH_$ENV --project=$PROJECT); export DEK_PATH; \
SUPPORT_SENDGRID_API_KEY=$(gcloud secrets versions access latest --secret=SUPPORT_SENDGRID_API_KEY_$ENV --project=$PROJECT); export SUPPORT_SENDGRID_API_KEY; \
GOOGLE_MAPS_API_KEY=$(gcloud secrets versions access latest --secret=GOOGLE_MAPS_API_KEY_$ENV --project=$PROJECT); export GOOGLE_MAPS_API_KEY; \
SERVER1_PORT=$(gcloud secrets versions access latest --secret=SERVER1_PORT_$ENV --project=$PROJECT); export SERVER1_PORT; \
SERVER2_PORT=$(gcloud secrets versions access latest --secret=SERVER2_PORT_$ENV --project=$PROJECT); export SERVER2_PORT; \
SERVER3_PORT=$(gcloud secrets versions access latest --secret=SERVER3_PORT_$ENV --project=$PROJECT); export SERVER3_PORT; \
ENV_NAME=$(gcloud secrets versions access latest --secret=ENV_NAME_$ENV --project=$PROJECT); export ENV_NAME; \
CORS_ALLOWED_ORIGINS=$(gcloud secrets versions access latest --secret=CORS_ALLOWED_ORIGINS_$ENV --project=$PROJECT); export CORS_ALLOWED_ORIGINS; \
APTIVA_API_BASE=$(gcloud secrets versions access latest --secret=APTIVA_API_BASE_$ENV --project=$PROJECT); export APTIVA_API_BASE; \
TOKEN_MAX_AGE_MS=$(gcloud secrets versions access latest --secret=TOKEN_MAX_AGE_MS_$ENV --project=$PROJECT); export TOKEN_MAX_AGE_MS; \
COOKIE_SECURE=$(gcloud secrets versions access latest --secret=COOKIE_SECURE_$ENV --project=$PROJECT); export COOKIE_SECURE; \
COOKIE_SAMESITE=$(gcloud secrets versions access latest --secret=COOKIE_SAMESITE_$ENV --project=$PROJECT); export COOKIE_SAMESITE; \
ACCESS_COOKIE_NAME=$(gcloud secrets versions access latest --secret=ACCESS_COOKIE_NAME_$ENV --project=$PROJECT); export ACCESS_COOKIE_NAME; \
export FROM_SECRETS_MANAGER=true; \
\
APP_DIR="/home/jcoakley_aptivaai_com"; \
cd "$APP_DIR"; \
gcloud auth configure-docker us-central1-docker.pkg.dev -q; \
sudo --preserve-env=IMG_TAG,FROM_SECRETS_MANAGER,JWT_SECRET,OPENAI_API_KEY,ONET_USERNAME,ONET_PASSWORD,STRIPE_SECRET_KEY,STRIPE_PUBLISHABLE_KEY,STRIPE_WH_SECRET,STRIPE_PRICE_PREMIUM_MONTH,STRIPE_PRICE_PREMIUM_YEAR,STRIPE_PRICE_PRO_MONTH,STRIPE_PRICE_PRO_YEAR,DB_NAME,DB_HOST,DB_PORT,DB_USER,DB_PASSWORD,DB_SSL_CA,DB_SSL_CERT,DB_SSL_KEY,TWILIO_ACCOUNT_SID,TWILIO_AUTH_TOKEN,TWILIO_MESSAGING_SERVICE_SID,KMS_KEY_NAME,DEK_PATH,SUPPORT_SENDGRID_API_KEY,GOOGLE_MAPS_API_KEY,SERVER1_PORT,SERVER2_PORT,SERVER3_PORT,CORS_ALLOWED_ORIGINS,ENV_NAME,APTIVA_API_BASE,PROJECT,TOKEN_MAX_AGE_MS,COOKIE_SECURE,COOKIE_SAMESITE,ACCESS_COOKIE_NAME \
docker compose pull; \
sudo --preserve-env=IMG_TAG,FROM_SECRETS_MANAGER,JWT_SECRET,OPENAI_API_KEY,ONET_USERNAME,ONET_PASSWORD,STRIPE_SECRET_KEY,STRIPE_PUBLISHABLE_KEY,STRIPE_WH_SECRET,STRIPE_PRICE_PREMIUM_MONTH,STRIPE_PRICE_PREMIUM_YEAR,STRIPE_PRICE_PRO_MONTH,STRIPE_PRICE_PRO_YEAR,DB_NAME,DB_HOST,DB_PORT,DB_USER,DB_PASSWORD,DB_SSL_CA,DB_SSL_CERT,DB_SSL_KEY,TWILIO_ACCOUNT_SID,TWILIO_AUTH_TOKEN,TWILIO_MESSAGING_SERVICE_SID,KMS_KEY_NAME,DEK_PATH,SUPPORT_SENDGRID_API_KEY,GOOGLE_MAPS_API_KEY,SERVER1_PORT,SERVER2_PORT,SERVER3_PORT,CORS_ALLOWED_ORIGINS,ENV_NAME,APTIVA_API_BASE,PROJECT,TOKEN_MAX_AGE_MS,COOKIE_SECURE,COOKIE_SAMESITE,ACCESS_COOKIE_NAME \
docker compose up -d --force-recreate --remove-orphans; \
echo "✅ Prod stack refreshed with tag $IMG_TAG"'

secrets:
- PROD_SSH_KEY
- PROD_SSH_TARGET

when:
event:
- manual
branch:
- master

@ -18,7 +18,7 @@ COPY --chown=app:app src/ai/ ./src/ai/
COPY --chown=app:app src/assets/ ./src/assets/
COPY --chown=app:app backend/data/ ./backend/data/

RUN mkdir -p /run/secrets && chown -R app:app /run/secrets
RUN mkdir -p /tmp && chmod 1777 /tmp
USER app

CMD ["node", "backend/server1.js"]
@ -18,6 +18,6 @@ COPY --chown=app:app src/ai/ ./src/ai/
COPY --chown=app:app src/assets/ ./src/assets/
COPY --chown=app:app backend/data/ ./backend/data/

RUN mkdir -p /run/secrets && chown -R app:app /run/secrets
RUN mkdir -p /tmp && chmod 1777 /tmp
USER app
CMD ["node", "backend/server2.js"]
@ -18,7 +18,7 @@ COPY --chown=app:app src/ai/ ./src/ai/
COPY --chown=app:app src/assets/ ./src/assets/
COPY --chown=app:app backend/data/ ./backend/data/

RUN mkdir -p /run/secrets && chown -R app:app /run/secrets
RUN mkdir -p /tmp && chmod 1777 /tmp
RUN mkdir -p /data/uploads && chown -R app:app /data

USER app

@ -90,7 +90,7 @@ const PORT = process.env.SERVER1_PORT || 5000;

app.disable('x-powered-by');
app.use(express.json({ limit: '1mb' }));
if (process.env.NODE_ENV === 'prod') app.set('trust proxy', 1); // important if you're behind a proxy/HTTPS terminator
app.set('trust proxy', 1); // behind proxy/HTTPS in all envs
app.use(cookieParser());
app.use(helmet({ contentSecurityPolicy: false, crossOriginEmbedderPolicy: false }));
app.use((req, res, next) => {
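Note: with every environment now behind the Nginx TLS terminator, app.set('trust proxy', 1) runs unconditionally. A minimal self-contained sketch of what the flag changes (the /whoami route is hypothetical, not part of this repo): Express derives req.protocol, req.secure, and req.ip from the X-Forwarded-* headers sent by the first proxy hop instead of from the raw socket.

const express = require('express');

const app = express();

// Trust exactly one proxy hop (Nginx). With this set, Express reads
// X-Forwarded-Proto / X-Forwarded-For from that hop instead of the socket.
app.set('trust proxy', 1);

// Hypothetical diagnostic route for illustration only.
app.get('/whoami', (req, res) => {
  res.json({
    protocol: req.protocol, // 'https' when Nginx sends X-Forwarded-Proto: https
    secure: req.secure,     // true on forwarded HTTPS connections
    ip: req.ip,             // client IP taken from X-Forwarded-For
  });
});

app.listen(3000);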
@ -286,13 +286,14 @@ const allowedOrigins = process.env.CORS_ALLOWED_ORIGINS
.filter(Boolean);

function sessionCookieOptions() {
const IS_PROD = process.env.NODE_ENV === 'prod';
// All envs terminate TLS at Nginx; cookies must be Secure everywhere
const IS_HTTPS = true;
const CROSS_SITE = process.env.CROSS_SITE_COOKIES === '1'; // set to "1" if FE and API are different sites
const COOKIE_DOMAIN = process.env.COOKIE_DOMAIN || undefined;

return {
httpOnly: true,
secure: IS_PROD, // <-- not secure in local dev
secure: IS_HTTPS, // <-- not secure in local dev
sameSite: CROSS_SITE ? 'none' : 'lax',
path: '/',
maxAge: 2 * 60 * 60 * 1000,
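For context, a sketch of how sessionCookieOptions() would typically be consumed when issuing the session cookie. It assumes the Express app and the sessionCookieOptions() shown above; the /auth/login route, the placeholder token, and the fallback cookie name are hypothetical, with the real cookie name assumed to come from ACCESS_COOKIE_NAME as elsewhere in this diff.

// Hypothetical usage sketch, not part of the repo.
const COOKIE_NAME = process.env.ACCESS_COOKIE_NAME || 'access_token';

app.post('/auth/login', (req, res) => {
  const token = 'signed-jwt-goes-here'; // placeholder; real code signs a JWT
  // httpOnly/secure/sameSite/path/maxAge all come from sessionCookieOptions().
  // The Secure attribute is fine even though Node itself speaks plain HTTP,
  // because the browser evaluates it against the public HTTPS origin at Nginx.
  res.cookie(COOKIE_NAME, token, sessionCookieOptions());
  res.status(204).end();
});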
@ -395,28 +396,25 @@ try {
process.exit(1);
}

// Enable CORS with dynamic origin checking
app.use(
cors({
origin: (origin, callback) => {
if (!origin || allowedOrigins.includes(origin)) {
callback(null, true);
} else {
console.error('Blocked by CORS:', origin);
callback(new Error('Not allowed by CORS'));
// Enable CORS with strict, env-driven origin allowlist (exact scheme+host)
app.use((req, res, next) => {
const origin = req.headers.origin || '';
if (!origin) return next(); // same-origin or server→server
if (!allowedOrigins.includes(origin)) {
// exact match only; no localhost/IP unless present in env for that env
return res.status(403).end();
}
},
methods: ['GET', 'POST', 'OPTIONS'],
allowedHeaders: [
'Authorization',
'Content-Type',
'Accept',
'Origin',
'X-Requested-With',
],
credentials: true,
})
);
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
res.setHeader(
'Access-Control-Allow-Headers',
'Authorization, Content-Type, Accept, Origin, X-Requested-With, Access-Control-Allow-Methods'
);
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
if (req.method === 'OPTIONS') return res.status(204).end();
return next();
});


// keep tight on request
const pwRequestLimiter = rateLimit({
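Pulling the fragments together, this is roughly the shared pattern the three servers converge on after this commit. It is a sketch only, assuming the Express app above and a comma-separated CORS_ALLOWED_ORIGINS value; the Vary: Origin header appears only in the server3 hunk further down, but the same reasoning applies wherever Access-Control-Allow-Origin echoes the request origin.

// Sketch of the shared pattern: exact-match allowlist from one env var,
// e.g. CORS_ALLOWED_ORIGINS="https://aptivaai.com,https://www.aptivaai.com".
const allowedOrigins = (process.env.CORS_ALLOWED_ORIGINS || '')
  .split(',')
  .map(o => o.trim())
  .filter(Boolean);

app.use((req, res, next) => {
  const origin = req.headers.origin || '';
  res.setHeader('Vary', 'Origin');          // keep caches from reusing ACAO across origins
  if (!origin) return next();               // same-origin / curl / server-to-server
  if (!allowedOrigins.includes(origin)) return res.status(403).end();

  res.setHeader('Access-Control-Allow-Origin', origin);
  res.setHeader('Access-Control-Allow-Credentials', 'true');
  res.setHeader('Access-Control-Allow-Headers',
    'Authorization, Content-Type, Accept, Origin, X-Requested-With');
  res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
  if (req.method === 'OPTIONS') return res.status(204).end();
  next();
});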
@ -478,11 +476,14 @@ function emailLookup(email) {


// ----- Password reset config (zero-config dev mode) -----
if (!process.env.APTIVA_API_BASE) {
console.error('FATAL: APTIVA_API_BASE missing – set this to your web origin (e.g., https://dev1.aptivaai.com)');
process.exit(1);
}
const RESET_CONFIG = {
// accept both spellings just in case
BASE_URL: process.env.APTIVA_API_BASE || 'http://localhost:5173',
FROM: 'no-reply@aptivaai.com', // edit here if you want
TTL_MIN: 60, // edit here if you want
BASE_URL: process.env.APTIVA_API_BASE, // must be a public web origin
FROM: 'no-reply@aptivaai.com',
TTL_MIN: 60,
};

// --- SendGrid config (safe + simple) ---
@ -1131,5 +1132,5 @@ app.use((err, req, res, _next) => {
START SERVER
------------------------------------------------------------------ */
app.listen(PORT, () => {
console.log(`Server running on http://localhost:${PORT}`);
console.log(`Server1 listening on port ${PORT}`);
});

@ -43,6 +43,7 @@ const INSTITUTION_DATA_PATH = path.join(DATA_DIR, 'Institution_data.json');
const SALARY_DB_PATH = path.join(ROOT_DIR, 'salary_info.db');
const USER_PROFILE_DB_PATH = path.join(ROOT_DIR, 'user_profile.db');
const DB_POOL_SIZE = 6;
const API_BASE = (process.env.APTIVA_INTERNAL_API || 'http://server1:5000').replace(/\/+$/, '');

for (const p of [CIP_TO_SOC_PATH, INSTITUTION_DATA_PATH, SALARY_DB_PATH, USER_PROFILE_DB_PATH]) {
if (!fs.existsSync(p)) {
@ -66,7 +67,9 @@ const chatLimiter = rateLimit({
const OUTBOUND_ALLOW = new Set([
'services.onetcenter.org', // O*NET
'maps.googleapis.com', // Google Distance
'api.openai.com' // Free chat (chatFreeEndpoint)
'api.openai.com', // Free chat (chatFreeEndpoint)
'server2',
'server1'
]);

// Guard global fetch (Node 20+)
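The fetch guard itself is not part of this diff, so purely as an illustration of the idea, a minimal allowlist wrapper around global fetch might look like the following; guardedFetch here is an assumption sketched from the OUTBOUND_ALLOW set above, not the repo's actual implementation.

// Illustrative only: an allowlist wrapper around global fetch (Node 20+).
// The real guard in the repo may differ; OUTBOUND_ALLOW is the set above,
// so Docker DNS names like 'server2' pass the hostname check.
async function guardedFetch(url, options) {
  const { hostname } = new URL(url);
  if (!OUTBOUND_ALLOW.has(hostname)) {
    throw new Error(`Outbound request blocked: ${hostname} is not allowlisted`);
  }
  return fetch(url, options);
}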
@ -528,15 +531,17 @@ app.use(
})
);

/* 4 — Dynamic CORS / pre-flight handling */
/* 4 — Strict CORS: only exact origins from env */
app.use((req, res, next) => {
const origin = req.headers.origin || '';

// A) No Origin header (e.g. same-origin, curl, server->server) → allow
const origin = req.headers.origin || '';
// No Origin header (same-origin, curl, server->server) → skip CORS and allow
if (!origin) return next();

// B) Whitelisted browser origins (credentials allowed)
if (allowedOrigins.includes(origin)) {
// Exact match against env list only (scheme + host), no soft allow
if (!allowedOrigins.includes(origin)) {
return res.status(403).end();
}

res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
res.setHeader(
@ -544,16 +549,8 @@ const origin = req.headers.origin || '';
'Authorization, Content-Type, Accept, Origin, X-Requested-With, Access-Control-Allow-Methods'
);
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
} else {
return res.status(403).end();
}

/* 4d — Short-circuit pre-flight requests */
if (req.method === 'OPTIONS') {
res.status(204).end();
return;
}

if (req.method === 'OPTIONS') return res.status(204).end();
next();
});

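A quick way to probe the tightened policy from Node once deployed; the /api/health path is a placeholder, and dev1.aptivaai.com is used only because it appears as the example origin elsewhere in this diff.

// Manual probe of the CORS allowlist (host and path are placeholders).
const https = require('node:https');

function preflight(origin) {
  const req = https.request(
    { host: 'dev1.aptivaai.com', path: '/api/health', method: 'OPTIONS',
      headers: { Origin: origin } },
    res => {
      console.log(origin, '->', res.statusCode,
        res.headers['access-control-allow-origin'] || '(no ACAO header)');
      res.resume(); // drain the response
    }
  );
  req.end();
}

preflight('https://dev1.aptivaai.com'); // allowlisted: expect 204 and the origin echoed back
preflight('https://evil.example');      // not allowlisted: expect 403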
@ -1086,7 +1083,7 @@ app.get('/api/tuition', (req, res) => {
}

try {
schoolsData = institutionData;
let schoolsData = institutionData;

const cipArray = cipCodes
.split(',')
@ -1630,7 +1627,7 @@ app.post('/api/chat/threads/:id/stream', authenticateUser, async (req, res) => {
);

// call local free-chat (server2 hosts /api/chat/free)
const internal = await fetch('http://localhost:5001/api/chat/free', {
const internal = await fetch('http://server2:5001/api/chat/free', {
method: 'POST',
headers: {
'Content-Type' : 'application/json',

@ -41,7 +41,9 @@ if (!process.env.FROM_SECRETS_MANAGER) {
}

const PORT = process.env.SERVER3_PORT || 5002;
const API_BASE = `http://localhost:${PORT}/api`;
// internal self-call base; works in all envs via Docker DNS
const INTERNAL_SELF_BASE = (process.env.INTERNAL_SELF_BASE || 'http://server3:5002').replace(/\/+$/, '');
const API_BASE = `${INTERNAL_SELF_BASE}/api`;
const DATA_DIR = path.join(__dirname, 'data');

/* ─── helper: canonical public origin ─────────────────────────── */
@ -58,7 +60,7 @@ const ALLOWED_REDIRECT_HOSTS = new Set([
// ── RUNTIME PROTECTION: outbound host allowlist (server3) ──
const OUTBOUND_ALLOW = new Set([
'server2', // compose DNS (server2:5001)
'localhost', // self-calls (localhost:5002)
'server3', // self-calls (localhost:5002)
'api.openai.com', // OpenAI SDK traffic
'api.stripe.com', // Stripe SDK traffic
'api.twilio.com' // smsService may hit Twilio from this proc
@ -648,12 +650,13 @@ const allowedOrigins = process.env.CORS_ALLOWED_ORIGINS
.map(o => o.trim())
.filter(Boolean);

/* ─── Dynamic CORS middleware (matches server1 / server2) ────────────── */
/* ─── Strict CORS (exact matches only); allow no-Origin requests ───────── */
app.use((req, res, next) => {
const origin = req.headers.origin;
res.setHeader('Vary', 'Origin');
// A) whitelisted origins (credentials allowed)
if (origin && allowedOrigins.includes(origin)) {
const origin = req.headers.origin || '';
res.setHeader('Vary', 'Origin');
// No Origin header (same-origin, curl, server→server) ⇒ skip CORS
if (!origin) return next();
if (!allowedOrigins.includes(origin)) return res.status(403).end();
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
res.setHeader(
@ -661,14 +664,7 @@ res.setHeader('Vary', 'Origin');
'Authorization, Content-Type, Accept, Origin, X-Requested-With, Access-Control-Allow-Methods'
);
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE, OPTIONS');

} else {
return res.status(403).end();
}

if (req.method === 'OPTIONS') {
return res.status(204).end();
}
if (req.method === 'OPTIONS') return res.status(204).end();
next();
});

@ -836,7 +832,7 @@ async function ensureDescriptionAndTasks({ socCode, jobDescription, tasks }) {

try {
// hit server2 directly on the compose network
const r = await guardedfetch(`http://server2:5001/api/onet/career-description/${encodeURIComponent(socCode)}`, {
const r = await guardedFetch(`http://server2:5001/api/onet/career-description/${encodeURIComponent(socCode)}`, {
headers: { Accept: 'application/json' }
});
if (r.ok) {
@ -4651,5 +4647,5 @@ app.use((err, req, res, _next) => {

// Start server
app.listen(PORT, () => {
console.log(`Premium server (MySQL) running on http://localhost:${PORT}`);
console.log(`Premium server (MySQL) listening on port ${PORT}`);
});

@ -8,7 +8,10 @@ case "$ENV" in dev|staging|prod) ;; *) echo "❌ Unknown ENV='$ENV'"; exit 1 ;;
PROJECT="aptivaai-${ENV}"
REG="us-central1-docker.pkg.dev/${PROJECT}/aptiva-repo"
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ENV_FILE="${ROOT}/.env"

MIRROR_TO_STAGING="${MIRROR_TO_STAGING:-false}" # default off (set via Woodpecker var)
MIRROR_TO_PROD="${MIRROR_TO_PROD:-false}" # default off
PROMOTE_PROD="${PROMOTE_PROD:-false}" # default off

echo "🔧 Deploying environment: $ENV (GCP: $PROJECT)"

@ -100,14 +103,44 @@ build_and_push () {
docker push "${REG}/${svc}:${TAG}"
}

# Build servers first, then nginx (needs ./build)
for svc in server1 server2 server3 nginx; do
SERVICES=(server1 server2 server3 nginx)

# Build & push to DEV registry first (source of truth)
for svc in "${SERVICES[@]}"; do
build_and_push "$svc"
done
# ───────────────────── optional: mirror to staging/prod ─────────────────────
# Staging mirror
if [[ "$MIRROR_TO_STAGING" == "true" ]]; then
echo "🔁 Mirroring images to STAGING registry (tag=${TAG})"
DST_REG_STG="us-central1-docker.pkg.dev/aptivaai-staging/aptiva-repo"
for svc in "${SERVICES[@]}"; do
docker tag "${REG}/${svc}:${TAG}" "${DST_REG_STG}/${svc}:${TAG}"
docker push "${DST_REG_STG}/${svc}:${TAG}"
done
printf "%s" "${TAG}" | gcloud secrets versions add IMG_TAG --data-file=- --project="aptivaai-staging" >/dev/null
echo "🏷 IMG_TAG published to aptivaai-staging"
else
echo "⏭ Skipping staging mirror (MIRROR_TO_STAGING=$MIRROR_TO_STAGING)"
fi

# Prod mirror (dual-key: MIRROR_TO_PROD && PROMOTE_PROD)
if [[ "$MIRROR_TO_PROD" == "true" && "$PROMOTE_PROD" == "true" ]]; then
echo "🔁 Mirroring images to PROD registry (tag=${TAG})"
DST_REG_PROD="us-central1-docker.pkg.dev/aptivaai-prod/aptiva-repo"
for svc in "${SERVICES[@]}"; do
docker tag "${REG}/${svc}:${TAG}" "${DST_REG_PROD}/${svc}:${TAG}"
docker push "${DST_REG_PROD}/${svc}:${TAG}"
done
printf "%s" "${TAG}" | gcloud secrets versions add IMG_TAG --data-file=- --project="aptivaai-prod" >/dev/null
echo "🏷 IMG_TAG published to aptivaai-prod"
else
echo "⏭ Skipping prod mirror (MIRROR_TO_PROD=$MIRROR_TO_PROD, PROMOTE_PROD=$PROMOTE_PROD)"
fi

# ───────────────────── write IMG_TAG locally ─────────────────────
export IMG_TAG="${TAG}"
echo "🔖 Using IMG_TAG=${IMG_TAG} (not writing to .env)"
echo "🔖 Using IMG_TAG=${IMG_TAG}"

# ───────────────────── publish IMG_TAG to Secret Manager ─────────────────────
printf "%s" "${TAG}" | gcloud secrets versions add IMG_TAG --data-file=- --project="$PROJECT" >/dev/null

@ -97,7 +97,6 @@ http {
location ^~ /api/tuition/ { proxy_pass http://backend5001; }
location ^~ /api/projections/ { proxy_pass http://backend5001; }
location ^~ /api/skills/ { proxy_pass http://backend5001; }
location ^~ /api/ai-risk { proxy_pass http://backend5002; }
location ^~ /api/maps/distance { proxy_pass http://backend5001; }
location ^~ /api/schools { proxy_pass http://backend5001; }
location ^~ /api/support { proxy_pass http://backend5001; }
@ -107,6 +106,7 @@ http {

location ^~ /api/premium/ { proxy_pass http://backend5002; }
location ^~ /api/public/ { proxy_pass http://backend5002; }
location ^~ /api/ai-risk { proxy_pass http://backend5002; }

location ^~ /api/ { proxy_pass http://backend5000; }
