chore: initial project import
Some checks failed
CI - Production Readiness / Verify (push) Has been cancelled

This commit is contained in:
Wira Basalamah
2026-04-21 09:29:29 +07:00
commit adde003fba
222 changed files with 37657 additions and 0 deletions

View File

@ -0,0 +1,159 @@
#!/usr/bin/env node
// Campaign retry daemon. Periodically POSTs to the app's
// /api/jobs/campaign-retry endpoint so stuck campaign recipients get retried.
//
// Flags: --once / -1  run a single pass then exit
//        --no-jitter  disable the random delay added between passes
// Env:   CAMPAIGN_RETRY_DAEMON_INTERVAL_SECONDS (default 300),
//        CAMPAIGN_RETRY_DAEMON_TIMEOUT_MS (default 30000),
//        CAMPAIGN_RETRY_JOB_TOKEN / _TENANT_ID / _CAMPAIGN_ID /
//        _BATCH_SIZE / _MAX_CAMPAIGNS.
const args = new Set(process.argv.slice(2));
const intervalSecondsRaw = process.env.CAMPAIGN_RETRY_DAEMON_INTERVAL_SECONDS?.trim();
const timeoutMsRaw = process.env.CAMPAIGN_RETRY_DAEMON_TIMEOUT_MS?.trim();
const runOnce = args.has("--once") || args.has("-1");
const shouldJitter = !args.has("--no-jitter");
const intervalMs = normalizePositiveNumber(intervalSecondsRaw, 300) * 1000;
const requestTimeoutMs = normalizePositiveNumber(timeoutMsRaw, 30000);
// NOTE(review): normalizePositiveNumber already guarantees a finite positive
// value, so this guard looks unreachable — confirm before relying on it.
if (isNaN(intervalMs) || intervalMs <= 0) {
  console.error("Invalid CAMPAIGN_RETRY_DAEMON_INTERVAL_SECONDS");
  process.exit(1);
}
const endpoint = resolveCampaignRetryEndpoint();
const token = process.env.CAMPAIGN_RETRY_JOB_TOKEN?.trim();
// Request body forwarded to the job endpoint. Only explicitly-configured and
// valid fields are included, so server-side defaults stay in control.
const payload = {
  ...(process.env.CAMPAIGN_RETRY_TENANT_ID?.trim() ? { tenantId: process.env.CAMPAIGN_RETRY_TENANT_ID.trim() } : {}),
  ...(process.env.CAMPAIGN_RETRY_CAMPAIGN_ID?.trim()
    ? { campaignId: process.env.CAMPAIGN_RETRY_CAMPAIGN_ID.trim() }
    : {}),
  ...(isPositiveInt(process.env.CAMPAIGN_RETRY_BATCH_SIZE)
    ? { recipientBatchSize: Number(process.env.CAMPAIGN_RETRY_BATCH_SIZE) }
    : {}),
  ...(isPositiveInt(process.env.CAMPAIGN_RETRY_MAX_CAMPAIGNS)
    ? { maxCampaigns: Number(process.env.CAMPAIGN_RETRY_MAX_CAMPAIGNS) }
    : {})
};
// Shared loop state: shutdown flag flipped by signal handlers, and the most
// recent pass summary printed on shutdown.
let isShuttingDown = false;
let lastSummary = null;
// Graceful shutdown: finish the in-flight pass, then stop looping. Note this
// does not interrupt a sleep between passes, so shutdown can take up to one
// full interval to complete.
process.on("SIGINT", () => {
  isShuttingDown = true;
  console.info("\n[daemon] shutdown requested");
});
process.on("SIGTERM", () => {
  isShuttingDown = true;
  console.info("\n[daemon] shutdown requested");
});
// Entry point: one pass when --once/-1 was given, otherwise loop forever.
(async function main() {
  await (runOnce ? runLoopOnce() : runDaemon());
})();
async function runDaemon() {
  // Loop until a shutdown signal flips isShuttingDown, sleeping between passes.
  console.info(`[daemon] starting campaign retry worker, interval=${intervalMs / 1000}s`);
  for (;;) {
    if (isShuttingDown) {
      break;
    }
    await runLoopOnce();
    if (isShuttingDown) {
      break;
    }
    // Optional positive jitter (at most 1s or 10% of the interval, whichever
    // is smaller) so multiple daemons don't hit the endpoint in lockstep.
    const jitterMs = shouldJitter ? Math.random() * Math.min(1000, intervalMs * 0.1) : 0;
    const waitMs = Math.max(1000, intervalMs + jitterMs);
    await sleep(waitMs);
  }
  if (lastSummary) {
    console.info(`[daemon] last run summary: ${JSON.stringify(lastSummary)}`);
  }
  console.info("[daemon] stopped");
}
async function runLoopOnce() {
  // Perform one POST to the retry endpoint. Records the outcome in the
  // module-level `lastSummary`; in --once mode, exits the process with a
  // non-zero code on failure.
  const startedAt = new Date().toISOString();
  const headers = {
    "content-type": "application/json"
  };
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  }
  // Abort the request if it exceeds the configured timeout.
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), requestTimeoutMs);
  try {
    const response = await fetch(endpoint, {
      method: "POST",
      headers,
      body: JSON.stringify(payload),
      signal: controller.signal
    });
    const text = await response.text();
    clearTimeout(timeout);
    if (!response.ok) {
      console.error(`[${startedAt}] campaign retry failed: ${response.status} ${response.statusText}`);
      console.error(text);
      lastSummary = { ts: startedAt, status: "failed", code: response.status, message: response.statusText };
      if (runOnce) {
        process.exitCode = 1;
      }
      return;
    }
    // Prefer the server's JSON summary; fall back to wrapping the raw body.
    lastSummary = safeParseJson(text) ?? { ts: startedAt, raw: text, status: "ok" };
    console.log(`[${startedAt}] campaign retry done: ${typeof text === "string" ? text.slice(0, 250) : text}`);
  } catch (error) {
    // Network failure, timeout abort, or body-read failure.
    clearTimeout(timeout);
    const message = error instanceof Error ? error.message : String(error);
    lastSummary = { ts: startedAt, status: "error", message };
    console.error(`[${startedAt}] campaign retry request failed: ${message}`);
    if (runOnce) {
      process.exitCode = 1;
    }
  }
  // In --once mode exit explicitly so a failed pass yields a non-zero code.
  if (runOnce) {
    process.exit(process.exitCode ?? 0);
  }
}
function resolveCampaignRetryEndpoint() {
  // Pick the first non-empty configured base URL, then append the job path
  // unless the URL already points at it. Exits the process when unconfigured.
  const candidates = [
    process.env.CAMPAIGN_RETRY_JOB_URL,
    process.env.APP_URL,
    process.env.NEXT_PUBLIC_APP_URL
  ];
  const baseUrl = candidates.map((value) => value?.trim()).find(Boolean);
  if (!baseUrl) {
    console.error("Missing CAMPAIGN_RETRY_JOB_URL / APP_URL / NEXT_PUBLIC_APP_URL");
    process.exit(1);
  }
  if (baseUrl.endsWith("/api/jobs/campaign-retry")) {
    return baseUrl;
  }
  return `${baseUrl.replace(/\/+$/, "")}/api/jobs/campaign-retry`;
}
function normalizePositiveNumber(value, fallback) {
  // Coerce to a number; NaN, Infinity, zero, and negatives yield the fallback.
  const parsed = Number(value);
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
}
function isPositiveInt(raw) {
  // True only when the value coerces to a strictly positive integer.
  const parsed = Number(raw);
  return parsed > 0 && Number.isInteger(parsed);
}
function safeParseJson(raw) {
  try {
    return JSON.parse(raw);
  } catch (_error) {
    // Malformed JSON is signalled as null instead of throwing.
    return null;
  }
}
function sleep(ms) {
  // Promise-based delay used between daemon passes.
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}

View File

@ -0,0 +1,56 @@
#!/usr/bin/env node
// One-shot trigger for the campaign retry job: builds a payload from
// CAMPAIGN_RETRY_* env vars and POSTs it to /api/jobs/campaign-retry,
// exiting non-zero on any HTTP or network failure. Uses top-level await.
const baseUrl = process.env.CAMPAIGN_RETRY_JOB_URL?.trim()
  || process.env.APP_URL?.trim()
  || process.env.NEXT_PUBLIC_APP_URL?.trim();
if (!baseUrl) {
  console.error("Missing CAMPAIGN_RETRY_JOB_URL / APP_URL / NEXT_PUBLIC_APP_URL");
  process.exit(1);
}
// Append the job path unless the configured URL already points at it.
const endpoint = baseUrl.endsWith("/api/jobs/campaign-retry")
  ? baseUrl
  : `${baseUrl.replace(/\/+$/, "")}/api/jobs/campaign-retry`;
const token = process.env.CAMPAIGN_RETRY_JOB_TOKEN?.trim();
const tenantId = process.env.CAMPAIGN_RETRY_TENANT_ID?.trim();
const campaignId = process.env.CAMPAIGN_RETRY_CAMPAIGN_ID?.trim();
const recipientBatchSize = Number(process.env.CAMPAIGN_RETRY_BATCH_SIZE);
const maxCampaigns = Number(process.env.CAMPAIGN_RETRY_MAX_CAMPAIGNS);
// Only include fields that were explicitly configured and valid, so the
// server keeps control of its defaults.
const payload = {
  ...(tenantId ? { tenantId } : {}),
  ...(campaignId ? { campaignId } : {}),
  ...(Number.isInteger(recipientBatchSize) && recipientBatchSize > 0 ? { recipientBatchSize } : {}),
  ...(Number.isInteger(maxCampaigns) && maxCampaigns > 0 ? { maxCampaigns } : {})
};
const headers = {
  "content-type": "application/json"
};
if (token) {
  headers.Authorization = `Bearer ${token}`;
}
try {
  const response = await fetch(endpoint, {
    method: "POST",
    headers,
    body: JSON.stringify(payload)
  });
  const bodyText = await response.text();
  if (!response.ok) {
    console.error(`Campaign retry job failed: ${response.status} ${response.statusText}`);
    console.error(bodyText);
    process.exit(1);
  }
  // Success: echo the server's response body (usually a JSON summary).
  console.log(bodyText);
} catch (error) {
  console.error(`Failed to trigger campaign retry job: ${error instanceof Error ? error.message : String(error)}`);
  process.exit(1);
}

123
scripts/ops-healthcheck.mjs Normal file
View File

@ -0,0 +1,123 @@
#!/usr/bin/env node
// Ops healthcheck: probes the app root, /api/health, and the campaign-retry
// job endpoint, printing one PASS/FAIL line per check and exiting 1 on any
// failure. Uses top-level await (.mjs).
const BASE_URL = resolveBaseUrl(); // safe: function declarations are hoisted
const token = process.env.CAMPAIGN_RETRY_JOB_TOKEN?.trim();
const healthToken = process.env.HEALTHCHECK_TOKEN?.trim();
if (!BASE_URL) {
  console.error("Missing APP_URL / NEXT_PUBLIC_APP_URL / OPS_BASE_URL");
  process.exit(1);
}
function resolveBaseUrl() {
  // First non-empty of OPS_BASE_URL, APP_URL, NEXT_PUBLIC_APP_URL (trimmed);
  // undefined when none are configured.
  const candidates = [
    process.env.OPS_BASE_URL,
    process.env.APP_URL,
    process.env.NEXT_PUBLIC_APP_URL
  ];
  for (const candidate of candidates) {
    const trimmed = candidate?.trim();
    if (trimmed) {
      return trimmed;
    }
  }
  return undefined;
}
function buildUrl(path) {
  // Join BASE_URL and path with exactly one slash between them.
  const trimmedBase = BASE_URL.replace(/\/+$/, "");
  return path.startsWith("/") ? `${trimmedBase}${path}` : `${trimmedBase}/${path}`;
}
async function requestJson(url, options = {}) {
  // fetch wrapper with a 12s abort timeout. Parses JSON bodies when possible,
  // falling back to the raw text; network/timeout errors come back as a
  // status-0 result rather than a rejection.
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 12_000);
  try {
    const response = await fetch(url, {
      ...options,
      headers: {
        accept: "application/json",
        ...options.headers
      },
      signal: controller.signal
    });
    const text = await response.text();
    clearTimeout(timer);
    let data;
    if (!text) {
      data = null;
    } else {
      try {
        data = JSON.parse(text);
      } catch {
        data = text;
      }
    }
    return {
      status: response.status,
      ok: response.ok,
      data,
      raw: response.ok ? null : text
    };
  } catch (error) {
    clearTimeout(timer);
    return {
      status: 0,
      ok: false,
      data: null,
      raw: error instanceof Error ? error.message : String(error)
    };
  }
}
function appendToken(url, tokenValue) {
  // Add ?token=... unless no token was given or the URL already carries a
  // non-empty token parameter. Always normalizes via the URL class.
  if (!tokenValue) {
    return url;
  }
  const target = new URL(url);
  const existing = target.searchParams.get("token");
  if (!existing) {
    target.searchParams.set("token", tokenValue);
  }
  return target.toString();
}
function passFail(ok) {
  // Human-readable label for check output lines.
  if (ok) {
    return "PASS";
  }
  return "FAIL";
}
// Ordered list of probes; each carries its own auth headers. The job-state
// check supplies the token both as a query parameter and a bearer header.
const results = [];
const checks = [
  {
    name: "Application root",
    url: buildUrl("/"),
    headers: {}
  },
  {
    name: "Health endpoint",
    url: buildUrl("/api/health"),
    headers: healthToken
      ? {
        Authorization: `Bearer ${healthToken}`
      }
      : {}
  },
  {
    name: "Campaign retry job state",
    url: appendToken(buildUrl("/api/jobs/campaign-retry"), token),
    headers: token
      ? {
        Authorization: `Bearer ${token}`
      }
      : {}
  }
];
// Run sequentially, printing one PASS/FAIL line per check.
for (const check of checks) {
  const result = await requestJson(check.url, { headers: check.headers });
  results.push({ ...check, result });
  console.log(`[${passFail(result.ok)}] ${check.name}: ${result.status}`);
}
// Summarize failures (error text preferred, parsed body as fallback) and
// exit non-zero so CI/cron notices.
const failedChecks = results.filter((item) => !item.result.ok);
if (failedChecks.length > 0) {
  console.error("Some health checks failed");
  for (const item of failedChecks) {
    console.error(`- ${item.name}: ${item.result.raw || JSON.stringify(item.result.data)}`);
  }
  process.exit(1);
}
console.log("All checks passed.");

85
scripts/ops-incident.mjs Normal file
View File

@ -0,0 +1,85 @@
#!/usr/bin/env node
// Ops incident snapshot: dumps retry-worker state, webhook failure counts,
// and queue depth straight from the database for on-call diagnosis.
import { PrismaClient } from "@prisma/client";
const prisma = new PrismaClient();
// Rolling time windows used by the queries below.
const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
// NOTE(review): sixHoursAgo appears unused in this file — confirm before removing.
const sixHoursAgo = new Date(Date.now() - 6 * 60 * 60 * 1000);
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
function formatDate(date) {
  // Normalize any date-like value to an ISO-8601 string; falsy input
  // (null/undefined) passes through as null.
  return date ? new Date(date).toISOString() : null;
}
function printBlock(title, value) {
  // Emit a titled, pretty-printed JSON section to stdout.
  const header = `\n=== ${title} ===`;
  console.log(header);
  console.log(JSON.stringify(value, null, 2));
}
async function main() {
  // Query and print an incident snapshot: retry-worker state, webhook failure
  // counts (1h/24h), in-flight campaigns, disconnected channels, and the
  // retry-candidate queue. Always disconnects Prisma, even on failure.
  //
  // Fix: the broadcastCampaign.count and campaignRecipient.count calls were
  // each missing a closing `}` before `)`, which made the file unparseable.
  try {
    const [retryState, webhookFailuresHour, webhookFailuresDay, failedCampaigns, disconnectedChannels, retryQueued] =
      await Promise.all([
        prisma.backgroundJobState.findUnique({
          where: { jobName: "campaign-retry-worker" },
          select: {
            jobName: true,
            lockedUntil: true,
            lastRunStartedAt: true,
            lastRunCompletedAt: true,
            lastRunStatus: true,
            consecutiveFailures: true,
            lastError: true
          }
        }),
        prisma.webhookEvent.count({ where: { processStatus: "failed", createdAt: { gte: oneHourAgo } } }),
        prisma.webhookEvent.count({ where: { processStatus: "failed", createdAt: { gte: oneDayAgo } } }),
        // NOTE(review): despite the name, this counts campaigns still in
        // PROCESSING/SCHEDULED (in progress), not failed ones — confirm intent.
        prisma.broadcastCampaign.count({ where: { status: { in: ["PROCESSING", "SCHEDULED"] } } }),
        prisma.channel.count({ where: { status: "DISCONNECTED" } }),
        prisma.campaignRecipient.count({ where: { sendStatus: { in: ["QUEUED", "FAILED"] } } })
      ]);
    // Same candidate set as retryQueued, fetched via aggregate for the report.
    const pendingRetry = await prisma.campaignRecipient.aggregate({
      _count: { _all: true },
      where: {
        OR: [{ sendStatus: "QUEUED" }, { sendStatus: "FAILED" }]
      }
    });
    const recentWebhook = await prisma.webhookEvent.findMany({
      where: { processStatus: "failed", createdAt: { gte: oneHourAgo } },
      orderBy: { createdAt: "desc" },
      take: 10,
      select: { id: true, tenantId: true, channelId: true, eventType: true, createdAt: true, providerEventId: true, failedReason: true }
    });
    printBlock("Health Snapshot", {
      runAt: new Date().toISOString(),
      retryWorker: {
        ...retryState,
        lockedUntil: formatDate(retryState?.lockedUntil),
        lastRunStartedAt: formatDate(retryState?.lastRunStartedAt),
        lastRunCompletedAt: formatDate(retryState?.lastRunCompletedAt)
      },
      failedCampaignsInProgress: failedCampaigns,
      disconnectedChannels,
      pendingRetryCandidates: pendingRetry._count._all,
      webhookFailuresHour,
      webhookFailuresDay
    });
    printBlock("Recent Failed Webhooks (1h)", recentWebhook);
    printBlock("Retry Queue", { totalCandidates: retryQueued });
  } catch (error) {
    console.error("incident snapshot failed:", error instanceof Error ? error.message : String(error));
    process.exitCode = 1;
  } finally {
    await prisma.$disconnect();
  }
}
await main();

122
scripts/ops-maintenance.mjs Normal file
View File

@ -0,0 +1,122 @@
#!/usr/bin/env node
// Scheduled maintenance: prunes auth tokens, stale job locks, old webhook
// events, and old audit logs according to configurable retention windows.
import { PrismaClient } from "@prisma/client";
const prisma = new PrismaClient();
const ONE_MINUTE_MS = 60_000;
const ONE_DAY_MS = 24 * 60 * 60 * 1000;
const now = new Date();
// Retention knobs (env-overridable, with defaults).
const consumedRetentionHours = getPositiveNumber(process.env.AUTH_TOKEN_CONSUMED_RETENTION_HOURS, 24);
const staleLockMinutes = getPositiveNumber(process.env.CAMPAIGN_RETRY_STALE_LOCK_MINUTES, 120);
const webhookRetentionDays = getPositiveNumber(process.env.WEBHOOK_EVENT_RETENTION_DAYS, 30);
const auditRetentionDays = getPositiveNumber(process.env.AUDIT_LOG_RETENTION_DAYS, 365);
function getPositiveNumber(raw, fallback) {
  // Numeric env parsing: any non-finite or non-positive value yields the fallback.
  const value = Number(raw);
  return value > 0 && Number.isFinite(value) ? value : fallback;
}
function printLine(message) {
  // Prefix all maintenance output so it is easy to grep in cron logs.
  console.log(["[ops-maintenance]", message].join(" "));
}
async function main() {
  // One maintenance pass: delete expired and old consumed auth tokens,
  // recover stale background-job locks, and enforce retention on webhook
  // events and audit logs. Writes an audit-log entry summarizing the pass
  // when any token/lock cleanup actually happened.
  try {
    const consumedCutoff = new Date(now.getTime() - consumedRetentionHours * 60 * 60 * 1000);
    const staleLockCutoff = new Date(now.getTime() - staleLockMinutes * ONE_MINUTE_MS);
    const webhookCutoff = new Date(now.getTime() - webhookRetentionDays * ONE_DAY_MS);
    const auditCutoff = new Date(now.getTime() - auditRetentionDays * ONE_DAY_MS);
    // Never-consumed tokens past their expiry are dead weight.
    const expiredTokens = await prisma.authToken.deleteMany({
      where: {
        expiresAt: { lte: now },
        consumedAt: null
      }
    });
    // Consumed tokens are kept briefly (default 24h) for debugging, then purged.
    const consumedTokens = await prisma.authToken.deleteMany({
      where: {
        consumedAt: { lte: consumedCutoff }
      }
    });
    // Jobs stuck in "running" with an expired lock are marked stale so the
    // worker can pick them up again.
    const recoveredLock = await prisma.backgroundJobState.updateMany({
      where: {
        lockedUntil: { lte: staleLockCutoff },
        lastRunStatus: "running"
      },
      data: {
        lockedUntil: null,
        lastRunStatus: "stale",
        lastError: "Recovered stale lock from maintenance cleanup",
        lastFailureAt: now,
        lastRunCompletedAt: now,
        consecutiveFailures: { increment: 1 }
      }
    });
    // Retention sweeps. NOTE(review): "skipped" webhook events are retained
    // forever by this filter — confirm that is intentional.
    const removedWebhookEvents = await prisma.webhookEvent.deleteMany({
      where: {
        createdAt: { lte: webhookCutoff },
        processStatus: { not: "skipped" }
      }
    });
    const removedAuditLogs = await prisma.auditLog.deleteMany({
      where: {
        createdAt: { lte: auditCutoff }
      }
    });
    printLine(`Expired auth tokens deleted: ${expiredTokens.count}`);
    printLine(`Consumed auth tokens deleted (older than ${consumedRetentionHours}h): ${consumedTokens.count}`);
    printLine(`Recovered stale background locks: ${recoveredLock.count}`);
    printLine(`Webhook events deleted (older than ${webhookRetentionDays}d): ${removedWebhookEvents.count}`);
    printLine(`Audit logs deleted (older than ${auditRetentionDays}d): ${removedAuditLogs.count}`);
    // The audit entry needs a tenant to attach to; an arbitrary tenant is used
    // since this is a system-wide action. Skipped when nothing was cleaned up.
    const sampleTenant = await prisma.tenant.findFirst({ select: { id: true } });
    const shouldWriteAudit = Boolean(sampleTenant) && (
      expiredTokens.count > 0 ||
      consumedTokens.count > 0 ||
      recoveredLock.count > 0
    );
    if (shouldWriteAudit) {
      const tenantId = sampleTenant.id;
      await prisma.auditLog.create({
        data: {
          tenantId,
          actorUserId: null,
          entityType: "system",
          entityId: "maintenance",
          action: "ops_maintenance_cleanup",
          metadataJson: JSON.stringify({
            expiredAuthTokens: expiredTokens.count,
            consumedAuthTokens: consumedTokens.count,
            recoveredLocks: recoveredLock.count,
            webhookEventsDeleted: removedWebhookEvents.count,
            auditLogsDeleted: removedAuditLogs.count,
            consumedRetentionHours,
            staleLockMinutes,
            webhookRetentionDays,
            auditRetentionDays
          })
        }
      });
    }
    printLine("maintenance completed");
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    console.error(`[ops-maintenance] failed: ${message}`);
    process.exitCode = 1;
  } finally {
    await prisma.$disconnect();
  }
}
await main();

194
scripts/ops-readiness.mjs Normal file
View File

@ -0,0 +1,194 @@
#!/usr/bin/env node
// Production readiness check: validates env vars, Prisma migration status,
// and the health/job endpoints. Exits 1 when any hard error was reported.
import { execSync } from "node:child_process";
import { env } from "node:process";
// Variables to inspect; `requiredInProduction` ones are hard errors when
// NODE_ENV === "production", otherwise their absence is just a warning.
const requiredVariables = [
  { key: "DATABASE_URL", requiredInProduction: true },
  { key: "AUTH_SECRET", requiredInProduction: true },
  { key: "CAMPAIGN_RETRY_JOB_TOKEN", requiredInProduction: true },
  { key: "WHATSAPP_WEBHOOK_VERIFY_TOKEN", requiredInProduction: false },
  { key: "WHATSAPP_WEBHOOK_SECRET", requiredInProduction: false },
  { key: "NEXT_PUBLIC_APP_URL", requiredInProduction: false },
  { key: "APP_URL", requiredInProduction: false },
  { key: "OPS_BASE_URL", requiredInProduction: false }
];
// Substrings suggesting a value was never replaced with a real secret.
const placeholderPatterns = [
  "change-me",
  "changeme",
  "your-",
  "example",
  "todo",
  "placeholder"
];
const isProductionLike = env.NODE_ENV === "production";
// Flipped by reportIssue("error", ...); decides the process exit code.
let hasFailure = false;
function isPresent(value) {
  // Non-empty string check; whitespace-only counts as absent.
  if (typeof value !== "string") {
    return false;
  }
  return value.trim().length > 0;
}
function isPlaceholderValue(value) {
  // Case-insensitive scan for the known placeholder fragments.
  const needle = value.toLowerCase();
  for (const pattern of placeholderPatterns) {
    if (needle.includes(pattern)) {
      return true;
    }
  }
  return false;
}
function validateSecretValue(key, value, expected) {
  // Flag secrets that are missing, equal to a known default, or look like
  // placeholders. `expected` is the known bad default for this key.
  const missing = !isPresent(value);
  if (missing) {
    if (expected) {
      reportIssue("error", `${key} is missing`);
    }
    return;
  }
  const usesDefault = Boolean(expected) && value === expected;
  if (usesDefault) {
    reportIssue("error", `${key} is using production default placeholder`);
  }
  if (isPlaceholderValue(value)) {
    reportIssue("warn", `${key} looks like placeholder value`);
  }
}
function reportIssue(level, message) {
  // error => mark overall failure; warn => non-fatal; anything else => OK line.
  switch (level) {
    case "error":
      console.error(`[ERROR] ${message}`);
      hasFailure = true;
      break;
    case "warn":
      console.warn(`[WARN] ${message}`);
      break;
    default:
      console.log(`[OK] ${message}`);
  }
}
function checkEnv() {
  // Validate presence and quality of each configured variable. AUTH_SECRET
  // additionally gets checked against the known development default.
  console.log("Checking environment variables...");
  for (const { key, requiredInProduction } of requiredVariables) {
    const value = env[key];
    if (!isPresent(value)) {
      // Missing values are only hard errors in production.
      const level = requiredInProduction && isProductionLike ? "error" : "warn";
      const suffix = level === "warn" ? " (optional)" : "";
      reportIssue(level, `${key} is missing${suffix}`);
      continue;
    }
    if (key === "AUTH_SECRET") {
      validateSecretValue(key, value, "whatsapp-inbox-dev-secret");
      continue;
    }
    if (isPlaceholderValue(value)) {
      reportIssue("warn", `${key} has placeholder-like value`);
    }
    // Mask the value before echoing it: only star characters are printed.
    const display = value.replace(/./g, "*").slice(0, 4);
    reportIssue("ok", `${key} set (${display}...)`);
  }
}
function checkPrismaMigrationStatus() {
  // Shell out to `prisma migrate status` and classify its output.
  console.log("Checking Prisma migration status...");
  let output;
  try {
    output = execSync("npx prisma migrate status --schema prisma/schema.prisma", {
      encoding: "utf8",
      stdio: ["pipe", "pipe", "pipe"]
    });
  } catch (error) {
    // A non-zero exit (no DB, pending migrations, etc.) is a hard error.
    const message = error instanceof Error ? error.message : String(error);
    reportIssue("error", `Prisma migration check failed: ${message}`);
    return;
  }
  if (/database is up to date/i.test(output)) {
    reportIssue("ok", "Database schema is up to date");
  } else if (/No migration found/i.test(output)) {
    reportIssue("warn", "No migration found for current database");
  } else {
    reportIssue("warn", `Prisma migrate status output: ${output.trim()}`);
  }
}
async function runHealthCheck() {
  // Probe /api/health with an optional bearer token and a 12s abort timeout.
  const root = (env.OPS_BASE_URL || env.APP_URL || env.NEXT_PUBLIC_APP_URL || "http://localhost:3000").replace(/\/+$/, "");
  const healthToken = env.HEALTHCHECK_TOKEN?.trim();
  const requestHeaders = {};
  if (healthToken) {
    requestHeaders.Authorization = `Bearer ${healthToken}`;
  }
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 12_000);
  try {
    const response = await fetch(`${root}/api/health`, { headers: requestHeaders, signal: controller.signal });
    clearTimeout(timer);
    if (!response.ok) {
      reportIssue("error", `Health endpoint returned ${response.status}`);
      return;
    }
    const payload = await response.json();
    if (payload?.ok) {
      reportIssue("ok", `Health endpoint OK: ${payload.status}`);
    } else {
      reportIssue("warn", `Health endpoint warning: ${JSON.stringify(payload)}`);
    }
  } catch (error) {
    clearTimeout(timer);
    reportIssue("error", `Health endpoint unreachable: ${error instanceof Error ? error.message : String(error)}`);
  }
}
async function runRetryEndpointCheck() {
  // Probe the campaign-retry job endpoint; skipped when no job token is set.
  // Failures are warnings only — the endpoint may legitimately be protected.
  const root = (env.OPS_BASE_URL || env.APP_URL || env.NEXT_PUBLIC_APP_URL || "http://localhost:3000").replace(/\/+$/, "");
  const token = env.CAMPAIGN_RETRY_JOB_TOKEN?.trim();
  if (!token) {
    reportIssue("warn", "CAMPAIGN_RETRY_JOB_TOKEN missing; campaign job endpoint health check skipped");
    return;
  }
  const url = `${root}/api/jobs/campaign-retry?token=${encodeURIComponent(token)}`;
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 12_000);
  try {
    const response = await fetch(url, { headers: { Authorization: `Bearer ${token}` }, signal: controller.signal });
    clearTimeout(timer);
    if (!response.ok) {
      reportIssue("warn", `Campaign retry endpoint returned ${response.status}`);
      return;
    }
    const payload = await response.json();
    reportIssue("ok", `Campaign retry endpoint reachable: ${payload.ok ? "ok" : "down"}`);
  } catch (error) {
    clearTimeout(timer);
    reportIssue("warn", `Campaign retry endpoint error: ${error instanceof Error ? error.message : String(error)}`);
  }
}
async function main() {
  // Run all readiness checks in sequence; exit 1 when any reported an error.
  checkEnv();
  checkPrismaMigrationStatus();
  await runHealthCheck();
  await runRetryEndpointCheck();
  if (hasFailure) {
    reportIssue("error", "Readiness check failed.");
    process.exit(1);
  }
  reportIssue("ok", "Readiness check completed.");
}
await main();

45
scripts/ops-smoke.mjs Executable file
View File

@ -0,0 +1,45 @@
#!/usr/bin/env node
// Smoke checks: runs the readiness script, then curls the health, job, and
// webhook-verify endpoints of a deployed instance.
import { execSync } from "node:child_process";
// Base URL with trailing slashes stripped; empty when nothing is configured.
const baseUrl = (process.env.OPS_BASE_URL || process.env.APP_URL || process.env.NEXT_PUBLIC_APP_URL || "").replace(/\/+$/, "");
function fail(message) {
  // Print the error and abort the whole smoke run with a non-zero exit code.
  const line = `[ERROR] ${message}`;
  console.error(line);
  process.exit(1);
}
// A base URL is mandatory; every check below hits live endpoints.
if (!baseUrl) {
  fail("Missing OPS_BASE_URL / APP_URL / NEXT_PUBLIC_APP_URL");
}
function run(command, description) {
  // Execute a shell command with inherited stdio and a 2-minute timeout,
  // logging "[OK] <description>" on success.
  // Throws `Error("<description> failed")` on any non-zero exit or timeout.
  // Fix: the original discarded the underlying execSync error (exit code,
  // stderr, timeout flag); it is now preserved as `cause` (ES2022).
  try {
    execSync(command, { stdio: "inherit", env: process.env, timeout: 120000 });
    console.log(`[OK] ${description}`);
  } catch (error) {
    throw new Error(`${description} failed`, { cause: error });
  }
}
const jobToken = process.env.CAMPAIGN_RETRY_JOB_TOKEN?.trim() || "";
const healthToken = process.env.HEALTHCHECK_TOKEN?.trim() || "";
const webhookVerifyToken = process.env.WHATSAPP_WEBHOOK_VERIFY_TOKEN?.trim() || "";
// Optional bearer-token header fragments for curl; empty when no token is set.
// NOTE(review): tokens are interpolated directly into a shell command string —
// acceptable for trusted env vars, but confirm they can never contain quotes
// or shell metacharacters.
const healthHeaders = healthToken ? ` -H "Authorization: Bearer ${healthToken}"` : "";
const jobHeaders = jobToken ? ` -H "Authorization: Bearer ${jobToken}"` : "";
try {
  // Order matters: validate env/migrations first, then hit live endpoints.
  run("npm run -s ops:readiness", "Ops readiness");
  run(`curl -fsS ${healthHeaders} ${baseUrl}/api/health`, "Health endpoint");
  run(`curl -fsS ${jobHeaders} "${baseUrl}/api/jobs/campaign-retry?token=${encodeURIComponent(jobToken)}"`, "Campaign retry endpoint");
  if (webhookVerifyToken) {
    run(
      `curl -fsS "${baseUrl}/api/webhooks/whatsapp?hub.mode=subscribe&hub.verify_token=${encodeURIComponent(webhookVerifyToken)}&hub.challenge=ok"`,
      "Webhook verify probe"
    );
  } else {
    console.log("[SKIP] Webhook verify probe (WHATSAPP_WEBHOOK_VERIFY_TOKEN not set)");
  }
  console.log("\nSmoke checks passed");
} catch (error) {
  fail(error instanceof Error ? error.message : "Smoke checks failed");
}