Issues with data flow & consistency
@@ -30,7 +30,7 @@ export async function POST(req: Request) {
    data: {
      orgId: machine.orgId,
      machineId: machine.id,
      ts,
      ts,
      cycleCount: typeof c.cycle_count === "number" ? c.cycle_count : null,
      actualCycleTime: Number(c.actual_cycle_time),
      theoreticalCycleTime: c.theoretical_cycle_time != null ? Number(c.theoretical_cycle_time) : null,
@@ -41,6 +41,5 @@ export async function POST(req: Request) {
      scrapDelta: typeof c.scrap_delta === "number" ? c.scrap_delta : null,
    },
  });

  return NextResponse.json({ ok: true, id: row.id, ts: row.ts });
}
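
// Illustrative example (not part of this commit): a cycle payload `c` consistent with the fields read in
// the hunk above. Only cycle_count, actual_cycle_time, theoretical_cycle_time and scrap_delta are implied
// by the diff; the values are made up.
//   c = { cycle_count: 1523, actual_cycle_time: 15.2, theoretical_cycle_time: 14.0, scrap_delta: 0 }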
@@ -1,107 +1,153 @@
import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";

const normalizeType = (t: any) =>
  String(t ?? "")
    .trim()
    .toLowerCase()
    .replace(/_/g, "-");

const CANON_TYPE: Record<string, string> = {
  // Node-RED
  "production-stopped": "stop",
  "oee-drop": "oee-drop",
  "quality-spike": "quality-spike",
  "predictive-oee-decline": "predictive-oee-decline",
  "performance-degradation": "performance-degradation",

  // legacy / synonyms
  "macroparo": "macrostop",
  "macro-stop": "macrostop",
  "microparo": "microstop",
  "micro-paro": "microstop",
  "down": "stop",
};

const ALLOWED_TYPES = new Set([
  "slow-cycle",
  "microstop",
  "macrostop",
  "oee-drop",
  "quality-spike",
  "performance-degradation",
  "predictive-oee-decline",
]);

// thresholds for stop classification (tune later / move to machine config)
const MICROSTOP_SEC = 60;
const MACROSTOP_SEC = 300;
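
// Illustrative sketch (not part of this commit): the classification rule the ingestion loop below applies,
// factored into a helper. Durations at or above MACROSTOP_SEC become "macrostop", anything shorter becomes
// "microstop"; MICROSTOP_SEC is not consulted anywhere in this diff and appears to be kept for later tuning.
//   classifyStop(45)  -> "microstop"
//   classifyStop(600) -> "macrostop"
const classifyStop = (stopSec: number | null): "microstop" | "macrostop" =>
  stopSec != null && stopSec >= MACROSTOP_SEC ? "macrostop" : "microstop";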
export async function POST(req: Request) {
  const apiKey = req.headers.get("x-api-key");
  if (!apiKey) return NextResponse.json({ ok: false, error: "Missing api key" }, { status: 401 });
  if (!apiKey) {
    return NextResponse.json({ ok: false, error: "Missing api key" }, { status: 401 });
  }

  const body = await req.json().catch(() => null);
  if (!body?.machineId || !body?.event) {
    return NextResponse.json({ ok: false, error: "Invalid payload" }, { status: 400 });
  }

  const machine = await prisma.machine.findFirst({
    where: { id: String(body.machineId), apiKey },
    select: { id: true, orgId: true },
  });
  if (!machine) return NextResponse.json({ ok: false, error: "Unauthorized" }, { status: 401 });
  if (!machine) {
    return NextResponse.json({ ok: false, error: "Unauthorized" }, { status: 401 });
  }

  // Normalize to array (Node-RED sends array of anomalies)
  const rawEvent = body.event;
  const events = Array.isArray(rawEvent) ? rawEvent : [rawEvent];

  // Convert ms epoch -> Date if provided
  const created: { id: string; ts: Date; eventType: string }[] = [];
  const skipped: any[] = [];

  const rawEvent = body.event;
  const e = Array.isArray(rawEvent) ? rawEvent[0] : rawEvent;

  if (!e || typeof e !== "object") {
    return NextResponse.json({ ok: false, error: "Invalid event object" }, { status: 400 });
  }
  const rawType =
    e.eventType ?? e.anomaly_type ?? e.topic ?? body.topic ?? "";

  const normalizeType = (t: string) =>
    String(t)
      .trim()
      .toLowerCase()
      .replace(/_/g, "-");

  const typ = normalizeType(rawType);
  const sev = String(e.severity ?? "").trim().toLowerCase();

  // accept these types
  const ALLOWED_TYPES = new Set([
    "slow-cycle",
    "anomaly-detected",
    "performance-degradation",
    "scrap-spike",
    "down",
    "microstop",
  ]);

  if (!ALLOWED_TYPES.has(typ)) {
    return NextResponse.json({ ok: true, skipped: true, reason: "type_not_allowed", typ, sev }, { status: 200 });
  }

  // optional: severity enforcement only for SOME types (not slow-cycle)
  const NEEDS_HIGH_SEV = new Set(["down", "scrap-spike"]);
  const ALLOWED_SEVERITIES = new Set(["warning", "critical", "error"]);

  if (NEEDS_HIGH_SEV.has(typ) && !ALLOWED_SEVERITIES.has(sev)) {
    return NextResponse.json({ ok: true, skipped: true, reason: "severity_too_low", typ, sev }, { status: 200 });
  }

  // timestamp handling (support multiple field names)
  const tsMs =
    (typeof (e as any)?.timestamp === "number" && (e as any).timestamp) ||
    (typeof e?.data?.timestamp === "number" && e.data.timestamp) ||
    (typeof e?.data?.event_timestamp === "number" && e.data.event_timestamp) ||
    (typeof e?.data?.ts === "number" && e.data.ts) ||
    undefined;

  const ts = tsMs ? new Date(tsMs) : new Date(); // default to now if missing

  const title =
    String(e.title ?? "").trim() ||
    (typ === "slow-cycle" ? "Slow Cycle Detected" : "Event");

  const description = e.description
    ? String(e.description)
    : null;

  const row = await prisma.machineEvent.create({
    data: {
      orgId: machine.orgId,
      machineId: machine.id,
      ts,

      topic: String(e.topic ?? typ),
      eventType: typ, // ✅ store normalized type
      severity: sev || "info", // ✅ store normalized severity
      requiresAck: !!e.requires_ack,
      title,
      description,

      data: e.data ?? e,

      workOrderId:
        (e as any)?.work_order_id ? String((e as any).work_order_id)
        : e?.data?.work_order_id ? String(e.data.work_order_id)
        : null,
    },
  });

  return NextResponse.json({ ok: true, id: row.id, ts: row.ts });
  for (const ev of events) {
    if (!ev || typeof ev !== "object") {
      skipped.push({ reason: "invalid_event_object" });
      continue;
    }

    const rawType = (ev as any).eventType ?? (ev as any).anomaly_type ?? (ev as any).topic ?? body.topic ?? "";
    const typ0 = normalizeType(rawType);
    const typ = CANON_TYPE[typ0] ?? typ0;

    // Determine timestamp
    const tsMs =
      (typeof (ev as any)?.timestamp === "number" && (ev as any).timestamp) ||
      (typeof (ev as any)?.data?.timestamp === "number" && (ev as any).data.timestamp) ||
      (typeof (ev as any)?.data?.event_timestamp === "number" && (ev as any).data.event_timestamp) ||
      null;

    const ts = tsMs ? new Date(tsMs) : new Date();

    // Severity defaulting (do not skip on severity — store for audit)
    let sev = String((ev as any).severity ?? "").trim().toLowerCase();
    if (!sev) sev = "warning";

    // Stop classification -> microstop/macrostop
    let finalType = typ;
    if (typ === "stop") {
      const stopSec =
        (typeof (ev as any)?.data?.stoppage_duration_seconds === "number" && (ev as any).data.stoppage_duration_seconds) ||
        (typeof (ev as any)?.data?.stop_duration_seconds === "number" && (ev as any).data.stop_duration_seconds) ||
        null;

      if (stopSec != null) {
        finalType = stopSec >= MACROSTOP_SEC ? "macrostop" : "microstop";
      } else {
        // missing duration -> conservative
        finalType = "microstop";
      }
    }

    if (!ALLOWED_TYPES.has(finalType)) {
      skipped.push({ reason: "type_not_allowed", typ: finalType, sev });
      continue;
    }

    const title =
      String((ev as any).title ?? "").trim() ||
      (finalType === "slow-cycle" ? "Slow Cycle Detected" :
       finalType === "macrostop" ? "Macrostop Detected" :
       finalType === "microstop" ? "Microstop Detected" :
       "Event");

    const description = (ev as any).description ? String((ev as any).description) : null;

    // store full blob, ensure object
    const rawData = (ev as any).data ?? ev;
    const dataObj = typeof rawData === "string" ? (() => {
      try { return JSON.parse(rawData); } catch { return { raw: rawData }; }
    })() : rawData;

    const row = await prisma.machineEvent.create({
      data: {
        orgId: machine.orgId,
        machineId: machine.id,
        ts,
        topic: String((ev as any).topic ?? finalType),
        eventType: finalType,
        severity: sev,
        requiresAck: !!(ev as any).requires_ack,
        title,
        description,
        data: dataObj,
        workOrderId:
          (ev as any)?.work_order_id ? String((ev as any).work_order_id)
          : (ev as any)?.data?.work_order_id ? String((ev as any).data.work_order_id)
          : null,
        sku:
          (ev as any)?.sku ? String((ev as any).sku)
          : (ev as any)?.data?.sku ? String((ev as any).data.sku)
          : null,
      },
    });

    created.push({ id: row.id, ts: row.ts, eventType: row.eventType });
  }

  return NextResponse.json({ ok: true, createdCount: created.length, created, skippedCount: skipped.length, skipped });
}
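
// Usage sketch (illustrative, not part of this commit): a Node-RED style request the rewritten handler
// above accepts. The route path and the machineId/api-key values are assumptions; the header name, body
// shape and field names come from the code above.
//
//   await fetch("/api/machines/ingest", {
//     method: "POST",
//     headers: { "content-type": "application/json", "x-api-key": "<machine api key>" },
//     body: JSON.stringify({
//       machineId: "machine-123",
//       event: [
//         {
//           anomaly_type: "production_stopped", // normalized to "production-stopped", canonized to "stop"
//           severity: "critical",
//           data: { timestamp: Date.now(), stoppage_duration_seconds: 420 }, // >= MACROSTOP_SEC -> "macrostop"
//         },
//       ],
//     }),
//   });
//
// Expected response shape: { ok: true, createdCount: 1, created: [...], skippedCount: 0, skipped: [] }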
@@ -4,9 +4,26 @@ import { prisma } from "@/lib/prisma";
import { requireSession } from "@/lib/auth/requireSession";

function normalizeEvent(row: any) {
  // data can be object OR [object]
  // -----------------------------
  // 1) Parse row.data safely
  //    data may be:
  //    - object
  //    - array of objects
  //    - JSON string of either
  // -----------------------------
  const raw = row.data;
  const blob = Array.isArray(raw) ? raw[0] : raw;

  let parsed: any = raw;
  if (typeof raw === "string") {
    try {
      parsed = JSON.parse(raw);
    } catch {
      parsed = raw; // keep as string if not JSON
    }
  }

  // data can be object OR [object]
  const blob = Array.isArray(parsed) ? parsed[0] : parsed;

  // some payloads nest details under blob.data
  const inner = blob?.data ?? blob ?? {};
@@ -17,21 +34,71 @@ function normalizeEvent(row: any) {
      .toLowerCase()
      .replace(/_/g, "-");

  // Prefer the DB columns if they are meaningful
  const fromDbType = row.eventType && row.eventType !== "unknown" ? row.eventType : null;
  const fromBlobType = blob?.anomaly_type ?? blob?.eventType ?? blob?.topic ?? inner?.anomaly_type ?? inner?.eventType ?? null;
  // -----------------------------
  // 2) Alias mapping (canonical types)
  // -----------------------------
  const ALIAS: Record<string, string> = {
    // Spanish / synonyms
    macroparo: "macrostop",
    "macro-stop": "macrostop",
    macro_stop: "macrostop",

    // infer slow-cycle if the signature exists
    microparo: "microstop",
    "micro-paro": "microstop",
    micro_stop: "microstop",

    // Node-RED types
    "production-stopped": "stop", // we'll classify to micro/macro below

    // legacy / generic
    down: "stop",
  };

  // -----------------------------
  // 3) Determine event type from DB or blob
  // -----------------------------
  const fromDbType =
    row.eventType && row.eventType !== "unknown" ? row.eventType : null;

  const fromBlobType =
    blob?.anomaly_type ??
    blob?.eventType ??
    blob?.topic ??
    inner?.anomaly_type ??
    inner?.eventType ??
    null;

  // infer slow-cycle if signature exists
  const inferredType =
    fromDbType ??
    fromBlobType ??
    ((inner?.actual_cycle_time && inner?.theoretical_cycle_time) || (blob?.actual_cycle_time && blob?.theoretical_cycle_time)
    ((inner?.actual_cycle_time && inner?.theoretical_cycle_time) ||
      (blob?.actual_cycle_time && blob?.theoretical_cycle_time)
      ? "slow-cycle"
      : "unknown");

  const eventTypeRaw = normalizeType(inferredType);
  let eventType = ALIAS[eventTypeRaw] ?? eventTypeRaw;

  const eventType = normalizeType(inferredType);
  // -----------------------------
  // 4) Optional: classify "stop" into micro/macro based on duration if present
  //    (keeps old rows usable even if they stored production-stopped)
  // -----------------------------
  if (eventType === "stop") {
    const stopSec =
      (typeof inner?.stoppage_duration_seconds === "number" && inner.stoppage_duration_seconds) ||
      (typeof blob?.stoppage_duration_seconds === "number" && blob.stoppage_duration_seconds) ||
      (typeof inner?.stop_duration_seconds === "number" && inner.stop_duration_seconds) ||
      null;

    // tune these thresholds to match your MES spec
    const MACROSTOP_SEC = 300; // 5 min
    eventType = stopSec != null && stopSec >= MACROSTOP_SEC ? "macrostop" : "microstop";
  }
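
  // Worked example (illustrative): how a legacy row flows through steps 2-4 above.
  //   row.eventType = "production_stopped", blob.data.stoppage_duration_seconds = 90
  //   normalizeType -> "production-stopped"; ALIAS -> "stop"; 90 < 300 -> eventType = "microstop"
  //   With no duration present at all, the row also falls back to "microstop".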
  // -----------------------------
  // 5) Severity, title, description, timestamp
  // -----------------------------
  const severity =
    String(
      (row.severity && row.severity !== "info" ? row.severity : null) ??
@@ -55,10 +122,10 @@ function normalizeEvent(row: any) {
    blob?.description ??
    inner?.description ??
    (eventType === "slow-cycle" &&
      inner?.actual_cycle_time &&
      inner?.theoretical_cycle_time &&
      inner?.delta_percent != null
      ? `Cycle took ${Number(inner.actual_cycle_time).toFixed(1)}s (+${inner.delta_percent}% vs ${Number(inner.theoretical_cycle_time).toFixed(1)}s objetivo)`
      (inner?.actual_cycle_time ?? blob?.actual_cycle_time) &&
      (inner?.theoretical_cycle_time ?? blob?.theoretical_cycle_time) &&
      (inner?.delta_percent ?? blob?.delta_percent) != null
      ? `Cycle took ${Number(inner?.actual_cycle_time ?? blob?.actual_cycle_time).toFixed(1)}s (+${Number(inner?.delta_percent ?? blob?.delta_percent)}% vs ${Number(inner?.theoretical_cycle_time ?? blob?.theoretical_cycle_time).toFixed(1)}s objetivo)`
      : null);

  const ts =
@@ -161,24 +228,54 @@ export async function GET(

  const ALLOWED_TYPES = new Set([
    "slow-cycle",
    "anomaly-detected",
    "performance-degradation",
    "scrap-spike",
    "down",
    "microstop",
    "macrostop",
    "oee-drop",
    "quality-spike",
    "performance-degradation",
    "predictive-oee-decline",
  ]);

  const events = normalized
    .filter((e) => ALLOWED_TYPES.has(e.eventType))
    // keep slow-cycle even if severity is info, otherwise require warning/critical/error
    .filter((e) => e.eventType === "slow-cycle" || ["warning", "critical", "error"].includes(e.severity))
    .filter((e) =>
      ["slow-cycle", "microstop", "macrostop"].includes(e.eventType) ||
      ["warning", "critical", "error"].includes(e.severity)
    )
    .slice(0, 30);
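
  // Worked example (illustrative): under the new filter above, a "microstop" row with severity "info"
  // is kept, while an "oee-drop" row with severity "info" is dropped; every row must still pass the
  // ALLOWED_TYPES check first.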
  // ---- cycles window ----
  const url = new URL(_req.url);
  const windowSec = Number(url.searchParams.get("windowSec") ?? "10800"); // default 3h

  const latestKpi = machine.kpiSnapshots[0] ?? null;

  // If KPI cycleTime missing, fallback to DB cycles (we fetch 1 first)
  const latestCycleForIdeal = await prisma.machineCycle.findFirst({
    where: { orgId: session.orgId, machineId },
    orderBy: { ts: "desc" },
    select: { theoreticalCycleTime: true },
  });

  const effectiveCycleTime =
    latestKpi?.cycleTime ??
    latestCycleForIdeal?.theoreticalCycleTime ??
    null;

  // Estimate how many cycles we need to cover the window.
  // Add buffer so the chart doesn’t look “tight”.
  const estCycleSec = Math.max(1, Number(effectiveCycleTime ?? 14));
  const needed = Math.ceil(windowSec / estCycleSec) + 50;

  // Safety cap to avoid crazy payloads
  const takeCycles = Math.min(5000, Math.max(200, needed));
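
  // Worked example (illustrative) of the sizing above with the default 3 h window:
  //   windowSec = 10800, no effectiveCycleTime -> estCycleSec = 14
  //   needed = ceil(10800 / 14) + 50 = 772 + 50 = 822 -> takeCycles = min(5000, max(200, 822)) = 822
  //   A very fast 2 s cycle would ask for 5450 rows and is clamped to the 5000-row safety cap.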
  const rawCycles = await prisma.machineCycle.findMany({
    where: { orgId: session.orgId, machineId },
    orderBy: { ts: "desc" },
    take: 200,
    take: takeCycles,
    select: {
      ts: true,
      cycleCount: true,
@@ -194,23 +291,14 @@ const cycles = rawCycles
    .slice()
    .reverse()
    .map((c) => ({
      ts: c.ts, // keep Date for “time ago” UI
      t: c.ts.getTime(), // numeric x-axis for charts
      ts: c.ts,
      t: c.ts.getTime(),
      cycleCount: c.cycleCount ?? null,
      actual: c.actualCycleTime, // rename to what chart expects
      actual: c.actualCycleTime,
      ideal: c.theoreticalCycleTime ?? null,
      workOrderId: c.workOrderId ?? null,
      sku: c.sku ?? null,
    }
  ));

  const latestKpi = machine.kpiSnapshots[0] ?? null;

  // rawCycles is ordered DESC, so [0] is the most recent cycle row
  const latestCycleIdeal = rawCycles[0]?.theoreticalCycleTime ?? null;

  // REAL effective value (not mock): prefer KPI if present, else fallback to cycles table
  const effectiveCycleTime = latestKpi?.cycleTime ?? latestCycleIdeal ?? null;
  }));
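
  // Illustrative example (not part of this commit): one element of `cycles` as the chart consumes it,
  // per the mapping above. Field names come from the code; the values are made up.
  //   { ts: Date, t: 1718000000000, cycleCount: 1523, actual: 15.2, ideal: 14, workOrderId: null, sku: null }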