Issues with data flow & consistency

Marcelo
2025-12-22 14:36:40 +00:00
parent ffc39a5c90
commit 945ff2dc09
4 changed files with 414 additions and 121 deletions

View File

@@ -110,7 +110,7 @@ export default function MachineDetailClient() {
async function load() {
try {
const res = await fetch(`/api/machines/${machineId}?windowSec=10800`, {
cache: "no-store",
credentials: "include",
});
@@ -242,6 +242,77 @@ export default function MachineDetailClient() {
);
}
function MachineActivityTimeline({
segments,
windowSec,
}: {
segments: TimelineSeg[];
windowSec: number;
}) {
return (
<div className="rounded-2xl border border-white/10 bg-white/5 p-5">
<div className="flex items-start justify-between gap-4">
<div>
<div className="text-sm font-semibold text-white">Machine Activity Timeline</div>
<div className="mt-1 text-xs text-zinc-400">Análisis en tiempo real de ciclos de producción</div>
</div>
<div className="text-xs text-zinc-400">{windowSec}s</div>
</div>
<div className="mt-4 flex flex-wrap items-center gap-4 text-xs text-zinc-300">
{(["normal","slow","microstop","macrostop"] as const).map((k) => (
<div key={k} className="flex items-center gap-2">
<span className="h-2.5 w-2.5 rounded-full" style={{ backgroundColor: BUCKET[k].dot }} />
<span>{BUCKET[k].label}</span>
</div>
))}
</div>
<div className="mt-4 rounded-2xl border border-white/10 bg-black/25 p-4">
{/* time marks */}
<div className="mb-2 flex justify-between text-[11px] text-zinc-500">
<span>0s</span>
<span>3h</span>
</div>
{/* strip */}
<div className="flex h-14 w-full overflow-hidden rounded-2xl">
{segments.length === 0 ? (
<div className="flex h-full w-full items-center justify-center text-xs text-zinc-400">
No timeline data yet.
</div>
) : (
segments.map((seg, idx) => {
const wPct = Math.max(0.25, (seg.durationSec / windowSec) * 100); // min width for visibility
const meta = BUCKET[seg.state];
const glow =
seg.state === "microstop" || seg.state === "macrostop"
? `0 0 22px ${meta.glow}`
: `0 0 12px ${meta.glow}`;
return (
<div
key={`${seg.start}-${seg.end}-${idx}`}
title={`${meta.label}: ${seg.durationSec.toFixed(1)}s`}
className="h-full"
style={{
width: `${wPct}%`,
background: meta.dot,
boxShadow: glow,
opacity: 0.95,
}}
/>
);
})
)}
</div>
</div>
</div>
);
}
function Modal({
open,
onClose,
@@ -409,6 +480,91 @@ export default function MachineDetailClient() {
return { rows, total };
}, [cycleDerived.mapped]);
type TimelineState = "normal" | "slow" | "microstop" | "macrostop";
type TimelineSeg = {
start: number; // ms
end: number; // ms
durationSec: number;
state: TimelineState;
};
function classifyGap(dtSec: number, idealSec: number): TimelineState {
const SLOW_X = 1.5;
const STOP_X = 3.0;
const MACRO_X = 10.0;
if (dtSec <= idealSec * SLOW_X) return "normal";
if (dtSec <= idealSec * STOP_X) return "slow";
if (dtSec <= idealSec * MACRO_X) return "microstop";
return "macrostop";
}
function mergeAdjacent(segs: TimelineSeg[]): TimelineSeg[] {
if (!segs.length) return [];
const out: TimelineSeg[] = [segs[0]];
for (let i = 1; i < segs.length; i++) {
const prev = out[out.length - 1];
const cur = segs[i];
// merge if same state and touching
if (cur.state === prev.state && cur.start <= prev.end + 1) {
prev.end = Math.max(prev.end, cur.end);
prev.durationSec = (prev.end - prev.start) / 1000;
} else {
out.push(cur);
}
}
return out;
}
const timeline = useMemo(() => {
const rows = cycles ?? [];
if (rows.length < 2) {
return { windowSec: 10800, segments: [] as TimelineSeg[], start: null as number | null, end: null as number | null };
}
// window: last 3 hours (10800 s)
const windowSec = 10800;
const end = rows[rows.length - 1].t;
const start = end - windowSec * 1000;
// keep cycles that overlap window (need one cycle before start to build first interval)
const idxFirst = Math.max(
0,
rows.findIndex(r => r.t >= start) - 1
);
const sliced = rows.slice(idxFirst);
const segs: TimelineSeg[] = [];
for (let i = 1; i < sliced.length; i++) {
const prev = sliced[i - 1];
const cur = sliced[i];
const s = Math.max(prev.t, start);
const e = Math.min(cur.t, end);
if (e <= s) continue;
const dtSec = (cur.t - prev.t) / 1000;
const ideal = (cur.ideal ?? prev.ideal ?? cycleTarget ?? 0) as number;
if (!ideal || ideal <= 0) continue;
const state = classifyGap(dtSec, ideal);
segs.push({
start: s,
end: e,
durationSec: (e - s) / 1000,
state,
});
}
const segments = mergeAdjacent(segs);
return { windowSec, segments, start, end };
}, [cycles, cycleTarget]);
return (
<div className="p-6">
@@ -471,6 +627,10 @@ export default function MachineDetailClient() {
</div>
</div>
<div className="mt-6">
<MachineActivityTimeline segments={timeline.segments} windowSec={timeline.windowSec} />
</div>
{/* Work order + recent events */}
<div className="mt-6 grid grid-cols-1 gap-4 xl:grid-cols-3">
<div className="rounded-2xl border border-white/10 bg-white/5 p-5 xl:col-span-1">

View File

@@ -30,7 +30,7 @@ export async function POST(req: Request) {
data: {
orgId: machine.orgId,
machineId: machine.id,
ts,
cycleCount: typeof c.cycle_count === "number" ? c.cycle_count : null,
actualCycleTime: Number(c.actual_cycle_time),
theoreticalCycleTime: c.theoretical_cycle_time != null ? Number(c.theoretical_cycle_time) : null,
@@ -41,6 +41,5 @@ export async function POST(req: Request) {
scrapDelta: typeof c.scrap_delta === "number" ? c.scrap_delta : null,
},
});
return NextResponse.json({ ok: true, id: row.id, ts: row.ts });
}

View File

@@ -1,107 +1,153 @@
import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";
const normalizeType = (t: any) =>
String(t ?? "")
.trim()
.toLowerCase()
.replace(/_/g, "-");
const CANON_TYPE: Record<string, string> = {
// Node-RED
"production-stopped": "stop",
"oee-drop": "oee-drop",
"quality-spike": "quality-spike",
"predictive-oee-decline": "predictive-oee-decline",
"performance-degradation": "performance-degradation",
// legacy / synonyms
"macroparo": "macrostop",
"macro-stop": "macrostop",
"microparo": "microstop",
"micro-paro": "microstop",
"down": "stop",
};
const ALLOWED_TYPES = new Set([
"slow-cycle",
"microstop",
"macrostop",
"oee-drop",
"quality-spike",
"performance-degradation",
"predictive-oee-decline",
]);
// thresholds for stop classification (tune later / move to machine config)
const MICROSTOP_SEC = 60;
const MACROSTOP_SEC = 300;
export async function POST(req: Request) {
const apiKey = req.headers.get("x-api-key");
if (!apiKey) {
return NextResponse.json({ ok: false, error: "Missing api key" }, { status: 401 });
}
const body = await req.json().catch(() => null);
if (!body?.machineId || !body?.event) {
return NextResponse.json({ ok: false, error: "Invalid payload" }, { status: 400 });
}
const machine = await prisma.machine.findFirst({
where: { id: String(body.machineId), apiKey },
select: { id: true, orgId: true },
});
if (!machine) {
return NextResponse.json({ ok: false, error: "Unauthorized" }, { status: 401 });
}
// Normalize to array (Node-RED sends array of anomalies)
const rawEvent = body.event;
const events = Array.isArray(rawEvent) ? rawEvent : [rawEvent];
const created: { id: string; ts: Date; eventType: string }[] = [];
const skipped: any[] = [];
for (const ev of events) {
if (!ev || typeof ev !== "object") {
skipped.push({ reason: "invalid_event_object" });
continue;
}
const rawType = (ev as any).eventType ?? (ev as any).anomaly_type ?? (ev as any).topic ?? body.topic ?? "";
const typ0 = normalizeType(rawType);
const typ = CANON_TYPE[typ0] ?? typ0;
// Determine timestamp
const tsMs =
(typeof (ev as any)?.timestamp === "number" && (ev as any).timestamp) ||
(typeof (ev as any)?.data?.timestamp === "number" && (ev as any).data.timestamp) ||
(typeof (ev as any)?.data?.event_timestamp === "number" && (ev as any).data.event_timestamp) ||
null;
const ts = tsMs ? new Date(tsMs) : new Date();
// Severity defaulting (do not skip on severity — store for audit)
let sev = String((ev as any).severity ?? "").trim().toLowerCase();
if (!sev) sev = "warning";
// Stop classification -> microstop/macrostop
let finalType = typ;
if (typ === "stop") {
const stopSec =
(typeof (ev as any)?.data?.stoppage_duration_seconds === "number" && (ev as any).data.stoppage_duration_seconds) ||
(typeof (ev as any)?.data?.stop_duration_seconds === "number" && (ev as any).data.stop_duration_seconds) ||
null;
if (stopSec != null) {
finalType = stopSec >= MACROSTOP_SEC ? "macrostop" : "microstop";
} else {
// missing duration -> conservative
finalType = "microstop";
}
}
if (!ALLOWED_TYPES.has(finalType)) {
skipped.push({ reason: "type_not_allowed", typ: finalType, sev });
continue;
}
const title =
String((ev as any).title ?? "").trim() ||
(finalType === "slow-cycle" ? "Slow Cycle Detected" :
finalType === "macrostop" ? "Macrostop Detected" :
finalType === "microstop" ? "Microstop Detected" :
"Event");
const description = (ev as any).description ? String((ev as any).description) : null;
// store full blob, ensure object
const rawData = (ev as any).data ?? ev;
const dataObj = typeof rawData === "string" ? (() => {
try { return JSON.parse(rawData); } catch { return { raw: rawData }; }
})() : rawData;
const row = await prisma.machineEvent.create({
data: {
orgId: machine.orgId,
machineId: machine.id,
ts,
topic: String((ev as any).topic ?? finalType),
eventType: finalType,
severity: sev,
requiresAck: !!(ev as any).requires_ack,
title,
description,
data: dataObj,
workOrderId:
(ev as any)?.work_order_id ? String((ev as any).work_order_id)
: (ev as any)?.data?.work_order_id ? String((ev as any).data.work_order_id)
: null,
sku:
(ev as any)?.sku ? String((ev as any).sku)
: (ev as any)?.data?.sku ? String((ev as any).data.sku)
: null,
},
});
created.push({ id: row.id, ts: row.ts, eventType: row.eventType });
}
return NextResponse.json({ ok: true, createdCount: created.length, created, skippedCount: skipped.length, skipped });
}
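
For reference, a sketch of the kind of Node-RED batch payload the rewritten handler accepts; the ingest URL, api key, and machine id are placeholders, and the field names follow the code above:

// Illustrative only: "production_stopped" normalizes to "stop" and, at 420 s >= MACROSTOP_SEC (300 s), is stored as "macrostop";
// "slow_cycle" normalizes to "slow-cycle" and receives the default "warning" severity.
const EVENTS_INGEST_URL = "<events ingest endpoint>"; // placeholder, not taken from the diff
await fetch(EVENTS_INGEST_URL, {
  method: "POST",
  headers: { "content-type": "application/json", "x-api-key": "<machine api key>" },
  body: JSON.stringify({
    machineId: "<machine id>",
    event: [
      { anomaly_type: "production_stopped", severity: "critical", data: { stoppage_duration_seconds: 420, timestamp: Date.now() } },
      { anomaly_type: "slow_cycle", data: { actual_cycle_time: 18.2, theoretical_cycle_time: 14.0, timestamp: Date.now() } },
    ],
  }),
});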

View File

@@ -4,9 +4,26 @@ import { prisma } from "@/lib/prisma";
import { requireSession } from "@/lib/auth/requireSession";
function normalizeEvent(row: any) {
// -----------------------------
// 1) Parse row.data safely
// data may be:
// - object
// - array of objects
// - JSON string of either
// -----------------------------
const raw = row.data;
let parsed: any = raw;
if (typeof raw === "string") {
try {
parsed = JSON.parse(raw);
} catch {
parsed = raw; // keep as string if not JSON
}
}
// data can be object OR [object]
const blob = Array.isArray(parsed) ? parsed[0] : parsed;
// some payloads nest details under blob.data
const inner = blob?.data ?? blob ?? {};
@@ -17,21 +34,71 @@ function normalizeEvent(row: any) {
.toLowerCase()
.replace(/_/g, "-");
// -----------------------------
// 2) Alias mapping (canonical types)
// -----------------------------
const ALIAS: Record<string, string> = {
// Spanish / synonyms
macroparo: "macrostop",
"macro-stop": "macrostop",
macro_stop: "macrostop",
microparo: "microstop",
"micro-paro": "microstop",
micro_stop: "microstop",
// Node-RED types
"production-stopped": "stop", // we'll classify to micro/macro below
// legacy / generic
down: "stop",
};
// -----------------------------
// 3) Determine event type from DB or blob
// -----------------------------
const fromDbType =
row.eventType && row.eventType !== "unknown" ? row.eventType : null;
const fromBlobType =
blob?.anomaly_type ??
blob?.eventType ??
blob?.topic ??
inner?.anomaly_type ??
inner?.eventType ??
null;
// infer slow-cycle if signature exists
const inferredType =
fromDbType ??
fromBlobType ??
((inner?.actual_cycle_time && inner?.theoretical_cycle_time) ||
(blob?.actual_cycle_time && blob?.theoretical_cycle_time)
? "slow-cycle"
: "unknown");
const eventTypeRaw = normalizeType(inferredType);
let eventType = ALIAS[eventTypeRaw] ?? eventTypeRaw;
// -----------------------------
// 4) Optional: classify "stop" into micro/macro based on duration if present
// (keeps old rows usable even if they stored production-stopped)
// -----------------------------
if (eventType === "stop") {
const stopSec =
(typeof inner?.stoppage_duration_seconds === "number" && inner.stoppage_duration_seconds) ||
(typeof blob?.stoppage_duration_seconds === "number" && blob.stoppage_duration_seconds) ||
(typeof inner?.stop_duration_seconds === "number" && inner.stop_duration_seconds) ||
null;
// tune these thresholds to match your MES spec
const MACROSTOP_SEC = 300; // 5 min
eventType = stopSec != null && stopSec >= MACROSTOP_SEC ? "macrostop" : "microstop";
}
// -----------------------------
// 5) Severity, title, description, timestamp
// -----------------------------
const severity =
String(
(row.severity && row.severity !== "info" ? row.severity : null) ??
@@ -55,10 +122,10 @@ function normalizeEvent(row: any) {
blob?.description ??
inner?.description ??
(eventType === "slow-cycle" &&
(inner?.actual_cycle_time ?? blob?.actual_cycle_time) &&
(inner?.theoretical_cycle_time ?? blob?.theoretical_cycle_time) &&
(inner?.delta_percent ?? blob?.delta_percent) != null
? `Cycle took ${Number(inner?.actual_cycle_time ?? blob?.actual_cycle_time).toFixed(1)}s (+${Number(inner?.delta_percent ?? blob?.delta_percent)}% vs ${Number(inner?.theoretical_cycle_time ?? blob?.theoretical_cycle_time).toFixed(1)}s objetivo)`
: null);
const ts = const ts =
@@ -161,24 +228,54 @@ export async function GET(
const ALLOWED_TYPES = new Set([
"slow-cycle",
"microstop",
"macrostop",
"oee-drop",
"quality-spike",
"performance-degradation",
"predictive-oee-decline",
]);
const events = normalized
.filter((e) => ALLOWED_TYPES.has(e.eventType))
// keep slow-cycle/microstop/macrostop even if severity is info, otherwise require warning/critical/error
.filter((e) =>
["slow-cycle", "microstop", "macrostop"].includes(e.eventType) ||
["warning", "critical", "error"].includes(e.severity)
)
.slice(0, 30);
// ---- cycles window ----
const url = new URL(_req.url);
const windowSec = Number(url.searchParams.get("windowSec") ?? "10800"); // default 3h
const latestKpi = machine.kpiSnapshots[0] ?? null;
// If the KPI cycleTime is missing, fall back to the latest cycle row in the DB (fetched just below)
const latestCycleForIdeal = await prisma.machineCycle.findFirst({
where: { orgId: session.orgId, machineId },
orderBy: { ts: "desc" },
select: { theoreticalCycleTime: true },
});
const effectiveCycleTime =
latestKpi?.cycleTime ??
latestCycleForIdeal?.theoreticalCycleTime ??
null;
// Estimate how many cycles we need to cover the window.
// Add buffer so the chart doesn't look "tight".
const estCycleSec = Math.max(1, Number(effectiveCycleTime ?? 14));
const needed = Math.ceil(windowSec / estCycleSec) + 50;
// Safety cap to avoid crazy payloads
const takeCycles = Math.min(5000, Math.max(200, needed));
const rawCycles = await prisma.machineCycle.findMany({
where: { orgId: session.orgId, machineId },
orderBy: { ts: "desc" },
take: takeCycles,
select: {
ts: true,
cycleCount: true,
@@ -194,23 +291,14 @@ const cycles = rawCycles
.slice()
.reverse()
.map((c) => ({
ts: c.ts,
t: c.ts.getTime(),
cycleCount: c.cycleCount ?? null,
actual: c.actualCycleTime,
ideal: c.theoreticalCycleTime ?? null,
workOrderId: c.workOrderId ?? null,
sku: c.sku ?? null,
}));
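
Worked numbers for the new cycle-window sizing (illustrative only, assuming the default 3 h window and a 14 s effective cycle time):

// ceil(10800 / 14) = 772 estimated cycles; 772 + 50 buffer = 822; clamped to [200, 5000] -> take 822 rows.
const windowSec = 10800;
const estCycleSec = Math.max(1, 14);
const needed = Math.ceil(windowSec / estCycleSec) + 50;
const takeCycles = Math.min(5000, Math.max(200, needed));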