This commit is contained in:
Marcelo
2026-04-29 07:13:42 +00:00
parent 62169b163c
commit 5e7ddaa0db
10 changed files with 679 additions and 104 deletions

View File

@@ -221,6 +221,16 @@ const WORK_ORDER_TEMPLATE_HEADERS = [
"Active Cavities", "Active Cavities",
] as const; ] as const;
// Example row shipped in the downloadable work-order template, aligned
// column-for-column with WORK_ORDER_TEMPLATE_HEADERS. The first cell is an
// instruction to the user (Spanish: "delete this row when uploading the
// excel"); the original string had mismatched delimiters ("*…)") which is
// fixed here to balanced parentheses.
const WORK_ORDER_TEMPLATE_EXAMPLE_ROW = [
  "(borra esta fila al subir excel)",
  "SKU-12345",
  35,
  10000,
  "MOLD-01",
  8,
  8,
] as const;
function normalizeKey(value: string) { function normalizeKey(value: string) {
return value.toLowerCase().replace(/[^a-z0-9]/g, ""); return value.toLowerCase().replace(/[^a-z0-9]/g, "");
} }
@@ -654,7 +664,10 @@ export default function MachineDetailClient() {
async function downloadWorkOrderTemplate() { async function downloadWorkOrderTemplate() {
const xlsx = await import("xlsx"); const xlsx = await import("xlsx");
const wb = xlsx.utils.book_new(); const wb = xlsx.utils.book_new();
const ws = xlsx.utils.aoa_to_sheet([Array.from(WORK_ORDER_TEMPLATE_HEADERS)]); const ws = xlsx.utils.aoa_to_sheet([
Array.from(WORK_ORDER_TEMPLATE_HEADERS),
Array.from(WORK_ORDER_TEMPLATE_EXAMPLE_ROW),
]);
xlsx.utils.book_append_sheet(wb, ws, "Work Orders"); xlsx.utils.book_append_sheet(wb, ws, "Work Orders");
const wbout = xlsx.write(wb, { bookType: "xlsx", type: "array" }); const wbout = xlsx.write(wb, { bookType: "xlsx", type: "array" });
const blob = new Blob([wbout], { const blob = new Blob([wbout], {

View File

@@ -21,7 +21,7 @@ type SimpleTooltipProps<T> = {
label?: string | number; label?: string | number;
}; };
type ChartPoint = { ts: string; label: string; value: number }; type ChartPoint = { ts: string; label: string; value: number | null };
type CycleHistogramRow = { type CycleHistogramRow = {
label: string; label: string;
count: number; count: number;
@@ -135,7 +135,14 @@ export default function ReportsCharts({
"OEE", "OEE",
]} ]}
/> />
<Line type="monotone" dataKey="value" stroke="#34d399" dot={false} strokeWidth={2} /> <Line
type="linear"
dataKey="value"
stroke="#34d399"
dot={false}
strokeWidth={2}
connectNulls={false}
/>
</LineChart> </LineChart>
</ResponsiveContainer> </ResponsiveContainer>
) : ( ) : (

View File

@@ -29,7 +29,7 @@ type ReportDowntime = {
oeeDropCount: number; oeeDropCount: number;
}; };
type ReportTrendPoint = { t: string; v: number }; type ReportTrendPoint = { t: string; v: number | null };
type ReportPayload = { type ReportPayload = {
summary: ReportSummary; summary: ReportSummary;
@@ -78,6 +78,31 @@ function downsample<T>(rows: T[], max: number) {
return rows.filter((_, idx) => idx % step === 0); return rows.filter((_, idx) => idx % step === 0);
} }
/**
 * Downsample a trend series to roughly `max` points while keeping the
 * boundaries of every null/non-null run, so rendered charts preserve
 * their visible gaps (e.g. non-production windows).
 *
 * Returns the input array untouched when it already fits within `max`.
 * Note: the result can exceed `max` slightly, since transition indices
 * are kept in addition to the evenly strided sample — that is intended.
 */
function downsampleTrendPreserveGaps(rows: ReportTrendPoint[], max: number) {
  if (rows.length <= max) return rows;

  const stride = Math.ceil(rows.length / max);

  // Always keep both ends of the series, plus an even stride across it.
  const keep = new Set<number>([0, rows.length - 1]);
  for (let i = 0; i < rows.length; i += stride) keep.add(i);

  // Keep both sides of every null/non-null transition so chart gaps
  // remain visible after downsampling.
  for (let i = 1; i < rows.length; i += 1) {
    const wasNull = rows[i - 1]?.v == null;
    const isNull = rows[i]?.v == null;
    if (wasNull !== isNull) {
      keep.add(i - 1);
      keep.add(i);
    }
  }

  // Emit the kept points in their original order, dropping any index
  // that (defensively) resolves to a missing row.
  const ordered = [...keep].sort((a, b) => a - b);
  const result: ReportTrendPoint[] = [];
  for (const i of ordered) {
    const row = rows[i];
    if (row != null) result.push(row);
  }
  return result;
}
function formatTickLabel(ts: string, range: RangeKey) { function formatTickLabel(ts: string, range: RangeKey) {
const d = new Date(ts); const d = new Date(ts);
if (Number.isNaN(d.getTime())) return ts; if (Number.isNaN(d.getTime())) return ts;
@@ -107,7 +132,7 @@ function ReportsChartsSkeleton() {
} }
function buildCsv(report: ReportPayload, t: Translator) { function buildCsv(report: ReportPayload, t: Translator) {
const rows = new Map<string, Record<string, string | number>>(); const rows = new Map<string, Record<string, string | number | null>>();
const addSeries = (series: ReportTrendPoint[], key: string) => { const addSeries = (series: ReportTrendPoint[], key: string) => {
for (const p of series) { for (const p of series) {
const row = rows.get(p.t) ?? { timestamp: p.t }; const row = rows.get(p.t) ?? { timestamp: p.t };
@@ -414,7 +439,7 @@ export default function ReportsPageClient({
const oeeSeries = useMemo(() => { const oeeSeries = useMemo(() => {
const rows = trend?.oee ?? []; const rows = trend?.oee ?? [];
const trimmed = downsample(rows, 600); const trimmed = downsampleTrendPreserveGaps(rows, 600);
return trimmed.map((p) => ({ return trimmed.map((p) => ({
ts: p.t, ts: p.t,
label: formatTickLabel(p.t, range), label: formatTickLabel(p.t, range),

View File

@@ -3,6 +3,13 @@ import { prisma } from "@/lib/prisma";
import { requireSession } from "@/lib/auth/requireSession"; import { requireSession } from "@/lib/auth/requireSession";
import { coerceDowntimeRange, rangeToStart } from "@/lib/analytics/downtimeRange"; import { coerceDowntimeRange, rangeToStart } from "@/lib/analytics/downtimeRange";
import type { Prisma } from "@prisma/client"; import type { Prisma } from "@prisma/client";
import {
applyDowntimeFilters,
loadDowntimeShiftContext,
normalizeMicrostopLtMin,
normalizeShiftFilter,
resolvePlannedFilter,
} from "@/lib/analytics/downtimeFilters";
const bad = (status: number, error: string) => const bad = (status: number, error: string) =>
NextResponse.json({ ok: false, error }, { status }); NextResponse.json({ ok: false, error }, { status });
@@ -26,6 +33,9 @@ export async function GET(req: Request) {
const machineId = url.searchParams.get("machineId"); // optional const machineId = url.searchParams.get("machineId"); // optional
const reasonCode = url.searchParams.get("reasonCode"); // optional const reasonCode = url.searchParams.get("reasonCode"); // optional
const includeMoldChange = url.searchParams.get("includeMoldChange") === "true"; const includeMoldChange = url.searchParams.get("includeMoldChange") === "true";
const planned = resolvePlannedFilter(url.searchParams.get("planned"), includeMoldChange);
const shift = normalizeShiftFilter(url.searchParams.get("shift"));
const microstopLtMin = normalizeMicrostopLtMin(url.searchParams.get("microstopLtMin"));
const limitRaw = url.searchParams.get("limit"); const limitRaw = url.searchParams.get("limit");
const limit = Math.min(Math.max(Number(limitRaw || 200), 1), 500); const limit = Math.min(Math.max(Number(limitRaw || 200), 1), 500);
@@ -50,7 +60,6 @@ export async function GET(req: Request) {
orgId, orgId,
kind: "downtime", kind: "downtime",
episodeId: { not: null }, episodeId: { not: null },
...(includeMoldChange ? {} : { reasonCode: { not: "MOLD_CHANGE" } }),
capturedAt: { capturedAt: {
gte: start, gte: start,
...(beforeDate ? { lt: beforeDate } : {}), ...(beforeDate ? { lt: beforeDate } : {}),
@@ -59,10 +68,11 @@ export async function GET(req: Request) {
...(reasonCode ? { reasonCode } : {}), ...(reasonCode ? { reasonCode } : {}),
}; };
const rows = await prisma.reasonEntry.findMany({ const scanTake = Math.min(Math.max(limit * 8, 1000), 5000);
const rowsRaw = await prisma.reasonEntry.findMany({
where, where,
orderBy: { capturedAt: "desc" }, orderBy: { capturedAt: "desc" },
take: limit, take: scanTake,
select: { select: {
id: true, id: true,
episodeId: true, episodeId: true,
@@ -80,6 +90,14 @@ export async function GET(req: Request) {
}, },
}); });
const shiftContext = shift === "all" ? null : await loadDowntimeShiftContext(orgId);
const rows = applyDowntimeFilters(rowsRaw, {
planned,
shift,
microstopLtMin,
shiftContext,
}).slice(0, limit);
const events = rows.map((r) => { const events = rows.map((r) => {
const startAt = r.capturedAt; const startAt = r.capturedAt;
const endAt = const endAt =
@@ -116,7 +134,11 @@ export async function GET(req: Request) {
}); });
const nextBefore = const nextBefore =
events.length > 0 ? events[events.length - 1]?.capturedAt ?? null : null; events.length > 0
? events[events.length - 1]?.capturedAt ?? null
: rowsRaw.length > 0
? toISO(rowsRaw[rowsRaw.length - 1]?.capturedAt)
: null;
return NextResponse.json({ return NextResponse.json({
ok: true, ok: true,
@@ -125,6 +147,9 @@ export async function GET(req: Request) {
start, start,
machineId: machineId ?? null, machineId: machineId ?? null,
reasonCode: reasonCode ?? null, reasonCode: reasonCode ?? null,
planned,
shift,
microstopLtMin,
includeMoldChange, includeMoldChange,
limit, limit,
before: before ?? null, before: before ?? null,

View File

@@ -2,6 +2,13 @@ import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma"; import { prisma } from "@/lib/prisma";
import { requireSession } from "@/lib/auth/requireSession"; import { requireSession } from "@/lib/auth/requireSession";
import { coerceDowntimeRange, rangeToStart } from "@/lib/analytics/downtimeRange"; import { coerceDowntimeRange, rangeToStart } from "@/lib/analytics/downtimeRange";
import {
applyDowntimeFilters,
loadDowntimeShiftContext,
normalizeMicrostopLtMin,
normalizeShiftFilter,
resolvePlannedFilter,
} from "@/lib/analytics/downtimeFilters";
const bad = (status: number, error: string) => const bad = (status: number, error: string) =>
NextResponse.json({ ok: false, error }, { status }); NextResponse.json({ ok: false, error }, { status });
@@ -21,6 +28,9 @@ export async function GET(req: Request) {
const machineId = url.searchParams.get("machineId"); // optional const machineId = url.searchParams.get("machineId"); // optional
const kind = (url.searchParams.get("kind") || "downtime").toLowerCase(); const kind = (url.searchParams.get("kind") || "downtime").toLowerCase();
const includeMoldChange = url.searchParams.get("includeMoldChange") === "true"; const includeMoldChange = url.searchParams.get("includeMoldChange") === "true";
const planned = resolvePlannedFilter(url.searchParams.get("planned"), includeMoldChange);
const shift = normalizeShiftFilter(url.searchParams.get("shift"));
const microstopLtMin = normalizeMicrostopLtMin(url.searchParams.get("microstopLtMin"));
if (kind !== "downtime" && kind !== "scrap" && kind !== "planned-downtime") { if (kind !== "downtime" && kind !== "scrap" && kind !== "planned-downtime") {
return bad(400, "Invalid kind (downtime|scrap|planned-downtime)"); return bad(400, "Invalid kind (downtime|scrap|planned-downtime)");
@@ -35,41 +45,82 @@ export async function GET(req: Request) {
if (!m) return bad(404, "Machine not found"); if (!m) return bad(404, "Machine not found");
} }
// ✅ Scope by orgId (+ machineId if provided) let itemsRaw: { reasonCode: string; reasonLabel: string; value: number; count: number }[] = [];
if (kind === "downtime" || kind === "planned-downtime") {
const baseRows = await prisma.reasonEntry.findMany({
where: {
orgId,
...(machineId ? { machineId } : {}),
kind: "downtime",
capturedAt: { gte: start },
},
select: {
reasonCode: true,
reasonLabel: true,
durationSeconds: true,
capturedAt: true,
meta: true,
episodeId: true,
},
});
const effectivePlanned = kind === "planned-downtime" ? "planned" : planned;
const shiftContext = shift === "all" ? null : await loadDowntimeShiftContext(orgId);
const filteredRows = applyDowntimeFilters(baseRows, {
planned: effectivePlanned,
shift,
microstopLtMin,
shiftContext,
});
const grouped = new Map<string, { reasonCode: string; reasonLabel: string; durationSeconds: number; count: number }>();
for (const row of filteredRows) {
const key = `${row.reasonCode}:::${row.reasonLabel ?? row.reasonCode}`;
const slot =
grouped.get(key) ??
{
reasonCode: row.reasonCode,
reasonLabel: row.reasonLabel ?? row.reasonCode,
durationSeconds: 0,
count: 0,
};
slot.durationSeconds += Math.max(0, row.durationSeconds ?? 0);
slot.count += 1;
grouped.set(key, slot);
}
itemsRaw = [...grouped.values()]
.map((g) => ({
reasonCode: g.reasonCode,
reasonLabel: g.reasonLabel,
value: Math.round((g.durationSeconds / 60) * 10) / 10,
count: g.count,
}))
.filter((x) => x.value > 0 || x.count > 0);
} else {
// Scrap path unchanged.
const grouped = await prisma.reasonEntry.groupBy({ const grouped = await prisma.reasonEntry.groupBy({
by: ["reasonCode", "reasonLabel"], by: ["reasonCode", "reasonLabel"],
where: { where: {
orgId, orgId,
...(machineId ? { machineId } : {}), ...(machineId ? { machineId } : {}),
kind: kind === "planned-downtime" ? "downtime" : kind, kind,
...(kind === "downtime" && !includeMoldChange ? { reasonCode: { not: "MOLD_CHANGE" } } : {}),
...(kind === "planned-downtime" ? { reasonCode: "MOLD_CHANGE" } : {}),
capturedAt: { gte: start }, capturedAt: { gte: start },
}, },
_sum: { _sum: { scrapQty: true },
durationSeconds: true,
scrapQty: true,
},
_count: { _all: true }, _count: { _all: true },
}); });
const itemsRaw = grouped itemsRaw = grouped
.map((g) => { .map((g) => ({
const value =
kind === "downtime" || kind === "planned-downtime"
? Math.round(((g._sum.durationSeconds ?? 0) / 60) * 10) / 10 // minutes, 1 decimal
: g._sum.scrapQty ?? 0;
return {
reasonCode: g.reasonCode, reasonCode: g.reasonCode,
reasonLabel: g.reasonLabel ?? g.reasonCode, reasonLabel: g.reasonLabel ?? g.reasonCode,
value, value: g._sum.scrapQty ?? 0,
count: g._count._all, count: g._count._all,
}; }))
}) .filter((x) => x.value > 0);
.filter((x) => }
kind === "downtime" || kind === "planned-downtime" ? x.value > 0 || x.count > 0 : x.value > 0
);
itemsRaw.sort((a, b) => b.value - a.value); itemsRaw.sort((a, b) => b.value - a.value);
@@ -111,6 +162,9 @@ export async function GET(req: Request) {
orgId, orgId,
machineId: machineId ?? null, machineId: machineId ?? null,
kind, kind,
planned: kind === "downtime" ? planned : kind === "planned-downtime" ? "planned" : "all",
shift,
microstopLtMin,
includeMoldChange, includeMoldChange,
range, // ✅ now defined correctly range, // ✅ now defined correctly
start, // ✅ now defined correctly start, // ✅ now defined correctly

View File

@@ -517,6 +517,14 @@ export async function POST(req: Request) {
if (evRecord.is_update || evRecord.is_auto_ack || dataObj.is_update || dataObj.is_auto_ack){ if (evRecord.is_update || evRecord.is_auto_ack || dataObj.is_update || dataObj.is_auto_ack){
// skip duplicate reasonEntry for refresh/ack // skip duplicate reasonEntry for refresh/ack
} else if (evReason || finalType === "microstop" || finalType === "macrostop" || finalType === "downtime-acknowledged" || finalType === "mold-change"){ } else if (evReason || finalType === "microstop" || finalType === "macrostop" || finalType === "downtime-acknowledged" || finalType === "mold-change"){
const fallbackIncidentKey =
clampText(
evData.incidentKey ??
dataObj.incidentKey ??
evDowntime?.incidentKey ??
evReason?.incidentKey,
128
) ?? null;
const moldIncidentKey = const moldIncidentKey =
clampText(evData.incidentKey ?? dataObj.incidentKey, 128) ?? clampText(evData.incidentKey ?? dataObj.incidentKey, 128) ??
(numberFrom(evData.start_ms ?? dataObj.start_ms) != null (numberFrom(evData.start_ms ?? dataObj.start_ms) != null
@@ -533,7 +541,7 @@ export async function POST(req: Request) {
detailLabel: "Cambio molde", detailLabel: "Cambio molde",
reasonCode: "MOLD_CHANGE", reasonCode: "MOLD_CHANGE",
reasonText: "Cambio molde", reasonText: "Cambio molde",
incidentKey: moldIncidentKey ?? row.id, incidentKey: moldIncidentKey ?? fallbackIncidentKey ?? row.id,
} as Record<string, unknown>) } as Record<string, unknown>)
: :
({ ({
@@ -544,7 +552,7 @@ export async function POST(req: Request) {
detailLabel: "Unclassified", detailLabel: "Unclassified",
reasonCode: "UNCLASSIFIED", reasonCode: "UNCLASSIFIED",
reasonText: "Unclassified", reasonText: "Unclassified",
incidentKey: row.id, incidentKey: fallbackIncidentKey ?? row.id,
} as Record<string, unknown>)); } as Record<string, unknown>));
const inferredKind: ReasonCatalogKind = const inferredKind: ReasonCatalogKind =
@@ -554,10 +562,18 @@ export async function POST(req: Request) {
const resolved = resolveReason(reasonRaw, inferredKind, reasonCatalog, reasonCatalog.version); const resolved = resolveReason(reasonRaw, inferredKind, reasonCatalog, reasonCatalog.version);
if (resolved.reasonCode) { if (resolved.reasonCode) {
const continuityIncidentKey =
inferredKind === "downtime"
? clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey ?? fallbackIncidentKey, 128) ?? row.id
: null;
const reasonMetaIncidentKey =
inferredKind === "downtime"
? continuityIncidentKey
: clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey, 128);
const reasonId = const reasonId =
clampText(reasonRaw.reasonId, 128) ?? clampText(reasonRaw.reasonId, 128) ??
(inferredKind === "downtime" (inferredKind === "downtime"
? `evt:${machine.id}:downtime:${clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey, 128) ?? row.id}` ? `evt:${machine.id}:downtime:${continuityIncidentKey ?? row.id}`
: `evt:${machine.id}:scrap:${clampText(reasonRaw.scrapEntryId, 128) ?? row.id}`); : `evt:${machine.id}:scrap:${clampText(reasonRaw.scrapEntryId, 128) ?? row.id}`);
const workOrderId = const workOrderId =
@@ -577,7 +593,7 @@ export async function POST(req: Request) {
source: "ingest:event", source: "ingest:event",
eventId: row.id, eventId: row.id,
eventType: row.eventType, eventType: row.eventType,
incidentKey: clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey, 128), incidentKey: reasonMetaIncidentKey,
anomalyType: anomalyType:
clampText(evRecord.anomalyType, 64) ?? clampText(evRecord.anomalyType, 64) ??
clampText(evDowntime?.anomalyType, 64) ?? clampText(evDowntime?.anomalyType, 64) ??
@@ -595,7 +611,7 @@ export async function POST(req: Request) {
}; };
if (inferredKind === "downtime") { if (inferredKind === "downtime") {
const incidentKey = clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey, 128) ?? row.id; const incidentKey = continuityIncidentKey ?? row.id;
const durationSeconds = const durationSeconds =
numberFrom(evDowntime?.durationSeconds) ?? numberFrom(evDowntime?.durationSeconds) ??
numberFrom(evData.duration_sec) ?? numberFrom(evData.duration_sec) ??
@@ -641,7 +657,7 @@ export async function POST(req: Request) {
source: "ingest:event", source: "ingest:event",
eventId: row.id, eventId: row.id,
eventType: row.eventType, eventType: row.eventType,
incidentKey: clampText((reasonRaw as any).incidentKey ?? evDowntime?.incidentKey, 128), incidentKey: reasonMetaIncidentKey,
anomalyType: anomalyType:
clampText(evRecord.anomalyType, 64) ?? clampText(evRecord.anomalyType, 64) ??
clampText(evDowntime?.anomalyType, 64) ?? clampText(evDowntime?.anomalyType, 64) ??

View File

@@ -47,6 +47,10 @@ function safeNum(v: unknown) {
return typeof v === "number" && Number.isFinite(v) ? v : null; return typeof v === "number" && Number.isFinite(v) ? v : null;
} }
/**
 * A KPI snapshot counts as "production" only when both flags are the
 * strict boolean `true`. Truthy non-booleans (1, "true", etc.) and
 * null/undefined are all rejected on purpose, since the inputs come
 * from loosely typed rows.
 */
function isProductionSnapshot(trackingEnabled: unknown, productionStarted: unknown) {
  const tracking = trackingEnabled === true;
  const started = productionStarted === true;
  return tracking && started;
}
function toMs(value?: Date | null) { function toMs(value?: Date | null) {
return value ? value.getTime() : 0; return value ? value.getTime() : 0;
} }
@@ -137,6 +141,8 @@ export async function GET(req: NextRequest) {
good: true, good: true,
scrap: true, scrap: true,
target: true, target: true,
trackingEnabled: true,
productionStarted: true,
machineId: true, machineId: true,
}, },
}); });
@@ -151,7 +157,9 @@ export async function GET(req: NextRequest) {
let qualSum = 0; let qualSum = 0;
let qualCount = 0; let qualCount = 0;
// OEE-family summaries are production-only to avoid mixing downtime/off windows.
for (const k of kpiRows) { for (const k of kpiRows) {
if (!isProductionSnapshot(k.trackingEnabled, k.productionStarted)) continue;
if (safeNum(k.oee) != null) { if (safeNum(k.oee) != null) {
oeeSum += Number(k.oee); oeeSum += Number(k.oee);
oeeCount += 1; oeeCount += 1;
@@ -274,7 +282,7 @@ export async function GET(req: NextRequest) {
else if (type === "oee-drop") oeeDropCount += 1; else if (type === "oee-drop") oeeDropCount += 1;
} }
type TrendPoint = { t: string; v: number }; type TrendPoint = { t: string; v: number | null };
const trend: { const trend: {
oee: TrendPoint[]; oee: TrendPoint[];
@@ -292,10 +300,18 @@ export async function GET(req: NextRequest) {
for (const k of kpiRows) { for (const k of kpiRows) {
const t = k.ts.toISOString(); const t = k.ts.toISOString();
if (safeNum(k.oee) != null) trend.oee.push({ t, v: Number(k.oee) }); if (!isProductionSnapshot(k.trackingEnabled, k.productionStarted)) {
if (safeNum(k.availability) != null) trend.availability.push({ t, v: Number(k.availability) }); // Preserve timeline gaps across non-production windows for OEE-family charting.
if (safeNum(k.performance) != null) trend.performance.push({ t, v: Number(k.performance) }); trend.oee.push({ t, v: null });
if (safeNum(k.quality) != null) trend.quality.push({ t, v: Number(k.quality) }); trend.availability.push({ t, v: null });
trend.performance.push({ t, v: null });
trend.quality.push({ t, v: null });
} else {
trend.oee.push({ t, v: safeNum(k.oee) != null ? Number(k.oee) : null });
trend.availability.push({ t, v: safeNum(k.availability) != null ? Number(k.availability) : null });
trend.performance.push({ t, v: safeNum(k.performance) != null ? Number(k.performance) : null });
trend.quality.push({ t, v: safeNum(k.quality) != null ? Number(k.quality) : null });
}
const good = safeNum(k.good); const good = safeNum(k.good);
const scrap = safeNum(k.scrap); const scrap = safeNum(k.scrap);

View File

@@ -246,18 +246,6 @@ function buildParetoFromEvents(events: ApiDowntimeEvent[]): ApiParetoRes | null
} }
type ApiCoverageRes = {
ok: boolean;
error?: string;
orgId?: string;
machineId?: string | null;
range?: "24h" | "7d" | "30d";
start?: string;
receivedEpisodes?: number;
receivedMinutes?: number;
note?: string;
};
type Range = "24h" | "7d" | "30d"; type Range = "24h" | "7d" | "30d";
type Metric = "minutes" | "count"; type Metric = "minutes" | "count";
@@ -1297,6 +1285,9 @@ export default function DowntimePageClient() {
// client-only filters (shareable) // client-only filters (shareable)
const metric = ((sp.get("metric") as Metric) || "minutes") as Metric; const metric = ((sp.get("metric") as Metric) || "minutes") as Metric;
const reasonCode = sp.get("reasonCode") || null; const reasonCode = sp.get("reasonCode") || null;
const shift = (sp.get("shift") || "all").toUpperCase();
const planned = (sp.get("planned") as "all" | "planned" | "unplanned") || "all";
const microstopLtMin = sp.get("microstopLtMin") || "2";
const hmDay = sp.get("hmDay"); const hmDay = sp.get("hmDay");
const hmHour = sp.get("hmHour"); const hmHour = sp.get("hmHour");
@@ -1308,7 +1299,6 @@ export default function DowntimePageClient() {
const [pareto, setPareto] = useState<ApiParetoRes | null>(null); const [pareto, setPareto] = useState<ApiParetoRes | null>(null);
const [coverage, setCoverage] = useState<ApiCoverageRes | null>(null);
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [err, setErr] = useState<string | null>(null); const [err, setErr] = useState<string | null>(null);
const [eventsRes, setEventsRes] = useState<ApiDowntimeEventsRes | null>(null); const [eventsRes, setEventsRes] = useState<ApiDowntimeEventsRes | null>(null);
@@ -1364,40 +1354,27 @@ export default function DowntimePageClient() {
qs.set("kind", "downtime"); qs.set("kind", "downtime");
qs.set("range", range); qs.set("range", range);
if (machineId) qs.set("machineId", machineId); if (machineId) qs.set("machineId", machineId);
qs.set("shift", shift);
qs.set("planned", planned);
qs.set("microstopLtMin", microstopLtMin);
const [r1, r2] = await Promise.all([ const r1 = await fetch(`/api/analytics/pareto?${qs.toString()}`, {
fetch(`/api/analytics/pareto?${qs.toString()}`, {
cache: "no-cache", cache: "no-cache",
credentials: "include", credentials: "include",
signal: ac.signal, signal: ac.signal,
}), });
fetch(`/api/analytics/coverage?${qs.toString()}`, {
cache: "no-cache",
credentials: "include",
signal: ac.signal,
}),
]);
const j1raw = (await r1.json().catch(() => ({}))) as ApiParetoRes; const j1raw = (await r1.json().catch(() => ({}))) as ApiParetoRes;
const j2 = (await r2.json().catch(() => ({}))) as ApiCoverageRes;
if (!alive) return; if (!alive) return;
if (!r1.ok || j1raw.ok === false) { if (!r1.ok || j1raw.ok === false) {
setErr(j1raw?.error ?? "Failed to load pareto"); setErr(j1raw?.error ?? "Failed to load pareto");
setPareto(null); setPareto(null);
setCoverage(null);
setLoading(false); setLoading(false);
return; return;
} }
if (!r2.ok || j2.ok === false) {
// coverage is “nice to have” — dont kill the page
setCoverage(null);
} else {
setCoverage(j2);
}
setPareto(normalizeParetoRes(j1raw)); setPareto(normalizeParetoRes(j1raw));
setLoading(false); setLoading(false);
} catch (e: any) { } catch (e: any) {
@@ -1412,7 +1389,7 @@ export default function DowntimePageClient() {
alive = false; alive = false;
ac.abort(); ac.abort();
}; };
}, [range, machineId]); }, [range, machineId, shift, planned, microstopLtMin]);
useEffect(() => { useEffect(() => {
let alive = true; let alive = true;
@@ -1462,6 +1439,9 @@ export default function DowntimePageClient() {
qs.set("limit", String(eventsLimit)); qs.set("limit", String(eventsLimit));
if (machineId) qs.set("machineId", machineId); if (machineId) qs.set("machineId", machineId);
if (reasonCode) qs.set("reasonCode", reasonCode); if (reasonCode) qs.set("reasonCode", reasonCode);
qs.set("shift", shift);
qs.set("planned", planned);
qs.set("microstopLtMin", microstopLtMin);
if (eventsBefore) qs.set("before", eventsBefore); if (eventsBefore) qs.set("before", eventsBefore);
const r = await fetch(`/api/analytics/downtime-events?${qs.toString()}`, { const r = await fetch(`/api/analytics/downtime-events?${qs.toString()}`, {
@@ -1494,7 +1474,7 @@ export default function DowntimePageClient() {
alive = false; alive = false;
ac.abort(); ac.abort();
}; };
}, [range, machineId, reasonCode, eventsLimit, eventsBefore]); }, [range, machineId, reasonCode, shift, planned, microstopLtMin, eventsLimit, eventsBefore]);
// Derived data // Derived data
const events = eventsRes?.events ?? []; const events = eventsRes?.events ?? [];
@@ -1582,7 +1562,7 @@ const totalDowntimeMin = paretoEffective?.totalMinutesLost ?? 0;
useEffect(() => { useEffect(() => {
setEventsBefore(null); setEventsBefore(null);
}, [range, machineId, reasonCode]); }, [range, machineId, reasonCode, shift, planned, microstopLtMin]);
const filteredEvents = useMemo(() => { const filteredEvents = useMemo(() => {
let list = events; let list = events;
@@ -1612,8 +1592,8 @@ const filteredEvents = useMemo(() => {
// Use distinct episodes as "stops" (best available now) // Use filtered pareto totals so top filters always affect the KPI.
const stops = coverage?.receivedEpisodes ?? totalStops; const stops = totalStops;
// Window minutes for MTBF/Availability // Window minutes for MTBF/Availability
const windowMin = const windowMin =
@@ -1728,11 +1708,6 @@ const estImpactMxn = rate > 0 ? totalDowntimeMin * rate : 0;
); );
const shift = sp.get("shift") || "all";
const planned = (sp.get("planned") as "all" | "planned" | "unplanned") || "all";
const microstopLtMin = sp.get("microstopLtMin") || "2";
const filtersRow = ( const filtersRow = (
<div className="mt-4 flex items-center justify-between gap-4"> <div className="mt-4 flex items-center justify-between gap-4">
{/* LEFT: range + metric + reset (never wrap) */} {/* LEFT: range + metric + reset (never wrap) */}
@@ -2018,7 +1993,7 @@ const estImpactMxn = rate > 0 ? totalDowntimeMin * rate : 0;
<KPI <KPI
label="Stops count" label="Stops count"
value={fmtNum(stops, 0)} value={fmtNum(stops, 0)}
sub="Distinct episodes (coverage)" sub="Distinct episodes (filtered)"
accent="zinc" accent="zinc"
/> />
<KPI <KPI
@@ -2247,29 +2222,25 @@ const estImpactMxn = rate > 0 ? totalDowntimeMin * rate : 0;
{/* Coverage mini */} {/* Coverage mini */}
<div className="mt-4 rounded-2xl border border-white/10 bg-white/5 p-4"> <div className="mt-4 rounded-2xl border border-white/10 bg-white/5 p-4">
<div className="text-sm font-semibold text-white">Coverage received</div> <div className="text-sm font-semibold text-white">Filtered downtime summary</div>
<div className="mt-1 text-xs text-zinc-400"> <div className="mt-1 text-xs text-zinc-400">
Sync health from Control Tower ingest Reflects the active range/machine/shift/planned/microstop filters
</div> </div>
<div className="mt-3 grid grid-cols-2 gap-3"> <div className="mt-3 grid grid-cols-2 gap-3">
<div className="rounded-xl border border-white/10 bg-black/20 p-3"> <div className="rounded-xl border border-white/10 bg-black/20 p-3">
<div className="text-[11px] text-zinc-400">Episodes</div> <div className="text-[11px] text-zinc-400">Episodes</div>
<div className="mt-1 text-base font-semibold text-white"> <div className="mt-1 text-base font-semibold text-white">
{coverage?.receivedEpisodes != null ? fmtNum(coverage.receivedEpisodes, 0) : "—"} {fmtNum(stops, 0)}
</div> </div>
</div> </div>
<div className="rounded-xl border border-white/10 bg-black/20 p-3"> <div className="rounded-xl border border-white/10 bg-black/20 p-3">
<div className="text-[11px] text-zinc-400">Minutes</div> <div className="text-[11px] text-zinc-400">Minutes</div>
<div className="mt-1 text-base font-semibold text-white"> <div className="mt-1 text-base font-semibold text-white">
{coverage?.receivedMinutes != null ? fmtNum(coverage.receivedMinutes, 1) : "—"} {fmtNum(totalDowntimeMin, 1)}
</div> </div>
</div> </div>
</div> </div>
{coverage?.note ? (
<div className="mt-3 text-[11px] text-zinc-500">{coverage.note}</div>
) : null}
</div> </div>
</div> </div>
</div> </div>

244
fix4.md Normal file
View File

@@ -0,0 +1,244 @@
Task: Implement Control Tower changes only (no Node-RED edits), then run full verification with SQL + backfill script.
Repository context:
- Workspace root: Plastic-Dashboard
- Target branch assumption: sandbox-main
- Database: PostgreSQL via Prisma
- Scope strictly limited to Control Tower code and scripts in this repo
Hard constraints:
1. Do NOT edit any Node-RED flow files or Node-RED runtime code.
2. Do NOT change behavior outside the requested areas unless required for correctness.
3. Preserve existing non-authoritative guard behavior for downtime reasons (PENDIENTE / UNCLASSIFIED).
4. Run verification before and after backfill, and report results clearly.
5. If lint/test has unrelated pre-existing failures, do not refactor unrelated modules.
Implementation requirements:
A) Downtime continuity fallback key fix
File:
- app/api/ingest/event/route.ts
Goal:
- Ensure fallback downtime reason identity/continuity uses episode continuity key (incidentKey) whenever present.
- Use row.id only when incidentKey is truly absent.
- Preserve guard that prevents non-authoritative values from overwriting authoritative manual reasons.
Details:
1. In the event ingestion logic where ReasonEntry payload is created for downtime-like events (including fallback UNCLASSIFIED and mold-change):
- Derive a fallbackIncidentKey from available payload fields in this preference order:
- evData.incidentKey
- dataObj.incidentKey
- evDowntime?.incidentKey
- evReason?.incidentKey (if available)
- Only if all are missing, fallback to row.id.
2. For fallback reasonRaw objects:
- For mold-change fallback, set incidentKey to moldIncidentKey ?? fallbackIncidentKey ?? row.id.
- For unclassified fallback, set incidentKey to fallbackIncidentKey ?? row.id.
3. Create one continuityIncidentKey (single source of truth) used consistently for:
- downtime reasonId construction (evt:<machineId>:downtime:<continuityIncidentKey>)
- ReasonEntry episodeId for downtime
- meta.incidentKey in reason entry writes
- manual-preservation guard queries by episodeId
4. Keep non-authoritative guard semantics unchanged:
- incoming non-authoritative reason should not overwrite existing authoritative reason for same episode
- downtime-acknowledged/manual authoritative path remains preserved
B) OEE trend from production-only snapshots
File:
- app/api/reports/route.ts
Goal:
- Build OEE trend from production-only snapshots:
- trackingEnabled = true
- productionStarted = true
- Keep summary metrics behavior explicit and consistent with this filtering decision.
Details:
1. Include trackingEnabled and productionStarted in KPI snapshot select.
2. Add helper like isProductionSnapshot(trackingEnabled, productionStarted).
3. Compute OEE/Availability/Performance/Quality averages using production-only rows.
4. For trend generation:
- Iterate timeline in ts order.
- For non-production snapshots, emit null points (for OEE and related KPI lines) so the chart can render true gaps.
- For production snapshots, emit actual numeric values (or null if value is missing).
5. Keep downtime/event aggregates and cycle-based totals behavior intact unless explicitly tied to OEE production-only requirement.
6. Keep logic explicit in code comments (short, concrete comments only where needed).
C) Chart rendering behavior: no smoothing across gaps
Files:
- app/(app)/reports/ReportsCharts.tsx
- app/(app)/reports/ReportsPageClient.tsx (if types/downsampling need updates)
Goal:
- OEE line interpolation must be linear.
- Gaps must be rendered as gaps (no fake continuity through filtered/non-production windows).
Details:
1. In OEE line chart:
- change Line type from monotone to linear
- set connectNulls={false}
2. Ensure frontend types allow nullable trend values for OEE points.
3. If downsampling exists, preserve gap markers so null separators are not removed.
- Keep null transition points when reducing point count.
4. Ensure tooltip/value formatting handles nulls gracefully.
Verification and execution steps:
1) Run targeted checks first
- run tests related to downtime guard if available:
- npm run test:downtime-reason-guard
- run lint at least for changed files (or full lint if practical):
- npx eslint app/api/ingest/event/route.ts app/api/reports/route.ts app/(app)/reports/ReportsCharts.tsx app/(app)/reports/ReportsPageClient.tsx
2) SQL Verification Pack (PRE-BACKFILL)
Execute these exactly and capture output snapshots:
A. Recent downtime reason quality mix
SELECT
reasonCode,
COUNT(*) AS rows
FROM "ReasonEntry"
WHERE kind = 'downtime'
AND "capturedAt" >= NOW() - INTERVAL '7 days'
GROUP BY reasonCode
ORDER BY rows DESC;
B. Episodes with conflicting reason codes
SELECT
"orgId",
"machineId",
"episodeId",
COUNT(DISTINCT "reasonCode") AS distinct_codes,
MIN("capturedAt") AS first_seen,
MAX("capturedAt") AS last_seen
FROM "ReasonEntry"
WHERE kind = 'downtime'
AND "episodeId" IS NOT NULL
AND "capturedAt" >= NOW() - INTERVAL '14 days'
GROUP BY "orgId", "machineId", "episodeId"
HAVING COUNT(DISTINCT "reasonCode") > 1
ORDER BY last_seen DESC
LIMIT 200;
C. Potential manual overwritten by non-authoritative check
SELECT
re."orgId",
re."machineId",
re."episodeId",
re."reasonCode",
re."capturedAt",
re.meta
FROM "ReasonEntry" re
WHERE re.kind = 'downtime'
AND re."capturedAt" >= NOW() - INTERVAL '14 days'
AND re."reasonCode" IN ('PENDIENTE', 'UNCLASSIFIED')
ORDER BY re."capturedAt" DESC
LIMIT 200;
D. Event continuity around downtime + ack
SELECT
"machineId",
"eventType",
ts,
data->>'incidentKey' AS incident_key,
data->>'status' AS status,
data->>'is_update' AS is_update,
data->>'is_auto_ack' AS is_auto_ack
FROM "MachineEvent"
WHERE ts >= NOW() - INTERVAL '3 days'
AND "eventType" IN ('microstop', 'macrostop', 'downtime-acknowledged')
ORDER BY ts DESC
LIMIT 500;
E. KPI production vs non-production counts
SELECT
COALESCE("trackingEnabled", false) AS tracking_enabled,
COALESCE("productionStarted", false) AS production_started,
COUNT(*) AS rows
FROM "MachineKpiSnapshot"
WHERE ts >= NOW() - INTERVAL '7 days'
GROUP BY 1,2
ORDER BY rows DESC;
F. Sharp OEE jumps in production snapshots
WITH k AS (
SELECT
"machineId",
ts,
oee,
LAG(oee) OVER (PARTITION BY "machineId" ORDER BY ts) AS prev_oee
FROM "MachineKpiSnapshot"
WHERE ts >= NOW() - INTERVAL '7 days'
AND "trackingEnabled" = true
AND "productionStarted" = true
AND oee IS NOT NULL
)
SELECT
"machineId",
ts,
prev_oee,
oee,
ABS(oee - prev_oee) AS delta
FROM k
WHERE prev_oee IS NOT NULL
AND ABS(oee - prev_oee) >= 25
ORDER BY delta DESC, ts DESC
LIMIT 200;
G. Trend point count comparison
SELECT
'all' AS series,
COUNT(*) AS points
FROM "MachineKpiSnapshot"
WHERE ts >= NOW() - INTERVAL '24 hours'
AND oee IS NOT NULL
UNION ALL
SELECT
'production_only' AS series,
COUNT(*) AS points
FROM "MachineKpiSnapshot"
WHERE ts >= NOW() - INTERVAL '24 hours'
AND oee IS NOT NULL
AND "trackingEnabled" = true
AND "productionStarted" = true;
3) Backfill run plan (must follow this order)
A. Dry-run first:
node scripts/backfill-downtime-reasons.mjs --dry-run --since 30d
B. Review dry-run output:
- candidates
- sampleUpdates
- incident distribution by machine
- any suspicious replacements
C. Apply scoped first (single machine from dry-run sample):
node scripts/backfill-downtime-reasons.mjs --since 30d --machine-id <machine_uuid>
4) SQL Verification Pack (POST-BACKFILL)
- Re-run queries A, B, C at minimum.
- Optionally rerun D/F/G for confidence.
- Confirm reduction in stale PENDIENTE/UNCLASSIFIED rows where authoritative reason exists.
- Confirm conflicting episode reason cases reduced or shifted as expected.
Acceptance criteria checklist:
- New downtime episodes retain authoritative manual reason and do not regress to PENDIENTE/UNCLASSIFIED.
- Fallback downtime continuity now keys by incidentKey whenever available; row.id only when absent.
- OEE trend no longer shows implausible 0/100 jumps from non-production snapshots.
- OEE chart is linear and visually shows true gaps (no smoothing continuity across filtered windows).
- Backfill dry-run and scoped apply outputs are captured and reasonable.
- Post-run SQL confirms expected improvements without obvious regressions.
Output format required from you:
1. Files changed with concise reason per file.
2. Exact diff summary for each modified file.
3. Test/lint commands run + result.
4. Pre-backfill SQL results (compact tables or summarized counts).
5. Dry-run output summary (key fields + sample updates).
6. Scoped apply command used and output summary.
7. Post-backfill SQL delta summary (before vs after).
8. Any blockers (env vars, DB auth, migration state, etc.) and exactly what is needed to unblock.

View File

@@ -0,0 +1,204 @@
import { prisma } from "@/lib/prisma";
import { normalizeShiftOverrides } from "@/lib/settings";
// Planned-downtime filter: "planned" keeps only MOLD_CHANGE rows,
// "unplanned" excludes them, "all" disables the gate.
type PlannedFilter = "all" | "planned" | "unplanned";
// Shift filter alias; "A"/"B"/"C" map positionally onto the org's enabled shifts.
type ShiftFilter = "all" | "A" | "B" | "C";
// Loose shift shape: tolerates both startTime/endTime and start/end field
// spellings so rows from different sources can be handled uniformly.
type ShiftLike = {
  name: string;
  startTime?: string | null;
  endTime?: string | null;
  start?: string | null;
  end?: string | null;
  enabled?: boolean;
};
// Everything needed to place a timestamp inside a shift window.
type ShiftContext = {
  timeZone: string;
  shifts: ShiftLike[];
  // Per-weekday schedule overrides keyed by "sun".."sat"; undefined = no overrides.
  overrides: Record<string, ShiftLike[]> | undefined;
};
// Positional alias letters assigned to the first three qualifying shifts.
const SHIFT_ALIAS: ShiftFilter[] = ["A", "B", "C"];
// Strict 24h "HH:MM" matcher (00:00-23:59).
const TIME_RE = /^([01]\d|2[0-3]):([0-5]\d)$/;
// Intl short weekday name -> override-map key.
const WEEKDAY_KEY_MAP: Record<string, string> = {
  Sun: "sun",
  Mon: "mon",
  Tue: "tue",
  Wed: "wed",
  Thu: "thu",
  Fri: "fri",
  Sat: "sat",
};
// Index-aligned with Date#getUTCDay() (0 = Sunday); used as UTC fallback.
const WEEKDAY_KEYS = ["sun", "mon", "tue", "wed", "thu", "fri", "sat"] as const;
// Narrows an unknown value to a plain object record; arrays and all
// non-object values (including null) yield null.
function asRecord(value: unknown) {
  if (value == null || typeof value !== "object") return null;
  if (Array.isArray(value)) return null;
  return value as Record<string, unknown>;
}
// Parses a strict "HH:MM" string into minutes-since-midnight; returns null
// for missing or malformed input.
function parseTimeMinutes(value?: string | null) {
  if (!value) return null;
  const match = TIME_RE.exec(value);
  if (!match) return null;
  const [, hours, minutes] = match;
  return Number(hours) * 60 + Number(minutes);
}
// Returns minutes-since-midnight for `ts` in the given IANA time zone,
// falling back to UTC when the zone is invalid or Intl fails.
function getLocalMinutes(ts: Date, timeZone: string) {
  try {
    const formatter = new Intl.DateTimeFormat("en-US", {
      timeZone,
      hour: "2-digit",
      minute: "2-digit",
      // h23 avoids the "24:xx" hour rendering of hour12: false.
      hourCycle: "h23",
    });
    let hour = 0;
    let minute = 0;
    for (const part of formatter.formatToParts(ts)) {
      if (part.type === "hour") hour = Number(part.value);
      else if (part.type === "minute") minute = Number(part.value);
    }
    return hour * 60 + minute;
  } catch {
    return ts.getUTCHours() * 60 + ts.getUTCMinutes();
  }
}
// Returns the weekday key ("sun".."sat") for `ts` in the given time zone,
// falling back to the UTC weekday when Intl cannot resolve the zone.
function getLocalDayKey(ts: Date, timeZone: string) {
  const utcFallback = WEEKDAY_KEYS[ts.getUTCDay()];
  try {
    const shortName = new Intl.DateTimeFormat("en-US", {
      timeZone,
      weekday: "short",
    }).format(ts);
    return WEEKDAY_KEY_MAP[shortName] ?? utcFallback;
  } catch {
    return utcFallback;
  }
}
// Maps a timestamp to a shift alias ("A"/"B"/"C") using the org schedule,
// honoring per-weekday overrides and overnight (wrap-past-midnight) windows.
// Returns null when no enabled shift window contains the timestamp.
function resolveShiftAlias(context: ShiftContext, ts: Date): ShiftFilter | null {
  const dayKey = getLocalDayKey(ts, context.timeZone);
  // A day-specific override, when present, fully replaces the base schedule.
  const dayOverrides = context.overrides?.[dayKey];
  const activeShifts = dayOverrides ?? context.shifts;
  if (!activeShifts.length) return null;
  const nowMin = getLocalMinutes(ts, context.timeZone);
  // Aliases are positional: the Nth enabled shift with parseable times
  // becomes SHIFT_ALIAS[N].
  let enabledOrdinal = 0;
  for (const shift of activeShifts) {
    if (shift.enabled === false) continue;
    const start = parseTimeMinutes(shift.startTime ?? shift.start ?? null);
    const end = parseTimeMinutes(shift.endTime ?? shift.end ?? null);
    // NOTE(review): a shift with unparseable times does not consume an alias
    // slot, which shifts later shifts' letters — confirm this is intended.
    if (start == null || end == null) continue;
    const alias = SHIFT_ALIAS[enabledOrdinal] ?? null;
    enabledOrdinal += 1;
    // Only the first three qualifying shifts can carry an alias.
    if (!alias) continue;
    if (start <= end) {
      // Same-day window: [start, end).
      if (nowMin >= start && nowMin < end) return alias;
    } else if (nowMin >= start || nowMin < end) {
      // Overnight window wrapping past midnight.
      return alias;
    }
  }
  return null;
}
// Heuristically detects microstop rows: either the episode id carries the
// "microstop:" prefix, or the row's meta tags it via anomalyType/eventType.
function isMicrostopLike(row: {
  episodeId?: string | null;
  meta?: unknown;
}) {
  const episodeKey = String(row.episodeId ?? "").toLowerCase();
  if (episodeKey.startsWith("microstop:")) return true;
  const meta = asRecord(row.meta);
  if (!meta) return false;
  const byAnomaly = String(meta.anomalyType ?? "").toLowerCase() === "microstop";
  const byEvent = String(meta.eventType ?? "").toLowerCase() === "microstop";
  return byAnomaly || byEvent;
}
// Normalizes a raw query-string value into a PlannedFilter; anything that is
// not exactly "planned"/"unplanned" (case/whitespace-insensitive) means "all".
function normalizePlanned(raw: string | null): PlannedFilter {
  switch (String(raw ?? "").trim().toLowerCase()) {
    case "planned":
      return "planned";
    case "unplanned":
      return "unplanned";
    default:
      return "all";
  }
}
// Resolves the effective planned filter: an explicit non-empty query value
// wins; otherwise the includeMoldChange flag chooses the default view.
export function resolvePlannedFilter(raw: string | null, includeMoldChange: boolean): PlannedFilter {
  const hasExplicitValue = raw != null && String(raw).trim() !== "";
  if (hasExplicitValue) return normalizePlanned(raw);
  return includeMoldChange ? "all" : "unplanned";
}
// Normalizes a raw query-string value into a ShiftFilter; anything other
// than A/B/C (case/whitespace-insensitive) means "all".
export function normalizeShiftFilter(raw: string | null): ShiftFilter {
  const candidate = String(raw ?? "").trim().toUpperCase();
  return candidate === "A" || candidate === "B" || candidate === "C"
    ? candidate
    : "all";
}
// Parses the microstop duration threshold (minutes) from a raw query value.
// Returns null for missing, non-numeric, or non-positive input.
export function normalizeMicrostopLtMin(raw: string | null) {
  if (!raw) return null;
  const parsed = Number(raw);
  return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
}
// Applies the planned/unplanned gate: MOLD_CHANGE is the only reason code
// treated as planned downtime.
function passesPlannedFilter(reasonCode: string, planned: PlannedFilter) {
  const isMoldChange = reasonCode === "MOLD_CHANGE";
  if (planned === "planned") return isMoldChange;
  if (planned === "unplanned") return !isMoldChange;
  return true;
}
/**
 * Loads the org's shift schedule plus timezone/override settings in parallel
 * and packages them as a ShiftContext for shift-alias resolution.
 */
export async function loadDowntimeShiftContext(orgId: string): Promise<ShiftContext> {
  const shiftQuery = prisma.orgShift.findMany({
    where: { orgId },
    orderBy: { sortOrder: "asc" },
    select: { name: true, startTime: true, endTime: true, enabled: true },
  });
  const settingsQuery = prisma.orgSettings.findUnique({
    where: { orgId },
    select: { timezone: true, shiftScheduleOverridesJson: true },
  });
  const [shiftRows, orgSettings] = await Promise.all([shiftQuery, settingsQuery]);
  return {
    // Fall back to UTC when the org has no timezone configured.
    timeZone: orgSettings?.timezone || "UTC",
    shifts: shiftRows,
    overrides: normalizeShiftOverrides(orgSettings?.shiftScheduleOverridesJson),
  };
}
/**
 * Filters downtime reason rows by planned/unplanned category, shift alias,
 * and (for microstop-like rows only) a duration-under-threshold gate.
 * Rows survive only if they pass every active gate.
 */
export function applyDowntimeFilters<T extends {
  reasonCode: string;
  capturedAt: Date;
  durationSeconds?: number | null;
  episodeId?: string | null;
  meta?: unknown;
}>(
  rows: T[],
  options: {
    planned: PlannedFilter;
    shift: ShiftFilter;
    microstopLtMin: number | null;
    shiftContext: ShiftContext | null;
  }
) {
  const { planned, shift, microstopLtMin, shiftContext } = options;
  const keep = (row: T): boolean => {
    // Planned/unplanned gate (MOLD_CHANGE is the only "planned" reason).
    if (!passesPlannedFilter(row.reasonCode, planned)) return false;
    // Shift gate: without a shift context the row cannot be placed in a shift.
    if (shift !== "all") {
      if (!shiftContext) return false;
      if (resolveShiftAlias(shiftContext, row.capturedAt) !== shift) return false;
    }
    // Duration gate applies only to microstop-like rows.
    if (microstopLtMin != null && isMicrostopLike(row)) {
      if (row.durationSeconds == null) return false;
      const durationMin = row.durationSeconds / 60;
      // Negated comparison so non-finite durations are excluded too.
      if (!(durationMin < microstopLtMin)) return false;
    }
    return true;
  };
  return rows.filter(keep);
}