Downtime catalog

This commit is contained in:
Marcelo
2026-05-06 00:36:48 +00:00
parent 0491237bad
commit bfc1673d89
42 changed files with 8035 additions and 1093 deletions

View File

@@ -0,0 +1,22 @@
# Reason catalog: Control Tower → settings → MySQL (Pi)
## Authority
- The canonical catalog lives in Control Tower: `org_settings.defaults_json.reasonCatalog` (and `reasonCatalogData` alias), merged in API responses with fallback from `downtime_menu.md`.
- Each **detail** may include:
- `reasonCode`: official printed code (e.g. `DTPRC-01`, `MX001`). If omitted, a slug `CATEGORY__DETAIL` is derived for backward compatibility.
- `active`: `false` to hide from operator pickers while keeping history/report labels. **Never remove** a code from JSON once used; only set `active: false`.
## Raspberry Pi
1. Apply [`scripts/mysql/reason_catalog_mirror.sql`](mysql/reason_catalog_mirror.sql) on the same MySQL database used by Node-RED (`node-red-node-mysql`).
2. Deploy [`flows_may_4_26.json`](../flows_may_4_26.json). After each successful **Apply settings + update UI**, the flow emits a message on output 3 to **Build reason catalog mirror SQL**, which reads `global.settings.reasonCatalog` and runs `INSERT ... ON DUPLICATE KEY UPDATE` into `reason_catalog_row` (no deletes).
## Operator payloads (printed codes)
- On downtime acknowledge and scrap entry, send `reason.reasonCode` (and labels) matching the printed sheet. Ingest already normalizes and stores uppercase codes.
- Generate printable lists from the same JSON as CT: [`scripts/export-reason-catalog-csv.mjs`](export-reason-catalog-csv.mjs).
```bash
node scripts/export-reason-catalog-csv.mjs path/to/reasonCatalog.json > claves.csv
```

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env node
/**
* Export reasonCatalog JSON (downtime + scrap) to CSV for printed operator sheets.
* Usage: node scripts/export-reason-catalog-csv.mjs <path-to-catalog.json>
* cat reasonCatalog.json | node scripts/export-reason-catalog-csv.mjs
*
* CSV columns: kind, reasonCode, categoryLabel, reasonLabel, active
*/
import { readFileSync, existsSync } from "fs";
/**
 * Quote a single value for CSV output (RFC 4180 style).
 * Values containing a double quote, comma, or line break are wrapped in
 * double quotes with embedded quotes doubled; everything else passes through.
 * @param {*} s - any value; null/undefined become the empty string.
 * @returns {string} the CSV-safe cell text
 */
function escCsv(s) {
  const text = s == null ? "" : String(s);
  const needsQuoting = /[",\n\r]/.test(text);
  if (!needsQuoting) return text;
  return '"' + text.replaceAll('"', '""') + '"';
}
function effectiveReasonCode(categoryId, detail) {
const c = String(detail.reasonCode ?? detail.code ?? "").trim();
if (c) return c.toUpperCase();
const cat = String(categoryId ?? "")
.normalize("NFD")
.replace(/[\u0300-\u036f]/g, "")
.toLowerCase()
.replace(/[^a-z0-9]+/g, "-")
.replace(/^-+|-+$/g, "");
const det = String(detail.id ?? "")
.normalize("NFD")
.replace(/[\u0300-\u036f]/g, "")
.toLowerCase()
.replace(/[^a-z0-9]+/g, "-")
.replace(/^-+|-+$/g, "");
return `${cat}__${det}`.toUpperCase();
}
/**
 * Flatten one catalog section into CSV row objects.
 * Details are read from `details` or, as a legacy fallback, `children`.
 * @param {string} kind - section name ("downtime" | "scrap") emitted per row.
 * @param {Array<object>} categories - category list; ignored if not an array.
 * @param {Array<object>} rows - output accumulator, appended to in place.
 */
function walk(kind, categories, rows) {
  if (!Array.isArray(categories)) return;
  for (const category of categories) {
    const categoryId = String(category.id ?? "").trim();
    const categoryLabel = String(category.label ?? "").trim();
    let details = [];
    if (Array.isArray(category.details)) {
      details = category.details;
    } else if (Array.isArray(category.children)) {
      details = category.children;
    }
    for (const detail of details) {
      rows.push({
        kind,
        reasonCode: effectiveReasonCode(categoryId || categoryLabel, detail),
        categoryLabel,
        reasonLabel: String(detail.label ?? "").trim(),
        active: detail.active === false ? "0" : "1",
      });
    }
  }
}
// Entry point: read the catalog JSON from argv[2] (when the file exists)
// or from stdin, flatten both sections, and print CSV to stdout.
const inputPath = process.argv[2];
const raw =
  inputPath && existsSync(inputPath)
    ? readFileSync(inputPath, "utf8")
    : readFileSync(0, "utf8"); // fd 0 = stdin
const catalog = JSON.parse(raw || "{}");
const rows = [];
walk("downtime", catalog.downtime, rows);
walk("scrap", catalog.scrap, rows);
// Header line first, then one CSV line per reason row.
const header = ["kind", "reasonCode", "categoryLabel", "reasonLabel", "active"];
console.log(header.map(escCsv).join(","));
for (const row of rows) {
  console.log([row.kind, row.reasonCode, row.categoryLabel, row.reasonLabel, row.active].map(escCsv).join(","));
}

View File

@@ -0,0 +1,18 @@
-- Mirror of Control Tower reasonCatalog on the Raspberry Pi (MySQL / MariaDB).
-- Policy: never DELETE rows by reason_code; only INSERT ... ON DUPLICATE KEY UPDATE
-- and set active=0 when CT marks a code inactive.
CREATE TABLE IF NOT EXISTS reason_catalog_row (
kind VARCHAR(16) NOT NULL COMMENT 'downtime | scrap',
-- Category identity and label as pushed from Control Tower (denormalized here).
category_id VARCHAR(128) NOT NULL,
category_label VARCHAR(255) NOT NULL,
-- Printed operator code (e.g. DTPRC-01, MX001).
reason_code VARCHAR(64) NOT NULL,
reason_label VARCHAR(512) NOT NULL,
-- Display ordering within a kind, as computed by the mirror-SQL builder.
sort_order INT NOT NULL DEFAULT 0,
-- 0 hides the reason from operator pickers; rows are deactivated, never deleted.
active TINYINT(1) NOT NULL DEFAULT 1,
-- Catalog version stamped by the last upsert that touched this row.
catalog_version INT NOT NULL DEFAULT 1,
updated_at TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),
-- A reason code is unique per kind; upserts key on this pair.
PRIMARY KEY (kind, reason_code),
KEY idx_reason_catalog_kind_active (kind, active),
KEY idx_reason_catalog_version (catalog_version)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

View File

@@ -0,0 +1,213 @@
#!/usr/bin/env node
/**
* Patches flows_may_4_26.json:
* - Apply settings: pass reasonCode/active in catalog; 3 outputs; trigger MySQL mirror sync
* - New nodes: Build reason catalog mirror SQL → mysql
*/
import { readFileSync, writeFileSync } from "fs";
import { fileURLToPath } from "url";
// Resolve the flows file relative to this script. fileURLToPath correctly
// handles Windows drive letters and percent-encoded characters; the previous
// `new URL(...).pathname` would yield paths like "/C:/..." or leave "%20"
// unescaped, breaking readFileSync on such systems.
const path = fileURLToPath(new URL("../flows_may_4_26.json", import.meta.url));
const j = JSON.parse(readFileSync(path, "utf8"));
// Fixed Node-RED node ids: the existing "Apply settings" function node, plus
// the two nodes this patch inserts (idempotency is keyed on gateId later).
const applyId = "abbec199700a5e29";
const gateId = "f8e0d1c2b3a40911";
const mysqlPersistId = "f8e0d1c2b3a40912";
const apply = j.find((n) => n.id === applyId);
// Bail out rather than guessing if the flow export no longer matches.
if (!apply || apply.type !== "function") {
  console.error("Apply settings node not found");
  process.exit(1);
}
// --- Patch 1: detail normalization ------------------------------------------
// Exact snippet currently inside the Apply settings function: details kept
// only id + label.
const oldDetails =
"const details = detailsRaw.map((d, jdx) => ({\n id: String(d.id || d.detailId || (categoryId + \"_d\" + jdx)),\n label: String(d.label || d.detailLabel || (\"Detalle \" + (jdx + 1)))\n }));";
// Replacement: also carry through reasonCode (or legacy `code`) when present,
// and an explicit `active: false`; both are omitted otherwise to keep the
// stored catalog compact.
const newDetails = `const details = detailsRaw.map((d, jdx) => {
const row = {
id: String(d.id || d.detailId || (categoryId + "_d" + jdx)),
label: String(d.label || d.detailLabel || ("Detalle " + (jdx + 1)))
};
if (d.reasonCode != null && String(d.reasonCode).trim()) {
row.reasonCode = String(d.reasonCode).trim();
} else if (d.code != null && String(d.code).trim()) {
row.reasonCode = String(d.code).trim();
}
if (d.active === false) {
row.active = false;
}
return row;
});`;
// Abort (non-zero) if the flow no longer contains the exact snippet we expect;
// patching an unknown flow version would silently corrupt the function source.
if (!apply.func.includes(oldDetails)) {
console.error("Expected normalizeCatalog details snippet not found; abort.");
process.exit(1);
}
apply.func = apply.func.replace(oldDetails, newDetails);
// --- Patch 2: widen every early node.send() to the new 3-output shape -------
apply.func = apply.func.replaceAll("node.send([uiConfigMsg, null]);", "node.send([uiConfigMsg, null, null]);");
apply.func = apply.func.replaceAll("node.send([uiMoldMsg, null]);", "node.send([uiMoldMsg, null, null]);");
apply.func = apply.func.replaceAll("node.send([uiReadOnlyMsg, null]);", "node.send([uiReadOnlyMsg, null, null]);");
apply.func = apply.func.replaceAll("node.send([uiReasonCatalogMsg, null]);", "node.send([uiReasonCatalogMsg, null, null]);");
// --- Patch 3: final return also emits a mirror-sync trigger on output 3 -----
const oldReturnAck = `const ackMsg = {
topic: ackTopic,
payload: JSON.stringify({
type: "settings_ack",
orgId,
machineId,
version,
source: "node-red",
ts: new Date().toISOString()
})
};
return [null, ackMsg];
`;
const newReturnAck = `const ackMsg = {
topic: ackTopic,
payload: JSON.stringify({
type: "settings_ack",
orgId,
machineId,
version,
source: "node-red",
ts: new Date().toISOString()
})
};
const mirrorTrigger = { payload: { _syncReasonCatalog: true } };
return [null, ackMsg, mirrorTrigger];
`;
if (!apply.func.includes(oldReturnAck.trim())) {
console.error("Expected ack return block not found");
process.exit(1);
}
// trim() on both sides so leading/trailing whitespace in the flow export
// does not defeat the match.
apply.func = apply.func.replace(oldReturnAck.trim(), newReturnAck.trim());
// The early-exit guard must also return a 3-element array.
apply.func = apply.func.replace(
`if (!orgId || !machineId) {
return [null, null];
}`,
`if (!orgId || !machineId) {
return [null, null, null];
}`
);
// Rewire: output 1 keeps its original UI targets, output 3 feeds the new
// mirror-SQL builder node.
// NOTE(review): output 2 is left unwired by this patch — confirm the ack
// consumer is connected manually in the flow.
apply.outputs = 3;
apply.wires = [
["2c8562b2471078ab", "dbfd127c516efa87", "9748899355370bae"],
[],
[gateId],
];
// Function body for the new "Build reason catalog mirror SQL" Node-RED node.
// It ignores everything except the _syncReasonCatalog trigger, reads the
// catalog from the flow's global settings, and builds a single
// INSERT ... ON DUPLICATE KEY UPDATE statement (rows are never deleted).
const gateFunc = `const p = msg.payload || {};
if (!p._syncReasonCatalog) {
return null;
}
const settings = global.get("settings") || {};
const cat = settings.reasonCatalog || {};
const ver = Number(cat.version || 1);
function esc(v) {
return String(v ?? "").replace(/\\\\/g, "\\\\\\\\").replace(/'/g, "''");
}
const parts = [];
function walk(kind, list) {
if (!Array.isArray(list)) {
return;
}
let sort = 0;
list.forEach((c) => {
const categoryId = esc(String(c.id || ""));
const categoryLabel = esc(String(c.label || ""));
const ch = c.children || c.details || [];
if (!Array.isArray(ch)) {
return;
}
ch.forEach((d) => {
const id = String(d.id || "").trim();
const label = String(d.label || "").trim();
const rc = String(d.reasonCode || d.code || id || "").trim();
if (!rc) {
return;
}
const active = d.active === false ? 0 : 1;
parts.push(
"('" +
kind +
"','" +
categoryId +
"','" +
categoryLabel +
"','" +
esc(rc) +
"','" +
esc(label) +
"'," +
sort +
"," +
active +
"," +
ver +
")"
);
sort += 1;
});
});
}
walk("downtime", cat.downtime || []);
walk("scrap", cat.scrap || []);
if (!parts.length) {
node.status({ fill: "yellow", shape: "ring", text: "No reason rows to mirror" });
return null;
}
const sql =
"INSERT INTO reason_catalog_row (kind,category_id,category_label,reason_code,reason_label,sort_order,active,catalog_version) VALUES " +
parts.join(",") +
" ON DUPLICATE KEY UPDATE category_id=VALUES(category_id),category_label=VALUES(category_label),reason_label=VALUES(reason_label),sort_order=VALUES(sort_order),active=VALUES(active),catalog_version=VALUES(catalog_version),updated_at=CURRENT_TIMESTAMP(3)";
node.status({ fill: "green", shape: "dot", text: "Reason mirror SQL built" });
msg.topic = sql;
msg.payload = [];
return msg;
`;
// Function node definition: placed in the same flow tab (z) and group (g) as
// the Apply settings node, wired straight into the MySQL node below.
const gateNode = {
id: gateId,
type: "function",
z: "05d4cb231221b842",
g: "a1b43a9e095c10db",
name: "Build reason catalog mirror SQL",
func: gateFunc,
outputs: 1,
timeout: 0,
noerr: 0,
initialize: "",
finalize: "",
libs: [],
x: 1500,
y: 1020,
wires: [[mysqlPersistId]],
};
// MySQL node reusing the existing DB config node (mydb id) from the flow.
const mysqlNode = {
id: mysqlPersistId,
type: "mysql",
z: "05d4cb231221b842",
g: "a1b43a9e095c10db",
mydb: "fc9634aabefee16b",
name: "Persist reason catalog mirror",
x: 1820,
y: 1020,
wires: [[]],
};
// Idempotency: if the gate node already exists, this patch ran before — the
// function-source patches above are themselves no-ops on a patched flow.
if (j.some((n) => n.id === gateId)) {
console.log("Patch already applied (gate node exists). Skipping insert.");
} else {
// Insert both nodes right after the Apply settings node for readable diffs.
const idx = j.findIndex((n) => n.id === applyId);
j.splice(idx + 1, 0, gateNode, mysqlNode);
}
// Persist with Node-RED's conventional 4-space JSON plus trailing newline.
writeFileSync(path, JSON.stringify(j, null, 4) + "\n");
console.log("Patched", path);

View File

@@ -0,0 +1,280 @@
#!/usr/bin/env node
/**
* Load downtime + scrap catalogs from Excel under ./reasons/ into Postgres.
*
* npx dotenv -e .env -- node scripts/seed-reason-catalog-from-xlsx.mjs --org-id <uuid>
* npx dotenv -e .env -- node scripts/seed-reason-catalog-from-xlsx.mjs --org-slug my-org --replace
*
* --dry-run parse and print counts only
* --replace delete existing reason_catalog_* rows for the org before insert
*/
import { readFileSync, existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
import * as XLSX from "xlsx";
import { PrismaClient } from "@prisma/client";
// Repo root (one level above scripts/); used for the default xlsx input paths.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.join(__dirname, "..");
// Shared Prisma client; disconnected in the top-level finally handler.
const prisma = new PrismaClient();
/**
 * Build a printed reason code from a category prefix and an item suffix.
 * A purely numeric suffix after a prefix of 3+ characters gets a dash
 * ("DTPRC" + "01" -> "DTPRC-01"); otherwise the parts are concatenated
 * directly ("MX" + "001" -> "MX001"). Result is always uppercase.
 * @param {string} prefix - category code prefix (trimmed, uppercased).
 * @param {string} suffix - per-item code suffix (trimmed).
 * @returns {string} the composed uppercase reason code
 */
function composeReasonCode(prefix, suffix) {
  const head = String(prefix ?? "").trim().toUpperCase();
  const tail = String(suffix ?? "").trim();
  const useDash = /^\d+$/.test(tail) && head.length >= 3;
  const joined = useDash ? `${head}-${tail}` : `${head}${tail}`;
  return joined.toUpperCase();
}
/**
 * Parse CLI flags for the seeder.
 * Supported: --dry-run, --replace, --org-id <uuid>, --org-slug <slug>,
 * --downtime <path>, --scrap <path>. Any other token throws.
 * @param {string[]} argv - usually process.argv.slice(2)
 * @returns {{dryRun:boolean,replace:boolean,orgId:?string,orgSlug:?string,downtimePath:string,scrapPath:string}}
 * @throws {Error} on an unrecognized argument
 */
function parseArgs(argv) {
  const out = {
    dryRun: false,
    replace: false,
    orgId: null,
    orgSlug: null,
    downtimePath: path.join(ROOT, "reasons", "Claves Tiempo Muerto.xlsx"),
    scrapPath: path.join(ROOT, "reasons", "Claves de Scrap.xlsx"),
  };
  let i = 0;
  while (i < argv.length) {
    const token = argv[i];
    switch (token) {
      case "--dry-run":
        out.dryRun = true;
        i += 1;
        break;
      case "--replace":
        out.replace = true;
        i += 1;
        break;
      case "--org-id":
        out.orgId = argv[i + 1] || null;
        i += 2;
        break;
      case "--org-slug":
        out.orgSlug = argv[i + 1] || null;
        i += 2;
        break;
      case "--downtime":
        // Missing value keeps the default path.
        out.downtimePath = argv[i + 1] || out.downtimePath;
        i += 2;
        break;
      case "--scrap":
        out.scrapPath = argv[i + 1] || out.scrapPath;
        i += 2;
        break;
      default:
        throw new Error(`Unknown arg: ${token}`);
    }
  }
  return out;
}
/**
 * Load an .xlsx file from disk into an XLSX workbook object.
 * @param {string} filePath - path to the workbook.
 * @returns {object} parsed workbook
 * @throws {Error} when the file does not exist
 */
function readWorkbook(filePath) {
  if (!existsSync(filePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  return XLSX.read(readFileSync(filePath), { type: "buffer" });
}
/** @returns {{ kind:'downtime', name:string, codePrefix:string, items: { suffix:string, name:string }[] }[]} */
function parseDowntimeXlsx(filePath) {
const wb = readWorkbook(filePath);
const data = XLSX.utils.sheet_to_json(wb.Sheets["Sheet1"], { header: 1, defval: "" });
const headerRowIdx = 3;
const header = data[headerRowIdx] || [];
const cols = [];
for (let c = 0; c < header.length; c += 1) {
if (String(header[c] || "").trim()) cols.push(c);
}
const categoryByCol = {};
cols.forEach((c) => {
categoryByCol[c] = String(header[c]).trim();
});
const CODE = /^([A-Z0-9][A-Za-z0-9-]*)-(\d+)\s+(.*)$/;
const rawItems = [];
for (let r = headerRowIdx + 1; r < data.length; r += 1) {
const row = data[r] || [];
for (const c of cols) {
const cell = String(row[c] ?? "").trim();
if (!cell) continue;
const m = cell.match(CODE);
if (m) {
rawItems.push({
col: c,
categoryLabel: categoryByCol[c],
prefix: m[1].toUpperCase(),
suffix: m[2],
name: m[3].trim(),
row: r,
});
} else if (cell.length > 2 && cell === cell.toUpperCase() && !/\d/.test(cell)) {
categoryByCol[c] = cell;
}
}
}
/** @type {Map<string, { kind:'downtime', name:string, codePrefix:string, items: { suffix:string, name:string }[]}>} */
const catMap = new Map();
function catKey(categoryName, prefix) {
return `${categoryName}\0${prefix}`;
}
for (const it of rawItems) {
const key = catKey(it.categoryLabel, it.prefix);
let bucket = catMap.get(key);
if (!bucket) {
bucket = { kind: "downtime", name: it.categoryLabel, codePrefix: it.prefix, items: [] };
catMap.set(key, bucket);
}
bucket.items.push({ suffix: it.suffix, name: it.name });
}
/** Dedupe suffix per category (keep first description). */
for (const b of catMap.values()) {
const seen = new Map();
const next = [];
for (const row of b.items) {
if (seen.has(row.suffix)) continue;
seen.set(row.suffix, true);
next.push(row);
}
b.items = next.sort((a, b) => Number(a.suffix) - Number(b.suffix));
}
return [...catMap.values()];
}
/**
 * Parse the scrap workbook ("Sheet1"): column A holds codes like "MX001",
 * column B the description. Header rows starting with "Clave", empty
 * descriptions, and "Rev. X"-style revision rows are skipped.
 * Returns a single synthetic "Scrap" category.
 * @returns {{ kind:'scrap', name:string, codePrefix:string, items: { suffix:string, name:string }[] }[]}
 */
function parseScrapXlsx(filePath) {
const wb = readWorkbook(filePath);
const data = XLSX.utils.sheet_to_json(wb.Sheets["Sheet1"], { header: 1, defval: "" });
/** @type { { suffix:string, name:string, full:string }[] } */
const rows = [];
for (let r = 0; r < data.length; r += 1) {
const clave = String(data[r][0] ?? "").trim();
// Collapse runs of internal whitespace in descriptions.
const desc = String(data[r][1] ?? "").trim().replace(/\s+/g, " ");
if (!clave || /^clave/i.test(clave)) continue;
if (!desc || /Rev\.?\s*[A-Z]/i.test(desc)) continue;
// Code must be letters followed by digits; anything else is logged and skipped.
const m = clave.toUpperCase().match(/^([A-Z]+)(\d+)$/);
if (!m) {
console.warn(`[scrap] skip row ${r}:`, clave);
continue;
}
rows.push({
full: `${m[1]}${m[2]}`,
suffix: m[2],
name: desc,
});
}
/** Single category when all MX… */
const prefixes = new Set(rows.map((x) => x.full.replace(/\d+$/, "")));
if (prefixes.size !== 1) {
console.warn("[scrap] multiple prefixes:", [...prefixes]);
}
// First prefix wins when mixed; "MX" is the fallback for an empty sheet.
const codePrefix = [...prefixes][0] || "MX";
const items = rows.map(({ suffix, name }) => ({ suffix, name }));
return [{ kind: "scrap", name: "Scrap", codePrefix, items }];
}
/**
 * Seed the org's reason catalog tables from the two Excel workbooks.
 * Resolves the org (by id or slug), parses both sheets, then — inside one
 * transaction — optionally wipes and re-inserts categories/items, and bumps
 * org_settings.version so consumers pick up the change.
 * @throws {Error} when --org-slug does not resolve to an org
 */
async function main() {
const args = parseArgs(process.argv.slice(2));
let orgId = args.orgId;
if (!orgId && args.orgSlug) {
const org = await prisma.org.findUnique({ where: { slug: args.orgSlug }, select: { id: true } });
if (!org) throw new Error(`Org slug not found: ${args.orgSlug}`);
orgId = org.id;
}
if (!orgId) {
console.error("Provide --org-id <uuid> or --org-slug <slug>");
process.exit(1);
}
const downtimeCats = parseDowntimeXlsx(args.downtimePath);
const scrapCats = parseScrapXlsx(args.scrapPath);
const totalItems =
downtimeCats.reduce((n, c) => n + c.items.length, 0) + scrapCats.reduce((n, c) => n + c.items.length, 0);
console.log("[seed] downtime categories:", downtimeCats.length, "scrap categories:", scrapCats.length);
console.log("[seed] total items:", totalItems);
if (args.dryRun) {
// Dry run: print a sample (first 2 downtime categories + all scrap) and stop.
console.log(JSON.stringify({ downtimeCats: downtimeCats.slice(0, 2), scrapCats }, null, 2));
return;
}
// Refuse to double-seed an org unless --replace was passed.
const existing = await prisma.reasonCatalogCategory.count({ where: { orgId } });
if (existing && !args.replace) {
console.error(
`Org already has ${existing} catalog categor(ies). Re-run with --replace to wipe and reload, or use Control Tower UI.`
);
process.exit(1);
}
const bundled = [...downtimeCats, ...scrapCats];
/** @type {string[]} */
const dupCheck = [];
await prisma.$transaction(async (tx) => {
if (args.replace) {
// Items deleted before their categories — presumably FK ordering; confirm schema.
await tx.reasonCatalogItem.deleteMany({ where: { orgId } });
await tx.reasonCatalogCategory.deleteMany({ where: { orgId } });
}
let catOrder = 0;
for (const block of bundled) {
const category = await tx.reasonCatalogCategory.create({
data: {
orgId,
kind: block.kind,
name: block.name,
codePrefix: block.codePrefix,
sortOrder: catOrder++,
active: true,
},
});
// Items keep their parsed order within the category.
let itOrder = 0;
for (const row of block.items) {
const reasonCode = composeReasonCode(block.codePrefix, row.suffix);
dupCheck.push(reasonCode);
await tx.reasonCatalogItem.create({
data: {
orgId,
categoryId: category.id,
name: row.name,
codeSuffix: row.suffix,
reasonCode,
sortOrder: itOrder++,
active: true,
},
});
}
}
// Signal downstream consumers that the catalog changed.
await tx.orgSettings.update({
where: { orgId },
data: { version: { increment: 1 } },
});
});
// Post-hoc duplicate scan across all composed codes. This only warns — it
// runs after the transaction, so any DB-level unique constraint is the real guard.
const seen = new Set();
let dup = 0;
for (const rc of dupCheck) {
if (seen.has(rc)) dup++;
seen.add(rc);
}
if (dup) console.warn("[seed] duplicate reason_code skipped by DB unique?", dup);
console.log("[seed] done. Bump org_settings.version (+1).");
}
// Run the seeder; failures are logged and exit non-zero.
// NOTE(review): process.exit(1) inside .catch terminates the process before
// .finally runs, so prisma.$disconnect is skipped on error — acceptable for a
// one-shot CLI, but confirm this is intentional.
main()
.catch((e) => {
console.error(e);
process.exit(1);
})
.finally(async () => {
await prisma.$disconnect();
});