feat: New reports

This commit is contained in:
Brennan Wilkes (Text Groove) 2026-02-02 19:13:49 -08:00
parent 6a2b51eaf3
commit 35e5956c12
6 changed files with 226 additions and 37 deletions

View file

@ -106,6 +106,19 @@ fi
"$NODE_BIN" tools/build_viz_commits.js
"$NODE_BIN" tools/build_viz_recent.js
# Build common listings artifacts (9 files: groups {all,bc,ab} x top {50,250,1000}).
# Loop instead of 9 copy-pasted lines so adding a group/size is a one-token change.
for group in all bc ab; do
  for top in 50 250 1000; do
    "$NODE_BIN" tools/build_common_listings.js --group "$group" --top "$top" --out "reports/common_listings_${group}_top${top}.json"
  done
done
# Stage only data/report/viz outputs
git add -A data/db reports viz/data

View file

@ -6,12 +6,14 @@
- Store = storeLabel (union across categories).
- Canonicalizes via sku_map.
- Debug output while scanning.
- Writes: reports/common_listings.json
- Writes: reports/common_listings_<group>_top<N>.json (or --out)
Flags:
--top N
--min-stores N
--require-all
--group all|bc|ab
--out path/to/file.json
*/
const fs = require("fs");
@ -94,16 +96,31 @@ function canonicalize(k, skuMap) {
/* ---------------- args ---------------- */
/**
 * Parse CLI flags into an options object.
 * Supported flags: --top N, --min-stores N, --require-all,
 * --group all|bc|ab, --out path/to/file.json.
 * Unknown flags are ignored; numeric flags fall back to their defaults
 * when the value is missing or not a number.
 * (Fix: the diff rendering had left the superseded one-line `const out = ...`
 * declaration in place alongside its replacement — a duplicate declaration.)
 * @param {string[]} argv - raw argument vector (process.argv.slice(2))
 * @returns {{top:number, minStores:number, requireAll:boolean, group:string, out:string}}
 */
function parseArgs(argv) {
const out = {
top: 50,
minStores: 2,
requireAll: false,
group: "all", // all|bc|ab
out: "", // optional explicit output path
};
for (let i = 0; i < argv.length; i++) {
const a = argv[i];
if (a === "--top" && argv[i + 1]) out.top = Number(argv[++i]) || 50;
else if (a === "--min-stores" && argv[i + 1]) out.minStores = Number(argv[++i]) || 2;
else if (a === "--require-all") out.requireAll = true;
else if (a === "--group" && argv[i + 1]) out.group = String(argv[++i] || "all");
else if (a === "--out" && argv[i + 1]) out.out = String(argv[++i] || "");
}
return out;
}
/**
 * Filter the sorted store list down to a region group.
 * "bc" keeps only the known BC store labels, "ab" keeps everything else,
 * and any other value (including "all") returns the input list unchanged.
 * @param {string} group - "all" | "bc" | "ab"
 * @param {string[]} allStoresSorted - every store label, already sorted
 * @returns {string[]} stores belonging to the requested group
 */
function groupStores(group, allStoresSorted) {
// Labels treated as British Columbia stores; all others count as Alberta.
const bcLabels = new Set(["gull", "strath", "bcl", "legacy", "tudor"]);
switch (group) {
case "bc":
return allStoresSorted.filter((label) => bcLabels.has(label));
case "ab":
return allStoresSorted.filter((label) => !bcLabels.has(label));
default:
return allStoresSorted; // "all" (or anything unrecognized)
}
}
/* ---------------- main ---------------- */
function main() {
@ -123,8 +140,8 @@ function main() {
console.log(`[debug] skuMap: ${skuMap ? "loaded" : "missing"}`);
console.log(`[debug] scanning ${dbFiles.length} db files`);
const storeToCanon = new Map(); // storeLabel -> Set(canonSku)
const canonAgg = new Map(); // canonSku -> { stores:Set, listings:[], cheapest }
const storeToCanon = new Map(); // storeLabel -> Set(canonSku)
const canonAgg = new Map(); // canonSku -> { stores:Set, listings:[], cheapest, perStore:Map(storeLabel -> {priceNum, item}) }
let liveRows = 0;
let removedRows = 0;
@ -167,7 +184,7 @@ function main() {
let agg = canonAgg.get(canonSku);
if (!agg) {
agg = { stores: new Set(), listings: [], cheapest: null };
agg = { stores: new Set(), listings: [], cheapest: null, perStore: new Map() };
canonAgg.set(canonSku, agg);
}
@ -195,13 +212,25 @@ function main() {
agg.cheapest = { priceNum, item: listing };
}
}
// per-store numeric price (best/lowest numeric; otherwise first seen)
const prev = agg.perStore.get(storeLabel);
if (priceNum !== null) {
if (!prev || prev.priceNum === null || priceNum < prev.priceNum) {
agg.perStore.set(storeLabel, { priceNum, item: listing });
}
} else {
if (!prev) agg.perStore.set(storeLabel, { priceNum: null, item: listing });
}
}
}
const stores = [...storeToCanon.keys()].sort();
const allStores = [...storeToCanon.keys()].sort();
const stores = groupStores(String(args.group || "all").toLowerCase(), allStores);
const storeCount = stores.length;
console.log(`[debug] stores (${storeCount}): ${stores.join(", ")}`);
console.log(`[debug] stores(all) (${allStores.length}): ${allStores.join(", ")}`);
console.log(`[debug] group="${args.group}" stores(${storeCount}): ${stores.join(", ")}`);
console.log(`[debug] liveRows=${liveRows} removedRows=${removedRows} canonSkus=${canonAgg.size}`);
function pickRepresentative(agg) {
@ -217,14 +246,24 @@ function main() {
const rows = [];
for (const [canonSku, agg] of canonAgg.entries()) {
const groupStoresPresent = stores.filter((s) => agg.stores.has(s));
if (groupStoresPresent.length === 0) continue;
const rep = pickRepresentative(agg);
const missingStores = stores.filter((s) => !agg.stores.has(s));
const storePrices = {};
for (const s of stores) {
const ps = agg.perStore.get(s);
storePrices[s] = ps ? ps.priceNum : null;
}
rows.push({
canonSku,
storeCount: agg.stores.size,
stores: [...agg.stores].sort(),
storeCount: groupStoresPresent.length,
stores: groupStoresPresent.sort(),
missingStores,
storePrices,
representative: rep
? {
name: rep.name,
@ -249,7 +288,11 @@ function main() {
});
}
rows.sort((a, b) => b.storeCount - a.storeCount);
// stable-ish ordering: primary by store coverage, tie-break by canonSku
rows.sort((a, b) => {
if (b.storeCount !== a.storeCount) return b.storeCount - a.storeCount;
return String(a.canonSku).localeCompare(String(b.canonSku));
});
const filtered = args.requireAll
? rows.filter((r) => r.storeCount === storeCount)
@ -257,9 +300,11 @@ function main() {
const top = filtered.slice(0, args.top);
const safeGroup = String(args.group || "all").toLowerCase();
const payload = {
generatedAt: new Date().toISOString(),
args,
args: { ...args, group: safeGroup },
storeCount,
stores,
totals: {
@ -271,7 +316,9 @@ function main() {
rows: top,
};
const outPath = path.join(reportsDir, "common_listings.json");
const defaultName = `common_listings_${safeGroup}_top${args.top}.json`;
const outPath = args.out ? path.resolve(repoRoot, args.out) : path.join(reportsDir, defaultName);
fs.writeFileSync(outPath, JSON.stringify(payload, null, 2) + "\n", "utf8");
console.log(`Wrote ${path.relative(repoRoot, outPath)} (${top.length} rows)`);
}

View file

@ -20,45 +20,44 @@ function listDbFiles(dbDir) {
}
}
/**
 * List absolute paths of common-listings report files inside reportsDir.
 * A report file is a regular file named common_listings_<group>_top<N>.json
 * (case-insensitive). Returns [] when the directory cannot be read.
 * @param {string} reportsDir - directory to scan
 * @returns {string[]} absolute paths of matching report files
 */
function listCommonListingReportFiles(reportsDir) {
const reportPattern = /^common_listings_.*_top\d+\.json$/i;
let entries;
try {
entries = fs.readdirSync(reportsDir, { withFileTypes: true });
} catch {
return []; // missing/unreadable dir -> nothing to report
}
const matches = [];
for (const entry of entries) {
if (!entry.isFile()) continue;
if (!entry.name.endsWith(".json")) continue;
if (!reportPattern.test(entry.name)) continue;
matches.push(path.join(reportsDir, entry.name));
}
return matches;
}
/**
 * Extract the leading YYYY-MM-DD calendar date from an ISO-8601 string.
 * Non-strings are coerced; null/undefined and non-matching input yield "".
 * @param {*} iso - ISO timestamp (or anything stringifiable)
 * @returns {string} "YYYY-MM-DD" or "" when no date prefix is present
 */
function dateOnly(iso) {
const text = String(iso ?? "");
const match = /^(\d{4}-\d{2}-\d{2})/.exec(text);
return match === null ? "" : match[1];
}
function main() {
const repoRoot = process.cwd();
const dbDir = path.join(repoRoot, "data", "db");
const outDir = path.join(repoRoot, "viz", "data");
const outFile = path.join(outDir, "db_commits.json");
fs.mkdirSync(outDir, { recursive: true });
const files = listDbFiles(dbDir).map((abs) => path.posix.join("data/db", path.basename(abs)));
function buildCommitPayloadForFiles({ repoRoot, relFiles, maxRawPerFile, maxDaysPerFile }) {
const payload = {
generatedAt: new Date().toISOString(),
branch: "data",
files: {},
};
// We want the viz to show ONE point per day (the most recent run that day).
// So we collapse multiple commits per day down to the newest commit for that date.
//
// With multiple runs/day, we also want to keep a long-ish daily history.
// Raw commits per day could be ~4, so grab a larger raw window and then collapse.
const MAX_RAW_PER_FILE = 2400; // ~600 days @ 4 runs/day
const MAX_DAYS_PER_FILE = 600; // daily points kept after collapsing
for (const rel of files.sort()) {
for (const rel of relFiles.sort()) {
let txt = "";
try {
// %H = sha, %cI = committer date strict ISO 8601 (includes time + tz)
txt = runGit(["log", "--format=%H %cI", `-${MAX_RAW_PER_FILE}`, "--", rel]);
txt = runGit(["log", "--format=%H %cI", `-${maxRawPerFile}`, "--", rel]);
} catch {
continue;
}
const lines = txt.split(/\r?\n/).map((s) => s.trim()).filter(Boolean);
const lines = txt
.split(/\r?\n/)
.map((s) => s.trim())
.filter(Boolean);
// git log is newest -> oldest.
// Keep the FIRST commit we see for each date (that is the most recent commit for that date).
@ -79,15 +78,62 @@ function main() {
let arr = [...byDate.values()].reverse();
// Keep only the newest MAX_DAYS_PER_FILE (still oldest -> newest)
if (arr.length > MAX_DAYS_PER_FILE) {
arr = arr.slice(arr.length - MAX_DAYS_PER_FILE);
if (arr.length > maxDaysPerFile) {
arr = arr.slice(arr.length - maxDaysPerFile);
}
payload.files[rel] = arr;
}
fs.writeFileSync(outFile, JSON.stringify(payload, null, 2) + "\n", "utf8");
process.stdout.write(`Wrote ${outFile} (${Object.keys(payload.files).length} files)\n`);
return payload;
}
/**
 * Build the viz commit-history artifacts.
 * Writes viz/data/db_commits.json (daily commit points per data/db file —
 * output shape unchanged from before) and viz/data/common_listings_commits.json
 * (same shape, for the common-listings report files).
 */
function main() {
const repoRoot = process.cwd();
const dbDir = path.join(repoRoot, "data", "db");
const reportsDir = path.join(repoRoot, "reports");
const outDir = path.join(repoRoot, "viz", "data");
fs.mkdirSync(outDir, { recursive: true });
// The viz shows ONE point per day (the most recent run that day), so
// multiple commits per day are collapsed to the newest for that date.
// The raw commit window is larger than the daily window because there
// can be ~4 runs/day.
const RAW_COMMIT_WINDOW = 2400; // ~600 days @ 4 runs/day
const DAILY_POINT_WINDOW = 600; // daily points kept after collapsing
// Shared writer: build the payload for a file list, write it, log it.
const writeCommitsFile = (outFile, relFiles) => {
const payload = buildCommitPayloadForFiles({
repoRoot,
relFiles,
maxRawPerFile: RAW_COMMIT_WINDOW,
maxDaysPerFile: DAILY_POINT_WINDOW,
});
fs.writeFileSync(outFile, JSON.stringify(payload, null, 2) + "\n", "utf8");
process.stdout.write(`Wrote ${outFile} (${Object.keys(payload.files).length} files)\n`);
};
// ---- Existing output (UNCHANGED): db_commits.json ----
const dbRelFiles = listDbFiles(dbDir).map((abs) => path.posix.join("data/db", path.basename(abs)));
writeCommitsFile(path.join(outDir, "db_commits.json"), dbRelFiles);
// ---- New output: common listings report commits ----
const reportRelFiles = listCommonListingReportFiles(reportsDir).map((abs) => path.posix.join("reports", path.basename(abs)));
writeCommitsFile(path.join(outDir, "common_listings_commits.json"), reportRelFiles);
}
main();

View file

@ -4,6 +4,7 @@
* #/item/<sku> detail
* #/link sku linker (local-write only)
* #/store/<store> store page (in-stock only)
* #/stats statistics
*/
import { destroyChart } from "./item_page.js";
@ -11,6 +12,7 @@ import { renderSearch } from "./search_page.js";
import { renderItem } from "./item_page.js";
import { renderSkuLinker } from "./linker_page.js";
import { renderStore } from "./store_page.js";
import { renderStats, destroyStatsChart } from "./stats_page.js";
function route() {
const $app = document.getElementById("app");
@ -18,6 +20,7 @@ function route() {
// always clean up chart when navigating
destroyChart();
destroyStatsChart();
const h = location.hash || "#/";
const parts = h.replace(/^#\/?/, "").split("/").filter(Boolean);
@ -26,6 +29,7 @@ function route() {
if (parts[0] === "item" && parts[1]) return renderItem($app, decodeURIComponent(parts[1]));
if (parts[0] === "store" && parts[1]) return renderStore($app, decodeURIComponent(parts[1]));
if (parts[0] === "link") return renderSkuLinker($app);
if (parts[0] === "stats") return renderStats($app);
return renderSearch($app);
}

View file

@ -17,6 +17,7 @@ export function renderSearch($app) {
</div>
<div class="headerRight headerButtons">
<a class="btn btnWide" href="#/stats" style="text-decoration:none;">Statistics</a>
<a class="btn btnWide" href="#/link" style="text-decoration:none;">Link SKUs</a>
<button class="btn btnWide" type="button" disabled>Email Notifications</button>
</div>

78
viz/app/stats_page.js Normal file
View file

@ -0,0 +1,78 @@
import { esc } from "./dom.js";
// Module-level handle to the active Chart.js instance (null when none).
let _chart = null;

/**
 * Tear down the stats page chart, if any. Safe to call repeatedly and
 * during navigation; errors thrown by destroy() are deliberately ignored
 * so cleanup never breaks routing.
 */
export function destroyStatsChart() {
const chart = _chart;
_chart = null;
try {
chart?.destroy();
} catch {
/* best-effort cleanup */
}
}
/**
 * Resolve with the global Chart.js constructor, lazily injecting the UMD
 * bundle from jsDelivr the first time it is needed. Rejects if the script
 * tag fails to load.
 * @returns {Promise<*>} window.Chart once available
 */
function ensureChartJs() {
// Already loaded (or bundled elsewhere) — nothing to inject.
if (window.Chart) return Promise.resolve(window.Chart);
return new Promise((resolve, reject) => {
const script = document.createElement("script");
// UMD build -> window.Chart
script.src = "https://cdn.jsdelivr.net/npm/chart.js@4.4.1/dist/chart.umd.min.js";
script.async = true;
script.onload = () => resolve(window.Chart);
script.onerror = () => reject(new Error("Failed to load Chart.js"));
document.head.appendChild(script);
});
}
/**
 * Render the #/stats statistics page into $app.
 * Shows a header with a back link and a placeholder Chart.js line chart
 * (empty labels/datasets — the page is marked "Coming soon").
 * Chart.js is lazy-loaded via ensureChartJs(); if it cannot load, the chart
 * card is replaced with an error message.
 * @param {HTMLElement} $app - container element to render into
 */
export async function renderStats($app) {
// Drop any chart left over from a previous visit before re-rendering.
destroyStatsChart();
$app.innerHTML = `
<div class="container">
<div class="header">
<div class="headerRow1">
<div class="headerLeft">
<h1 class="h1">Statistics</h1>
<div class="small">Coming soon</div>
</div>
<div class="headerRight headerButtons">
<a class="btn btnWide" href="#/" style="text-decoration:none;"> Back</a>
</div>
</div>
</div>
<div class="card">
<div style="height:340px;">
<canvas id="statsChart" aria-label="Statistics chart" role="img"></canvas>
</div>
</div>
</div>
`;
try {
// Lazy-load the Chart.js UMD bundle only when this page is opened.
const Chart = await ensureChartJs();
const canvas = document.getElementById("statsChart");
// User may have navigated away while the script was loading.
if (!canvas) return;
const ctx = canvas.getContext("2d");
// Placeholder line chart with empty data; stored in the module-level
// _chart handle so destroyStatsChart() can tear it down on navigation.
_chart = new Chart(ctx, {
type: "line",
data: {
labels: [],
datasets: [{ label: "Price", data: [] }],
},
options: {
responsive: true,
maintainAspectRatio: false,
animation: false,
plugins: { legend: { display: true } },
scales: {
x: { title: { display: true, text: "Time" } },
y: { title: { display: true, text: "Value" } },
},
},
});
} catch (e) {
// Show a friendly message in place of the chart; esc() guards against
// injecting markup from the error message into innerHTML.
const msg = esc(e?.message || String(e));
$app.querySelector(".card").innerHTML = `<div class="small">Chart unavailable: ${msg}</div>`;
}
}