perf(test): add gh run ingestion for memory hotspots (#60187)

* perf(test): add gh run ingestion for memory hotspots

* perf(test): harden gh run hotspot ingestion
This commit is contained in:
Vincent Koc
2026-04-03 18:30:51 +09:00
committed by GitHub
parent 87b7bb1d14
commit ddb7e4cc34
4 changed files with 133 additions and 5 deletions

View File

@@ -5,6 +5,8 @@ import path from "node:path";
export function loadHotspotInputTexts({
logPaths = [],
ghJobs = [],
ghRuns = [],
ghRunJobMatches = [],
readFileSyncImpl = fs.readFileSync,
execFileSyncImpl = execFileSync,
}) {
@@ -15,7 +17,66 @@ export function loadHotspotInputTexts({
text: readFileSyncImpl(logPath, "utf8"),
});
}
for (const ghJobId of ghJobs) {
// Normalize the user-supplied job-name filters: coerce each entry to a
// string, trim it, drop empties, and lowercase for case-insensitive matching.
const normalizedRunJobMatches = [];
for (const rawMatch of ghRunJobMatches) {
  const trimmed = typeof rawMatch === "string" ? rawMatch.trim() : String(rawMatch ?? "").trim();
  if (trimmed.length > 0) {
    normalizedRunJobMatches.push(trimmed.toLowerCase());
  }
}
// A run job is kept when no filters were supplied, or when its name contains
// any filter substring (case-insensitive). Non-string names never match.
const shouldIncludeRunJob = (jobName) => {
  if (normalizedRunJobMatches.length === 0) return true;
  return (
    typeof jobName === "string" &&
    normalizedRunJobMatches.some((match) => jobName.toLowerCase().includes(match))
  );
};
// Deduplicate explicit and run-derived job ids so repeated inputs do not refetch the same log.
const ghJobIds = new Set();
for (const rawJobId of ghJobs) {
  const trimmed = typeof rawJobId === "string" ? rawJobId.trim() : String(rawJobId ?? "").trim();
  if (trimmed.length > 0) {
    ghJobIds.add(trimmed);
  }
}
// Resolve each requested workflow run into its constituent job ids via the
// gh CLI, filtering by job name and funneling the surviving ids into ghJobIds.
for (const rawRunId of ghRuns) {
  const normalizedRunId =
    typeof rawRunId === "string" ? rawRunId.trim() : String(rawRunId ?? "").trim();
  if (normalizedRunId.length === 0) continue;
  // Fetch the run's job listing as JSON; wrap CLI failures with run context.
  let jobsJson;
  try {
    jobsJson = execFileSyncImpl("gh", ["run", "view", normalizedRunId, "--json", "jobs"], {
      encoding: "utf8",
      maxBuffer: 8 * 1024 * 1024,
    });
  } catch (error) {
    throw new Error(
      `[test-update-memory-hotspots] failed to fetch gh run ${normalizedRunId} jobs`,
      { cause: error },
    );
  }
  // Parse defensively: a missing or non-array `jobs` field yields no jobs
  // rather than a crash; malformed JSON is wrapped with run context.
  let runJobs;
  try {
    const payload = JSON.parse(jobsJson);
    runJobs = Array.isArray(payload?.jobs) ? payload.jobs : [];
  } catch (error) {
    throw new Error(
      `[test-update-memory-hotspots] failed to parse gh run ${normalizedRunId} jobs json`,
      { cause: error },
    );
  }
  // Keep only name-matched jobs that expose a usable numeric databaseId.
  for (const runJob of runJobs) {
    if (!shouldIncludeRunJob(runJob?.name)) continue;
    const candidateId = runJob?.databaseId;
    if (Number.isFinite(candidateId)) {
      ghJobIds.add(String(candidateId));
    }
  }
}
for (const ghJobId of ghJobIds) {
inputs.push({
sourceName: `gh-job-${String(ghJobId)}`,
text: execFileSyncImpl("gh", ["run", "view", "--job", String(ghJobId), "--log"], {

View File

@@ -19,6 +19,8 @@ if (process.argv.slice(2).includes("--help")) {
" --lane-prefix <prefix> Additional lane prefixes to include (repeatable)",
" --log <path> Memory trace log to ingest (repeatable, required)",
" --gh-job <id> GitHub Actions job id to ingest via gh (repeatable)",
" --gh-run <id> GitHub Actions run id to ingest via gh (repeatable)",
" --gh-run-job-match <text> Filter gh-run jobs by name substring (repeatable)",
" --min-delta-kb <kb> Minimum RSS delta to retain (default: 262144)",
" --limit <count> Max hotspot entries to retain (default: 64)",
" --help Show this help text",
@@ -27,6 +29,7 @@ if (process.argv.slice(2).includes("--help")) {
" node scripts/test-update-memory-hotspots.mjs --log /tmp/unit-fast.log",
" node scripts/test-update-memory-hotspots.mjs --log a.log --log b.log --lane-prefix unit-fast-batch-",
" node scripts/test-update-memory-hotspots.mjs --gh-job 69804189668 --gh-job 69804189672",
" node scripts/test-update-memory-hotspots.mjs --gh-run 23933168654 --gh-run-job-match extensions",
].join("\n"),
);
process.exit(0);
@@ -42,6 +45,8 @@ function parseArgs(argv) {
lanePrefixes: [],
logs: [],
ghJobs: [],
ghRuns: [],
ghRunJobMatches: [],
minDeltaKb: 256 * 1024,
limit: 64,
},
@@ -52,6 +57,8 @@ function parseArgs(argv) {
stringListFlag("--lane-prefix", "lanePrefixes"),
stringListFlag("--log", "logs"),
stringListFlag("--gh-job", "ghJobs"),
stringListFlag("--gh-run", "ghRuns"),
stringListFlag("--gh-run-job-match", "ghRunJobMatches"),
intFlag("--min-delta-kb", "minDeltaKb", { min: 1 }),
intFlag("--limit", "limit", { min: 1 }),
],
@@ -95,8 +102,10 @@ function mergeHotspotEntry(aggregated, file, value) {
const opts = parseArgs(process.argv.slice(2));
if (opts.logs.length === 0 && opts.ghJobs.length === 0) {
console.error("[test-update-memory-hotspots] pass at least one --log <path> or --gh-job <id>.");
if (opts.logs.length === 0 && opts.ghJobs.length === 0 && opts.ghRuns.length === 0) {
console.error(
"[test-update-memory-hotspots] pass at least one --log <path>, --gh-job <id>, or --gh-run <id>.",
);
process.exit(2);
}
@@ -107,7 +116,12 @@ if (existing) {
mergeHotspotEntry(aggregated, file, value);
}
}
for (const input of loadHotspotInputTexts({ logPaths: opts.logs, ghJobs: opts.ghJobs })) {
for (const input of loadHotspotInputTexts({
logPaths: opts.logs,
ghJobs: opts.ghJobs,
ghRuns: opts.ghRuns,
ghRunJobMatches: opts.ghRunJobMatches,
})) {
const text = input.text;
const summaries = parseMemoryTraceSummaryLines(text).filter((summary) =>
matchesHotspotSummaryLane(summary.lane, opts.lane, opts.lanePrefixes),