perf(test): add gh run ingestion for memory hotspots (#60187)

* perf(test): add gh run ingestion for memory hotspots

* perf(test): harden gh run hotspot ingestion
This commit is contained in:
Vincent Koc
2026-04-03 18:30:51 +09:00
committed by GitHub
parent 87b7bb1d14
commit ddb7e4cc34
4 changed files with 133 additions and 5 deletions

View File

@@ -1197,7 +1197,7 @@
"test:perf:profile:main": "node scripts/run-vitest-profile.mjs main",
"test:perf:profile:runner": "node scripts/run-vitest-profile.mjs runner",
"test:perf:update-memory-hotspots": "node scripts/test-update-memory-hotspots.mjs",
"test:perf:update-memory-hotspots:extensions": "node scripts/test-update-memory-hotspots.mjs --config vitest.extensions.config.ts --out test/fixtures/test-memory-hotspots.extensions.json --lane extensions --lane-prefix extensions-batch- --min-delta-kb 1048576",
"test:perf:update-memory-hotspots:extensions": "node scripts/test-update-memory-hotspots.mjs --config vitest.extensions.config.ts --out test/fixtures/test-memory-hotspots.extensions.json --lane extensions --lane-prefix extensions-batch- --min-delta-kb 1048576 --limit 20",
"test:perf:update-timings": "node scripts/test-update-timings.mjs",
"test:perf:update-timings:extensions": "node scripts/test-update-timings.mjs --config vitest.extensions.config.ts",
"test:sectriage": "pnpm exec vitest run --config vitest.gateway.config.ts && vitest run --config vitest.unit.config.ts --exclude src/daemon/launchd.integration.test.ts --exclude src/process/exec.test.ts",

View File

@@ -5,6 +5,8 @@ import path from "node:path";
export function loadHotspotInputTexts({
logPaths = [],
ghJobs = [],
ghRuns = [],
ghRunJobMatches = [],
readFileSyncImpl = fs.readFileSync,
execFileSyncImpl = execFileSync,
}) {
@@ -15,7 +17,66 @@ export function loadHotspotInputTexts({
text: readFileSyncImpl(logPath, "utf8"),
});
}
for (const ghJobId of ghJobs) {
// Normalize the job-name filters once: trim, drop empties, and lowercase so
// run-job names can be matched case-insensitively below.
const normalizedRunJobMatches = ghRunJobMatches
.map((match) => (typeof match === "string" ? match.trim() : String(match ?? "").trim()))
.filter((match) => match.length > 0)
.map((match) => match.toLowerCase());
// With no filters configured, every job from a run is ingested; otherwise the
// job name must contain at least one filter substring (case-insensitive).
const shouldIncludeRunJob = (jobName) => {
if (normalizedRunJobMatches.length === 0) {
return true;
}
if (typeof jobName !== "string") {
return false;
}
const normalizedName = jobName.toLowerCase();
return normalizedRunJobMatches.some((match) => normalizedName.includes(match));
};
// Deduplicate explicit and run-derived job ids so repeated inputs do not refetch the same log.
const ghJobIds = new Set(
ghJobs
.map((jobId) => (typeof jobId === "string" ? jobId.trim() : String(jobId ?? "").trim()))
.filter((jobId) => jobId.length > 0),
);
// Expand each requested run id into its constituent job ids via `gh run view`.
for (const ghRunId of ghRuns) {
const normalizedRunId =
typeof ghRunId === "string" ? ghRunId.trim() : String(ghRunId ?? "").trim();
if (normalizedRunId.length === 0) {
continue;
}
let rawJobs;
try {
// Fetch the run's job listing as JSON (8 MiB buffer for runs with many jobs).
rawJobs = execFileSyncImpl("gh", ["run", "view", normalizedRunId, "--json", "jobs"], {
encoding: "utf8",
maxBuffer: 8 * 1024 * 1024,
});
} catch (error) {
// Wrap the gh failure so the caller sees which run id could not be fetched.
throw new Error(
`[test-update-memory-hotspots] failed to fetch gh run ${normalizedRunId} jobs`,
{ cause: error },
);
}
let jobs = [];
try {
const parsed = JSON.parse(rawJobs);
// Tolerate unexpected payload shapes: only use `jobs` when it is an array.
jobs = Array.isArray(parsed?.jobs) ? parsed.jobs : [];
} catch (error) {
throw new Error(
`[test-update-memory-hotspots] failed to parse gh run ${normalizedRunId} jobs json`,
{ cause: error },
);
}
for (const job of jobs) {
if (!shouldIncludeRunJob(job?.name)) {
continue;
}
// gh reports the job id as numeric `databaseId`; skip non-numeric values
// so a malformed entry cannot produce a bogus `gh run view --job` call.
const jobId = job?.databaseId;
if (!Number.isFinite(jobId)) {
continue;
}
ghJobIds.add(String(jobId));
}
}
for (const ghJobId of ghJobIds) {
inputs.push({
sourceName: `gh-job-${String(ghJobId)}`,
text: execFileSyncImpl("gh", ["run", "view", "--job", String(ghJobId), "--log"], {

View File

@@ -19,6 +19,8 @@ if (process.argv.slice(2).includes("--help")) {
" --lane-prefix <prefix> Additional lane prefixes to include (repeatable)",
" --log <path> Memory trace log to ingest (repeatable, required)",
" --gh-job <id> GitHub Actions job id to ingest via gh (repeatable)",
" --gh-run <id> GitHub Actions run id to ingest via gh (repeatable)",
" --gh-run-job-match <text> Filter gh-run jobs by name substring (repeatable)",
" --min-delta-kb <kb> Minimum RSS delta to retain (default: 262144)",
" --limit <count> Max hotspot entries to retain (default: 64)",
" --help Show this help text",
@@ -27,6 +29,7 @@ if (process.argv.slice(2).includes("--help")) {
" node scripts/test-update-memory-hotspots.mjs --log /tmp/unit-fast.log",
" node scripts/test-update-memory-hotspots.mjs --log a.log --log b.log --lane-prefix unit-fast-batch-",
" node scripts/test-update-memory-hotspots.mjs --gh-job 69804189668 --gh-job 69804189672",
" node scripts/test-update-memory-hotspots.mjs --gh-run 23933168654 --gh-run-job-match extensions",
].join("\n"),
);
process.exit(0);
@@ -42,6 +45,8 @@ function parseArgs(argv) {
lanePrefixes: [],
logs: [],
ghJobs: [],
ghRuns: [],
ghRunJobMatches: [],
minDeltaKb: 256 * 1024,
limit: 64,
},
@@ -52,6 +57,8 @@ function parseArgs(argv) {
stringListFlag("--lane-prefix", "lanePrefixes"),
stringListFlag("--log", "logs"),
stringListFlag("--gh-job", "ghJobs"),
stringListFlag("--gh-run", "ghRuns"),
stringListFlag("--gh-run-job-match", "ghRunJobMatches"),
intFlag("--min-delta-kb", "minDeltaKb", { min: 1 }),
intFlag("--limit", "limit", { min: 1 }),
],
@@ -95,8 +102,10 @@ function mergeHotspotEntry(aggregated, file, value) {
const opts = parseArgs(process.argv.slice(2));
if (opts.logs.length === 0 && opts.ghJobs.length === 0) {
console.error("[test-update-memory-hotspots] pass at least one --log <path> or --gh-job <id>.");
if (opts.logs.length === 0 && opts.ghJobs.length === 0 && opts.ghRuns.length === 0) {
console.error(
"[test-update-memory-hotspots] pass at least one --log <path>, --gh-job <id>, or --gh-run <id>.",
);
process.exit(2);
}
@@ -107,7 +116,12 @@ if (existing) {
mergeHotspotEntry(aggregated, file, value);
}
}
for (const input of loadHotspotInputTexts({ logPaths: opts.logs, ghJobs: opts.ghJobs })) {
for (const input of loadHotspotInputTexts({
logPaths: opts.logs,
ghJobs: opts.ghJobs,
ghRuns: opts.ghRuns,
ghRunJobMatches: opts.ghRunJobMatches,
})) {
const text = input.text;
const summaries = parseMemoryTraceSummaryLines(text).filter((summary) =>
matchesHotspotSummaryLane(summary.lane, opts.lane, opts.lanePrefixes),

View File

@@ -45,4 +45,57 @@ describe("test-update-memory-hotspots source loading", () => {
},
);
});
it("loads GitHub Actions run jobs and filters by job name", () => {
  // Payload returned for the `gh run view <id> --json jobs` listing: one job
  // whose name matches the "extensions" filter and one that does not.
  const runJobsJson = JSON.stringify({
    jobs: [
      { databaseId: 69804189668, name: "checks-fast-extensions-1" },
      { databaseId: 69804189669, name: "build-smoke" },
    ],
  });
  const execFileSyncImpl = vi.fn((command, args) => {
    const isRunView = command === "gh" && args[0] === "run" && args[1] === "view";
    if (
      isRunView &&
      args[2] === "23933168654" &&
      args[3] === "--json" &&
      args[4] === "jobs"
    ) {
      return runJobsJson;
    }
    if (isRunView && args[2] === "--job") {
      return `job-log-${args[3]}`;
    }
    throw new Error("unexpected gh call");
  });
  // Only the matching job's log should be ingested.
  const inputs = loadHotspotInputTexts({
    ghRuns: ["23933168654"],
    ghRunJobMatches: ["extensions"],
    execFileSyncImpl,
  });
  expect(inputs).toEqual([{ sourceName: "gh-job-69804189668", text: "job-log-69804189668" }]);
  // The run-level jobs listing is fetched with the smaller 8 MiB buffer.
  expect(execFileSyncImpl).toHaveBeenCalledWith(
    "gh",
    ["run", "view", "23933168654", "--json", "jobs"],
    {
      encoding: "utf8",
      maxBuffer: 8 * 1024 * 1024,
    },
  );
  // Exactly one job log is downloaded — the filtered-out job is never fetched.
  const jobLogCalls = execFileSyncImpl.mock.calls.filter(
    (call) => call[0] === "gh" && call[1][2] === "--job",
  );
  expect(jobLogCalls).toEqual([
    [
      "gh",
      ["run", "view", "--job", "69804189668", "--log"],
      {
        encoding: "utf8",
        maxBuffer: 64 * 1024 * 1024,
      },
    ],
  ]);
});
});