fix(ci): parse GitHub Actions memory hotspot logs

This commit is contained in:
Shakker 2026-03-20 04:42:46 +00:00 committed by Shakker
parent 9c7da58770
commit 254ea0c65e
3 changed files with 58 additions and 18 deletions

View File

@ -7,13 +7,14 @@ const ANSI_ESCAPE_PATTERN = new RegExp(
`${ESCAPE}(?:\\][^${BELL}]*(?:${BELL}|${ESCAPE}\\\\)|\\[[0-?]*[ -/]*[@-~]|[@-Z\\\\-_])`,
"g",
);
const GITHUB_ACTIONS_LOG_PREFIX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z\s+/u;
const COMPLETED_TEST_FILE_LINE_PATTERN =
/(?<file>(?:src|extensions|test|ui)\/\S+?\.(?:live\.test|e2e\.test|test)\.ts)\s+\(.*\)\s+(?<duration>\d+(?:\.\d+)?)(?<unit>ms|s)\s*$/;
const MEMORY_TRACE_SUMMARY_PATTERN =
/^\[test-parallel\]\[mem\] summary (?<lane>\S+) files=(?<files>\d+) peak=(?<peak>[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) totalDelta=(?<totalDelta>[+-][0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) peakAt=(?<peakAt>\S+) top=(?<top>.*)$/u;
/^\[test-parallel\]\[mem\] summary (?<lane>\S+) files=(?<files>\d+) peak=(?<peak>[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) totalDelta=(?<totalDelta>[+-]?[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) peakAt=(?<peakAt>\S+) top=(?<top>.*)$/u;
const MEMORY_TRACE_TOP_ENTRY_PATTERN =
/^(?<file>(?:src|extensions|test|ui)\/\S+?\.(?:live\.test|e2e\.test|test)\.ts):(?<delta>[+-][0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB))$/u;
/^(?<file>(?:src|extensions|test|ui)\/\S+?\.(?:live\.test|e2e\.test|test)\.ts):(?<delta>[+-]?[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB))$/u;
const PS_COLUMNS = ["pid=", "ppid=", "rss=", "comm="];
@ -44,9 +45,14 @@ function stripAnsi(text) {
return text.replaceAll(ANSI_ESCAPE_PATTERN, "");
}
/**
 * Strip the leading GitHub Actions ISO-8601 timestamp from a log line,
 * when present, leaving the raw runner output for downstream parsers.
 *
 * @param {string} line - A single (already ANSI-stripped) log line.
 * @returns {string} The line without its timestamp prefix.
 */
function normalizeLogLine(line) {
  const prefix = line.match(GITHUB_ACTIONS_LOG_PREFIX_PATTERN);
  return prefix === null ? line : line.slice(prefix[0].length);
}
export function parseCompletedTestFileLines(text) {
return stripAnsi(text)
.split(/\r?\n/u)
.map((line) => normalizeLogLine(line))
.map((line) => {
const match = line.match(COMPLETED_TEST_FILE_LINE_PATTERN);
if (!match?.groups) {
@ -63,6 +69,7 @@ export function parseCompletedTestFileLines(text) {
export function parseMemoryTraceSummaryLines(text) {
return stripAnsi(text)
.split(/\r?\n/u)
.map((line) => normalizeLogLine(line))
.map((line) => {
const match = line.match(MEMORY_TRACE_SUMMARY_PATTERN);
if (!match?.groups) {

View File

@ -57,6 +57,40 @@ function parseArgs(argv) {
return args;
}
/**
 * Merge one memory-hotspot record into the aggregate map.
 *
 * Keeps the largest observed (rounded) delta per test file and unions the
 * normalized source labels. Entries without a finite, strictly positive
 * `deltaKb` are ignored.
 *
 * @param {Map<string, {deltaKb: number, sources: string[]}>} aggregated
 *   Aggregate keyed by test-file path; mutated in place.
 * @param {string} file - Test-file path the record belongs to.
 * @param {{deltaKb?: number, sources?: string[]}} value - Incoming record.
 */
function mergeHotspotEntry(aggregated, file, value) {
  const deltaKb = value?.deltaKb;
  if (!Number.isFinite(deltaKb) || deltaKb <= 0) {
    return;
  }
  // Drop a trailing ".log" from the file-name half of a "<name>:<lane>"
  // label so labels built with and without the extension collapse together.
  const stripLogSuffix = (label) =>
    label.endsWith(".log") ? label.slice(0, -4) : label;
  const normalizeSourceLabel = (source) => {
    const separator = source.lastIndexOf(":");
    if (separator === -1) {
      return stripLogSuffix(source);
    }
    return `${stripLogSuffix(source.slice(0, separator))}:${source.slice(separator + 1)}`;
  };
  const incomingSources = [];
  if (Array.isArray(value?.sources)) {
    for (const source of value.sources) {
      if (typeof source === "string" && source.length > 0) {
        incomingSources.push(normalizeSourceLabel(source));
      }
    }
  }
  const rounded = Math.round(deltaKb);
  const existing = aggregated.get(file);
  if (existing === undefined) {
    aggregated.set(file, {
      deltaKb: rounded,
      sources: [...new Set(incomingSources)],
    });
    return;
  }
  // Keep the worst (largest) delta seen so far; union the source labels.
  if (rounded > existing.deltaKb) {
    existing.deltaKb = rounded;
  }
  for (const label of incomingSources) {
    if (!existing.sources.includes(label)) {
      existing.sources.push(label);
    }
  }
}
const opts = parseArgs(process.argv.slice(2));
if (opts.logs.length === 0) {
@ -65,6 +99,14 @@ if (opts.logs.length === 0) {
}
// Hotspot aggregate keyed by test-file path → { deltaKb, sources }
// (shape established by mergeHotspotEntry above).
const aggregated = new Map();
// Seed the aggregate from any previously written output file so repeated
// runs merge into earlier hotspot data instead of overwriting it.
try {
const existing = JSON.parse(fs.readFileSync(opts.out, "utf8"));
for (const [file, value] of Object.entries(existing.files ?? {})) {
mergeHotspotEntry(aggregated, file, value);
}
} catch {
// Start from scratch when the output file does not exist yet.
// NOTE(review): this also silently swallows malformed-JSON errors —
// presumably intentional best-effort seeding; confirm.
}
for (const logPath of opts.logs) {
const text = fs.readFileSync(logPath, "utf8");
const summaries = parseMemoryTraceSummaryLines(text).filter(
@ -75,19 +117,10 @@ for (const logPath of opts.logs) {
if (record.deltaKb < opts.minDeltaKb) {
continue;
}
const nextSource = `${path.basename(logPath)}:${summary.lane}`;
const previous = aggregated.get(record.file);
if (!previous) {
aggregated.set(record.file, {
deltaKb: record.deltaKb,
sources: [nextSource],
});
continue;
}
previous.deltaKb = Math.max(previous.deltaKb, record.deltaKb);
if (!previous.sources.includes(nextSource)) {
previous.sources.push(nextSource);
}
mergeHotspotEntry(aggregated, record.file, {
deltaKb: record.deltaKb,
sources: [`${path.basename(logPath, path.extname(logPath))}:${summary.lane}`],
});
}
}
}

View File

@ -84,7 +84,7 @@ describe("scripts/test-parallel memory trace parsing", () => {
it("parses memory trace summary lines and hotspot deltas", () => {
const summaries = parseMemoryTraceSummaryLines(
[
"[test-parallel][mem] summary unit-fast files=360 peak=13.22GiB totalDelta=+6.69GiB peakAt=poll top=src/config/schema.help.quality.test.ts:+1.06GiB, src/infra/update-runner.test.ts:+463.6MiB",
"2026-03-20T04:32:18.7721466Z [test-parallel][mem] summary unit-fast files=360 peak=13.22GiB totalDelta=6.69GiB peakAt=poll top=src/config/schema.help.quality.test.ts:1.06GiB, src/infra/update-runner.test.ts:+463.6MiB",
].join("\n"),
);
@ -93,12 +93,12 @@ describe("scripts/test-parallel memory trace parsing", () => {
lane: "unit-fast",
files: 360,
peakRssKb: parseMemoryValueKb("13.22GiB"),
totalDeltaKb: parseMemoryValueKb("+6.69GiB"),
totalDeltaKb: parseMemoryValueKb("6.69GiB"),
peakAt: "poll",
top: [
{
file: "src/config/schema.help.quality.test.ts",
deltaKb: parseMemoryValueKb("+1.06GiB"),
deltaKb: parseMemoryValueKb("1.06GiB"),
},
{
file: "src/infra/update-runner.test.ts",