fix(ci): restore build and typecheck on main

This commit is contained in:
Peter Steinberger
2026-04-04 07:22:01 +01:00
parent 7e7460c2f9
commit e7e1707277
3 changed files with 124 additions and 29 deletions

View File

@@ -7,6 +7,8 @@ import type { OpenClawConfig } from "openclaw/plugin-sdk/config-runtime";
import { collectProviderDangerousNameMatchingScopes } from "openclaw/plugin-sdk/runtime";
import { isZalouserMutableGroupEntry } from "./security-audit.js";
type ZalouserChannelsConfig = NonNullable<OpenClawConfig["channels"]>;
function asObjectRecord(value: unknown): Record<string, unknown> | null {
return value && typeof value === "object" && !Array.isArray(value)
? (value as Record<string, unknown>)
@@ -132,7 +134,7 @@ function normalizeZalouserCompatibilityConfig(cfg: OpenClawConfig): ChannelDocto
...cfg,
channels: {
...cfg.channels,
zalouser: updatedZalouser as OpenClawConfig["channels"]["zalouser"],
zalouser: updatedZalouser as ZalouserChannelsConfig["zalouser"],
},
},
changes,

View File

@@ -18,6 +18,7 @@ import {
} from "./live-cache-test-support.js";
import { runEmbeddedPiAgent } from "./pi-embedded-runner.js";
import { compactEmbeddedPiSessionDirect } from "./pi-embedded-runner/compact.runtime.js";
import { buildZeroUsage } from "./stream-message-shared.js";
const describeCacheLive = LIVE_CACHE_TEST_ENABLED ? describe : describe.skip;
@@ -92,26 +93,114 @@ function buildRunnerSessionPaths(sessionId: string) {
};
}
function resolveProviderBaseUrl(fixture: LiveResolvedModel): string | undefined {
const candidate = (fixture.model as { baseUrl?: unknown }).baseUrl;
/**
 * Reads an optional `baseUrl` off a resolved live model.
 * Returns it only when it is a non-blank string; otherwise undefined so the
 * caller can fall back to a provider default.
 */
function resolveProviderBaseUrl(model: LiveResolvedModel["model"]): string | undefined {
  const raw = (model as { baseUrl?: unknown }).baseUrl;
  if (typeof raw !== "string") {
    return undefined;
  }
  return raw.trim().length > 0 ? raw : undefined;
}
function buildEmbeddedRunnerConfig(params: {
fixture: LiveResolvedModel;
cacheRetention: "none" | "short" | "long";
transport?: "sse" | "websocket";
}): OpenClawConfig {
const provider = params.fixture.model.provider;
const modelKey = `${provider}/${params.fixture.model.id}`;
const providerBaseUrl = resolveProviderBaseUrl(params.fixture);
/**
 * Fallback API base URL for a provider when the model fixture does not carry
 * its own `baseUrl`. Unknown providers get a deliberately unroutable
 * placeholder (`example.invalid`) so misconfiguration fails fast.
 */
function resolveDefaultProviderBaseUrl(model: LiveResolvedModel["model"]): string {
  switch (model.provider) {
    case "anthropic":
      return "https://api.anthropic.com/v1";
    case "openai":
      return "https://api.openai.com/v1";
    default:
      return "https://example.invalid/v1";
  }
}
/**
 * Builds the minimal embedded-runner model definition from a live-resolved
 * model, defensively coercing loosely-typed fields to sane defaults:
 * contextWindow/maxTokens become positive integers, reasoning defaults to
 * false, and input modalities fall back to ["text", "image"] unless every
 * entry is a known modality.
 */
function buildEmbeddedModelDefinition(model: LiveResolvedModel["model"]) {
  const loose = model as {
    contextWindow?: unknown;
    maxTokens?: unknown;
    reasoning?: unknown;
    input?: unknown;
  };
  // Coerce a candidate to a positive integer, or use the fallback when it is
  // not a finite number.
  const toPositiveInt = (value: unknown, fallback: number): number =>
    typeof value === "number" && Number.isFinite(value)
      ? Math.max(1, Math.trunc(value))
      : fallback;
  const isModality = (value: unknown): value is "text" | "image" =>
    value === "text" || value === "image";
  const input =
    Array.isArray(loose.input) && loose.input.every(isModality)
      ? [...loose.input]
      : (["text", "image"] as Array<"text" | "image">);
  return {
    id: model.id,
    name: model.id,
    api: resolveEmbeddedModelApi(model),
    reasoning: typeof loose.reasoning === "boolean" ? loose.reasoning : false,
    input,
    // Live cache probes never bill; zero out all cost entries.
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: toPositiveInt(loose.contextWindow, 128_000),
    maxTokens: toPositiveInt(loose.maxTokens, 8_192),
  };
}
/**
 * Maps a provider to the wire API the embedded runner should speak:
 * Anthropic uses `anthropic-messages`; every other provider is treated as
 * OpenAI-compatible (`openai-responses`).
 */
function resolveEmbeddedModelApi(
  model: LiveResolvedModel["model"],
): "anthropic-messages" | "openai-responses" {
  if (model.provider === "anthropic") {
    return "anthropic-messages";
  }
  return "openai-responses";
}
/**
 * Normalizes a loosely-shaped usage record into the full
 * AssistantMessage["usage"] shape, defaulting absent counters to zero.
 *
 * Accepts either the canonical usage object (carrying `totalTokens`/`cost`)
 * or a partial legacy shape (carrying `total`). When neither total field is a
 * number, the total falls back to input + output; a missing/falsy cost is
 * replaced with an all-zero cost record. An absent usage yields the shared
 * zero-usage value.
 */
function normalizeLiveUsage(
  usage:
    | AssistantMessage["usage"]
    | {
        input?: number;
        output?: number;
        cacheRead?: number;
        cacheWrite?: number;
        total?: number;
      }
    | undefined,
): AssistantMessage["usage"] {
  if (!usage) {
    return buildZeroUsage();
  }
  const input = usage.input ?? 0;
  const output = usage.output ?? 0;
  // Prefer the canonical totalTokens, then the legacy total, then the sum.
  let totalTokens = input + output;
  if ("totalTokens" in usage && typeof usage.totalTokens === "number") {
    totalTokens = usage.totalTokens;
  } else if ("total" in usage && typeof usage.total === "number") {
    totalTokens = usage.total;
  }
  const zeroCost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 };
  const cost = "cost" in usage && usage.cost ? usage.cost : zeroCost;
  return {
    input,
    output,
    cacheRead: usage.cacheRead ?? 0,
    cacheWrite: usage.cacheWrite ?? 0,
    totalTokens,
    cost,
  };
}
function buildEmbeddedRunnerConfig(
params: LiveResolvedModel & {
cacheRetention: "none" | "short" | "long";
transport?: "sse" | "websocket";
},
): OpenClawConfig {
const provider = params.model.provider;
const modelKey = `${provider}/${params.model.id}`;
const providerBaseUrl =
resolveProviderBaseUrl(params.model) ?? resolveDefaultProviderBaseUrl(params.model);
return {
models: {
providers: {
[provider]: {
api: params.fixture.model.api,
apiKey: params.fixture.apiKey,
...(providerBaseUrl ? { baseUrl: providerBaseUrl } : {}),
api: resolveEmbeddedModelApi(params.model),
apiKey: params.apiKey,
baseUrl: providerBaseUrl,
models: [buildEmbeddedModelDefinition(params.model)],
},
},
},
@@ -153,8 +242,9 @@ function extractRunPayloadText(payloads: Array<{ text?: string } | undefined> |
}
async function runEmbeddedCacheProbe(params: {
fixture: LiveResolvedModel;
apiKey: string;
cacheRetention: "none" | "short" | "long";
model: LiveResolvedModel["model"];
prefix: string;
providerTag: "anthropic" | "openai";
sessionId: string;
@@ -172,13 +262,14 @@ async function runEmbeddedCacheProbe(params: {
workspaceDir: sessionPaths.workspaceDir,
agentDir: sessionPaths.agentDir,
config: buildEmbeddedRunnerConfig({
fixture: params.fixture,
apiKey: params.apiKey,
cacheRetention: params.cacheRetention,
model: params.model,
transport: params.transport,
}),
prompt: buildEmbeddedCachePrompt(params.suffix, params.promptSections),
provider: params.fixture.model.provider,
model: params.fixture.model.id,
provider: params.model.provider,
model: params.model.id,
timeoutMs: params.providerTag === "openai" ? OPENAI_TIMEOUT_MS : ANTHROPIC_TIMEOUT_MS,
runId: `${params.sessionId}-${params.suffix}-${params.transport ?? "default"}`,
extraSystemPrompt: params.prefix,
@@ -189,7 +280,7 @@ async function runEmbeddedCacheProbe(params: {
);
const text = extractRunPayloadText(result.payloads);
expect(text.toLowerCase()).toContain(params.suffix.toLowerCase());
const usage = result.meta.agentMeta?.usage ?? {};
const usage = normalizeLiveUsage(result.meta.agentMeta?.usage);
return {
suffix: params.suffix,
text,
@@ -199,8 +290,9 @@ async function runEmbeddedCacheProbe(params: {
}
async function compactLiveCacheSession(params: {
fixture: LiveResolvedModel;
apiKey: string;
cacheRetention: "none" | "short" | "long";
model: LiveResolvedModel["model"];
providerTag: "anthropic" | "openai";
sessionId: string;
}) {
@@ -214,11 +306,12 @@ async function compactLiveCacheSession(params: {
workspaceDir: sessionPaths.workspaceDir,
agentDir: sessionPaths.agentDir,
config: buildEmbeddedRunnerConfig({
fixture: params.fixture,
apiKey: params.apiKey,
cacheRetention: params.cacheRetention,
model: params.model,
}),
provider: params.fixture.model.provider,
model: params.fixture.model.id,
provider: params.model.provider,
model: params.model.id,
force: true,
trigger: "manual",
runId: `${params.sessionId}-compact`,

View File

@@ -1,6 +1,6 @@
import fs from "node:fs/promises";
import os from "node:os";
import type { AgentMessage, StreamFn } from "@mariozechner/pi-agent-core";
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import {
createAgentSession,
DefaultResourceLoader,
@@ -113,16 +113,16 @@ import { buildEmbeddedSandboxInfo } from "../sandbox-info.js";
import { prewarmSessionFile, trackSessionManagerAccess } from "../session-manager-cache.js";
import { prepareSessionManagerForRun } from "../session-manager-init.js";
import { resolveEmbeddedRunSkillEntries } from "../skills-runtime.js";
import {
applySystemPromptOverrideToSession,
buildEmbeddedSystemPrompt,
createSystemPromptOverride,
} from "../system-prompt.js";
import {
resetEmbeddedAgentBaseStreamFnCacheForTest,
resolveEmbeddedAgentBaseStreamFn,
resolveEmbeddedAgentStreamFn,
} from "../stream-resolution.js";
import {
applySystemPromptOverrideToSession,
buildEmbeddedSystemPrompt,
createSystemPromptOverride,
} from "../system-prompt.js";
import { dropThinkingBlocks } from "../thinking.js";
import { collectAllowedToolNames } from "../tool-name-allowlist.js";
import { installToolResultContextGuard } from "../tool-result-context-guard.js";