fix(agents): accept silent no-reply turns

This commit is contained in:
Peter Steinberger
2026-04-22 18:34:42 +01:00
parent 9d27d09d47
commit 9337e1bd8a
3 changed files with 43 additions and 0 deletions

View File

@@ -22,6 +22,7 @@ Docs: https://docs.openclaw.ai
- Gateway/restart: preserve group and channel chat context when resuming an agent turn after a Gateway restart, so continuation replies keep the same prompt, routing, and tool-status behavior as the original conversation.
- Gateway/pairing: shared-secret loopback CLI clients now silently auto-approve `metadata-upgrade` pairing (platform / device family refresh) instead of being disconnected with `1008 pairing required`. This matches the scope-upgrade and role-upgrade behavior added in #69431 and unblocks non-interactive CLI automation when a paired-device record has a stale platform string (e.g. device key replicated across hosts, install migrated between OSes, or platform-string format changed between OpenClaw versions). Browser / Control-UI clients keep the existing approval-required flow for metadata changes.
- Gateway/pairing: treat any forwarded-header evidence (`Forwarded`, `X-Forwarded-*`, or `X-Real-IP`) as proxied WebSocket traffic before pairing locality checks, so reverse-proxy topologies cannot use the loopback shared-secret helper auto-pairing path.
- Agents/OpenAI: treat exact `NO_REPLY` assistant output as a deliberate silent reply in embedded runs, so GPT-5.4 turns with signed reasoning plus a silent final no longer surface a false incomplete-turn error.
- Gateway/pairing webchat: render `/pair qr` replies as structured media instead of raw markdown text, preserve inline reply threading and silent-control handling on media replies, avoid persisting sensitive QR images into transcript history, and keep local webchat media embedding behind internal-only trust markers. (#70047) Thanks @BunsDev.
- Codex harness: default app-server runs to unchained local execution, so OpenAI heartbeats can use network and shell tools without stalling behind native Codex approvals or the workspace-write sandbox.
- Codex harness: apply the GPT-5 behavior and heartbeat prompt overlay to native Codex app-server runs, so `codex/gpt-5.x` sessions get the same follow-through, tool-use, and proactive heartbeat guidance as OpenAI GPT-5 runs.

View File

@@ -23,6 +23,7 @@ import {
resolveEmptyResponseRetryInstruction,
resolvePlanningOnlyRetryLimit,
resolvePlanningOnlyRetryInstruction,
resolveIncompleteTurnPayloadText,
resolveReasoningOnlyRetryInstruction,
STRICT_AGENTIC_BLOCKED_TEXT,
resolveReplayInvalidFlag,
@@ -732,6 +733,34 @@ describe("runEmbeddedPiAgent incomplete-turn safety", () => {
expect(retryInstruction).toBe(REASONING_ONLY_RETRY_INSTRUCTION);
});
it("treats exact NO_REPLY as a deliberate silent assistant reply", () => {
  // A GPT-5.4 turn may carry signed reasoning plus a bare NO_REPLY sentinel.
  // Such a turn is deliberate silence, so no incomplete-turn text is produced.
  const silentAssistant = {
    role: "assistant",
    stopReason: "stop",
    provider: "openai",
    model: "gpt-5.4",
    content: [
      {
        type: "thinking",
        thinking: "internal reasoning",
        thinkingSignature: JSON.stringify({ id: "rs_no_reply", type: "reasoning" }),
      },
      { type: "text", text: "" },
      { type: "text", text: "NO_REPLY" },
    ],
  } as unknown as EmbeddedRunAttemptResult["lastAssistant"];
  const result = resolveIncompleteTurnPayloadText({
    payloadCount: 0,
    aborted: false,
    timedOut: false,
    attempt: makeAttemptResult({
      assistantTexts: ["NO_REPLY"],
      lastAssistant: silentAssistant,
    }),
  });
  expect(result).toBeNull();
});
it("does not retry reasoning-only GPT turns after side effects", () => {
const retryInstruction = resolveReasoningOnlyRetryInstruction({
provider: "openai",

View File

@@ -1,4 +1,5 @@
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import { isSilentReplyPayloadText, SILENT_REPLY_TOKEN } from "../../../auto-reply/tokens.js";
import type { EmbeddedPiExecutionContract } from "../../../config/types.agent-defaults.js";
import { normalizeLowercaseStringOrEmpty } from "../../../shared/string-coerce.js";
import { isStrictAgenticSupportedProviderModel } from "../../execution-contract.js";
@@ -173,6 +174,10 @@ export function resolveIncompleteTurnPayloadText(params: {
return null;
}
if (hasOnlySilentAssistantReply(params.attempt.assistantTexts)) {
return null;
}
const stopReason = params.attempt.lastAssistant?.stopReason;
const incompleteTerminalAssistant = isIncompleteTerminalAssistantTurn({
hasAssistantVisibleText: params.payloadCount > 0,
@@ -199,6 +204,14 @@ export function resolveIncompleteTurnPayloadText(params: {
: "⚠️ Agent couldn't generate a response. Please try again.";
}
/**
 * True when the assistant produced at least one visible text and every
 * visible text is the silent-reply sentinel (i.e. a deliberate "say nothing"
 * turn rather than an empty/incomplete one).
 */
function hasOnlySilentAssistantReply(assistantTexts: readonly string[]): boolean {
  // Keep only entries with non-whitespace content; blank chunks don't count.
  const visibleTexts: string[] = [];
  for (const candidate of assistantTexts) {
    if (candidate.trim().length > 0) {
      visibleTexts.push(candidate);
    }
  }
  if (visibleTexts.length === 0) {
    return false;
  }
  return visibleTexts.every((candidate) =>
    isSilentReplyPayloadText(candidate, SILENT_REPLY_TOKEN),
  );
}
export function resolveReplayInvalidFlag(params: {
attempt: RunLivenessAttempt;
incompleteTurnText?: string | null;