fix(auto-reply): preserve visible fallback for requested modes

This commit is contained in:
Peter Steinberger
2026-04-30 15:25:38 +01:00
parent ac599c9e53
commit 8291537710
4 changed files with 78 additions and 27 deletions

View File

@@ -6,6 +6,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Auto-reply/group chats: fall back to automatic source delivery when a channel precomputes message-tool-only replies but the `message` tool is unavailable, so Discord/Slack-style group turns do not silently complete without a visible reply. Fixes #74868. Thanks @kagura-agent.
- Agents/subagents: bound automatic orphan recovery with persisted recovery attempts and a wedged-session tombstone, and teach task maintenance/doctor to reconcile those sessions so restart loops no longer require manual `sessions.json` surgery. Fixes #74864. Thanks @solosage1.
- Plugins/runtime-deps: keep bundled provider policy config loading from staging plugin runtime dependencies, so config reads no longer fail on locked-down `/var/lib/openclaw/plugin-runtime-deps` directories. Fixes #74971. Thanks @eurojojo.
- Memory/runtime-deps: retain the native `node-llama-cpp` runtime only when local memory search is configured, so packaged installs can repair local embeddings without relying on unreachable global npm installs. Fixes #74777. Thanks @LLagoon3.

View File

@@ -4427,6 +4427,34 @@ describe("sendPolicy deny — suppress delivery, not processing (#53328)", () =>
);
});
// Regression test for #74868 (see changelog entry above in this commit):
// a channel can precompute a "message_tool_only" delivery mode, but when
// the `message` tool is absent from the allowed tool list the reply would
// silently complete without a visible message. The dispatcher must
// downgrade the requested mode to "automatic" so the reply is delivered.
it("falls back when a channel precomputed message-tool-only delivery but the message tool is unavailable", async () => {
setNoAbort();
const dispatcher = createDispatcher();
// The resolver must observe the downgraded mode ("automatic"), not the
// originally requested "message_tool_only".
const replyResolver = vi.fn(async (_ctx: MsgContext, opts?: GetReplyOptions) => {
expect(opts?.sourceReplyDeliveryMode).toBe("automatic");
return { text: "requested fallback" } satisfies ReplyPayload;
});
const result = await dispatchReplyFromConfig({
ctx: buildTestCtx({
ChatType: "channel",
SessionKey: "test:discord:channel:C1",
}),
// `message` tool deliberately omitted from the allow list to trigger
// the unavailable-tool fallback path.
cfg: { tools: { allow: ["read"] } } as OpenClawConfig,
dispatcher,
replyResolver,
replyOptions: {
sourceReplyDeliveryMode: "message_tool_only",
},
});
expect(replyResolver).toHaveBeenCalledTimes(1);
// The fallback must still queue a visible final reply with the
// resolver's payload — not complete the turn silently.
expect(result.queuedFinal).toBe(true);
expect(dispatcher.sendFinalReply).toHaveBeenCalledWith(
expect.objectContaining({ text: "requested fallback" }),
);
});
it("keeps native command replies visible in group/channel turns", async () => {
setNoAbort();
const dispatcher = createDispatcher();

View File

@@ -125,6 +125,14 @@ describe("resolveSourceReplyDeliveryMode", () => {
messageToolAvailable: false,
}),
).toBe("automatic");
expect(
resolveSourceReplyDeliveryMode({
cfg: emptyConfig,
ctx: { ChatType: "channel" },
requested: "message_tool_only",
messageToolAvailable: false,
}),
).toBe("automatic");
expect(loggerMocks.warn).not.toHaveBeenCalled();
});
@@ -302,5 +310,19 @@ describe("resolveSourceReplyVisibilityPolicy", () => {
suppressHookUserDelivery: false,
deliverySuppressionReason: "",
});
expect(
resolveSourceReplyVisibilityPolicy({
cfg: emptyConfig,
ctx: { ChatType: "channel" },
requested: "message_tool_only",
sendPolicy: "allow",
messageToolAvailable: false,
}),
).toMatchObject({
sourceReplyDeliveryMode: "automatic",
suppressAutomaticSourceDelivery: false,
suppressDelivery: false,
deliverySuppressionReason: "",
});
});
});

View File

@@ -24,35 +24,35 @@ export function resolveSourceReplyDeliveryMode(params: {
requested?: SourceReplyDeliveryMode;
messageToolAvailable?: boolean;
}): SourceReplyDeliveryMode {
if (params.requested) {
return params.requested;
}
if (params.ctx.CommandSource === "native") {
return "automatic";
}
const chatType = normalizeChatType(params.ctx.ChatType);
let mode: SourceReplyDeliveryMode;
if (chatType === "group" || chatType === "channel") {
const configuredMode =
params.cfg.messages?.groupChat?.visibleReplies ?? params.cfg.messages?.visibleReplies;
mode = configuredMode === "automatic" ? "automatic" : "message_tool_only";
if (
mode === "message_tool_only" &&
configuredMode === undefined &&
params.messageToolAvailable !== false &&
!visibleRepliesPrivateDefaultWarned
) {
visibleRepliesPrivateDefaultWarned = true;
log.warn(
`Group/channel replies are private by default since 2026.4.27. ` +
`To restore automatic room posting, set messages.groupChat.visibleReplies to "automatic" in openclaw.json and save the config. ` +
`The gateway hot-reloads messages config; restart only if file watching/reload is disabled. ` +
`Relates to https://github.com/openclaw/openclaw/issues/74876`,
);
}
if (params.requested) {
mode = params.requested;
} else if (params.ctx.CommandSource === "native") {
mode = "automatic";
} else {
mode =
params.cfg.messages?.visibleReplies === "message_tool" ? "message_tool_only" : "automatic";
const chatType = normalizeChatType(params.ctx.ChatType);
if (chatType === "group" || chatType === "channel") {
const configuredMode =
params.cfg.messages?.groupChat?.visibleReplies ?? params.cfg.messages?.visibleReplies;
mode = configuredMode === "automatic" ? "automatic" : "message_tool_only";
if (
mode === "message_tool_only" &&
configuredMode === undefined &&
params.messageToolAvailable !== false &&
!visibleRepliesPrivateDefaultWarned
) {
visibleRepliesPrivateDefaultWarned = true;
log.warn(
`Group/channel replies are private by default since 2026.4.27. ` +
`To restore automatic room posting, set messages.groupChat.visibleReplies to "automatic" in openclaw.json and save the config. ` +
`The gateway hot-reloads messages config; restart only if file watching/reload is disabled. ` +
`Relates to https://github.com/openclaw/openclaw/issues/74876`,
);
}
} else {
mode =
params.cfg.messages?.visibleReplies === "message_tool" ? "message_tool_only" : "automatic";
}
}
if (mode === "message_tool_only" && params.messageToolAvailable === false) {
return "automatic";