fix: harden synology reply user resolution and cache scope (#23709) (thanks @druide67)

This commit is contained in:
Peter Steinberger
2026-03-02 19:49:47 +00:00
parent d5df0cb2fa
commit 326a8fbf66
4 changed files with 55 additions and 15 deletions

View File

@@ -49,6 +49,7 @@ Docs: https://docs.openclaw.ai
- Synology Chat/webhook compatibility: accept JSON and alias payload fields, allow token resolution from body/query/header sources, and ACK webhook requests with `204` to avoid persistent `Processing...` states in Synology Chat clients. (#26635) Thanks @memphislee09-source.
- Synology Chat/webhook ingress hardening: enforce bounded body reads (size + timeout) via shared request-body guards to prevent unauthenticated slow-body hangs before token validation. (#25831) Thanks @bmendonca3.
- Synology Chat/reply delivery: resolve webhook usernames to Chat API `user_id` values for outbound chatbot replies, avoiding mismatches between webhook user IDs and `method=chatbot` recipient IDs in multi-account setups. (#23709) Thanks @druide67.
- Auto-reply/followup queue: avoid stale callback reuse across idle-window restarts by caching the followup runner only when a drain actually starts, preserving enqueue ordering after empty-finalize paths. (#31902) Thanks @Lanfei.
- Gateway/Heartbeat model reload: treat `models.*` and `agents.defaults.model` config updates as heartbeat hot-reload triggers so heartbeat picks up model changes without a full gateway restart. (#32046) Thanks @stakeswky.
- Slack/inbound debounce routing: isolate top-level non-DM message debounce keys by message timestamp to avoid cross-thread collisions, preserve DM batching, and flush pending top-level buffers before immediate non-debounce follow-ups to keep ordering stable. (#31951) Thanks @scoootscooob.

View File

@@ -133,9 +133,29 @@ function mockUserListResponse(
});
}
/**
 * Queues exactly one mocked `https.get` reply that serves the given
 * Synology Chat `user_list` payload with an HTTP 200 status.
 *
 * The response body is emitted asynchronously (on the next tick), after the
 * callback receives the response object — mirroring real socket timing.
 */
function mockUserListResponseOnce(
  users: Array<{ user_id: number; username: string; nickname: string }>,
) {
  vi.mocked((https as any).get).mockImplementationOnce(
    (_url: any, _opts: any, callback: any) => {
      const response = new EventEmitter() as any;
      response.statusCode = 200;
      process.nextTick(() => {
        callback(response);
        response.emit(
          "data",
          Buffer.from(JSON.stringify({ success: true, data: { users } })),
        );
        response.emit("end");
      });
      const request = new EventEmitter() as any;
      request.destroy = vi.fn();
      return request;
    },
  );
}
describe("resolveChatUserId", () => {
const baseUrl =
"https://nas.example.com/webapi/entry.cgi?api=SYNO.Chat.External&method=chatbot&version=2&token=%22test%22";
const baseUrl2 =
"https://nas2.example.com/webapi/entry.cgi?api=SYNO.Chat.External&method=chatbot&version=2&token=%22test-2%22";
beforeEach(() => {
vi.clearAllMocks();
@@ -198,4 +218,17 @@ describe("resolveChatUserId", () => {
expect.any(Function),
);
});
it("keeps user cache scoped per incoming URL", async () => {
  // The same username exists on two different bot endpoints with different
  // user_ids; a globally shared cache would wrongly return 4 for both.
  mockUserListResponseOnce([{ user_id: 4, username: "jmn67", nickname: "jmn" }]);
  mockUserListResponseOnce([{ user_id: 9, username: "jmn67", nickname: "jmn" }]);

  const firstId = await resolveChatUserId(baseUrl, "jmn");
  const secondId = await resolveChatUserId(baseUrl2, "jmn");

  expect(firstId).toBe(4);
  expect(secondId).toBe(9);
  // One user_list fetch per endpoint: no cross-URL cache reuse, no extra calls.
  expect(vi.mocked((https as any).get)).toHaveBeenCalledTimes(2);
});
});

View File

@@ -22,8 +22,13 @@ interface ChatUser {
nickname: string;
}
let chatUserCache: ChatUser[] | null = null;
let chatUserCacheTime = 0;
type ChatUserCacheEntry = {
users: ChatUser[];
cachedAt: number;
};
// Cache user lists per bot endpoint to avoid cross-account bleed.
const chatUserCache = new Map<string, ChatUserCacheEntry>();
const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
/**
@@ -122,11 +127,11 @@ export async function fetchChatUsers(
log?: { warn: (...args: unknown[]) => void },
): Promise<ChatUser[]> {
const now = Date.now();
if (chatUserCache && now - chatUserCacheTime < CACHE_TTL_MS) {
return chatUserCache;
}
const listUrl = incomingUrl.replace(/method=\w+/, "method=user_list");
const cached = chatUserCache.get(listUrl);
if (cached && now - cached.cachedAt < CACHE_TTL_MS) {
return cached.users;
}
return new Promise((resolve) => {
let parsedUrl: URL;
@@ -134,7 +139,7 @@ export async function fetchChatUsers(
parsedUrl = new URL(listUrl);
} catch {
log?.warn("fetchChatUsers: invalid user_list URL, using cached data");
resolve(chatUserCache ?? []);
resolve(cached?.users ?? []);
return;
}
const transport = parsedUrl.protocol === "https:" ? https : http;
@@ -149,28 +154,31 @@ export async function fetchChatUsers(
try {
const result = JSON.parse(data);
if (result.success && result.data?.users) {
chatUserCache = result.data.users.map((u: any) => ({
const users = result.data.users.map((u: any) => ({
user_id: u.user_id,
username: u.username || "",
nickname: u.nickname || "",
}));
chatUserCacheTime = now;
resolve(chatUserCache!);
chatUserCache.set(listUrl, {
users,
cachedAt: now,
});
resolve(users);
} else {
log?.warn(
`fetchChatUsers: API returned success=${result.success}, using cached data`,
);
resolve(chatUserCache ?? []);
resolve(cached?.users ?? []);
}
} catch {
log?.warn("fetchChatUsers: failed to parse user_list response");
resolve(chatUserCache ?? []);
resolve(cached?.users ?? []);
}
});
})
.on("error", (err) => {
log?.warn(`fetchChatUsers: HTTP error — ${err instanceof Error ? err.message : err}`);
resolve(chatUserCache ?? []);
resolve(cached?.users ?? []);
});
});
}

View File

@@ -329,8 +329,6 @@ export function createWebhookHandler(deps: WebhookHandlerDeps) {
const preview = cleanText.length > 100 ? `${cleanText.slice(0, 100)}...` : cleanText;
log?.info(`Message from ${payload.username} (${payload.user_id}): ${preview}`);
// ACK immediately so Synology Chat won't remain in "Processing..."
respondNoContent(res);
// ACK immediately so Synology Chat won't remain in "Processing..."
respondNoContent(res);