feat: add LLM idle timeout for streaming responses

Problem: When the LLM stops responding, the agent hangs for ~5 minutes with no feedback.
Users had to issue /stop to recover.

Solution: Add idle timeout detection for LLM streaming responses.
This commit is contained in:
Liu Yuan
2026-03-27 15:52:10 +08:00
committed by Ayaan Zaidi
parent 47839d3b9a
commit 84b72e66b9
6 changed files with 404 additions and 1 deletions

View File

@@ -171,6 +171,8 @@ export type AgentDefaultsConfig = {
cliBackends?: Record<string, CliBackendConfig>;
/** Opt-in: prune old tool results from the LLM context to reduce token usage. */
contextPruning?: AgentContextPruningConfig;
/** LLM timeout configuration. */
llm?: AgentLlmConfig;
/** Compaction tuning and pre-compaction memory flush behavior. */
compaction?: AgentCompactionConfig;
/** Embedded Pi runner hardening and compatibility controls. */
@@ -365,3 +367,16 @@ export type AgentCompactionMemoryFlushConfig = {
/** System prompt appended for the memory flush turn. */
systemPrompt?: string;
};
/**
 * LLM timeout configuration.
 *
 * Attached to agent configuration via the optional `llm` field on
 * `AgentDefaultsConfig`. Currently covers only idle-stream detection.
 */
export type AgentLlmConfig = {
  /**
   * Idle timeout for LLM streaming responses, in seconds.
   * If no token is received within this window, the in-flight request is
   * aborted rather than left hanging.
   * Set to 0 to disable the check entirely (never time out).
   * Default: 60 seconds.
   */
  idleTimeoutSeconds?: number;
};