mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 15:00:41 +00:00
refactor: move tasks behind plugin-sdk seam
This commit is contained in:
@@ -1,14 +1,14 @@
|
||||
import { resolveAgentTimeoutMs } from "../../agents/timeout.js";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { logVerbose } from "../../globals.js";
|
||||
import { normalizeAgentId } from "../../routing/session-key.js";
|
||||
import { isAcpSessionKey } from "../../sessions/session-key-utils.js";
|
||||
import {
|
||||
createRunningTaskRun,
|
||||
completeTaskRunByRunId,
|
||||
failTaskRunByRunId,
|
||||
startTaskRunByRunId,
|
||||
} from "../../tasks/task-executor.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveAgentTimeoutMs } from "../../agents/timeout.js";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { logVerbose } from "../../globals.js";
|
||||
import { normalizeAgentId } from "../../routing/session-key.js";
|
||||
import { isAcpSessionKey } from "../../sessions/session-key-utils.js";
|
||||
import type { DeliveryContext } from "../../utils/delivery-context.js";
|
||||
import {
|
||||
AcpRuntimeError,
|
||||
|
||||
@@ -37,9 +37,9 @@ vi.mock("../runtime/registry.js", async (importOriginal) => {
|
||||
let AcpSessionManager: typeof import("./manager.js").AcpSessionManager;
|
||||
let AcpRuntimeError: typeof import("../runtime/errors.js").AcpRuntimeError;
|
||||
let resetAcpSessionManagerForTests: typeof import("./manager.js").__testing.resetAcpSessionManagerForTests;
|
||||
let findTaskByRunId: typeof import("../../tasks/task-registry.js").findTaskByRunId;
|
||||
let resetTaskRegistryForTests: typeof import("../../tasks/task-registry.js").resetTaskRegistryForTests;
|
||||
let resetFlowRegistryForTests: typeof import("../../tasks/flow-registry.js").resetFlowRegistryForTests;
|
||||
let findTaskByRunId: typeof import("openclaw/plugin-sdk/tasks").findTaskByRunId;
|
||||
let resetTaskRegistryForTests: typeof import("openclaw/plugin-sdk/tasks").resetTaskRegistryForTests;
|
||||
let resetFlowRegistryForTests: typeof import("openclaw/plugin-sdk/tasks").resetFlowRegistryForTests;
|
||||
let installInMemoryTaskAndFlowRegistryRuntime: typeof import("../../test-utils/task-flow-registry-runtime.js").installInMemoryTaskAndFlowRegistryRuntime;
|
||||
|
||||
const baseCfg = {
|
||||
@@ -184,8 +184,8 @@ describe("AcpSessionManager", () => {
|
||||
__testing: { resetAcpSessionManagerForTests },
|
||||
} = await import("./manager.js"));
|
||||
({ AcpRuntimeError } = await import("../runtime/errors.js"));
|
||||
({ findTaskByRunId, resetTaskRegistryForTests } = await import("../../tasks/task-registry.js"));
|
||||
({ resetFlowRegistryForTests } = await import("../../tasks/flow-registry.js"));
|
||||
({ findTaskByRunId, resetTaskRegistryForTests } = await import("openclaw/plugin-sdk/tasks"));
|
||||
({ resetFlowRegistryForTests } = await import("openclaw/plugin-sdk/tasks"));
|
||||
({ installInMemoryTaskAndFlowRegistryRuntime } =
|
||||
await import("../../test-utils/task-flow-registry-runtime.js"));
|
||||
});
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import { appendFile, mkdir } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { recordTaskRunProgressByRunId } from "openclaw/plugin-sdk/tasks";
|
||||
import { readAcpSessionEntry } from "../acp/runtime/session-meta.js";
|
||||
import { resolveSessionFilePath, resolveSessionFilePathOptions } from "../config/sessions/paths.js";
|
||||
import { onAgentEvent } from "../infra/agent-events.js";
|
||||
import { requestHeartbeatNow } from "../infra/heartbeat-wake.js";
|
||||
import { enqueueSystemEvent } from "../infra/system-events.js";
|
||||
import { scopedHeartbeatWakeOptions } from "../routing/session-key.js";
|
||||
import { recordTaskRunProgressByRunId } from "../tasks/task-executor.js";
|
||||
|
||||
const DEFAULT_STREAM_FLUSH_MS = 2_500;
|
||||
const DEFAULT_NO_OUTPUT_NOTICE_MS = 60_000;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { resetTaskRegistryForTests } from "openclaw/plugin-sdk/tasks";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import * as acpSessionManager from "../acp/control-plane/manager.js";
|
||||
import type { AcpInitializeSessionInput } from "../acp/control-plane/manager.types.js";
|
||||
@@ -18,7 +19,6 @@ import {
|
||||
type SessionBindingPlacement,
|
||||
type SessionBindingRecord,
|
||||
} from "../infra/outbound/session-binding-service.js";
|
||||
import { resetTaskRegistryForTests } from "../tasks/task-registry.js";
|
||||
import * as acpSpawnParentStream from "./acp-spawn-parent-stream.js";
|
||||
|
||||
function createDefaultSpawnConfig(): OpenClawConfig {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import crypto from "node:crypto";
|
||||
import { createRunningTaskRun } from "openclaw/plugin-sdk/tasks";
|
||||
import { getAcpSessionManager } from "../acp/control-plane/manager.js";
|
||||
import {
|
||||
cleanupFailedAcpSpawn,
|
||||
@@ -44,7 +45,6 @@ import {
|
||||
normalizeAgentId,
|
||||
parseAgentSessionKey,
|
||||
} from "../routing/session-key.js";
|
||||
import { createRunningTaskRun } from "../tasks/task-executor.js";
|
||||
import {
|
||||
deliveryContextFromSession,
|
||||
formatConversationTarget,
|
||||
|
||||
@@ -192,7 +192,7 @@ async function loadFreshOpenClawToolsForSessionStatusTest() {
|
||||
vi.doMock("../auto-reply/status.js", () => ({
|
||||
buildStatusMessage: buildStatusMessageMock,
|
||||
}));
|
||||
vi.doMock("../tasks/task-registry.js", () => ({
|
||||
vi.doMock("openclaw/plugin-sdk/tasks", () => ({
|
||||
listTasksForSessionKey: (sessionKey: string) => listTasksForSessionKeyMock(sessionKey),
|
||||
}));
|
||||
({ createSessionStatusTool } = await import("./tools/session-status-tool.js"));
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { isSilentReplyText, SILENT_REPLY_TOKEN } from "../auto-reply/tokens.js";
|
||||
import { defaultRuntime } from "../runtime.js";
|
||||
import { emitSessionLifecycleEvent } from "../sessions/session-lifecycle-events.js";
|
||||
import {
|
||||
completeTaskRunByRunId,
|
||||
failTaskRunByRunId,
|
||||
setDetachedTaskDeliveryStatusByRunId,
|
||||
} from "../tasks/task-executor.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import { isSilentReplyText, SILENT_REPLY_TOKEN } from "../auto-reply/tokens.js";
|
||||
import { defaultRuntime } from "../runtime.js";
|
||||
import { emitSessionLifecycleEvent } from "../sessions/session-lifecycle-events.js";
|
||||
import { normalizeDeliveryContext } from "../utils/delivery-context.js";
|
||||
import {
|
||||
captureSubagentCompletionReply,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createRunningTaskRun } from "openclaw/plugin-sdk/tasks";
|
||||
import { loadConfig } from "../config/config.js";
|
||||
import { callGateway } from "../gateway/call.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import { createRunningTaskRun } from "../tasks/task-executor.js";
|
||||
import { type DeliveryContext, normalizeDeliveryContext } from "../utils/delivery-context.js";
|
||||
import { ensureRuntimePluginsLoaded } from "./runtime-plugins.js";
|
||||
import type { SubagentRunOutcome } from "./subagent-announce.js";
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Type } from "@sinclair/typebox";
|
||||
import { listTasksForSessionKey } from "openclaw/plugin-sdk/tasks";
|
||||
import { normalizeGroupActivation } from "../../auto-reply/group-activation.js";
|
||||
import { getFollowupQueueDepth, resolveQueueSettings } from "../../auto-reply/reply/queue.js";
|
||||
import { buildStatusMessage } from "../../auto-reply/status.js";
|
||||
@@ -23,7 +24,6 @@ import {
|
||||
resolveAgentIdFromSessionKey,
|
||||
} from "../../routing/session-key.js";
|
||||
import { applyModelOverrideToSessionEntry } from "../../sessions/model-overrides.js";
|
||||
import { listTasksForSessionKey } from "../../tasks/task-registry.js";
|
||||
import { resolveAgentConfig, resolveAgentDir } from "../agent-scope.js";
|
||||
import { formatUserTime, resolveUserTimeFormat, resolveUserTimezone } from "../date-time.js";
|
||||
import { resolveModelAuthLabel } from "../model-auth-label.js";
|
||||
|
||||
@@ -114,8 +114,7 @@ const { handleAcpCommand } = await import("./commands-acp.js");
|
||||
const { buildCommandTestParams } = await import("./commands-spawn.test-harness.js");
|
||||
const { __testing: acpManagerTesting } = await import("../../acp/control-plane/manager.js");
|
||||
const { __testing: acpResetTargetTesting } = await import("./acp-reset-target.js");
|
||||
const { createTaskRecord, resetTaskRegistryForTests } =
|
||||
await import("../../tasks/task-registry.js");
|
||||
const { createTaskRecord, resetTaskRegistryForTests } = await import("openclaw/plugin-sdk/tasks");
|
||||
|
||||
function parseTelegramChatIdForTest(raw?: string | null): string | undefined {
|
||||
const trimmed = raw?.trim().replace(/^telegram:/i, "");
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { findLatestTaskForSessionKey } from "openclaw/plugin-sdk/tasks";
|
||||
import { getAcpSessionManager } from "../../../acp/control-plane/manager.js";
|
||||
import {
|
||||
parseRuntimeTimeoutSecondsInput,
|
||||
@@ -8,7 +9,6 @@ import {
|
||||
validateRuntimePermissionProfileInput,
|
||||
} from "../../../acp/control-plane/runtime-options.js";
|
||||
import { resolveAcpSessionIdentifierLinesFromIdentity } from "../../../acp/runtime/session-identifiers.js";
|
||||
import { findLatestTaskForSessionKey } from "../../../tasks/task-registry.js";
|
||||
import type { CommandHandlerResult, HandleCommandsParams } from "../commands-types.js";
|
||||
import {
|
||||
ACP_CWD_USAGE,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { findTaskByRunId } from "openclaw/plugin-sdk/tasks";
|
||||
import { countPendingDescendantRuns } from "../../../agents/subagent-registry.js";
|
||||
import { loadSessionStore, resolveStorePath } from "../../../config/sessions.js";
|
||||
import { formatDurationCompact } from "../../../shared/subagents-format.js";
|
||||
import { findTaskByRunId } from "../../../tasks/task-registry.js";
|
||||
import type { CommandHandlerResult } from "../commands-types.js";
|
||||
import { formatRunLabel } from "../subagents-utils.js";
|
||||
import {
|
||||
|
||||
@@ -249,8 +249,7 @@ const { parseConfigCommand } = await import("./config-commands.js");
|
||||
const { parseDebugCommand } = await import("./debug-commands.js");
|
||||
const { parseInlineDirectives } = await import("./directive-handling.js");
|
||||
const { buildCommandContext, handleCommands } = await import("./commands.js");
|
||||
const { createTaskRecord, resetTaskRegistryForTests } =
|
||||
await import("../../tasks/task-registry.js");
|
||||
const { createTaskRecord, resetTaskRegistryForTests } = await import("openclaw/plugin-sdk/tasks");
|
||||
|
||||
let testWorkspaceDir = os.tmpdir();
|
||||
|
||||
|
||||
@@ -52,11 +52,11 @@ vi.mock("../plugins/memory-state.js", () => ({
|
||||
hasMemoryRuntime: hasMemoryRuntimeMock,
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-registry.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
ensureTaskRegistryReady: ensureTaskRegistryReadyMock,
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-registry.maintenance.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
startTaskRegistryMaintenance: startTaskRegistryMaintenanceMock,
|
||||
}));
|
||||
|
||||
|
||||
@@ -32,11 +32,11 @@ vi.mock("../plugins/status.js", () => ({
|
||||
mocks.buildPluginCompatibilityWarnings(...args),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/flow-registry.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
listFlowRecords: (...args: unknown[]) => mocks.listFlowRecords(...args),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-registry.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
listTasksForFlowId: (...args: unknown[]) => mocks.listTasksForFlowId(...args),
|
||||
}));
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { listFlowRecords } from "openclaw/plugin-sdk/tasks";
|
||||
import { listTasksForFlowId } from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveAgentWorkspaceDir, resolveDefaultAgentId } from "../agents/agent-scope.js";
|
||||
import { buildWorkspaceSkillStatus } from "../agents/skills-status.js";
|
||||
import { formatCliCommand } from "../cli/command-format.js";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { buildPluginCompatibilityWarnings, buildPluginStatusReport } from "../plugins/status.js";
|
||||
import { listFlowRecords } from "../tasks/flow-registry.js";
|
||||
import { listTasksForFlowId } from "../tasks/task-registry.js";
|
||||
import { note } from "../terminal/note.js";
|
||||
import { detectLegacyWorkspaceDirs, formatLegacyWorkspaceWarning } from "./doctor-workspace.js";
|
||||
|
||||
|
||||
@@ -12,17 +12,11 @@ const mocks = vi.hoisted(() => ({
|
||||
loadConfigMock: vi.fn(() => ({ loaded: true })),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/flow-registry.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
listFlowRecords: (...args: unknown[]) => mocks.listFlowRecordsMock(...args),
|
||||
resolveFlowForLookupToken: (...args: unknown[]) => mocks.resolveFlowForLookupTokenMock(...args),
|
||||
getFlowById: (...args: unknown[]) => mocks.getFlowByIdMock(...args),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-registry.js", () => ({
|
||||
listTasksForFlowId: (...args: unknown[]) => mocks.listTasksForFlowIdMock(...args),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-executor.js", () => ({
|
||||
getFlowTaskSummary: (...args: unknown[]) => mocks.getFlowTaskSummaryMock(...args),
|
||||
cancelFlowById: (...args: unknown[]) => mocks.cancelFlowByIdMock(...args),
|
||||
}));
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { getFlowById, listFlowRecords, resolveFlowForLookupToken } from "openclaw/plugin-sdk/tasks";
|
||||
import type { FlowRecord, FlowStatus } from "openclaw/plugin-sdk/tasks";
|
||||
import { cancelFlowById, getFlowTaskSummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { listTasksForFlowId } from "openclaw/plugin-sdk/tasks";
|
||||
import { loadConfig } from "../config/config.js";
|
||||
import { info } from "../globals.js";
|
||||
import type { RuntimeEnv } from "../runtime.js";
|
||||
import { getFlowById, listFlowRecords, resolveFlowForLookupToken } from "../tasks/flow-registry.js";
|
||||
import type { FlowRecord, FlowStatus } from "../tasks/flow-registry.types.js";
|
||||
import { cancelFlowById, getFlowTaskSummary } from "../tasks/task-executor.js";
|
||||
import { listTasksForFlowId } from "../tasks/task-registry.js";
|
||||
import { isRich, theme } from "../terminal/theme.js";
|
||||
|
||||
const ID_PAD = 10;
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { createEmptyTaskAuditSummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { createEmptyTaskRegistrySummary } from "openclaw/plugin-sdk/tasks";
|
||||
import type { OpenClawConfig } from "../config/types.js";
|
||||
import type { UpdateCheckResult } from "../infra/update-check.js";
|
||||
import { loggingState } from "../logging/state.js";
|
||||
import { runExec } from "../process/exec.js";
|
||||
import type { RuntimeEnv } from "../runtime.js";
|
||||
import { createEmptyTaskAuditSummary } from "../tasks/task-registry.audit.shared.js";
|
||||
import { createEmptyTaskRegistrySummary } from "../tasks/task-registry.summary.js";
|
||||
import type { getAgentLocalStatuses as getAgentLocalStatusesFn } from "./status.agent-local.js";
|
||||
import type { StatusScanResult } from "./status.scan.js";
|
||||
import {
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import { createEmptyTaskAuditSummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { createEmptyTaskRegistrySummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveMemorySearchConfig } from "../agents/memory-search.js";
|
||||
import { hasPotentialConfiguredChannels } from "../channels/config-presence.js";
|
||||
import { resolveCommandSecretRefsViaGateway } from "../cli/command-secret-gateway.js";
|
||||
@@ -17,8 +19,6 @@ import {
|
||||
import { runExec } from "../process/exec.js";
|
||||
import type { RuntimeEnv } from "../runtime.js";
|
||||
import { createLazyRuntimeSurface } from "../shared/lazy-runtime.js";
|
||||
import { createEmptyTaskAuditSummary } from "../tasks/task-registry.audit.shared.js";
|
||||
import { createEmptyTaskRegistrySummary } from "../tasks/task-registry.summary.js";
|
||||
import type { buildChannelsTable as buildChannelsTableFn } from "./status-all/channels.js";
|
||||
import type { getAgentLocalStatuses as getAgentLocalStatusesFn } from "./status.agent-local.js";
|
||||
import { buildColdStartUpdateResult, scanStatusJsonCore } from "./status.scan.json-core.js";
|
||||
|
||||
@@ -59,7 +59,7 @@ vi.mock("../infra/system-events.js", () => ({
|
||||
peekSystemEvents: vi.fn(() => []),
|
||||
}));
|
||||
|
||||
vi.mock("../tasks/task-registry.maintenance.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
getInspectableTaskRegistrySummary: vi.fn(() => ({
|
||||
total: 0,
|
||||
active: 0,
|
||||
|
||||
@@ -17,7 +17,7 @@ let channelSummaryModulePromise: Promise<typeof import("../infra/channel-summary
|
||||
let linkChannelModulePromise: Promise<typeof import("./status.link-channel.js")> | undefined;
|
||||
let configIoModulePromise: Promise<typeof import("../config/io.js")> | undefined;
|
||||
let taskRegistryMaintenanceModulePromise:
|
||||
| Promise<typeof import("../tasks/task-registry.maintenance.js")>
|
||||
| Promise<typeof import("openclaw/plugin-sdk/tasks")>
|
||||
| undefined;
|
||||
|
||||
function loadChannelSummaryModule() {
|
||||
@@ -41,7 +41,7 @@ function loadConfigIoModule() {
|
||||
}
|
||||
|
||||
function loadTaskRegistryMaintenanceModule() {
|
||||
taskRegistryMaintenanceModulePromise ??= import("../tasks/task-registry.maintenance.js");
|
||||
taskRegistryMaintenanceModulePromise ??= import("openclaw/plugin-sdk/tasks");
|
||||
return taskRegistryMaintenanceModulePromise;
|
||||
}
|
||||
|
||||
|
||||
@@ -467,7 +467,7 @@ vi.mock("../daemon/node-service.js", () => ({
|
||||
vi.mock("../node-host/config.js", () => ({
|
||||
loadNodeHostConfig: mocks.loadNodeHostConfig,
|
||||
}));
|
||||
vi.mock("../tasks/task-registry.maintenance.js", () => ({
|
||||
vi.mock("openclaw/plugin-sdk/tasks", () => ({
|
||||
getInspectableTaskRegistrySummary: mocks.getInspectableTaskRegistrySummary,
|
||||
getInspectableTaskAuditSummary: mocks.getInspectableTaskAuditSummary,
|
||||
}));
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { TaskAuditSummary } from "openclaw/plugin-sdk/tasks";
|
||||
import type { TaskRegistrySummary } from "openclaw/plugin-sdk/tasks";
|
||||
import type { ChannelId } from "../channels/plugins/types.js";
|
||||
import type { TaskAuditSummary } from "../tasks/task-registry.audit.js";
|
||||
import type { TaskRegistrySummary } from "../tasks/task-registry.types.js";
|
||||
|
||||
export type SessionStatus = {
|
||||
agentId?: string;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import * as taskExecutor from "openclaw/plugin-sdk/tasks";
|
||||
import { findTaskByRunId, resetTaskRegistryForTests } from "openclaw/plugin-sdk/tasks";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import * as taskExecutor from "../../tasks/task-executor.js";
|
||||
import { findTaskByRunId, resetTaskRegistryForTests } from "../../tasks/task-registry.js";
|
||||
import { setupCronServiceSuite, writeCronStoreSnapshot } from "../service.test-harness.js";
|
||||
import type { CronJob } from "../types.js";
|
||||
import { run, start, stop } from "./ops.js";
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { enqueueCommandInLane } from "../../process/command-queue.js";
|
||||
import { CommandLane } from "../../process/lanes.js";
|
||||
import {
|
||||
completeTaskRunByRunId,
|
||||
createRunningTaskRun,
|
||||
failTaskRunByRunId,
|
||||
} from "../../tasks/task-executor.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import { enqueueCommandInLane } from "../../process/command-queue.js";
|
||||
import { CommandLane } from "../../process/lanes.js";
|
||||
import type { CronJob, CronJobCreate, CronJobPatch } from "../types.js";
|
||||
import { normalizeCronCreateDeliveryInput } from "./initial-delivery.js";
|
||||
import {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import fs from "node:fs/promises";
|
||||
import * as taskExecutor from "openclaw/plugin-sdk/tasks";
|
||||
import { resetTaskRegistryForTests } from "openclaw/plugin-sdk/tasks";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { setupCronServiceSuite, writeCronStoreSnapshot } from "../../cron/service.test-harness.js";
|
||||
import { createCronServiceState } from "../../cron/service/state.js";
|
||||
import { onTimer } from "../../cron/service/timer.js";
|
||||
import type { CronJob } from "../../cron/types.js";
|
||||
import * as taskExecutor from "../../tasks/task-executor.js";
|
||||
import { resetTaskRegistryForTests } from "../../tasks/task-registry.js";
|
||||
|
||||
const { logger, makeStorePath } = setupCronServiceSuite({
|
||||
prefix: "cron-service-timer-seam",
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import { resolveFailoverReasonFromError } from "../../agents/failover-error.js";
|
||||
import type { CronConfig, CronRetryOn } from "../../config/types.cron.js";
|
||||
import type { HeartbeatRunResult } from "../../infra/heartbeat-wake.js";
|
||||
import { DEFAULT_AGENT_ID } from "../../routing/session-key.js";
|
||||
import {
|
||||
completeTaskRunByRunId,
|
||||
createRunningTaskRun,
|
||||
failTaskRunByRunId,
|
||||
} from "../../tasks/task-executor.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveFailoverReasonFromError } from "../../agents/failover-error.js";
|
||||
import type { CronConfig, CronRetryOn } from "../../config/types.cron.js";
|
||||
import type { HeartbeatRunResult } from "../../infra/heartbeat-wake.js";
|
||||
import { DEFAULT_AGENT_ID } from "../../routing/session-key.js";
|
||||
import { resolveCronDeliveryPlan } from "../delivery.js";
|
||||
import { sweepCronRunSessions } from "../session-reaper.js";
|
||||
import type {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { findTaskByRunId, resetTaskRegistryForTests } from "openclaw/plugin-sdk/tasks";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { BARE_SESSION_RESET_PROMPT } from "../../auto-reply/reply/session-reset-prompt.js";
|
||||
import { findTaskByRunId, resetTaskRegistryForTests } from "../../tasks/task-registry.js";
|
||||
import { withTempDir } from "../../test-helpers/temp-dir.js";
|
||||
import { agentHandlers } from "./agent.js";
|
||||
import { expectSubagentFollowupReactivation } from "./subagent-followup.test-helpers.js";
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { createRunningTaskRun } from "openclaw/plugin-sdk/tasks";
|
||||
import { listAgentIds } from "../../agents/agent-scope.js";
|
||||
import type { AgentInternalEvent } from "../../agents/internal-events.js";
|
||||
import {
|
||||
@@ -28,7 +29,6 @@ import { classifySessionKeyShape, normalizeAgentId } from "../../routing/session
|
||||
import { defaultRuntime } from "../../runtime.js";
|
||||
import { normalizeInputProvenance, type InputProvenance } from "../../sessions/input-provenance.js";
|
||||
import { resolveSendPolicy } from "../../sessions/send-policy.js";
|
||||
import { createRunningTaskRun } from "../../tasks/task-executor.js";
|
||||
import {
|
||||
normalizeDeliveryContext,
|
||||
normalizeSessionDeliveryFields,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { getInspectableTaskRegistrySummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { getActiveEmbeddedRunCount } from "../agents/pi-embedded-runner/runs.js";
|
||||
import { getTotalPendingReplies } from "../auto-reply/reply/dispatcher-registry.js";
|
||||
import type { CliDeps } from "../cli/deps.js";
|
||||
@@ -16,7 +17,6 @@ import {
|
||||
} from "../infra/restart.js";
|
||||
import { setCommandLaneConcurrency, getTotalQueueSize } from "../process/command-queue.js";
|
||||
import { CommandLane } from "../process/lanes.js";
|
||||
import { getInspectableTaskRegistrySummary } from "../tasks/task-registry.maintenance.js";
|
||||
import type { ChannelHealthMonitor } from "./channel-health-monitor.js";
|
||||
import type { ChannelKind } from "./config-reload-plan.js";
|
||||
import type { GatewayReloadPlan } from "./config-reload.js";
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import path from "node:path";
|
||||
import { getInspectableTaskRegistrySummary } from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveAgentWorkspaceDir, resolveDefaultAgentId } from "../agents/agent-scope.js";
|
||||
import { getActiveEmbeddedRunCount } from "../agents/pi-embedded-runner/runs.js";
|
||||
import { registerSkillsChangeListener } from "../agents/skills/refresh.js";
|
||||
@@ -75,7 +76,6 @@ import {
|
||||
} from "../secrets/runtime.js";
|
||||
import { onSessionLifecycleEvent } from "../sessions/session-lifecycle-events.js";
|
||||
import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
|
||||
import { getInspectableTaskRegistrySummary } from "../tasks/task-registry.maintenance.js";
|
||||
import { runSetupWizard } from "../wizard/setup.js";
|
||||
import { createAuthRateLimiter, type AuthRateLimiter } from "./auth-rate-limit.js";
|
||||
import { startChannelHealthMonitor } from "./channel-health-monitor.js";
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { defaultTaskOperationsRuntime } from "../../packages/tasks-host-sdk/src/runtime-core.js";
|
||||
import { startTaskRegistryMaintenance } from "../../packages/tasks-host-sdk/src/runtime-core.js";
|
||||
import type { OpenClawPluginService } from "../plugins/types.js";
|
||||
import { defaultTaskOperationsRuntime } from "../tasks/operations-runtime.js";
|
||||
import { startTaskRegistryMaintenance } from "../tasks/task-registry.maintenance.js";
|
||||
|
||||
export * from "../../packages/tasks-host-sdk/src/runtime-core.js";
|
||||
|
||||
export const defaultOperationsRuntime = defaultTaskOperationsRuntime;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { defaultTaskOperationsRuntime } from "openclaw/plugin-sdk/tasks";
|
||||
import { resolveStateDir } from "../../config/paths.js";
|
||||
import { loadBundledPluginPublicSurfaceModuleSync } from "../../plugin-sdk/facade-runtime.js";
|
||||
import { resolveGlobalSingleton } from "../../shared/global-singleton.js";
|
||||
@@ -6,7 +7,6 @@ import {
|
||||
createLazyRuntimeMethodBinder,
|
||||
createLazyRuntimeModule,
|
||||
} from "../../shared/lazy-runtime.js";
|
||||
import { defaultTaskOperationsRuntime } from "../../tasks/operations-runtime.js";
|
||||
import { VERSION } from "../../version.js";
|
||||
import { listWebSearchProviders, runWebSearch } from "../../web-search/runtime.js";
|
||||
import { getRegisteredOperationsRuntime } from "../operations-state.js";
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
import path from "node:path";
|
||||
import { resolveTaskStateDir } from "./task-registry.paths.js";
|
||||
|
||||
export function resolveFlowRegistryDir(env: NodeJS.ProcessEnv = process.env): string {
|
||||
return path.join(resolveTaskStateDir(env), "flows");
|
||||
}
|
||||
|
||||
export function resolveFlowRegistrySqlitePath(env: NodeJS.ProcessEnv = process.env): string {
|
||||
return path.join(resolveFlowRegistryDir(env), "registry.sqlite");
|
||||
}
|
||||
@@ -1,318 +0,0 @@
|
||||
import { chmodSync, existsSync, mkdirSync } from "node:fs";
|
||||
import type { DatabaseSync, StatementSync } from "node:sqlite";
|
||||
import { requireNodeSqlite } from "../infra/node-sqlite.js";
|
||||
import type { DeliveryContext } from "../utils/delivery-context.js";
|
||||
import { resolveFlowRegistryDir, resolveFlowRegistrySqlitePath } from "./flow-registry.paths.js";
|
||||
import type { FlowRegistryStoreSnapshot } from "./flow-registry.store.js";
|
||||
import type { FlowOutputBag, FlowRecord, FlowShape } from "./flow-registry.types.js";
|
||||
|
||||
// Raw row shape of the `flow_runs` table. Column names are snake_case to match
// the SQLite schema. JSON-bearing columns are stored as serialized strings
// (see serializeJson / parseJsonValue); integer columns may come back as
// bigint from node:sqlite, hence `number | bigint` on the timestamps.
type FlowRegistryRow = {
  flow_id: string;
  // NULL or unrecognized values are mapped to "single_task" on read (rowToFlowRecord).
  shape: FlowShape | null;
  owner_session_key: string;
  // Serialized DeliveryContext, or NULL when the flow has no requester origin.
  requester_origin_json: string | null;
  status: FlowRecord["status"];
  notify_policy: FlowRecord["notifyPolicy"];
  goal: string;
  current_step: string | null;
  waiting_on_task_id: string | null;
  // Serialized FlowOutputBag, or NULL when the flow has produced no outputs.
  outputs_json: string | null;
  blocked_task_id: string | null;
  blocked_summary: string | null;
  // Timestamps — presumably epoch milliseconds; TODO confirm unit against the writers.
  created_at: number | bigint;
  updated_at: number | bigint;
  ended_at: number | bigint | null;
};
|
||||
|
||||
// Prepared statements kept alive alongside the cached database handle so the
// registry does not re-prepare SQL on every operation.
type FlowRegistryStatements = {
  // All rows, ordered by created_at then flow_id (see createStatements).
  selectAll: StatementSync;
  // INSERT ... ON CONFLICT(flow_id) DO UPDATE — write path for one record.
  upsertRow: StatementSync;
  // Delete a single flow by flow_id.
  deleteRow: StatementSync;
  // Wipe the whole flow_runs table.
  clearRows: StatementSync;
};
|
||||
|
||||
// An open database handle bundled with its resolved file path and prepared
// statements; cached process-wide in `cachedDatabase`.
type FlowRegistryDatabase = {
  db: DatabaseSync;
  // SQLite file path — presumably resolveFlowRegistrySqlitePath(); confirm at the open site.
  path: string;
  statements: FlowRegistryStatements;
};
|
||||
|
||||
// Process-wide singleton handle; lazily opened and reused across calls.
let cachedDatabase: FlowRegistryDatabase | null = null;
// Registry state is owner-only on disk (0700 dirs / 0600 files).
const FLOW_REGISTRY_DIR_MODE = 0o700;
const FLOW_REGISTRY_FILE_MODE = 0o600;
// "" is the main db file; "-shm"/"-wal" are the sidecars SQLite creates in WAL
// mode — presumably all chmod'ed together; confirm at the open/chmod site.
const FLOW_REGISTRY_SIDECAR_SUFFIXES = ["", "-shm", "-wal"] as const;
|
||||
|
||||
function normalizeNumber(value: number | bigint | null): number | undefined {
|
||||
if (typeof value === "bigint") {
|
||||
return Number(value);
|
||||
}
|
||||
return typeof value === "number" ? value : undefined;
|
||||
}
|
||||
|
||||
function serializeJson(value: unknown): string | null {
|
||||
return value == null ? null : JSON.stringify(value);
|
||||
}
|
||||
|
||||
function parseJsonValue<T>(raw: string | null): T | undefined {
|
||||
if (!raw?.trim()) {
|
||||
return undefined;
|
||||
}
|
||||
try {
|
||||
return JSON.parse(raw) as T;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Hydrate a FlowRecord from its raw `flow_runs` row.
 * Optional columns that are NULL/empty are omitted from the record entirely
 * (via conditional spreads) rather than being set to undefined.
 */
function rowToFlowRecord(row: FlowRegistryRow): FlowRecord {
  const endedAt = normalizeNumber(row.ended_at);
  const requesterOrigin = parseJsonValue<DeliveryContext>(row.requester_origin_json);
  const outputs = parseJsonValue<FlowOutputBag>(row.outputs_json);
  return {
    flowId: row.flow_id,
    // Only "linear" is recognized; NULL or anything else falls back to "single_task".
    shape: row.shape === "linear" ? "linear" : "single_task",
    ownerSessionKey: row.owner_session_key,
    ...(requesterOrigin ? { requesterOrigin } : {}),
    status: row.status,
    notifyPolicy: row.notify_policy,
    goal: row.goal,
    ...(row.current_step ? { currentStep: row.current_step } : {}),
    ...(row.waiting_on_task_id ? { waitingOnTaskId: row.waiting_on_task_id } : {}),
    ...(outputs ? { outputs } : {}),
    ...(row.blocked_task_id ? { blockedTaskId: row.blocked_task_id } : {}),
    ...(row.blocked_summary ? { blockedSummary: row.blocked_summary } : {}),
    // Missing/NULL timestamps degrade to 0 rather than undefined.
    createdAt: normalizeNumber(row.created_at) ?? 0,
    updatedAt: normalizeNumber(row.updated_at) ?? 0,
    ...(endedAt != null ? { endedAt } : {}),
  };
}
|
||||
|
||||
function bindFlowRecord(record: FlowRecord) {
|
||||
return {
|
||||
flow_id: record.flowId,
|
||||
shape: record.shape,
|
||||
owner_session_key: record.ownerSessionKey,
|
||||
requester_origin_json: serializeJson(record.requesterOrigin),
|
||||
status: record.status,
|
||||
notify_policy: record.notifyPolicy,
|
||||
goal: record.goal,
|
||||
current_step: record.currentStep ?? null,
|
||||
waiting_on_task_id: record.waitingOnTaskId ?? null,
|
||||
outputs_json: serializeJson(record.outputs),
|
||||
blocked_task_id: record.blockedTaskId ?? null,
|
||||
blocked_summary: record.blockedSummary ?? null,
|
||||
created_at: record.createdAt,
|
||||
updated_at: record.updatedAt,
|
||||
ended_at: record.endedAt ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Prepares every statement the flow-registry store needs against `db`.
 * Statements are prepared once per database handle and cached on the
 * FlowRegistryDatabase wrapper (see openFlowRegistryDatabase).
 */
function createStatements(db: DatabaseSync): FlowRegistryStatements {
  return {
    // Full-table read in deterministic order; flow_id breaks created_at ties.
    selectAll: db.prepare(`
      SELECT
        flow_id,
        shape,
        owner_session_key,
        requester_origin_json,
        status,
        notify_policy,
        goal,
        current_step,
        waiting_on_task_id,
        outputs_json,
        blocked_task_id,
        blocked_summary,
        created_at,
        updated_at,
        ended_at
      FROM flow_runs
      ORDER BY created_at ASC, flow_id ASC
    `),
    // Insert-or-replace keyed on flow_id; every non-key column is overwritten
    // from the excluded (incoming) row, so the row always mirrors the record.
    upsertRow: db.prepare(`
      INSERT INTO flow_runs (
        flow_id,
        shape,
        owner_session_key,
        requester_origin_json,
        status,
        notify_policy,
        goal,
        current_step,
        waiting_on_task_id,
        outputs_json,
        blocked_task_id,
        blocked_summary,
        created_at,
        updated_at,
        ended_at
      ) VALUES (
        @flow_id,
        @shape,
        @owner_session_key,
        @requester_origin_json,
        @status,
        @notify_policy,
        @goal,
        @current_step,
        @waiting_on_task_id,
        @outputs_json,
        @blocked_task_id,
        @blocked_summary,
        @created_at,
        @updated_at,
        @ended_at
      )
      ON CONFLICT(flow_id) DO UPDATE SET
        shape = excluded.shape,
        owner_session_key = excluded.owner_session_key,
        requester_origin_json = excluded.requester_origin_json,
        status = excluded.status,
        notify_policy = excluded.notify_policy,
        goal = excluded.goal,
        current_step = excluded.current_step,
        waiting_on_task_id = excluded.waiting_on_task_id,
        outputs_json = excluded.outputs_json,
        blocked_task_id = excluded.blocked_task_id,
        blocked_summary = excluded.blocked_summary,
        created_at = excluded.created_at,
        updated_at = excluded.updated_at,
        ended_at = excluded.ended_at
    `),
    // Single-row delete by primary key.
    deleteRow: db.prepare(`DELETE FROM flow_runs WHERE flow_id = ?`),
    // Wipes the table; used by the snapshot-save path before re-inserting.
    clearRows: db.prepare(`DELETE FROM flow_runs`),
  };
}
|
||||
|
||||
/**
 * Creates the flow_runs table and indexes if missing, then migrates older
 * databases forward by adding columns that were introduced after the initial
 * schema. Safe to call on every open — all statements are IF NOT EXISTS or
 * guarded column adds.
 */
function ensureSchema(db: DatabaseSync) {
  db.exec(`
    CREATE TABLE IF NOT EXISTS flow_runs (
      flow_id TEXT PRIMARY KEY,
      shape TEXT NOT NULL,
      owner_session_key TEXT NOT NULL,
      requester_origin_json TEXT,
      status TEXT NOT NULL,
      notify_policy TEXT NOT NULL,
      goal TEXT NOT NULL,
      current_step TEXT,
      waiting_on_task_id TEXT,
      outputs_json TEXT,
      blocked_task_id TEXT,
      blocked_summary TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      ended_at INTEGER
    );
  `);
  // Forward migrations for databases created before these columns existed.
  // ALTER TABLE cannot add NOT NULL without a default, so these stay nullable.
  ensureColumn(db, "flow_runs", "shape", "TEXT");
  ensureColumn(db, "flow_runs", "waiting_on_task_id", "TEXT");
  ensureColumn(db, "flow_runs", "outputs_json", "TEXT");
  ensureColumn(db, "flow_runs", "blocked_task_id", "TEXT");
  ensureColumn(db, "flow_runs", "blocked_summary", "TEXT");
  db.exec(`CREATE INDEX IF NOT EXISTS idx_flow_runs_status ON flow_runs(status);`);
  db.exec(
    `CREATE INDEX IF NOT EXISTS idx_flow_runs_owner_session_key ON flow_runs(owner_session_key);`,
  );
  db.exec(`CREATE INDEX IF NOT EXISTS idx_flow_runs_updated_at ON flow_runs(updated_at);`);
}
|
||||
|
||||
function ensureColumn(
|
||||
db: DatabaseSync,
|
||||
tableName: string,
|
||||
columnName: string,
|
||||
columnDefinition: string,
|
||||
) {
|
||||
const rows = db.prepare(`PRAGMA table_info(${tableName})`).all() as Array<{ name?: string }>;
|
||||
if (rows.some((row) => row.name === columnName)) {
|
||||
return;
|
||||
}
|
||||
db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnDefinition};`);
|
||||
}
|
||||
|
||||
/**
 * Creates the registry directory with restrictive permissions and tightens
 * the mode of every existing registry file derived from `pathname` via the
 * configured sidecar suffixes (presumably the db file itself plus sqlite
 * -wal/-shm sidecars — suffix list is defined above; confirm there).
 */
function ensureFlowRegistryPermissions(pathname: string) {
  const dir = resolveFlowRegistryDir(process.env);
  mkdirSync(dir, { recursive: true, mode: FLOW_REGISTRY_DIR_MODE });
  // mkdirSync's mode only applies when the directory is created, so chmod
  // explicitly to enforce the mode on pre-existing directories too.
  chmodSync(dir, FLOW_REGISTRY_DIR_MODE);
  for (const suffix of FLOW_REGISTRY_SIDECAR_SUFFIXES) {
    const candidate = `${pathname}${suffix}`;
    // Sidecars only exist after sqlite has created them; skip missing ones.
    if (!existsSync(candidate)) {
      continue;
    }
    chmodSync(candidate, FLOW_REGISTRY_FILE_MODE);
  }
}
|
||||
|
||||
/**
 * Opens (or returns the cached) sqlite database for the flow registry.
 * The cache is keyed by resolved path, so changing OPENCLAW_STATE_DIR between
 * calls transparently closes the old handle and opens the new store.
 */
function openFlowRegistryDatabase(): FlowRegistryDatabase {
  const pathname = resolveFlowRegistrySqlitePath(process.env);
  if (cachedDatabase && cachedDatabase.path === pathname) {
    return cachedDatabase;
  }
  if (cachedDatabase) {
    // Path changed: release the stale handle before opening the new store.
    cachedDatabase.db.close();
    cachedDatabase = null;
  }
  // First pass: make sure the directory exists with hardened modes before
  // sqlite creates the database file inside it.
  ensureFlowRegistryPermissions(pathname);
  const { DatabaseSync } = requireNodeSqlite();
  const db = new DatabaseSync(pathname);
  db.exec(`PRAGMA journal_mode = WAL;`);
  db.exec(`PRAGMA synchronous = NORMAL;`);
  db.exec(`PRAGMA busy_timeout = 5000;`);
  ensureSchema(db);
  // Second pass: opening in WAL mode can create -wal/-shm sidecar files, so
  // re-assert file modes now that they may exist.
  ensureFlowRegistryPermissions(pathname);
  cachedDatabase = {
    db,
    path: pathname,
    statements: createStatements(db),
  };
  return cachedDatabase;
}
|
||||
|
||||
function withWriteTransaction(write: (statements: FlowRegistryStatements) => void) {
|
||||
const { db, path, statements } = openFlowRegistryDatabase();
|
||||
db.exec("BEGIN IMMEDIATE");
|
||||
try {
|
||||
write(statements);
|
||||
db.exec("COMMIT");
|
||||
ensureFlowRegistryPermissions(path);
|
||||
} catch (error) {
|
||||
db.exec("ROLLBACK");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function loadFlowRegistryStateFromSqlite(): FlowRegistryStoreSnapshot {
|
||||
const { statements } = openFlowRegistryDatabase();
|
||||
const rows = statements.selectAll.all() as FlowRegistryRow[];
|
||||
return {
|
||||
flows: new Map(rows.map((row) => [row.flow_id, rowToFlowRecord(row)])),
|
||||
};
|
||||
}
|
||||
|
||||
export function saveFlowRegistryStateToSqlite(snapshot: FlowRegistryStoreSnapshot) {
|
||||
withWriteTransaction((statements) => {
|
||||
statements.clearRows.run();
|
||||
for (const flow of snapshot.flows.values()) {
|
||||
statements.upsertRow.run(bindFlowRecord(flow));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function upsertFlowRegistryRecordToSqlite(flow: FlowRecord) {
|
||||
const store = openFlowRegistryDatabase();
|
||||
store.statements.upsertRow.run(bindFlowRecord(flow));
|
||||
ensureFlowRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function deleteFlowRegistryRecordFromSqlite(flowId: string) {
|
||||
const store = openFlowRegistryDatabase();
|
||||
store.statements.deleteRow.run(flowId);
|
||||
ensureFlowRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function closeFlowRegistrySqliteStore() {
|
||||
if (!cachedDatabase) {
|
||||
return;
|
||||
}
|
||||
cachedDatabase.db.close();
|
||||
cachedDatabase = null;
|
||||
}
|
||||
@@ -1,148 +0,0 @@
|
||||
import { statSync } from "node:fs";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import { createFlowRecord, getFlowById, resetFlowRegistryForTests } from "./flow-registry.js";
|
||||
import { resolveFlowRegistryDir, resolveFlowRegistrySqlitePath } from "./flow-registry.paths.js";
|
||||
import { configureFlowRegistryRuntime } from "./flow-registry.store.js";
|
||||
import type { FlowRecord } from "./flow-registry.types.js";
|
||||
|
||||
/**
 * Test fixture: a fully-populated persisted flow record (every optional field
 * set) used to verify restore paths round-trip all columns.
 */
function createStoredFlow(): FlowRecord {
  return {
    flowId: "flow-restored",
    shape: "linear",
    ownerSessionKey: "agent:main:main",
    status: "blocked",
    notifyPolicy: "done_only",
    goal: "Restored flow",
    currentStep: "spawn_task",
    waitingOnTaskId: "task-waiting",
    outputs: {
      bucket: ["business"],
    },
    blockedTaskId: "task-restored",
    blockedSummary: "Writable session required.",
    createdAt: 100,
    updatedAt: 100,
    endedAt: 120,
  };
}
|
||||
|
||||
async function withFlowRegistryTempDir<T>(run: (root: string) => Promise<T>): Promise<T> {
|
||||
return await withTempDir({ prefix: "openclaw-flow-store-" }, async (root) => {
|
||||
process.env.OPENCLAW_STATE_DIR = root;
|
||||
resetFlowRegistryForTests();
|
||||
try {
|
||||
return await run(root);
|
||||
} finally {
|
||||
// Close the sqlite-backed registry before Windows temp-dir cleanup removes the store root.
|
||||
resetFlowRegistryForTests();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe("flow-registry store runtime", () => {
  beforeEach(() => {
    vi.useRealTimers();
  });

  afterEach(() => {
    vi.useRealTimers();
    delete process.env.OPENCLAW_STATE_DIR;
    resetFlowRegistryForTests();
  });

  // A custom store injected via configureFlowRegistryRuntime must be used for
  // both the lazy restore (loadSnapshot) and subsequent saves (saveSnapshot).
  it("uses the configured flow store for restore and save", () => {
    const storedFlow = createStoredFlow();
    const loadSnapshot = vi.fn(() => ({
      flows: new Map([[storedFlow.flowId, storedFlow]]),
    }));
    const saveSnapshot = vi.fn();
    configureFlowRegistryRuntime({
      store: {
        loadSnapshot,
        saveSnapshot,
      },
    });

    // First registry read triggers exactly one restore from the custom store.
    expect(getFlowById("flow-restored")).toMatchObject({
      flowId: "flow-restored",
      shape: "linear",
      goal: "Restored flow",
      waitingOnTaskId: "task-waiting",
      outputs: {
        bucket: ["business"],
      },
      blockedTaskId: "task-restored",
      blockedSummary: "Writable session required.",
    });
    expect(loadSnapshot).toHaveBeenCalledTimes(1);

    createFlowRecord({
      ownerSessionKey: "agent:main:main",
      goal: "New flow",
      status: "running",
      currentStep: "wait_for",
    });

    // The save must include both the restored flow and the new one.
    expect(saveSnapshot).toHaveBeenCalled();
    const latestSnapshot = saveSnapshot.mock.calls.at(-1)?.[0] as {
      flows: ReadonlyMap<string, FlowRecord>;
    };
    expect(latestSnapshot.flows.size).toBe(2);
    expect(latestSnapshot.flows.get("flow-restored")?.goal).toBe("Restored flow");
  });

  // Default sqlite store: a flow written before a non-persisting reset must be
  // restored from disk on the next registry access.
  it("restores persisted flows from the default sqlite store", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Persisted flow",
        status: "waiting",
        currentStep: "ask_user",
        waitingOnTaskId: "task-restored",
        outputs: {
          bucket: ["personal"],
        },
      });

      // persist:false drops in-memory state without overwriting the db, so
      // the read below exercises the sqlite restore path.
      resetFlowRegistryForTests({ persist: false });

      expect(getFlowById(created.flowId)).toMatchObject({
        flowId: created.flowId,
        shape: "linear",
        status: "waiting",
        currentStep: "ask_user",
        waitingOnTaskId: "task-restored",
        outputs: {
          bucket: ["personal"],
        },
      });
    });
  });

  // POSIX-only: verifies the 0700 directory / 0600 database hardening.
  it("hardens the sqlite flow store directory and file modes", async () => {
    if (process.platform === "win32") {
      // Windows has no POSIX mode bits to assert against.
      return;
    }
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Secured flow",
        status: "blocked",
        blockedTaskId: "task-secured",
        blockedSummary: "Need auth.",
      });

      const registryDir = resolveFlowRegistryDir(process.env);
      const sqlitePath = resolveFlowRegistrySqlitePath(process.env);
      expect(statSync(registryDir).mode & 0o777).toBe(0o700);
      expect(statSync(sqlitePath).mode & 0o777).toBe(0o600);
    });
  });
});
|
||||
@@ -1,45 +0,0 @@
|
||||
import {
|
||||
closeFlowRegistrySqliteStore,
|
||||
deleteFlowRegistryRecordFromSqlite,
|
||||
loadFlowRegistryStateFromSqlite,
|
||||
saveFlowRegistryStateToSqlite,
|
||||
upsertFlowRegistryRecordToSqlite,
|
||||
} from "./flow-registry.store.sqlite.js";
|
||||
import type { FlowRecord } from "./flow-registry.types.js";
|
||||
|
||||
/** Full in-memory image of the persisted registry, keyed by flow id. */
export type FlowRegistryStoreSnapshot = {
  flows: Map<string, FlowRecord>;
};
|
||||
|
||||
/**
 * Persistence seam for the flow registry. `loadSnapshot`/`saveSnapshot` are
 * required; the per-record hooks are optional fast paths — when absent the
 * registry falls back to a full snapshot save (see persistFlowUpsert /
 * persistFlowDelete in flow-registry.ts).
 */
export type FlowRegistryStore = {
  loadSnapshot: () => FlowRegistryStoreSnapshot;
  saveSnapshot: (snapshot: FlowRegistryStoreSnapshot) => void;
  upsertFlow?: (flow: FlowRecord) => void;
  deleteFlow?: (flowId: string) => void;
  close?: () => void;
};
|
||||
|
||||
// Production default: the sqlite-backed store, wired hook-for-hook.
const defaultFlowRegistryStore: FlowRegistryStore = {
  loadSnapshot: loadFlowRegistryStateFromSqlite,
  saveSnapshot: saveFlowRegistryStateToSqlite,
  upsertFlow: upsertFlowRegistryRecordToSqlite,
  deleteFlow: deleteFlowRegistryRecordFromSqlite,
  close: closeFlowRegistrySqliteStore,
};

// Currently active store; swapped out by configureFlowRegistryRuntime and
// restored to the default by resetFlowRegistryRuntimeForTests.
let configuredFlowRegistryStore: FlowRegistryStore = defaultFlowRegistryStore;
|
||||
|
||||
/** Returns the currently configured persistence store for the flow registry. */
export function getFlowRegistryStore(): FlowRegistryStore {
  return configuredFlowRegistryStore;
}
|
||||
|
||||
export function configureFlowRegistryRuntime(params: { store?: FlowRegistryStore }) {
|
||||
if (params.store) {
|
||||
configuredFlowRegistryStore = params.store;
|
||||
}
|
||||
}
|
||||
|
||||
export function resetFlowRegistryRuntimeForTests() {
|
||||
configuredFlowRegistryStore.close?.();
|
||||
configuredFlowRegistryStore = defaultFlowRegistryStore;
|
||||
}
|
||||
@@ -1,256 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import {
|
||||
createFlowRecord,
|
||||
deleteFlowRecordById,
|
||||
getFlowById,
|
||||
listFlowRecords,
|
||||
resetFlowRegistryForTests,
|
||||
syncFlowFromTask,
|
||||
updateFlowRecordById,
|
||||
} from "./flow-registry.js";
|
||||
|
||||
// Captured once at import time so afterEach can restore the pre-test value.
const ORIGINAL_STATE_DIR = process.env.OPENCLAW_STATE_DIR;
|
||||
|
||||
async function withFlowRegistryTempDir<T>(run: (root: string) => Promise<T>): Promise<T> {
|
||||
return await withTempDir({ prefix: "openclaw-flow-registry-" }, async (root) => {
|
||||
process.env.OPENCLAW_STATE_DIR = root;
|
||||
resetFlowRegistryForTests();
|
||||
try {
|
||||
return await run(root);
|
||||
} finally {
|
||||
// Close the sqlite-backed registry before Windows temp-dir cleanup removes the store root.
|
||||
resetFlowRegistryForTests();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe("flow-registry", () => {
  beforeEach(() => {
    vi.useRealTimers();
  });

  afterEach(() => {
    vi.useRealTimers();
    // Restore the env var to whatever it was before this test file ran.
    if (ORIGINAL_STATE_DIR === undefined) {
      delete process.env.OPENCLAW_STATE_DIR;
    } else {
      process.env.OPENCLAW_STATE_DIR = ORIGINAL_STATE_DIR;
    }
    resetFlowRegistryForTests();
  });

  // Basic CRUD round-trip against the default sqlite-backed registry.
  it("creates, updates, lists, and deletes flow records", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Investigate flaky test",
        status: "running",
        currentStep: "spawn_task",
      });

      expect(getFlowById(created.flowId)).toMatchObject({
        flowId: created.flowId,
        status: "running",
        currentStep: "spawn_task",
      });

      const updated = updateFlowRecordById(created.flowId, {
        status: "waiting",
        currentStep: "ask_user",
        waitingOnTaskId: "task-123",
        outputs: {
          bucket: ["personal"],
        },
      });
      expect(updated).toMatchObject({
        flowId: created.flowId,
        status: "waiting",
        currentStep: "ask_user",
        waitingOnTaskId: "task-123",
        outputs: {
          bucket: ["personal"],
        },
      });

      expect(listFlowRecords()).toEqual([
        expect.objectContaining({
          flowId: created.flowId,
          goal: "Investigate flaky test",
          status: "waiting",
        }),
      ]);

      expect(deleteFlowRecordById(created.flowId)).toBe(true);
      expect(getFlowById(created.flowId)).toBeUndefined();
      expect(listFlowRecords()).toEqual([]);
    });
  });

  // listFlowRecords sorts by createdAt descending.
  it("lists newest flows first", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const earlier = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "First flow",
        createdAt: 100,
        updatedAt: 100,
      });
      const later = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Second flow",
        createdAt: 200,
        updatedAt: 200,
      });

      expect(listFlowRecords().map((flow) => flow.flowId)).toEqual([later.flowId, earlier.flowId]);
    });
  });

  // Defaults when only required fields are provided: linear shape, queued
  // status, done_only notify policy, and no optional fields set.
  it("applies minimal defaults for new flow records", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Background job",
      });

      expect(created).toMatchObject({
        flowId: expect.any(String),
        shape: "linear",
        ownerSessionKey: "agent:main:main",
        goal: "Background job",
        status: "queued",
        notifyPolicy: "done_only",
      });
      expect(created.currentStep).toBeUndefined();
      expect(created.endedAt).toBeUndefined();
    });
  });

  // A patch that omits endedAt must not clobber the stored value
  // (undefined in a patch means "keep"; null means "clear").
  it("preserves endedAt when later updates change other flow fields", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Finish a task",
        status: "succeeded",
        endedAt: 456,
      });

      const updated = updateFlowRecordById(created.flowId, {
        currentStep: "finish",
      });

      expect(updated).toMatchObject({
        flowId: created.flowId,
        currentStep: "finish",
        endedAt: 456,
      });
      expect(getFlowById(created.flowId)).toMatchObject({
        flowId: created.flowId,
        endedAt: 456,
      });
    });
  });

  // single_task flows mirror their task: a blocked terminal task sets blocked
  // metadata; a later non-terminal task on the same flow clears it.
  it("stores blocked metadata and clears it when a later task resumes the same flow", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        shape: "single_task",
        ownerSessionKey: "agent:main:main",
        goal: "Fix permissions",
        status: "running",
      });

      const blocked = syncFlowFromTask({
        taskId: "task-blocked",
        parentFlowId: created.flowId,
        status: "succeeded",
        terminalOutcome: "blocked",
        notifyPolicy: "done_only",
        label: "Fix permissions",
        task: "Fix permissions",
        lastEventAt: 200,
        endedAt: 200,
        terminalSummary: "Writable session required.",
      });

      expect(blocked).toMatchObject({
        flowId: created.flowId,
        status: "blocked",
        blockedTaskId: "task-blocked",
        blockedSummary: "Writable session required.",
        endedAt: 200,
      });

      const resumed = syncFlowFromTask({
        taskId: "task-retry",
        parentFlowId: created.flowId,
        status: "running",
        notifyPolicy: "done_only",
        label: "Fix permissions",
        task: "Fix permissions",
        lastEventAt: 260,
        progressSummary: "Retrying with writable session",
      });

      expect(resumed).toMatchObject({
        flowId: created.flowId,
        status: "running",
      });
      expect(resumed?.blockedTaskId).toBeUndefined();
      expect(resumed?.blockedSummary).toBeUndefined();
      expect(resumed?.endedAt).toBeUndefined();
    });
  });

  // Linear flows own their own lifecycle: child-task events must not mutate
  // the flow's status or current step (syncFlowFromTask returns it untouched).
  it("does not auto-sync linear flow state from linked child tasks", async () => {
    await withFlowRegistryTempDir(async (root) => {
      process.env.OPENCLAW_STATE_DIR = root;
      resetFlowRegistryForTests();

      const created = createFlowRecord({
        ownerSessionKey: "agent:main:main",
        goal: "Cluster PRs",
        status: "waiting",
        currentStep: "wait_for",
      });

      const synced = syncFlowFromTask({
        taskId: "task-child",
        parentFlowId: created.flowId,
        status: "running",
        notifyPolicy: "done_only",
        label: "Child task",
        task: "Child task",
        lastEventAt: 250,
        progressSummary: "Running child task",
      });

      expect(synced).toMatchObject({
        flowId: created.flowId,
        shape: "linear",
        status: "waiting",
        currentStep: "wait_for",
      });
      expect(getFlowById(created.flowId)).toMatchObject({
        flowId: created.flowId,
        status: "waiting",
        currentStep: "wait_for",
      });
    });
  });
});
|
||||
@@ -1,349 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
import { getFlowRegistryStore, resetFlowRegistryRuntimeForTests } from "./flow-registry.store.js";
|
||||
import type { FlowOutputBag, FlowRecord, FlowShape, FlowStatus } from "./flow-registry.types.js";
|
||||
import type { TaskNotifyPolicy, TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
// In-memory cache of flow records, keyed by flowId. Hydrated lazily from the
// configured store on first registry access (see ensureFlowRegistryReady).
const flows = new Map<string, FlowRecord>();
// Guards the one-time restore so loadSnapshot() runs at most once per process
// (until resetFlowRegistryForTests flips it back).
let restoreAttempted = false;
|
||||
|
||||
function cloneFlowOutputs(outputs: FlowOutputBag | undefined): FlowOutputBag | undefined {
|
||||
if (!outputs) {
|
||||
return undefined;
|
||||
}
|
||||
return JSON.parse(JSON.stringify(outputs)) as FlowOutputBag;
|
||||
}
|
||||
|
||||
function cloneFlowRecord(record: FlowRecord): FlowRecord {
|
||||
return {
|
||||
...record,
|
||||
...(record.requesterOrigin ? { requesterOrigin: { ...record.requesterOrigin } } : {}),
|
||||
...(record.outputs ? { outputs: cloneFlowOutputs(record.outputs) } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function snapshotFlowRecords(source: ReadonlyMap<string, FlowRecord>): FlowRecord[] {
|
||||
return [...source.values()].map((record) => cloneFlowRecord(record));
|
||||
}
|
||||
|
||||
function ensureNotifyPolicy(notifyPolicy?: TaskNotifyPolicy): TaskNotifyPolicy {
|
||||
return notifyPolicy ?? "done_only";
|
||||
}
|
||||
|
||||
function ensureFlowShape(shape?: FlowShape): FlowShape {
|
||||
return shape ?? "linear";
|
||||
}
|
||||
|
||||
function resolveFlowGoal(task: Pick<TaskRecord, "label" | "task">): string {
|
||||
return task.label?.trim() || task.task.trim() || "Background task";
|
||||
}
|
||||
|
||||
function resolveFlowBlockedSummary(
|
||||
task: Pick<TaskRecord, "status" | "terminalOutcome" | "terminalSummary" | "progressSummary">,
|
||||
): string | undefined {
|
||||
if (task.status !== "succeeded" || task.terminalOutcome !== "blocked") {
|
||||
return undefined;
|
||||
}
|
||||
return task.terminalSummary?.trim() || task.progressSummary?.trim() || undefined;
|
||||
}
|
||||
|
||||
/**
 * Partial update applied by updateFlowRecordById. Three-state semantics for
 * the nullable fields: absent/undefined = keep the current value, null =
 * clear it, a value = replace it.
 */
type FlowRecordPatch = {
  status?: FlowStatus;
  notifyPolicy?: TaskNotifyPolicy;
  goal?: string;
  currentStep?: string | null;
  waitingOnTaskId?: string | null;
  outputs?: FlowOutputBag | null;
  blockedTaskId?: string | null;
  blockedSummary?: string | null;
  // Defaults to Date.now() when omitted (see updateFlowRecordById).
  updatedAt?: number;
  endedAt?: number | null;
};
|
||||
|
||||
export function deriveFlowStatusFromTask(
|
||||
task: Pick<TaskRecord, "status" | "terminalOutcome">,
|
||||
): FlowStatus {
|
||||
if (task.status === "queued") {
|
||||
return "queued";
|
||||
}
|
||||
if (task.status === "running") {
|
||||
return "running";
|
||||
}
|
||||
if (task.status === "succeeded") {
|
||||
return task.terminalOutcome === "blocked" ? "blocked" : "succeeded";
|
||||
}
|
||||
if (task.status === "cancelled") {
|
||||
return "cancelled";
|
||||
}
|
||||
if (task.status === "lost") {
|
||||
return "lost";
|
||||
}
|
||||
return "failed";
|
||||
}
|
||||
|
||||
function ensureFlowRegistryReady() {
|
||||
if (restoreAttempted) {
|
||||
return;
|
||||
}
|
||||
restoreAttempted = true;
|
||||
const restored = getFlowRegistryStore().loadSnapshot();
|
||||
flows.clear();
|
||||
for (const [flowId, flow] of restored.flows) {
|
||||
flows.set(flowId, cloneFlowRecord(flow));
|
||||
}
|
||||
}
|
||||
|
||||
function persistFlowRegistry() {
|
||||
getFlowRegistryStore().saveSnapshot({
|
||||
flows: new Map(snapshotFlowRecords(flows).map((flow) => [flow.flowId, flow])),
|
||||
});
|
||||
}
|
||||
|
||||
function persistFlowUpsert(flow: FlowRecord) {
|
||||
const store = getFlowRegistryStore();
|
||||
if (store.upsertFlow) {
|
||||
store.upsertFlow(cloneFlowRecord(flow));
|
||||
return;
|
||||
}
|
||||
persistFlowRegistry();
|
||||
}
|
||||
|
||||
function persistFlowDelete(flowId: string) {
|
||||
const store = getFlowRegistryStore();
|
||||
if (store.deleteFlow) {
|
||||
store.deleteFlow(flowId);
|
||||
return;
|
||||
}
|
||||
persistFlowRegistry();
|
||||
}
|
||||
|
||||
/**
 * Creates a new flow record, stores it, persists it, and returns a defensive
 * clone. Defaults: shape "linear", status "queued", notifyPolicy "done_only",
 * createdAt/updatedAt now. Blank optional strings are normalized to absent.
 */
export function createFlowRecord(params: {
  shape?: FlowShape;
  ownerSessionKey: string;
  requesterOrigin?: FlowRecord["requesterOrigin"];
  status?: FlowStatus;
  notifyPolicy?: TaskNotifyPolicy;
  goal: string;
  currentStep?: string;
  waitingOnTaskId?: string;
  outputs?: FlowOutputBag;
  blockedTaskId?: string;
  blockedSummary?: string;
  createdAt?: number;
  updatedAt?: number;
  endedAt?: number;
}): FlowRecord {
  ensureFlowRegistryReady();
  // An explicit createdAt also becomes the updatedAt default, so backfilled
  // records don't get stamped with wall-clock time.
  const now = params.createdAt ?? Date.now();
  const record: FlowRecord = {
    flowId: crypto.randomUUID(),
    shape: ensureFlowShape(params.shape),
    ownerSessionKey: params.ownerSessionKey,
    // requesterOrigin is shallow-copied so the caller's object isn't aliased.
    ...(params.requesterOrigin ? { requesterOrigin: { ...params.requesterOrigin } } : {}),
    status: params.status ?? "queued",
    notifyPolicy: ensureNotifyPolicy(params.notifyPolicy),
    goal: params.goal,
    // Blank/whitespace-only strings collapse to undefined (field absent).
    currentStep: params.currentStep?.trim() || undefined,
    waitingOnTaskId: params.waitingOnTaskId?.trim() || undefined,
    outputs: cloneFlowOutputs(params.outputs),
    blockedTaskId: params.blockedTaskId?.trim() || undefined,
    blockedSummary: params.blockedSummary?.trim() || undefined,
    createdAt: now,
    updatedAt: params.updatedAt ?? now,
    ...(params.endedAt !== undefined ? { endedAt: params.endedAt } : {}),
  };
  flows.set(record.flowId, record);
  persistFlowUpsert(record);
  // Return a clone so callers can't mutate the cached record.
  return cloneFlowRecord(record);
}
|
||||
|
||||
/**
 * Creates a single_task flow that mirrors an existing task record: the flow's
 * status is derived from the task's status/terminal outcome, blocked metadata
 * is carried over, and timestamps are inherited from the task rather than
 * stamped with the current time.
 */
export function createFlowForTask(params: {
  task: Pick<
    TaskRecord,
    | "requesterSessionKey"
    | "taskId"
    | "notifyPolicy"
    | "status"
    | "terminalOutcome"
    | "label"
    | "task"
    | "createdAt"
    | "lastEventAt"
    | "endedAt"
    | "terminalSummary"
    | "progressSummary"
  >;
  requesterOrigin?: FlowRecord["requesterOrigin"];
}): FlowRecord {
  const terminalFlowStatus = deriveFlowStatusFromTask(params.task);
  // All flow statuses except queued/running/waiting are terminal.
  const isTerminal =
    terminalFlowStatus === "succeeded" ||
    terminalFlowStatus === "blocked" ||
    terminalFlowStatus === "failed" ||
    terminalFlowStatus === "cancelled" ||
    terminalFlowStatus === "lost";
  // Terminal flows inherit the best-available end timestamp from the task.
  const endedAt = isTerminal
    ? (params.task.endedAt ?? params.task.lastEventAt ?? params.task.createdAt)
    : undefined;
  return createFlowRecord({
    shape: "single_task",
    ownerSessionKey: params.task.requesterSessionKey,
    requesterOrigin: params.requesterOrigin,
    status: terminalFlowStatus,
    notifyPolicy: params.task.notifyPolicy,
    goal: resolveFlowGoal(params.task),
    // Blocked metadata only applies when the derived status is "blocked".
    blockedTaskId:
      terminalFlowStatus === "blocked" ? params.task.taskId.trim() || undefined : undefined,
    blockedSummary: resolveFlowBlockedSummary(params.task),
    createdAt: params.task.createdAt,
    updatedAt: params.task.lastEventAt ?? params.task.createdAt,
    ...(endedAt !== undefined ? { endedAt } : {}),
  });
}
|
||||
|
||||
/**
 * Applies a FlowRecordPatch to an existing flow and persists the result.
 * Returns the updated record (as a clone), or null if the flow is unknown.
 *
 * Patch semantics per nullable field: undefined = keep the current value,
 * null = clear it, a string = replace (blank strings clear). Falsy status/
 * notifyPolicy/goal values are ignored rather than applied.
 */
export function updateFlowRecordById(flowId: string, patch: FlowRecordPatch): FlowRecord | null {
  ensureFlowRegistryReady();
  const current = flows.get(flowId);
  if (!current) {
    return null;
  }
  const next: FlowRecord = {
    ...current,
    ...(patch.status ? { status: patch.status } : {}),
    ...(patch.notifyPolicy ? { notifyPolicy: patch.notifyPolicy } : {}),
    ...(patch.goal ? { goal: patch.goal } : {}),
    currentStep:
      patch.currentStep === undefined
        ? current.currentStep
        : patch.currentStep?.trim() || undefined,
    waitingOnTaskId:
      patch.waitingOnTaskId === undefined
        ? current.waitingOnTaskId
        : patch.waitingOnTaskId?.trim() || undefined,
    // Outputs are always re-cloned so the stored record never aliases either
    // the previous record's bag or the caller's patch object.
    outputs:
      patch.outputs === undefined
        ? cloneFlowOutputs(current.outputs)
        : (cloneFlowOutputs(patch.outputs ?? undefined) ?? undefined),
    blockedTaskId:
      patch.blockedTaskId === undefined
        ? current.blockedTaskId
        : patch.blockedTaskId?.trim() || undefined,
    blockedSummary:
      patch.blockedSummary === undefined
        ? current.blockedSummary
        : patch.blockedSummary?.trim() || undefined,
    // Omitted updatedAt stamps the wall clock.
    updatedAt: patch.updatedAt ?? Date.now(),
    // endedAt: undefined keeps, null clears, number replaces.
    endedAt: patch.endedAt === undefined ? current.endedAt : (patch.endedAt ?? undefined),
  };
  flows.set(flowId, next);
  persistFlowUpsert(next);
  return cloneFlowRecord(next);
}
|
||||
|
||||
/**
 * Mirrors a task's state onto its parent flow, but only for single_task
 * flows — linear flows drive their own lifecycle and are returned unchanged.
 * Returns the (possibly updated) flow, or null when the task has no resolvable
 * parent flow.
 *
 * Blocked metadata and endedAt are cleared (null) whenever the task is not in
 * a blocked/terminal state, so a retry task "un-blocks" the flow.
 */
export function syncFlowFromTask(
  task: Pick<
    TaskRecord,
    | "parentFlowId"
    | "status"
    | "terminalOutcome"
    | "notifyPolicy"
    | "label"
    | "task"
    | "lastEventAt"
    | "endedAt"
    | "taskId"
    | "terminalSummary"
    | "progressSummary"
  >,
): FlowRecord | null {
  const flowId = task.parentFlowId?.trim();
  if (!flowId) {
    return null;
  }
  const flow = getFlowById(flowId);
  if (!flow) {
    return null;
  }
  if (flow.shape !== "single_task") {
    // Linear flows are not auto-synced from child tasks.
    return flow;
  }
  const terminalFlowStatus = deriveFlowStatusFromTask(task);
  // Same terminal-status set as createFlowForTask.
  const isTerminal =
    terminalFlowStatus === "succeeded" ||
    terminalFlowStatus === "blocked" ||
    terminalFlowStatus === "failed" ||
    terminalFlowStatus === "cancelled" ||
    terminalFlowStatus === "lost";
  return updateFlowRecordById(flowId, {
    status: terminalFlowStatus,
    notifyPolicy: task.notifyPolicy,
    goal: resolveFlowGoal(task),
    // null explicitly clears stale blocked metadata on non-blocked updates.
    blockedTaskId: terminalFlowStatus === "blocked" ? task.taskId.trim() || null : null,
    blockedSummary:
      terminalFlowStatus === "blocked" ? (resolveFlowBlockedSummary(task) ?? null) : null,
    updatedAt: task.lastEventAt ?? Date.now(),
    ...(isTerminal
      ? {
          endedAt: task.endedAt ?? task.lastEventAt ?? Date.now(),
        }
      : { endedAt: null }),
  });
}
|
||||
|
||||
export function getFlowById(flowId: string): FlowRecord | undefined {
|
||||
ensureFlowRegistryReady();
|
||||
const flow = flows.get(flowId);
|
||||
return flow ? cloneFlowRecord(flow) : undefined;
|
||||
}
|
||||
|
||||
export function listFlowsForOwnerSessionKey(sessionKey: string): FlowRecord[] {
|
||||
ensureFlowRegistryReady();
|
||||
const normalizedSessionKey = sessionKey.trim();
|
||||
if (!normalizedSessionKey) {
|
||||
return [];
|
||||
}
|
||||
return [...flows.values()]
|
||||
.filter((flow) => flow.ownerSessionKey.trim() === normalizedSessionKey)
|
||||
.map((flow) => cloneFlowRecord(flow))
|
||||
.toSorted((left, right) => right.createdAt - left.createdAt);
|
||||
}
|
||||
|
||||
export function findLatestFlowForOwnerSessionKey(sessionKey: string): FlowRecord | undefined {
|
||||
const flow = listFlowsForOwnerSessionKey(sessionKey)[0];
|
||||
return flow ? cloneFlowRecord(flow) : undefined;
|
||||
}
|
||||
|
||||
export function resolveFlowForLookupToken(token: string): FlowRecord | undefined {
|
||||
const lookup = token.trim();
|
||||
if (!lookup) {
|
||||
return undefined;
|
||||
}
|
||||
return getFlowById(lookup) ?? findLatestFlowForOwnerSessionKey(lookup);
|
||||
}
|
||||
|
||||
export function listFlowRecords(): FlowRecord[] {
|
||||
ensureFlowRegistryReady();
|
||||
return [...flows.values()]
|
||||
.map((flow) => cloneFlowRecord(flow))
|
||||
.toSorted((left, right) => right.createdAt - left.createdAt);
|
||||
}
|
||||
|
||||
export function deleteFlowRecordById(flowId: string): boolean {
|
||||
ensureFlowRegistryReady();
|
||||
const current = flows.get(flowId);
|
||||
if (!current) {
|
||||
return false;
|
||||
}
|
||||
flows.delete(flowId);
|
||||
persistFlowDelete(flowId);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * Test-only helper: wipe in-memory flow state and reset the runtime wiring.
 * By default the (now-empty) registry is flushed and the backing store closed;
 * pass { persist: false } to skip that, e.g. when a test wants to verify that
 * state is restored from the on-disk store on next access.
 */
export function resetFlowRegistryForTests(opts?: { persist?: boolean }) {
  flows.clear();
  restoreAttempted = false;
  resetFlowRegistryRuntimeForTests();
  if (opts?.persist !== false) {
    persistFlowRegistry();
    // Close the store so the next registry access re-opens it cleanly.
    getFlowRegistryStore().close?.();
  }
}
|
||||
@@ -1,42 +0,0 @@
|
||||
import type { DeliveryContext } from "../utils/delivery-context.js";
|
||||
import type { TaskNotifyPolicy } from "./task-registry.types.js";
|
||||
|
||||
/** Flow topology: a single wrapped task, or an ordered ("linear") sequence of steps. */
export type FlowShape = "single_task" | "linear";

/** JSON-serializable value stored in a flow's output bag. */
export type FlowOutputValue =
  | null
  | boolean
  | number
  | string
  | FlowOutputValue[]
  | { [key: string]: FlowOutputValue };

/** Named outputs accumulated by a flow across its steps. */
export type FlowOutputBag = Record<string, FlowOutputValue>;

/**
 * Flow lifecycle states. "waiting" means waiting on a child task;
 * "succeeded" / "failed" / "cancelled" / "lost" (and usually "blocked")
 * are treated as terminal by the registry's status-derivation logic.
 */
export type FlowStatus =
  | "queued"
  | "running"
  | "waiting"
  | "blocked"
  | "succeeded"
  | "failed"
  | "cancelled"
  | "lost";

/** Persisted state of one flow. */
export type FlowRecord = {
  flowId: string;
  shape: FlowShape;
  // Session that owns the flow and receives its updates.
  ownerSessionKey: string;
  // Delivery context used for direct channel delivery of flow updates, when set.
  requesterOrigin?: DeliveryContext;
  status: FlowStatus;
  // Default notify policy inherited by tasks launched under this flow.
  notifyPolicy: TaskNotifyPolicy;
  goal: string;
  currentStep?: string;
  // Task id the flow is currently waiting on (status "waiting").
  waitingOnTaskId?: string;
  outputs?: FlowOutputBag;
  // Populated while the flow is blocked on a task.
  blockedTaskId?: string;
  blockedSummary?: string;
  // Timestamps in epoch milliseconds (Date.now()).
  createdAt: number;
  updatedAt: number;
  endedAt?: number;
};
|
||||
@@ -1,281 +0,0 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import { getFlowById, resetFlowRegistryForTests, updateFlowRecordById } from "./flow-registry.js";
|
||||
import {
|
||||
appendFlowOutput,
|
||||
createFlow,
|
||||
emitFlowUpdate,
|
||||
failFlow,
|
||||
finishFlow,
|
||||
resumeFlow,
|
||||
runTaskInFlow,
|
||||
setFlowOutput,
|
||||
} from "./flow-runtime.js";
|
||||
import { listTasksForFlowId, resetTaskRegistryForTests } from "./task-registry.js";
|
||||
|
||||
// Remember the caller's state dir so afterEach can restore it.
const ORIGINAL_STATE_DIR = process.env.OPENCLAW_STATE_DIR;

// Hoisted so the vi.mock factories below may reference them.
const mocks = vi.hoisted(() => ({
  sendMessageMock: vi.fn(),
  enqueueSystemEventMock: vi.fn(),
  requestHeartbeatNowMock: vi.fn(),
}));

// Capture outbound deliveries instead of sending real messages.
vi.mock("./task-registry-delivery-runtime.js", () => ({
  sendMessage: (...args: unknown[]) => mocks.sendMessageMock(...args),
}));

// Capture session-queued system events.
vi.mock("../infra/system-events.js", () => ({
  enqueueSystemEvent: (...args: unknown[]) => mocks.enqueueSystemEventMock(...args),
}));

// Capture heartbeat wake requests.
vi.mock("../infra/heartbeat-wake.js", () => ({
  requestHeartbeatNow: (...args: unknown[]) => mocks.requestHeartbeatNowMock(...args),
}));

// No-op agent-event subscription (returns an unsubscribe function).
vi.mock("../infra/agent-events.js", () => ({
  onAgentEvent: () => () => {},
}));

// Stub ACP session manager so cancellation paths don't touch a real control plane.
vi.mock("../acp/control-plane/manager.js", () => ({
  getAcpSessionManager: () => ({
    cancelSession: vi.fn(),
  }),
}));

// Stub subagent admin kill.
vi.mock("../agents/subagent-control.js", () => ({
  killSubagentRunAdmin: vi.fn(),
}));
||||
|
||||
/**
 * Run a test body against a throwaway OPENCLAW_STATE_DIR, with both the task
 * and flow registries reset before and after so tests don't leak state.
 */
async function withFlowRuntimeStateDir(run: (root: string) => Promise<void>): Promise<void> {
  await withTempDir({ prefix: "openclaw-flow-runtime-" }, async (root) => {
    process.env.OPENCLAW_STATE_DIR = root;
    resetTaskRegistryForTests();
    resetFlowRegistryForTests();
    try {
      await run(root);
    } finally {
      // Always reset, even when the body throws.
      resetTaskRegistryForTests();
      resetFlowRegistryForTests();
    }
  });
}
|
||||
|
||||
describe("flow-runtime", () => {
  afterEach(() => {
    // Restore the caller's state dir exactly (unset vs. set are distinct).
    if (ORIGINAL_STATE_DIR === undefined) {
      delete process.env.OPENCLAW_STATE_DIR;
    } else {
      process.env.OPENCLAW_STATE_DIR = ORIGINAL_STATE_DIR;
    }
    resetTaskRegistryForTests();
    resetFlowRegistryForTests();
    mocks.sendMessageMock.mockReset();
    mocks.enqueueSystemEventMock.mockReset();
    mocks.requestHeartbeatNowMock.mockReset();
  });

  // runTaskInFlow should queue a child task and flip the flow to "waiting".
  it("runs a child task under a linear flow and marks the flow as waiting on it", async () => {
    await withFlowRuntimeStateDir(async () => {
      const flow = createFlow({
        ownerSessionKey: "agent:main:main",
        requesterOrigin: {
          channel: "telegram",
          to: "telegram:123",
        },
        goal: "Triage inbox",
      });

      const started = runTaskInFlow({
        flowId: flow.flowId,
        runtime: "acp",
        childSessionKey: "agent:codex:acp:child",
        runId: "run-flow-runtime-1",
        task: "Classify inbox messages",
        currentStep: "wait_for_classification",
      });

      // Task inherits the flow's owner as requester and links back via parentFlowId.
      expect(started.task).toMatchObject({
        requesterSessionKey: "agent:main:main",
        parentFlowId: flow.flowId,
        childSessionKey: "agent:codex:acp:child",
        runId: "run-flow-runtime-1",
        status: "queued",
      });
      expect(started.flow).toMatchObject({
        flowId: flow.flowId,
        status: "waiting",
        currentStep: "wait_for_classification",
        waitingOnTaskId: started.task.taskId,
      });
      expect(listTasksForFlowId(flow.flowId)).toHaveLength(1);
    });
  });

  // Outputs and waiting metadata must survive a registry drop + sqlite restore.
  it("stores outputs and waiting metadata across sqlite restore", async () => {
    await withFlowRuntimeStateDir(async () => {
      const flow = createFlow({
        ownerSessionKey: "agent:main:main",
        goal: "Inbox routing",
      });

      const started = runTaskInFlow({
        flowId: flow.flowId,
        runtime: "subagent",
        childSessionKey: "agent:codex:subagent:child",
        runId: "run-flow-runtime-restore",
        task: "Bucket messages",
      });

      setFlowOutput({
        flowId: flow.flowId,
        key: "classification",
        value: {
          business: 1,
          personal: 2,
        },
      });
      appendFlowOutput({
        flowId: flow.flowId,
        key: "eod_summary",
        value: {
          subject: "Newsletter",
        },
      });

      // Drop in-memory state without persisting; next read restores from disk.
      resetTaskRegistryForTests({ persist: false });
      resetFlowRegistryForTests({ persist: false });

      expect(getFlowById(flow.flowId)).toMatchObject({
        flowId: flow.flowId,
        status: "waiting",
        waitingOnTaskId: started.task.taskId,
        outputs: {
          classification: {
            business: 1,
            personal: 2,
          },
          // appendFlowOutput wraps the first value in an array.
          eod_summary: [
            {
              subject: "Newsletter",
            },
          ],
        },
      });
    });
  });

  // resume clears blocked/waiting markers; finish/fail stamp terminal state.
  it("reopens a blocked flow with resume and marks terminal states with finish/fail", async () => {
    await withFlowRuntimeStateDir(async () => {
      const flow = createFlow({
        ownerSessionKey: "agent:main:main",
        goal: "Review inbox",
      });
      const started = runTaskInFlow({
        flowId: flow.flowId,
        runtime: "acp",
        childSessionKey: "agent:codex:acp:child",
        runId: "run-flow-runtime-reopen",
        task: "Review inbox",
      });

      updateFlowRecordById(flow.flowId, {
        status: "blocked",
        blockedTaskId: started.task.taskId,
        blockedSummary: "Need auth.",
        endedAt: 120,
      });

      expect(resumeFlow({ flowId: flow.flowId, currentStep: "retry_auth" })).toMatchObject({
        flowId: flow.flowId,
        status: "running",
        currentStep: "retry_auth",
      });
      expect(getFlowById(flow.flowId)?.blockedTaskId).toBeUndefined();
      expect(getFlowById(flow.flowId)?.waitingOnTaskId).toBeUndefined();
      expect(getFlowById(flow.flowId)?.endedAt).toBeUndefined();

      expect(
        finishFlow({ flowId: flow.flowId, currentStep: "finish", endedAt: 200 }),
      ).toMatchObject({
        flowId: flow.flowId,
        status: "succeeded",
        currentStep: "finish",
        endedAt: 200,
      });

      const failed = createFlow({
        ownerSessionKey: "agent:main:main",
        goal: "Failing flow",
      });
      expect(failFlow({ flowId: failed.flowId, currentStep: "abort", endedAt: 300 })).toMatchObject(
        {
          flowId: failed.flowId,
          status: "failed",
          currentStep: "abort",
          endedAt: 300,
        },
      );
    });
  });

  // With a deliverable requesterOrigin, updates go out via sendMessage directly.
  it("delivers explicit flow updates through the flow owner context when possible", async () => {
    await withFlowRuntimeStateDir(async () => {
      const flow = createFlow({
        ownerSessionKey: "agent:main:main",
        requesterOrigin: {
          channel: "telegram",
          to: "telegram:123",
          threadId: "42",
        },
        goal: "Inbox routing",
      });

      const result = await emitFlowUpdate({
        flowId: flow.flowId,
        content: "Personal message needs your attention.",
        eventKey: "personal-alert",
      });

      expect(result.delivery).toBe("direct");
      expect(mocks.sendMessageMock).toHaveBeenCalledWith(
        expect.objectContaining({
          channel: "telegram",
          to: "telegram:123",
          threadId: "42",
          content: "Personal message needs your attention.",
          // eventKey feeds the idempotency key for dedupe.
          idempotencyKey: `flow:${flow.flowId}:update:personal-alert`,
          mirror: expect.objectContaining({
            sessionKey: "agent:main:main",
          }),
        }),
      );
    });
  });

  // Without a deliverable origin, updates queue on the owner session + wake heartbeat.
  it("falls back to session-queued flow updates when direct delivery is unavailable", async () => {
    await withFlowRuntimeStateDir(async () => {
      const flow = createFlow({
        ownerSessionKey: "agent:main:main",
        goal: "Inbox routing",
      });

      const result = await emitFlowUpdate({
        flowId: flow.flowId,
        content: "Business email sent to Slack and waiting for reply.",
      });

      expect(result.delivery).toBe("session_queued");
      expect(mocks.enqueueSystemEventMock).toHaveBeenCalledWith(
        "Business email sent to Slack and waiting for reply.",
        expect.objectContaining({
          sessionKey: "agent:main:main",
          contextKey: `flow:${flow.flowId}`,
        }),
      );
      expect(mocks.requestHeartbeatNowMock).toHaveBeenCalledWith({
        reason: "clawflow-update",
        sessionKey: "agent:main:main",
      });
    });
  });
});
|
||||
@@ -1,377 +0,0 @@
|
||||
import { requestHeartbeatNow } from "../infra/heartbeat-wake.js";
|
||||
import { enqueueSystemEvent } from "../infra/system-events.js";
|
||||
import { parseAgentSessionKey } from "../routing/session-key.js";
|
||||
import { isDeliverableMessageChannel } from "../utils/message-channel.js";
|
||||
import { createFlowRecord, getFlowById, updateFlowRecordById } from "./flow-registry.js";
|
||||
import type { FlowOutputBag, FlowOutputValue, FlowRecord } from "./flow-registry.types.js";
|
||||
import { createQueuedTaskRun, createRunningTaskRun } from "./task-executor.js";
|
||||
import { listTasksForFlowId } from "./task-registry.js";
|
||||
import type {
|
||||
TaskDeliveryStatus,
|
||||
TaskNotifyPolicy,
|
||||
TaskRecord,
|
||||
TaskRuntime,
|
||||
} from "./task-registry.types.js";
|
||||
|
||||
// Cached dynamic import of the delivery runtime; loaded lazily (and only once)
// to avoid a static import cycle with task-registry-delivery-runtime.
let deliveryRuntimePromise: Promise<typeof import("./task-registry-delivery-runtime.js")> | null =
  null;

// Whether a task launched under a flow starts immediately or sits in the queue.
type FlowTaskLaunch = "queued" | "running";

/** How a flow update reached (or failed to reach) the flow owner. */
export type FlowUpdateDelivery = "direct" | "session_queued" | "parent_missing" | "failed";

// Memoized loader for the delivery runtime module.
function loadFlowDeliveryRuntime() {
  deliveryRuntimePromise ??= import("./task-registry-delivery-runtime.js");
  return deliveryRuntimePromise;
}
|
||||
|
||||
function requireFlow(flowId: string): FlowRecord {
|
||||
const flow = getFlowById(flowId);
|
||||
if (!flow) {
|
||||
throw new Error(`Flow not found: ${flowId}`);
|
||||
}
|
||||
return flow;
|
||||
}
|
||||
|
||||
function requireLinearFlow(flowId: string): FlowRecord {
|
||||
const flow = requireFlow(flowId);
|
||||
if (flow.shape !== "linear") {
|
||||
throw new Error(`Flow is not linear: ${flowId}`);
|
||||
}
|
||||
return flow;
|
||||
}
|
||||
|
||||
function cloneOutputValue<T extends FlowOutputValue>(value: T): T {
|
||||
return JSON.parse(JSON.stringify(value)) as T;
|
||||
}
|
||||
|
||||
function updateRequiredFlow(
|
||||
flowId: string,
|
||||
patch: Parameters<typeof updateFlowRecordById>[1],
|
||||
): FlowRecord {
|
||||
const updated = updateFlowRecordById(flowId, patch);
|
||||
if (!updated) {
|
||||
throw new Error(`Flow not found: ${flowId}`);
|
||||
}
|
||||
return updated;
|
||||
}
|
||||
|
||||
function resolveFlowOutputs(flow: FlowRecord): FlowOutputBag {
|
||||
return flow.outputs ? cloneOutputValue(flow.outputs) : {};
|
||||
}
|
||||
|
||||
function canDeliverFlowToRequesterOrigin(flow: FlowRecord): boolean {
|
||||
const channel = flow.requesterOrigin?.channel?.trim();
|
||||
const to = flow.requesterOrigin?.to?.trim();
|
||||
return Boolean(channel && to && isDeliverableMessageChannel(channel));
|
||||
}
|
||||
|
||||
export function createFlow(params: {
|
||||
ownerSessionKey: string;
|
||||
requesterOrigin?: FlowRecord["requesterOrigin"];
|
||||
goal: string;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
currentStep?: string;
|
||||
createdAt?: number;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
return createFlowRecord({
|
||||
shape: "linear",
|
||||
ownerSessionKey: params.ownerSessionKey,
|
||||
requesterOrigin: params.requesterOrigin,
|
||||
goal: params.goal,
|
||||
notifyPolicy: params.notifyPolicy,
|
||||
currentStep: params.currentStep,
|
||||
status: "queued",
|
||||
createdAt: params.createdAt,
|
||||
updatedAt: params.updatedAt,
|
||||
});
|
||||
}
|
||||
|
||||
export function runTaskInFlow(params: {
|
||||
flowId: string;
|
||||
runtime: TaskRuntime;
|
||||
sourceId?: string;
|
||||
childSessionKey?: string;
|
||||
parentTaskId?: string;
|
||||
agentId?: string;
|
||||
runId?: string;
|
||||
label?: string;
|
||||
task: string;
|
||||
preferMetadata?: boolean;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
deliveryStatus?: TaskDeliveryStatus;
|
||||
launch?: FlowTaskLaunch;
|
||||
startedAt?: number;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
currentStep?: string;
|
||||
}): { flow: FlowRecord; task: TaskRecord } {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
const launch = params.launch ?? "queued";
|
||||
const task =
|
||||
launch === "running"
|
||||
? createRunningTaskRun({
|
||||
runtime: params.runtime,
|
||||
sourceId: params.sourceId,
|
||||
requesterSessionKey: flow.ownerSessionKey,
|
||||
requesterOrigin: flow.requesterOrigin,
|
||||
parentFlowId: flow.flowId,
|
||||
childSessionKey: params.childSessionKey,
|
||||
parentTaskId: params.parentTaskId,
|
||||
agentId: params.agentId,
|
||||
runId: params.runId,
|
||||
label: params.label,
|
||||
task: params.task,
|
||||
preferMetadata: params.preferMetadata,
|
||||
notifyPolicy: params.notifyPolicy ?? flow.notifyPolicy,
|
||||
deliveryStatus: params.deliveryStatus,
|
||||
startedAt: params.startedAt,
|
||||
lastEventAt: params.lastEventAt,
|
||||
progressSummary: params.progressSummary,
|
||||
})
|
||||
: createQueuedTaskRun({
|
||||
runtime: params.runtime,
|
||||
sourceId: params.sourceId,
|
||||
requesterSessionKey: flow.ownerSessionKey,
|
||||
requesterOrigin: flow.requesterOrigin,
|
||||
parentFlowId: flow.flowId,
|
||||
childSessionKey: params.childSessionKey,
|
||||
parentTaskId: params.parentTaskId,
|
||||
agentId: params.agentId,
|
||||
runId: params.runId,
|
||||
label: params.label,
|
||||
task: params.task,
|
||||
preferMetadata: params.preferMetadata,
|
||||
notifyPolicy: params.notifyPolicy ?? flow.notifyPolicy,
|
||||
deliveryStatus: params.deliveryStatus,
|
||||
});
|
||||
return {
|
||||
task,
|
||||
flow: updateRequiredFlow(flow.flowId, {
|
||||
status: "waiting",
|
||||
currentStep: params.currentStep ?? flow.currentStep ?? "wait_for_task",
|
||||
waitingOnTaskId: task.taskId,
|
||||
blockedTaskId: null,
|
||||
blockedSummary: null,
|
||||
endedAt: null,
|
||||
updatedAt: task.lastEventAt ?? task.startedAt ?? Date.now(),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export function setFlowWaiting(params: {
|
||||
flowId: string;
|
||||
currentStep?: string | null;
|
||||
waitingOnTaskId?: string | null;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
if (params.waitingOnTaskId?.trim()) {
|
||||
const waitingOnTaskId = params.waitingOnTaskId.trim();
|
||||
const linkedTaskIds = new Set(listTasksForFlowId(flow.flowId).map((task) => task.taskId));
|
||||
if (!linkedTaskIds.has(waitingOnTaskId)) {
|
||||
throw new Error(`Flow ${flow.flowId} is not linked to task ${waitingOnTaskId}`);
|
||||
}
|
||||
}
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
status: "waiting",
|
||||
currentStep: params.currentStep,
|
||||
waitingOnTaskId: params.waitingOnTaskId,
|
||||
endedAt: null,
|
||||
updatedAt: params.updatedAt ?? Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
export function setFlowOutput(params: {
|
||||
flowId: string;
|
||||
key: string;
|
||||
value: FlowOutputValue;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
const key = params.key.trim();
|
||||
if (!key) {
|
||||
throw new Error("Flow output key is required.");
|
||||
}
|
||||
const outputs = resolveFlowOutputs(flow);
|
||||
outputs[key] = cloneOutputValue(params.value);
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
outputs,
|
||||
updatedAt: params.updatedAt ?? Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
export function appendFlowOutput(params: {
|
||||
flowId: string;
|
||||
key: string;
|
||||
value: FlowOutputValue;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
const key = params.key.trim();
|
||||
if (!key) {
|
||||
throw new Error("Flow output key is required.");
|
||||
}
|
||||
const outputs = resolveFlowOutputs(flow);
|
||||
const nextValue = cloneOutputValue(params.value);
|
||||
const current = outputs[key];
|
||||
if (current === undefined) {
|
||||
outputs[key] = [nextValue];
|
||||
} else if (Array.isArray(current)) {
|
||||
outputs[key] = [...current, nextValue];
|
||||
} else {
|
||||
throw new Error(`Flow output ${key} is not an array.`);
|
||||
}
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
outputs,
|
||||
updatedAt: params.updatedAt ?? Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
export function resumeFlow(params: {
|
||||
flowId: string;
|
||||
currentStep?: string | null;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
status: "running",
|
||||
currentStep: params.currentStep,
|
||||
waitingOnTaskId: null,
|
||||
blockedTaskId: null,
|
||||
blockedSummary: null,
|
||||
endedAt: null,
|
||||
updatedAt: params.updatedAt ?? Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
export function finishFlow(params: {
|
||||
flowId: string;
|
||||
currentStep?: string | null;
|
||||
updatedAt?: number;
|
||||
endedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
const endedAt = params.endedAt ?? params.updatedAt ?? Date.now();
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
status: "succeeded",
|
||||
currentStep: params.currentStep,
|
||||
waitingOnTaskId: null,
|
||||
blockedTaskId: null,
|
||||
blockedSummary: null,
|
||||
updatedAt: params.updatedAt ?? endedAt,
|
||||
endedAt,
|
||||
});
|
||||
}
|
||||
|
||||
export function failFlow(params: {
|
||||
flowId: string;
|
||||
currentStep?: string | null;
|
||||
updatedAt?: number;
|
||||
endedAt?: number;
|
||||
}): FlowRecord {
|
||||
const flow = requireLinearFlow(params.flowId);
|
||||
const endedAt = params.endedAt ?? params.updatedAt ?? Date.now();
|
||||
return updateRequiredFlow(flow.flowId, {
|
||||
status: "failed",
|
||||
currentStep: params.currentStep,
|
||||
waitingOnTaskId: null,
|
||||
blockedTaskId: null,
|
||||
blockedSummary: null,
|
||||
updatedAt: params.updatedAt ?? endedAt,
|
||||
endedAt,
|
||||
});
|
||||
}
|
||||
|
||||
export async function emitFlowUpdate(params: {
|
||||
flowId: string;
|
||||
content: string;
|
||||
eventKey?: string;
|
||||
currentStep?: string | null;
|
||||
updatedAt?: number;
|
||||
}): Promise<{ flow: FlowRecord; delivery: FlowUpdateDelivery }> {
|
||||
const flow = requireFlow(params.flowId);
|
||||
const content = params.content.trim();
|
||||
if (!content) {
|
||||
throw new Error("Flow update content is required.");
|
||||
}
|
||||
const ownerSessionKey = flow.ownerSessionKey.trim();
|
||||
const updatedAt = params.updatedAt ?? Date.now();
|
||||
const updatedFlow = updateRequiredFlow(flow.flowId, {
|
||||
currentStep: params.currentStep,
|
||||
updatedAt,
|
||||
});
|
||||
if (!ownerSessionKey) {
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "parent_missing",
|
||||
};
|
||||
}
|
||||
if (!canDeliverFlowToRequesterOrigin(updatedFlow)) {
|
||||
try {
|
||||
enqueueSystemEvent(content, {
|
||||
sessionKey: ownerSessionKey,
|
||||
contextKey: `flow:${updatedFlow.flowId}`,
|
||||
deliveryContext: updatedFlow.requesterOrigin,
|
||||
});
|
||||
requestHeartbeatNow({
|
||||
reason: "clawflow-update",
|
||||
sessionKey: ownerSessionKey,
|
||||
});
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "session_queued",
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "failed",
|
||||
};
|
||||
}
|
||||
}
|
||||
try {
|
||||
const requesterAgentId = parseAgentSessionKey(ownerSessionKey)?.agentId;
|
||||
const idempotencyKey = `flow:${updatedFlow.flowId}:update:${params.eventKey?.trim() || updatedAt}`;
|
||||
const { sendMessage } = await loadFlowDeliveryRuntime();
|
||||
await sendMessage({
|
||||
channel: updatedFlow.requesterOrigin?.channel,
|
||||
to: updatedFlow.requesterOrigin?.to ?? "",
|
||||
accountId: updatedFlow.requesterOrigin?.accountId,
|
||||
threadId: updatedFlow.requesterOrigin?.threadId,
|
||||
content,
|
||||
agentId: requesterAgentId,
|
||||
idempotencyKey,
|
||||
mirror: {
|
||||
sessionKey: ownerSessionKey,
|
||||
agentId: requesterAgentId,
|
||||
idempotencyKey,
|
||||
},
|
||||
});
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "direct",
|
||||
};
|
||||
} catch {
|
||||
try {
|
||||
enqueueSystemEvent(content, {
|
||||
sessionKey: ownerSessionKey,
|
||||
contextKey: `flow:${updatedFlow.flowId}`,
|
||||
deliveryContext: updatedFlow.requesterOrigin,
|
||||
});
|
||||
requestHeartbeatNow({
|
||||
reason: "clawflow-update",
|
||||
sessionKey: ownerSessionKey,
|
||||
});
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "session_queued",
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
flow: updatedFlow,
|
||||
delivery: "failed",
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,183 +0,0 @@
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import { defaultTaskOperationsRuntime } from "./operations-runtime.js";
|
||||
import { findTaskByRunId, resetTaskRegistryForTests } from "./task-registry.js";
|
||||
|
||||
// Remember the caller's state dir so afterEach can restore it.
const ORIGINAL_STATE_DIR = process.env.OPENCLAW_STATE_DIR;

/**
 * Run a test body against a throwaway OPENCLAW_STATE_DIR, resetting the task
 * registry before and after so tests don't leak state.
 */
async function withTaskStateDir(run: () => Promise<void>): Promise<void> {
  await withTempDir({ prefix: "openclaw-task-operations-" }, async (root) => {
    process.env.OPENCLAW_STATE_DIR = root;
    resetTaskRegistryForTests();
    try {
      await run();
    } finally {
      // Always reset, even when the body throws.
      resetTaskRegistryForTests();
    }
  });
}
|
||||
|
||||
describe("task operations runtime", () => {
  afterEach(() => {
    // Restore the caller's state dir exactly (unset vs. set are distinct).
    if (ORIGINAL_STATE_DIR === undefined) {
      delete process.env.OPENCLAW_STATE_DIR;
    } else {
      process.env.OPENCLAW_STATE_DIR = ORIGINAL_STATE_DIR;
    }
    resetTaskRegistryForTests();
  });

  // dispatch(create) + dispatch(transition) should drive a task queued -> running -> succeeded.
  it("creates and transitions task records through the generic operations runtime", async () => {
    await withTaskStateDir(async () => {
      const created = await defaultTaskOperationsRuntime.dispatch({
        type: "create",
        namespace: "tasks",
        kind: "cli",
        status: "queued",
        requesterSessionKey: "agent:test:main",
        childSessionKey: "agent:test:child",
        runId: "run-ops-create",
        title: "Task title",
        description: "Do the thing",
      });

      expect(created.matched).toBe(true);
      expect(created.created).toBe(true);
      expect(created.record).toMatchObject({
        namespace: "tasks",
        kind: "cli",
        status: "queued",
        title: "Task title",
        description: "Do the thing",
        runId: "run-ops-create",
      });

      // Transitions are addressed by runId.
      const progressed = await defaultTaskOperationsRuntime.dispatch({
        type: "transition",
        runId: "run-ops-create",
        status: "running",
        at: 100,
        startedAt: 100,
        progressSummary: "Started work",
      });

      expect(progressed.record).toMatchObject({
        status: "running",
        progressSummary: "Started work",
      });

      const completed = await defaultTaskOperationsRuntime.dispatch({
        type: "transition",
        runId: "run-ops-create",
        status: "succeeded",
        at: 200,
        endedAt: 200,
        terminalSummary: "All done",
      });

      expect(completed.record).toMatchObject({
        status: "succeeded",
        terminalSummary: "All done",
      });
      // The underlying task registry must reflect the terminal state too.
      expect(findTaskByRunId("run-ops-create")).toMatchObject({
        status: "succeeded",
        terminalSummary: "All done",
      });
    });
  });

  // list() and summarize() should aggregate over the task-backed namespace.
  it("lists and summarizes task-backed operations", async () => {
    await withTaskStateDir(async () => {
      await defaultTaskOperationsRuntime.dispatch({
        type: "create",
        namespace: "tasks",
        kind: "acp",
        status: "running",
        requesterSessionKey: "agent:test:main",
        runId: "run-ops-list-1",
        description: "One",
        startedAt: 10,
      });
      await defaultTaskOperationsRuntime.dispatch({
        type: "create",
        namespace: "tasks",
        kind: "cron",
        status: "failed",
        requesterSessionKey: "agent:test:main",
        runId: "run-ops-list-2",
        description: "Two",
        endedAt: 20,
        terminalSummary: "Failed",
      });

      const listed = await defaultTaskOperationsRuntime.list({
        namespace: "tasks",
      });
      const summary = await defaultTaskOperationsRuntime.summarize({
        namespace: "tasks",
      });

      expect(listed).toHaveLength(2);
      // One active (running) + one terminal (failed), bucketed three ways.
      expect(summary).toEqual({
        total: 2,
        active: 1,
        terminal: 1,
        failures: 1,
        byNamespace: { tasks: 2 },
        byKind: { acp: 1, cron: 1 },
        byStatus: { failed: 1, running: 1 },
      });
    });
  });

  // patch updates metadata; audit flags stale running tasks; maintenance previews cleanup.
  it("patches notify policy and exposes audit plus maintenance", async () => {
    await withTaskStateDir(async () => {
      const created = await defaultTaskOperationsRuntime.dispatch({
        type: "create",
        namespace: "tasks",
        kind: "cli",
        status: "running",
        requesterSessionKey: "agent:test:main",
        runId: "run-ops-patch",
        description: "Patch me",
        // Started >30 minutes ago so the audit flags it as stale_running.
        startedAt: Date.now() - 31 * 60_000,
      });

      expect(created.record?.metadata?.notifyPolicy).toBe("done_only");

      const findings = await defaultTaskOperationsRuntime.audit({
        namespace: "tasks",
        severity: "error",
        code: "stale_running",
      });

      const patched = await defaultTaskOperationsRuntime.dispatch({
        type: "patch",
        operationId: created.record?.operationId,
        metadataPatch: {
          notifyPolicy: "silent",
        },
      });

      expect(patched.record?.metadata?.notifyPolicy).toBe("silent");

      const preview = await defaultTaskOperationsRuntime.maintenance({
        namespace: "tasks",
      });

      expect(findings).toHaveLength(1);
      expect(findings[0]).toMatchObject({
        severity: "error",
        code: "stale_running",
        operation: {
          operationId: created.record?.operationId,
        },
      });
      // Nothing eligible for cleanup yet — maintenance preview is all zeros.
      expect(preview).toEqual({
        reconciled: 0,
        cleanupStamped: 0,
        pruned: 0,
      });
    });
  });
});
|
||||
@@ -1,389 +0,0 @@
|
||||
import type {
|
||||
PluginOperationAuditFinding,
|
||||
PluginOperationAuditQuery,
|
||||
PluginOperationDispatchEvent,
|
||||
PluginOperationDispatchResult,
|
||||
PluginOperationListQuery,
|
||||
PluginOperationMaintenanceQuery,
|
||||
PluginOperationMaintenanceSummary,
|
||||
PluginOperationRecord,
|
||||
PluginOperationSummary,
|
||||
PluginOperationsCancelResult,
|
||||
PluginOperationsRuntime,
|
||||
} from "../plugins/operations-state.js";
|
||||
import { summarizeOperationRecords } from "../plugins/operations-state.js";
|
||||
import {
|
||||
listTaskAuditFindings,
|
||||
type TaskAuditFinding,
|
||||
type TaskAuditSeverity,
|
||||
} from "./task-registry.audit.js";
|
||||
import {
|
||||
cancelTaskById,
|
||||
createTaskRecord,
|
||||
findTaskByRunId,
|
||||
getTaskById,
|
||||
listTaskRecords,
|
||||
listTasksForSessionKey,
|
||||
markTaskLostById,
|
||||
markTaskRunningByRunId,
|
||||
markTaskTerminalByRunId,
|
||||
recordTaskProgressByRunId,
|
||||
updateTaskNotifyPolicyById,
|
||||
} from "./task-registry.js";
|
||||
import {
|
||||
previewTaskRegistryMaintenance,
|
||||
runTaskRegistryMaintenance,
|
||||
} from "./task-registry.maintenance.js";
|
||||
import type {
|
||||
TaskRecord,
|
||||
TaskRuntime,
|
||||
TaskStatus,
|
||||
TaskTerminalOutcome,
|
||||
} from "./task-registry.types.js";
|
||||
|
||||
const TASK_NAMESPACE = "tasks";
|
||||
|
||||
function isTaskNamespace(namespace: string | undefined): boolean {
|
||||
const trimmed = namespace?.trim().toLowerCase();
|
||||
return !trimmed || trimmed === "task" || trimmed === TASK_NAMESPACE;
|
||||
}
|
||||
|
||||
function normalizeTaskRuntime(kind: string): TaskRuntime {
|
||||
const trimmed = kind.trim();
|
||||
if (trimmed === "acp" || trimmed === "subagent" || trimmed === "cli" || trimmed === "cron") {
|
||||
return trimmed;
|
||||
}
|
||||
throw new Error(`Unsupported task operation kind: ${kind}`);
|
||||
}
|
||||
|
||||
function normalizeTaskStatus(status: string | undefined): TaskStatus {
|
||||
const trimmed = status?.trim();
|
||||
if (
|
||||
trimmed === "queued" ||
|
||||
trimmed === "running" ||
|
||||
trimmed === "succeeded" ||
|
||||
trimmed === "failed" ||
|
||||
trimmed === "timed_out" ||
|
||||
trimmed === "cancelled" ||
|
||||
trimmed === "lost"
|
||||
) {
|
||||
return trimmed;
|
||||
}
|
||||
return "queued";
|
||||
}
|
||||
|
||||
function normalizeTaskTerminalOutcome(status: TaskStatus): TaskTerminalOutcome | undefined {
|
||||
return status === "succeeded" ? "succeeded" : undefined;
|
||||
}
|
||||
|
||||
function toOperationRecord(task: TaskRecord): PluginOperationRecord {
|
||||
const metadata: Record<string, unknown> = {
|
||||
deliveryStatus: task.deliveryStatus,
|
||||
notifyPolicy: task.notifyPolicy,
|
||||
};
|
||||
if (typeof task.cleanupAfter === "number") {
|
||||
metadata.cleanupAfter = task.cleanupAfter;
|
||||
}
|
||||
if (task.terminalOutcome) {
|
||||
metadata.terminalOutcome = task.terminalOutcome;
|
||||
}
|
||||
return {
|
||||
operationId: task.taskId,
|
||||
namespace: TASK_NAMESPACE,
|
||||
kind: task.runtime,
|
||||
status: task.status,
|
||||
sourceId: task.sourceId,
|
||||
requesterSessionKey: task.requesterSessionKey,
|
||||
childSessionKey: task.childSessionKey,
|
||||
parentOperationId: task.parentTaskId,
|
||||
agentId: task.agentId,
|
||||
runId: task.runId,
|
||||
title: task.label,
|
||||
description: task.task,
|
||||
createdAt: task.createdAt,
|
||||
startedAt: task.startedAt,
|
||||
endedAt: task.endedAt,
|
||||
updatedAt: task.lastEventAt ?? task.endedAt ?? task.startedAt ?? task.createdAt,
|
||||
error: task.error,
|
||||
progressSummary: task.progressSummary,
|
||||
terminalSummary: task.terminalSummary,
|
||||
metadata,
|
||||
};
|
||||
}
|
||||
|
||||
function resolveTaskRecordForTransition(event: {
|
||||
operationId?: string;
|
||||
runId?: string;
|
||||
}): TaskRecord | undefined {
|
||||
const operationId = event.operationId?.trim();
|
||||
if (operationId) {
|
||||
return getTaskById(operationId);
|
||||
}
|
||||
const runId = event.runId?.trim();
|
||||
if (runId) {
|
||||
return findTaskByRunId(runId);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function filterOperationRecord(
|
||||
record: PluginOperationRecord,
|
||||
query: PluginOperationListQuery,
|
||||
): boolean {
|
||||
if (query.namespace && !isTaskNamespace(query.namespace)) {
|
||||
return false;
|
||||
}
|
||||
if (query.kind && record.kind !== query.kind) {
|
||||
return false;
|
||||
}
|
||||
if (query.status && record.status !== query.status) {
|
||||
return false;
|
||||
}
|
||||
if (query.runId && record.runId !== query.runId) {
|
||||
return false;
|
||||
}
|
||||
if (query.sourceId && record.sourceId !== query.sourceId) {
|
||||
return false;
|
||||
}
|
||||
if (query.parentOperationId && record.parentOperationId !== query.parentOperationId) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
query.sessionKey &&
|
||||
record.requesterSessionKey !== query.sessionKey &&
|
||||
record.childSessionKey !== query.sessionKey
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * Dispatches a plugin operation event against the task registry.
 *
 * Three event shapes are handled:
 * - "create": registers a new task record (only in the task namespace);
 * - "patch": updates patchable metadata (currently only notifyPolicy);
 * - anything else: a status transition routed to the matching registry mutator.
 *
 * Returns { matched: false, record: null } when no existing task matches the
 * event's operationId/runId. Throws when a transition requires a runId that
 * neither the event nor the current task provides.
 */
async function dispatchTaskOperation(
  event: PluginOperationDispatchEvent,
): Promise<PluginOperationDispatchResult> {
  if (event.type === "create") {
    // Creation is only supported inside the task namespace.
    if (!isTaskNamespace(event.namespace)) {
      throw new Error(
        `Default operations runtime only supports the "${TASK_NAMESPACE}" namespace.`,
      );
    }
    const status = normalizeTaskStatus(event.status);
    const record = createTaskRecord({
      runtime: normalizeTaskRuntime(event.kind),
      sourceId: event.sourceId,
      requesterSessionKey: event.requesterSessionKey?.trim() || "",
      childSessionKey: event.childSessionKey,
      parentTaskId: event.parentOperationId,
      agentId: event.agentId,
      runId: event.runId,
      label: event.title,
      task: event.description,
      status,
      startedAt: event.startedAt,
      // Best-available activity timestamp: explicit update, else start, else creation.
      lastEventAt: event.updatedAt ?? event.startedAt ?? event.createdAt,
      progressSummary: event.progressSummary,
      terminalSummary: event.terminalSummary,
      terminalOutcome: normalizeTaskTerminalOutcome(status),
    });
    return {
      matched: true,
      created: true,
      record: toOperationRecord(record),
    };
  }

  if (event.type === "patch") {
    const current = resolveTaskRecordForTransition(event);
    if (!current) {
      // No task matches the event's operationId/runId.
      return {
        matched: false,
        record: null,
      };
    }
    // Only notifyPolicy is patchable; other metadata keys are ignored.
    const nextNotifyPolicy = event.metadataPatch?.notifyPolicy;
    const next =
      nextNotifyPolicy === "done_only" ||
      nextNotifyPolicy === "state_changes" ||
      nextNotifyPolicy === "silent"
        ? (updateTaskNotifyPolicyById({
            taskId: current.taskId,
            notifyPolicy: nextNotifyPolicy,
          }) ?? current)
        : current;
    return {
      matched: true,
      record: toOperationRecord(next),
    };
  }

  // Status-transition path (event is neither "create" nor "patch").
  const current = resolveTaskRecordForTransition(event);
  if (!current) {
    return {
      matched: false,
      record: null,
    };
  }

  // Timestamp fallback chain; Date.now() only when the event carries no time at all.
  const at = event.at ?? event.endedAt ?? event.startedAt ?? Date.now();
  const runId = event.runId?.trim() || current.runId?.trim();
  const status = normalizeTaskStatus(event.status);
  let next: TaskRecord | null | undefined;

  if (status === "running") {
    if (!runId) {
      throw new Error("Task transition to running requires a runId.");
    }
    next = markTaskRunningByRunId({
      runId,
      startedAt: event.startedAt,
      lastEventAt: at,
      progressSummary: event.progressSummary,
      eventSummary: event.progressSummary,
    })[0];
  } else if (status === "queued") {
    if (!runId) {
      throw new Error("Task transition to queued requires a runId.");
    }
    // A "queued" event on an existing task is recorded as a progress heartbeat.
    next = recordTaskProgressByRunId({
      runId,
      lastEventAt: at,
      progressSummary: event.progressSummary,
      eventSummary: event.progressSummary,
    })[0];
  } else if (
    status === "succeeded" ||
    status === "failed" ||
    status === "timed_out" ||
    status === "cancelled"
  ) {
    if (!runId) {
      throw new Error(`Task transition to ${status} requires a runId.`);
    }
    next = markTaskTerminalByRunId({
      runId,
      status,
      startedAt: event.startedAt,
      endedAt: event.endedAt ?? at,
      lastEventAt: at,
      error: event.error ?? undefined,
      progressSummary: event.progressSummary,
      terminalSummary: event.terminalSummary,
      terminalOutcome: status === "succeeded" ? "succeeded" : undefined,
    })[0];
  } else if (status === "lost") {
    // "lost" is keyed by taskId, since the backing run may have disappeared.
    next = markTaskLostById({
      taskId: current.taskId,
      endedAt: event.endedAt ?? at,
      lastEventAt: at,
      error: event.error ?? undefined,
    });
  }

  return {
    matched: true,
    // Fall back to the pre-transition record if no mutator matched or applied.
    record: next ? toOperationRecord(next) : toOperationRecord(current),
  };
}
|
||||
|
||||
async function getTaskOperationList(
|
||||
query: PluginOperationListQuery = {},
|
||||
): Promise<PluginOperationRecord[]> {
|
||||
if (query.namespace && !isTaskNamespace(query.namespace)) {
|
||||
return [];
|
||||
}
|
||||
const records = (
|
||||
query.sessionKey ? listTasksForSessionKey(query.sessionKey) : listTaskRecords()
|
||||
).map(toOperationRecord);
|
||||
const filtered = records.filter((record) => filterOperationRecord(record, query));
|
||||
const limit =
|
||||
typeof query.limit === "number" && Number.isFinite(query.limit) && query.limit > 0
|
||||
? Math.floor(query.limit)
|
||||
: undefined;
|
||||
return typeof limit === "number" ? filtered.slice(0, limit) : filtered;
|
||||
}
|
||||
|
||||
function isMatchingTaskAuditSeverity(
|
||||
actual: TaskAuditSeverity,
|
||||
requested: PluginOperationAuditQuery["severity"],
|
||||
): boolean {
|
||||
return !requested || actual === requested;
|
||||
}
|
||||
|
||||
function toOperationAuditFinding(finding: TaskAuditFinding): PluginOperationAuditFinding {
|
||||
return {
|
||||
severity: finding.severity,
|
||||
code: finding.code,
|
||||
operation: toOperationRecord(finding.task),
|
||||
detail: finding.detail,
|
||||
...(typeof finding.ageMs === "number" ? { ageMs: finding.ageMs } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
async function auditTaskOperations(
|
||||
query: PluginOperationAuditQuery = {},
|
||||
): Promise<PluginOperationAuditFinding[]> {
|
||||
if (query.namespace && !isTaskNamespace(query.namespace)) {
|
||||
return [];
|
||||
}
|
||||
return listTaskAuditFindings()
|
||||
.filter((finding) => {
|
||||
if (!isMatchingTaskAuditSeverity(finding.severity, query.severity)) {
|
||||
return false;
|
||||
}
|
||||
if (query.code && finding.code !== query.code) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.map(toOperationAuditFinding);
|
||||
}
|
||||
|
||||
async function maintainTaskOperations(
|
||||
query: PluginOperationMaintenanceQuery = {},
|
||||
): Promise<PluginOperationMaintenanceSummary> {
|
||||
if (query.namespace && !isTaskNamespace(query.namespace)) {
|
||||
return {
|
||||
reconciled: 0,
|
||||
cleanupStamped: 0,
|
||||
pruned: 0,
|
||||
};
|
||||
}
|
||||
return query.apply ? runTaskRegistryMaintenance() : previewTaskRegistryMaintenance();
|
||||
}
|
||||
|
||||
/**
 * Default PluginOperationsRuntime backed by the in-process task registry.
 * Every entry adapts task records to the plugin operation shape via
 * toOperationRecord; lookups that find nothing resolve to null.
 */
export const defaultTaskOperationsRuntime: PluginOperationsRuntime = {
  dispatch: dispatchTaskOperation,
  // Lookup by operation id (same value as the underlying task id).
  async getById(operationId: string) {
    const record = getTaskById(operationId.trim());
    return record ? toOperationRecord(record) : null;
  },
  // Lookup by the run id attached to the task, if any.
  async findByRunId(runId: string) {
    const record = findTaskByRunId(runId.trim());
    return record ? toOperationRecord(record) : null;
  },
  list: getTaskOperationList,
  // Summaries are derived from exactly the same filtered list `list` returns.
  async summarize(query) {
    const records = await getTaskOperationList(query);
    return summarizeOperationRecords(records);
  },
  audit: auditTaskOperations,
  maintenance: maintainTaskOperations,
  // Cancellation delegates to the registry and reports the final record, if any.
  async cancel(params): Promise<PluginOperationsCancelResult> {
    const result = await cancelTaskById({
      cfg: params.cfg,
      taskId: params.operationId,
    });
    return {
      found: result.found,
      cancelled: result.cancelled,
      reason: result.reason,
      record: result.task ? toOperationRecord(result.task) : null,
    };
  },
};
|
||||
|
||||
export async function summarizeTaskOperations(
|
||||
query: PluginOperationListQuery = {},
|
||||
): Promise<PluginOperationSummary> {
|
||||
return defaultTaskOperationsRuntime.summarize(query);
|
||||
}
|
||||
@@ -1,57 +0,0 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
// Directory containing this test file (the tasks module root).
const TASK_ROOT = path.resolve(import.meta.dirname);
// Root of the source tree that gets scanned for boundary violations.
const SRC_ROOT = path.resolve(TASK_ROOT, "..");

// Raw task lifecycle mutators that must stay behind the task internals;
// the guard below flags any other file that calls one of them directly.
const RAW_TASK_MUTATORS = [
  "createTaskRecord",
  "markTaskRunningByRunId",
  "markTaskTerminalByRunId",
  "markTaskTerminalById",
  "setTaskRunDeliveryStatusByRunId",
] as const;

// Files (relative to SRC_ROOT, with forward slashes) permitted to call the
// raw mutators — the task internals themselves.
const ALLOWED_CALLERS = new Set([
  "tasks/operations-runtime.ts",
  "tasks/task-executor.ts",
  "tasks/task-registry.ts",
  "tasks/task-registry.maintenance.ts",
]);
|
||||
|
||||
async function listSourceFiles(root: string): Promise<string[]> {
|
||||
const entries = await fs.readdir(root, { withFileTypes: true });
|
||||
const files: string[] = [];
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(root, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
files.push(...(await listSourceFiles(fullPath)));
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile() || !entry.name.endsWith(".ts") || entry.name.endsWith(".test.ts")) {
|
||||
continue;
|
||||
}
|
||||
files.push(fullPath);
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
describe("task executor boundary", () => {
  it("keeps raw task lifecycle mutators behind task internals", async () => {
    // Collects "<file>:<symbol>" pairs for every disallowed call site found.
    const offenders: string[] = [];
    for (const file of await listSourceFiles(SRC_ROOT)) {
      // Normalize to forward slashes so the allowlist works across platforms.
      const relative = path.relative(SRC_ROOT, file).replaceAll(path.sep, "/");
      if (ALLOWED_CALLERS.has(relative)) {
        continue;
      }
      const source = await fs.readFile(file, "utf8");
      for (const symbol of RAW_TASK_MUTATORS) {
        // Plain substring match on "symbol(" — cheap, and precise enough here.
        if (source.includes(`${symbol}(`)) {
          offenders.push(`${relative}:${symbol}`);
        }
      }
    }
    expect(offenders).toEqual([]);
  });
});
|
||||
@@ -1,128 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
formatTaskBlockedFollowupMessage,
|
||||
formatTaskStateChangeMessage,
|
||||
formatTaskTerminalMessage,
|
||||
isTerminalTaskStatus,
|
||||
shouldAutoDeliverTaskStateChange,
|
||||
shouldAutoDeliverTaskTerminalUpdate,
|
||||
shouldSuppressDuplicateTerminalDelivery,
|
||||
} from "./task-executor-policy.js";
|
||||
import type { TaskEventRecord, TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
function createTask(partial: Partial<TaskRecord>): TaskRecord {
|
||||
return {
|
||||
taskId: partial.taskId ?? "task-1",
|
||||
runtime: partial.runtime ?? "acp",
|
||||
requesterSessionKey: partial.requesterSessionKey ?? "agent:main:main",
|
||||
task: partial.task ?? "Investigate issue",
|
||||
status: partial.status ?? "running",
|
||||
deliveryStatus: partial.deliveryStatus ?? "pending",
|
||||
notifyPolicy: partial.notifyPolicy ?? "done_only",
|
||||
createdAt: partial.createdAt ?? 1,
|
||||
...partial,
|
||||
};
|
||||
}
|
||||
|
||||
describe("task-executor-policy", () => {
  it("identifies terminal statuses", () => {
    // Only queued/running are non-terminal; every other status ends the task.
    expect(isTerminalTaskStatus("queued")).toBe(false);
    expect(isTerminalTaskStatus("running")).toBe(false);
    expect(isTerminalTaskStatus("succeeded")).toBe(true);
    expect(isTerminalTaskStatus("failed")).toBe(true);
    expect(isTerminalTaskStatus("timed_out")).toBe(true);
    expect(isTerminalTaskStatus("cancelled")).toBe(true);
    expect(isTerminalTaskStatus("lost")).toBe(true);
  });

  it("formats terminal, followup, and progress messages", () => {
    // "succeeded" + terminalOutcome "blocked" renders as blocked, not done;
    // the run label truncates the runId to its first 8 characters.
    const blockedTask = createTask({
      status: "succeeded",
      terminalOutcome: "blocked",
      terminalSummary: "Needs login.",
      runId: "run-1234567890",
      label: "ACP import",
    });
    const progressEvent: TaskEventRecord = {
      at: 10,
      kind: "progress",
      summary: "No output for 60s.",
    };

    expect(formatTaskTerminalMessage(blockedTask)).toBe(
      "Background task blocked: ACP import (run run-1234). Needs login.",
    );
    expect(formatTaskBlockedFollowupMessage(blockedTask)).toBe(
      "Task needs follow-up: ACP import (run run-1234). Needs login.",
    );
    // Progress updates use the title only, without the run label.
    expect(formatTaskStateChangeMessage(blockedTask, progressEvent)).toBe(
      "Background task update: ACP import. No output for 60s.",
    );
  });

  it("keeps delivery policy decisions explicit", () => {
    // Terminal update with pending delivery and done_only policy: deliver.
    expect(
      shouldAutoDeliverTaskTerminalUpdate(
        createTask({
          status: "succeeded",
          deliveryStatus: "pending",
          notifyPolicy: "done_only",
        }),
      ),
    ).toBe(true);
    // Subagent successes are not auto-delivered (only subagent cancellations are).
    expect(
      shouldAutoDeliverTaskTerminalUpdate(
        createTask({
          runtime: "subagent",
          status: "succeeded",
          deliveryStatus: "pending",
        }),
      ),
    ).toBe(false);
    // State changes are delivered only under the state_changes policy and
    // only while the task is still non-terminal.
    expect(
      shouldAutoDeliverTaskStateChange(
        createTask({
          status: "running",
          notifyPolicy: "state_changes",
          deliveryStatus: "pending",
        }),
      ),
    ).toBe(true);
    expect(
      shouldAutoDeliverTaskStateChange(
        createTask({
          status: "failed",
          notifyPolicy: "state_changes",
          deliveryStatus: "pending",
        }),
      ),
    ).toBe(false);
    // Duplicate-terminal suppression: an ACP task with a runId is suppressed
    // only when some other task is the preferred deliverer for that run.
    expect(
      shouldSuppressDuplicateTerminalDelivery({
        task: createTask({
          runtime: "acp",
          runId: "run-duplicate",
        }),
        preferredTaskId: "task-2",
      }),
    ).toBe(true);
    expect(
      shouldSuppressDuplicateTerminalDelivery({
        task: createTask({
          runtime: "acp",
          runId: "run-duplicate",
        }),
        preferredTaskId: "task-1",
      }),
    ).toBe(false);
    expect(
      shouldSuppressDuplicateTerminalDelivery({
        task: createTask({
          runtime: "acp",
          runId: "run-duplicate",
        }),
        preferredTaskId: undefined,
      }),
    ).toBe(false);
  });
});
|
||||
@@ -1,110 +0,0 @@
|
||||
import type { TaskEventRecord, TaskRecord, TaskStatus } from "./task-registry.types.js";
|
||||
|
||||
export function isTerminalTaskStatus(status: TaskStatus): boolean {
|
||||
return (
|
||||
status === "succeeded" ||
|
||||
status === "failed" ||
|
||||
status === "timed_out" ||
|
||||
status === "cancelled" ||
|
||||
status === "lost"
|
||||
);
|
||||
}
|
||||
|
||||
function resolveTaskDisplayTitle(task: TaskRecord): string {
|
||||
return (
|
||||
task.label?.trim() ||
|
||||
(task.runtime === "acp"
|
||||
? "ACP background task"
|
||||
: task.runtime === "subagent"
|
||||
? "Subagent task"
|
||||
: task.task.trim() || "Background task")
|
||||
);
|
||||
}
|
||||
|
||||
function resolveTaskRunLabel(task: TaskRecord): string {
|
||||
return task.runId ? ` (run ${task.runId.slice(0, 8)})` : "";
|
||||
}
|
||||
|
||||
export function formatTaskTerminalMessage(task: TaskRecord): string {
|
||||
const title = resolveTaskDisplayTitle(task);
|
||||
const runLabel = resolveTaskRunLabel(task);
|
||||
const summary = task.terminalSummary?.trim();
|
||||
if (task.status === "succeeded") {
|
||||
if (task.terminalOutcome === "blocked") {
|
||||
return summary
|
||||
? `Background task blocked: ${title}${runLabel}. ${summary}`
|
||||
: `Background task blocked: ${title}${runLabel}.`;
|
||||
}
|
||||
return summary
|
||||
? `Background task done: ${title}${runLabel}. ${summary}`
|
||||
: `Background task done: ${title}${runLabel}.`;
|
||||
}
|
||||
if (task.status === "timed_out") {
|
||||
return `Background task timed out: ${title}${runLabel}.`;
|
||||
}
|
||||
if (task.status === "lost") {
|
||||
return `Background task lost: ${title}${runLabel}. ${task.error ?? "Backing session disappeared."}`;
|
||||
}
|
||||
if (task.status === "cancelled") {
|
||||
return `Background task cancelled: ${title}${runLabel}.`;
|
||||
}
|
||||
const error = task.error?.trim();
|
||||
return error
|
||||
? `Background task failed: ${title}${runLabel}. ${error}`
|
||||
: `Background task failed: ${title}${runLabel}.`;
|
||||
}
|
||||
|
||||
export function formatTaskBlockedFollowupMessage(task: TaskRecord): string | null {
|
||||
if (task.status !== "succeeded" || task.terminalOutcome !== "blocked") {
|
||||
return null;
|
||||
}
|
||||
const title = resolveTaskDisplayTitle(task);
|
||||
const runLabel = resolveTaskRunLabel(task);
|
||||
const summary = task.terminalSummary?.trim() || "Task is blocked and needs follow-up.";
|
||||
return `Task needs follow-up: ${title}${runLabel}. ${summary}`;
|
||||
}
|
||||
|
||||
export function formatTaskStateChangeMessage(
|
||||
task: TaskRecord,
|
||||
event: TaskEventRecord,
|
||||
): string | null {
|
||||
const title = resolveTaskDisplayTitle(task);
|
||||
if (event.kind === "running") {
|
||||
return `Background task started: ${title}.`;
|
||||
}
|
||||
if (event.kind === "progress") {
|
||||
return event.summary ? `Background task update: ${title}. ${event.summary}` : null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export function shouldAutoDeliverTaskTerminalUpdate(task: TaskRecord): boolean {
|
||||
if (task.notifyPolicy === "silent") {
|
||||
return false;
|
||||
}
|
||||
if (task.runtime === "subagent" && task.status !== "cancelled") {
|
||||
return false;
|
||||
}
|
||||
if (!isTerminalTaskStatus(task.status)) {
|
||||
return false;
|
||||
}
|
||||
return task.deliveryStatus === "pending";
|
||||
}
|
||||
|
||||
export function shouldAutoDeliverTaskStateChange(task: TaskRecord): boolean {
|
||||
return (
|
||||
task.notifyPolicy === "state_changes" &&
|
||||
task.deliveryStatus === "pending" &&
|
||||
!isTerminalTaskStatus(task.status)
|
||||
);
|
||||
}
|
||||
|
||||
export function shouldSuppressDuplicateTerminalDelivery(params: {
|
||||
task: TaskRecord;
|
||||
preferredTaskId?: string;
|
||||
}): boolean {
|
||||
if (params.task.runtime !== "acp" || !params.task.runId?.trim()) {
|
||||
return false;
|
||||
}
|
||||
return Boolean(params.preferredTaskId && params.preferredTaskId !== params.task.taskId);
|
||||
}
|
||||
@@ -1,465 +0,0 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import {
|
||||
getFlowById,
|
||||
listFlowRecords,
|
||||
resetFlowRegistryForTests,
|
||||
updateFlowRecordById,
|
||||
} from "./flow-registry.js";
|
||||
import {
|
||||
cancelFlowById,
|
||||
completeTaskRunByRunId,
|
||||
createLinearFlow,
|
||||
createQueuedTaskRun,
|
||||
createRunningTaskRun,
|
||||
failTaskRunByRunId,
|
||||
recordTaskRunProgressByRunId,
|
||||
retryBlockedFlowAsQueuedTaskRun,
|
||||
retryBlockedFlowAsRunningTaskRun,
|
||||
setDetachedTaskDeliveryStatusByRunId,
|
||||
startTaskRunByRunId,
|
||||
} from "./task-executor.js";
|
||||
import {
|
||||
findLatestTaskForFlowId,
|
||||
findTaskByRunId,
|
||||
resetTaskRegistryForTests,
|
||||
} from "./task-registry.js";
|
||||
|
||||
// Snapshot of the state-dir env var so afterEach can restore it exactly.
const ORIGINAL_STATE_DIR = process.env.OPENCLAW_STATE_DIR;
// Mock fns created via vi.hoisted so the hoisted vi.mock factories below can
// reference them before this module body runs.
const hoisted = vi.hoisted(() => {
  const sendMessageMock = vi.fn();
  const cancelSessionMock = vi.fn();
  const killSubagentRunAdminMock = vi.fn();
  return {
    sendMessageMock,
    cancelSessionMock,
    killSubagentRunAdminMock,
  };
});
|
||||
|
||||
// Route task delivery sends through a mock so tests can observe outbound messages.
vi.mock("./task-registry-delivery-runtime.js", () => ({
  sendMessage: hoisted.sendMessageMock,
}));

// Stub the ACP session manager so cancellation paths do not touch real sessions.
vi.mock("../acp/control-plane/manager.js", () => ({
  getAcpSessionManager: () => ({
    cancelSession: hoisted.cancelSessionMock,
  }),
}));

// Stub the subagent kill-switch used when cancelling subagent-backed tasks.
vi.mock("../agents/subagent-control.js", () => ({
  killSubagentRunAdmin: (params: unknown) => hoisted.killSubagentRunAdminMock(params),
}));
|
||||
|
||||
async function withTaskExecutorStateDir(run: (root: string) => Promise<void>): Promise<void> {
|
||||
await withTempDir({ prefix: "openclaw-task-executor-" }, async (root) => {
|
||||
process.env.OPENCLAW_STATE_DIR = root;
|
||||
resetTaskRegistryForTests();
|
||||
resetFlowRegistryForTests();
|
||||
try {
|
||||
await run(root);
|
||||
} finally {
|
||||
resetTaskRegistryForTests();
|
||||
resetFlowRegistryForTests();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe("task-executor", () => {
|
||||
afterEach(() => {
|
||||
if (ORIGINAL_STATE_DIR === undefined) {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
} else {
|
||||
process.env.OPENCLAW_STATE_DIR = ORIGINAL_STATE_DIR;
|
||||
}
|
||||
resetTaskRegistryForTests();
|
||||
resetFlowRegistryForTests();
|
||||
hoisted.sendMessageMock.mockReset();
|
||||
hoisted.cancelSessionMock.mockReset();
|
||||
hoisted.killSubagentRunAdminMock.mockReset();
|
||||
});
|
||||
|
||||
it("advances a queued run through start and completion", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createQueuedTaskRun({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:acp:child",
|
||||
runId: "run-executor-queued",
|
||||
task: "Investigate issue",
|
||||
});
|
||||
|
||||
expect(created.status).toBe("queued");
|
||||
|
||||
startTaskRunByRunId({
|
||||
runId: "run-executor-queued",
|
||||
startedAt: 100,
|
||||
lastEventAt: 100,
|
||||
eventSummary: "Started.",
|
||||
});
|
||||
|
||||
completeTaskRunByRunId({
|
||||
runId: "run-executor-queued",
|
||||
endedAt: 250,
|
||||
lastEventAt: 250,
|
||||
terminalSummary: "Done.",
|
||||
});
|
||||
|
||||
expect(findTaskByRunId("run-executor-queued")).toMatchObject({
|
||||
taskId: created.taskId,
|
||||
status: "succeeded",
|
||||
startedAt: 100,
|
||||
endedAt: 250,
|
||||
terminalSummary: "Done.",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("records progress, failure, and delivery status through the executor", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "subagent",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:subagent:child",
|
||||
runId: "run-executor-fail",
|
||||
task: "Write summary",
|
||||
startedAt: 10,
|
||||
});
|
||||
|
||||
recordTaskRunProgressByRunId({
|
||||
runId: "run-executor-fail",
|
||||
lastEventAt: 20,
|
||||
progressSummary: "Collecting results",
|
||||
eventSummary: "Collecting results",
|
||||
});
|
||||
|
||||
failTaskRunByRunId({
|
||||
runId: "run-executor-fail",
|
||||
endedAt: 40,
|
||||
lastEventAt: 40,
|
||||
error: "tool failed",
|
||||
});
|
||||
|
||||
setDetachedTaskDeliveryStatusByRunId({
|
||||
runId: "run-executor-fail",
|
||||
deliveryStatus: "failed",
|
||||
});
|
||||
|
||||
expect(findTaskByRunId("run-executor-fail")).toMatchObject({
|
||||
taskId: created.taskId,
|
||||
status: "failed",
|
||||
progressSummary: "Collecting results",
|
||||
error: "tool failed",
|
||||
deliveryStatus: "failed",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("auto-creates a one-task flow and keeps it synced with task status", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "subagent",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:subagent:child",
|
||||
runId: "run-executor-flow",
|
||||
task: "Write summary",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
expect(created.parentFlowId).toEqual(expect.any(String));
|
||||
expect(getFlowById(created.parentFlowId!)).toMatchObject({
|
||||
flowId: created.parentFlowId,
|
||||
ownerSessionKey: "agent:main:main",
|
||||
status: "running",
|
||||
goal: "Write summary",
|
||||
notifyPolicy: "done_only",
|
||||
});
|
||||
|
||||
completeTaskRunByRunId({
|
||||
runId: "run-executor-flow",
|
||||
endedAt: 40,
|
||||
lastEventAt: 40,
|
||||
terminalSummary: "Done.",
|
||||
});
|
||||
|
||||
expect(getFlowById(created.parentFlowId!)).toMatchObject({
|
||||
flowId: created.parentFlowId,
|
||||
status: "succeeded",
|
||||
endedAt: 40,
|
||||
goal: "Write summary",
|
||||
notifyPolicy: "done_only",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("does not auto-create one-task flows for non-returning bookkeeping runs", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "cli",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:main:main",
|
||||
runId: "run-executor-cli",
|
||||
task: "Foreground gateway run",
|
||||
deliveryStatus: "not_applicable",
|
||||
startedAt: 10,
|
||||
});
|
||||
|
||||
expect(created.parentFlowId).toBeUndefined();
|
||||
expect(listFlowRecords()).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
it("records blocked metadata on one-task flows and reuses the same flow for queued retries", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
requesterOrigin: {
|
||||
channel: "telegram",
|
||||
to: "telegram:123",
|
||||
},
|
||||
childSessionKey: "agent:codex:acp:child",
|
||||
runId: "run-executor-blocked",
|
||||
task: "Patch file",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
notifyPolicy: "silent",
|
||||
});
|
||||
|
||||
completeTaskRunByRunId({
|
||||
runId: "run-executor-blocked",
|
||||
endedAt: 40,
|
||||
lastEventAt: 40,
|
||||
terminalOutcome: "blocked",
|
||||
terminalSummary: "Writable session required.",
|
||||
});
|
||||
|
||||
expect(getFlowById(created.parentFlowId!)).toMatchObject({
|
||||
flowId: created.parentFlowId,
|
||||
status: "blocked",
|
||||
blockedTaskId: created.taskId,
|
||||
blockedSummary: "Writable session required.",
|
||||
endedAt: 40,
|
||||
});
|
||||
|
||||
const retried = retryBlockedFlowAsQueuedTaskRun({
|
||||
flowId: created.parentFlowId!,
|
||||
runId: "run-executor-retry",
|
||||
childSessionKey: "agent:codex:acp:retry-child",
|
||||
});
|
||||
|
||||
expect(retried).toMatchObject({
|
||||
found: true,
|
||||
retried: true,
|
||||
previousTask: expect.objectContaining({
|
||||
taskId: created.taskId,
|
||||
}),
|
||||
task: expect.objectContaining({
|
||||
parentFlowId: created.parentFlowId,
|
||||
parentTaskId: created.taskId,
|
||||
status: "queued",
|
||||
runId: "run-executor-retry",
|
||||
}),
|
||||
});
|
||||
|
||||
expect(getFlowById(created.parentFlowId!)).toMatchObject({
|
||||
flowId: created.parentFlowId,
|
||||
status: "queued",
|
||||
});
|
||||
expect(getFlowById(created.parentFlowId!)?.blockedTaskId).toBeUndefined();
|
||||
expect(getFlowById(created.parentFlowId!)?.blockedSummary).toBeUndefined();
|
||||
expect(getFlowById(created.parentFlowId!)?.endedAt).toBeUndefined();
|
||||
expect(findLatestTaskForFlowId(created.parentFlowId!)).toMatchObject({
|
||||
taskId: retried.task?.taskId,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("can reopen blocked one-task flows directly into a running retry", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "subagent",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:subagent:child",
|
||||
runId: "run-executor-blocked-running",
|
||||
task: "Write summary",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
completeTaskRunByRunId({
|
||||
runId: "run-executor-blocked-running",
|
||||
endedAt: 40,
|
||||
lastEventAt: 40,
|
||||
terminalOutcome: "blocked",
|
||||
terminalSummary: "Need write approval.",
|
||||
});
|
||||
|
||||
const retried = retryBlockedFlowAsRunningTaskRun({
|
||||
flowId: created.parentFlowId!,
|
||||
runId: "run-executor-running-retry",
|
||||
childSessionKey: "agent:codex:subagent:retry",
|
||||
startedAt: 55,
|
||||
lastEventAt: 55,
|
||||
progressSummary: "Retrying with approval",
|
||||
});
|
||||
|
||||
expect(retried).toMatchObject({
|
||||
found: true,
|
||||
retried: true,
|
||||
task: expect.objectContaining({
|
||||
parentFlowId: created.parentFlowId,
|
||||
status: "running",
|
||||
runId: "run-executor-running-retry",
|
||||
progressSummary: "Retrying with approval",
|
||||
}),
|
||||
});
|
||||
|
||||
expect(getFlowById(created.parentFlowId!)).toMatchObject({
|
||||
flowId: created.parentFlowId,
|
||||
status: "running",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("refuses to retry flows that are not currently blocked", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const created = createRunningTaskRun({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:acp:child",
|
||||
runId: "run-executor-not-blocked",
|
||||
task: "Patch file",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
const retried = retryBlockedFlowAsQueuedTaskRun({
|
||||
flowId: created.parentFlowId!,
|
||||
runId: "run-should-not-exist",
|
||||
});
|
||||
|
||||
expect(retried).toMatchObject({
|
||||
found: true,
|
||||
retried: false,
|
||||
reason: "Flow is not blocked.",
|
||||
});
|
||||
expect(findTaskByRunId("run-should-not-exist")).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
it("keeps linear flows under explicit control instead of auto-syncing child task status", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const flow = createLinearFlow({
|
||||
ownerSessionKey: "agent:main:main",
|
||||
goal: "Triage a PR cluster",
|
||||
currentStep: "wait_for",
|
||||
notifyPolicy: "done_only",
|
||||
});
|
||||
|
||||
const child = createRunningTaskRun({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
parentFlowId: flow.flowId,
|
||||
childSessionKey: "agent:codex:acp:child",
|
||||
runId: "run-linear-child",
|
||||
task: "Inspect a PR",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
completeTaskRunByRunId({
|
||||
runId: "run-linear-child",
|
||||
endedAt: 40,
|
||||
lastEventAt: 40,
|
||||
terminalSummary: "Done.",
|
||||
});
|
||||
|
||||
expect(child.parentFlowId).toBe(flow.flowId);
|
||||
expect(getFlowById(flow.flowId)).toMatchObject({
|
||||
flowId: flow.flowId,
|
||||
shape: "linear",
|
||||
status: "queued",
|
||||
currentStep: "wait_for",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("cancels active child tasks and marks a linear flow cancelled", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
hoisted.cancelSessionMock.mockResolvedValue(undefined);
|
||||
|
||||
const flow = createLinearFlow({
|
||||
ownerSessionKey: "agent:main:main",
|
||||
goal: "Cluster related PRs",
|
||||
currentStep: "wait_for",
|
||||
});
|
||||
|
||||
const child = createRunningTaskRun({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
parentFlowId: flow.flowId,
|
||||
childSessionKey: "agent:codex:acp:child",
|
||||
runId: "run-linear-cancel",
|
||||
task: "Inspect a PR",
|
||||
startedAt: 10,
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
const cancelled = await cancelFlowById({
|
||||
cfg: {} as never,
|
||||
flowId: flow.flowId,
|
||||
});
|
||||
|
||||
expect(cancelled).toMatchObject({
|
||||
found: true,
|
||||
cancelled: true,
|
||||
flow: expect.objectContaining({
|
||||
flowId: flow.flowId,
|
||||
status: "cancelled",
|
||||
}),
|
||||
});
|
||||
expect(findTaskByRunId("run-linear-cancel")).toMatchObject({
|
||||
taskId: child.taskId,
|
||||
status: "cancelled",
|
||||
});
|
||||
expect(getFlowById(flow.flowId)).toMatchObject({
|
||||
flowId: flow.flowId,
|
||||
status: "cancelled",
|
||||
});
|
||||
expect(hoisted.cancelSessionMock).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it("refuses to rewrite terminal linear flows when cancel is requested", async () => {
|
||||
await withTaskExecutorStateDir(async () => {
|
||||
const flow = createLinearFlow({
|
||||
ownerSessionKey: "agent:main:main",
|
||||
goal: "Cluster related PRs",
|
||||
currentStep: "finish",
|
||||
});
|
||||
updateFlowRecordById(flow.flowId, {
|
||||
status: "succeeded",
|
||||
endedAt: 55,
|
||||
updatedAt: 55,
|
||||
});
|
||||
|
||||
const cancelled = await cancelFlowById({
|
||||
cfg: {} as never,
|
||||
flowId: flow.flowId,
|
||||
});
|
||||
|
||||
expect(cancelled).toMatchObject({
|
||||
found: true,
|
||||
cancelled: false,
|
||||
reason: "Flow is already succeeded.",
|
||||
});
|
||||
expect(getFlowById(flow.flowId)).toMatchObject({
|
||||
flowId: flow.flowId,
|
||||
status: "succeeded",
|
||||
endedAt: 55,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,448 +0,0 @@
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import {
|
||||
createFlowForTask,
|
||||
createFlowRecord,
|
||||
deleteFlowRecordById,
|
||||
getFlowById,
|
||||
updateFlowRecordById,
|
||||
} from "./flow-registry.js";
|
||||
import type { FlowRecord } from "./flow-registry.types.js";
|
||||
import {
|
||||
cancelTaskById,
|
||||
createTaskRecord,
|
||||
findLatestTaskForFlowId,
|
||||
linkTaskToFlowById,
|
||||
listTasksForFlowId,
|
||||
markTaskLostById,
|
||||
markTaskRunningByRunId,
|
||||
markTaskTerminalByRunId,
|
||||
recordTaskProgressByRunId,
|
||||
setTaskRunDeliveryStatusByRunId,
|
||||
} from "./task-registry.js";
|
||||
import { summarizeTaskRecords } from "./task-registry.summary.js";
|
||||
import type {
|
||||
TaskDeliveryState,
|
||||
TaskDeliveryStatus,
|
||||
TaskNotifyPolicy,
|
||||
TaskRecord,
|
||||
TaskRegistrySummary,
|
||||
TaskRuntime,
|
||||
TaskStatus,
|
||||
TaskTerminalOutcome,
|
||||
} from "./task-registry.types.js";
|
||||
|
||||
// Subsystem-scoped logger for the task executor module.
const log = createSubsystemLogger("tasks/executor");
|
||||
|
||||
function isOneTaskFlowEligible(task: TaskRecord): boolean {
|
||||
if (task.parentFlowId?.trim() || !task.requesterSessionKey.trim()) {
|
||||
return false;
|
||||
}
|
||||
if (task.deliveryStatus === "not_applicable") {
|
||||
return false;
|
||||
}
|
||||
return task.runtime === "acp" || task.runtime === "subagent";
|
||||
}
|
||||
|
||||
function ensureSingleTaskFlow(params: {
|
||||
task: TaskRecord;
|
||||
requesterOrigin?: TaskDeliveryState["requesterOrigin"];
|
||||
}): TaskRecord {
|
||||
if (!isOneTaskFlowEligible(params.task)) {
|
||||
return params.task;
|
||||
}
|
||||
try {
|
||||
const flow = createFlowForTask({
|
||||
task: params.task,
|
||||
requesterOrigin: params.requesterOrigin,
|
||||
});
|
||||
const linked = linkTaskToFlowById({
|
||||
taskId: params.task.taskId,
|
||||
flowId: flow.flowId,
|
||||
});
|
||||
if (!linked) {
|
||||
deleteFlowRecordById(flow.flowId);
|
||||
return params.task;
|
||||
}
|
||||
if (linked.parentFlowId !== flow.flowId) {
|
||||
deleteFlowRecordById(flow.flowId);
|
||||
return linked;
|
||||
}
|
||||
return linked;
|
||||
} catch (error) {
|
||||
log.warn("Failed to create one-task flow for detached run", {
|
||||
taskId: params.task.taskId,
|
||||
runId: params.task.runId,
|
||||
error,
|
||||
});
|
||||
return params.task;
|
||||
}
|
||||
}
|
||||
|
||||
export function createQueuedTaskRun(params: {
|
||||
runtime: TaskRuntime;
|
||||
sourceId?: string;
|
||||
requesterSessionKey: string;
|
||||
requesterOrigin?: TaskDeliveryState["requesterOrigin"];
|
||||
parentFlowId?: string;
|
||||
childSessionKey?: string;
|
||||
parentTaskId?: string;
|
||||
agentId?: string;
|
||||
runId?: string;
|
||||
label?: string;
|
||||
task: string;
|
||||
preferMetadata?: boolean;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
deliveryStatus?: TaskDeliveryStatus;
|
||||
}): TaskRecord {
|
||||
const task = createTaskRecord({
|
||||
...params,
|
||||
status: "queued",
|
||||
});
|
||||
return ensureSingleTaskFlow({
|
||||
task,
|
||||
requesterOrigin: params.requesterOrigin,
|
||||
});
|
||||
}
|
||||
|
||||
export function createLinearFlow(params: {
|
||||
ownerSessionKey: string;
|
||||
requesterOrigin?: TaskDeliveryState["requesterOrigin"];
|
||||
goal: string;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
currentStep?: string;
|
||||
createdAt?: number;
|
||||
updatedAt?: number;
|
||||
}): FlowRecord {
|
||||
return createFlowRecord({
|
||||
shape: "linear",
|
||||
ownerSessionKey: params.ownerSessionKey,
|
||||
requesterOrigin: params.requesterOrigin,
|
||||
goal: params.goal,
|
||||
notifyPolicy: params.notifyPolicy,
|
||||
currentStep: params.currentStep,
|
||||
status: "queued",
|
||||
createdAt: params.createdAt,
|
||||
updatedAt: params.updatedAt,
|
||||
});
|
||||
}
|
||||
|
||||
export function getFlowTaskSummary(flowId: string): TaskRegistrySummary {
|
||||
return summarizeTaskRecords(listTasksForFlowId(flowId));
|
||||
}
|
||||
|
||||
type RetryBlockedFlowResult = {
|
||||
found: boolean;
|
||||
retried: boolean;
|
||||
reason?: string;
|
||||
previousTask?: TaskRecord;
|
||||
task?: TaskRecord;
|
||||
};
|
||||
|
||||
type RetryBlockedFlowParams = {
|
||||
flowId: string;
|
||||
sourceId?: string;
|
||||
requesterOrigin?: TaskDeliveryState["requesterOrigin"];
|
||||
childSessionKey?: string;
|
||||
agentId?: string;
|
||||
runId?: string;
|
||||
label?: string;
|
||||
task?: string;
|
||||
preferMetadata?: boolean;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
deliveryStatus?: TaskDeliveryStatus;
|
||||
status: "queued" | "running";
|
||||
startedAt?: number;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
};
|
||||
|
||||
function resolveRetryableBlockedFlowTask(flowId: string): {
|
||||
flowFound: boolean;
|
||||
retryable: boolean;
|
||||
latestTask?: TaskRecord;
|
||||
reason?: string;
|
||||
} {
|
||||
const flow = getFlowById(flowId);
|
||||
if (!flow) {
|
||||
return {
|
||||
flowFound: false,
|
||||
retryable: false,
|
||||
reason: "Flow not found.",
|
||||
};
|
||||
}
|
||||
const latestTask = findLatestTaskForFlowId(flowId);
|
||||
if (!latestTask) {
|
||||
return {
|
||||
flowFound: true,
|
||||
retryable: false,
|
||||
reason: "Flow has no retryable task.",
|
||||
};
|
||||
}
|
||||
if (flow.status !== "blocked") {
|
||||
return {
|
||||
flowFound: true,
|
||||
retryable: false,
|
||||
latestTask,
|
||||
reason: "Flow is not blocked.",
|
||||
};
|
||||
}
|
||||
if (latestTask.status !== "succeeded" || latestTask.terminalOutcome !== "blocked") {
|
||||
return {
|
||||
flowFound: true,
|
||||
retryable: false,
|
||||
latestTask,
|
||||
reason: "Latest flow task is not blocked.",
|
||||
};
|
||||
}
|
||||
return {
|
||||
flowFound: true,
|
||||
retryable: true,
|
||||
latestTask,
|
||||
};
|
||||
}
|
||||
|
||||
function retryBlockedFlowTask(params: RetryBlockedFlowParams): RetryBlockedFlowResult {
|
||||
const resolved = resolveRetryableBlockedFlowTask(params.flowId);
|
||||
if (!resolved.retryable || !resolved.latestTask) {
|
||||
return {
|
||||
found: resolved.flowFound,
|
||||
retried: false,
|
||||
reason: resolved.reason,
|
||||
};
|
||||
}
|
||||
const flow = getFlowById(params.flowId);
|
||||
if (!flow) {
|
||||
return {
|
||||
found: false,
|
||||
retried: false,
|
||||
reason: "Flow not found.",
|
||||
previousTask: resolved.latestTask,
|
||||
};
|
||||
}
|
||||
const task = createTaskRecord({
|
||||
runtime: resolved.latestTask.runtime,
|
||||
sourceId: params.sourceId ?? resolved.latestTask.sourceId,
|
||||
requesterSessionKey: flow.ownerSessionKey,
|
||||
requesterOrigin: params.requesterOrigin ?? flow.requesterOrigin,
|
||||
parentFlowId: flow.flowId,
|
||||
childSessionKey: params.childSessionKey,
|
||||
parentTaskId: resolved.latestTask.taskId,
|
||||
agentId: params.agentId ?? resolved.latestTask.agentId,
|
||||
runId: params.runId,
|
||||
label: params.label ?? resolved.latestTask.label,
|
||||
task: params.task ?? resolved.latestTask.task,
|
||||
preferMetadata: params.preferMetadata,
|
||||
notifyPolicy: params.notifyPolicy ?? resolved.latestTask.notifyPolicy,
|
||||
deliveryStatus: params.deliveryStatus ?? "pending",
|
||||
status: params.status,
|
||||
startedAt: params.startedAt,
|
||||
lastEventAt: params.lastEventAt,
|
||||
progressSummary: params.progressSummary,
|
||||
});
|
||||
return {
|
||||
found: true,
|
||||
retried: true,
|
||||
previousTask: resolved.latestTask,
|
||||
task,
|
||||
};
|
||||
}
|
||||
|
||||
export function retryBlockedFlowAsQueuedTaskRun(
|
||||
params: Omit<RetryBlockedFlowParams, "status" | "startedAt" | "lastEventAt" | "progressSummary">,
|
||||
): RetryBlockedFlowResult {
|
||||
return retryBlockedFlowTask({
|
||||
...params,
|
||||
status: "queued",
|
||||
});
|
||||
}
|
||||
|
||||
export function retryBlockedFlowAsRunningTaskRun(
|
||||
params: Omit<RetryBlockedFlowParams, "status">,
|
||||
): RetryBlockedFlowResult {
|
||||
return retryBlockedFlowTask({
|
||||
...params,
|
||||
status: "running",
|
||||
});
|
||||
}
|
||||
|
||||
type CancelFlowResult = {
|
||||
found: boolean;
|
||||
cancelled: boolean;
|
||||
reason?: string;
|
||||
flow?: FlowRecord;
|
||||
tasks?: TaskRecord[];
|
||||
};
|
||||
|
||||
function isActiveTaskStatus(status: TaskStatus): boolean {
|
||||
return status === "queued" || status === "running";
|
||||
}
|
||||
|
||||
function isTerminalFlowStatus(status: FlowRecord["status"]): boolean {
|
||||
return (
|
||||
status === "succeeded" || status === "failed" || status === "cancelled" || status === "lost"
|
||||
);
|
||||
}
|
||||
|
||||
export async function cancelFlowById(params: {
|
||||
cfg: OpenClawConfig;
|
||||
flowId: string;
|
||||
}): Promise<CancelFlowResult> {
|
||||
const flow = getFlowById(params.flowId);
|
||||
if (!flow) {
|
||||
return {
|
||||
found: false,
|
||||
cancelled: false,
|
||||
reason: "Flow not found.",
|
||||
};
|
||||
}
|
||||
const linkedTasks = listTasksForFlowId(flow.flowId);
|
||||
const activeTasks = linkedTasks.filter((task) => isActiveTaskStatus(task.status));
|
||||
for (const task of activeTasks) {
|
||||
await cancelTaskById({
|
||||
cfg: params.cfg,
|
||||
taskId: task.taskId,
|
||||
});
|
||||
}
|
||||
const refreshedTasks = listTasksForFlowId(flow.flowId);
|
||||
const remainingActive = refreshedTasks.filter((task) => isActiveTaskStatus(task.status));
|
||||
if (remainingActive.length > 0) {
|
||||
return {
|
||||
found: true,
|
||||
cancelled: false,
|
||||
reason: "One or more child tasks are still active.",
|
||||
flow: getFlowById(flow.flowId),
|
||||
tasks: refreshedTasks,
|
||||
};
|
||||
}
|
||||
if (isTerminalFlowStatus(flow.status)) {
|
||||
return {
|
||||
found: true,
|
||||
cancelled: false,
|
||||
reason: `Flow is already ${flow.status}.`,
|
||||
flow,
|
||||
tasks: refreshedTasks,
|
||||
};
|
||||
}
|
||||
const updatedFlow = updateFlowRecordById(flow.flowId, {
|
||||
status: "cancelled",
|
||||
blockedTaskId: null,
|
||||
blockedSummary: null,
|
||||
endedAt: Date.now(),
|
||||
updatedAt: Date.now(),
|
||||
});
|
||||
return {
|
||||
found: true,
|
||||
cancelled: true,
|
||||
flow: updatedFlow ?? getFlowById(flow.flowId),
|
||||
tasks: refreshedTasks,
|
||||
};
|
||||
}
|
||||
|
||||
export function createRunningTaskRun(params: {
|
||||
runtime: TaskRuntime;
|
||||
sourceId?: string;
|
||||
requesterSessionKey: string;
|
||||
requesterOrigin?: TaskDeliveryState["requesterOrigin"];
|
||||
parentFlowId?: string;
|
||||
childSessionKey?: string;
|
||||
parentTaskId?: string;
|
||||
agentId?: string;
|
||||
runId?: string;
|
||||
label?: string;
|
||||
task: string;
|
||||
notifyPolicy?: TaskNotifyPolicy;
|
||||
deliveryStatus?: TaskDeliveryStatus;
|
||||
preferMetadata?: boolean;
|
||||
startedAt?: number;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
}): TaskRecord {
|
||||
const task = createTaskRecord({
|
||||
...params,
|
||||
status: "running",
|
||||
});
|
||||
return ensureSingleTaskFlow({
|
||||
task,
|
||||
requesterOrigin: params.requesterOrigin,
|
||||
});
|
||||
}
|
||||
|
||||
export function startTaskRunByRunId(params: {
|
||||
runId: string;
|
||||
startedAt?: number;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
eventSummary?: string | null;
|
||||
}) {
|
||||
return markTaskRunningByRunId(params);
|
||||
}
|
||||
|
||||
export function recordTaskRunProgressByRunId(params: {
|
||||
runId: string;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
eventSummary?: string | null;
|
||||
}) {
|
||||
return recordTaskProgressByRunId(params);
|
||||
}
|
||||
|
||||
export function completeTaskRunByRunId(params: {
|
||||
runId: string;
|
||||
endedAt: number;
|
||||
lastEventAt?: number;
|
||||
progressSummary?: string | null;
|
||||
terminalSummary?: string | null;
|
||||
terminalOutcome?: TaskTerminalOutcome | null;
|
||||
}) {
|
||||
return markTaskTerminalByRunId({
|
||||
runId: params.runId,
|
||||
status: "succeeded",
|
||||
endedAt: params.endedAt,
|
||||
lastEventAt: params.lastEventAt,
|
||||
progressSummary: params.progressSummary,
|
||||
terminalSummary: params.terminalSummary,
|
||||
terminalOutcome: params.terminalOutcome,
|
||||
});
|
||||
}
|
||||
|
||||
export function failTaskRunByRunId(params: {
|
||||
runId: string;
|
||||
status?: Extract<TaskStatus, "failed" | "timed_out" | "cancelled">;
|
||||
endedAt: number;
|
||||
lastEventAt?: number;
|
||||
error?: string;
|
||||
progressSummary?: string | null;
|
||||
terminalSummary?: string | null;
|
||||
}) {
|
||||
return markTaskTerminalByRunId({
|
||||
runId: params.runId,
|
||||
status: params.status ?? "failed",
|
||||
endedAt: params.endedAt,
|
||||
lastEventAt: params.lastEventAt,
|
||||
error: params.error,
|
||||
progressSummary: params.progressSummary,
|
||||
terminalSummary: params.terminalSummary,
|
||||
});
|
||||
}
|
||||
|
||||
export function markTaskRunLostById(params: {
|
||||
taskId: string;
|
||||
endedAt: number;
|
||||
lastEventAt?: number;
|
||||
error?: string;
|
||||
cleanupAfter?: number;
|
||||
}) {
|
||||
return markTaskLostById(params);
|
||||
}
|
||||
|
||||
export function setDetachedTaskDeliveryStatusByRunId(params: {
|
||||
runId: string;
|
||||
deliveryStatus: TaskDeliveryStatus;
|
||||
}) {
|
||||
return setTaskRunDeliveryStatusByRunId(params);
|
||||
}
|
||||
|
||||
export async function cancelDetachedTaskRunById(params: { cfg: OpenClawConfig; taskId: string }) {
|
||||
return cancelTaskById(params);
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
// Compatibility seam: re-export sendMessage from its home in infra/outbound.
export { sendMessage } from "../infra/outbound/message.js";
|
||||
@@ -1,49 +0,0 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
const TASK_ROOT = path.resolve(import.meta.dirname);
|
||||
const SRC_ROOT = path.resolve(TASK_ROOT, "..");
|
||||
|
||||
const ALLOWED_IMPORTERS = new Set([
|
||||
"agents/tools/session-status-tool.ts",
|
||||
"auto-reply/reply/commands-acp/runtime-options.ts",
|
||||
"auto-reply/reply/commands-subagents/action-info.ts",
|
||||
"commands/doctor-workspace-status.ts",
|
||||
"commands/flows.ts",
|
||||
"tasks/flow-runtime.ts",
|
||||
"tasks/operations-runtime.ts",
|
||||
"tasks/task-executor.ts",
|
||||
"tasks/task-registry.maintenance.ts",
|
||||
]);
|
||||
|
||||
async function listSourceFiles(root: string): Promise<string[]> {
|
||||
const entries = await fs.readdir(root, { withFileTypes: true });
|
||||
const files: string[] = [];
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(root, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
files.push(...(await listSourceFiles(fullPath)));
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile() || !entry.name.endsWith(".ts") || entry.name.endsWith(".test.ts")) {
|
||||
continue;
|
||||
}
|
||||
files.push(fullPath);
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
describe("task registry import boundary", () => {
|
||||
it("keeps direct task-registry imports on the approved read-model seam", async () => {
|
||||
const importers: string[] = [];
|
||||
for (const file of await listSourceFiles(SRC_ROOT)) {
|
||||
const relative = path.relative(SRC_ROOT, file).replaceAll(path.sep, "/");
|
||||
const source = await fs.readFile(file, "utf8");
|
||||
if (source.includes("task-registry.js")) {
|
||||
importers.push(relative);
|
||||
}
|
||||
}
|
||||
expect(importers.toSorted()).toEqual([...ALLOWED_IMPORTERS].toSorted());
|
||||
});
|
||||
});
|
||||
@@ -1,41 +0,0 @@
|
||||
import type { TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
export type TaskAuditSeverity = "warn" | "error";
|
||||
export type TaskAuditCode =
|
||||
| "stale_queued"
|
||||
| "stale_running"
|
||||
| "lost"
|
||||
| "delivery_failed"
|
||||
| "missing_cleanup"
|
||||
| "inconsistent_timestamps";
|
||||
|
||||
export type TaskAuditFinding = {
|
||||
severity: TaskAuditSeverity;
|
||||
code: TaskAuditCode;
|
||||
task: TaskRecord;
|
||||
ageMs?: number;
|
||||
detail: string;
|
||||
};
|
||||
|
||||
export type TaskAuditSummary = {
|
||||
total: number;
|
||||
warnings: number;
|
||||
errors: number;
|
||||
byCode: Record<TaskAuditCode, number>;
|
||||
};
|
||||
|
||||
export function createEmptyTaskAuditSummary(): TaskAuditSummary {
|
||||
return {
|
||||
total: 0,
|
||||
warnings: 0,
|
||||
errors: 0,
|
||||
byCode: {
|
||||
stale_queued: 0,
|
||||
stale_running: 0,
|
||||
lost: 0,
|
||||
delivery_failed: 0,
|
||||
missing_cleanup: 0,
|
||||
inconsistent_timestamps: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,100 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { listTaskAuditFindings, summarizeTaskAuditFindings } from "./task-registry.audit.js";
|
||||
import type { TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
function createTask(partial: Partial<TaskRecord>): TaskRecord {
|
||||
return {
|
||||
taskId: partial.taskId ?? "task-1",
|
||||
runtime: partial.runtime ?? "acp",
|
||||
requesterSessionKey: partial.requesterSessionKey ?? "agent:main:main",
|
||||
task: partial.task ?? "Background task",
|
||||
status: partial.status ?? "queued",
|
||||
deliveryStatus: partial.deliveryStatus ?? "pending",
|
||||
notifyPolicy: partial.notifyPolicy ?? "done_only",
|
||||
createdAt: partial.createdAt ?? Date.parse("2026-03-30T00:00:00.000Z"),
|
||||
...partial,
|
||||
};
|
||||
}
|
||||
|
||||
describe("task-registry audit", () => {
|
||||
it("flags stale running, lost, and missing cleanup tasks", () => {
|
||||
const now = Date.parse("2026-03-30T01:00:00.000Z");
|
||||
const findings = listTaskAuditFindings({
|
||||
now,
|
||||
tasks: [
|
||||
createTask({
|
||||
taskId: "stale-running",
|
||||
status: "running",
|
||||
startedAt: now - 40 * 60_000,
|
||||
lastEventAt: now - 40 * 60_000,
|
||||
}),
|
||||
createTask({
|
||||
taskId: "lost-task",
|
||||
status: "lost",
|
||||
error: "backing session missing",
|
||||
endedAt: now - 5 * 60_000,
|
||||
}),
|
||||
createTask({
|
||||
taskId: "missing-cleanup",
|
||||
status: "failed",
|
||||
endedAt: now - 60_000,
|
||||
cleanupAfter: undefined,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
expect(findings.map((finding) => [finding.code, finding.task.taskId])).toEqual([
|
||||
["lost", "lost-task"],
|
||||
["stale_running", "stale-running"],
|
||||
["missing_cleanup", "missing-cleanup"],
|
||||
]);
|
||||
});
|
||||
|
||||
it("summarizes findings by severity and code", () => {
|
||||
const summary = summarizeTaskAuditFindings([
|
||||
{
|
||||
severity: "error",
|
||||
code: "stale_running",
|
||||
task: createTask({ taskId: "a", status: "running" }),
|
||||
detail: "running task appears stuck",
|
||||
},
|
||||
{
|
||||
severity: "warn",
|
||||
code: "delivery_failed",
|
||||
task: createTask({ taskId: "b", status: "failed" }),
|
||||
detail: "terminal update delivery failed",
|
||||
},
|
||||
]);
|
||||
|
||||
expect(summary).toEqual({
|
||||
total: 2,
|
||||
warnings: 1,
|
||||
errors: 1,
|
||||
byCode: {
|
||||
stale_queued: 0,
|
||||
stale_running: 1,
|
||||
lost: 0,
|
||||
delivery_failed: 1,
|
||||
missing_cleanup: 0,
|
||||
inconsistent_timestamps: 0,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("does not double-report lost tasks as missing cleanup", () => {
|
||||
const now = Date.parse("2026-03-30T01:00:00.000Z");
|
||||
const findings = listTaskAuditFindings({
|
||||
now,
|
||||
tasks: [
|
||||
createTask({
|
||||
taskId: "lost-projected",
|
||||
status: "lost",
|
||||
endedAt: now - 60_000,
|
||||
cleanupAfter: undefined,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
expect(findings.map((finding) => finding.code)).toEqual(["lost"]);
|
||||
});
|
||||
});
|
||||
@@ -1,182 +0,0 @@
|
||||
import {
|
||||
createEmptyTaskAuditSummary,
|
||||
type TaskAuditCode,
|
||||
type TaskAuditFinding,
|
||||
type TaskAuditSeverity,
|
||||
type TaskAuditSummary,
|
||||
} from "./task-registry.audit.shared.js";
|
||||
import { reconcileInspectableTasks } from "./task-registry.reconcile.js";
|
||||
import type { TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
export type TaskAuditOptions = {
|
||||
now?: number;
|
||||
tasks?: TaskRecord[];
|
||||
staleQueuedMs?: number;
|
||||
staleRunningMs?: number;
|
||||
};
|
||||
|
||||
const DEFAULT_STALE_QUEUED_MS = 10 * 60_000;
|
||||
const DEFAULT_STALE_RUNNING_MS = 30 * 60_000;
|
||||
export { createEmptyTaskAuditSummary };
|
||||
export type { TaskAuditCode, TaskAuditFinding, TaskAuditSeverity, TaskAuditSummary };
|
||||
|
||||
function createFinding(params: {
|
||||
severity: TaskAuditSeverity;
|
||||
code: TaskAuditCode;
|
||||
task: TaskRecord;
|
||||
detail: string;
|
||||
ageMs?: number;
|
||||
}): TaskAuditFinding {
|
||||
return {
|
||||
severity: params.severity,
|
||||
code: params.code,
|
||||
task: params.task,
|
||||
detail: params.detail,
|
||||
...(typeof params.ageMs === "number" ? { ageMs: params.ageMs } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function taskReferenceAt(task: TaskRecord): number {
|
||||
return task.lastEventAt ?? task.startedAt ?? task.createdAt;
|
||||
}
|
||||
|
||||
function findTimestampInconsistency(task: TaskRecord): TaskAuditFinding | null {
|
||||
if (task.startedAt && task.startedAt < task.createdAt) {
|
||||
return createFinding({
|
||||
severity: "warn",
|
||||
code: "inconsistent_timestamps",
|
||||
task,
|
||||
detail: "startedAt is earlier than createdAt",
|
||||
});
|
||||
}
|
||||
if (task.endedAt && task.startedAt && task.endedAt < task.startedAt) {
|
||||
return createFinding({
|
||||
severity: "warn",
|
||||
code: "inconsistent_timestamps",
|
||||
task,
|
||||
detail: "endedAt is earlier than startedAt",
|
||||
});
|
||||
}
|
||||
if ((task.status === "queued" || task.status === "running") && task.endedAt) {
|
||||
return createFinding({
|
||||
severity: "warn",
|
||||
code: "inconsistent_timestamps",
|
||||
task,
|
||||
detail: `${task.status} task should not already have endedAt`,
|
||||
});
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function compareFindings(left: TaskAuditFinding, right: TaskAuditFinding): number {
|
||||
const severityRank = (severity: TaskAuditSeverity) => (severity === "error" ? 0 : 1);
|
||||
const severityDiff = severityRank(left.severity) - severityRank(right.severity);
|
||||
if (severityDiff !== 0) {
|
||||
return severityDiff;
|
||||
}
|
||||
const leftAge = left.ageMs ?? -1;
|
||||
const rightAge = right.ageMs ?? -1;
|
||||
if (leftAge !== rightAge) {
|
||||
return rightAge - leftAge;
|
||||
}
|
||||
return left.task.createdAt - right.task.createdAt;
|
||||
}
|
||||
|
||||
export function listTaskAuditFindings(options: TaskAuditOptions = {}): TaskAuditFinding[] {
|
||||
const tasks = options.tasks ?? reconcileInspectableTasks();
|
||||
const now = options.now ?? Date.now();
|
||||
const staleQueuedMs = options.staleQueuedMs ?? DEFAULT_STALE_QUEUED_MS;
|
||||
const staleRunningMs = options.staleRunningMs ?? DEFAULT_STALE_RUNNING_MS;
|
||||
const findings: TaskAuditFinding[] = [];
|
||||
|
||||
for (const task of tasks) {
|
||||
const referenceAt = taskReferenceAt(task);
|
||||
const ageMs = Math.max(0, now - referenceAt);
|
||||
|
||||
if (task.status === "queued" && ageMs >= staleQueuedMs) {
|
||||
findings.push(
|
||||
createFinding({
|
||||
severity: "warn",
|
||||
code: "stale_queued",
|
||||
task,
|
||||
ageMs,
|
||||
detail: "queued task has not advanced recently",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (task.status === "running" && ageMs >= staleRunningMs) {
|
||||
findings.push(
|
||||
createFinding({
|
||||
severity: "error",
|
||||
code: "stale_running",
|
||||
task,
|
||||
ageMs,
|
||||
detail: "running task appears stuck",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (task.status === "lost") {
|
||||
findings.push(
|
||||
createFinding({
|
||||
severity: "error",
|
||||
code: "lost",
|
||||
task,
|
||||
ageMs,
|
||||
detail: task.error?.trim() || "task lost its backing session",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (task.deliveryStatus === "failed" && task.notifyPolicy !== "silent") {
|
||||
findings.push(
|
||||
createFinding({
|
||||
severity: "warn",
|
||||
code: "delivery_failed",
|
||||
task,
|
||||
ageMs,
|
||||
detail: "terminal update delivery failed",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
task.status !== "lost" &&
|
||||
task.status !== "queued" &&
|
||||
task.status !== "running" &&
|
||||
typeof task.cleanupAfter !== "number"
|
||||
) {
|
||||
findings.push(
|
||||
createFinding({
|
||||
severity: "warn",
|
||||
code: "missing_cleanup",
|
||||
task,
|
||||
ageMs,
|
||||
detail: "terminal task is missing cleanupAfter",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const inconsistency = findTimestampInconsistency(task);
|
||||
if (inconsistency) {
|
||||
findings.push(inconsistency);
|
||||
}
|
||||
}
|
||||
|
||||
return findings.toSorted(compareFindings);
|
||||
}
|
||||
|
||||
export function summarizeTaskAuditFindings(findings: Iterable<TaskAuditFinding>): TaskAuditSummary {
|
||||
const summary = createEmptyTaskAuditSummary();
|
||||
for (const finding of findings) {
|
||||
summary.total += 1;
|
||||
summary.byCode[finding.code] += 1;
|
||||
if (finding.severity === "error") {
|
||||
summary.errors += 1;
|
||||
} else {
|
||||
summary.warnings += 1;
|
||||
}
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
@@ -1,249 +0,0 @@
|
||||
import { readAcpSessionEntry } from "../acp/runtime/session-meta.js";
|
||||
import { loadSessionStore, resolveStorePath } from "../config/sessions.js";
|
||||
import { parseAgentSessionKey } from "../routing/session-key.js";
|
||||
import { listTaskAuditFindings, summarizeTaskAuditFindings } from "./task-registry.audit.js";
|
||||
import type { TaskAuditSummary } from "./task-registry.audit.js";
|
||||
import {
|
||||
deleteTaskRecordById,
|
||||
ensureTaskRegistryReady,
|
||||
getTaskById,
|
||||
listTaskRecords,
|
||||
markTaskLostById,
|
||||
maybeDeliverTaskTerminalUpdate,
|
||||
resolveTaskForLookupToken,
|
||||
setTaskCleanupAfterById,
|
||||
} from "./task-registry.js";
|
||||
import { summarizeTaskRecords } from "./task-registry.summary.js";
|
||||
import type { TaskRecord, TaskRegistrySummary } from "./task-registry.types.js";
|
||||
|
||||
const TASK_RECONCILE_GRACE_MS = 5 * 60_000;
|
||||
const TASK_RETENTION_MS = 7 * 24 * 60 * 60_000;
|
||||
const TASK_SWEEP_INTERVAL_MS = 60_000;
|
||||
|
||||
let sweeper: NodeJS.Timeout | null = null;
|
||||
|
||||
export type TaskRegistryMaintenanceSummary = {
|
||||
reconciled: number;
|
||||
cleanupStamped: number;
|
||||
pruned: number;
|
||||
};
|
||||
|
||||
function findSessionEntryByKey(store: Record<string, unknown>, sessionKey: string): unknown {
|
||||
const direct = store[sessionKey];
|
||||
if (direct) {
|
||||
return direct;
|
||||
}
|
||||
const normalized = sessionKey.toLowerCase();
|
||||
for (const [key, entry] of Object.entries(store)) {
|
||||
if (key.toLowerCase() === normalized) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function isActiveTask(task: TaskRecord): boolean {
|
||||
return task.status === "queued" || task.status === "running";
|
||||
}
|
||||
|
||||
function isTerminalTask(task: TaskRecord): boolean {
|
||||
return !isActiveTask(task);
|
||||
}
|
||||
|
||||
function hasLostGraceExpired(task: TaskRecord, now: number): boolean {
|
||||
const referenceAt = task.lastEventAt ?? task.startedAt ?? task.createdAt;
|
||||
return now - referenceAt >= TASK_RECONCILE_GRACE_MS;
|
||||
}
|
||||
|
||||
function hasBackingSession(task: TaskRecord): boolean {
|
||||
const childSessionKey = task.childSessionKey?.trim();
|
||||
if (!childSessionKey) {
|
||||
return true;
|
||||
}
|
||||
if (task.runtime === "acp") {
|
||||
const acpEntry = readAcpSessionEntry({
|
||||
sessionKey: childSessionKey,
|
||||
});
|
||||
if (!acpEntry || acpEntry.storeReadFailed) {
|
||||
return true;
|
||||
}
|
||||
return Boolean(acpEntry.entry);
|
||||
}
|
||||
if (task.runtime === "subagent" || task.runtime === "cli") {
|
||||
const agentId = parseAgentSessionKey(childSessionKey)?.agentId;
|
||||
const storePath = resolveStorePath(undefined, { agentId });
|
||||
const store = loadSessionStore(storePath);
|
||||
return Boolean(findSessionEntryByKey(store, childSessionKey));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function shouldMarkLost(task: TaskRecord, now: number): boolean {
|
||||
if (!isActiveTask(task)) {
|
||||
return false;
|
||||
}
|
||||
if (!hasLostGraceExpired(task, now)) {
|
||||
return false;
|
||||
}
|
||||
return !hasBackingSession(task);
|
||||
}
|
||||
|
||||
function shouldPruneTerminalTask(task: TaskRecord, now: number): boolean {
|
||||
if (!isTerminalTask(task)) {
|
||||
return false;
|
||||
}
|
||||
if (typeof task.cleanupAfter === "number") {
|
||||
return now >= task.cleanupAfter;
|
||||
}
|
||||
const terminalAt = task.endedAt ?? task.lastEventAt ?? task.createdAt;
|
||||
return now - terminalAt >= TASK_RETENTION_MS;
|
||||
}
|
||||
|
||||
function shouldStampCleanupAfter(task: TaskRecord): boolean {
|
||||
return isTerminalTask(task) && typeof task.cleanupAfter !== "number";
|
||||
}
|
||||
|
||||
function resolveCleanupAfter(task: TaskRecord): number {
|
||||
const terminalAt = task.endedAt ?? task.lastEventAt ?? task.createdAt;
|
||||
return terminalAt + TASK_RETENTION_MS;
|
||||
}
|
||||
|
||||
function markTaskLost(task: TaskRecord, now: number): TaskRecord {
|
||||
const cleanupAfter = task.cleanupAfter ?? projectTaskLost(task, now).cleanupAfter;
|
||||
const updated =
|
||||
markTaskLostById({
|
||||
taskId: task.taskId,
|
||||
endedAt: task.endedAt ?? now,
|
||||
lastEventAt: now,
|
||||
error: task.error ?? "backing session missing",
|
||||
cleanupAfter,
|
||||
}) ?? task;
|
||||
void maybeDeliverTaskTerminalUpdate(updated.taskId);
|
||||
return updated;
|
||||
}
|
||||
|
||||
function projectTaskLost(task: TaskRecord, now: number): TaskRecord {
|
||||
const projected: TaskRecord = {
|
||||
...task,
|
||||
status: "lost",
|
||||
endedAt: task.endedAt ?? now,
|
||||
lastEventAt: now,
|
||||
error: task.error ?? "backing session missing",
|
||||
};
|
||||
return {
|
||||
...projected,
|
||||
...(typeof projected.cleanupAfter === "number"
|
||||
? {}
|
||||
: { cleanupAfter: resolveCleanupAfter(projected) }),
|
||||
};
|
||||
}
|
||||
|
||||
export function reconcileTaskRecordForOperatorInspection(task: TaskRecord): TaskRecord {
|
||||
const now = Date.now();
|
||||
if (!shouldMarkLost(task, now)) {
|
||||
return task;
|
||||
}
|
||||
return projectTaskLost(task, now);
|
||||
}
|
||||
|
||||
export function reconcileInspectableTasks(): TaskRecord[] {
|
||||
ensureTaskRegistryReady();
|
||||
return listTaskRecords().map((task) => reconcileTaskRecordForOperatorInspection(task));
|
||||
}
|
||||
|
||||
export function getInspectableTaskRegistrySummary(): TaskRegistrySummary {
|
||||
return summarizeTaskRecords(reconcileInspectableTasks());
|
||||
}
|
||||
|
||||
export function getInspectableTaskAuditSummary(): TaskAuditSummary {
|
||||
const tasks = reconcileInspectableTasks();
|
||||
return summarizeTaskAuditFindings(listTaskAuditFindings({ tasks }));
|
||||
}
|
||||
|
||||
export function reconcileTaskLookupToken(token: string): TaskRecord | undefined {
|
||||
ensureTaskRegistryReady();
|
||||
const task = resolveTaskForLookupToken(token);
|
||||
return task ? reconcileTaskRecordForOperatorInspection(task) : undefined;
|
||||
}
|
||||
|
||||
export function previewTaskRegistryMaintenance(): TaskRegistryMaintenanceSummary {
|
||||
ensureTaskRegistryReady();
|
||||
const now = Date.now();
|
||||
let reconciled = 0;
|
||||
let cleanupStamped = 0;
|
||||
let pruned = 0;
|
||||
for (const task of listTaskRecords()) {
|
||||
if (shouldMarkLost(task, now)) {
|
||||
reconciled += 1;
|
||||
continue;
|
||||
}
|
||||
if (shouldPruneTerminalTask(task, now)) {
|
||||
pruned += 1;
|
||||
continue;
|
||||
}
|
||||
if (shouldStampCleanupAfter(task)) {
|
||||
cleanupStamped += 1;
|
||||
}
|
||||
}
|
||||
return { reconciled, cleanupStamped, pruned };
|
||||
}
|
||||
|
||||
export function runTaskRegistryMaintenance(): TaskRegistryMaintenanceSummary {
|
||||
ensureTaskRegistryReady();
|
||||
const now = Date.now();
|
||||
let reconciled = 0;
|
||||
let cleanupStamped = 0;
|
||||
let pruned = 0;
|
||||
for (const task of listTaskRecords()) {
|
||||
if (shouldMarkLost(task, now)) {
|
||||
const next = markTaskLost(task, now);
|
||||
if (next.status === "lost") {
|
||||
reconciled += 1;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (shouldPruneTerminalTask(task, now) && deleteTaskRecordById(task.taskId)) {
|
||||
pruned += 1;
|
||||
continue;
|
||||
}
|
||||
if (
|
||||
shouldStampCleanupAfter(task) &&
|
||||
setTaskCleanupAfterById({
|
||||
taskId: task.taskId,
|
||||
cleanupAfter: resolveCleanupAfter(task),
|
||||
})
|
||||
) {
|
||||
cleanupStamped += 1;
|
||||
}
|
||||
}
|
||||
return { reconciled, cleanupStamped, pruned };
|
||||
}
|
||||
|
||||
export function sweepTaskRegistry(): TaskRegistryMaintenanceSummary {
|
||||
return runTaskRegistryMaintenance();
|
||||
}
|
||||
|
||||
export function startTaskRegistryMaintenance() {
|
||||
ensureTaskRegistryReady();
|
||||
void sweepTaskRegistry();
|
||||
if (sweeper) {
|
||||
return;
|
||||
}
|
||||
sweeper = setInterval(() => {
|
||||
void sweepTaskRegistry();
|
||||
}, TASK_SWEEP_INTERVAL_MS);
|
||||
sweeper.unref?.();
|
||||
}
|
||||
|
||||
export function stopTaskRegistryMaintenanceForTests() {
|
||||
if (!sweeper) {
|
||||
return;
|
||||
}
|
||||
clearInterval(sweeper);
|
||||
sweeper = null;
|
||||
}
|
||||
|
||||
export function getReconciledTaskById(taskId: string): TaskRecord | undefined {
|
||||
const task = getTaskById(taskId);
|
||||
return task ? reconcileTaskRecordForOperatorInspection(task) : undefined;
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
|
||||
export function resolveTaskStateDir(env: NodeJS.ProcessEnv = process.env): string {
|
||||
const explicit = env.OPENCLAW_STATE_DIR?.trim();
|
||||
if (explicit) {
|
||||
return resolveStateDir(env);
|
||||
}
|
||||
if (env.VITEST || env.NODE_ENV === "test") {
|
||||
return path.join(os.tmpdir(), "openclaw-test-state", String(process.pid));
|
||||
}
|
||||
return resolveStateDir(env);
|
||||
}
|
||||
|
||||
export function resolveTaskRegistryDir(env: NodeJS.ProcessEnv = process.env): string {
|
||||
return path.join(resolveTaskStateDir(env), "tasks");
|
||||
}
|
||||
|
||||
export function resolveTaskRegistrySqlitePath(env: NodeJS.ProcessEnv = process.env): string {
|
||||
return path.join(resolveTaskRegistryDir(env), "runs.sqlite");
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
export {
|
||||
reconcileInspectableTasks,
|
||||
reconcileTaskLookupToken,
|
||||
reconcileTaskRecordForOperatorInspection,
|
||||
} from "./task-registry.maintenance.js";
|
||||
@@ -1,458 +0,0 @@
|
||||
import { chmodSync, existsSync, mkdirSync } from "node:fs";
|
||||
import type { DatabaseSync, StatementSync } from "node:sqlite";
|
||||
import { requireNodeSqlite } from "../infra/node-sqlite.js";
|
||||
import type { DeliveryContext } from "../utils/delivery-context.js";
|
||||
import { resolveTaskRegistryDir, resolveTaskRegistrySqlitePath } from "./task-registry.paths.js";
|
||||
import type { TaskRegistryStoreSnapshot } from "./task-registry.store.js";
|
||||
import type { TaskDeliveryState, TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
type TaskRegistryRow = {
|
||||
task_id: string;
|
||||
runtime: TaskRecord["runtime"];
|
||||
source_id: string | null;
|
||||
requester_session_key: string;
|
||||
parent_flow_id: string | null;
|
||||
child_session_key: string | null;
|
||||
parent_task_id: string | null;
|
||||
agent_id: string | null;
|
||||
run_id: string | null;
|
||||
label: string | null;
|
||||
task: string;
|
||||
status: TaskRecord["status"];
|
||||
delivery_status: TaskRecord["deliveryStatus"];
|
||||
notify_policy: TaskRecord["notifyPolicy"];
|
||||
created_at: number | bigint;
|
||||
started_at: number | bigint | null;
|
||||
ended_at: number | bigint | null;
|
||||
last_event_at: number | bigint | null;
|
||||
cleanup_after: number | bigint | null;
|
||||
error: string | null;
|
||||
progress_summary: string | null;
|
||||
terminal_summary: string | null;
|
||||
terminal_outcome: TaskRecord["terminalOutcome"] | null;
|
||||
};
|
||||
|
||||
type TaskDeliveryStateRow = {
|
||||
task_id: string;
|
||||
requester_origin_json: string | null;
|
||||
last_notified_event_at: number | bigint | null;
|
||||
};
|
||||
|
||||
type TaskRegistryStatements = {
|
||||
selectAll: StatementSync;
|
||||
selectAllDeliveryStates: StatementSync;
|
||||
upsertRow: StatementSync;
|
||||
replaceDeliveryState: StatementSync;
|
||||
deleteRow: StatementSync;
|
||||
deleteDeliveryState: StatementSync;
|
||||
clearRows: StatementSync;
|
||||
clearDeliveryStates: StatementSync;
|
||||
};
|
||||
|
||||
type TaskRegistryDatabase = {
|
||||
db: DatabaseSync;
|
||||
path: string;
|
||||
statements: TaskRegistryStatements;
|
||||
};
|
||||
|
||||
let cachedDatabase: TaskRegistryDatabase | null = null;
|
||||
const TASK_REGISTRY_DIR_MODE = 0o700;
|
||||
const TASK_REGISTRY_FILE_MODE = 0o600;
|
||||
const TASK_REGISTRY_SIDEcar_SUFFIXES = ["", "-shm", "-wal"] as const;
|
||||
|
||||
function normalizeNumber(value: number | bigint | null): number | undefined {
|
||||
if (typeof value === "bigint") {
|
||||
return Number(value);
|
||||
}
|
||||
return typeof value === "number" ? value : undefined;
|
||||
}
|
||||
|
||||
function serializeJson(value: unknown): string | null {
|
||||
return value == null ? null : JSON.stringify(value);
|
||||
}
|
||||
|
||||
function parseJsonValue<T>(raw: string | null): T | undefined {
|
||||
if (!raw?.trim()) {
|
||||
return undefined;
|
||||
}
|
||||
try {
|
||||
return JSON.parse(raw) as T;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function rowToTaskRecord(row: TaskRegistryRow): TaskRecord {
|
||||
const startedAt = normalizeNumber(row.started_at);
|
||||
const endedAt = normalizeNumber(row.ended_at);
|
||||
const lastEventAt = normalizeNumber(row.last_event_at);
|
||||
const cleanupAfter = normalizeNumber(row.cleanup_after);
|
||||
return {
|
||||
taskId: row.task_id,
|
||||
runtime: row.runtime,
|
||||
...(row.source_id ? { sourceId: row.source_id } : {}),
|
||||
requesterSessionKey: row.requester_session_key,
|
||||
...(row.parent_flow_id ? { parentFlowId: row.parent_flow_id } : {}),
|
||||
...(row.child_session_key ? { childSessionKey: row.child_session_key } : {}),
|
||||
...(row.parent_task_id ? { parentTaskId: row.parent_task_id } : {}),
|
||||
...(row.agent_id ? { agentId: row.agent_id } : {}),
|
||||
...(row.run_id ? { runId: row.run_id } : {}),
|
||||
...(row.label ? { label: row.label } : {}),
|
||||
task: row.task,
|
||||
status: row.status,
|
||||
deliveryStatus: row.delivery_status,
|
||||
notifyPolicy: row.notify_policy,
|
||||
createdAt: normalizeNumber(row.created_at) ?? 0,
|
||||
...(startedAt != null ? { startedAt } : {}),
|
||||
...(endedAt != null ? { endedAt } : {}),
|
||||
...(lastEventAt != null ? { lastEventAt } : {}),
|
||||
...(cleanupAfter != null ? { cleanupAfter } : {}),
|
||||
...(row.error ? { error: row.error } : {}),
|
||||
...(row.progress_summary ? { progressSummary: row.progress_summary } : {}),
|
||||
...(row.terminal_summary ? { terminalSummary: row.terminal_summary } : {}),
|
||||
...(row.terminal_outcome ? { terminalOutcome: row.terminal_outcome } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function rowToTaskDeliveryState(row: TaskDeliveryStateRow): TaskDeliveryState {
|
||||
const requesterOrigin = parseJsonValue<DeliveryContext>(row.requester_origin_json);
|
||||
const lastNotifiedEventAt = normalizeNumber(row.last_notified_event_at);
|
||||
return {
|
||||
taskId: row.task_id,
|
||||
...(requesterOrigin ? { requesterOrigin } : {}),
|
||||
...(lastNotifiedEventAt != null ? { lastNotifiedEventAt } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function bindTaskRecord(record: TaskRecord) {
|
||||
return {
|
||||
task_id: record.taskId,
|
||||
runtime: record.runtime,
|
||||
source_id: record.sourceId ?? null,
|
||||
requester_session_key: record.requesterSessionKey,
|
||||
parent_flow_id: record.parentFlowId ?? null,
|
||||
child_session_key: record.childSessionKey ?? null,
|
||||
parent_task_id: record.parentTaskId ?? null,
|
||||
agent_id: record.agentId ?? null,
|
||||
run_id: record.runId ?? null,
|
||||
label: record.label ?? null,
|
||||
task: record.task,
|
||||
status: record.status,
|
||||
delivery_status: record.deliveryStatus,
|
||||
notify_policy: record.notifyPolicy,
|
||||
created_at: record.createdAt,
|
||||
started_at: record.startedAt ?? null,
|
||||
ended_at: record.endedAt ?? null,
|
||||
last_event_at: record.lastEventAt ?? null,
|
||||
cleanup_after: record.cleanupAfter ?? null,
|
||||
error: record.error ?? null,
|
||||
progress_summary: record.progressSummary ?? null,
|
||||
terminal_summary: record.terminalSummary ?? null,
|
||||
terminal_outcome: record.terminalOutcome ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
function bindTaskDeliveryState(state: TaskDeliveryState) {
|
||||
return {
|
||||
task_id: state.taskId,
|
||||
requester_origin_json: serializeJson(state.requesterOrigin),
|
||||
last_notified_event_at: state.lastNotifiedEventAt ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
function createStatements(db: DatabaseSync): TaskRegistryStatements {
|
||||
return {
|
||||
selectAll: db.prepare(`
|
||||
SELECT
|
||||
task_id,
|
||||
runtime,
|
||||
source_id,
|
||||
requester_session_key,
|
||||
parent_flow_id,
|
||||
child_session_key,
|
||||
parent_task_id,
|
||||
agent_id,
|
||||
run_id,
|
||||
label,
|
||||
task,
|
||||
status,
|
||||
delivery_status,
|
||||
notify_policy,
|
||||
created_at,
|
||||
started_at,
|
||||
ended_at,
|
||||
last_event_at,
|
||||
cleanup_after,
|
||||
error,
|
||||
progress_summary,
|
||||
terminal_summary,
|
||||
terminal_outcome
|
||||
FROM task_runs
|
||||
ORDER BY created_at ASC, task_id ASC
|
||||
`),
|
||||
selectAllDeliveryStates: db.prepare(`
|
||||
SELECT
|
||||
task_id,
|
||||
requester_origin_json,
|
||||
last_notified_event_at
|
||||
FROM task_delivery_state
|
||||
ORDER BY task_id ASC
|
||||
`),
|
||||
upsertRow: db.prepare(`
|
||||
INSERT INTO task_runs (
|
||||
task_id,
|
||||
runtime,
|
||||
source_id,
|
||||
requester_session_key,
|
||||
parent_flow_id,
|
||||
child_session_key,
|
||||
parent_task_id,
|
||||
agent_id,
|
||||
run_id,
|
||||
label,
|
||||
task,
|
||||
status,
|
||||
delivery_status,
|
||||
notify_policy,
|
||||
created_at,
|
||||
started_at,
|
||||
ended_at,
|
||||
last_event_at,
|
||||
cleanup_after,
|
||||
error,
|
||||
progress_summary,
|
||||
terminal_summary,
|
||||
terminal_outcome
|
||||
) VALUES (
|
||||
@task_id,
|
||||
@runtime,
|
||||
@source_id,
|
||||
@requester_session_key,
|
||||
@parent_flow_id,
|
||||
@child_session_key,
|
||||
@parent_task_id,
|
||||
@agent_id,
|
||||
@run_id,
|
||||
@label,
|
||||
@task,
|
||||
@status,
|
||||
@delivery_status,
|
||||
@notify_policy,
|
||||
@created_at,
|
||||
@started_at,
|
||||
@ended_at,
|
||||
@last_event_at,
|
||||
@cleanup_after,
|
||||
@error,
|
||||
@progress_summary,
|
||||
@terminal_summary,
|
||||
@terminal_outcome
|
||||
)
|
||||
ON CONFLICT(task_id) DO UPDATE SET
|
||||
runtime = excluded.runtime,
|
||||
source_id = excluded.source_id,
|
||||
requester_session_key = excluded.requester_session_key,
|
||||
parent_flow_id = excluded.parent_flow_id,
|
||||
child_session_key = excluded.child_session_key,
|
||||
parent_task_id = excluded.parent_task_id,
|
||||
agent_id = excluded.agent_id,
|
||||
run_id = excluded.run_id,
|
||||
label = excluded.label,
|
||||
task = excluded.task,
|
||||
status = excluded.status,
|
||||
delivery_status = excluded.delivery_status,
|
||||
notify_policy = excluded.notify_policy,
|
||||
created_at = excluded.created_at,
|
||||
started_at = excluded.started_at,
|
||||
ended_at = excluded.ended_at,
|
||||
last_event_at = excluded.last_event_at,
|
||||
cleanup_after = excluded.cleanup_after,
|
||||
error = excluded.error,
|
||||
progress_summary = excluded.progress_summary,
|
||||
terminal_summary = excluded.terminal_summary,
|
||||
terminal_outcome = excluded.terminal_outcome
|
||||
`),
|
||||
replaceDeliveryState: db.prepare(`
|
||||
INSERT OR REPLACE INTO task_delivery_state (
|
||||
task_id,
|
||||
requester_origin_json,
|
||||
last_notified_event_at
|
||||
) VALUES (
|
||||
@task_id,
|
||||
@requester_origin_json,
|
||||
@last_notified_event_at
|
||||
)
|
||||
`),
|
||||
deleteRow: db.prepare(`DELETE FROM task_runs WHERE task_id = ?`),
|
||||
deleteDeliveryState: db.prepare(`DELETE FROM task_delivery_state WHERE task_id = ?`),
|
||||
clearRows: db.prepare(`DELETE FROM task_runs`),
|
||||
clearDeliveryStates: db.prepare(`DELETE FROM task_delivery_state`),
|
||||
};
|
||||
}
|
||||
|
||||
function ensureSchema(db: DatabaseSync) {
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS task_runs (
|
||||
task_id TEXT PRIMARY KEY,
|
||||
runtime TEXT NOT NULL,
|
||||
source_id TEXT,
|
||||
requester_session_key TEXT NOT NULL,
|
||||
parent_flow_id TEXT,
|
||||
child_session_key TEXT,
|
||||
parent_task_id TEXT,
|
||||
agent_id TEXT,
|
||||
run_id TEXT,
|
||||
label TEXT,
|
||||
task TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
delivery_status TEXT NOT NULL,
|
||||
notify_policy TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
started_at INTEGER,
|
||||
ended_at INTEGER,
|
||||
last_event_at INTEGER,
|
||||
cleanup_after INTEGER,
|
||||
error TEXT,
|
||||
progress_summary TEXT,
|
||||
terminal_summary TEXT,
|
||||
terminal_outcome TEXT
|
||||
);
|
||||
`);
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS task_delivery_state (
|
||||
task_id TEXT PRIMARY KEY,
|
||||
requester_origin_json TEXT,
|
||||
last_notified_event_at INTEGER
|
||||
);
|
||||
`);
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_run_id ON task_runs(run_id);`);
|
||||
ensureColumn(db, "task_runs", "parent_flow_id", "TEXT");
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_status ON task_runs(status);`);
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_runtime_status ON task_runs(runtime, status);`);
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_cleanup_after ON task_runs(cleanup_after);`);
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_last_event_at ON task_runs(last_event_at);`);
|
||||
db.exec(`CREATE INDEX IF NOT EXISTS idx_task_runs_parent_flow_id ON task_runs(parent_flow_id);`);
|
||||
db.exec(
|
||||
`CREATE INDEX IF NOT EXISTS idx_task_runs_child_session_key ON task_runs(child_session_key);`,
|
||||
);
|
||||
}
|
||||
|
||||
function ensureColumn(
|
||||
db: DatabaseSync,
|
||||
tableName: string,
|
||||
columnName: string,
|
||||
columnDefinition: string,
|
||||
) {
|
||||
const rows = db.prepare(`PRAGMA table_info(${tableName})`).all() as Array<{ name?: string }>;
|
||||
if (rows.some((row) => row.name === columnName)) {
|
||||
return;
|
||||
}
|
||||
db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnDefinition};`);
|
||||
}
|
||||
|
||||
function ensureTaskRegistryPermissions(pathname: string) {
|
||||
const dir = resolveTaskRegistryDir(process.env);
|
||||
mkdirSync(dir, { recursive: true, mode: TASK_REGISTRY_DIR_MODE });
|
||||
chmodSync(dir, TASK_REGISTRY_DIR_MODE);
|
||||
for (const suffix of TASK_REGISTRY_SIDEcar_SUFFIXES) {
|
||||
const candidate = `${pathname}${suffix}`;
|
||||
if (!existsSync(candidate)) {
|
||||
continue;
|
||||
}
|
||||
chmodSync(candidate, TASK_REGISTRY_FILE_MODE);
|
||||
}
|
||||
}
|
||||
|
||||
function openTaskRegistryDatabase(): TaskRegistryDatabase {
|
||||
const pathname = resolveTaskRegistrySqlitePath(process.env);
|
||||
if (cachedDatabase && cachedDatabase.path === pathname) {
|
||||
return cachedDatabase;
|
||||
}
|
||||
if (cachedDatabase) {
|
||||
cachedDatabase.db.close();
|
||||
cachedDatabase = null;
|
||||
}
|
||||
ensureTaskRegistryPermissions(pathname);
|
||||
const { DatabaseSync } = requireNodeSqlite();
|
||||
const db = new DatabaseSync(pathname);
|
||||
db.exec(`PRAGMA journal_mode = WAL;`);
|
||||
db.exec(`PRAGMA synchronous = NORMAL;`);
|
||||
db.exec(`PRAGMA busy_timeout = 5000;`);
|
||||
ensureSchema(db);
|
||||
ensureTaskRegistryPermissions(pathname);
|
||||
cachedDatabase = {
|
||||
db,
|
||||
path: pathname,
|
||||
statements: createStatements(db),
|
||||
};
|
||||
return cachedDatabase;
|
||||
}
|
||||
|
||||
function withWriteTransaction(write: (statements: TaskRegistryStatements) => void) {
|
||||
const { db, path, statements } = openTaskRegistryDatabase();
|
||||
db.exec("BEGIN IMMEDIATE");
|
||||
try {
|
||||
write(statements);
|
||||
db.exec("COMMIT");
|
||||
ensureTaskRegistryPermissions(path);
|
||||
} catch (error) {
|
||||
db.exec("ROLLBACK");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function loadTaskRegistryStateFromSqlite(): TaskRegistryStoreSnapshot {
|
||||
const { statements } = openTaskRegistryDatabase();
|
||||
const taskRows = statements.selectAll.all() as TaskRegistryRow[];
|
||||
const deliveryRows = statements.selectAllDeliveryStates.all() as TaskDeliveryStateRow[];
|
||||
return {
|
||||
tasks: new Map(taskRows.map((row) => [row.task_id, rowToTaskRecord(row)])),
|
||||
deliveryStates: new Map(deliveryRows.map((row) => [row.task_id, rowToTaskDeliveryState(row)])),
|
||||
};
|
||||
}
|
||||
|
||||
export function saveTaskRegistryStateToSqlite(snapshot: TaskRegistryStoreSnapshot) {
|
||||
withWriteTransaction((statements) => {
|
||||
statements.clearDeliveryStates.run();
|
||||
statements.clearRows.run();
|
||||
for (const task of snapshot.tasks.values()) {
|
||||
statements.upsertRow.run(bindTaskRecord(task));
|
||||
}
|
||||
for (const state of snapshot.deliveryStates.values()) {
|
||||
statements.replaceDeliveryState.run(bindTaskDeliveryState(state));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function upsertTaskRegistryRecordToSqlite(task: TaskRecord) {
|
||||
const store = openTaskRegistryDatabase();
|
||||
store.statements.upsertRow.run(bindTaskRecord(task));
|
||||
ensureTaskRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function deleteTaskRegistryRecordFromSqlite(taskId: string) {
|
||||
const store = openTaskRegistryDatabase();
|
||||
store.statements.deleteRow.run(taskId);
|
||||
store.statements.deleteDeliveryState.run(taskId);
|
||||
ensureTaskRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function upsertTaskDeliveryStateToSqlite(state: TaskDeliveryState) {
|
||||
const store = openTaskRegistryDatabase();
|
||||
store.statements.replaceDeliveryState.run(bindTaskDeliveryState(state));
|
||||
ensureTaskRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function deleteTaskDeliveryStateFromSqlite(taskId: string) {
|
||||
const store = openTaskRegistryDatabase();
|
||||
store.statements.deleteDeliveryState.run(taskId);
|
||||
ensureTaskRegistryPermissions(store.path);
|
||||
}
|
||||
|
||||
export function closeTaskRegistrySqliteStore() {
|
||||
if (!cachedDatabase) {
|
||||
return;
|
||||
}
|
||||
cachedDatabase.db.close();
|
||||
cachedDatabase = null;
|
||||
}
|
||||
@@ -1,187 +0,0 @@
|
||||
import { mkdtempSync, rmSync, statSync } from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
createTaskRecord,
|
||||
deleteTaskRecordById,
|
||||
findTaskByRunId,
|
||||
resetTaskRegistryForTests,
|
||||
} from "./task-registry.js";
|
||||
import { resolveTaskRegistryDir, resolveTaskRegistrySqlitePath } from "./task-registry.paths.js";
|
||||
import { configureTaskRegistryRuntime, type TaskRegistryHookEvent } from "./task-registry.store.js";
|
||||
import type { TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
function createStoredTask(): TaskRecord {
|
||||
return {
|
||||
taskId: "task-restored",
|
||||
runtime: "acp",
|
||||
sourceId: "run-restored",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:acp:restored",
|
||||
runId: "run-restored",
|
||||
task: "Restored task",
|
||||
status: "running",
|
||||
deliveryStatus: "pending",
|
||||
notifyPolicy: "done_only",
|
||||
createdAt: 100,
|
||||
lastEventAt: 100,
|
||||
};
|
||||
}
|
||||
|
||||
describe("task-registry store runtime", () => {
|
||||
afterEach(() => {
|
||||
delete process.env.OPENCLAW_STATE_DIR;
|
||||
resetTaskRegistryForTests();
|
||||
});
|
||||
|
||||
it("uses the configured task store for restore and save", () => {
|
||||
const storedTask = createStoredTask();
|
||||
const loadSnapshot = vi.fn(() => ({
|
||||
tasks: new Map([[storedTask.taskId, storedTask]]),
|
||||
deliveryStates: new Map(),
|
||||
}));
|
||||
const saveSnapshot = vi.fn();
|
||||
configureTaskRegistryRuntime({
|
||||
store: {
|
||||
loadSnapshot,
|
||||
saveSnapshot,
|
||||
},
|
||||
});
|
||||
|
||||
expect(findTaskByRunId("run-restored")).toMatchObject({
|
||||
taskId: "task-restored",
|
||||
task: "Restored task",
|
||||
});
|
||||
expect(loadSnapshot).toHaveBeenCalledTimes(1);
|
||||
|
||||
createTaskRecord({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:acp:new",
|
||||
runId: "run-new",
|
||||
task: "New task",
|
||||
status: "running",
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
expect(saveSnapshot).toHaveBeenCalled();
|
||||
const latestSnapshot = saveSnapshot.mock.calls.at(-1)?.[0] as {
|
||||
tasks: ReadonlyMap<string, TaskRecord>;
|
||||
};
|
||||
expect(latestSnapshot.tasks.size).toBe(2);
|
||||
expect(latestSnapshot.tasks.get("task-restored")?.task).toBe("Restored task");
|
||||
});
|
||||
|
||||
it("emits incremental hook events for restore, mutation, and delete", () => {
|
||||
const events: TaskRegistryHookEvent[] = [];
|
||||
configureTaskRegistryRuntime({
|
||||
store: {
|
||||
loadSnapshot: () => ({
|
||||
tasks: new Map([[createStoredTask().taskId, createStoredTask()]]),
|
||||
deliveryStates: new Map(),
|
||||
}),
|
||||
saveSnapshot: () => {},
|
||||
},
|
||||
hooks: {
|
||||
onEvent: (event) => {
|
||||
events.push(event);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(findTaskByRunId("run-restored")).toBeTruthy();
|
||||
const created = createTaskRecord({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
childSessionKey: "agent:codex:acp:new",
|
||||
runId: "run-new",
|
||||
task: "New task",
|
||||
status: "running",
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
expect(deleteTaskRecordById(created.taskId)).toBe(true);
|
||||
|
||||
expect(events.map((event) => event.kind)).toEqual(["restored", "upserted", "deleted"]);
|
||||
expect(events[0]).toMatchObject({
|
||||
kind: "restored",
|
||||
tasks: [expect.objectContaining({ taskId: "task-restored" })],
|
||||
});
|
||||
expect(events[1]).toMatchObject({
|
||||
kind: "upserted",
|
||||
task: expect.objectContaining({ taskId: created.taskId }),
|
||||
});
|
||||
expect(events[2]).toMatchObject({
|
||||
kind: "deleted",
|
||||
taskId: created.taskId,
|
||||
});
|
||||
});
|
||||
|
||||
it("restores persisted tasks from the default sqlite store", () => {
|
||||
const created = createTaskRecord({
|
||||
runtime: "cron",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
sourceId: "job-123",
|
||||
runId: "run-sqlite",
|
||||
task: "Run nightly cron",
|
||||
status: "running",
|
||||
deliveryStatus: "not_applicable",
|
||||
notifyPolicy: "silent",
|
||||
});
|
||||
|
||||
resetTaskRegistryForTests({ persist: false });
|
||||
|
||||
expect(findTaskByRunId("run-sqlite")).toMatchObject({
|
||||
taskId: created.taskId,
|
||||
sourceId: "job-123",
|
||||
task: "Run nightly cron",
|
||||
});
|
||||
});
|
||||
|
||||
it("persists parent flow linkage on task records", () => {
|
||||
const created = createTaskRecord({
|
||||
runtime: "acp",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
parentFlowId: "flow-123",
|
||||
runId: "run-linked",
|
||||
task: "Linked task",
|
||||
status: "running",
|
||||
deliveryStatus: "pending",
|
||||
});
|
||||
|
||||
resetTaskRegistryForTests({ persist: false });
|
||||
|
||||
expect(findTaskByRunId("run-linked")).toMatchObject({
|
||||
taskId: created.taskId,
|
||||
parentFlowId: "flow-123",
|
||||
task: "Linked task",
|
||||
});
|
||||
});
|
||||
|
||||
it("hardens the sqlite task store directory and file modes", () => {
|
||||
if (process.platform === "win32") {
|
||||
return;
|
||||
}
|
||||
const stateDir = mkdtempSync(path.join(os.tmpdir(), "openclaw-task-store-"));
|
||||
process.env.OPENCLAW_STATE_DIR = stateDir;
|
||||
|
||||
createTaskRecord({
|
||||
runtime: "cron",
|
||||
requesterSessionKey: "agent:main:main",
|
||||
sourceId: "job-456",
|
||||
runId: "run-perms",
|
||||
task: "Run secured cron",
|
||||
status: "running",
|
||||
deliveryStatus: "not_applicable",
|
||||
notifyPolicy: "silent",
|
||||
});
|
||||
|
||||
const registryDir = resolveTaskRegistryDir(process.env);
|
||||
const sqlitePath = resolveTaskRegistrySqlitePath(process.env);
|
||||
expect(statSync(registryDir).mode & 0o777).toBe(0o700);
|
||||
expect(statSync(sqlitePath).mode & 0o777).toBe(0o600);
|
||||
|
||||
resetTaskRegistryForTests();
|
||||
rmSync(stateDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
@@ -1,85 +0,0 @@
|
||||
import {
|
||||
closeTaskRegistrySqliteStore,
|
||||
deleteTaskDeliveryStateFromSqlite,
|
||||
deleteTaskRegistryRecordFromSqlite,
|
||||
loadTaskRegistryStateFromSqlite,
|
||||
saveTaskRegistryStateToSqlite,
|
||||
upsertTaskDeliveryStateToSqlite,
|
||||
upsertTaskRegistryRecordToSqlite,
|
||||
} from "./task-registry.store.sqlite.js";
|
||||
import type { TaskDeliveryState, TaskRecord } from "./task-registry.types.js";
|
||||
|
||||
export type TaskRegistryStoreSnapshot = {
|
||||
tasks: Map<string, TaskRecord>;
|
||||
deliveryStates: Map<string, TaskDeliveryState>;
|
||||
};
|
||||
|
||||
export type TaskRegistryStore = {
|
||||
loadSnapshot: () => TaskRegistryStoreSnapshot;
|
||||
saveSnapshot: (snapshot: TaskRegistryStoreSnapshot) => void;
|
||||
upsertTask?: (task: TaskRecord) => void;
|
||||
deleteTask?: (taskId: string) => void;
|
||||
upsertDeliveryState?: (state: TaskDeliveryState) => void;
|
||||
deleteDeliveryState?: (taskId: string) => void;
|
||||
close?: () => void;
|
||||
};
|
||||
|
||||
export type TaskRegistryHookEvent =
|
||||
| {
|
||||
kind: "restored";
|
||||
tasks: TaskRecord[];
|
||||
}
|
||||
| {
|
||||
kind: "upserted";
|
||||
task: TaskRecord;
|
||||
previous?: TaskRecord;
|
||||
}
|
||||
| {
|
||||
kind: "deleted";
|
||||
taskId: string;
|
||||
previous: TaskRecord;
|
||||
};
|
||||
|
||||
export type TaskRegistryHooks = {
|
||||
// Hooks are incremental/observational. Snapshot persistence belongs to TaskRegistryStore.
|
||||
onEvent?: (event: TaskRegistryHookEvent) => void;
|
||||
};
|
||||
|
||||
const defaultTaskRegistryStore: TaskRegistryStore = {
|
||||
loadSnapshot: loadTaskRegistryStateFromSqlite,
|
||||
saveSnapshot: saveTaskRegistryStateToSqlite,
|
||||
upsertTask: upsertTaskRegistryRecordToSqlite,
|
||||
deleteTask: deleteTaskRegistryRecordFromSqlite,
|
||||
upsertDeliveryState: upsertTaskDeliveryStateToSqlite,
|
||||
deleteDeliveryState: deleteTaskDeliveryStateFromSqlite,
|
||||
close: closeTaskRegistrySqliteStore,
|
||||
};
|
||||
|
||||
let configuredTaskRegistryStore: TaskRegistryStore = defaultTaskRegistryStore;
|
||||
let configuredTaskRegistryHooks: TaskRegistryHooks | null = null;
|
||||
|
||||
export function getTaskRegistryStore(): TaskRegistryStore {
|
||||
return configuredTaskRegistryStore;
|
||||
}
|
||||
|
||||
export function getTaskRegistryHooks(): TaskRegistryHooks | null {
|
||||
return configuredTaskRegistryHooks;
|
||||
}
|
||||
|
||||
export function configureTaskRegistryRuntime(params: {
|
||||
store?: TaskRegistryStore;
|
||||
hooks?: TaskRegistryHooks | null;
|
||||
}) {
|
||||
if (params.store) {
|
||||
configuredTaskRegistryStore = params.store;
|
||||
}
|
||||
if ("hooks" in params) {
|
||||
configuredTaskRegistryHooks = params.hooks ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
export function resetTaskRegistryRuntimeForTests() {
|
||||
configuredTaskRegistryStore.close?.();
|
||||
configuredTaskRegistryStore = defaultTaskRegistryStore;
|
||||
configuredTaskRegistryHooks = null;
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
import type {
|
||||
TaskRecord,
|
||||
TaskRegistrySummary,
|
||||
TaskRuntimeCounts,
|
||||
TaskStatusCounts,
|
||||
} from "./task-registry.types.js";
|
||||
|
||||
function createEmptyTaskStatusCounts(): TaskStatusCounts {
|
||||
return {
|
||||
queued: 0,
|
||||
running: 0,
|
||||
succeeded: 0,
|
||||
failed: 0,
|
||||
timed_out: 0,
|
||||
cancelled: 0,
|
||||
lost: 0,
|
||||
};
|
||||
}
|
||||
|
||||
function createEmptyTaskRuntimeCounts(): TaskRuntimeCounts {
|
||||
return {
|
||||
subagent: 0,
|
||||
acp: 0,
|
||||
cli: 0,
|
||||
cron: 0,
|
||||
};
|
||||
}
|
||||
|
||||
export function createEmptyTaskRegistrySummary(): TaskRegistrySummary {
|
||||
return {
|
||||
total: 0,
|
||||
active: 0,
|
||||
terminal: 0,
|
||||
failures: 0,
|
||||
byStatus: createEmptyTaskStatusCounts(),
|
||||
byRuntime: createEmptyTaskRuntimeCounts(),
|
||||
};
|
||||
}
|
||||
|
||||
export function summarizeTaskRecords(records: Iterable<TaskRecord>): TaskRegistrySummary {
|
||||
const summary = createEmptyTaskRegistrySummary();
|
||||
for (const task of records) {
|
||||
summary.total += 1;
|
||||
summary.byStatus[task.status] += 1;
|
||||
summary.byRuntime[task.runtime] += 1;
|
||||
if (task.status === "queued" || task.status === "running") {
|
||||
summary.active += 1;
|
||||
} else {
|
||||
summary.terminal += 1;
|
||||
}
|
||||
if (task.status === "failed" || task.status === "timed_out" || task.status === "lost") {
|
||||
summary.failures += 1;
|
||||
}
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,81 +0,0 @@
|
||||
import type { DeliveryContext } from "../utils/delivery-context.js";
|
||||
|
||||
/** Execution backend a task runs on. */
export type TaskRuntime = "subagent" | "acp" | "cli" | "cron";

/**
 * Task lifecycle states. "queued" and "running" are in-flight; the remaining
 * values are terminal.
 */
export type TaskStatus =
  | "queued"
  | "running"
  | "succeeded"
  | "failed"
  | "timed_out"
  | "cancelled"
  | "lost";

/** Outcome of delivering a task's result/notification back to its requester. */
export type TaskDeliveryStatus =
  | "pending"
  | "delivered"
  | "session_queued"
  | "failed"
  | "parent_missing"
  | "not_applicable";

/** How chatty task notifications should be. */
export type TaskNotifyPolicy = "done_only" | "state_changes" | "silent";

/** Outcome recorded when a task reaches a terminal state. */
export type TaskTerminalOutcome = "succeeded" | "blocked";

/** Per-status task counts (one numeric bucket for every TaskStatus). */
export type TaskStatusCounts = Record<TaskStatus, number>;
/** Per-runtime task counts (one numeric bucket for every TaskRuntime). */
export type TaskRuntimeCounts = Record<TaskRuntime, number>;

/** Aggregate view over a collection of TaskRecords. */
export type TaskRegistrySummary = {
  total: number;
  // "queued" + "running"
  active: number;
  // everything not queued/running
  terminal: number;
  // "failed" + "timed_out" + "lost"
  failures: number;
  byStatus: TaskStatusCounts;
  byRuntime: TaskRuntimeCounts;
};

/** Event kinds: every status transition plus an intermediate "progress" tick. */
export type TaskEventKind = TaskStatus | "progress";

/** One timestamped entry in a task's event history. */
export type TaskEventRecord = {
  // Numeric timestamp — presumably epoch milliseconds; TODO confirm against writers.
  at: number;
  kind: TaskEventKind;
  summary?: string;
};

/** Delivery bookkeeping tracked separately from the TaskRecord itself. */
export type TaskDeliveryState = {
  taskId: string;
  // Where the requester should be reached to deliver results.
  requesterOrigin?: DeliveryContext;
  lastNotifiedEventAt?: number;
};

/** Canonical record for a single task tracked by the registry. */
export type TaskRecord = {
  taskId: string;
  runtime: TaskRuntime;
  sourceId?: string;
  // Session that requested this task.
  requesterSessionKey: string;
  parentFlowId?: string;
  childSessionKey?: string;
  parentTaskId?: string;
  agentId?: string;
  runId?: string;
  label?: string;
  // The task prompt/description itself.
  task: string;
  status: TaskStatus;
  deliveryStatus: TaskDeliveryStatus;
  notifyPolicy: TaskNotifyPolicy;
  // Timestamps are numeric — presumably epoch milliseconds; TODO confirm.
  createdAt: number;
  startedAt?: number;
  endedAt?: number;
  lastEventAt?: number;
  cleanupAfter?: number;
  error?: string;
  progressSummary?: string;
  terminalSummary?: string;
  terminalOutcome?: TaskTerminalOutcome;
};

/** Full persistable snapshot: all tasks plus their delivery states. */
export type TaskRegistrySnapshot = {
  tasks: TaskRecord[];
  deliveryStates: TaskDeliveryState[];
};
|
||||
@@ -2,14 +2,14 @@ import {
|
||||
configureFlowRegistryRuntime,
|
||||
type FlowRegistryStore,
|
||||
type FlowRegistryStoreSnapshot,
|
||||
} from "../tasks/flow-registry.store.js";
|
||||
import type { FlowRecord } from "../tasks/flow-registry.types.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import type { FlowRecord } from "openclaw/plugin-sdk/tasks";
|
||||
import {
|
||||
configureTaskRegistryRuntime,
|
||||
type TaskRegistryStore,
|
||||
type TaskRegistryStoreSnapshot,
|
||||
} from "../tasks/task-registry.store.js";
|
||||
import type { TaskDeliveryState, TaskRecord } from "../tasks/task-registry.types.js";
|
||||
} from "openclaw/plugin-sdk/tasks";
|
||||
import type { TaskDeliveryState, TaskRecord } from "openclaw/plugin-sdk/tasks";
|
||||
|
||||
function cloneTask(task: TaskRecord): TaskRecord {
|
||||
return { ...task };
|
||||
|
||||
Reference in New Issue
Block a user