mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 15:50:46 +00:00
feat(openai): enable native web search
This commit is contained in:
@@ -6,6 +6,7 @@ Docs: https://docs.openclaw.ai
|
|||||||
|
|
||||||
### Changes
|
### Changes
|
||||||
|
|
||||||
|
- OpenAI/Responses: use OpenAI's native `web_search` tool automatically for direct OpenAI Responses models when web search is enabled and no managed search provider is pinned; explicit providers such as Brave keep the managed `web_search` tool.
|
||||||
- Models/commands: add `/models add <provider> <modelId>` so you can register a model from chat and use it without restarting the gateway; keep `/models` as a simple provider browser while adding clearer add guidance and copy-friendly command examples. (#70211) Thanks @Takhoffman.
|
- Models/commands: add `/models add <provider> <modelId>` so you can register a model from chat and use it without restarting the gateway; keep `/models` as a simple provider browser while adding clearer add guidance and copy-friendly command examples. (#70211) Thanks @Takhoffman.
|
||||||
- Pi/models: update the bundled pi packages to `0.68.1` and let the OpenCode Go catalog come from pi instead of plugin-maintained model aliases, adding the refreshed `opencode-go/kimi-k2.6`, Qwen, GLM, MiMo, and MiniMax entries.
|
- Pi/models: update the bundled pi packages to `0.68.1` and let the OpenCode Go catalog come from pi instead of plugin-maintained model aliases, adding the refreshed `opencode-go/kimi-k2.6`, Qwen, GLM, MiMo, and MiniMax entries.
|
||||||
- CLI/doctor plugins: lazy-load doctor plugin paths and prefer installed plugin `dist/*` runtime entries over source-adjacent JavaScript fallbacks, reducing the measured `doctor --non-interactive` runtime by about 74% while keeping cold doctor startup on built plugin artifacts. (#69840) Thanks @gumadeiras.
|
- CLI/doctor plugins: lazy-load doctor plugin paths and prefer installed plugin `dist/*` runtime entries over source-adjacent JavaScript fallbacks, reducing the measured `doctor --non-interactive` runtime by about 74% while keeping cold doctor startup on built plugin artifacts. (#69840) Thanks @gumadeiras.
|
||||||
|
|||||||
@@ -116,6 +116,10 @@ local while `web_search` and `x_search` can use xAI Responses under the hood.
|
|||||||
|
|
||||||
## Auto-detection
|
## Auto-detection
|
||||||
|
|
||||||
|
## Native OpenAI web search
|
||||||
|
|
||||||
|
Direct OpenAI Responses models use OpenAI's hosted `web_search` tool automatically when OpenClaw web search is enabled and no managed provider is pinned. This is provider-owned behavior in the bundled OpenAI plugin and only applies to native OpenAI API traffic, not OpenAI-compatible proxy base URLs or Azure routes. Set `tools.web.search.provider` to another provider such as `brave` to keep the managed `web_search` tool for OpenAI models, or set `tools.web.search.enabled: false` to disable both managed search and native OpenAI search.
|
||||||
|
|
||||||
## Native Codex web search
|
## Native Codex web search
|
||||||
|
|
||||||
Codex-capable models can optionally use the provider-native Responses `web_search` tool instead of OpenClaw's managed `web_search` function.
|
Codex-capable models can optionally use the provider-native Responses `web_search` tool instead of OpenClaw's managed `web_search` function.
|
||||||
|
|||||||
93
extensions/openai/native-web-search.ts
Normal file
93
extensions/openai/native-web-search.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import type { StreamFn } from "@mariozechner/pi-agent-core";
|
||||||
|
import { streamSimple } from "@mariozechner/pi-ai";
|
||||||
|
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-runtime";
|
||||||
|
import { normalizeProviderId } from "openclaw/plugin-sdk/provider-model-shared";
|
||||||
|
import { streamWithPayloadPatch } from "openclaw/plugin-sdk/provider-stream-shared";
|
||||||
|
import { isOpenAIApiBaseUrl } from "./base-url.js";
|
||||||
|
|
||||||
|
// Hosted-tool descriptor the OpenAI Responses API expects for native web search.
const OPENAI_WEB_SEARCH_TOOL = { type: "web_search" } as const;

// Outcome of attempting to patch a Responses payload with the native tool.
export type OpenAINativeWebSearchPatchResult =
  | "payload_not_object" // payload was not a plain object; nothing was changed
  | "native_tool_already_present" // native tool already listed (a managed duplicate may have been removed)
  | "injected"; // native web_search tool was appended to payload.tools
|
||||||
|
|
||||||
|
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||||
|
return !!value && typeof value === "object" && !Array.isArray(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isOpenAINativeWebSearchEligibleModel(model: {
|
||||||
|
api?: unknown;
|
||||||
|
provider?: unknown;
|
||||||
|
baseUrl?: unknown;
|
||||||
|
}): boolean {
|
||||||
|
const provider = typeof model.provider === "string" ? model.provider : undefined;
|
||||||
|
if (model.api !== "openai-responses" || !provider || normalizeProviderId(provider) !== "openai") {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const baseUrl = typeof model.baseUrl === "string" ? model.baseUrl : undefined;
|
||||||
|
return !baseUrl || isOpenAIApiBaseUrl(baseUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
function shouldUseOpenAINativeWebSearchProvider(config: OpenClawConfig | undefined): boolean {
|
||||||
|
const provider = config?.tools?.web?.search?.provider;
|
||||||
|
if (typeof provider !== "string") {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
const normalized = provider.trim().toLowerCase();
|
||||||
|
return normalized === "" || normalized === "auto" || normalized === "openai";
|
||||||
|
}
|
||||||
|
|
||||||
|
export function shouldEnableOpenAINativeWebSearch(params: {
|
||||||
|
config?: OpenClawConfig;
|
||||||
|
model: { api?: unknown; provider?: unknown; baseUrl?: unknown };
|
||||||
|
}): boolean {
|
||||||
|
return (
|
||||||
|
params.config?.tools?.web?.search?.enabled !== false &&
|
||||||
|
shouldUseOpenAINativeWebSearchProvider(params.config) &&
|
||||||
|
isOpenAINativeWebSearchEligibleModel(params.model)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isNativeWebSearchTool(tool: unknown): boolean {
|
||||||
|
return isRecord(tool) && tool.type === OPENAI_WEB_SEARCH_TOOL.type;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isManagedWebSearchTool(tool: unknown): boolean {
|
||||||
|
return isRecord(tool) && tool.type === "function" && tool.name === OPENAI_WEB_SEARCH_TOOL.type;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function patchOpenAINativeWebSearchPayload(
|
||||||
|
payload: unknown,
|
||||||
|
): OpenAINativeWebSearchPatchResult {
|
||||||
|
if (!isRecord(payload)) {
|
||||||
|
return "payload_not_object";
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingTools = Array.isArray(payload.tools) ? payload.tools : [];
|
||||||
|
const filteredTools = existingTools.filter((tool) => !isManagedWebSearchTool(tool));
|
||||||
|
if (filteredTools.some(isNativeWebSearchTool)) {
|
||||||
|
if (filteredTools.length !== existingTools.length) {
|
||||||
|
payload.tools = filteredTools;
|
||||||
|
}
|
||||||
|
return "native_tool_already_present";
|
||||||
|
}
|
||||||
|
|
||||||
|
payload.tools = [...filteredTools, OPENAI_WEB_SEARCH_TOOL];
|
||||||
|
return "injected";
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createOpenAINativeWebSearchWrapper(
|
||||||
|
baseStreamFn: StreamFn | undefined,
|
||||||
|
params: { config?: OpenClawConfig },
|
||||||
|
): StreamFn {
|
||||||
|
const underlying = baseStreamFn ?? streamSimple;
|
||||||
|
return (model, context, options) => {
|
||||||
|
if (!shouldEnableOpenAINativeWebSearch({ config: params.config, model })) {
|
||||||
|
return underlying(model, context, options);
|
||||||
|
}
|
||||||
|
return streamWithPayloadPatch(underlying, model, context, options, (payload) => {
|
||||||
|
patchOpenAINativeWebSearchPayload(payload);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -355,6 +355,102 @@ describe("buildOpenAIProvider", () => {
|
|||||||
expect(result.payload.service_tier).toBe("priority");
|
expect(result.payload.service_tier).toBe("priority");
|
||||||
expect(result.payload.text).toEqual({ verbosity: "low" });
|
expect(result.payload.text).toEqual({ verbosity: "low" });
|
||||||
expect(result.payload.reasoning).toEqual({ effort: "none" });
|
expect(result.payload.reasoning).toEqual({ effort: "none" });
|
||||||
|
expect(result.payload.tools).toEqual([{ type: "web_search" }]);
|
||||||
|
});
|
||||||
|
|
||||||
|
// The wrapper must replace OpenClaw's managed web_search function tool with
// OpenAI's native { type: "web_search" } hosted tool, while leaving unrelated
// function tools (e.g. "read") untouched and in their original position.
it("uses native OpenAI web search instead of the managed web_search function", () => {
  const provider = buildOpenAIProvider();
  const wrap = provider.wrapStreamFn;
  expect(wrap).toBeTypeOf("function");
  if (!wrap) {
    throw new Error("expected OpenAI wrapper");
  }

  const result = runWrappedPayloadCase({
    wrap,
    provider: "openai",
    modelId: "gpt-5.4",
    model: {
      api: "openai-responses",
      provider: "openai",
      id: "gpt-5.4",
      baseUrl: "https://api.openai.com/v1",
    } as Model<"openai-responses">,
    payload: {
      tools: [
        { type: "function", name: "read" },
        { type: "function", name: "web_search" },
      ],
    },
  });

  // Managed web_search removed; native tool appended after surviving tools.
  expect(result.payload.tools).toEqual([
    { type: "function", name: "read" },
    { type: "web_search" },
  ]);
});
|
||||||
|
|
||||||
|
// Neither `tools.web.search.enabled: false` nor a non-OpenAI base URL (proxy)
// may trigger native injection; the managed function tool must survive as-is.
it("does not inject native OpenAI web search when disabled or proxied", () => {
  const provider = buildOpenAIProvider();
  const wrap = provider.wrapStreamFn;
  expect(wrap).toBeTypeOf("function");
  if (!wrap) {
    throw new Error("expected OpenAI wrapper");
  }

  // Case 1: web search disabled globally — payload must pass through untouched.
  const disabled = runWrappedPayloadCase({
    wrap,
    provider: "openai",
    modelId: "gpt-5.4",
    cfg: { tools: { web: { search: { enabled: false } } } },
    model: {
      api: "openai-responses",
      provider: "openai",
      id: "gpt-5.4",
      baseUrl: "https://api.openai.com/v1",
    } as Model<"openai-responses">,
    payload: { tools: [{ type: "function", name: "web_search" }] },
  });
  // Case 2: non-OpenAI base URL — native search applies only to native OpenAI
  // API traffic, not OpenAI-compatible proxies.
  const proxied = runWrappedPayloadCase({
    wrap,
    provider: "openai",
    modelId: "gpt-5.4",
    model: {
      api: "openai-responses",
      provider: "openai",
      id: "gpt-5.4",
      baseUrl: "https://example-proxy.invalid/v1",
    } as Model<"openai-responses">,
    payload: { tools: [{ type: "function", name: "web_search" }] },
  });

  expect(disabled.payload.tools).toEqual([{ type: "function", name: "web_search" }]);
  expect(proxied.payload.tools).toEqual([{ type: "function", name: "web_search" }]);
});
|
||||||
|
|
||||||
|
// Pinning an explicit managed provider (brave) must keep the managed
// web_search function tool even for a direct OpenAI Responses model.
it("keeps managed web_search when another search provider is configured", () => {
  const provider = buildOpenAIProvider();
  const wrap = provider.wrapStreamFn;
  expect(wrap).toBeTypeOf("function");
  if (!wrap) {
    throw new Error("expected OpenAI wrapper");
  }

  const result = runWrappedPayloadCase({
    wrap,
    provider: "openai",
    modelId: "gpt-5.4",
    cfg: { tools: { web: { search: { enabled: true, provider: "brave" } } } },
    model: {
      api: "openai-responses",
      provider: "openai",
      id: "gpt-5.4",
      baseUrl: "https://api.openai.com/v1",
    } as Model<"openai-responses">,
    payload: { tools: [{ type: "function", name: "web_search" }] },
  });

  expect(result.payload.tools).toEqual([{ type: "function", name: "web_search" }]);
});
|
});
|
||||||
|
|
||||||
it("preserves explicit OpenAI responses transport and warmup overrides", () => {
|
it("preserves explicit OpenAI responses transport and warmup overrides", () => {
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import {
|
|||||||
} from "openclaw/plugin-sdk/provider-model-shared";
|
} from "openclaw/plugin-sdk/provider-model-shared";
|
||||||
import { OPENAI_RESPONSES_STREAM_HOOKS } from "openclaw/plugin-sdk/provider-stream-family";
|
import { OPENAI_RESPONSES_STREAM_HOOKS } from "openclaw/plugin-sdk/provider-stream-family";
|
||||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||||
|
import { createOpenAINativeWebSearchWrapper } from "./native-web-search.js";
|
||||||
import { buildOpenAIReplayPolicy } from "./replay-policy.js";
|
import { buildOpenAIReplayPolicy } from "./replay-policy.js";
|
||||||
import {
|
import {
|
||||||
resolveOpenAITransportTurnState,
|
resolveOpenAITransportTurnState,
|
||||||
@@ -82,6 +83,14 @@ const resolveOpenAIResponsesWebSocketSessionPolicy: NonNullable<
|
|||||||
OpenAIResponsesProviderHooks["resolveWebSocketSessionPolicy"]
|
OpenAIResponsesProviderHooks["resolveWebSocketSessionPolicy"]
|
||||||
> = (ctx) => resolveOpenAIWebSocketSessionPolicy(ctx);
|
> = (ctx) => resolveOpenAIWebSocketSessionPolicy(ctx);
|
||||||
|
|
||||||
|
// Resolved once at module load: the family-level stream wrapper that the
// OpenAI Responses hooks would otherwise install directly.
const wrapOpenAIResponsesStreamFn = OPENAI_RESPONSES_STREAM_HOOKS.wrapStreamFn;

// Provider-level wrapStreamFn: layers native OpenAI web-search injection on
// top of the family wrapper, falling back to the context's raw streamFn when
// no family wrapper is defined.
const wrapOpenAIResponsesProviderStreamFn: NonNullable<
  OpenAIResponsesProviderHooks["wrapStreamFn"]
> = (ctx) =>
  createOpenAINativeWebSearchWrapper(wrapOpenAIResponsesStreamFn?.(ctx) ?? ctx.streamFn, {
    config: ctx.config,
  });
|
||||||
|
|
||||||
export function buildOpenAIResponsesProviderHooks(options?: {
|
export function buildOpenAIResponsesProviderHooks(options?: {
|
||||||
openaiWsWarmup?: boolean;
|
openaiWsWarmup?: boolean;
|
||||||
}): OpenAIResponsesProviderHooks {
|
}): OpenAIResponsesProviderHooks {
|
||||||
@@ -89,6 +98,7 @@ export function buildOpenAIResponsesProviderHooks(options?: {
|
|||||||
buildReplayPolicy: buildOpenAIReplayPolicy,
|
buildReplayPolicy: buildOpenAIReplayPolicy,
|
||||||
prepareExtraParams: (ctx) => defaultOpenAIResponsesExtraParams(ctx.extraParams, options),
|
prepareExtraParams: (ctx) => defaultOpenAIResponsesExtraParams(ctx.extraParams, options),
|
||||||
...OPENAI_RESPONSES_STREAM_HOOKS,
|
...OPENAI_RESPONSES_STREAM_HOOKS,
|
||||||
|
wrapStreamFn: wrapOpenAIResponsesProviderStreamFn,
|
||||||
resolveTransportTurnState: resolveOpenAIResponsesTransportTurnState,
|
resolveTransportTurnState: resolveOpenAIResponsesTransportTurnState,
|
||||||
resolveWebSocketSessionPolicy: resolveOpenAIResponsesWebSocketSessionPolicy,
|
resolveWebSocketSessionPolicy: resolveOpenAIResponsesWebSocketSessionPolicy,
|
||||||
};
|
};
|
||||||
|
|||||||
Reference in New Issue
Block a user