fix: preserve interactive Ollama model selection (#49249) (thanks @BruceMacD)

This commit is contained in:
Peter Steinberger
2026-03-19 00:42:58 +00:00
parent f8c70bf1f1
commit 42b9212eb2
4 changed files with 131 additions and 1 deletions

View File

@@ -410,6 +410,33 @@ describe("runSetupWizard", () => {
}
});
it("prompts for a model during explicit interactive Ollama setup", async () => {
  // Reset call history so the assertion below only sees calls from this run.
  promptDefaultModel.mockClear();

  // Explicit interactive Ollama quickstart: every optional step is skipped so
  // the wizard goes straight to auth/model selection.
  const wizardOptions = {
    acceptRisk: true,
    flow: "quickstart",
    authChoice: "ollama",
    installDaemon: false,
    skipSkills: true,
    skipSearch: true,
    skipHealth: true,
    skipUi: true,
  };

  await runSetupWizard(wizardOptions, createRuntime(), buildWizardPrompter({}));

  // The model prompt must actually run (allowKeep: false forces a fresh pick).
  expect(promptDefaultModel).toHaveBeenCalledWith(
    expect.objectContaining({ allowKeep: false }),
  );
});
it("shows plugin compatibility notices for an existing valid config", async () => {
buildPluginCompatibilityNotices.mockReturnValue([
{

View File

@@ -482,7 +482,9 @@ export async function runSetupWizard(
}
}
-  if (authChoiceFromPrompt && authChoice !== "custom-api-key") {
+  const shouldPromptModelSelection =
+    authChoice !== "custom-api-key" && (authChoiceFromPrompt || authChoice === "ollama");
+  if (shouldPromptModelSelection) {
const modelSelection = await promptDefaultModel({
config: nextConfig,
prompter,