diff --git a/core/llm/llms/Ollama.test.ts b/core/llm/llms/Ollama.test.ts
new file mode 100644
index 0000000000..78d69dd157
--- /dev/null
+++ b/core/llm/llms/Ollama.test.ts
@@ -0,0 +1,226 @@
+jest.mock("@continuedev/fetch", () => ({
+  streamResponse: jest.fn(),
+}));
+
+import { ChatMessage } from "../../index.js";
+import Ollama from "./Ollama.js";
+
+function createOllama(): Ollama {
+  // Create instance without triggering constructor's fetch call
+  const instance = Object.create(Ollama.prototype);
+  instance.model = "test-model";
+  instance.completionOptions = {};
+  instance.fetch = jest.fn();
+  return instance;
+}
+
+describe("Ollama", () => {
+  describe("_convertToOllamaMessage", () => {
+    let ollama: Ollama;
+
+    beforeEach(() => {
+      ollama = createOllama();
+    });
+
+    it("should convert a basic user message", () => {
+      const msg: ChatMessage = { role: "user", content: "hello" };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+      expect(result).toEqual({ role: "user", content: "hello" });
+    });
+
+    it("should convert assistant message with toolCalls, stripping index and other unsupported fields", () => {
+      const msg: ChatMessage = {
+        role: "assistant",
+        content: "",
+        toolCalls: [
+          {
+            id: "tc_123",
+            type: "function",
+            index: 0, // This field causes errors on Gemma3
+            function: {
+              name: "get_weather",
+              arguments: '{"city":"London"}',
+            },
+          } as any,
+        ],
+      };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+
+      expect(result.tool_calls).toBeDefined();
+      expect(result.tool_calls).toHaveLength(1);
+      expect(result.tool_calls[0]).toEqual({
+        function: {
+          name: "get_weather",
+          arguments: { city: "London" },
+        },
+      });
+      // Verify no index, id, or type fields leaked through
+      expect(result.tool_calls[0]).not.toHaveProperty("index");
+      expect(result.tool_calls[0]).not.toHaveProperty("id");
+      expect(result.tool_calls[0]).not.toHaveProperty("type");
+    });
+
+    it("should handle toolCalls with object arguments (not string)", () => {
+      const msg: ChatMessage = {
+        role: "assistant",
+        content: "",
+        toolCalls: [
+          {
+            id: "tc_456",
+            type: "function",
+            function: {
+              name: "search",
+              arguments: { query: "test" } as any,
+            },
+          } as any,
+        ],
+      };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+      expect(result.tool_calls[0].function.arguments).toEqual({
+        query: "test",
+      });
+    });
+
+    it("should not add tool_calls for assistant messages without them", () => {
+      const msg: ChatMessage = { role: "assistant", content: "Sure!" };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+      expect(result.tool_calls).toBeUndefined();
+    });
+
+    it("should convert tool result messages", () => {
+      const msg: ChatMessage = {
+        role: "tool",
+        content: '{"temp": 20}',
+        toolCallId: "tc_123",
+      };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+      expect(result.role).toBe("tool");
+      expect(result.content).toBe('{"temp": 20}');
+    });
+
+    it("should filter out toolCalls without a function name", () => {
+      const msg: ChatMessage = {
+        role: "assistant",
+        content: "",
+        toolCalls: [
+          {
+            id: "tc_1",
+            type: "function",
+            function: {
+              name: "valid_tool",
+              arguments: "{}",
+            },
+          },
+          {
+            id: "tc_2",
+            type: "function",
+            function: {
+              name: undefined as any,
+              arguments: "{}",
+            },
+          },
+        ],
+      };
+      const result = (ollama as any)._convertToOllamaMessage(msg);
+      expect(result.tool_calls).toHaveLength(1);
+      expect(result.tool_calls[0].function.name).toBe("valid_tool");
+    });
+  });
+
+  describe("_reorderMessagesForToolCompat", () => {
+    let ollama: Ollama;
+
+    beforeEach(() => {
+      ollama = createOllama();
+    });
+
+    it("should move system message from after tool to before assistant+tool block", () => {
+      const messages = [
+        { role: "system" as const, content: "You are helpful" },
+        { role: "user" as const, content: "What's the weather?" },
+        {
+          role: "assistant" as const,
+          content: "",
+          tool_calls: [{ function: { name: "get_weather", arguments: {} } }],
+        },
+        { role: "tool" as const, content: '{"temp": 20}' },
+        { role: "system" as const, content: "Use metric units" },
+        { role: "user" as const, content: "Thanks" },
+      ];
+
+      const result = (ollama as any)._reorderMessagesForToolCompat(messages);
+
+      // No system message should follow a tool message
+      for (let i = 1; i < result.length; i++) {
+        if (result[i].role === "system") {
+          expect(result[i - 1].role).not.toBe("tool");
+        }
+      }
+
+      // The moved system message should appear before the assistant
+      const sysIdx = result.findIndex(
+        (m: any) => m.role === "system" && m.content === "Use metric units",
+      );
+      const assistantIdx = result.findIndex((m: any) => m.role === "assistant");
+      expect(sysIdx).toBeLessThan(assistantIdx);
+    });
+
+    it("should not modify messages when no system follows tool", () => {
+      const messages = [
+        { role: "system" as const, content: "You are helpful" },
+        { role: "user" as const, content: "Hello" },
+        { role: "assistant" as const, content: "Hi there" },
+      ];
+
+      const result = (ollama as any)._reorderMessagesForToolCompat(messages);
+      expect(result).toEqual(messages);
+    });
+
+    it("should handle multiple tool results before a system message", () => {
+      const messages = [
+        { role: "user" as const, content: "Do two things" },
+        {
+          role: "assistant" as const,
+          content: "",
+          tool_calls: [
+            { function: { name: "tool1", arguments: {} } },
+            { function: { name: "tool2", arguments: {} } },
+          ],
+        },
+        { role: "tool" as const, content: "result1" },
+        { role: "tool" as const, content: "result2" },
+        { role: "system" as const, content: "extra instructions" },
+      ];
+
+      const result = (ollama as any)._reorderMessagesForToolCompat(messages);
+
+      // System should come before the assistant message
+      const sysIdx = result.findIndex(
+        (m: any) => m.content === "extra instructions",
+      );
+      const assistantIdx = result.findIndex((m: any) => m.role === "assistant");
+      expect(sysIdx).toBeLessThan(assistantIdx);
+
+      // No system message should follow a tool message
+      for (let i = 1; i < result.length; i++) {
+        if (result[i].role === "system") {
+          expect(result[i - 1].role).not.toBe("tool");
+        }
+      }
+    });
+
+    it("should handle system message after tool when no preceding assistant", () => {
+      // Edge case: tool messages without a preceding assistant
+      const messages = [
+        { role: "tool" as const, content: "result" },
+        { role: "system" as const, content: "instructions" },
+      ];
+
+      const result = (ollama as any)._reorderMessagesForToolCompat(messages);
+
+      // System should be moved before tool
+      expect(result[0].role).toBe("system");
+      expect(result[1].role).toBe("tool");
+    });
+  });
+});
diff --git a/core/llm/llms/Ollama.ts b/core/llm/llms/Ollama.ts
index d2d8660b64..f72e41185b 100644
--- a/core/llm/llms/Ollama.ts
+++ b/core/llm/llms/Ollama.ts
@@ -313,6 +313,41 @@ class Ollama extends BaseLLM implements ModelInstaller {
     };
 
     ollamaMessage.content = renderChatMessage(message);
+
+    // Convert assistant tool calls to Ollama format, stripping unsupported
+    // fields like `index` (which causes errors on Gemma3 models).
+    if (
+      message.role === "assistant" &&
+      "toolCalls" in message &&
+      message.toolCalls?.length
+    ) {
+      ollamaMessage.tool_calls = message.toolCalls
+        .filter((tc) => tc.function?.name)
+        .map((tc) => {
+          let args: JSONSchema7Object;
+          if (typeof tc.function!.arguments === "string") {
+            try {
+              args = JSON.parse(tc.function!.arguments!);
+            } catch (e) {
+              console.warn(
+                `Failed to parse tool call arguments for "${tc.function!.name}": ${e}`,
+              );
+              args = {};
+            }
+          } else {
+            args = tc.function!.arguments
+              ? (tc.function!.arguments as unknown as JSONSchema7Object)
+              : {};
+          }
+          return {
+            function: {
+              name: tc.function!.name!,
+              arguments: args,
+            },
+          };
+        });
+    }
+
     if (Array.isArray(message.content)) {
       const images: string[] = [];
       message.content.forEach((part) => {
@@ -409,12 +444,47 @@ class Ollama extends BaseLLM implements ModelInstaller {
     }
   }
 
+  /**
+   * Reorder messages so that system messages never appear directly after tool
+   * messages. Some Ollama models (Mistral, Ministral) reject the sequence
+   * `tool → system` with "Unexpected role 'system' after role 'tool'".
+   * This moves such system messages to just before the preceding
+   * assistant+tool block.
+   */
+  private _reorderMessagesForToolCompat(
+    messages: OllamaChatMessage[],
+  ): OllamaChatMessage[] {
+    const result: OllamaChatMessage[] = [...messages];
+
+    for (let i = 1; i < result.length; i++) {
+      if (result[i].role === "system" && result[i - 1].role === "tool") {
+        // Find the start of the tool block (assistant tool_call + tool results)
+        let insertIdx = i - 1;
+        while (insertIdx > 0 && result[insertIdx - 1].role === "tool") {
+          insertIdx--;
+        }
+        // Also skip past the assistant message that triggered the tool calls
+        if (insertIdx > 0 && result[insertIdx - 1].role === "assistant") {
+          insertIdx--;
+        }
+
+        const [sysMsg] = result.splice(i, 1);
+        result.splice(insertIdx, 0, sysMsg);
+        // Both splices preserve length; result[i] now holds the tool message
+        // that preceded the moved system message, so the loop's i++ is safe.
+      }
+    }
+
+    return result;
+  }
+
   protected async *_streamChat(
     messages: ChatMessage[],
     signal: AbortSignal,
     options: CompletionOptions,
   ): AsyncGenerator {
-    const ollamaMessages = messages.map(this._convertToOllamaMessage);
+    const ollamaMessages = this._reorderMessagesForToolCompat(
+      messages.map(this._convertToOllamaMessage),
+    );
     const chatOptions: OllamaChatOptions = {
       model: this._getModel(),
       messages: ollamaMessages,
@@ -715,8 +785,12 @@ class Ollama extends BaseLLM implements ModelInstaller {
       const chunk = new TextDecoder().decode(value);
       const lines = chunk.split("\n").filter(Boolean);
       for (const line of lines) {
-        const data = JSON.parse(line);
-        progressReporter?.(data.status, data.completed, data.total);
+        try {
+          const data = JSON.parse(line);
+          progressReporter?.(data.status, data.completed, data.total);
+        } catch (e) {
+          console.warn(`Error parsing Ollama pull response: ${e}`);
+        }
       }
     }
   } finally {