diff --git a/src/commands/config.ts b/src/commands/config.ts
index 6594acf6..95fc8d7a 100644
--- a/src/commands/config.ts
+++ b/src/commands/config.ts
@@ -29,7 +29,8 @@ export enum CONFIG_KEYS {
   OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
   OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
   OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
-  OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT'
+  OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT',
+  OCO_OLLAMA_THINK = 'OCO_OLLAMA_THINK'
 }
 
 export enum CONFIG_MODES {
@@ -838,6 +839,15 @@ export const configValidators = {
       typeof value === 'boolean',
       'Must be true or false'
     );
+  },
+
+  [CONFIG_KEYS.OCO_OLLAMA_THINK](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_OLLAMA_THINK,
+      typeof value === 'boolean',
+      'Must be true or false'
+    );
+    return value;
   }
 };
 
@@ -905,6 +915,7 @@ export type ConfigType = {
   [CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
   [CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
   [CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
+  [CONFIG_KEYS.OCO_OLLAMA_THINK]?: boolean;
 };
 
 export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
diff --git a/src/engine/Engine.ts b/src/engine/Engine.ts
index d2c44a1a..0ddaa8d2 100644
--- a/src/engine/Engine.ts
+++ b/src/engine/Engine.ts
@@ -13,6 +13,7 @@ export interface AiEngineConfig {
   baseURL?: string;
   proxy?: string;
   customHeaders?: Record<string, string>;
+  ollamaThink?: boolean;
 }
 
 type Client =
diff --git a/src/engine/ollama.ts b/src/engine/ollama.ts
index 621bfcc3..d395a1c6 100644
--- a/src/engine/ollama.ts
+++ b/src/engine/ollama.ts
@@ -4,7 +4,9 @@ import { normalizeEngineError } from '../utils/engineErrorHandler';
 import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';
 
-interface OllamaConfig extends AiEngineConfig {}
+interface OllamaConfig extends AiEngineConfig {
+  ollamaThink?: boolean;
+}
 
 const DEFAULT_OLLAMA_URL = 'http://localhost:11434';
 const OLLAMA_CHAT_PATH = '/api/chat';
@@ -32,12 +34,15 @@ export class OllamaEngine implements AiEngine {
   async generateCommitMessage(
     messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
   ): Promise<string | null> {
-    const params = {
+    const params: Record<string, unknown> = {
       model: this.config.model ?? 'mistral',
       messages,
       options: { temperature: 0, top_p: 0.1 },
       stream: false
     };
+    if (typeof this.config.ollamaThink === 'boolean') {
+      params.think = this.config.ollamaThink;
+    }
 
     try {
       const response = await this.client.post(this.chatUrl, params);
diff --git a/src/utils/engine.ts b/src/utils/engine.ts
index 8a248e3a..436f7a11 100644
--- a/src/utils/engine.ts
+++ b/src/utils/engine.ts
@@ -54,7 +54,10 @@
   switch (provider) {
     case OCO_AI_PROVIDER_ENUM.OLLAMA:
-      return new OllamaEngine(DEFAULT_CONFIG);
+      return new OllamaEngine({
+        ...DEFAULT_CONFIG,
+        ollamaThink: config.OCO_OLLAMA_THINK
+      });
 
     case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
       return new AnthropicEngine(DEFAULT_CONFIG);
 
diff --git a/test/unit/ollama.test.ts b/test/unit/ollama.test.ts
new file mode 100644
index 00000000..ff35629f
--- /dev/null
+++ b/test/unit/ollama.test.ts
@@ -0,0 +1,64 @@
+import { OllamaEngine } from '../../src/engine/ollama';
+
+describe('OllamaEngine', () => {
+  it('sends think=false when configured', async () => {
+    const engine = new OllamaEngine({
+      apiKey: 'ollama',
+      model: 'qwen3.5:2b',
+      maxTokensOutput: 500,
+      maxTokensInput: 4096,
+      ollamaThink: false
+    });
+
+    const post = jest.fn().mockResolvedValue({
+      data: {
+        message: {
+          content: 'feat: add support for ollama think config'
+        }
+      }
+    });
+
+    engine.client = { post } as any;
+
+    await engine.generateCommitMessage([
+      { role: 'user', content: 'diff --git a/file b/file' }
+    ]);
+
+    expect(post).toHaveBeenCalledWith(
+      'http://localhost:11434/api/chat',
+      expect.objectContaining({
+        think: false
+      })
+    );
+  });
+
+  it('omits think when not configured', async () => {
+    const engine = new OllamaEngine({
+      apiKey: 'ollama',
+      model: 'qwen3.5:2b',
+      maxTokensOutput: 500,
+      maxTokensInput: 4096
+    });
+
+    const post = jest.fn().mockResolvedValue({
+      data: {
+        message: {
+          content: 'feat: add support for ollama think config'
+        }
+      }
+    });
+
+    engine.client = { post } as any;
+
+    await engine.generateCommitMessage([
+      { role: 'user', content: 'diff --git a/file b/file' }
+    ]);
+
+    expect(post).toHaveBeenCalledWith(
+      'http://localhost:11434/api/chat',
+      expect.not.objectContaining({
+        think: expect.anything()
+      })
+    );
+  });
+});