@@ -30629,6 +30629,26 @@ function getI18nLocal(value) {
3062930629}
3063030630
3063130631// src/commands/config.ts
// Enum of every supported OCO_* configuration key (compiled-TS-enum shape:
// an IIFE that populates and returns the accumulator object).
// NOTE(review): every member maps to its own name except OCO_OLLAMA_API_URL,
// whose value is "OCO_API_URL" — presumably kept for backward compatibility
// with existing config files; confirm against the config reader.
var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
  const identityKeys = [
    "OCO_OPENAI_API_KEY",
    "OCO_ANTHROPIC_API_KEY",
    "OCO_AZURE_API_KEY",
    "OCO_TOKENS_MAX_INPUT",
    "OCO_TOKENS_MAX_OUTPUT",
    "OCO_OPENAI_BASE_PATH",
    "OCO_DESCRIPTION",
    "OCO_EMOJI",
    "OCO_MODEL",
    "OCO_LANGUAGE",
    "OCO_MESSAGE_TEMPLATE_PLACEHOLDER",
    "OCO_PROMPT_MODULE",
    "OCO_AI_PROVIDER",
    "OCO_GITPUSH",
    "OCO_ONE_LINE_COMMIT",
    "OCO_AZURE_ENDPOINT"
  ];
  for (const key of identityKeys) {
    CONFIG_KEYS2[key] = key;
  }
  // The one member whose value differs from its name (kept last, matching
  // the original insertion order).
  CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL";
  return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
3063230652var MODEL_LIST = {
3063330653 openai: [
3063430654 "gpt-3.5-turbo",
@@ -30825,6 +30845,14 @@ var configValidators = {
3082530845 'Must be in format "https://<resource name>.openai.azure.com/"'
3082630846 );
3082730847 return value;
30848+ },
30849+ ["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) {
30850+ validateConfig(
30851+ CONFIG_KEYS.OCO_API_URL,
30852+ typeof value === "string" && value.startsWith("http"),
30853+ `${value} is not a valid URL`
30854+ );
30855+ return value;
3082830856 }
3082930857};
// Path to the user's global config file: <home>/.opencommit.
// The `(0, fn)(...)` form is a bundler artifact that calls the imported
// function without a `this` binding.
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -34151,12 +34179,16 @@ var api = new OpenAi();
3415134179var config4 = getConfig();
3415234180var OllamaAi = class {
3415334181 model = "mistral";
34182+ url = "http://localhost:11434/api/chat";
3415434183 setModel(model) {
3415534184 this.model = model ?? config4?.OCO_MODEL ?? "mistral";
3415634185 }
34186+ setUrl(url2) {
34187+ this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
34188+ }
3415734189 async generateCommitMessage(messages) {
3415834190 const model = this.model;
34159- const url2 = "http://localhost:11434/api/chat" ;
34191+ const url2 = this.url ;
3416034192 const p4 = {
3416134193 model,
3416234194 messages,
0 commit comments