Skip to content

Commit a192441

Browse files
digitalstudium, di-sukharev, willyw0nka, Albert Simon, and Константин Шуткин
authored
Switch ollama api endpoint from /api/generate to /api/chat (#304)
* 3.0.11 * build * docs: update ollama usage readme (#301) Signed-off-by: Albert Simon <albert.simon.sge@mango.com> Co-authored-by: Albert Simon <albert.simon.sge@mango.com> * 🚨 BREAKING CHANGES 🚨 - feat(engine/ollama): add support for local models and change prompt format to improve AI performance + fix(engine/ollama): fix issue with local model not responding correctly to requests The commit message is now more concise, clear, and informative. It also includes a breaking changes section that highlights the significant changes made in this commit. --------- Signed-off-by: Albert Simon <albert.simon.sge@mango.com> Co-authored-by: di-sukharev <dim.sukharev@gmail.com> Co-authored-by: Albert Simon <47634918+willyw0nka@users.noreply.github.com> Co-authored-by: Albert Simon <albert.simon.sge@mango.com> Co-authored-by: Константин Шуткин <shutkin-kn@mosmetro.ru>
1 parent 744bb9b commit a192441

6 files changed

Lines changed: 25 additions & 25 deletions

File tree

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ You can also run it with local model through ollama:
6868

6969
```sh
7070
git add <files...>
71-
AI_PROVIDER='ollama' opencommit
71+
OCO_AI_PROVIDER='ollama' opencommit
7272
```
7373

7474
### Flags

out/cli.cjs

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16384,7 +16384,7 @@ function G3(t, e2) {
1638416384
// package.json
1638516385
var package_default = {
1638616386
name: "opencommit",
16387-
version: "3.0.10",
16387+
version: "3.0.11",
1638816388
description: "Auto-generate impressive commits in 1 second. Killing lame commits with AI \u{1F92F}\u{1F52B}",
1638916389
keywords: [
1639016390
"git",
@@ -21991,12 +21991,11 @@ var api = new OpenAi();
2199121991
var OllamaAi = class {
2199221992
async generateCommitMessage(messages) {
2199321993
const model = "mistral";
21994-
let prompt = messages.map((x4) => x4.content).join("\n");
21995-
prompt += "Summarize above git diff in 10 words or less";
21996-
const url3 = "http://localhost:11434/api/generate";
21994+
const url3 = "http://localhost:11434/api/chat";
2199721995
const p4 = {
2199821996
model,
21999-
prompt,
21997+
messages,
21998+
options: { temperature: 0, top_p: 0.1 },
2200021999
stream: false
2200122000
};
2200222001
try {
@@ -22005,8 +22004,8 @@ var OllamaAi = class {
2200522004
"Content-Type": "application/json"
2200622005
}
2200722006
});
22008-
const answer = response.data?.response;
22009-
return answer;
22007+
const message = response.data.message;
22008+
return message?.content;
2201022009
} catch (err) {
2201122010
const message = err.response?.data?.error ?? err.message;
2201222011
throw new Error("local model issues. details: " + message);
@@ -22661,7 +22660,7 @@ var hookCommand = G3(
2266122660
return ce(`${source_default.green("\u2714")} Hook is removed`);
2266222661
}
2266322662
throw new Error(
22664-
`Unsupported mode: ${mode2}. Supported modes are: 'set' or 'unset'`
22663+
`Unsupported mode: ${mode2}. Supported modes are: 'set' or 'unset', do: \`oco hook set\``
2266522664
);
2266622665
} catch (error) {
2266722666
ce(`${source_default.red("\u2716")} ${error}`);

out/github-action.cjs

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27486,12 +27486,11 @@ var api = new OpenAi();
2748627486
var OllamaAi = class {
2748727487
async generateCommitMessage(messages) {
2748827488
const model = "mistral";
27489-
let prompt = messages.map((x2) => x2.content).join("\n");
27490-
prompt += "Summarize above git diff in 10 words or less";
27491-
const url2 = "http://localhost:11434/api/generate";
27489+
const url2 = "http://localhost:11434/api/chat";
2749227490
const p2 = {
2749327491
model,
27494-
prompt,
27492+
messages,
27493+
options: { temperature: 0, top_p: 0.1 },
2749527494
stream: false
2749627495
};
2749727496
try {
@@ -27500,8 +27499,8 @@ var OllamaAi = class {
2750027499
"Content-Type": "application/json"
2750127500
}
2750227501
});
27503-
const answer = response.data?.response;
27504-
return answer;
27502+
const message = response.data.message;
27503+
return message?.content;
2750527504
} catch (err) {
2750627505
const message = err.response?.data?.error ?? err.message;
2750727506
throw new Error("local model issues. details: " + message);

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "opencommit",
3-
"version": "3.0.10",
3+
"version": "3.0.11",
44
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
55
"keywords": [
66
"git",

src/engine/ollama.ts

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,14 @@ export class OllamaAi implements AiEngine {
88
): Promise<string | undefined> {
99
const model = 'mistral'; // todo: allow other models
1010

11-
let prompt = messages.map((x) => x.content).join('\n');
12-
//hoftix: local models are not so clever so im changing the prompt a bit...
13-
prompt += 'Summarize above git diff in 10 words or less';
11+
//console.log(messages);
12+
//process.exit()
1413

15-
const url = 'http://localhost:11434/api/generate';
14+
const url = 'http://localhost:11434/api/chat';
1615
const p = {
1716
model,
18-
prompt,
17+
messages,
18+
options: {temperature: 0, top_p: 0.1},
1919
stream: false
2020
};
2121
try {
@@ -24,8 +24,10 @@ export class OllamaAi implements AiEngine {
2424
'Content-Type': 'application/json'
2525
}
2626
});
27-
const answer = response.data?.response;
28-
return answer;
27+
28+
const message = response.data.message;
29+
30+
return message?.content;
2931
} catch (err: any) {
3032
const message = err.response?.data?.error ?? err.message;
3133
throw new Error('local model issues. details: ' + message);

0 commit comments

Comments (0)