Skip to content

Commit 8c92b92

Browse files
committed
Merge branch 'dev'
2 parents b0b9067 + a33027b commit 8c92b92

12 files changed

Lines changed: 400 additions & 124 deletions

File tree

README.md

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,8 @@ git add <files...>
5858
oco
5959
```
6060

61+
Link to the GitMoji specification: https://gitmoji.dev/
62+
6163
You can also run it with local model through ollama:
6264

6365
- install and start ollama
@@ -69,6 +71,17 @@ git add <files...>
6971
AI_PROVIDER='ollama' opencommit
7072
```
7173

74+
### Flags
75+
There are multiple optional flags that can be used with the `oco` command:
76+
77+
#### Use Full GitMoji Specification
78+
This flag can only be used if the `OCO_EMOJI` configuration item is set to `true`. This flag allows users to use all emojis in the GitMoji specification. By default, the GitMoji full specification is set to `false`, which only includes 10 emojis (🐛✨📝🚀✅♻️⬆️🔧🌐💡).
79+
This is done to limit the number of tokens sent in each request. However, if you would like to use the full GitMoji specification, you can use the `--fgm` flag.
80+
81+
```
82+
oco --fgm
83+
```
84+
7285
## Configuration
7386

7487
### Local per repo configuration
@@ -77,7 +90,8 @@ Create a `.env` file and add OpenCommit config variables there like this:
7790

7891
```env
7992
OCO_OPENAI_API_KEY=<your OpenAI API token>
80-
OCO_OPENAI_MAX_TOKENS=<max response tokens from OpenAI API>
93+
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
94+
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
8195
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
8296
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
8397
OCO_EMOJI=<boolean, add GitMoji>
@@ -329,7 +343,8 @@ jobs:
329343
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
330344
331345
# customization
332-
OCO_OPENAI_MAX_TOKENS: 500
346+
OCO_TOKENS_MAX_INPUT: 4096
347+
OCO_TOKENS_MAX_OUTPUT: 500
333348
OCO_OPENAI_BASE_PATH: ''
334349
OCO_DESCRIPTION: false
335350
OCO_EMOJI: false

out/cli.cjs

Lines changed: 95 additions & 41 deletions
Large diffs are not rendered by default.

out/github-action.cjs

Lines changed: 79 additions & 31 deletions
Large diffs are not rendered by default.

src/cli.ts

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,17 +17,19 @@ cli(
1717
version: packageJSON.version,
1818
name: 'opencommit',
1919
commands: [configCommand, hookCommand, commitlintConfigCommand],
20-
flags: {},
20+
flags: {
21+
fgm: Boolean
22+
},
2123
ignoreArgv: (type) => type === 'unknown-flag' || type === 'argument',
2224
help: { description: packageJSON.description }
2325
},
24-
async () => {
26+
async ({ flags }) => {
2527
await checkIsLatestVersion();
2628

2729
if (await isHookCalled()) {
2830
prepareCommitMessageHook();
2931
} else {
30-
commit(extraArgs);
32+
commit(extraArgs, flags.fgm);
3133
}
3234
},
3335
extraArgs

src/commands/commit.ts

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -40,14 +40,18 @@ const checkMessageTemplate = (extraArgs: string[]): string | false => {
4040

4141
const generateCommitMessageFromGitDiff = async (
4242
diff: string,
43-
extraArgs: string[]
43+
extraArgs: string[],
44+
fullGitMojiSpec: boolean
4445
): Promise<void> => {
4546
await assertGitRepo();
4647
const commitSpinner = spinner();
4748
commitSpinner.start('Generating the commit message');
4849

4950
try {
50-
let commitMessage = await generateCommitMessageByDiff(diff);
51+
let commitMessage = await generateCommitMessageByDiff(
52+
diff,
53+
fullGitMojiSpec
54+
);
5155

5256
const messageTemplate = checkMessageTemplate(extraArgs);
5357
if (
@@ -154,7 +158,8 @@ ${chalk.grey('——————————————————')}`
154158

155159
export async function commit(
156160
extraArgs: string[] = [],
157-
isStageAllFlag: Boolean = false
161+
isStageAllFlag: Boolean = false,
162+
fullGitMojiSpec: boolean = false
158163
) {
159164
if (isStageAllFlag) {
160165
const changedFiles = await getChangedFiles();
@@ -194,7 +199,7 @@ export async function commit(
194199
isStageAllAndCommitConfirmedByUser &&
195200
!isCancel(isStageAllAndCommitConfirmedByUser)
196201
) {
197-
await commit(extraArgs, true);
202+
await commit(extraArgs, true, fullGitMojiSpec);
198203
process.exit(1);
199204
}
200205

@@ -212,7 +217,7 @@ export async function commit(
212217
await gitAdd({ files });
213218
}
214219

215-
await commit(extraArgs, false);
220+
await commit(extraArgs, false, fullGitMojiSpec);
216221
process.exit(1);
217222
}
218223

@@ -225,7 +230,8 @@ export async function commit(
225230
const [, generateCommitError] = await trytm(
226231
generateCommitMessageFromGitDiff(
227232
await getDiff({ files: stagedFiles }),
228-
extraArgs
233+
extraArgs,
234+
fullGitMojiSpec
229235
)
230236
);
231237

src/commands/config.ts

Lines changed: 34 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,8 @@ dotenv.config();
1515

1616
export enum CONFIG_KEYS {
1717
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
18-
OCO_OPENAI_MAX_TOKENS = 'OCO_OPENAI_MAX_TOKENS',
18+
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
19+
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
1920
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
2021
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
2122
OCO_EMOJI = 'OCO_EMOJI',
@@ -26,13 +27,16 @@ export enum CONFIG_KEYS {
2627
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
2728
}
2829

29-
export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;
30-
3130
export enum CONFIG_MODES {
3231
get = 'get',
3332
set = 'set'
3433
}
3534

35+
export enum DEFAULT_TOKEN_LIMITS {
36+
DEFAULT_MAX_TOKENS_INPUT = 4096,
37+
DEFAULT_MAX_TOKENS_OUTPUT = 500
38+
}
39+
3640
const validateConfig = (
3741
key: string,
3842
condition: any,
@@ -75,18 +79,37 @@ export const configValidators = {
7579
return value;
7680
},
7781

78-
[CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS](value: any) {
82+
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
83+
// If the value is a string, convert it to a number.
84+
if (typeof value === 'string') {
85+
value = parseInt(value);
86+
validateConfig(
87+
CONFIG_KEYS.OCO_TOKENS_MAX_INPUT,
88+
!isNaN(value),
89+
'Must be a number'
90+
);
91+
}
92+
validateConfig(
93+
CONFIG_KEYS.OCO_TOKENS_MAX_INPUT,
94+
value ? typeof value === 'number' : undefined,
95+
'Must be a number'
96+
);
97+
98+
return value;
99+
},
100+
101+
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT](value: any) {
79102
// If the value is a string, convert it to a number.
80103
if (typeof value === 'string') {
81104
value = parseInt(value);
82105
validateConfig(
83-
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
106+
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
84107
!isNaN(value),
85108
'Must be a number'
86109
);
87110
}
88111
validateConfig(
89-
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
112+
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
90113
value ? typeof value === 'number' : undefined,
91114
'Must be a number'
92115
);
@@ -178,8 +201,11 @@ const configPath = pathJoin(homedir(), '.opencommit');
178201
export const getConfig = (): ConfigType | null => {
179202
const configFromEnv = {
180203
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
181-
OCO_OPENAI_MAX_TOKENS: process.env.OCO_OPENAI_MAX_TOKENS
182-
? Number(process.env.OCO_OPENAI_MAX_TOKENS)
204+
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
205+
? Number(process.env.OCO_TOKENS_MAX_INPUT)
206+
: undefined,
207+
OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT
208+
? Number(process.env.OCO_TOKENS_MAX_OUTPUT)
183209
: undefined,
184210
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
185211
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,

src/engine/openAi.ts

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ import { intro, outro } from '@clack/prompts';
1111

1212
import {
1313
CONFIG_MODES,
14-
DEFAULT_MODEL_TOKEN_LIMIT,
14+
DEFAULT_TOKEN_LIMITS,
1515
getConfig
1616
} from '../commands/config';
1717
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
@@ -20,7 +20,8 @@ import { AiEngine } from './Engine';
2020

2121
const config = getConfig();
2222

23-
let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
23+
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
24+
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
2425
let basePath = config?.OCO_OPENAI_BASE_PATH;
2526
let apiKey = config?.OCO_OPENAI_API_KEY
2627

@@ -65,14 +66,14 @@ class OpenAi implements AiEngine {
6566
messages,
6667
temperature: 0,
6768
top_p: 0.1,
68-
max_tokens: maxTokens || 500
69+
max_tokens: MAX_TOKENS_OUTPUT
6970
};
7071
try {
7172
const REQUEST_TOKENS = messages
7273
.map((msg) => tokenCount(msg.content) + 4)
7374
.reduce((a, b) => a + b, 0);
7475

75-
if (REQUEST_TOKENS > DEFAULT_MODEL_TOKEN_LIMIT - maxTokens) {
76+
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
7677
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
7778
}
7879

src/generateCommitMessageFromGitDiff.ts

Lines changed: 30 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -3,18 +3,21 @@ import {
33
ChatCompletionRequestMessageRoleEnum
44
} from 'openai';
55

6-
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
6+
import { DEFAULT_TOKEN_LIMITS, getConfig } from './commands/config';
77
import { getMainCommitPrompt } from './prompts';
88
import { mergeDiffs } from './utils/mergeDiffs';
99
import { tokenCount } from './utils/tokenCount';
1010
import { getEngine } from './utils/engine';
1111

1212
const config = getConfig();
13+
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
14+
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
1315

1416
const generateCommitMessageChatCompletionPrompt = async (
15-
diff: string
17+
diff: string,
18+
fullGitMojiSpec: boolean
1619
): Promise<Array<ChatCompletionRequestMessage>> => {
17-
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
20+
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
1821

1922
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
2023

@@ -29,31 +32,34 @@ const generateCommitMessageChatCompletionPrompt = async (
2932
export enum GenerateCommitMessageErrorEnum {
3033
tooMuchTokens = 'TOO_MUCH_TOKENS',
3134
internalError = 'INTERNAL_ERROR',
32-
emptyMessage = 'EMPTY_MESSAGE'
35+
emptyMessage = 'EMPTY_MESSAGE',
36+
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
3337
}
3438

3539
const ADJUSTMENT_FACTOR = 20;
3640

3741
export const generateCommitMessageByDiff = async (
38-
diff: string
42+
diff: string,
43+
fullGitMojiSpec: boolean
3944
): Promise<string> => {
4045
try {
41-
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
46+
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
4247

4348
const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map(
4449
(msg) => tokenCount(msg.content) + 4
4550
).reduce((a, b) => a + b, 0);
4651

4752
const MAX_REQUEST_TOKENS =
48-
DEFAULT_MODEL_TOKEN_LIMIT -
53+
MAX_TOKENS_INPUT -
4954
ADJUSTMENT_FACTOR -
5055
INIT_MESSAGES_PROMPT_LENGTH -
51-
config?.OCO_OPENAI_MAX_TOKENS;
56+
MAX_TOKENS_OUTPUT;
5257

5358
if (tokenCount(diff) >= MAX_REQUEST_TOKENS) {
5459
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs(
5560
diff,
56-
MAX_REQUEST_TOKENS
61+
MAX_REQUEST_TOKENS,
62+
fullGitMojiSpec
5763
);
5864

5965
const commitMessages = [];
@@ -65,7 +71,7 @@ export const generateCommitMessageByDiff = async (
6571
return commitMessages.join('\n\n');
6672
}
6773

68-
const messages = await generateCommitMessageChatCompletionPrompt(diff);
74+
const messages = await generateCommitMessageChatCompletionPrompt(diff, fullGitMojiSpec);
6975

7076
const engine = getEngine()
7177
const commitMessage = await engine.generateCommitMessage(messages);
@@ -82,7 +88,8 @@ export const generateCommitMessageByDiff = async (
8288
function getMessagesPromisesByChangesInFile(
8389
fileDiff: string,
8490
separator: string,
85-
maxChangeLength: number
91+
maxChangeLength: number,
92+
fullGitMojiSpec: boolean
8693
) {
8794
const hunkHeaderSeparator = '@@ ';
8895
const [fileHeader, ...fileDiffByLines] = fileDiff.split(hunkHeaderSeparator);
@@ -109,7 +116,8 @@ function getMessagesPromisesByChangesInFile(
109116
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
110117
async (lineDiff) => {
111118
const messages = await generateCommitMessageChatCompletionPrompt(
112-
separator + lineDiff
119+
separator + lineDiff,
120+
fullGitMojiSpec
113121
);
114122

115123
return engine.generateCommitMessage(messages);
@@ -124,6 +132,10 @@ function splitDiff(diff: string, maxChangeLength: number) {
124132
const splitDiffs = [];
125133
let currentDiff = '';
126134

135+
if (maxChangeLength <= 0) {
136+
throw new Error(GenerateCommitMessageErrorEnum.outputTokensTooHigh);
137+
}
138+
127139
for (let line of lines) {
128140
// If a single line exceeds maxChangeLength, split it into multiple lines
129141
while (tokenCount(line) > maxChangeLength) {
@@ -153,7 +165,8 @@ function splitDiff(diff: string, maxChangeLength: number) {
153165

154166
export const getCommitMsgsPromisesFromFileDiffs = async (
155167
diff: string,
156-
maxDiffLength: number
168+
maxDiffLength: number,
169+
fullGitMojiSpec: boolean
157170
) => {
158171
const separator = 'diff --git ';
159172

@@ -170,13 +183,15 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
170183
const messagesPromises = getMessagesPromisesByChangesInFile(
171184
fileDiff,
172185
separator,
173-
maxDiffLength
186+
maxDiffLength,
187+
fullGitMojiSpec
174188
);
175189

176190
commitMessagePromises.push(...messagesPromises);
177191
} else {
178192
const messages = await generateCommitMessageChatCompletionPrompt(
179-
separator + fileDiff
193+
separator + fileDiff,
194+
fullGitMojiSpec
180195
);
181196

182197
const engine = getEngine()

src/modules/commitlint/config.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,10 @@ export const configureCommitlintIntegration = async (force = false) => {
6161

6262
// Cleanup the consistency answer. Sometimes 'gpt-3.5-turbo' sends rule's back.
6363
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));
64+
65+
// sometimes consistency is preceded by explanatory text like "Here is your JSON:"
66+
consistency = utils.getJSONBlock(consistency);
67+
6468
// ... remaining might be extra set of "\n"
6569
consistency = utils.removeDoubleNewlines(consistency);
6670

0 commit comments

Comments
 (0)