Skip to content

Commit b4bf55c

Browse files
feat(proxy): add universal proxy support and fix Gemini model resolution (#536)
Integrated undici ProxyAgent for native fetch and HttpsProxyAgent for axios/openai/anthropic. Upgraded @google/generative-ai to fix #536. Added OCO_PROXY config. Co-authored-by: uni <uni@hanwei.ink>
1 parent f51393e commit b4bf55c

File tree

13 files changed

+120
-16
lines changed

13 files changed

+120
-16
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
node_modules/
2+
out/
23
coverage/
34
temp/
45
build/

README.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -237,6 +237,16 @@ oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_AP
237237
oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
238238
```
239239

240+
### Use with Proxy
241+
242+
If you are behind a proxy, you can set it in the config:
243+
244+
```sh
245+
oco config set OCO_PROXY=http://127.0.0.1:7890
246+
```
247+
248+
Otherwise, it automatically falls back to the `HTTPS_PROXY` or `HTTP_PROXY` environment variables.
249+
240250
### Locale configuration
241251

242252
To globally specify the language used to generate commit messages:

package-lock.json

Lines changed: 35 additions & 9 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@
8989
"@azure/openai": "^1.0.0-beta.12",
9090
"@clack/prompts": "^0.6.1",
9191
"@dqbd/tiktoken": "^1.0.2",
92-
"@google/generative-ai": "^0.11.4",
92+
"@google/generative-ai": "^0.24.1",
9393
"@mistralai/mistralai": "^1.3.5",
9494
"@octokit/webhooks-schemas": "^6.11.0",
9595
"@octokit/webhooks-types": "^6.11.0",
@@ -98,6 +98,7 @@
9898
"cleye": "^1.3.2",
9999
"crypto": "^1.0.1",
100100
"execa": "^7.0.0",
101+
"https-proxy-agent": "^8.0.0",
101102
"ignore": "^5.2.4",
102103
"ini": "^3.0.1",
103104
"inquirer": "^9.1.4",

src/cli.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,10 @@ import { cli } from 'cleye';
55
import packageJSON from '../package.json';
66
import { commit } from './commands/commit';
77
import { commitlintConfigCommand } from './commands/commitlint';
8-
import { configCommand } from './commands/config';
8+
import { configCommand, getConfig } from './commands/config';
99
import { hookCommand, isHookCalled } from './commands/githook.js';
1010
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
11+
import { setupProxy } from './utils/proxy';
1112
import {
1213
setupCommand,
1314
isFirstRun,
@@ -18,6 +19,9 @@ import { modelsCommand } from './commands/models';
1819
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
1920
import { runMigrations } from './migrations/_run.js';
2021

22+
const config = getConfig();
23+
setupProxy(config.OCO_PROXY);
24+
2125
const extraArgs = process.argv.slice(2);
2226

2327
cli(

src/commands/config.ts

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ export enum CONFIG_KEYS {
2525
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
2626
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
2727
OCO_API_URL = 'OCO_API_URL',
28+
OCO_PROXY = 'OCO_PROXY',
2829
OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
2930
OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
3031
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
@@ -727,6 +728,15 @@ export const configValidators = {
727728
return value;
728729
},
729730

731+
/**
 * Validates the OCO_PROXY config value.
 *
 * The emitted error message promises that the value "should start with
 * 'http://' or 'https://'", so enforce that prefix here rather than only
 * checking that the value is a string (the original check accepted any
 * string, e.g. "foo", contradicting its own message).
 */
[CONFIG_KEYS.OCO_PROXY](value: any) {
  validateConfig(
    CONFIG_KEYS.OCO_PROXY,
    typeof value === 'string' &&
      (value.startsWith('http://') || value.startsWith('https://')),
    `${value} is not a valid URL. It should start with 'http://' or 'https://'.`
  );
  return value;
},
739+
730740
[CONFIG_KEYS.OCO_MODEL](value: any, config: any = {}) {
731741
validateConfig(
732742
CONFIG_KEYS.OCO_MODEL,
@@ -880,6 +890,7 @@ export type ConfigType = {
880890
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
881891
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
882892
[CONFIG_KEYS.OCO_API_URL]?: string;
893+
[CONFIG_KEYS.OCO_PROXY]?: string;
883894
[CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
884895
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
885896
[CONFIG_KEYS.OCO_EMOJI]: boolean;
@@ -964,6 +975,7 @@ const getEnvConfig = (envPath: string) => {
964975
return {
965976
OCO_MODEL: process.env.OCO_MODEL,
966977
OCO_API_URL: process.env.OCO_API_URL,
978+
OCO_PROXY: process.env.OCO_PROXY || process.env.HTTPS_PROXY || process.env.HTTP_PROXY,
967979
OCO_API_KEY: process.env.OCO_API_KEY,
968980
OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
969981
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
@@ -1189,6 +1201,11 @@ function getConfigKeyDetails(key) {
11891201
'Custom API URL - may be used to set proxy path to OpenAI API',
11901202
values: ["URL string (must start with 'http://' or 'https://')"]
11911203
};
1204+
case CONFIG_KEYS.OCO_PROXY:
1205+
return {
1206+
description: 'HTTP/HTTPS Proxy URL',
1207+
values: ["URL string (must start with 'http://' or 'https://')"]
1208+
};
11921209
case CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
11931210
return {
11941211
description: 'Message template placeholder',

src/engine/Engine.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ export interface AiEngineConfig {
1111
maxTokensOutput: number;
1212
maxTokensInput: number;
1313
baseURL?: string;
14+
proxy?: string;
1415
customHeaders?: Record<string, string>;
1516
}
1617

src/engine/anthropic.ts

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import AnthropicClient from '@anthropic-ai/sdk';
2+
import { HttpsProxyAgent } from 'https-proxy-agent';
23
import {
34
MessageCreateParamsNonStreaming,
45
MessageParam
@@ -18,7 +19,15 @@ export class AnthropicEngine implements AiEngine {
1819

1920
constructor(config) {
2021
this.config = config;
21-
this.client = new AnthropicClient({ apiKey: this.config.apiKey });
22+
const clientOptions: any = { apiKey: this.config.apiKey };
23+
24+
const proxy =
25+
config.proxy || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
26+
if (proxy) {
27+
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
28+
}
29+
30+
this.client = new AnthropicClient(clientOptions);
2231
}
2332

2433
public generateCommitMessage = async (

src/engine/gemini.ts

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,10 +29,15 @@ export class GeminiEngine implements AiEngine {
2929
.map((m) => m.content)
3030
.join('\n');
3131

32-
const gemini = this.client.getGenerativeModel({
33-
model: this.config.model,
34-
systemInstruction
35-
});
32+
const gemini = this.client.getGenerativeModel(
33+
{
34+
model: this.config.model,
35+
systemInstruction
36+
},
37+
{
38+
baseUrl: this.config.baseURL
39+
}
40+
);
3641

3742
const contents = messages
3843
.filter((m) => m.role !== 'system')

src/engine/mistral.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { OpenAI } from 'openai';
2+
import { HttpsProxyAgent } from 'https-proxy-agent';
23
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
34
import { normalizeEngineError } from '../utils/engineErrorHandler';
45
import { removeContentTags } from '../utils/removeContentTags';

0 commit comments

Comments
 (0)