Skip to content

Commit 52d11b5

Browse files
Merge branch 'dev' into feat/add-mistral-ai-as-provider
2 parents c34cdee + 26ebfb4 commit 52d11b5

File tree

6 files changed

+142
-13
lines changed

6 files changed

+142
-13
lines changed

out/cli.cjs

Lines changed: 45 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -431,8 +431,8 @@ var require_escape = __commonJS({
431431
}
432432
function escapeArgument(arg, doubleEscapeMetaChars) {
433433
arg = `${arg}`;
434-
arg = arg.replace(/(\\*)"/g, '$1$1\\"');
435-
arg = arg.replace(/(\\*)$/, "$1$1");
434+
arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"');
435+
arg = arg.replace(/(?=(\\+?)?)\1$/, "$1$1");
436436
arg = `"${arg}"`;
437437
arg = arg.replace(metaCharsRegExp, "^$1");
438438
if (doubleEscapeMetaChars) {
@@ -578,7 +578,7 @@ var require_enoent = __commonJS({
578578
const originalEmit = cp.emit;
579579
cp.emit = function(name, arg1) {
580580
if (name === "exit") {
581-
const err = verifyENOENT(arg1, parsed, "spawn");
581+
const err = verifyENOENT(arg1, parsed);
582582
if (err) {
583583
return originalEmit.call(cp, "error", err);
584584
}
@@ -27389,7 +27389,8 @@ var package_default = {
2738927389
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
2739027390
"test:e2e": "npm run test:e2e:setup && jest test/e2e",
2739127391
"test:e2e:setup": "sh test/e2e/setup.sh",
27392-
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e"
27392+
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e",
27393+
"mlx:start": "OCO_AI_PROVIDER='mlx' node ./out/cli.cjs"
2739327394
},
2739427395
devDependencies: {
2739527396
"@commitlint/types": "^17.4.4",
@@ -29933,6 +29934,8 @@ var getDefaultModel = (provider) => {
2993329934
switch (provider) {
2993429935
case "ollama":
2993529936
return "";
29937+
case "mlx":
29938+
return "";
2993629939
case "anthropic":
2993729940
return MODEL_LIST.anthropic[0];
2993829941
case "gemini":
@@ -29964,7 +29967,7 @@ var configValidators = {
2996429967
validateConfig(
2996529968
"OCO_API_KEY",
2996629969
value,
29967-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
29970+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
2996829971
);
2996929972
return value;
2997029973
},
@@ -30070,8 +30073,8 @@ var configValidators = {
3007030073
"test",
3007130074
"flowise",
3007230075
"groq"
30073-
].includes(value) || value.startsWith("ollama"),
30074-
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
30076+
].includes(value) || value.startsWith("ollama") || value.startsWith("mlx"),
30077+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
3007530078
);
3007630079
return value;
3007730080
},
@@ -30111,6 +30114,7 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
3011130114
OCO_AI_PROVIDER_ENUM2["TEST"] = "test";
3011230115
OCO_AI_PROVIDER_ENUM2["FLOWISE"] = "flowise";
3011330116
OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
30117+
OCO_AI_PROVIDER_ENUM2["MLX"] = "mlx";
3011430118
return OCO_AI_PROVIDER_ENUM2;
3011530119
})(OCO_AI_PROVIDER_ENUM || {});
3011630120
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -44524,6 +44528,38 @@ var GroqEngine = class extends OpenAiEngine {
4452444528
}
4452544529
};
4452644530

44531+
// src/engine/mlx.ts
44532+
var MLXEngine = class {
44533+
constructor(config7) {
44534+
this.config = config7;
44535+
this.client = axios_default.create({
44536+
url: config7.baseURL ? `${config7.baseURL}/${config7.apiKey}` : "http://localhost:8080/v1/chat/completions",
44537+
headers: { "Content-Type": "application/json" }
44538+
});
44539+
}
44540+
async generateCommitMessage(messages) {
44541+
const params = {
44542+
messages,
44543+
temperature: 0,
44544+
top_p: 0.1,
44545+
repetition_penalty: 1.5,
44546+
stream: false
44547+
};
44548+
try {
44549+
const response = await this.client.post(
44550+
this.client.getUri(this.config),
44551+
params
44552+
);
44553+
const choices = response.data.choices;
44554+
const message = choices[0].message;
44555+
return message?.content;
44556+
} catch (err) {
44557+
const message = err.response?.data?.error ?? err.message;
44558+
throw new Error(`MLX provider error: ${message}`);
44559+
}
44560+
}
44561+
};
44562+
4452744563
// src/utils/engine.ts
4452844564
function getEngine() {
4452944565
const config7 = getConfig();
@@ -44550,6 +44586,8 @@ function getEngine() {
4455044586
return new FlowiseEngine(DEFAULT_CONFIG2);
4455144587
case "groq" /* GROQ */:
4455244588
return new GroqEngine(DEFAULT_CONFIG2);
44589+
case "mlx" /* MLX */:
44590+
return new MLXEngine(DEFAULT_CONFIG2);
4455344591
default:
4455444592
return new OpenAiEngine(DEFAULT_CONFIG2);
4455544593
}

out/github-action.cjs

Lines changed: 39 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -48745,6 +48745,8 @@ var getDefaultModel = (provider) => {
4874548745
switch (provider) {
4874648746
case "ollama":
4874748747
return "";
48748+
case "mlx":
48749+
return "";
4874848750
case "anthropic":
4874948751
return MODEL_LIST.anthropic[0];
4875048752
case "gemini":
@@ -48776,7 +48778,7 @@ var configValidators = {
4877648778
validateConfig(
4877748779
"OCO_API_KEY",
4877848780
value,
48779-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
48781+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
4878048782
);
4878148783
return value;
4878248784
},
@@ -48882,8 +48884,8 @@ var configValidators = {
4888248884
"test",
4888348885
"flowise",
4888448886
"groq"
48885-
].includes(value) || value.startsWith("ollama"),
48886-
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
48887+
].includes(value) || value.startsWith("ollama") || value.startsWith("mlx"),
48888+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
4888748889
);
4888848890
return value;
4888948891
},
@@ -63325,6 +63327,38 @@ var GroqEngine = class extends OpenAiEngine {
6332563327
}
6332663328
};
6332763329

63330+
// src/engine/mlx.ts
63331+
var MLXEngine = class {
63332+
constructor(config6) {
63333+
this.config = config6;
63334+
this.client = axios_default.create({
63335+
url: config6.baseURL ? `${config6.baseURL}/${config6.apiKey}` : "http://localhost:8080/v1/chat/completions",
63336+
headers: { "Content-Type": "application/json" }
63337+
});
63338+
}
63339+
async generateCommitMessage(messages) {
63340+
const params = {
63341+
messages,
63342+
temperature: 0,
63343+
top_p: 0.1,
63344+
repetition_penalty: 1.5,
63345+
stream: false
63346+
};
63347+
try {
63348+
const response = await this.client.post(
63349+
this.client.getUri(this.config),
63350+
params
63351+
);
63352+
const choices = response.data.choices;
63353+
const message = choices[0].message;
63354+
return message?.content;
63355+
} catch (err) {
63356+
const message = err.response?.data?.error ?? err.message;
63357+
throw new Error(`MLX provider error: ${message}`);
63358+
}
63359+
}
63360+
};
63361+
6332863362
// src/utils/engine.ts
6332963363
function getEngine() {
6333063364
const config6 = getConfig();
@@ -63351,6 +63385,8 @@ function getEngine() {
6335163385
return new FlowiseEngine(DEFAULT_CONFIG2);
6335263386
case "groq" /* GROQ */:
6335363387
return new GroqEngine(DEFAULT_CONFIG2);
63388+
case "mlx" /* MLX */:
63389+
return new MLXEngine(DEFAULT_CONFIG2);
6335463390
default:
6335563391
return new OpenAiEngine(DEFAULT_CONFIG2);
6335663392
}

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,8 @@
5858
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
5959
"test:e2e": "npm run test:e2e:setup && jest test/e2e",
6060
"test:e2e:setup": "sh test/e2e/setup.sh",
61-
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e"
61+
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e",
62+
"mlx:start": "OCO_AI_PROVIDER='mlx' node ./out/cli.cjs"
6263
},
6364
"devDependencies": {
6465
"@commitlint/types": "^17.4.4",

src/commands/config.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,8 @@ const getDefaultModel = (provider: string | undefined): string => {
135135
switch (provider) {
136136
case 'ollama':
137137
return '';
138+
case 'mlx':
139+
return '';
138140
case 'anthropic':
139141
return MODEL_LIST.anthropic[0];
140142
case 'gemini':
@@ -182,7 +184,7 @@ export const configValidators = {
182184
validateConfig(
183185
'OCO_API_KEY',
184186
value,
185-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
187+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
186188
);
187189

188190
return value;
@@ -307,7 +309,7 @@ export const configValidators = {
307309
'flowise',
308310
'groq'
309311
].includes(value) || value.startsWith('ollama'),
310-
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
312+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
311313
);
312314

313315
return value;
@@ -354,6 +356,7 @@ export enum OCO_AI_PROVIDER_ENUM {
354356
FLOWISE = 'flowise',
355357
GROQ = 'groq',
356358
MISTRAL = 'mistral',
359+
MLX = 'mlx'
357360
}
358361

359362
export type ConfigType = {

src/engine/mlx.ts

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
import axios, { AxiosInstance } from 'axios';
2+
import { OpenAI } from 'openai';
3+
import { AiEngine, AiEngineConfig } from './Engine';
4+
import { chown } from 'fs';
5+
6+
interface MLXConfig extends AiEngineConfig {}
7+
8+
export class MLXEngine implements AiEngine {
9+
config: MLXConfig;
10+
client: AxiosInstance;
11+
12+
constructor(config) {
13+
this.config = config;
14+
this.client = axios.create({
15+
url: config.baseURL
16+
? `${config.baseURL}/${config.apiKey}`
17+
: 'http://localhost:8080/v1/chat/completions',
18+
headers: { 'Content-Type': 'application/json' }
19+
});
20+
}
21+
22+
async generateCommitMessage(
23+
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>):
24+
Promise<string | undefined> {
25+
const params = {
26+
messages,
27+
temperature: 0,
28+
top_p: 0.1,
29+
repetition_penalty: 1.5,
30+
stream: false
31+
};
32+
try {
33+
const response = await this.client.post(
34+
this.client.getUri(this.config),
35+
params
36+
);
37+
38+
const choices = response.data.choices;
39+
const message = choices[0].message;
40+
41+
return message?.content;
42+
} catch (err: any) {
43+
const message = err.response?.data?.error ?? err.message;
44+
throw new Error(`MLX provider error: ${message}`);
45+
}
46+
}
47+
}

src/utils/engine.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import { OpenAiEngine } from '../engine/openAi';
99
import { MistralAiEngine } from '../engine/mistral';
1010
import { TestAi, TestMockType } from '../engine/testAi';
1111
import { GroqEngine } from '../engine/groq';
12+
import { MLXEngine } from '../engine/mlx';
1213

1314
export function getEngine(): AiEngine {
1415
const config = getConfig();
@@ -47,6 +48,9 @@ export function getEngine(): AiEngine {
4748
case OCO_AI_PROVIDER_ENUM.MISTRAL:
4849
return new MistralAiEngine(DEFAULT_CONFIG);
4950

51+
case OCO_AI_PROVIDER_ENUM.MLX:
52+
return new MLXEngine(DEFAULT_CONFIG);
53+
5054
default:
5155
return new OpenAiEngine(DEFAULT_CONFIG);
5256
}

0 commit comments

Comments
 (0)