Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ollama support #12

Merged
merged 4 commits into from
Dec 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 30 additions & 6 deletions apps/web/src/components/commands.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -186,44 +186,68 @@ export function Commands() {
step >= 5 ? "opacity-100" : "opacity-0",
)}
>
◇ Which OpenAI model should be used for translations?
◇ Which provider would you like to use?
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 5 ? "opacity-100" : "opacity-0",
)}
>
│ ● GPT-4 (Default)
│ ● OpenAI
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 5 ? "opacity-100" : "opacity-0",
)}
>
│ ○ Ollama
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
◇ Which model should be used for translations?
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
│ ● GPT-4 (Default)
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
│ ○ GPT-4 Turbo
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 5 ? "opacity-100" : "opacity-0",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
│ ○ GPT-4o
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 5 ? "opacity-100" : "opacity-0",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
│ ○ GPT-4o mini
</span>
<span
className={cn(
"transition-opacity duration-100",
step >= 5 ? "opacity-100" : "opacity-0",
step >= 6 ? "opacity-100" : "opacity-0",
)}
>
│ ○ GPT-3.5 Turbo
Expand All @@ -239,7 +263,7 @@ export function Commands() {
<span
className={cn(
"transition-opacity duration-100 -ml-[1.5px]",
step >= 6 ? "opacity-100" : "opacity-0",
step >= 7 ? "opacity-100" : "opacity-0",
)}
>
└ Configuration file and language files created successfully!
Expand Down
Binary file modified bun.lockb
Binary file not shown.
7 changes: 4 additions & 3 deletions examples/next-international/languine.config.mjs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
export default {
version: "1.0.0",
version: "0.5.5",
locale: {
source: "en",
targets: ["fr"],
Expand All @@ -9,7 +9,8 @@ export default {
include: ["locales/[locale].ts"],
},
},
openai: {
model: "gpt-4-turbo",
llm: {
provider: "ollama",
model: "mistral:latest",
},
};
3 changes: 2 additions & 1 deletion examples/next-international/locales/en.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
export default {
hello: "Hello",
welcome: "Hello {name}!",
"about.you": "Hello {name}! You have {age} yo",
"about.you": "Hello {name}! You have {age} years",
"scope.test": "A scope",
"scope.more.test": "A scope",
"scope.more.param": "A scope with {param}",
Expand All @@ -11,4 +11,5 @@ export default {
"missing.translation.in.fr": "This should work",
"cows#one": "A cow",
"cows#other": "{count} cows",
"languine.hello": "Hello Languine",
} as const;
13 changes: 7 additions & 6 deletions examples/next-international/locales/fr.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
export default {
hello: "Bonjour",
welcome: "Bonjour {name} !",
"about.you": "Bonjour {name} ! Tu as {age} ans",
"scope.test": "Un domaine",
"scope.more.test": "Un domaine",
"scope.more.param": "Un domaine avec {param}",
"scope.more.and.more.test": "Un domaine",
welcome: "Bonjour {name}!",
"about.you": "Bonjour {name}! Vous avez {age} ans",
"scope.test": "Un scope",
"scope.more.test": "Un scope",
"scope.more.param": "Un scope avec {param}",
"scope.more.and.more.test": "Un scope",
"scope.more.stars#one": "1 étoile sur GitHub",
"scope.more.stars#other": "{count} étoiles sur GitHub",
"missing.translation.in.fr": "Cela devrait fonctionner",
"cows#one": "Une vache",
"cows#other": "{count} vaches",
"languine.hello": "Hello Languine",
} as const;
4 changes: 3 additions & 1 deletion packages/cli/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "languine",
"version": "0.5.5",
"version": "0.5.6",
"type": "module",
"bin": "dist/index.js",
"main": "dist/index.js",
Expand All @@ -23,6 +23,8 @@
"diff": "^7.0.0",
"dotenv": "^16.4.7",
"simple-git": "^3.27.0",
"ollama": "^0.5.11",
"ollama-ai-provider": "^1.1.0",
"zod": "^3.24.1"
},
"devDependencies": {
Expand Down
48 changes: 33 additions & 15 deletions packages/cli/src/commands/init.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ import { execSync } from "node:child_process";
import fs from "node:fs/promises";
import path from "node:path";
import { intro, outro, select, text } from "@clack/prompts";
import { providers } from "../providers.js";
import type { Provider } from "../types.js";
import { configPath } from "../utils.js";

export async function init() {
Expand Down Expand Up @@ -51,32 +53,48 @@ export async function init() {
],
})) as string;

const provider = (await select<Provider>({
message: "Which provider would you like to use?",
options: Object.values(providers),
initialValue: "openai",
})) as Provider;

if (provider === "ollama") {
try {
const ollamaBinary = execSync("which ollama").toString().trim();
if (!ollamaBinary) {
outro("Ollama binary not found. Please install Ollama");
process.exit(1);
}
} catch (error) {
outro("Ollama binary not found. Please install Ollama");
process.exit(1);
}
}

const models = await providers[provider].getModels();

const model = (await select({
message: "Which OpenAI model should be used for translations?",
options: [
{ value: "gpt-4-turbo", label: "GPT-4 Turbo (Default)" },
{ value: "gpt-4", label: "GPT-4" },
{ value: "gpt-4o", label: "GPT-4o" },
{ value: "gpt-4o-mini", label: "GPT-4o mini" },
{ value: "gpt-3.5-turbo", label: "GPT-3.5 Turbo" },
],
initialValue: "gpt-4-turbo",
message: "Which model should be used for translations?",
options: models,
})) as string;

const configContent = `export default {
version: "${require("../../package.json").version}",
locale: {
source: "${sourceLanguage}",
targets: ${JSON.stringify(targetLanguages.split(",").map((l) => l.trim()))}
targets: ${JSON.stringify(targetLanguages.split(",").map((l) => l.trim()))},
},
files: {
${fileFormat}: {
include: ["${filesDirectory}/[locale].${fileFormat}"]
}
include: ["${filesDirectory}/[locale].${fileFormat}"],
},
},
llm: {
provider: "${provider}",
model: "${model}",
temperature: 0,
},
openai: {
model: "${model}"
}
}`;

try {
Expand Down
31 changes: 23 additions & 8 deletions packages/cli/src/commands/translate.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,28 @@
import fs from "node:fs/promises";
import path from "node:path";
import { createOpenAI } from "@ai-sdk/openai";
import { type OpenAIProvider, createOpenAI } from "@ai-sdk/openai";
import { intro, outro, spinner } from "@clack/prompts";
import chalk from "chalk";
import { type OllamaProvider, createOllama } from "ollama-ai-provider";
import { simpleGit } from "simple-git";
import { getTranslator } from "../translators/index.js";
import type { PromptOptions, UpdateResult } from "../types.js";
import type { Config, Provider } from "../types.js";
import { getApiKey, getConfig } from "../utils.js";

// Maps each supported LLM provider name to a configured ai-sdk provider
// factory, used by getModel() below to instantiate the configured model.
// NOTE(review): the top-level `await getApiKey(...)` runs at module load,
// so the OpenAI key is resolved (possibly prompting the user) even when the
// configured provider is "ollama" — verify this doesn't prompt Ollama-only
// users for an OpenAI key.
const providersMap: Record<Provider, OpenAIProvider | OllamaProvider> = {
openai: createOpenAI({
// presumably reads OPENAI_API_KEY from the environment or prompts — TODO confirm against utils.ts
apiKey: await getApiKey("OpenAI", "OPENAI_API_KEY"),
}),
// No credentials needed; createOllama() with no options targets the local
// daemon's default endpoint — verify against ollama-ai-provider docs.
ollama: createOllama(),
};

function getModel(config: Config) {
const provider = providersMap[config.llm.provider];

return provider(config.llm.model);
}

export async function translate(targetLocale?: string, force = false) {
intro("Starting translation process...");

Expand All @@ -28,12 +43,7 @@ export async function translate(targetLocale?: string, force = false) {

const git = simpleGit();

// Initialize OpenAI
const openai = createOpenAI({
apiKey: await getApiKey("OpenAI", "OPENAI_API_KEY"),
});

const model = openai(config.openai.model);
const model = getModel(config);

const s = spinner();
s.start("Checking for changes and translating to target locales...");
Expand Down Expand Up @@ -120,7 +130,12 @@ export async function translate(targetLocale?: string, force = false) {
summary,
};
} catch (error) {
return { locale, sourcePath, success: false, error };
return {
locale,
sourcePath,
success: false,
error,
};
}
}),
),
Expand Down
2 changes: 2 additions & 0 deletions packages/cli/src/envs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
// Side-effect module: loads variables from a local .env file into
// process.env. Imported for its side effect (see `import "./envs.js"` in
// src/index.ts) so env vars are populated before any other module reads them.
import dotenv from "dotenv";
dotenv.config();
4 changes: 1 addition & 3 deletions packages/cli/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
#!/usr/bin/env node

import dotenv from "dotenv";
dotenv.config();

import "./envs.js";
import { select } from "@clack/prompts";
import chalk from "chalk";
import dedent from "dedent";
Expand Down
1 change: 0 additions & 1 deletion packages/cli/src/prompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ Translation Requirements:
- Keep all technical identifiers unchanged
- Keep consistent capitalization, spacing, and line breaks
- Respect existing whitespace and newline patterns
- Never add space before a ! or ?
`;

export function createBasePrompt(text: string, options: PromptOptions) {
Expand Down
50 changes: 50 additions & 0 deletions packages/cli/src/providers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import { outro } from "@clack/prompts";
import chalk from "chalk";
import ollama from "ollama";
import type { Provider } from "./types.js";

/** A selectable model, shaped as an option for @clack/prompts `select`. */
type ModelInfo = {
  value: string;
  label: string;
};

/**
 * One provider entry: select-option fields plus an async loader that
 * resolves the models this provider can offer.
 */
type ProviderConfig = {
  value: Provider;
  label: string;
  getModels: () => Promise<ModelInfo[]>;
};

// Static catalogue of OpenAI chat models offered during `init`.
const openaiModels: ModelInfo[] = [
  { value: "gpt-4-turbo", label: "GPT-4 Turbo (Default)" },
  { value: "gpt-4", label: "GPT-4" },
  { value: "gpt-4o", label: "GPT-4o" },
  { value: "gpt-4o-mini", label: "GPT-4o mini" },
  { value: "gpt-3.5-turbo", label: "GPT-3.5 Turbo" },
];

/**
 * Query the local Ollama daemon for its installed models.
 * Exits the process with a message when the daemon is unreachable.
 */
async function listOllamaModels(): Promise<ModelInfo[]> {
  try {
    const { models } = await ollama.list();
    return models.map(({ name }) => ({ value: name, label: name }));
  } catch {
    outro(
      chalk.red(
        "Failed to get models from Ollama, is it installed and running?",
      ),
    );
    process.exit(1);
  }
}

/** Registry of supported translation providers, keyed by provider id. */
export const providers: Record<Provider, ProviderConfig> = {
  openai: {
    value: "openai",
    label: "OpenAI",
    // Return a fresh copy so callers can't mutate the shared catalogue.
    getModels: async () => [...openaiModels],
  },
  ollama: {
    value: "ollama",
    label: "Ollama",
    getModels: listOllamaModels,
  },
};
Loading