diff --git a/packages/cali/package.json b/packages/cali/package.json index 541ee19..3f460ae 100644 --- a/packages/cali/package.json +++ b/packages/cali/package.json @@ -18,6 +18,7 @@ "dedent": "^1.5.3", "dotenv": "^16.4.5", "gradient-string": "^3.0.0", + "ollama-ai-provider": "^1.0.0", "zod": "^3.23.8" }, "bugs": { diff --git a/packages/cali/src/cli.ts b/packages/cali/src/cli.ts index 840c6dd..08fbf53 100755 --- a/packages/cali/src/cli.ts +++ b/packages/cali/src/cli.ts @@ -2,7 +2,6 @@ import 'dotenv/config' -import { createOpenAI } from '@ai-sdk/openai' import { confirm, outro, select, spinner, text } from '@clack/prompts' import { CoreMessage, generateText } from 'ai' import * as tools from 'cali-tools' @@ -12,7 +11,6 @@ import { retro } from 'gradient-string' import { z } from 'zod' import { reactNativePrompt } from './prompt.js' -import { getApiKey } from './utils.js' const MessageSchema = z.union([ z.object({ type: z.literal('select'), content: z.string(), options: z.array(z.string()) }), @@ -49,11 +47,7 @@ console.log( console.log() -const AI_MODEL = process.env.AI_MODEL || 'gpt-4o' - -const openai = createOpenAI({ - apiKey: await getApiKey('OpenAI', 'OPENAI_API_KEY'), -}) +import model from './model-ollama.js' async function startSession(): Promise<string> { const question = await text({ @@ -88,7 +82,7 @@ while (true) { s.start(chalk.gray('Thinking...')) const response = await generateText({ - model: openai(AI_MODEL), + model, system: reactNativePrompt, tools, maxSteps: 10, diff --git a/packages/cali/src/model-ollama.ts b/packages/cali/src/model-ollama.ts new file mode 100755 index 0000000..c70d554 --- /dev/null +++ b/packages/cali/src/model-ollama.ts @@ -0,0 +1,13 @@ +// to make this work: +// 1. Download ollama and install it: https://ollama.com/ +// 2. 
Run `ollama run llama3.2` + +import { createOllama } from 'ollama-ai-provider' + +const ollama = createOllama({ + baseURL: 'http://localhost:11434/api', +}) + +const model = ollama('llama3.2:latest') + +export default model diff --git a/packages/cali/src/model.ts b/packages/cali/src/model.ts new file mode 100755 index 0000000..16d3c49 --- /dev/null +++ b/packages/cali/src/model.ts @@ -0,0 +1,13 @@ +import { createOpenAI } from '@ai-sdk/openai' + +import { getApiKey } from './utils.js' + +const AI_MODEL = process.env.AI_MODEL || 'gpt-4o' + +const openai = createOpenAI({ + apiKey: await getApiKey('OpenAI', 'OPENAI_API_KEY'), +}) + +const model = openai(AI_MODEL) + +export default model