// [GenAI] Scripting Local Language Models with Ollama and the Vercel AI SDK
// Run: ollama run gemma3
import { ollama } from "ollama-ai-provider";
import { generateObject } from "ai";
import { z } from "zod";
// Local Gemma 3 model served by the Ollama daemon.
const gemmaModel = ollama("gemma3:latest");

// Natural-language task we want the model to map onto a structured command.
const userTask =
  "Concatenate all the markdown files in my Downloads folder into a single file";

// Schema the model's response must conform to: a single `command` string.
const commandSchema = z.object({
  command: z
    .string()
    .describe("The command to execute, e.g. 'summarize' or 'translate'"),
});

// Ask the model for a schema-conforming object rather than free-form text.
const result = await generateObject({
  model: gemmaModel,
  schema: commandSchema,
  prompt: userTask,
});

// Print the validated structured output (e.g. { command: "concatenate" }).
console.log(result.object);
/**
➜ ollama-vercel-language-scripting bun index.ts
{
command: "concatenate",
}
*/