Calling the Ollama API from Go

Calling the API
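Ollama exposes an OpenAI-compatible API under /v1, so the go-openai client can talk to it directly: point BaseURL at http://127.0.0.1:11434/v1 and pass any placeholder string (here "ollama") as the API key, since Ollama does not check it. The example below assumes ollama serve is running locally and that the qwen3:0.6b model has already been pulled (ollama pull qwen3:0.6b).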

package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	// Ollama does not validate the API key, so any placeholder string works.
	config := openai.DefaultConfig("ollama")
	// Point the client at Ollama's OpenAI-compatible endpoint.
	config.BaseURL = "http://127.0.0.1:11434/v1"
	client := openai.NewClientWithConfig(config)

	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: "qwen3:0.6b",
			Messages: []openai.ChatCompletionMessage{
			{
					// Qwen3 soft switch: /no_think suppresses the model's thinking output.
					Role:    openai.ChatMessageRoleSystem,
					Content: "/no_think You are a helpful assistant.",
				},
			{
					Role:    openai.ChatMessageRoleUser,
					Content: "介绍下你自己", // "Introduce yourself."
				},
			},
		},
	)

	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}

	fmt.Println(resp.Choices[0].Message.Content)
}
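Running this prints a short self-introduction from the model. The /no_think prefix in the system message is Qwen3's soft switch for turning off its thinking mode, which keeps <think> blocks out of the reply; for models without this switch it is simply part of the prompt text.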

Streaming output
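This version streams from the legacy completions endpoint (/v1/completions): Stream: true makes the server send the reply in chunks, which are printed as they arrive until the stream ends with io.EOF.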

package main

import (
	"context"
	"errors"
	"fmt"
	"io"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	ctx := context.Background()

	config := openai.DefaultConfig("ollama")
	config.BaseURL = "http://127.0.0.1:11434/v1"
	client := openai.NewClientWithConfig(config)

	// Legacy text-completion request; Stream enables chunked output.
	req := openai.CompletionRequest{
		Model:  "qwen3:0.6b",
		Prompt: "你好啊,你是谁", // "Hi there, who are you?"
		Stream: true,
	}

	stream, err := client.CreateCompletionStream(ctx, req)
	if err != nil {
		fmt.Printf("CompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	for {
		response, err := stream.Recv()
		// io.EOF signals that the stream has finished.
		if errors.Is(err, io.EOF) {
			return
		}

		if err != nil {
			fmt.Printf("Stream error: %v\n", err)
			return
		}

		fmt.Printf("%v", response.Choices[0].Text)
	}
}
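The same streaming pattern also works for chat-style requests via CreateChatCompletionStream, which reads incremental Delta.Content chunks instead of Text. Below is a minimal sketch assuming the same local Ollama endpoint and the same qwen3:0.6b model as above; the prompt is just a placeholder.

package main

import (
	"context"
	"errors"
	"fmt"
	"io"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	ctx := context.Background()

	config := openai.DefaultConfig("ollama")
	config.BaseURL = "http://127.0.0.1:11434/v1"
	client := openai.NewClientWithConfig(config)

	// Stream a chat completion; the reply arrives as a sequence of deltas.
	stream, err := client.CreateChatCompletionStream(ctx, openai.ChatCompletionRequest{
		Model:  "qwen3:0.6b",
		Stream: true,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "介绍下你自己"},
		},
	})
	if err != nil {
		fmt.Printf("ChatCompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			fmt.Println()
			return
		}
		if err != nil {
			fmt.Printf("Stream error: %v\n", err)
			return
		}
		// Each chunk carries an incremental piece of the assistant message.
		fmt.Print(response.Choices[0].Delta.Content)
	}
}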