折腾笔记[45]-导入及导出ollama模型

摘要

使用cli工具从文件夹导入模型及导出本机ollama模型到文件夹.导入时可以自动处理重名的问题.

AGENTS.md声明

本文用于投喂龙虾(openclaw).

实操

1. 使用shell方式导出

[https://github.com/AaronFeng753/Ollama-Model-Dumper]

## 导出
MODEL="modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-IQ1_S.gguf" && \
TARGET="$HOME/Downloads/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF" && \
mkdir -p "$TARGET" && \
ollama show --modelfile "$MODEL" > "$TARGET/Modelfile" && \
MANIFEST="$HOME/.ollama/models/manifests/modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF/Qwen3-Coder-30B-A3B-Instruct-1M-UD-IQ1_S.gguf" && \
DIGEST=$(python3 -c "import json,sys; d=json.load(open('$MANIFEST')); print([l for l in d['layers'] if l['mediaType']=='application/vnd.ollama.image.model'][0]['digest'].replace('sha256:','sha256-'))") && \
cp "$HOME/.ollama/models/blobs/$DIGEST" "$TARGET/model.gguf" && \
ls -lh "$TARGET"

# modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf
MODEL="modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf" && \
TARGET="$HOME/Downloads/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF_UD-Q2_K_XL" && \
mkdir -p "$TARGET" && \
ollama show --modelfile "$MODEL" > "$TARGET/Modelfile" && \
MANIFEST="$HOME/.ollama/models/manifests/modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF/Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf" && \
DIGEST=$(python3 -c "import json,sys; d=json.load(open('$MANIFEST')); print([l for l in d['layers'] if l['mediaType']=='application/vnd.ollama.image.model'][0]['digest'].replace('sha256:','sha256-'))") && \
cp "$HOME/.ollama/models/blobs/$DIGEST" "$TARGET/model.gguf" && \
ls -lh "$TARGET"

## 导入
cd "$env:USERPROFILE\Downloads\Qwen3-Coder-30B-A3B-Instruct-1M-GGUF"; `
(Get-Content Modelfile) -replace '^FROM\s+.*', 'FROM ./model.gguf' | Set-Content Modelfile; `
ollama create qwen3-coder-30b-local -f Modelfile

2. go的cli工具

// 文件: ollama_export_and_import.go
// 功能: 导出/导入ollama模型

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"runtime"
	"strings"
	"time"
)

// ModelInfo stores model information
type ModelInfo struct {
	Name     string
	ID       string
	Size     string
	Modified string
}

func main() {
	reader := bufio.NewReader(os.Stdin)

	fmt.Println("╔════════════════════════════════════════╗")
	fmt.Println("║     Ollama Model Export/Import Tool    ║")
	fmt.Println("╚════════════════════════════════════════╝")
	fmt.Println()

	fmt.Println("Select operation:")
	fmt.Println("  [1] Export Model")
	fmt.Println("  [2] Import Model")
	fmt.Println()
	fmt.Print("Enter option (1/2): ")

	choice, _ := reader.ReadString('\n')
	choice = strings.TrimSpace(choice)

	switch choice {
	case "1":
		exportModel(reader)
	case "2":
		importModel(reader)
	default:
		fmt.Println("Invalid option, exiting")
		os.Exit(1)
	}
}

func exportModel(reader *bufio.Reader) {
	fmt.Println("\n--- Export Mode ---\n")

	models := getModelList()
	if len(models) == 0 {
		fmt.Println("No Ollama models found")
		return
	}

	fmt.Println("Installed models:")
	fmt.Println(strings.Repeat("-", 80))
	fmt.Printf("%-5s %-40s %-20s %s\n", "No.", "Name", "Size", "Modified")
	fmt.Println(strings.Repeat("-", 80))

	for i, model := range models {
		fmt.Printf("%-5d %-40s %-20s %s\n", i+1, model.Name, model.Size, model.Modified)
	}
	fmt.Println(strings.Repeat("-", 80))
	fmt.Println()

	fmt.Print("Enter model number to export: ")
	input, _ := reader.ReadString('\n')
	input = strings.TrimSpace(input)

	var selectedIndex int
	if _, err := fmt.Sscanf(input, "%d", &selectedIndex); err != nil || selectedIndex < 1 || selectedIndex > len(models) {
		fmt.Println("Invalid selection")
		return
	}

	selectedModel := models[selectedIndex-1]
	fmt.Printf("\nSelected model: %s\n", selectedModel.Name)

	defaultDir := filepath.Join(getHomeDir(), "Downloads", strings.ReplaceAll(selectedModel.Name, ":", "_"))
	fmt.Printf("Enter export directory (default: %s): ", defaultDir)
	targetDir, _ := reader.ReadString('\n')
	targetDir = strings.TrimSpace(targetDir)
	if targetDir == "" {
		targetDir = defaultDir
	}

	fmt.Printf("\nWill export model to: %s\n", targetDir)
	fmt.Print("Confirm export? (y/n): ")
	confirm, _ := reader.ReadString('\n')
	if strings.ToLower(strings.TrimSpace(confirm)) != "y" {
		fmt.Println("Operation cancelled")
		return
	}

	if err := performExport(selectedModel.Name, targetDir); err != nil {
		fmt.Printf("Export failed: %v\n", err)
		os.Exit(1)
	}
}

func getModelList() []ModelInfo {
	cmd := exec.Command("ollama", "ls")
	output, err := cmd.Output()
	if err != nil {
		return nil
	}

	var models []ModelInfo
	lines := strings.Split(string(output), "\n")

	for i, line := range lines {
		if i == 0 || strings.TrimSpace(line) == "" {
			continue
		}

		fields := strings.Fields(line)
		if len(fields) >= 4 {
			model := ModelInfo{
				Name:     fields[0],
				ID:       fields[1],
				Size:     fields[2],
				Modified: strings.Join(fields[3:], " "),
			}
			models = append(models, model)
		}
	}

	return models
}

func performExport(modelName, targetDir string) error {
	fmt.Println("\nStarting export...")

	if err := os.MkdirAll(targetDir, 0755); err != nil {
		return fmt.Errorf("failed to create directory: %v", err)
	}

	fmt.Println("[1/3] Exporting Modelfile...")
	modelfilePath := filepath.Join(targetDir, "Modelfile")
	cmd := exec.Command("ollama", "show", "--modelfile", modelName)
	output, err := cmd.Output()
	if err != nil {
		return fmt.Errorf("failed to get Modelfile: %v", err)
	}

	if err := os.WriteFile(modelfilePath, output, 0644); err != nil {
		return fmt.Errorf("failed to save Modelfile: %v", err)
	}

	fmt.Println("[2/3] Parsing model metadata...")
	digest, err := getModelDigest(modelName)
	if err != nil {
		return fmt.Errorf("failed to get model digest: %v", err)
	}

	fmt.Println("[3/3] Copying model file...")
	ollamaHome := getOllamaHome()
	sourcePath := filepath.Join(ollamaHome, "models", "blobs", digest)
	targetPath := filepath.Join(targetDir, "model.gguf")

	if err := copyFile(sourcePath, targetPath); err != nil {
		return fmt.Errorf("failed to copy model file: %v", err)
	}

	fmt.Println("\nExport successful!")
	fmt.Printf("Export directory: %s\n", targetDir)
	fmt.Println("\nFiles:")

	files, _ := os.ReadDir(targetDir)
	for _, file := range files {
		info, _ := file.Info()
		size := formatBytes(info.Size())
		fmt.Printf("  - %s (%s)\n", file.Name(), size)
	}

	return nil
}

func getModelDigest(modelName string) (string, error) {
	parts := strings.Split(modelName, ":")
	tag := "latest"
	namePart := modelName
	if len(parts) == 2 {
		namePart = parts[0]
		tag = parts[1]
	}

	ollamaHome := getOllamaHome()
	manifestPath := filepath.Join(ollamaHome, "models", "manifests")

	var manifestFile string
	if strings.Contains(namePart, "/") {
		manifestFile = filepath.Join(manifestPath, namePart, tag)
	} else {
		manifestFile = filepath.Join(manifestPath, "registry.ollama.ai", "library", namePart, tag)
	}

	data, err := os.ReadFile(manifestFile)
	if err != nil {
		altPath := filepath.Join(manifestPath, "registry.ollama.ai", "library", strings.ToLower(namePart), tag)
		data, err = os.ReadFile(altPath)
		if err != nil {
			return "", fmt.Errorf("failed to read manifest: %v", err)
		}
		manifestFile = altPath
	}

	var manifest struct {
		Layers []struct {
			MediaType string `json:"mediaType"`
			Digest    string `json:"digest"`
		} `json:"layers"`
	}

	if err := json.Unmarshal(data, &manifest); err != nil {
		return "", fmt.Errorf("failed to parse manifest: %v", err)
	}

	for _, layer := range manifest.Layers {
		if layer.MediaType == "application/vnd.ollama.image.model" {
			digest := strings.Replace(layer.Digest, ":", "-", 1)
			return digest, nil
		}
	}

	return "", fmt.Errorf("model layer not found")
}

func importModel(reader *bufio.Reader) {
	fmt.Println("\n--- Import Mode ---\n")

	fmt.Print("Enter model folder path (contains Modelfile and model.gguf): ")
	modelDir, _ := reader.ReadString('\n')
	modelDir = strings.TrimSpace(modelDir)
	modelDir = strings.Trim(modelDir, `"'`)

	if _, err := os.Stat(modelDir); os.IsNotExist(err) {
		fmt.Printf("Directory does not exist: %s\n", modelDir)
		return
	}

	modelfilePath := filepath.Join(modelDir, "Modelfile")
	modelFilePath := filepath.Join(modelDir, "model.gguf")

	if _, err := os.Stat(modelfilePath); os.IsNotExist(err) {
		fmt.Println("Error: Modelfile not found")
		return
	}

	if _, err := os.Stat(modelFilePath); os.IsNotExist(err) {
		fmt.Println("Error: model.gguf not found")
		return
	}

	fmt.Println("\nReading Modelfile...")
	content, err := os.ReadFile(modelfilePath)
	if err != nil {
		fmt.Printf("Failed to read Modelfile: %v\n", err)
		return
	}

	contentStr := string(content)
	re := regexp.MustCompile(`(?m)^FROM\s+.*$`)
	modifiedContent := re.ReplaceAllString(contentStr, "FROM ./model.gguf")

	tempModelfile := filepath.Join(modelDir, "Modelfile.import")
	if err := os.WriteFile(tempModelfile, []byte(modifiedContent), 0644); err != nil {
		fmt.Printf("Failed to write temp Modelfile: %v\n", err)
		return
	}
	defer os.Remove(tempModelfile)

	suggestedName := extractModelName(string(content), modelDir)
	fmt.Printf("\nSuggested model name: %s\n", suggestedName)
	fmt.Print("Enter new model name (press Enter to use suggested): ")
	newName, _ := reader.ReadString('\n')
	newName = strings.TrimSpace(newName)
	if newName == "" {
		newName = suggestedName
	}

	existingModels := getExistingModelNames()
	finalName := resolveModelName(newName, existingModels)

	if finalName != newName {
		fmt.Printf("\nModel name '%s' already exists, using: %s\n", newName, finalName)
	}

	fmt.Printf("\nWill import model '%s' from: %s\n", finalName, modelDir)
	fmt.Print("Confirm import? (y/n): ")
	confirm, _ := reader.ReadString('\n')
	if strings.ToLower(strings.TrimSpace(confirm)) != "y" {
		fmt.Println("Operation cancelled")
		return
	}

	fmt.Println("\nStarting import...")
	cmd := exec.Command("ollama", "create", finalName, "-f", tempModelfile)
	cmd.Dir = modelDir
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	if err := cmd.Run(); err != nil {
		fmt.Printf("\nImport failed: %v\n", err)
		os.Exit(1)
	}

	fmt.Printf("\nImport successful! Model name: %s\n", finalName)
	fmt.Println("Usage: ollama run", finalName)
}

func extractModelName(modelfileContent, modelDir string) string {
	lines := strings.Split(modelfileContent, "\n")
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if strings.HasPrefix(line, "FROM ") {
			fromPath := strings.TrimSpace(strings.TrimPrefix(line, "FROM"))
			if strings.HasSuffix(fromPath, ".gguf") {
				base := filepath.Base(fromPath)
				base = strings.TrimSuffix(base, ".gguf")
				base = sanitizeModelName(base)
				if base != "" {
					return base
				}
			}
		}
	}

	base := filepath.Base(modelDir)
	base = sanitizeModelName(base)
	if base == "" {
		base = "imported-model"
	}
	return base
}

func sanitizeModelName(name string) string {
	name = strings.ToLower(name)

	var result strings.Builder
	for _, r := range name {
		switch {
		case (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || r == '.' || r == '-':
			result.WriteRune(r)
		case r == ' ' || r == '\\' || r == '/' || r == ':':
			result.WriteRune('-')
		}
	}

	cleaned := result.String()
	for strings.Contains(cleaned, "--") {
		cleaned = strings.ReplaceAll(cleaned, "--", "-")
	}

	cleaned = strings.Trim(cleaned, "-")
	return cleaned
}

func getExistingModelNames() []string {
	cmd := exec.Command("ollama", "list")
	output, err := cmd.Output()
	if err != nil {
		return []string{}
	}

	var names []string
	lines := strings.Split(string(output), "\n")
	for i, line := range lines {
		if i == 0 || strings.TrimSpace(line) == "" {
			continue
		}
		fields := strings.Fields(line)
		if len(fields) > 0 {
			names = append(names, fields[0])
		}
	}
	return names
}

func resolveModelName(name string, existing []string) string {
	if !contains(existing, name) {
		return name
	}

	baseName := name
	tag := "latest"
	if idx := strings.LastIndex(name, ":"); idx != -1 {
		baseName = name[:idx]
		tag = name[idx+1:]
	}

	timestamp := time.Now().Format("20060102-1504")

	newName := fmt.Sprintf("%s-%s:%s", baseName, timestamp, tag)
	if !contains(existing, newName) {
		return newName
	}

	timestamp = time.Now().Format("20060102-150405")
	newName = fmt.Sprintf("%s-%s:%s", baseName, timestamp, tag)
	if !contains(existing, newName) {
		return newName
	}

	for i := 1; i < 1000; i++ {
		newName = fmt.Sprintf("%s-%s-%d:%s", baseName, timestamp, i, tag)
		if !contains(existing, newName) {
			return newName
		}
	}

	return newName
}

func contains(slice []string, item string) bool {
	for _, s := range slice {
		if s == item {
			return true
		}
	}
	return false
}

func getHomeDir() string {
	home, err := os.UserHomeDir()
	if err != nil {
		return "."
	}
	return home
}

func getOllamaHome() string {
	if ollamaHome := os.Getenv("OLLAMA_HOME"); ollamaHome != "" {
		return ollamaHome
	}

	home := getHomeDir()
	if runtime.GOOS == "windows" {
		return filepath.Join(home, ".ollama")
	}
	return filepath.Join(home, ".ollama")
}

func copyFile(src, dst string) error {
	input, err := os.ReadFile(src)
	if err != nil {
		return err
	}
	return os.WriteFile(dst, input, 0644)
}

func formatBytes(bytes int64) string {
	const (
		KB = 1024
		MB = 1024 * KB
		GB = 1024 * MB
	)

	switch {
	case bytes >= GB:
		return fmt.Sprintf("%.2f GB", float64(bytes)/GB)
	case bytes >= MB:
		return fmt.Sprintf("%.2f MB", float64(bytes)/MB)
	case bytes >= KB:
		return fmt.Sprintf("%.2f KB", float64(bytes)/KB)
	default:
		return fmt.Sprintf("%d B", bytes)
	}
}

效果:

(base) workspace@macbook2022 exp307-mqtt-ollama % ./ollama_export_and_import_2603081900 
╔════════════════════════════════════════╗
║     Ollama Model Export/Import Tool    ║
╚════════════════════════════════════════╝

Select operation:
  [1] Export Model
  [2] Import Model

Enter option (1/2): 1

--- Export Mode ---

Installed models:
--------------------------------------------------------------------------------
No.   Name                                     Size                 Modified
--------------------------------------------------------------------------------
1     qwen3:4b                                 11                   GB 5 weeks ago
2     modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf 11                   GB 5 weeks ago
3     modelscope.cn/Qwen/Qwen3-VL-4B-Instruct-GGUF:latest 3.3                  GB 5 weeks ago
4     qwen3:0.6b                               522                  MB 9 months ago
5     qllama/bge-reranker-large:latest         604                  MB 11 months ago
6     bge-m3:latest                            1.2                  GB 11 months ago
--------------------------------------------------------------------------------

Enter model number to export: 4

Selected model: qwen3:0.6b
Enter export directory (default: /Users/workspace/Downloads/qwen3_0.6b): 

Will export model to: /Users/workspace/Downloads/qwen3_0.6b
Confirm export? (y/n): y

Starting export...
[1/3] Exporting Modelfile...
[2/3] Parsing model metadata...
[3/3] Copying model file...

Export successful!
Export directory: /Users/workspace/Downloads/qwen3_0.6b

Files:
  - Modelfile (12.92 KB)
  - model.gguf (498.43 MB)
(base) workspace@macbook2022 exp307-mqtt-ollama % ./ollama_export_and_import_2603081900
╔════════════════════════════════════════╗
║     Ollama Model Export/Import Tool    ║
╚════════════════════════════════════════╝

Select operation:
  [1] Export Model
  [2] Import Model

Enter option (1/2): 2

--- Import Mode ---

Enter model folder path (contains Modelfile and model.gguf): /Users/workspace/Downloads/qwen3_0.6b

Reading Modelfile...

Suggested model name: qwen3_0.6b
Enter new model name (press Enter to use suggested): 

Will import model 'qwen3_0.6b' from: /Users/workspace/Downloads/qwen3_0.6b
Confirm import? (y/n): y

Starting import...
gathering model components 
copying file sha256:7f4030143c1c477224c5434f8272c662a8b042079a0a584f0a27a1684fe2e1fa 100% 
parsing GGUF 
using existing layer sha256:7f4030143c1c477224c5434f8272c662a8b042079a0a584f0a27a1684fe2e1fa 
using existing layer sha256:eb4402837c7829a690fa845de4d7f3fd842c2adee476d5341da8a46ea9255175 
using existing layer sha256:d18a5cc71b84bc4af394a31116bd3932b42241de70c77d2b76d69a314ec8aa12 
using existing layer sha256:cff3f395ef3756ab63e58b0ad1b32bb6f802905cae1472e6a12034e4246fbbdb 
writing manifest 
success 

Import successful! Model name: qwen3_0.6b
Usage: ollama run qwen3_0.6b
(base) workspace@macbook2022 exp307-mqtt-ollama % ollama ls
NAME                                                                                                          ID              SIZE      MODIFIED      
qwen3_0.6b:latest                                                                                             d8203e03137e    522 MB    3 seconds ago    
qwen3:4b                                                                                                      e363c0c8e5ca    11 GB     5 weeks ago      
modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf    e363c0c8e5ca    11 GB     5 weeks ago      
modelscope.cn/Qwen/Qwen3-VL-4B-Instruct-GGUF:latest                                                           7b8954ee27f7    3.3 GB    5 weeks ago      
qwen3:0.6b                                                                                                    3bae9c93586b    522 MB    9 months ago     
qllama/bge-reranker-large:latest                                                                              1327c18a2be0    604 MB    11 months ago    
bge-m3:latest                                                                                                 790764642607    1.2 GB    11 months ago    
posted @ 2026-03-08 19:21  qsBye  阅读(4)  评论(0)    收藏  举报