1、安装docker、docker-compose:

docker-compose.yml

version: "3.8"

services:
  ollama:
    image: m.daocloud.io/docker.io/ollama/ollama:latest
    container_name: ollama
    ports:
      - "11434:11434"  # expose the Ollama API on the host
    restart: unless-stopped
    healthcheck:
      # string form runs via shell (CMD-SHELL); non-zero exit = unhealthy
      test: ollama --version || exit 1
      interval: 30s
      timeout: 10s
      retries: 3
    volumes:
      - ./ollama:/root/.ollama  # persist downloaded models on the host

  anythingllm:
    image: m.daocloud.io/docker.io/mintplexlabs/anythingllm
    container_name: anythingllm
    depends_on:
      - ollama  # start the model backend before the UI
    ports:
      - "3001:3001"
    cap_add:
      - SYS_ADMIN
    volumes:
      # host directory must be writable by the container's default UID 1000
      - ./anythingllm:/app/server/storage
    extra_hosts:
      - "host.docker.internal:host-gateway"
    restart: unless-stopped
    environment:
      # NOTE: in list form Compose passes the scalar verbatim — quotes
      # inside a value become part of the variable, so values below are
      # intentionally unquoted. Adjust for your environment.
      - STORAGE_DIR=/app/server/storage
      - JWT_SECRET=28905cae896f0c97magedu.com
      - LLM_PROVIDER=ollama
      # reach the ollama service by its Compose service name
      - OLLAMA_BASE_PATH=http://ollama:11434
      - OLLAMA_MODEL_PREF=erwan2/DeepSeek-R1-Distill-Qwen-1.5B
      - OLLAMA_MODEL_TOKEN_LIMIT=4096
      - EMBEDDING_ENGINE=ollama
      - EMBEDDING_BASE_PATH=http://ollama:11434
      - EMBEDDING_MODEL_PREF=nomic-embed-text:latest
      - EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
      - VECTOR_DB=lancedb
      - WHISPER_PROVIDER=local
      - TTS_PROVIDER=native
      - PASSWORDMINCHAR=8
      - UID=1000
      - GID=1000

2、此示例需要创建数据目录:

mkdir anythingllm/
# 确保容器内默认的 UID 1000 用户对该目录具有写权限(chmod +x 仅添加执行权限,不满足要求)
sudo chown -R 1000:1000 anythingllm/

3、启动:

docker-compose up

4、下载大模型:

# 下载要部署的本地模型(有一个即可)
docker-compose exec ollama ollama pull erwan2/DeepSeek-R1-Distill-Qwen-1.5B
docker-compose exec ollama ollama pull deepseek-r1:1.5b

# 下载用于词嵌入的模型(必须)
docker-compose exec ollama ollama pull nomic-embed-text:latest