Docker Compose

Basic Docker Compose

Create docker-compose.yml:

version: '3.8'

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:latest
    container_name: llumen
    restart: unless-stopped
    ports:
      - "80:80"
    environment:
      - API_KEY=${API_KEY}
      - DATA_PATH=/data
    volumes:
      - ./llumen-data:/data

Create .env:

API_KEY=sk-or-v1-your-key-here
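
This key is a secret, so keep .env out of version control. One way, assuming the project lives in a git repository:

echo ".env" >> .gitignore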

Run:

docker-compose up -d
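
To confirm the container is up and serving, check its status and request the front page (assuming the default 80:80 port mapping above):

docker-compose ps
curl -I http://localhost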

Author's Real Setup

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:nightly
    restart: on-failure:4
    environment:
      - "API_KEY=sk-or-<api-key>"
      - "TRUSTED_HEADER=Remote-User"
    volumes:
      - "./data:/data"
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.llumen.rule=Host(`llumen.example.com`)"
      - "traefik.http.services.llumen.loadbalancer.server.port=80"
      - "traefik.http.routers.llumen.tls.domains[0].main=example.com"
      - "traefik.http.routers.llumen.tls.domains[0].sans=*.example.com"
      - "traefik.http.routers.llumen.tls.certresolver=gcp"
      - "traefik.http.routers.llumen.middlewares=authelia"
    deploy:
      resources:
        limits:
          memory: 1G
    # memswap_limit is a service-level key, not a resource limit under deploy
    memswap_limit: 1G
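
The authelia middleware referenced in the last label has to be defined elsewhere in the Traefik setup. As a sketch, assuming Authelia runs as a service named authelia that Traefik can reach, the forward-auth middleware is commonly declared with labels like these (the exact address path depends on your Authelia version):

labels:
  - "traefik.http.middlewares.authelia.forwardauth.address=http://authelia:9091/api/authz/forward-auth"
  - "traefik.http.middlewares.authelia.forwardauth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email"

Authelia then injects the authenticated username into the Remote-User header, which is presumably why TRUSTED_HEADER=Remote-User is set above.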

With Reverse Proxy

Nginx

version: '3.8'

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:latest
    restart: unless-stopped
    environment:
      - API_KEY=${API_KEY}
      - BIND_ADDR=0.0.0.0:3000
    volumes:
      - ./data:/data
    expose:
      - "3000"
    networks:
      - web

  nginx:
    image: nginx:alpine
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
      - ./ssl:/etc/nginx/ssl:ro
    depends_on:
      - llumen
    networks:
      - web

networks:
  web:
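
The compose file mounts ./nginx.conf, which isn't shown above. A minimal sketch that proxies to the llumen service on the port chosen by BIND_ADDR (llumen.yourdomain.com is a placeholder, and the TLS server block for the mounted ./ssl certificates is omitted for brevity):

events {}

http {
    server {
        listen 80;
        server_name llumen.yourdomain.com;

        location / {
            proxy_pass http://llumen:3000;
            # Pass upgrade headers so streaming/WebSocket responses work
            proxy_http_version 1.1;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host $host;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }
    }
}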

Caddy (Easier HTTPS)

version: '3.8'

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:latest
    restart: unless-stopped
    environment:
      - API_KEY=${API_KEY}
    volumes:
      - ./data:/data
    networks:
      - web

  caddy:
    image: caddy:alpine
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy_data:/data
      - caddy_config:/config
    depends_on:
      - llumen
    networks:
      - web

networks:
  web:

volumes:
  caddy_data:
  caddy_config:

Caddyfile:

llumen.yourdomain.com {
    reverse_proxy llumen:80
}

Caddy automatically handles HTTPS certificates!
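
For testing on a host without a public DNS record, Caddy can issue a locally-trusted certificate instead of requesting one from Let's Encrypt; tls internal is a standard Caddy directive:

llumen.yourdomain.com {
    tls internal
    reverse_proxy llumen:80
}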

With Ollama

Run local LLMs alongside llumen:

version: '3.8'

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:latest
    restart: unless-stopped
    ports:
      - "80:80"
    environment:
      - API_KEY=ollama
      - API_BASE=http://ollama:11434/v1
    volumes:
      - ./data:/data
    depends_on:
      - ollama
    networks:
      - ai

  ollama:
    image: ollama/ollama:latest
    restart: unless-stopped
    volumes:
      - ollama_data:/root/.ollama
    networks:
      - ai

networks:
  ai:

volumes:
  ollama_data:

Pull models:

docker-compose exec ollama ollama pull llama3
docker-compose exec ollama ollama pull mistral
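
The setup above runs Ollama on the CPU. If the host has an NVIDIA GPU and the NVIDIA Container Toolkit installed, a sketch of the extra keys to add to the ollama service:

ollama:
  image: ollama/ollama:latest
  # ... as above, plus:
  deploy:
    resources:
      reservations:
        devices:
          - driver: nvidia
            count: all
            capabilities: [gpu]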

Logging

services:
  llumen:
    image: ghcr.io/pinkfuwa/llumen:latest
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"
    # ... rest of config
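
This caps the container at three rotated 10 MB log files. To verify the options took effect after recreating the container (assuming it is named llumen, as in the basic example):

docker inspect --format '{{json .HostConfig.LogConfig}}' llumen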

Management Commands

# Start services
docker-compose up -d

# View logs
docker-compose logs -f llumen

# Restart service
docker-compose restart llumen

# Stop services
docker-compose down

# Stop and remove volumes
docker-compose down -v

# Update to latest image
docker-compose pull
docker-compose up -d
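
All application state lives in the mounted data directory, so a cold backup is just an archive of that directory while the stack is stopped (the path is ./llumen-data in the basic example and ./data in the others):

docker-compose stop
tar czf llumen-backup-$(date +%F).tar.gz ./llumen-data
docker-compose start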