# clustered-fucks/compose-files/databases/karakeep/docker-compose.yml
# Maddox 38d8a72e41 Phase 3: Migrate karakeep stack to databases
# - Add docker-compose.yml with web, meilisearch, chrome, ollama services
# - Add deploy-karakeep.yml Ansible playbook
# - Karakeep web on port 3054, meilisearch on 7700, ollama on 11434
# - Fixed ollama CPU limit for 2-core VM
# - Migrated data (bookmarks, assets, llama3.2:3b model) from alien
#
# Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
# 2026-01-29 18:44:47 +00:00
#
# 138 lines
# 4 KiB
# YAML

services:
  # =========================================================================
  # Karakeep Web - Main application (UI + API; talks to meilisearch for
  # search, chrome for crawling/screenshots, ollama for AI tagging)
  # =========================================================================
  web:
    image: ghcr.io/karakeep-app/karakeep:${KARAKEEP_VERSION:-release}
    container_name: karakeep-web
    hostname: karakeep-web
    restart: unless-stopped
    volumes:
      - ./data:/data
    ports:
      # Host 3054 -> container 3000 (quoted to avoid YAML sexagesimal trap)
      - "3054:3000"
    environment:
      - DATA_DIR=/data
      - MEILI_ADDR=http://meilisearch:7700
      - MEILI_MASTER_KEY=${MEILI_MASTER_KEY}
      - BROWSER_WEB_URL=http://chrome:9222
      - OLLAMA_BASE_URL=http://ollama:11434
      - INFERENCE_TEXT_MODEL=${INFERENCE_TEXT_MODEL:-llama3.2:3b}
      - INFERENCE_IMAGE_MODEL=${INFERENCE_IMAGE_MODEL:-llava}
      - INFERENCE_LANG=${INFERENCE_LANG:-english}
      # NOTE(review): fallback is localhost:3000 but the homepage label below
      # points at https://karakeep.3ddbrewery.com — confirm NEXTAUTH_URL is
      # set in .env for production, or auth callbacks will target localhost.
      - NEXTAUTH_URL=${NEXTAUTH_URL:-http://localhost:3000}
      - NEXTAUTH_SECRET=${NEXTAUTH_SECRET}
      - NODE_ENV=production
      - NEXT_TELEMETRY_DISABLED=1
    depends_on:
      meilisearch:
        condition: service_started
      chrome:
        condition: service_started
      ollama:
        condition: service_started
    networks:
      - karakeep_internal
      - proxy
    deploy:
      resources:
        limits:
          memory: 1G
          cpus: '2.0'
    healthcheck:
      test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://127.0.0.1:3000/api/health || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s
    labels:
      - "autoheal=true"
      - "com.centurylinklabs.watchtower.enable=true"
      - "homepage.group=Personal"
      - "homepage.name=Karakeep"
      - "homepage.icon=karakeep.png"
      - "homepage.href=https://karakeep.3ddbrewery.com"

  # =========================================================================
  # Meilisearch - Full-text search engine
  # =========================================================================
  meilisearch:
    image: getmeili/meilisearch:v1.13.3
    container_name: karakeep-meilisearch
    hostname: meilisearch
    restart: unless-stopped
    volumes:
      - ./meilisearch:/meili_data
    ports:
      # NOTE(review): published on the host as well as the internal network;
      # the migration notes say this is intentional, but access is only
      # protected by MEILI_MASTER_KEY — verify host firewall rules.
      - "7700:7700"
    environment:
      - MEILI_NO_ANALYTICS=true
      - MEILI_MASTER_KEY=${MEILI_MASTER_KEY}
    networks:
      - karakeep_internal
    deploy:
      resources:
        limits:
          memory: 1G
          cpus: '1.0'
    labels:
      - "autoheal=true"
      - "com.centurylinklabs.watchtower.enable=true"

  # =========================================================================
  # Chrome - Headless browser for screenshots/crawling
  # =========================================================================
  chrome:
    image: gcr.io/zenika-hub/alpine-chrome:123
    container_name: karakeep-chrome
    hostname: chrome
    restart: unless-stopped
    command:
      - --no-sandbox
      - --disable-gpu
      - --disable-dev-shm-usage
      - --remote-debugging-address=0.0.0.0
      - --remote-debugging-port=9222
      - --hide-scrollbars
    networks:
      # Internal-only: the debugging port is not published on the host.
      - karakeep_internal
    deploy:
      resources:
        limits:
          memory: 512M
          cpus: '1.0'
    labels:
      - "autoheal=true"
      - "com.centurylinklabs.watchtower.enable=true"

  # =========================================================================
  # Ollama - Local AI inference (CPU-only on databases VM)
  # =========================================================================
  ollama:
    # NOTE(review): unpinned `latest` combined with watchtower auto-update
    # can pull breaking releases unattended — consider pinning a version tag.
    image: ollama/ollama:latest
    container_name: karakeep-ollama
    hostname: ollama
    restart: unless-stopped
    volumes:
      - ./ollama:/root/.ollama
    ports:
      # NOTE(review): the Ollama API has no authentication; published on the
      # host per the migration plan — verify it is not reachable externally.
      - "11434:11434"
    environment:
      - OLLAMA_HOST=0.0.0.0:11434
    networks:
      - karakeep_internal
    deploy:
      resources:
        limits:
          # Ollama needs more memory for models (CPU-only, limited by VM)
          memory: 4G
          cpus: '2.0'
    labels:
      - "autoheal=true"
      - "com.centurylinklabs.watchtower.enable=true"

networks:
  karakeep_internal:
    driver: bridge
  # Shared reverse-proxy network, created outside this compose project.
  proxy:
    external: true