services:
  karakeep-web:
    image: ghcr.io/karakeep-app/karakeep:${KARAKEEP_VERSION:-release}
    restart: unless-stopped
    volumes:
      # By default, the data is stored in a docker volume called "data".
      # If you want to mount a custom directory, change the volume mapping to:
      # - /path/to/your/directory:/data
      - /data/karakeep:/data
    # ports:
    #   - 3000:3000
    env_file:
      - .env
    environment:
      MEILI_ADDR: http://meilisearch:7700
      BROWSER_WEB_URL: http://karakeep-chrome:9222
      OLLAMA_BASE_URL: http://ollama:11434 # address of the Ollama API used for inference
      INFERENCE_TEXT_MODEL: gemma3
      INFERENCE_IMAGE_MODEL: llava
      INFERENCE_OUTPUT_SCHEMA: json
      INFERENCE_CONTEXT_LENGTH: 1024
      INFERENCE_JOB_TIMEOUT_SEC: 120

      # You almost never want to change the value of the DATA_DIR variable.
      # If you want to mount a custom directory, change the volume mapping above instead.
      DATA_DIR: /data # DON'T CHANGE THIS
    networks:
      - karakeep
      - karakeep-int
      - ollama-int
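
  # ---------------------------------------------------------------------------
  # karakeep-web and meilisearch read further settings from a local ".env" file
  # that is not shown here. A minimal sketch, assuming the variables documented
  # in the Karakeep installation guide (values are placeholders; generate your
  # own secrets):
  #
  #   KARAKEEP_VERSION=release
  #   NEXTAUTH_URL=http://localhost:3000
  #   NEXTAUTH_SECRET=<output of: openssl rand -base64 36>
  #   MEILI_MASTER_KEY=<output of: openssl rand -base64 36>
  # ---------------------------------------------------------------------------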

  karakeep-chrome:
    image: gcr.io/zenika-hub/alpine-chrome:123
    restart: unless-stopped
    command:
      - --no-sandbox
      - --disable-gpu
      - --disable-dev-shm-usage
      - --remote-debugging-address=0.0.0.0
      - --remote-debugging-port=9222
      - --hide-scrollbars
    networks:
      - karakeep-int

  meilisearch:
    image: getmeili/meilisearch:v1.13.3
    restart: unless-stopped
    env_file:
      - .env
    environment:
      MEILI_NO_ANALYTICS: "true"
    volumes:
      - /data/meilisearch:/meili_data
    networks:
      - karakeep-int

  ollama:
    image: docker.io/ollama/ollama:latest
    volumes:
      - .:/code
      - /data/library/ollama/ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: always
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - OLLAMA_HOST=0.0.0.0
      - OLLAMA_DEBUG=1
    networks:
      - ollama-int
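
  # The inference models referenced by karakeep-web above (gemma3, llava) are
  # not pulled automatically. Once the stack is up, pull them into the "ollama"
  # container defined above, e.g.:
  #
  #   docker exec -it ollama ollama pull gemma3
  #   docker exec -it ollama ollama pull llava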

  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: ollama-webui
    volumes:
      - /data/library/ollama/ollama-webui:/app/backend/data
    depends_on:
      - ollama
    environment: # https://docs.openwebui.com/getting-started/env-configuration#default_models
      # comma separated ollama hosts; note that nothing in this file publishes the
      # Ollama API on the host, so host.docker.internal:7869 assumes it is exposed elsewhere
      - OLLAMA_BASE_URLS=http://host.docker.internal:7869
      - ENV=dev
      - WEBUI_AUTH=True
      - WEBUI_NAME=valiantlynx AI
      - WEBUI_URL=http://localhost:8080
      - WEBUI_SECRET_KEY=t0p-s3cr3t
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
    networks:
      - ollama
      - ollama-int
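
# None of the networks referenced above (karakeep, karakeep-int, ollama,
# ollama-int) are defined in this file, so "docker compose up" will fail with an
# undefined-network error. A minimal sketch of the missing top-level definitions,
# assuming the networks belong to this stack; if one is managed outside this file
# (e.g. by a reverse-proxy stack), declare it with "external: true" instead.
networks:
  karakeep:
  karakeep-int:
  ollama:
  ollama-int: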