v4.1.0: DMS email documents, category-specific Nachweis linking, version system
- Save cover email body as DMS document with new 'email' context type
- Show email body separately from attachments in email detail view
- Add per-category DMS document assignment in quarterly confirmation (Studiennachweis, Einkommenssituation, Vermögenssituation)
- Add VERSION file and context processor for automatic version display
- Add MCP server, agent system, import/export, and new migrations
- Update compose files and production environment template

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -84,6 +84,71 @@ services:
      - db
      - redis

  mcp:
    build: ./app
    depends_on:
      db:
        condition: service_healthy
    environment:
      - POSTGRES_DB=stiftung_dev
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres_dev
      - DB_HOST=db
      - DB_PORT=5432
      - DJANGO_SECRET_KEY=dev-secret-key-not-for-production
      - DJANGO_DEBUG=1
      - DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
      - LANGUAGE_CODE=de
      - TIME_ZONE=Europe/Berlin
      - MCP_TOKEN_READONLY=${MCP_TOKEN_READONLY:-dev-readonly-token}
      - MCP_TOKEN_EDITOR=${MCP_TOKEN_EDITOR:-dev-editor-token}
      - MCP_TOKEN_ADMIN=${MCP_TOKEN_ADMIN:-dev-admin-token}
    # Kein Port-Mapping – nur internes Netz
    # Start via: docker compose -f compose.dev.yml run --rm -e MCP_AUTH_TOKEN=dev-readonly-token mcp
    stdin_open: true
    volumes:
      - ./app:/app
    command: ["python", "-m", "mcp_server"]

  ollama:
    image: ollama/ollama:latest
    # Kein externes Port-Mapping — nur über internes Docker-Netzwerk erreichbar
    # Django-App: http://ollama:11434
    environment:
      - OLLAMA_MAX_LOADED_MODELS=1
      - OLLAMA_NUM_PARALLEL=1
      - OLLAMA_DEFAULT_MODEL=${OLLAMA_DEFAULT_MODEL:-qwen2.5:3b}
    volumes:
      - ollama_data_dev:/root/.ollama
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "curl -sf http://localhost:11434/api/tags || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s
    # Beim ersten Start: Ollama starten, dann Modell laden (falls nicht vorhanden)
    entrypoint: >
      sh -c "
        ollama serve &
        OLLAMA_PID=$$!
        echo '[ollama] Warte auf API...'
        RETRIES=0
        until curl -sf http://localhost:11434/api/tags > /dev/null 2>&1; do
          RETRIES=$$((RETRIES + 1))
          [ $$RETRIES -ge 60 ] && echo '[ollama] FEHLER: API nicht bereit.' && exit 1
          sleep 1
        done
        MODEL=$${OLLAMA_DEFAULT_MODEL:-qwen2.5:3b}
        if ollama list | grep -q \"$$MODEL\"; then
          echo \"[ollama] Modell '$$MODEL' bereits vorhanden.\"
        else
          echo \"[ollama] Lade Modell '$$MODEL'...\"
          ollama pull \"$$MODEL\"
        fi
        wait $$OLLAMA_PID
      "

  grampsweb:
    image: ghcr.io/gramps-project/grampsweb:latest
    ports:
@@ -112,3 +177,4 @@ volumes:
  paperless_export_dev:
  paperless_consume_dev:
  gramps_data_dev:
  ollama_data_dev:
|
||||
Reference in New Issue
Block a user