# ai-lawyer-agent/compose.yaml
name: "legal-ai-assistant"

services:
  # Application API — built from backend/Dockerfile, exposed on host port 8000.
  # Talks to the LiteLLM proxy (OpenAI-compatible /v1 endpoint) and the MCP
  # tool server over the Compose network via service-name DNS.
  backend:
    build:
      context: .
      dockerfile: backend/Dockerfile
    restart: unless-stopped
    ports:
      - "8000:8000"  # quoted: unquoted H:C port maps can hit YAML 1.1 sexagesimal parsing
    environment:
      - PYTHONUNBUFFERED=1
      - LITELLM_BASE_URL=http://litellm:4000/v1
      - LITELLM_API_KEY=sk-anything  # NOTE(review): placeholder key — proxy presumably accepts any value; confirm
      - MCP_SERVER_URL=http://mcp:8001/mcp
      - DEFAULT_MODEL=llama-3.3-70b
    depends_on:
      # service_started only guarantees the containers are running, not that
      # they are ready to accept requests; add healthchecks + service_healthy
      # if startup races are observed.
      litellm:
        condition: service_started
      mcp:
        condition: service_started

  # LiteLLM proxy — single OpenAI-compatible gateway in front of Groq/Gemini.
  litellm:
    image: ghcr.io/berriai/litellm:main-stable
    container_name: litellm
    restart: unless-stopped
    ports:
      - "4000:4000"
    env_file:
      - .env  # supplies provider API keys; keep out of VCS
    environment:
      # Explicit pass-through so missing host vars fail visibly at compose time
      # rather than silently inside the container.
      - GROQ_API_KEY=${GROQ_API_KEY}
      - GEMINI_API_KEY=${GEMINI_API_KEY}
    volumes:
      - ./config.yaml:/app/config.yaml:ro  # model routing config, mounted read-only
    command: ['--config', '/app/config.yaml', '--port', '4000']

  # MCP tool server — wraps the Slovak justice-ministry REST API for the backend.
  mcp:
    build:
      context: .
      dockerfile: backend/mcp_server/Dockerfile
    restart: unless-stopped
    ports:
      - "8001:8001"
    environment:
      - JUSTICE_API_BASE=https://obcan.justice.sk/pilot/api/ress-isu-service