This guide deploys AnythingLLM with Docker Compose on Debian (version 10 through the latest stable release), Ubuntu LTS (20.04 and newer), and RHEL 9+ compatible hosts.
Important: This playbook includes the required SYS_ADMIN capability for webpage scraping functionality and proper security hardening.
---
# Deploys AnythingLLM via Docker Compose on Debian/Ubuntu and RHEL-family hosts.
# Includes the SYS_ADMIN capability required for webpage scraping.
- name: Deploy AnythingLLM
  hosts: anythingllm
  become: true
  vars:
    app_root: /opt/anythingllm
    app_port: 3001
    app_image: mintplexlabs/anythingllm:master
    storage_dir: "{{ app_root }}/storage"
    # NOTE(review): play-level vars take precedence over group_vars, so a
    # vault file that defines jwt_secret under group_vars will NOT override
    # these values. Override on the command line (-e) or rename the vault
    # variables and reference them here, e.g.
    # "{{ vault_jwt_secret | default(lookup('password', ...)) }}".
    #
    # lookup('password', '/dev/null') generates a NEW random value on every
    # run. The .env file below is written with force: false so the secret
    # generated on the first run is preserved across replays.
    jwt_secret: "{{ lookup('password', '/dev/null length=64 chars=hexdigits') }}"
    # Gateway token for Web UI access (not referenced by the tasks below;
    # kept so it can be consumed by templates or vault overrides).
    gateway_token: "{{ lookup('password', '/dev/null length=32 chars=hexdigits') }}"

  tasks:
    - name: Install Docker on Debian/Ubuntu
      ansible.builtin.apt:
        name:
          # NOTE(review): docker-compose-plugin is shipped by Docker's
          # upstream apt repo, which this playbook does not add. On
          # distro-only repos the package is docker-compose-v2 (Debian 12+,
          # Ubuntu 22.04+) — verify for your release.
          - docker.io
          - docker-compose-plugin
          - curl
        state: present
        update_cache: true
      when: ansible_os_family == "Debian"

    - name: Install Docker on RHEL family
      ansible.builtin.dnf:
        name:
          # NOTE(review): RHEL 9 base repos do not ship a "docker" package;
          # this expects docker-ce from Docker's upstream repo (or
          # podman-docker) to be available — confirm for your environment.
          - docker
          - docker-compose-plugin
          - curl
        state: present
      when: ansible_os_family == "RedHat"

    - name: Enable and start Docker
      ansible.builtin.service:
        name: docker
        state: started
        enabled: true

    - name: Create app directory
      ansible.builtin.file:
        path: "{{ app_root }}"
        state: directory
        mode: "0755"

    - name: Create storage directory
      ansible.builtin.file:
        path: "{{ storage_dir }}"
        state: directory
        mode: "0755"

    - name: Create .env file
      ansible.builtin.copy:
        dest: "{{ storage_dir }}/.env"
        mode: "0600"
        # force: false keeps the first-run JWT secret stable: without it the
        # randomly generated jwt_secret above would rewrite .env (and
        # invalidate all sessions) on every playbook run. Delete the file on
        # the host if you need to regenerate it.
        force: false
        content: |
          # Storage configuration
          STORAGE_DIR=/app/server/storage
          WORKSPACES_DIR=/app/server/storage/workspaces
          # Security settings
          JWT_SECRET={{ jwt_secret }}
          MULTI_USER_MODE=false
          # Network settings
          PORT=3001
          # LLM Provider (configure via Web UI or uncomment below)
          # LLM_PROVIDER=ollama
          # OLLAMA_BASE_PATH=http://localhost:11434
          # Vector Database (default: lancedb)
          # VECTOR_DB=lancedb
          # Telemetry (optional)
          # DISABLE_TELEMETRY=false

    - name: Write Docker Compose file
      ansible.builtin.copy:
        dest: "{{ app_root }}/docker-compose.yml"
        mode: "0644"
        content: |
          version: '3.8'
          services:
            anythingllm:
              image: {{ app_image }}
              container_name: anythingllm
              restart: unless-stopped
              ports:
                - "{{ app_port }}:3001"
              # SYS_ADMIN is required for the built-in webpage scraper.
              cap_add:
                - SYS_ADMIN
              volumes:
                - {{ storage_dir }}:/app/server/storage
                - {{ storage_dir }}/.env:/app/server/.env
              environment:
                - STORAGE_DIR=/app/server/storage
              # NOTE(review): assumes curl is present inside the image —
              # verify, or switch to a wget/node-based check if it is not.
              healthcheck:
                test: ["CMD", "curl", "-f", "http://localhost:3001/api/health"]
                interval: 30s
                timeout: 10s
                retries: 3
                start_period: 60s
              networks:
                - anythingllm-net
              # Security: Read-only root filesystem (optional, advanced)
              # read_only: true
              # tmpfs:
              #   - /tmp
              #   - /run
              # Security: Drop capabilities (optional, advanced)
              # security_opt:
              #   - no-new-privileges:true
              # cap_drop:
              #   - ALL
          networks:
            anythingllm-net:
              driver: bridge

    - name: Start application stack
      ansible.builtin.command: docker compose up -d
      args:
        chdir: "{{ app_root }}"
      register: compose_result
      # docker compose reports container state transitions on stderr; only
      # report "changed" when something was actually created or (re)started.
      changed_when: >-
        'Started' in compose_result.stderr
        or 'Created' in compose_result.stderr
        or 'Recreate' in compose_result.stderr

    - name: Wait for application to be ready
      ansible.builtin.uri:
        url: "http://localhost:{{ app_port }}/api/health"
        method: GET
        status_code: 200
      retries: 30
      delay: 10
      until: result is succeeded
      register: result
      # Continue so the summary below still prints; inspect 'result' if the
      # health check never succeeded.
      ignore_errors: true

    - name: Display deployment information
      ansible.builtin.debug:
        msg: |
          AnythingLLM deployment complete!
          Access the Web UI at: http://{{ inventory_hostname }}:{{ app_port }}
          IMPORTANT:
          - Complete the setup wizard in the Web UI
          - Configure your LLM provider (OpenAI, Anthropic, Ollama, etc.)
          - Enable multi-user mode if needed (Docker version only)
          - Store API keys securely in the Web UI
          - Do not expose port {{ app_port }} directly to the internet
          Storage location: {{ storage_dir }}
          Configuration file: {{ storage_dir }}/.env
Save the playbook as deploy-anythingllm.yml.
Create an inventory file (inventory.ini):
[anythingllm]
server1.example.com
server2.example.com
Use Ansible Vault to store sensitive values:
# Create vault file
ansible-vault create group_vars/anythingllm/vault.yml
Add your secrets:
# group_vars/anythingllm/vault.yml
jwt_secret: your-secure-jwt-secret-here
gateway_token: your-secure-token-here
Note: play-level `vars` take precedence over `group_vars`, so the playbook's generated defaults would shadow these vault values. Either remove `jwt_secret`/`gateway_token` from the playbook's `vars` section, or pass the vault file explicitly with `-e @group_vars/anythingllm/vault.yml` so the vault values win.
# Basic run
ansible-playbook -i inventory.ini deploy-anythingllm.yml
# With vault
ansible-playbook -i inventory.ini deploy-anythingllm.yml --ask-vault-pass
# Limit to specific host
ansible-playbook -i inventory.ini deploy-anythingllm.yml --limit server1.example.com
Add Ollama service to the Docker Compose file:
# Replaces the default compose task: runs a local Ollama service alongside
# AnythingLLM and points AnythingLLM at it over the shared bridge network.
- name: Write Docker Compose file with Ollama
  ansible.builtin.copy:
    dest: "{{ app_root }}/docker-compose.yml"
    mode: "0644"
    content: |
      version: '3.8'
      services:
        ollama:
          image: ollama/ollama:latest
          container_name: ollama
          restart: unless-stopped
          volumes:
            - {{ app_root }}/ollama-data:/root/.ollama
          # NOTE(review): publishing 11434 exposes Ollama on the host; drop
          # this ports block if only AnythingLLM should reach it.
          ports:
            - "11434:11434"
          networks:
            - anythingllm-net
        anythingllm:
          image: {{ app_image }}
          container_name: anythingllm
          restart: unless-stopped
          ports:
            - "{{ app_port }}:3001"
          # SYS_ADMIN is required for the built-in webpage scraper.
          cap_add:
            - SYS_ADMIN
          volumes:
            - {{ storage_dir }}:/app/server/storage
            - {{ storage_dir }}/.env:/app/server/.env
          environment:
            - STORAGE_DIR=/app/server/storage
            # Reaches Ollama by service name on the shared network.
            - OLLAMA_BASE_PATH=http://ollama:11434
          depends_on:
            - ollama
          healthcheck:
            test: ["CMD", "curl", "-f", "http://localhost:3001/api/health"]
            interval: 30s
            timeout: 10s
            retries: 3
            start_period: 60s
          networks:
            - anythingllm-net
      networks:
        anythingllm-net:
          driver: bridge
Add Nginx service for TLS termination:
# Replaces the default compose task: puts an Nginx reverse proxy in front of
# AnythingLLM for TLS termination. AnythingLLM is only exposed on the
# internal network (expose, not ports); Nginx publishes 80/443.
- name: Write Docker Compose with Nginx
  ansible.builtin.copy:
    dest: "{{ app_root }}/docker-compose.yml"
    mode: "0644"
    content: |
      version: '3.8'
      services:
        anythingllm:
          image: {{ app_image }}
          container_name: anythingllm
          restart: unless-stopped
          # Internal-only: reachable by Nginx on the bridge network, not
          # published on the host.
          expose:
            - "3001"
          # SYS_ADMIN is required for the built-in webpage scraper.
          cap_add:
            - SYS_ADMIN
          volumes:
            - {{ storage_dir }}:/app/server/storage
            - {{ storage_dir }}/.env:/app/server/.env
          environment:
            - STORAGE_DIR=/app/server/storage
          healthcheck:
            test: ["CMD", "curl", "-f", "http://localhost:3001/api/health"]
            interval: 30s
            timeout: 10s
            retries: 3
            start_period: 60s
          networks:
            - anythingllm-net
        nginx:
          image: nginx:alpine
          container_name: anythingllm-nginx
          restart: unless-stopped
          ports:
            - "80:80"
            - "443:443"
          # nginx.conf and the ssl/ directory must exist under {{ app_root }}
          # before this stack starts; both are mounted read-only.
          volumes:
            - {{ app_root }}/nginx.conf:/etc/nginx/nginx.conf:ro
            - {{ app_root }}/ssl:/etc/nginx/ssl:ro
          depends_on:
            - anythingllm
          networks:
            - anythingllm-net
      networks:
        anythingllm-net:
          driver: bridge
After deployment, access AnythingLLM at:
http://YOUR_SERVER_IP:3001
# SSH to the server
ssh user@server
# Check container status
docker compose -f /opt/anythingllm/docker-compose.yml ps
# View logs
docker compose -f /opt/anythingllm/docker-compose.yml logs -f anythingllm
docker compose -f /opt/anythingllm/docker-compose.yml restart
cd /opt/anythingllm
docker compose pull
docker compose up -d
# Test health endpoint
curl http://localhost:3001/api/health
# Check container health
docker compose -f /opt/anythingllm/docker-compose.yml exec anythingllm curl http://localhost:3001/api/health
# Stop service
docker compose -f /opt/anythingllm/docker-compose.yml down
# Backup storage directory
tar -czf anythingllm-backup-$(date +%Y%m%d_%H%M%S).tar.gz /opt/anythingllm/storage/
# Restart service
docker compose -f /opt/anythingllm/docker-compose.yml up -d
.env file with restricted permissions (mode: "0600"). For detailed security guidance, see AnythingLLM Security.
Any questions?
Feel free to contact us. Find all contact information on our contact page.