Complete Docker deployment guide for FastGPT using docker-compose.
| Requirement | Details |
|---|---|
| Docker | Docker Engine 20+ or Docker Desktop |
| Docker Compose | Docker Compose v2+ |
| RAM | 4GB minimum, 8GB+ recommended |
| Disk | 20GB minimum, 50GB+ recommended |
| OS | Linux, macOS, Windows with Docker Desktop |
mkdir -p ~/fastgpt && cd ~/fastgpt
Linux/macOS:
# One-click install script
bash <(curl -fsSL https://doc.fastgpt.cn/deploy/install.sh)
Manual Download:
curl -o docker-compose.yml https://raw.githubusercontent.com/labring/FastGPT/main/projects/app/docker-compose.yml
docker compose up -d
# Check container status
docker compose ps
# View logs
docker compose logs -f fastgpt
# Access web interface
# http://localhost:3000
# docker-compose.yml — FastGPT + MongoDB + PostgreSQL.
# Note: the top-level `version:` key is obsolete under Docker Compose v2
# (which this guide requires) and has been removed.
services:
  # FastGPT application server
  fastgpt:
    image: ghcr.io/labring/fastgpt:v4.14.7.2
    container_name: fastgpt
    restart: always
    ports:
      - "3000:3000"
    environment:
      # Database connections — hostnames are the service names below,
      # resolved via the shared fastgpt-network.
      - DB_URL=mongodb://mongodb:27017/fastgpt
      - PG_URL=postgresql://postgres:postgres@postgresql:5432/fastgpt
      # Security keys (CHANGE THESE! e.g. `openssl rand -hex 32`)
      - ROOT_KEY=your-secure-root-key-change-this
      - TOKEN_KEY=your-secure-token-key-change-this
      # Optional: LLM API keys
      - OPENAI_API_KEY=your-openai-key
      - ANTHROPIC_API_KEY=your-anthropic-key
      # Optional: LLM request tracking retention (hours)
      - LLM_REQUEST_TRACKING_RETENTION_HOURS=6
    volumes:
      - ./fastgpt-config:/app/data
    depends_on:
      # Wait for both databases to report healthy before starting.
      mongodb:
        condition: service_healthy
      postgresql:
        condition: service_healthy
    networks:
      - fastgpt-network
    healthcheck:
      # NOTE(review): assumes `curl` exists inside the FastGPT image — verify.
      test: ["CMD", "curl", "-f", "http://localhost:3000/api/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # MongoDB — document store
  mongodb:
    image: mongo:7.0
    container_name: fastgpt-mongodb
    restart: always
    volumes:
      # Bind mount — data lives in ./mongodb-data on the host (see backup section).
      - ./mongodb-data:/data/db
    environment:
      - MONGO_INITDB_DATABASE=fastgpt
    networks:
      - fastgpt-network
    healthcheck:
      test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
      interval: 30s
      timeout: 10s
      retries: 3

  # PostgreSQL — relational store (vector/metadata)
  postgresql:
    image: postgres:15-alpine
    container_name: fastgpt-postgresql
    restart: always
    volumes:
      # Bind mount — data lives in ./postgresql-data on the host.
      - ./postgresql-data:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_DB=fastgpt
    networks:
      - fastgpt-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 30s
      timeout: 10s
      retries: 3

networks:
  fastgpt-network:
    driver: bridge

# The former top-level `volumes:` block declared named volumes
# (mongodb-data, postgresql-data) that no service referenced — all services
# use host bind mounts above — so it was removed as dead configuration.
| Variable | Description | Example |
|---|---|---|
| DB_URL | MongoDB connection string | mongodb://mongodb:27017/fastgpt |
| PG_URL | PostgreSQL connection string | postgresql://postgres:postgres@postgresql:5432/fastgpt |
| ROOT_KEY | Root authentication key | your-secure-random-string |
| TOKEN_KEY | Token signing key | your-secure-random-string |
| Variable | Description | Default |
|---|---|---|
| OPENAI_API_KEY | OpenAI API key | - |
| ANTHROPIC_API_KEY | Anthropic API key | - |
| LLM_REQUEST_TRACKING_RETENTION_HOURS | LLM request log retention (hours) | 6 |
| HTTPS_PROXY | HTTP proxy for outbound requests | - |
# Generate random ROOT_KEY
openssl rand -hex 32
# Generate random TOKEN_KEY
openssl rand -hex 32
| Volume | Purpose | Location |
|---|---|---|
| mongodb-data | MongoDB database | ./mongodb-data/ |
| postgresql-data | PostgreSQL database | ./postgresql-data/ |
| fastgpt-config | FastGPT configuration | ./fastgpt-config/ |
# Backup all data
tar -czf fastgpt-backup-$(date +%Y%m%d).tar.gz \
mongodb-data/ \
postgresql-data/ \
fastgpt-config/
# Backup MongoDB only
docker compose exec mongodb mongodump --out /backup
docker cp mongodb:/backup ./mongodb-backup
# Backup PostgreSQL only
docker compose exec postgresql pg_dump -U postgres fastgpt > postgres-backup.sql
Default (⚠️ binds ALL host interfaces — use with a firewall):
ports:
- "3000:3000"
Localhost only (recommended when running behind a local reverse proxy):
ports:
- "127.0.0.1:3000:3000"
Specific interface:
ports:
- "192.168.1.100:3000:3000"
Nginx configuration:
# Reverse-proxies HTTPS traffic on fastgpt.example.com to the local FastGPT
# container on port 3000, forwarding client-identity headers.
server {
# NOTE(review): `http2` on the listen directive is deprecated in nginx >= 1.25.1
# (use a separate `http2 on;` directive there); this form works on older versions.
listen 443 ssl http2;
server_name fastgpt.example.com;
# TLS certificate and private key — replace paths with your own cert material.
ssl_certificate /etc/ssl/certs/fastgpt.example.com.crt;
ssl_certificate_key /etc/ssl/private/fastgpt.example.com.key;
location / {
# FastGPT container published on the host at port 3000.
proxy_pass http://localhost:3000;
# Preserve the original Host header and client address chain for the app.
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# Tells the app the original request was HTTPS.
proxy_set_header X-Forwarded-Proto $scheme;
}
}
services:
fastgpt:
# ... other config ...
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
Prerequisites for GPU passthrough: an NVIDIA driver and the NVIDIA Container Toolkit installed on the Docker host.
services:
fastgpt:
# ... other config ...
deploy:
resources:
limits:
cpus: '4'
memory: 4G
reservations:
cpus: '2'
memory: 2G
For production HA deployment:
# Add monitoring sidecars
services:
fastgpt:
# ... other config ...
logging:
driver: "json-file"
options:
max-size: "10m"
max-file: "3"
# Check logs
docker compose logs fastgpt
# Check dependencies
docker compose logs mongodb
docker compose logs postgresql
# Verify health
docker compose ps
# Test MongoDB connection
docker compose exec mongodb mongosh --eval "db.adminCommand('ping')"
# Test PostgreSQL connection
docker compose exec postgresql pg_isready -U postgres
# Check environment variables
docker compose exec fastgpt env | grep -E "DB_URL|PG_URL"
# Find process using port 3000
lsof -i :3000
# Change port in docker-compose.yml
ports:
- "3001:3000" # Use different host port
# Check container memory
docker stats
# Limit MongoDB cache (the official mongo image has no MONGODB_MAX_MEMORY
# variable — cap the WiredTiger cache via the command line instead)
mongodb:
command: --wiredTigerCacheSizeGB 2
# Limit PostgreSQL memory (the official postgres image has no
# POSTGRES_MAX_MEMORY variable — tune shared_buffers instead, and/or set
# a Compose `deploy.resources.limits.memory` on the service)
postgresql:
command: postgres -c shared_buffers=512MB
# Pull latest image
docker compose pull fastgpt
# Backup data
tar -czf fastgpt-backup-$(date +%Y%m%d).tar.gz \
mongodb-data/ postgresql-data/ fastgpt-config/
# Restart with new version
docker compose up -d fastgpt
# Verify version
docker compose exec fastgpt cat /app/package.json | grep version
# Stop current version
docker compose down
# Restore backup
tar -xzf fastgpt-backup-YYYYMMDD.tar.gz
# Start with previous version
docker compose up -d
# Use Docker secrets or external secret management
# Don't commit .env files to version control
echo ".env" >> .gitignore
# Restrict network access
sudo ufw allow from 192.168.1.0/24 to any port 3000
sudo ufw deny 3000/tcp
Any questions?
Feel free to contact us. Find all contact information on our contact page.