plane/docker-compose-hub.yml

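# Docker Compose stack for Plane built from prebuilt Docker Hub images (makeplane/*); most values are read from the adjacent .env file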
version: "3.8"
services:
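  # Next.js web frontend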
  plane-web:
    container_name: planefrontend
    image: makeplane/plane-frontend:latest
    restart: always
    command: /usr/local/bin/start.sh
    env_file:
      - .env
    environment:
      NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL}
      NEXT_PUBLIC_GOOGLE_CLIENTID: 0
      NEXT_PUBLIC_GITHUB_APP_NAME: 0
      NEXT_PUBLIC_GITHUB_ID: 0
      NEXT_PUBLIC_SENTRY_DSN: 0
      NEXT_PUBLIC_ENABLE_OAUTH: 0
      NEXT_PUBLIC_ENABLE_SENTRY: 0
    depends_on:
      - plane-api
      - plane-worker
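  # Django REST API; started through the image's ./bin/takeoff script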
  plane-api:
    container_name: planebackend
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/takeoff
    env_file:
      - .env
    environment:
      DJANGO_SETTINGS_MODULE: plane.settings.production
      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE}
      REDIS_URL: redis://plane-redis:6379/
      EMAIL_HOST: ${EMAIL_HOST}
      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
      EMAIL_PORT: ${EMAIL_PORT}
      EMAIL_FROM: ${EMAIL_FROM}
      EMAIL_USE_TLS: ${EMAIL_USE_TLS}
      AWS_REGION: ${AWS_REGION}
      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
      WEB_URL: ${WEB_URL}
      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
      DISABLE_COLLECTSTATIC: 1
      DOCKERIZED: 1
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      GPT_ENGINE: ${GPT_ENGINE}
      SECRET_KEY: ${SECRET_KEY}
      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
      USE_MINIO: ${USE_MINIO}
      DEBUG: ${DEBUG}
    depends_on:
      - plane-db
      - plane-redis
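  # Background task worker; runs with the same environment as the API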
  plane-worker:
    container_name: planerqworker
    image: makeplane/plane-worker:latest
    restart: always
    command: ./bin/worker
    env_file:
      - .env
    environment:
      DJANGO_SETTINGS_MODULE: plane.settings.production
      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE}
      REDIS_URL: redis://plane-redis:6379/
      EMAIL_HOST: ${EMAIL_HOST}
      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
      EMAIL_PORT: ${EMAIL_PORT}
      EMAIL_FROM: ${EMAIL_FROM}
      EMAIL_USE_TLS: ${EMAIL_USE_TLS}
      AWS_REGION: ${AWS_REGION}
      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
      WEB_URL: ${WEB_URL}
      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
      DISABLE_COLLECTSTATIC: 1
      DOCKERIZED: 1
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      GPT_ENGINE: ${GPT_ENGINE}
      SECRET_KEY: ${SECRET_KEY}
      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
      USE_MINIO: ${USE_MINIO}
      DEBUG: ${DEBUG}
    depends_on:
      - plane-api
      - plane-db
      - plane-redis
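  # PostgreSQL database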
  plane-db:
    container_name: plane-db
    image: postgres:15.2-alpine
    restart: always
    command: postgres -c 'max_connections=1000'
    env_file:
      - .env
    environment:
      POSTGRES_USER: ${PGUSER}
      POSTGRES_DB: ${PGDATABASE}
      POSTGRES_PASSWORD: ${PGPASSWORD}
    volumes:
      - pgdata:/var/lib/postgresql/data
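  # Redis instance referenced by REDIS_URL above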
  plane-redis:
    container_name: plane-redis
    image: redis:6.2.7-alpine
    restart: always
    volumes:
      - redisdata:/data
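  # MinIO provides S3-compatible storage for file uploads (used when USE_MINIO is enabled)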
  plane-minio:
    container_name: plane-minio
    image: minio/minio
    volumes:
      - uploads:/export
    command: server /export --console-address ":9090"
    env_file:
      - .env
    environment:
      MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
      MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
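  # One-shot helper that creates the uploads bucket in MinIO and allows anonymous downloads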
  createbuckets:
    image: minio/mc
    entrypoint: >
      /bin/sh -c "
      /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
      /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
      /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
      exit 0;
      "
    env_file:
      - .env
    depends_on:
      - plane-minio
  # Comment this if you already have a reverse proxy running
  plane-proxy:
    container_name: planeproxy
    image: makeplane/plane-proxy:latest
    ports:
      - ${NGINX_PORT}:80
    env_file:
      - .env
    environment:
      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880}
      BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads}
    depends_on:
      - plane-web
      - plane-api
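# Named volumes for Postgres, Redis, and MinIO data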
volumes:
  pgdata:
  redisdata:
  uploads: