chore: docker setup (#1136)

* chore: update docker environment variables and compose file for better readability

* dev: update single dockerfile

* dev: update WEB_URL configuration

* dev: move database settings to environment variables

* chore: remove port configuration from default compose file

* dev: update example env to add EMAIL_FROM and default values for AWS
pablohashescobar authored on 2023-05-26 11:09:59 +05:30; committed by GitHub
parent 16604dd31b
commit b6321438ce
15 changed files with 204 additions and 195 deletions

View File

@@ -26,16 +26,23 @@ EMAIL_HOST=""
 EMAIL_HOST_USER=""
 EMAIL_HOST_PASSWORD=""
 EMAIL_PORT=587
+EMAIL_FROM="Team Plane <team@mailer.plane.so>"
 
 # AWS Settings
 AWS_REGION=""
-AWS_ACCESS_KEY_ID=""
-AWS_SECRET_ACCESS_KEY=""
-AWS_S3_BUCKET_NAME=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_BUCKET_NAME="uploads"
 AWS_S3_ENDPOINT_URL=""
 
 # GPT settings
 OPENAI_API_KEY=""
 GPT_ENGINE=""
 
-# Auto generated and Required
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+
+# Auto generated and Required that will be generated from setup.sh
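The new defaults only matter once the application reads them. A minimal sketch of how a Django settings module would typically consume these variables (the exact parsing in plane's settings files may differ; the DOCKERIZED handling here is an assumption):

    import os

    # The compose files below set DOCKERIZED: 1 explicitly for every container
    DOCKERIZED = int(os.environ.get("DOCKERIZED", 0))

    EMAIL_FROM = os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>")

    if DOCKERIZED:
        # Services resolve each other by service name on the compose network,
        # so the storage endpoint defaults to the internal MinIO hostname.
        AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")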

View File

@@ -13,7 +13,6 @@ RUN turbo prune --scope=app --docker
 
 # Add lockfile and package.json's of isolated subworkspace
 FROM node:18-alpine AS installer
-
 RUN apk add --no-cache libc6-compat
 RUN apk update
 WORKDIR /app

@@ -44,6 +43,8 @@ FROM python:3.11.1-alpine3.17 AS backend
 ENV PYTHONDONTWRITEBYTECODE 1
 ENV PYTHONUNBUFFERED 1
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+ENV DJANGO_SETTINGS_MODULE plane.settings.production
+ENV DOCKERIZED 1
 WORKDIR /code

@@ -88,11 +89,6 @@ RUN chmod +x ./bin/takeoff ./bin/worker
 RUN chmod -R 777 /code
 
 # Expose container port and run entry point script
-EXPOSE 8000
-EXPOSE 3000
-EXPOSE 80
 WORKDIR /app

@@ -126,9 +122,6 @@ COPY start.sh /usr/local/bin/
 RUN chmod +x /usr/local/bin/replace-env-vars.sh
 RUN chmod +x /usr/local/bin/start.sh
-EXPOSE 80
 CMD ["supervisord","-c","/code/supervisor.conf"]

View File

@@ -35,8 +35,8 @@ class FileAssetEndpoint(BaseAPIView):
             serializer.save(workspace_id=request.user.last_workspace_id)
             response_data = serializer.data
-            if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
-                response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
+            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+                response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
     except Exception as e:

@@ -86,8 +86,8 @@ class UserAssetsEndpoint(BaseAPIView):
         if serializer.is_valid():
             serializer.save()
             response_data = serializer.data
-            if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
-                response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
+            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+                response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
     except Exception as e:
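Both hunks apply the same substitution, and the issue-attachment endpoint in the next file repeats it. A minimal sketch of the shared logic, pulled into a helper for clarity (the helper name is illustrative, not part of the codebase):

    from django.conf import settings

    def public_asset_url(asset_url: str) -> str:
        # Inside compose, presigned URLs point at the private storage endpoint
        # (previously hardcoded as "minio:9000", now settings.AWS_S3_ENDPOINT_URL),
        # which a browser outside the compose network cannot reach. Rewriting it
        # to WEB_URL lets nginx proxy /uploads/ through to MinIO instead.
        if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in asset_url:
            return asset_url.replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
        return asset_url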

View File

@@ -789,8 +789,8 @@ class IssueAttachmentEndpoint(BaseAPIView):
         if serializer.is_valid():
             serializer.save(project_id=project_id, issue_id=issue_id)
             response_data = serializer.data
-            if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
-                response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
+            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+                response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             issue_activity.delay(
                 type="attachment.activity.created",
                 requested_data=None,

View File

@@ -19,7 +19,7 @@ def email_verification(first_name, email, token, current_site):
     try:
         realtivelink = "/request-email-verification/" + "?token=" + str(token)
-        abs_url = "http://" + current_site + realtivelink
+        abs_url = current_site + realtivelink
 
         from_email_string = settings.EMAIL_FROM

View File

@@ -17,7 +17,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
    try:
        realtivelink = f"/email-verify/?uidb64={uidb64}&token={token}/"
-        abs_url = "http://" + current_site + realtivelink
+        abs_url = current_site + realtivelink
 
        from_email_string = settings.EMAIL_FROM

View File

@@ -13,7 +13,7 @@ from sentry_sdk import capture_exception
 def magic_link(email, key, token, current_site):
     try:
         realtivelink = f"/magic-sign-in/?password={token}&key={key}"
-        abs_url = "http://" + current_site + realtivelink
+        abs_url = current_site + realtivelink
 
         from_email_string = settings.EMAIL_FROM

View File

@@ -21,7 +21,7 @@ def project_invitation(email, project_id, token, current_site):
         )
         relativelink = f"/project-member-invitation/{project_member_invite.id}"
-        abs_url = "http://" + current_site + relativelink
+        abs_url = current_site + relativelink
 
         from_email_string = settings.EMAIL_FROM

View File

@@ -25,7 +25,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
         realtivelink = (
             f"/workspace-member-invitation/{workspace_member_invite.id}?email={email}"
         )
-        abs_url = "http://" + current_site + realtivelink
+        abs_url = current_site + realtivelink
 
         from_email_string = settings.EMAIL_FROM
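All five mail tasks drop the hardcoded "http://" prefix, which shifts a requirement onto callers: current_site must now be a full origin including the scheme (the WEB_URL defaults updated below provide exactly that). A small sketch of the contract change:

    def build_absolute_url(current_site: str, relative_link: str) -> str:
        # Before: "http://" + current_site + relative_link  (always plain HTTP)
        # After: the scheme travels with current_site, so HTTPS deployments work.
        return current_site + relative_link

    assert build_absolute_url("https://plane.example.com", "/magic-sign-in/?key=k") == \
        "https://plane.example.com/magic-sign-in/?key=k"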

View File

@@ -70,7 +70,7 @@ MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
 if DOCKERIZED:
     REDIS_URL = os.environ.get("REDIS_URL")
 
-WEB_URL = os.environ.get("WEB_URL", "localhost:3000")
+WEB_URL = os.environ.get("WEB_URL", "http://localhost:3000")
 PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
 
 ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)

View File

@@ -90,7 +90,7 @@ if DOCKERIZED:
     # The name of the bucket to store files in.
     AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
     # The full URL to the S3 endpoint. Leave blank to use the default region URL.
-    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://minio:9000")
+    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
     # Default permissions
     AWS_DEFAULT_ACL = "public-read"
     AWS_QUERYSTRING_AUTH = False

@@ -223,7 +223,7 @@ else:
     }
 
-WEB_URL = os.environ.get("WEB_URL")
+WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so")
 PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
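These two hunks are the counterpart of the email-task change above: both settings modules now ship scheme-qualified WEB_URL values, so plain concatenation yields a clickable link. A sketch of the difference (assuming WEB_URL is unset in the environment):

    import os

    os.environ.pop("WEB_URL", None)

    old_link = "localhost:3000" + "/magic-sign-in/"            # not an absolute URL
    new_link = os.environ.get("WEB_URL", "http://localhost:3000") + "/magic-sign-in/"
    assert new_link == "http://localhost:3000/magic-sign-in/"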

View File

@@ -3,7 +3,7 @@ version: "3.8"
 services:
   plane-web:
     container_name: planefrontend
-    image: makeplane/plane-frontend:0.6
+    image: makeplane/plane-frontend:latest
     restart: always
     command: /usr/local/bin/start.sh
     environment:

@@ -19,12 +19,12 @@ services:
   plane-api:
     container_name: planebackend
-    image: makeplane/plane-backend:0.6
+    image: makeplane/plane-backend:latest
     restart: always
     command: ./bin/takeoff
     environment:
       DJANGO_SETTINGS_MODULE: plane.settings.production
-      DATABASE_URL: postgres://plane:xyzzyspoon@plane-db:5432/plane
+      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@plane-db:5432/plane
       REDIS_URL: redis://plane-redis:6379/
       EMAIL_HOST: ${EMAIL_HOST}
       EMAIL_HOST_USER: ${EMAIL_HOST_USER}

@@ -33,7 +33,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-      WEB_URL: localhost/
+      WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
       DOCKERIZED: 1

@@ -48,12 +48,12 @@ services:
   plane-worker:
     container_name: planerqworker
-    image: makeplane/plane-worker:0.6
+    image: makeplane/plane-worker:latest
     restart: always
     command: ./bin/worker
     environment:
       DJANGO_SETTINGS_MODULE: plane.settings.production
-      DATABASE_URL: postgres://plane:xyzzyspoon@plane-db:5432/plane
+      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@plane-db:5432/plane
       REDIS_URL: redis://plane-redis:6379/
       EMAIL_HOST: ${EMAIL_HOST}
       EMAIL_HOST_USER: ${EMAIL_HOST_USER}

@@ -62,7 +62,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-      WEB_URL: localhost/
+      WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
       DOCKERIZED: 1

@@ -80,9 +80,9 @@ services:
     restart: always
     command: postgres -c 'max_connections=1000'
     environment:
-      POSTGRES_USER: plane
+      POSTGRES_USER: ${PGUSER}
       POSTGRES_DB: plane
-      POSTGRES_PASSWORD: xyzzyspoon
+      POSTGRES_PASSWORD: ${PGPASSWORD}
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -94,25 +94,30 @@ services:
       - redisdata:/data
   plane-minio:
     container_name: plane-minio
     image: minio/minio
+    volumes:
+      - uploads:/export
     environment:
       MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
       MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
     command: server /export --console-address ":9090"
+    ports:
+      - 9000:9000
   createbuckets:
     image: minio/mc
     depends_on:
-      - minio
+      - plane-minio
     entrypoint: >
       /bin/sh -c "
-      /usr/bin/mc config host add plane-minio http://minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
+      /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
       /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
       /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
       exit 0;
       "
+  # Comment this if you already have a reverse proxy running
   nginx:
     container_name: nginx
     build:

@@ -128,3 +133,4 @@ services:
 volumes:
   pgdata:
   redisdata:
+  uploads:
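With the hardcoded plane/xyzzyspoon pair gone, docker compose interpolates ${PGUSER} and ${PGPASSWORD} from the .env file sitting next to the compose file, so the same compose file serves every deployment without embedding credentials. A sketch of the DATABASE_URL the api and worker containers end up with (values are illustrative):

    import os

    # Illustrative values; real deployments read these from .env
    os.environ.setdefault("PGUSER", "plane")
    os.environ.setdefault("PGPASSWORD", "change-me")

    database_url = (
        f"postgres://{os.environ['PGUSER']}:{os.environ['PGPASSWORD']}"
        "@plane-db:5432/plane"  # plane-db is the compose service name
    )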

View File

@@ -1,145 +1,148 @@
 version: "3.8"
 
 services:
-  nginx:
-    container_name: nginx
-    build:
-      context: ./nginx
-      dockerfile: Dockerfile
-    restart: always
-    ports:
-      - 80:80
-    depends_on:
-      - plane-web
-      - plane-api
-  db:
-    image: postgres:15.2-alpine
-    container_name: db
-    restart: always
-    volumes:
-      - pgdata:/var/lib/postgresql/data
-    environment:
-      POSTGRES_USER: plane
-      POSTGRES_DB: plane
-      POSTGRES_PASSWORD: xyzzyspoon
-      PGDATA: /var/lib/postgresql/data
-    command: postgres -c 'max_connections=1000'
-    ports:
-      - 5432:5432
-  redis:
-    image: redis:6.2.7-alpine
-    container_name: redis
-    restart: always
-    ports:
-      - 6379:6379
-    volumes:
-      - redisdata:/data
-  plane-web:
-    container_name: planefrontend
-    build:
-      context: .
-      dockerfile: ./apps/app/Dockerfile.web
-      args:
-        NEXT_PUBLIC_API_BASE_URL: http://localhost:8000
-    command: [ "/usr/local/bin/start.sh" ]
-    ports:
-      - 3000:3000
-    environment:
-      NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL}
-      NEXT_PUBLIC_GOOGLE_CLIENTID: "0"
-      NEXT_PUBLIC_GITHUB_APP_NAME: "0"
-      NEXT_PUBLIC_GITHUB_ID: "0"
-      NEXT_PUBLIC_SENTRY_DSN: "0"
-      NEXT_PUBLIC_ENABLE_OAUTH: "0"
-      NEXT_PUBLIC_ENABLE_SENTRY: "0"
-      NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0"
-      NEXT_PUBLIC_TRACK_EVENTS: "0"
-  plane-api:
-    container_name: planebackend
-    build:
-      context: ./apiserver
-      dockerfile: Dockerfile.api
-    restart: always
-    ports:
-      - 8000:8000
-    environment:
-      DJANGO_SETTINGS_MODULE: plane.settings.production
-      DATABASE_URL: postgres://plane:xyzzyspoon@db:5432/plane
-      REDIS_URL: redis://redis:6379/
-      EMAIL_HOST: ${EMAIL_HOST}
-      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
-      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
-      AWS_REGION: ${AWS_REGION}
-      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-      AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
-      WEB_URL: ${WEB_URL}
-      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
-      DISABLE_COLLECTSTATIC: 1
-      DOCKERIZED: 1
-      OPENAI_API_KEY: ${OPENAI_API_KEY}
-      GPT_ENGINE: ${GPT_ENGINE}
-      SECRET_KEY: ${SECRET_KEY}
-    depends_on:
-      - db
-      - redis
-    command: ./bin/takeoff
-    links:
-      - db:db
-      - redis:redis
-  plane-worker:
-    container_name: planebgworker
-    build:
-      context: ./apiserver
-      dockerfile: Dockerfile.api
-    depends_on:
-      - redis
-      - db
-      - plane-api
-    command: ./bin/worker
-    links:
-      - redis:redis
-      - db:db
-    environment:
-      DJANGO_SETTINGS_MODULE: plane.settings.production
-      DATABASE_URL: postgres://plane:xyzzyspoon@db:5432/plane
-      REDIS_URL: redis://redis:6379/
-      EMAIL_HOST: ${EMAIL_HOST}
-      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
-      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
-      AWS_REGION: ${AWS_REGION}
-      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-      AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
-      WEB_URL: ${WEB_URL}
-      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
-      DISABLE_COLLECTSTATIC: 1
-      DOCKERIZED: 1
-      OPENAI_API_KEY: ${OPENAI_API_KEY}
-      GPT_ENGINE: ${GPT_ENGINE}
-      SECRET_KEY: ${SECRET_KEY}
-  minio:
-    image: minio/minio
-    ports:
-      - 9000:9000
-    environment:
-      MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
-      MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
-    command: server /export --console-address ":9090"
-  createbuckets:
-    image: minio/mc
-    depends_on:
-      - minio
-    entrypoint: >
-      /bin/sh -c "
-      /usr/bin/mc config host add plane-minio http://minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
-      /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
-      /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
-      exit 0;
-      "
+  plane-web:
+    container_name: planefrontend
+    build:
+      context: .
+      dockerfile: ./apps/app/Dockerfile.web
+      args:
+        NEXT_PUBLIC_API_BASE_URL: http://localhost:8000
+    restart: always
+    command: [ "/usr/local/bin/start.sh" ]
+    environment:
+      NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL}
+      NEXT_PUBLIC_GOOGLE_CLIENTID: "0"
+      NEXT_PUBLIC_GITHUB_APP_NAME: "0"
+      NEXT_PUBLIC_GITHUB_ID: "0"
+      NEXT_PUBLIC_SENTRY_DSN: "0"
+      NEXT_PUBLIC_ENABLE_OAUTH: "0"
+      NEXT_PUBLIC_ENABLE_SENTRY: "0"
+      NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0"
+      NEXT_PUBLIC_TRACK_EVENTS: "0"
+
+  plane-api:
+    container_name: planebackend
+    build:
+      context: ./apiserver
+      dockerfile: Dockerfile.api
+    restart: always
+    command: ./bin/takeoff
+    environment:
+      DJANGO_SETTINGS_MODULE: plane.settings.production
+      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@plane-db:5432/plane
+      REDIS_URL: redis://redis:6379/
+      EMAIL_HOST: ${EMAIL_HOST}
+      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
+      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
+      AWS_REGION: ${AWS_REGION}
+      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
+      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      WEB_URL: ${WEB_URL}
+      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
+      DISABLE_COLLECTSTATIC: 1
+      DOCKERIZED: 1
+      OPENAI_API_KEY: ${OPENAI_API_KEY}
+      GPT_ENGINE: ${GPT_ENGINE}
+      SECRET_KEY: ${SECRET_KEY}
+    depends_on:
+      - plane-db
+      - plane-redis
+
+  plane-worker:
+    container_name: planebgworker
+    build:
+      context: ./apiserver
+      dockerfile: Dockerfile.api
+    restart: always
+    command: ./bin/worker
+    environment:
+      DJANGO_SETTINGS_MODULE: plane.settings.production
+      DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@plane-db:5432/plane
+      REDIS_URL: redis://redis:6379/
+      EMAIL_HOST: ${EMAIL_HOST}
+      EMAIL_HOST_USER: ${EMAIL_HOST_USER}
+      EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
+      AWS_REGION: ${AWS_REGION}
+      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
+      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+      AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      WEB_URL: ${WEB_URL}
+      GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
+      DISABLE_COLLECTSTATIC: 1
+      DOCKERIZED: 1
+      OPENAI_API_KEY: ${OPENAI_API_KEY}
+      GPT_ENGINE: ${GPT_ENGINE}
+      SECRET_KEY: ${SECRET_KEY}
+    depends_on:
+      - plane-api
+      - plane-db
+      - plane-redis
+
+  plane-db:
+    container_name: plane-db
+    image: postgres:15.2-alpine
+    restart: always
+    command: postgres -c 'max_connections=1000'
+    volumes:
+      - pgdata:/var/lib/postgresql/data
+    environment:
+      POSTGRES_USER: ${PGUSER}
+      POSTGRES_DB: plane
+      POSTGRES_PASSWORD: ${PGPASSWORD}
+      PGDATA: /var/lib/postgresql/data
+    ports:
+      - 5432:5432
+
+  plane-redis:
+    container_name: redis
+    image: redis:6.2.7-alpine
+    restart: always
+    volumes:
+      - redisdata:/data
+    ports:
+      - 6379:6379
+
+  plane-minio:
+    container_name: plane-minio
+    image: minio/minio
+    restart: always
+    command: server /export --console-address ":9090"
+    volumes:
+      - uploads:/export
+    environment:
+      MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
+      MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
+
+  createbuckets:
+    image: minio/mc
+    entrypoint: >
+      /bin/sh -c "
+      /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
+      /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
+      /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
+      exit 0;
+      "
+    depends_on:
+      - plane-minio
+
+  nginx:
+    container_name: nginx
+    build:
+      context: ./nginx
+      dockerfile: Dockerfile
+    restart: always
+    ports:
+      - 80:80
+    depends_on:
+      - plane-web
+      - plane-api
 
 volumes:
   pgdata:
   redisdata:
+  uploads:

View File

@@ -17,7 +17,7 @@ server {
     }
 
     location /uploads/ {
-      proxy_pass http://minio:9000/uploads/;
+      proxy_pass http://plane-minio:9000/uploads/;
     }
   }
 }

View File

@@ -7,7 +7,7 @@ export LC_CTYPE=C
 
 # Generate the NEXT_PUBLIC_API_BASE_URL with given IP
-echo -e "\nNEXT_PUBLIC_API_BASE_URL=http://$1" >> ./.env
+echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env
 
 # Generate the SECRET_KEY that will be used by django
 echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env