chore: improve docker setup (#1150)

Author: pablohashescobar, 2023-05-29 12:11:16 +05:30 (committed by GitHub)
parent 23d08a2ad1
commit ffc6077e9b
11 changed files with 52 additions and 26 deletions


@@ -21,6 +21,13 @@ NEXT_PUBLIC_TRACK_EVENTS=0
 NEXT_PUBLIC_SLACK_CLIENT_ID=""
 
 # Backend
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+
 # Email Settings
 EMAIL_HOST=""
 EMAIL_HOST_USER=""
@@ -32,8 +39,10 @@ EMAIL_FROM="Team Plane <team@mailer.plane.so>"
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
 AWS_SECRET_ACCESS_KEY="secret-key"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
 AWS_S3_BUCKET_NAME="uploads"
-AWS_S3_ENDPOINT_URL=""
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
 
 # GPT settings
 OPENAI_API_KEY=""
@@ -45,13 +54,11 @@ GITHUB_CLIENT_SECRET="" # For fetching release notes
 # Settings related to Docker
 DOCKERIZED=1
 
-# Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-
 # Nginx Configuration
 NGINX_PORT=80
 
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
 # Auto generated and Required that will be generated from setup.sh
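The PG* values above feed the containerized Postgres, and the Docker settings further down pick the connection up via dj_database_url.config(). A minimal sketch of how such a URL can be assembled from these values, assuming a stock dj-database-url install (variable names mirror the .env entries; port 5432 is an assumption, not from the commit):

    import os
    import dj_database_url

    # Assembled from the PG* values introduced in the .env above;
    # dj_database_url.config() reads DATABASE_URL from the environment
    # and falls back to the `default` argument given here.
    user = os.environ.get("PGUSER", "plane")
    password = os.environ.get("PGPASSWORD", "plane")
    host = os.environ.get("PGHOST", "plane-db")
    name = os.environ.get("PGDATABASE", "plane")

    DATABASES = {
        "default": dj_database_url.config(
            default=f"postgresql://{user}:{password}@{host}:5432/{name}"
        )
    }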


@@ -35,7 +35,7 @@ class FileAssetEndpoint(BaseAPIView):
             serializer.save(workspace_id=request.user.last_workspace_id)
             response_data = serializer.data
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -86,7 +86,7 @@ class UserAssetsEndpoint(BaseAPIView):
         if serializer.is_valid():
             serializer.save()
             response_data = serializer.data
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
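Both endpoints now gate the rewrite on the explicit USE_MINIO flag instead of substring-matching the endpoint URL. A standalone sketch of the rewrite being performed (function name and defaults are illustrative, not from the commit): inside the Docker network, MinIO is only reachable as plane-minio:9000, so asset URLs are rewritten to WEB_URL, which nginx proxies back to the bucket per the nginx.conf note in .env.example above.

    import os

    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
    WEB_URL = os.environ.get("WEB_URL", "http://localhost")

    def rewrite_asset_url(asset_url: str) -> str:
        # The MinIO endpoint is internal to the Docker network; swapping it
        # for WEB_URL yields a URL the browser can actually reach.
        return asset_url.replace(AWS_S3_ENDPOINT_URL, WEB_URL)

    print(rewrite_asset_url("http://plane-minio:9000/uploads/logo.png"))
    # -> http://localhost/uploads/logo.png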


@@ -820,7 +820,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             response_data = serializer.data
             if (
                 settings.DOCKERIZED
-                and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]
+                and settings.USE_MINIO
             ):
                 response_data["asset"] = response_data["asset"].replace(
                     settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL


@@ -17,11 +17,8 @@ def get_upload_path(instance, filename):
 def file_size(value):
-    # File limit check is only for cloud hosted
-    if not settings.DOCKERIZED:
-        limit = 5 * 1024 * 1024
-        if value.size > limit:
-            raise ValidationError("File too large. Size should not exceed 5 MB.")
+    if value.size > settings.FILE_SIZE_LIMIT:
+        raise ValidationError("File too large. Size should not exceed 5 MB.")
 
 
 class FileAsset(BaseModel):


@@ -211,10 +211,8 @@ def get_upload_path(instance, filename):
 def file_size(value):
     # File limit check is only for cloud hosted
-    if not settings.DOCKERIZED:
-        limit = 5 * 1024 * 1024
-        if value.size > limit:
-            raise ValidationError("File too large. Size should not exceed 5 MB.")
+    if value.size > settings.FILE_SIZE_LIMIT:
+        raise ValidationError("File too large. Size should not exceed 5 MB.")
 
 
 class IssueAttachment(ProjectBaseModel):
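Note that both validators now compare against the configurable FILE_SIZE_LIMIT while the error text still hardcodes "5 MB". A hedged sketch of a message that tracks the limit instead (not what the commit ships; assumes the usual Django imports):

    from django.conf import settings
    from django.core.exceptions import ValidationError

    def file_size(value):
        # Derive the human-readable limit from FILE_SIZE_LIMIT (bytes)
        # so the message stays correct if the limit is reconfigured.
        if value.size > settings.FILE_SIZE_LIMIT:
            limit_mb = settings.FILE_SIZE_LIMIT / (1024 * 1024)
            raise ValidationError(
                f"File too large. Size should not exceed {limit_mb:.0f} MB."
            )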


@@ -29,6 +29,10 @@ DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
 
+USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
+
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+
 if DOCKERIZED:
     DATABASES["default"] = dj_database_url.config()
@@ -86,5 +90,4 @@ LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False)
 CELERY_RESULT_BACKEND = os.environ.get("REDIS_URL")
 CELERY_BROKER_URL = os.environ.get("REDIS_URL")
 
 GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
-


@@ -34,6 +34,10 @@ DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
 
+USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
+
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+
 # Enable Connection Pooling (if desired)
 # DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -81,7 +85,7 @@ if bool(os.environ.get("SENTRY_DSN", False)):
         environment="production",
     )
 
-if DOCKERIZED:
+if DOCKERIZED and USE_MINIO:
     DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
     # The AWS access key to use.
     AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
@@ -90,7 +94,7 @@ if DOCKERIZED:
     # The name of the bucket to store files in.
     AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
     # The full URL to the S3 endpoint. Leave blank to use the default region URL.
-    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
+    AWS_S3_ENDPOINT_URL = "http://plane-minio:9000"
     # Default permissions
     AWS_DEFAULT_ACL = "public-read"
     AWS_QUERYSTRING_AUTH = False
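With USE_MINIO enabled, django-storages' S3Boto3Storage talks to the now-fixed http://plane-minio:9000 endpoint using the credentials above. A quick smoke test of that wiring with plain boto3 (which S3Boto3Storage wraps); the bucket and key here are illustrative:

    import boto3

    s3 = boto3.client(
        "s3",
        endpoint_url="http://plane-minio:9000",
        aws_access_key_id="access-key",
        aws_secret_access_key="secret-key",
    )
    # MinIO speaks the S3 API, so ordinary S3 calls work against it.
    s3.put_object(Bucket="uploads", Key="smoke-test.txt", Body=b"ok")
    print(s3.get_object(Bucket="uploads", Key="smoke-test.txt")["Body"].read())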


@@ -53,6 +53,8 @@ STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
 DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
 
 sentry_sdk.init(
     dsn=os.environ.get("SENTRY_DSN"),
@@ -169,7 +171,6 @@ CSRF_COOKIE_SECURE = True
 REDIS_URL = os.environ.get("REDIS_URL")
 
-DOCKERIZED = os.environ.get("DOCKERIZED", False)
 
 CACHES = {
     "default": {


@@ -35,6 +35,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -42,6 +43,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -66,6 +70,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -73,6 +78,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db


@@ -42,6 +42,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -49,6 +50,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -74,6 +78,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -81,6 +86,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db
@@ -136,7 +144,7 @@ services:
     dockerfile: Dockerfile
     restart: always
     ports:
-      - ${NGINX_PORT}:80
+      - ${NGINX_PORT}:80
     depends_on:
       - plane-web
      - plane-api
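Both compose files now pass DEFAULT_EMAIL and DEFAULT_PASSWORD into the API containers. Purely as an illustration of what a bootstrap step can do with them (Plane's actual seeding command is not part of this diff), assuming a Django custom user model keyed on email:

    import os
    from django.contrib.auth import get_user_model

    User = get_user_model()
    email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
    password = os.environ.get("DEFAULT_PASSWORD", "password123")

    # Create the default account once, on first boot.
    if not User.objects.filter(email=email).exists():
        User.objects.create_superuser(email=email, password=password)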


@@ -10,7 +10,7 @@ export LC_CTYPE=C
 echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env
 # Generate the SECRET_KEY that will be used by django
-echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env
+echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env
 # WEB_URL for email redirection and image saving
 echo -e "WEB_URL=$1" >> ./.env