From ffc6077e9ba878f0e5d4697cb7bbece06534fab0 Mon Sep 17 00:00:00 2001
From: pablohashescobar <118773738+pablohashescobar@users.noreply.github.com>
Date: Mon, 29 May 2023 12:11:16 +0530
Subject: [PATCH] chore: improve docker setup (#1150)

---
 .env.example                           | 21 ++++++++++++++-------
 apiserver/plane/api/views/asset.py     |  4 ++--
 apiserver/plane/api/views/issue.py     |  2 +-
 apiserver/plane/db/models/asset.py     |  7 ++-----
 apiserver/plane/db/models/issue.py     |  6 ++----
 apiserver/plane/settings/local.py      |  7 +++++--
 apiserver/plane/settings/production.py |  8 ++++++--
 apiserver/plane/settings/staging.py    |  3 ++-
 docker-compose-hub.yml                 |  8 ++++++++
 docker-compose.yml                     | 10 +++++++++-
 setup.sh                               |  2 +-
 11 files changed, 52 insertions(+), 26 deletions(-)

diff --git a/.env.example b/.env.example
index a5ba26b3f..35c5f83d3 100644
--- a/.env.example
+++ b/.env.example
@@ -21,6 +21,13 @@ NEXT_PUBLIC_TRACK_EVENTS=0
 NEXT_PUBLIC_SLACK_CLIENT_ID=""
 
 # Backend
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+
 # Email Settings
 EMAIL_HOST=""
 EMAIL_HOST_USER=""
@@ -32,8 +39,10 @@ EMAIL_FROM="Team Plane "
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
 AWS_SECRET_ACCESS_KEY="secret-key"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
 AWS_S3_BUCKET_NAME="uploads"
-AWS_S3_ENDPOINT_URL=""
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
 
 # GPT settings
 OPENAI_API_KEY=""
@@ -45,13 +54,11 @@ GITHUB_CLIENT_SECRET="" # For fetching release notes
 
 # Settings related to Docker
 DOCKERIZED=1
-# Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-
 # Nginx Configuration
 NGINX_PORT=80
 
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
 # Auto generated and Required that will be generated from setup.sh
\ No newline at end of file
diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py
index 0102867d7..0f0513405 100644
--- a/apiserver/plane/api/views/asset.py
+++ b/apiserver/plane/api/views/asset.py
@@ -35,7 +35,7 @@ class FileAssetEndpoint(BaseAPIView):
             serializer.save(workspace_id=request.user.last_workspace_id)
             response_data = serializer.data
 
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -86,7 +86,7 @@ class UserAssetsEndpoint(BaseAPIView):
         if serializer.is_valid():
             serializer.save()
             response_data = serializer.data
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index abdfe6aae..74d0d466c 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -820,7 +820,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             response_data = serializer.data
             if (
                 settings.DOCKERIZED
-                and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]
+                and settings.USE_MINIO
             ):
                 response_data["asset"] = response_data["asset"].replace(
response_data["asset"].replace( settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL diff --git a/apiserver/plane/db/models/asset.py b/apiserver/plane/db/models/asset.py index e37f2c0b0..01ef1d9d8 100644 --- a/apiserver/plane/db/models/asset.py +++ b/apiserver/plane/db/models/asset.py @@ -17,11 +17,8 @@ def get_upload_path(instance, filename): def file_size(value): - # File limit check is only for cloud hosted - if not settings.DOCKERIZED: - limit = 5 * 1024 * 1024 - if value.size > limit: - raise ValidationError("File too large. Size should not exceed 5 MB.") + if value.size > settings.FILE_SIZE_LIMIT: + raise ValidationError("File too large. Size should not exceed 5 MB.") class FileAsset(BaseModel): diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index f58d4ac13..e25695c42 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -211,10 +211,8 @@ def get_upload_path(instance, filename): def file_size(value): # File limit check is only for cloud hosted - if not settings.DOCKERIZED: - limit = 5 * 1024 * 1024 - if value.size > limit: - raise ValidationError("File too large. Size should not exceed 5 MB.") + if value.size > settings.FILE_SIZE_LIMIT: + raise ValidationError("File too large. Size should not exceed 5 MB.") class IssueAttachment(ProjectBaseModel): diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py index 70a4be49a..324d2edbe 100644 --- a/apiserver/plane/settings/local.py +++ b/apiserver/plane/settings/local.py @@ -29,6 +29,10 @@ DOCKERIZED = int(os.environ.get( "DOCKERIZED", 0 )) == 1 +USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1 + +FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) + if DOCKERIZED: DATABASES["default"] = dj_database_url.config() @@ -86,5 +90,4 @@ LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) CELERY_RESULT_BACKEND = os.environ.get("REDIS_URL") CELERY_BROKER_URL = os.environ.get("REDIS_URL") - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) \ No newline at end of file +GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 81a3c2082..a1d450137 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -34,6 +34,10 @@ DOCKERIZED = int(os.environ.get( "DOCKERIZED", 0 )) == 1 +USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1 + +FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) + # Enable Connection Pooling (if desired) # DATABASES['default']['ENGINE'] = 'django_postgrespool' @@ -81,7 +85,7 @@ if bool(os.environ.get("SENTRY_DSN", False)): environment="production", ) -if DOCKERIZED: +if DOCKERIZED and USE_MINIO: DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' # The AWS access key to use. AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") @@ -90,7 +94,7 @@ if DOCKERIZED: # The name of the bucket to store files in. AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") # The full URL to the S3 endpoint. Leave blank to use the default region URL. 
-    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
+    AWS_S3_ENDPOINT_URL = "http://plane-minio:9000"
     # Default permissions
     AWS_DEFAULT_ACL = "public-read"
     AWS_QUERYSTRING_AUTH = False
diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py
index d1d8e1749..c5db5d300 100644
--- a/apiserver/plane/settings/staging.py
+++ b/apiserver/plane/settings/staging.py
@@ -53,6 +53,8 @@ STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
 DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
 
 sentry_sdk.init(
     dsn=os.environ.get("SENTRY_DSN"),
@@ -169,7 +171,6 @@ CSRF_COOKIE_SECURE = True
 
 REDIS_URL = os.environ.get("REDIS_URL")
 
-DOCKERIZED = os.environ.get("DOCKERIZED", False)
 
 CACHES = {
     "default": {
diff --git a/docker-compose-hub.yml b/docker-compose-hub.yml
index 97ef38260..1bdd41d07 100644
--- a/docker-compose-hub.yml
+++ b/docker-compose-hub.yml
@@ -35,6 +35,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -42,6 +43,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -66,6 +70,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -73,6 +78,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db
diff --git a/docker-compose.yml b/docker-compose.yml
index fe9e5f15d..19f95f875 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -42,6 +42,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -49,6 +50,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -74,6 +78,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
      WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -81,6 +86,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db
@@ -136,7 +144,7 @@ services:
       dockerfile: Dockerfile
     restart: always
     ports:
-    - ${NGINX_PORT}:80
+      - ${NGINX_PORT}:80
     depends_on:
       - plane-web
       - plane-api
diff --git a/setup.sh b/setup.sh
index 7fc5847e0..8c1f81a48 100755
--- a/setup.sh
+++ b/setup.sh
@@ -10,7 +10,7 @@ export LC_CTYPE=C
 echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env
 
 # Generate the SECRET_KEY that will be used by django
-echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env
+echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env
 
 # WEB_URL for email redirection and image saving
 echo -e "WEB_URL=$1" >> ./.env
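
Note (not part of the patch): a minimal, self-contained Python sketch of how the environment variables introduced above are meant to be read, assuming the corrected int(os.environ.get("USE_MINIO", 0)) == 1 parsing used in the settings modules; file_size mirrors the patched validators, while the parameter name and the use of ValueError (instead of Django's ValidationError) are illustrative choices to keep the sketch dependency-free.

import os

# Docker / MinIO toggles, read the same way apiserver/plane/settings/*.py read them.
# The fallback belongs inside os.environ.get(); passing it to int() as a second
# argument would be treated as a numeric base and fail when the variable is unset.
DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1

# Maximum upload size in bytes; 5242880 = 5 MiB, matching FILE_SIZE_LIMIT in .env.example.
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))


def file_size(size_in_bytes):
    # Reject uploads larger than FILE_SIZE_LIMIT, as the patched
    # plane/db/models/{asset,issue}.py validators do.
    if size_in_bytes > FILE_SIZE_LIMIT:
        raise ValueError("File too large. Size should not exceed %d bytes." % FILE_SIZE_LIMIT)


if __name__ == "__main__":
    print("DOCKERIZED:", DOCKERIZED, "USE_MINIO:", USE_MINIO, "FILE_SIZE_LIMIT:", FILE_SIZE_LIMIT)
    file_size(1024)                # passes with the default limit
    # file_size(10 * 1024 * 1024)  # would raise ValueError with the default limit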