forked from github/plane

chore: improve docker setup (#1150)

parent 23d08a2ad1
commit ffc6077e9b

.env.example (21 changes)
@@ -21,6 +21,13 @@ NEXT_PUBLIC_TRACK_EVENTS=0
 NEXT_PUBLIC_SLACK_CLIENT_ID=""
 
+# Backend
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
 
 # Email Settings
 EMAIL_HOST=""
 EMAIL_HOST_USER=""
@@ -32,8 +39,10 @@ EMAIL_FROM="Team Plane <team@mailer.plane.so>"
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
 AWS_SECRET_ACCESS_KEY="secret-key"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
 AWS_S3_BUCKET_NAME="uploads"
+AWS_S3_ENDPOINT_URL=""
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
 OPENAI_API_KEY=""
@@ -45,13 +54,11 @@ GITHUB_CLIENT_SECRET="" # For fetching release notes
 # Settings related to Docker
 DOCKERIZED=1
 
-# Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-
 # Nginx Configuration
 NGINX_PORT=80
 
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
 # Auto generated and Required that will be generated from setup.sh
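The PG* variables above do not reach Django directly; the settings changes further down consume a single DATABASE_URL through dj_database_url.config(). A minimal sketch of the glue, assuming the composition happens in a startup script (that script is not part of this diff):

import os

import dj_database_url

# Compose a DSN from the discrete PG* variables introduced above.
user = os.environ.get("PGUSER", "plane")
password = os.environ.get("PGPASSWORD", "plane")
host = os.environ.get("PGHOST", "plane-db")
database = os.environ.get("PGDATABASE", "plane")
os.environ.setdefault(
    "DATABASE_URL", f"postgresql://{user}:{password}@{host}/{database}"
)

# dj_database_url.config() reads DATABASE_URL and returns a DATABASES entry.
DATABASES = {"default": dj_database_url.config()}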
@@ -35,7 +35,7 @@ class FileAssetEndpoint(BaseAPIView):
 
             serializer.save(workspace_id=request.user.last_workspace_id)
             response_data = serializer.data
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -86,7 +86,7 @@ class UserAssetsEndpoint(BaseAPIView):
         if serializer.is_valid():
             serializer.save()
             response_data = serializer.data
-            if settings.DOCKERIZED and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]:
+            if settings.DOCKERIZED and settings.USE_MINIO:
                 response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
             return Response(response_data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -820,7 +820,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             response_data = serializer.data
             if (
                 settings.DOCKERIZED
-                and settings.AWS_S3_ENDPOINT_URL in response_data["asset"]
+                and settings.USE_MINIO
             ):
                 response_data["asset"] = response_data["asset"].replace(
                     settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL
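All three hunks apply the same rewrite: with MinIO inside the compose network, stored asset URLs carry the internal endpoint, which a browser outside that network cannot reach, so the endpoint is swapped for the public WEB_URL before the response goes out. The new USE_MINIO flag also replaces the old substring test, which misfired whenever AWS_S3_ENDPOINT_URL was empty (in Python, "" is "in" every string, and str.replace("", x) inserts x between every character). A shared helper is the obvious shape; this is a hypothetical refactor, not code from the commit:

from django.conf import settings

def publicize_asset_url(asset_url: str) -> str:
    # Swap the compose-internal MinIO endpoint for the public URL so the
    # browser can actually fetch the asset.
    if settings.DOCKERIZED and settings.USE_MINIO:
        return asset_url.replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
    return asset_url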
@@ -17,11 +17,8 @@ def get_upload_path(instance, filename):
 
 
 def file_size(value):
-    # File limit check is only for cloud hosted
-    if not settings.DOCKERIZED:
-        limit = 5 * 1024 * 1024
-        if value.size > limit:
-            raise ValidationError("File too large. Size should not exceed 5 MB.")
+    if value.size > settings.FILE_SIZE_LIMIT:
+        raise ValidationError("File too large. Size should not exceed 5 MB.")
 
 
 class FileAsset(BaseModel):
@@ -211,10 +211,8 @@ def get_upload_path(instance, filename):
 
 def file_size(value):
-    # File limit check is only for cloud hosted
-    if not settings.DOCKERIZED:
-        limit = 5 * 1024 * 1024
-        if value.size > limit:
-            raise ValidationError("File too large. Size should not exceed 5 MB.")
+    if value.size > settings.FILE_SIZE_LIMIT:
+        raise ValidationError("File too large. Size should not exceed 5 MB.")
 
 
 class IssueAttachment(ProjectBaseModel):
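The validator now applies to every deployment, not just cloud-hosted ones, and reads the limit from settings. One detail it keeps: the error message still says 5 MB even though FILE_SIZE_LIMIT is configurable. A small sketch of a message that tracks the setting (an illustration, not part of the commit):

from django.conf import settings
from django.core.exceptions import ValidationError

def file_size(value):
    # Fall back to the commit's default (5242880 bytes = 5 MB) if unset.
    limit = getattr(settings, "FILE_SIZE_LIMIT", 5242880)
    if value.size > limit:
        mb = limit / (1024 * 1024)
        raise ValidationError(f"File too large. Size should not exceed {mb:.0f} MB.")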
@@ -29,6 +29,10 @@ DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
 
+USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1
+
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+
 if DOCKERIZED:
     DATABASES["default"] = dj_database_url.config()
 
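A caution on the USE_MINIO line as committed: the default sits outside os.environ.get, so 0 is passed to int() as its base argument. When the variable is unset, os.environ.get returns None and int(None, 0) raises TypeError; the line only works because the compose files below always set USE_MINIO. The intended form keeps the default inside get:

import os

# Default to 0 when the variable is absent, then parse it.
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1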
@@ -86,5 +90,4 @@ LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False)
 CELERY_RESULT_BACKEND = os.environ.get("REDIS_URL")
 CELERY_BROKER_URL = os.environ.get("REDIS_URL")
 
-GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
 GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
@@ -34,6 +34,10 @@ DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
 
+USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1
+
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+
 # Enable Connection Pooling (if desired)
 # DATABASES['default']['ENGINE'] = 'django_postgrespool'
 
@@ -81,7 +85,7 @@ if bool(os.environ.get("SENTRY_DSN", False)):
         environment="production",
     )
 
-if DOCKERIZED:
+if DOCKERIZED and USE_MINIO:
     DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
     # The AWS access key to use.
     AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
@@ -90,7 +94,7 @@ if DOCKERIZED:
     # The name of the bucket to store files in.
     AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
     # The full URL to the S3 endpoint. Leave blank to use the default region URL.
-    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
+    AWS_S3_ENDPOINT_URL = "http://plane-minio:9000"
     # Default permissions
     AWS_DEFAULT_ACL = "public-read"
     AWS_QUERYSTRING_AUTH = False
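Since the dockerized MinIO branch now pins the endpoint to http://plane-minio:9000, a quick way to confirm the storage wiring is a boto3 smoke test against the same values. This is an illustrative check, not something the commit ships:

import os

import boto3

# Talk to MinIO through the same endpoint and credentials the settings use.
s3 = boto3.client(
    "s3",
    endpoint_url="http://plane-minio:9000",
    aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID", "access-key"),
    aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key"),
)
# Raises botocore.exceptions.ClientError if the bucket is missing or unreachable.
s3.head_bucket(Bucket=os.environ.get("AWS_S3_BUCKET_NAME", "uploads"))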
@@ -53,6 +53,8 @@ STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
 DOCKERIZED = int(os.environ.get(
     "DOCKERIZED", 0
 )) == 1
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1
 
 sentry_sdk.init(
     dsn=os.environ.get("SENTRY_DSN"),
@@ -169,7 +171,6 @@ CSRF_COOKIE_SECURE = True
 
 
 REDIS_URL = os.environ.get("REDIS_URL")
-DOCKERIZED = os.environ.get("DOCKERIZED", False)
 
 CACHES = {
     "default": {
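Dropping the second DOCKERIZED assignment fixes a real type bug: the first definition parses the variable into a bool, while os.environ.get("DOCKERIZED", False) returns the raw string, and any non-empty string, including "0", is truthy. A short plain-Python illustration:

import os

os.environ["DOCKERIZED"] = "0"

as_bool = int(os.environ.get("DOCKERIZED", 0)) == 1  # False, as intended
as_str = os.environ.get("DOCKERIZED", False)         # "0", a non-empty string

print(as_bool)       # False
print(bool(as_str))  # True: the later assignment silently re-enabled the flag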
@@ -35,6 +35,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -42,6 +43,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -66,6 +70,7 @@ services:
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -73,6 +78,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db
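DEFAULT_EMAIL and DEFAULT_PASSWORD presumably seed the first account when the API container boots; the bootstrap script itself is not in this diff, so the sketch below is a guess at its shape (the function name and the create_superuser signature are assumptions for an email-keyed custom user model):

import os

from django.contrib.auth import get_user_model

def ensure_default_user():
    # Idempotent: only create the captain account if it does not exist yet.
    User = get_user_model()
    email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
    password = os.environ.get("DEFAULT_PASSWORD", "password123")
    if not User.objects.filter(email=email).exists():
        User.objects.create_superuser(email=email, password=password)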
@@ -42,6 +42,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -49,6 +50,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-db
       - plane-redis
@@ -74,6 +78,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
       AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
+      FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
       WEB_URL: ${WEB_URL}
       GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
       DISABLE_COLLECTSTATIC: 1
@@ -81,6 +86,9 @@ services:
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       GPT_ENGINE: ${GPT_ENGINE}
       SECRET_KEY: ${SECRET_KEY}
+      DEFAULT_EMAIL: ${DEFAULT_EMAIL}
+      DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
+      USE_MINIO: 1
     depends_on:
       - plane-api
       - plane-db
@@ -136,7 +144,7 @@ services:
       dockerfile: Dockerfile
     restart: always
     ports:
-      - 80:80
+      - ${NGINX_PORT}:80
    depends_on:
      - plane-web
      - plane-api
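Note that ${NGINX_PORT} carries no inline default in the compose file, so the mapping depends on NGINX_PORT being present in .env (the example above sets 80); if it is empty, the published mapping degenerates to ":80" and compose rejects it. A tiny pre-flight check along those lines, illustrative only:

import os
import sys

port = os.environ.get("NGINX_PORT", "")
if not port.isdigit():
    sys.exit("Set NGINX_PORT in .env (e.g. NGINX_PORT=80) before docker compose up")
print(f"nginx will be published on host port {port}")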
setup.sh (2 changes)
@@ -10,7 +10,7 @@ export LC_CTYPE=C
 echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env
 
 # Generate the SECRET_KEY that will be used by django
-echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env
+echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env
 
 # WEB_URL for email redirection and image saving
 echo -e "WEB_URL=$1" >> ./.env
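The narrowed alphabet matters because the old one could emit characters such as $, #, and & that break .env parsing, shell quoting, and compose variable interpolation. The same generation expressed in Python, shown for clarity (the commit itself stays with tr over /dev/urandom):

import secrets
import string

# 50 characters drawn from a-z0-9, mirroring tr -dc 'a-z0-9' | head -c50.
alphabet = string.ascii_lowercase + string.digits
secret_key = "".join(secrets.choice(alphabet) for _ in range(50))
print(f'SECRET_KEY="{secret_key}"')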