fix: docker image uploads (#1108)

* dev: basic initial setup for images

* Update docker-compose.yml

* dev: minio setup

* dev: docker minio setup

* dev: update the asset view

* dev: setup minio with default configuration

* dev: update minio setup for creating buckets

* dev: update the permission sets

* dev: get variables from shell for create bucket

* dev: update image uploading setup for docker

* dev: environment variables update

* dev: web url for images

* dev: update image configuration

* dev: env update for email port

---------

Co-authored-by: Narayana <narayana.vadapalli1996@gmail.com>
Authored by pablohashescobar on 2023-05-25 10:24:20 +05:30; committed via GitHub
parent 0bd6e53b44
commit 0fb4a87454
15 changed files with 142 additions and 49 deletions

View File

@@ -1,20 +1,41 @@
# Replace with your instance Public IP
# Frontend
# Extra image domains that need to be added for Next Image
NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
# Google Client ID for Google OAuth
NEXT_PUBLIC_GOOGLE_CLIENTID=""
NEXT_PUBLIC_GITHUB_APP_NAME=""
# Github ID for Github OAuth
NEXT_PUBLIC_GITHUB_ID=""
# Github App Name for GitHub Integration
NEXT_PUBLIC_GITHUB_APP_NAME=""
# Sentry DSN for error monitoring
NEXT_PUBLIC_SENTRY_DSN=""
# Enable/Disable OAuth - default 0 for self-hosted instances
NEXT_PUBLIC_ENABLE_OAUTH=0
# Enable/Disable sentry
NEXT_PUBLIC_ENABLE_SENTRY=0
# Enable/Disable session recording
NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
# Enable/Disable event tracking
NEXT_PUBLIC_TRACK_EVENTS=0
# Slack Client ID for Slack Integration
NEXT_PUBLIC_SLACK_CLIENT_ID=""
# Backend
# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID=""
AWS_SECRET_ACCESS_KEY=""
AWS_S3_BUCKET_NAME=""
AWS_S3_ENDPOINT_URL=""
# GPT settings
OPENAI_API_KEY=""
GPT_ENGINE=""
GPT_ENGINE=""
# Auto generated and Required
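
For a self-hosted Docker deployment that uses the bundled MinIO service instead of AWS S3, the AWS_* variables above can point at MinIO. A minimal sketch, reusing the fallback values that appear in the Dockerized branch of the production settings further down; the credentials are illustrative placeholders, not values to keep:

# Example values for the bundled MinIO service (illustrative placeholders)
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_BUCKET_NAME="uploads"
AWS_S3_ENDPOINT_URL="http://minio:9000"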

View File

@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser
from sentry_sdk import capture_exception
from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.db.models import FileAsset
@@ -34,7 +34,10 @@ class FileAssetEndpoint(BaseAPIView):
)
serializer.save(workspace_id=request.user.last_workspace_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
response_data = serializer.data
if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
@@ -82,7 +85,10 @@ class UserAssetsEndpoint(BaseAPIView):
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
response_data = serializer.data
if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)

View File

@@ -9,7 +9,7 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db.models.functions import Coalesce
from django.conf import settings
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
@@ -788,6 +788,9 @@ class IssueAttachmentEndpoint(BaseAPIView):
serializer = IssueAttachmentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id, issue_id=issue_id)
response_data = serializer.data
if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
issue_activity.delay(
type="attachment.activity.created",
requested_data=None,
@@ -799,7 +802,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
cls=DjangoJSONEncoder,
),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)

View File

@@ -4,6 +4,7 @@ from uuid import uuid4
# Django import
from django.db import models
from django.core.exceptions import ValidationError
from django.conf import settings
# Module import
from . import BaseModel
@@ -16,9 +17,11 @@ def get_upload_path(instance, filename):
def file_size(value):
limit = 5 * 1024 * 1024
if value.size > limit:
raise ValidationError("File too large. Size should not exceed 5 MB.")
# File size limit is enforced only for cloud-hosted instances
if not settings.DOCKERIZED:
limit = 5 * 1024 * 1024
if value.size > limit:
raise ValidationError("File too large. Size should not exceed 5 MB.")
class FileAsset(BaseModel):

View File

@@ -210,9 +210,11 @@ def get_upload_path(instance, filename):
def file_size(value):
limit = 5 * 1024 * 1024
if value.size > limit:
raise ValidationError("File too large. Size should not exceed 5 MB.")
# File size limit is enforced only for cloud-hosted instances
if not settings.DOCKERIZED:
limit = 5 * 1024 * 1024
if value.size > limit:
raise ValidationError("File too large. Size should not exceed 5 MB.")
class IssueAttachment(ProjectBaseModel):

View File

@@ -25,7 +25,9 @@ DATABASES = {
}
}
DOCKERIZED = os.environ.get("DOCKERIZED", False)
DOCKERIZED = int(os.environ.get(
"DOCKERIZED", 0
)) == 1
if DOCKERIZED:
DATABASES["default"] = dj_database_url.config()

View File

@@ -29,9 +29,10 @@ DATABASES = {
DATABASES["default"] = dj_database_url.config()
SITE_ID = 1
DOCKERIZED = os.environ.get(
"DOCKERIZED", False
) # Set the variable true if running in docker-compose environment
# Set the variable true if running in docker environment
DOCKERIZED = int(os.environ.get(
"DOCKERIZED", 0
)) == 1
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -69,7 +70,7 @@ CORS_ALLOW_CREDENTIALS = True
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
if os.environ.get("SENTRY_DSN", False):
if bool(os.environ.get("SENTRY_DSN", False)):
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN", ""),
integrations=[DjangoIntegration(), RedisIntegration()],
@@ -80,12 +81,21 @@ if os.environ.get("SENTRY_DSN", False):
environment="production",
)
if (
os.environ.get("AWS_REGION", False)
and os.environ.get("AWS_ACCESS_KEY_ID", False)
and os.environ.get("AWS_SECRET_ACCESS_KEY", False)
and os.environ.get("AWS_S3_BUCKET_NAME", False)
):
if DOCKERIZED:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# The AWS access key to use.
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
# The AWS secret access key to use.
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
# The name of the bucket to store files in.
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
# The full URL to the S3 endpoint. Leave blank to use the default region URL.
AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://minio:9000")
# Default permissions
AWS_DEFAULT_ACL = "public-read"
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = False
else:
# The AWS region to connect to.
AWS_REGION = os.environ.get("AWS_REGION", "")
@@ -99,7 +109,7 @@ if (
# AWS_SESSION_TOKEN = ""
# The name of the bucket to store files in.
AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "")
AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
# How to construct S3 URLs ("auto", "path", "virtual").
AWS_S3_ADDRESSING_STYLE = "auto"
@@ -166,14 +176,8 @@ if (
# extra characters appended.
AWS_S3_FILE_OVERWRITE = False
# AWS Settings End
DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
else:
MEDIA_URL = "/uploads/"
MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
# AWS Settings End
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -218,12 +222,6 @@ else:
}
}
RQ_QUEUES = {
"default": {
"USE_REDIS_CACHE": "default",
}
}
WEB_URL = os.environ.get("WEB_URL")
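
A quick way to confirm that a Dockerized deployment picked up the MinIO-backed storage settings is to query them from the Django shell; a sketch, assuming it is run inside the API container where DOCKERIZED=1 and manage.py sits in the working directory:

# Print the storage backend, S3 endpoint and web URL resolved by these settings
python manage.py shell -c "from django.conf import settings; print(settings.DEFAULT_FILE_STORAGE, settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)"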

View File

@@ -49,6 +49,10 @@ CORS_ALLOW_ALL_ORIGINS = True
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# Make true if running in a docker environment
DOCKERIZED = int(os.environ.get(
"DOCKERIZED", 0
)) == 1
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN"),

View File

@@ -7,7 +7,7 @@ from django.urls import path
from django.views.generic import TemplateView
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls import include, url, static
# from django.conf.urls.static import static
@@ -17,9 +17,8 @@ urlpatterns = [
path("api/", include("plane.api.urls")),
path("", include("plane.web.urls")),
]
# + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns = urlpatterns + static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar

View File

@@ -4,7 +4,7 @@ dj-database-url==1.2.0
gunicorn==20.1.0
whitenoise==6.3.0
django-storages==1.13.2
boto==2.49.0
boto3==1.26.136
django-anymail==9.0
twilio==7.16.2
django-debug-toolbar==3.8.1

View File

@@ -16,6 +16,7 @@ const nextConfig = {
"planefs.s3.amazonaws.com",
"images.unsplash.com",
"avatars.githubusercontent.com",
"localhost",
...extraImageDomains,
],
},

View File

@@ -68,7 +68,7 @@ services:
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
WEB_URL: localhost/
WEB_URL: ${WEB_URL}
GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
DISABLE_COLLECTSTATIC: 1
DOCKERIZED: 1
@@ -104,13 +104,33 @@ services:
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
WEB_URL: localhost/
WEB_URL: ${WEB_URL}
GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
DISABLE_COLLECTSTATIC: 1
DOCKERIZED: 1
OPENAI_API_KEY: ${OPENAI_API_KEY}
GPT_ENGINE: ${GPT_ENGINE}
SECRET_KEY: ${SECRET_KEY}
minio:
image: minio/minio
ports:
- 9000:9000
environment:
MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
command: server /export --console-address ":9090"
createbuckets:
image: minio/mc
depends_on:
- minio
entrypoint: >
/bin/sh -c "
/usr/bin/mc config host add plane-minio http://minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
/usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
/usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
exit 0;
"
volumes:
pgdata:
redisdata:
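
After bringing the stack up, the one-shot createbuckets container should have created the bucket and exited; a rough check from the host, assuming port 9000 is published as above and that the MinIO image exposes its usual liveness endpoint:

# Verify the bucket-creation job ran and MinIO is answering
docker-compose logs createbuckets
curl -I http://localhost:9000/minio/health/live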

View File

@@ -6,6 +6,7 @@ services:
build:
context: ./nginx
dockerfile: Dockerfile
restart: always
ports:
- 80:80
depends_on:
@@ -72,7 +73,8 @@ services:
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
WEB_URL: localhost/
AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
WEB_URL: ${WEB_URL}
GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
DISABLE_COLLECTSTATIC: 1
DOCKERIZED: 1
@@ -110,13 +112,34 @@ services:
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
WEB_URL: localhost/
AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
WEB_URL: ${WEB_URL}
GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
DISABLE_COLLECTSTATIC: 1
DOCKERIZED: 1
OPENAI_API_KEY: ${OPENAI_API_KEY}
GPT_ENGINE: ${GPT_ENGINE}
SECRET_KEY: ${SECRET_KEY}
minio:
image: minio/minio
ports:
- 9000:9000
environment:
MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
command: server /export --console-address ":9090"
createbuckets:
image: minio/mc
depends_on:
- minio
entrypoint: >
/bin/sh -c "
/usr/bin/mc config host add plane-minio http://minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
/usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME;
/usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME;
exit 0;
"
volumes:
pgdata:
redisdata:

View File

@@ -16,5 +16,8 @@ server {
proxy_pass http://planebackend:8000/api/;
}
location /uploads/ {
proxy_pass http://minio:9000/uploads/;
}
}
}
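
With this location block, objects stored in MinIO are served through nginx on port 80 under /uploads/; a rough check, assuming the bucket is named uploads as in the default configuration and substituting an object key returned by the asset API:

# <object-key> is a placeholder for a real uploaded file's key
curl -I http://<your-instance-ip>/uploads/<object-key>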

View File

@@ -1,7 +1,15 @@
#!/bin/bash
cp ./.env.example ./.env
echo -e "\nNEXT_PUBLIC_API_BASE_URL=http://$1" >> ./.env
# Export locale settings to work around a tr error on macOS
export LC_ALL=C
export LC_CTYPE=C
echo -e "\nSECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env
# Generate the NEXT_PUBLIC_API_BASE_URL with given IP
echo -e "\nNEXT_PUBLIC_API_BASE_URL=http://$1" >> ./.env
# Generate the SECRET_KEY that will be used by django
echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9!@#$%^&*(-_=+)' < /dev/urandom | head -c50)\"" >> ./.env
# WEB_URL for email redirection and image saving
echo -e "WEB_URL=$1" >> ./.env