forked from github/plane

commit 50060a0bf9 (parent bffc6a60e7)
chore: update docker uploads (#1202)
@@ -34,10 +34,7 @@ class FileAssetEndpoint(BaseAPIView):
                 )

                 serializer.save(workspace_id=request.user.last_workspace_id)
-                response_data = serializer.data
-                if settings.DOCKERIZED and settings.USE_MINIO:
-                    response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
-                return Response(response_data, status=status.HTTP_201_CREATED)
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
         except Exception as e:
             capture_exception(e)
@@ -85,10 +82,7 @@ class UserAssetsEndpoint(BaseAPIView):
             serializer = FileAssetSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save()
-                response_data = serializer.data
-                if settings.DOCKERIZED and settings.USE_MINIO:
-                    response_data["asset"] = response_data["asset"].replace(settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL)
-                return Response(response_data, status=status.HTTP_201_CREATED)
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
         except Exception as e:
             capture_exception(e)
@@ -817,14 +817,6 @@ class IssueAttachmentEndpoint(BaseAPIView):
             serializer = IssueAttachmentSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save(project_id=project_id, issue_id=issue_id)
-                response_data = serializer.data
-                if (
-                    settings.DOCKERIZED
-                    and settings.USE_MINIO
-                ):
-                    response_data["asset"] = response_data["asset"].replace(
-                        settings.AWS_S3_ENDPOINT_URL, settings.WEB_URL
-                    )
                 issue_activity.delay(
                     type="attachment.activity.created",
                     requested_data=None,
@@ -836,7 +828,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
                         cls=DjangoJSONEncoder,
                     ),
                 )
-                return Response(response_data, status=status.HTTP_201_CREATED)
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
         except Exception as e:
             capture_exception(e)
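All three endpoints above drop the same per-response URL rewrite: instead of patching the serialized asset URL in every view, the substitution is handled by the storage configuration (see the settings hunks below). A minimal sketch of what the removed view code did; the endpoint and URL values here are assumed examples, not taken from the commit:

# Sketch only: AWS_S3_ENDPOINT_URL / WEB_URL values are assumed.
AWS_S3_ENDPOINT_URL = "http://plane-minio:9000"   # internal MinIO endpoint (assumed)
WEB_URL = "http://localhost"                      # publicly reachable URL (assumed)

response_data = {"asset": "http://plane-minio:9000/uploads/cover.png"}
# The removed lines swapped the internal endpoint for the public one:
response_data["asset"] = response_data["asset"].replace(AWS_S3_ENDPOINT_URL, WEB_URL)
print(response_data["asset"])  # http://localhost/uploads/cover.png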
@@ -30,9 +30,7 @@ DATABASES["default"] = dj_database_url.config()
 SITE_ID = 1

 # Set the variable true if running in docker environment
-DOCKERIZED = int(os.environ.get(
-    "DOCKERIZED", 0
-)) == 1
+DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1

 USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1

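A small sketch of how these flags evaluate, with assumed environment values. Note that in the unchanged USE_MINIO line the 0 sits outside os.environ.get(), so it is passed to int() as a base rather than acting as a default:

import os

# Assumed example environment; both flags set to "1".
os.environ["DOCKERIZED"] = "1"
os.environ["USE_MINIO"] = "1"

DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1   # True; falls back to 0 when unset
# Here 0 is the *base* argument of int(), not a fallback for get();
# if USE_MINIO were unset, int(None, 0) would raise a TypeError.
USE_MINIO = int(os.environ.get("USE_MINIO"), 0) == 1     # True for the value above
print(DOCKERIZED, USE_MINIO)  # True True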
@@ -86,7 +84,8 @@ if bool(os.environ.get("SENTRY_DSN", False)):
     )

 if DOCKERIZED and USE_MINIO:
-    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+    INSTALLED_APPS += ("storages",)
+    DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
     # The AWS access key to use.
     AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
     # The AWS secret access key to use.
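With "storages" added to INSTALLED_APPS and DEFAULT_FILE_STORAGE pointed at S3Boto3Storage, any FileField that does not name its own storage goes through the MinIO-backed backend. A minimal sketch under that assumption; the model below is a simplified stand-in, not the project's real model:

from django.db import models

class FileAsset(models.Model):
    # No explicit storage argument, so uploads use DEFAULT_FILE_STORAGE,
    # i.e. storages.backends.s3boto3.S3Boto3Storage when DOCKERIZED and USE_MINIO are set.
    asset = models.FileField(upload_to="assets/")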
@@ -99,6 +98,11 @@ if DOCKERIZED and USE_MINIO:
     AWS_DEFAULT_ACL = "public-read"
     AWS_QUERYSTRING_AUTH = False
     AWS_S3_FILE_OVERWRITE = False
+
+    # Custom Domain settings
+    parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
+    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
+    AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
 else:
     # The AWS region to connect to.
     AWS_REGION = os.environ.get("AWS_REGION", "")
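The new AWS_S3_CUSTOM_DOMAIN / AWS_S3_URL_PROTOCOL settings are what make the per-view .replace() calls unnecessary: django-storages builds asset URLs from the custom domain directly. A rough sketch of the resulting URL, assuming example values for WEB_URL and the bucket name:

from urllib.parse import urlparse

WEB_URL = "http://localhost"          # assumed value of the WEB_URL env var
AWS_STORAGE_BUCKET_NAME = "uploads"   # assumed bucket name

parsed_url = urlparse(WEB_URL)
AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"  # "localhost/uploads"
AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"                            # "http:"

# With a custom domain configured, S3Boto3Storage.url(name) resolves to roughly:
name = "cover.png"
asset_url = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{name}"
print(asset_url)  # http://localhost/uploads/cover.png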