forked from github/plane
chore: file asset update (#2816)

* chore: endpoint to update file asset
* chore: aws storage endpoint change
parent e21acf1341
commit e57b95f99e
@@ -1,7 +1,7 @@
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
-from rest_framework.parsers import MultiPartParser, FormParser
+from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
 
 # Module imports
 from .base import BaseAPIView
@@ -10,7 +10,7 @@ from plane.app.serializers import FileAssetSerializer
 
 
 class FileAssetEndpoint(BaseAPIView):
-    parser_classes = (MultiPartParser, FormParser)
+    parser_classes = (MultiPartParser, FormParser, JSONParser,)
 
     """
     A viewset for viewing and editing task instances.
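Adding JSONParser lets the endpoint accept application/json request bodies (such as the is_deleted payload the new patch handler reads below) alongside the existing multipart and form-data uploads. For illustration, the media type each configured parser accepts, using standard DRF attributes:

    # Illustrative only: the Content-Type each configured parser handles.
    from rest_framework.parsers import FormParser, JSONParser, MultiPartParser

    for parser in (MultiPartParser, FormParser, JSONParser):
        print(parser.media_type)
    # multipart/form-data
    # application/x-www-form-urlencoded
    # application/json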
@@ -25,7 +25,6 @@ class FileAssetEndpoint(BaseAPIView):
         else:
             return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
 
-
     def post(self, request, slug):
         serializer = FileAssetSerializer(data=request.data)
         if serializer.is_valid():
@@ -34,12 +33,11 @@ class FileAssetEndpoint(BaseAPIView):
             serializer.save(workspace_id=workspace.id)
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 
-
-    def delete(self, request, workspace_id, asset_key):
+    def patch(self, request, workspace_id, asset_key):
         asset_key = str(workspace_id) + "/" + asset_key
         file_asset = FileAsset.objects.get(asset=asset_key)
-        file_asset.is_deleted = True
+        file_asset.is_deleted = request.data.get("is_deleted", file_asset.is_deleted)
         file_asset.save()
         return Response(status=status.HTTP_204_NO_CONTENT)
 
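With delete repurposed as patch, clients mark an asset deleted by sending a JSON body instead of issuing a bare DELETE. A sketch of the call from the client side; the URL pattern is not part of this diff, so the path below is only an assumed placeholder:

    import requests

    # Assumed placeholder path: the actual route for FileAssetEndpoint is
    # defined elsewhere in the project, not in this diff.
    url = "https://api.example.com/workspaces/<workspace_id>/file-assets/<asset_key>/"

    # The JSON body feeds request.data.get("is_deleted", ...) in the patch
    # handler above; JSONParser makes this body parseable.
    resp = requests.patch(url, json={"is_deleted": True})
    assert resp.status_code == 204  # HTTP_204_NO_CONTENT on success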
|
@@ -975,7 +975,7 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
             aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
         )
         params = {
-            "Bucket": settings.AWS_S3_BUCKET_NAME,
+            "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
             "Prefix": "static/project-cover/",
         }
 
@@ -987,7 +987,7 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
                 "/"
             ): # This line ensures we're only getting files, not "sub-folders"
                 files.append(
-                    f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
+                    f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
                 )
 
         return Response(files, status=status.HTTP_200_OK)
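For reference, the rename leaves the shape of the generated URLs unchanged; with assumed example values the loop above produces virtual-hosted-style S3 URLs like:

    # Assumed example values; only the setting name changed, not the URL format.
    AWS_STORAGE_BUCKET_NAME = "uploads"
    AWS_REGION = "us-east-1"
    key = "static/project-cover/cover-1.jpg"

    url = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.{AWS_REGION}.amazonaws.com/{key}"
    print(url)  # https://uploads.s3.us-east-1.amazonaws.com/static/project-cover/cover-1.jpg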
|
@@ -81,13 +81,13 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
         )
         s3.upload_fileobj(
             zip_file,
-            settings.AWS_S3_BUCKET_NAME,
+            settings.AWS_STORAGE_BUCKET_NAME,
             file_name,
             ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
         )
         presigned_url = s3.generate_presigned_url(
             "get_object",
-            Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
+            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
             ExpiresIn=expires_in,
         )
         # Create the new url with updated domain and protocol
@@ -105,14 +105,14 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
         )
         s3.upload_fileobj(
             zip_file,
-            settings.AWS_S3_BUCKET_NAME,
+            settings.AWS_STORAGE_BUCKET_NAME,
             file_name,
             ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
         )
 
         presigned_url = s3.generate_presigned_url(
             "get_object",
-            Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
+            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
             ExpiresIn=expires_in,
         )
 
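Both upload paths follow the same boto3 pattern: upload the archive, then mint a time-limited presigned GET URL. A self-contained sketch of those calls; bucket, key, and expiry below are assumed placeholders:

    import io
    import boto3

    s3 = boto3.client("s3")  # credentials/region taken from the environment

    zip_file = io.BytesIO(b"...")   # placeholder archive contents
    bucket = "uploads"              # stands in for settings.AWS_STORAGE_BUCKET_NAME
    file_name = "exports/demo.zip"  # the real key is built from workspace/token ids

    s3.upload_fileobj(
        zip_file,
        bucket,
        file_name,
        ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
    )

    # The returned URL is valid for ExpiresIn seconds; afterwards S3 rejects it.
    presigned_url = s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": file_name},
        ExpiresIn=3600,
    )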
|
@@ -42,8 +42,8 @@ def delete_old_s3_link():
         # Delete object from S3
         if file_name:
             if settings.USE_MINIO:
-                s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
+                s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
             else:
-                s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
+                s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
 
         ExporterHistory.objects.filter(id=exporter_id).update(url=None)
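Incidentally, after this rename both branches of the USE_MINIO check issue the same delete_object call, so the conditional could arguably be collapsed; a sketch of the equivalent simplification (the split presumably remains in case the branches need to diverge again):

    # Equivalent to the two branches above now that both use the same setting.
    if file_name:
        s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)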
|
@@ -40,7 +40,7 @@ class Command(BaseCommand):
         )
         # Create an S3 client using the session
         s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL)
-        bucket_name = settings.AWS_S3_BUCKET_NAME
+        bucket_name = settings.AWS_STORAGE_BUCKET_NAME
 
         self.stdout.write(self.style.NOTICE("Checking bucket..."))
 
@@ -50,7 +50,7 @@ class Command(BaseCommand):
             self.set_bucket_public_policy(s3_client, bucket_name)
         except ClientError as e:
             error_code = int(e.response['Error']['Code'])
-            bucket_name = settings.AWS_S3_BUCKET_NAME
+            bucket_name = settings.AWS_STORAGE_BUCKET_NAME
             if error_code == 404:
                 # Bucket does not exist, create it
                 self.stdout.write(self.style.WARNING(f"Bucket '{bucket_name}' does not exist. Creating bucket..."))
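The surrounding command treats a ClientError with code 404 as "bucket missing, create it". The probe call itself is outside this hunk; a minimal standalone sketch of the conventional boto3 flow, with placeholder endpoint and bucket:

    import boto3
    from botocore.exceptions import ClientError

    s3_client = boto3.client("s3", endpoint_url="http://localhost:9000")  # placeholder
    bucket_name = "uploads"  # stands in for settings.AWS_STORAGE_BUCKET_NAME

    try:
        s3_client.head_bucket(Bucket=bucket_name)  # raises ClientError when missing
    except ClientError as e:
        if int(e.response["Error"]["Code"]) == 404:
            s3_client.create_bucket(Bucket=bucket_name)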
|
@@ -224,7 +224,7 @@ STORAGES["default"] = {
 }
 AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
 AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
-AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
+AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
 AWS_REGION = os.environ.get("AWS_REGION", "")
 AWS_DEFAULT_ACL = "public-read"
 AWS_QUERYSTRING_AUTH = False
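This is likely the point of the whole rename: django-storages' S3 backend (configured via STORAGES["default"] above) reads its bucket from the AWS_STORAGE_BUCKET_NAME setting, while the environment variable keeps its old AWS_S3_BUCKET_NAME name, so existing deployments need no .env changes. In effect:

    import os

    # New setting name (what the S3 storage backend looks up) ...
    AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
    # ... still populated from the old environment variable.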
@@ -234,7 +234,7 @@ AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ.
 )
 if AWS_S3_ENDPOINT_URL:
     parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
-    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_S3_BUCKET_NAME}"
+    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
     AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
 
 
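When a custom S3 endpoint is configured (typically MinIO served behind the web domain), asset URLs are rewritten onto the web host with the bucket as a path prefix. A worked example under assumed values:

    from urllib.parse import urlparse

    # Assumed example values.
    WEB_URL = "https://plane.example.com"
    AWS_STORAGE_BUCKET_NAME = "uploads"

    parsed_url = urlparse(WEB_URL)
    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
    AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"

    print(AWS_S3_CUSTOM_DOMAIN)  # plane.example.com/uploads
    print(AWS_S3_URL_PROTOCOL)   # https: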
|