dev: update the response for assets

pablohashescobar 2024-01-31 18:43:02 +05:30
parent c9d2ea36b8
commit 42f307421a
8 changed files with 102 additions and 20 deletions

View File

@@ -1,8 +1,9 @@
from .base import BaseSerializer
from .base import BaseFileSerializer
from plane.db.models import FileAsset
class FileAssetSerializer(BaseSerializer):
class FileAssetSerializer(BaseFileSerializer):
class Meta:
model = FileAsset
fields = "__all__"

View File

@@ -1,4 +1,5 @@
from rest_framework import serializers
from plane.settings.storage import S3PrivateBucketStorage
class BaseSerializer(serializers.ModelSerializer):
@@ -60,7 +61,7 @@ class DynamicBaseSerializer(BaseSerializer):
CycleIssueSerializer,
IssueFlatSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer
InboxIssueLiteSerializer,
)
# Expansion mapper
@@ -84,7 +85,19 @@ class DynamicBaseSerializer(BaseSerializer):
"issue_inbox": InboxIssueLiteSerializer,
}
self.fields[field] = expansion[field](many=True if field in ["members", "assignees", "labels", "issue_cycle", "issue_relation", "issue_inbox"] else False)
self.fields[field] = expansion[field](
many=True
if field
in [
"members",
"assignees",
"labels",
"issue_cycle",
"issue_relation",
"issue_inbox",
]
else False
)
return self.fields
@@ -105,7 +118,7 @@ class DynamicBaseSerializer(BaseSerializer):
LabelSerializer,
CycleIssueSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer
InboxIssueLiteSerializer,
)
# Expansion mapper
@@ -146,3 +159,29 @@ class DynamicBaseSerializer(BaseSerializer):
)
return response
class BaseFileSerializer(DynamicBaseSerializer):
download_url = serializers.SerializerMethodField()
class Meta:
abstract = True # Make this serializer abstract
def get_download_url(self, obj):
if hasattr(obj, "asset") and obj.asset:
storage = S3PrivateBucketStorage()
return storage.download_url(obj.asset.name)
return None
def to_representation(self, instance):
"""
Object instance -> Dict of primitive datatypes.
"""
response = super().to_representation(instance)
response[
"asset"
] = (
instance.asset.name
) # Ensure 'asset' field is consistently serialized
# Apply custom method to get download URL
return response
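
For reference, a minimal usage sketch of a serializer built on BaseFileSerializer (not part of this commit; the import path and the presence of a saved FileAsset are assumptions):

from plane.db.models import FileAsset
from plane.app.serializers import FileAssetSerializer  # import path assumed

asset = FileAsset.objects.first()            # assumes at least one stored asset
data = FileAssetSerializer(asset).data
# data["asset"]        -> the raw object key (instance.asset.name)
# data["download_url"] -> presigned URL from S3PrivateBucketStorage.download_url()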

View File

@@ -5,7 +5,7 @@ from django.utils import timezone
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .base import BaseSerializer, DynamicBaseSerializer, BaseFileSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .project import ProjectLiteSerializer
@@ -444,7 +444,8 @@ class IssueLinkSerializer(BaseSerializer):
return IssueLink.objects.create(**validated_data)
class IssueAttachmentSerializer(BaseSerializer):
class IssueAttachmentSerializer(BaseFileSerializer):
class Meta:
model = IssueAttachment
fields = "__all__"
@@ -503,9 +504,7 @@ class IssueCommentSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
comment_reactions = CommentReactionSerializer(
read_only=True, many=True
)
comment_reactions = CommentReactionSerializer(read_only=True, many=True)
is_member = serializers.BooleanField(read_only=True)
class Meta:
@@ -615,7 +614,10 @@ class IssueSerializer(DynamicBaseSerializer):
def get_module_ids(self, obj):
# Access the prefetched modules and extract module IDs
return [module for module in obj.issue_module.values_list("module_id", flat=True)]
return [
module
for module in obj.issue_module.values_list("module_id", flat=True)
]
class IssueLiteSerializer(DynamicBaseSerializer):

View File

@@ -8,12 +8,12 @@ from django.conf import settings
# Module import
from . import BaseModel
from plane.settings.storage import S3PrivateBucketStorage
def get_upload_path(instance, filename):
if instance.workspace_id is not None:
return f"{instance.workspace.id}/{uuid4().hex}-{filename}"
return f"user-{uuid4().hex}-{filename}"
return f"{instance.workspace.id}/{uuid4().hex}"
return f"user-{uuid4().hex}"
def file_size(value):
@@ -32,6 +32,7 @@ class FileAsset(BaseModel):
validators=[
file_size,
],
storage=S3PrivateBucketStorage(),
)
workspace = models.ForeignKey(
"db.Workspace",

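To see the upload-path change in isolation, a standalone sketch (the helper is copied from the hunk above; the workspace and asset objects are stand-ins):

from uuid import uuid4

# Copied from the new version of the model helper above.
def get_upload_path(instance, filename):
    if instance.workspace_id is not None:
        return f"{instance.workspace.id}/{uuid4().hex}"
    return f"user-{uuid4().hex}"

class _Workspace:
    id = "9f2b1c7e-workspace-id"            # placeholder workspace id

class _Asset:
    workspace_id = "9f2b1c7e-workspace-id"  # non-None, so the workspace branch is taken
    workspace = _Workspace()

print(get_upload_path(_Asset(), "report.pdf"))
# e.g. "9f2b1c7e-workspace-id/<32-char hex>": the original filename is no longer part of the key
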
View File

@@ -337,7 +337,7 @@ class IssueLink(ProjectBaseModel):
def get_upload_path(instance, filename):
return f"{instance.workspace.id}/{uuid4().hex}-{filename}"
return f"{instance.workspace.id}/{uuid4().hex}"
def file_size(value):

View File

@@ -226,14 +226,14 @@ STORAGES = {
},
}
STORAGES["default"] = {
"BACKEND": "storages.backends.s3boto3.S3Boto3Storage",
"BACKEND": "plane.settings.storage.S3PrivateBucketStorage",
}
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
AWS_REGION = os.environ.get("AWS_REGION", "")
AWS_DEFAULT_ACL = "public-read"
AWS_QUERYSTRING_AUTH = False
AWS_QUERYSTRING_AUTH = True
AWS_S3_FILE_OVERWRITE = False
AWS_S3_ENDPOINT_URL = os.environ.get(
"AWS_S3_ENDPOINT_URL", None

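For quick reference, the values that take effect from this hunk when no AWS_* environment variables are set (a sketch of the defaults above, not extra code in the commit):

# Defaults from the diff when the environment provides no overrides.
STORAGES = {}  # remaining entries from the settings module omitted here
STORAGES["default"] = {"BACKEND": "plane.settings.storage.S3PrivateBucketStorage"}

AWS_ACCESS_KEY_ID = "access-key"        # placeholder default
AWS_SECRET_ACCESS_KEY = "secret-key"    # placeholder default
AWS_STORAGE_BUCKET_NAME = "uploads"
AWS_REGION = ""
AWS_QUERYSTRING_AUTH = True             # URLs built by django-storages are now signed and expiring
AWS_S3_FILE_OVERWRITE = False
AWS_S3_ENDPOINT_URL = None
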
View File

@@ -0,0 +1,15 @@
# Third party imports
from storages.backends.s3boto3 import S3Boto3Storage
# Module imports
from plane.utils.presigned_url_generator import generate_download_presigned_url
class S3PrivateBucketStorage(S3Boto3Storage):
def url(self, name):
# Return an empty string or None, or implement custom logic here
return name
def download_url(self, name):
return generate_download_presigned_url(name)
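
A short usage sketch of the new backend (illustrative key; assumes Django settings are configured as in this commit):

from plane.settings.storage import S3PrivateBucketStorage

storage = S3PrivateBucketStorage()
key = "workspace-id/0123456789abcdef"  # placeholder object key

storage.url(key)           # returns the key unchanged, so no direct bucket URL is exposed
storage.download_url(key)  # returns a time-limited presigned download URL (None on error)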

View File

@@ -0,0 +1,24 @@
import boto3
from django.conf import settings
def generate_download_presigned_url(object_name, expiration=3600):
"""
Generate a presigned URL to download an object from S3.
:param object_name: The key name of the object in the S3 bucket.
:param expiration: Time in seconds for the presigned URL to remain valid (default is 1 hour).
:return: Presigned URL as a string. If error, returns None.
"""
s3_client = boto3.client('s3',
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
region_name=settings.AWS_REGION,
endpoint_url=settings.AWS_S3_ENDPOINT_URL)
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
'Key': object_name},
ExpiresIn=expiration)
return response
except Exception as e:
print(f"Error generating presigned download URL: {e}")
return None
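
A usage sketch for the helper itself (illustrative key and expiration; assumes Django settings are loaded):

from plane.utils.presigned_url_generator import generate_download_presigned_url

# 15-minute download link for a stored object; returns None if signing fails.
url = generate_download_presigned_url("workspace-id/0123456789abcdef", expiration=900)
if url:
    print(url)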