diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py
index 22a9afe51..a45120eb5 100644
--- a/apiserver/plane/bgtasks/export_task.py
+++ b/apiserver/plane/bgtasks/export_task.py
@@ -4,6 +4,7 @@ import io
 import json
 import boto3
 import zipfile
+from urllib.parse import urlparse, urlunparse
 
 # Django imports
 from django.conf import settings
@@ -23,9 +24,11 @@ def dateTimeConverter(time):
     if time:
         return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")
 
+
 def dateConverter(time):
     if time:
-        return time.strftime("%a, %d %b %Y")
+        return time.strftime("%a, %d %b %Y")
 
+
 def create_csv_file(data):
     csv_buffer = io.StringIO()
@@ -66,28 +69,54 @@ def create_zip_file(files):
 
 
 def upload_to_s3(zip_file, workspace_id, token_id, slug):
-    s3 = boto3.client(
-        "s3",
-        region_name=settings.AWS_REGION,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
     file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
-
-    s3.upload_fileobj(
-        zip_file,
-        settings.AWS_S3_BUCKET_NAME,
-        file_name,
-        ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
-    )
-    expires_in = 7 * 24 * 60 * 60
-    presigned_url = s3.generate_presigned_url(
-        "get_object",
-        Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
-        ExpiresIn=expires_in,
-    )
+    expires_in = 7 * 24 * 60 * 60
+
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_STORAGE_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )
+        # Create the new url with updated domain and protocol
+        presigned_url = presigned_url.replace(
+            "http://plane-minio:9000/uploads/",
+            f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name=settings.AWS_REGION,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_S3_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )
+
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )
 
     exporter_instance = ExporterHistory.objects.get(token=token_id)
 
     if presigned_url:
@@ -98,7 +127,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
     else:
         exporter_instance.status = "failed"
 
-    exporter_instance.save(update_fields=["status", "url","key"])
+    exporter_instance.save(update_fields=["status", "url", "key"])
 
 
 def generate_table_row(issue):
@@ -145,7 +174,7 @@ def generate_json_row(issue):
         else "",
         "Labels": issue["labels__name"],
         "Cycle Name": issue["issue_cycle__cycle__name"],
-        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
+        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
         "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
         "Module Name": issue["issue_module__module__name"],
         "Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
@@ -242,7 +271,9 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         workspace_issues = (
             (
                 Issue.objects.filter(
-                    workspace__id=workspace_id, project_id__in=project_ids
+                    workspace__id=workspace_id,
+                    project_id__in=project_ids,
+                    project__project_projectmember__member=exporter_instance.initiated_by_id,
                 )
                 .select_related("project", "workspace", "state", "parent", "created_by")
                 .prefetch_related(
@@ -275,7 +306,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
                 "labels__name",
             )
         )
-        .order_by("project__identifier","sequence_id")
+        .order_by("project__identifier", "sequence_id")
         .distinct()
     )
     # CSV header
@@ -338,7 +369,6 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         exporter_instance.status = "failed"
         exporter_instance.reason = str(e)
         exporter_instance.save(update_fields=["status", "reason"])
-
         # Print logs if in DEBUG mode
         if settings.DEBUG:
             print(e)
diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py
index 799904347..a77d68b4b 100644
--- a/apiserver/plane/bgtasks/exporter_expired_task.py
+++ b/apiserver/plane/bgtasks/exporter_expired_task.py
@@ -21,18 +21,29 @@ def delete_old_s3_link():
     expired_exporter_history = ExporterHistory.objects.filter(
         Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
     ).values_list("key", "id")
-
-    s3 = boto3.client(
-        "s3",
-        region_name="ap-south-1",
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name="ap-south-1",
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
 
     for file_name, exporter_id in expired_exporter_history:
         # Delete object from S3
         if file_name:
-            s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
+            if settings.DOCKERIZED and settings.USE_MINIO:
+                s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
+            else:
+                s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
 
         ExporterHistory.objects.filter(id=exporter_id).update(url=None)