mirror of
https://github.com/makeplane/plane
synced 2024-06-14 14:31:34 +00:00
fix: issue exports in self hosted instances (#1996)
* fix: issue exports in self hosted instances
* dev: remove print logs
* dev: update url creation function
* fix: changed the presigned url for self hosted exports

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
parent abcdebef85
commit e1ad385688
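The change gates every S3 call on `settings.DOCKERIZED and settings.USE_MINIO`: self-hosted instances talk to MinIO through `AWS_S3_ENDPOINT_URL` and the `AWS_STORAGE_BUCKET_NAME` bucket, while hosted deployments keep the plain AWS client. The presigned URL MinIO returns points at the internal Docker hostname (`http://plane-minio:9000`), which a user's browser cannot resolve, so the commit rewrites it to the instance's public scheme and domain. Below is a minimal sketch of that rewrite using `urlparse`/`urlunparse` (the import this diff adds); the committed code does the same job with a plain string replace, and the function name and example values here are illustrative, not from the commit:

```python
from urllib.parse import urlparse, urlunparse


def rewrite_presigned_url(presigned_url, public_scheme, public_domain):
    """Swap the internal scheme/host for the public ones, keeping the
    path and the signature query parameters intact."""
    parsed = urlparse(presigned_url)
    return urlunparse(parsed._replace(scheme=public_scheme, netloc=public_domain))


url = "http://plane-minio:9000/uploads/ws-1/export.zip?X-Amz-Signature=abc"
print(rewrite_presigned_url(url, "https", "plane.example.com"))
# https://plane.example.com/uploads/ws-1/export.zip?X-Amz-Signature=abc
```

Rewriting the host means the URL no longer matches what the SigV4 signature was computed over; the export object is uploaded with a `public-read` ACL, which is presumably why the rewritten link still downloads.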
```diff
@@ -4,6 +4,7 @@ import io
 import json
 import boto3
 import zipfile
+from urllib.parse import urlparse, urlunparse
 
 # Django imports
 from django.conf import settings
```
```diff
@@ -23,9 +24,11 @@ def dateTimeConverter(time):
     if time:
         return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")
 
+
 def dateConverter(time):
     if time:
         return time.strftime("%a, %d %b %Y")
 
+
 def create_csv_file(data):
     csv_buffer = io.StringIO()
```
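For reference, what the two converters above produce (example output, not part of the diff); note that `%I` is a 12-hour clock with no AM/PM marker:

```python
from datetime import datetime, timezone

t = datetime(2023, 8, 25, 14, 30, 5, tzinfo=timezone.utc)
print(t.strftime("%a, %d %b %Y %I:%M:%S %Z%z"))  # Fri, 25 Aug 2023 02:30:05 UTC+0000
print(t.strftime("%a, %d %b %Y"))                # Fri, 25 Aug 2023
```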
```diff
@@ -66,28 +69,53 @@ def create_zip_file(files):
 
 
 def upload_to_s3(zip_file, workspace_id, token_id, slug):
-    s3 = boto3.client(
-        "s3",
-        region_name=settings.AWS_REGION,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
     file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
-    s3.upload_fileobj(
-        zip_file,
-        settings.AWS_S3_BUCKET_NAME,
-        file_name,
-        ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
-    )
-
     expires_in = 7 * 24 * 60 * 60
-    presigned_url = s3.generate_presigned_url(
-        "get_object",
-        Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
-        ExpiresIn=expires_in,
-    )
+
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_STORAGE_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )
+        # Create the new url with updated domain and protocol
+        presigned_url = presigned_url.replace(
+            "http://plane-minio:9000/uploads/",
+            f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name=settings.AWS_REGION,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_S3_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )
+
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )
 
     exporter_instance = ExporterHistory.objects.get(token=token_id)
```
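The upload/presign sequence is now duplicated across the two branches, differing only in how the client is constructed and which bucket is used. A hedged sketch, not from the commit, of how that duplication could be factored out; the helper name is invented, while the settings names are the ones the diff references:

```python
import boto3
from botocore.client import Config
from django.conf import settings


def _export_s3_client():
    """Return an (s3_client, bucket_name) pair for the current deployment mode."""
    common = dict(
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(signature_version="s3v4"),
    )
    if settings.DOCKERIZED and settings.USE_MINIO:
        # Self-hosted: talk to MinIO via its internal endpoint.
        client = boto3.client("s3", endpoint_url=settings.AWS_S3_ENDPOINT_URL, **common)
        return client, settings.AWS_STORAGE_BUCKET_NAME
    # Hosted: a regular AWS client pinned to the configured region.
    client = boto3.client("s3", region_name=settings.AWS_REGION, **common)
    return client, settings.AWS_S3_BUCKET_NAME
```

With such a helper, `upload_to_s3` would only need to branch for the MinIO-specific URL rewrite.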
```diff
@@ -98,7 +126,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
     else:
         exporter_instance.status = "failed"
 
-    exporter_instance.save(update_fields=["status", "url","key"])
+    exporter_instance.save(update_fields=["status", "url", "key"])
 
 
 def generate_table_row(issue):
```
```diff
@@ -145,7 +173,7 @@ def generate_json_row(issue):
         else "",
         "Labels": issue["labels__name"],
         "Cycle Name": issue["issue_cycle__cycle__name"],
         "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
         "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
         "Module Name": issue["issue_module__module__name"],
         "Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
```
```diff
@@ -242,7 +270,9 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         workspace_issues = (
             (
                 Issue.objects.filter(
-                    workspace__id=workspace_id, project_id__in=project_ids
+                    workspace__id=workspace_id,
+                    project_id__in=project_ids,
+                    project__project_projectmember__member=exporter_instance.initiated_by_id,
                 )
                 .select_related("project", "workspace", "state", "parent", "created_by")
                 .prefetch_related(
```
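Beyond reformatting the filter arguments, this hunk tightens the query: exports now only include issues from projects where the export's initiator is a member. An illustrative reduction of the added filter (the `Issue` import path is assumed, not shown in the diff):

```python
from plane.db.models import Issue  # assumed import path


def issues_visible_to(user_id, workspace_id, project_ids):
    return Issue.objects.filter(
        workspace__id=workspace_id,
        project_id__in=project_ids,
        # Join through project memberships; restricts rows to the user's projects.
        project__project_projectmember__member=user_id,
    ).distinct()  # the membership join can duplicate rows, hence distinct()
```

The `.distinct()` the full query already applies (see the next hunk) matters here, since the membership join can fan rows out.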
```diff
@@ -275,7 +305,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
                     "labels__name",
                 )
             )
-            .order_by("project__identifier","sequence_id")
+            .order_by("project__identifier", "sequence_id")
             .distinct()
         )
         # CSV header
```
```diff
@@ -338,7 +368,6 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         exporter_instance.status = "failed"
         exporter_instance.reason = str(e)
         exporter_instance.save(update_fields=["status", "reason"])
 
         # Print logs if in DEBUG mode
         if settings.DEBUG:
             print(e)
```
The second changed file updates the scheduled cleanup task `delete_old_s3_link`, which deletes export objects older than eight days, with the same deployment-mode split:

```diff
@@ -21,18 +21,29 @@ def delete_old_s3_link():
     expired_exporter_history = ExporterHistory.objects.filter(
         Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
     ).values_list("key", "id")
-    s3 = boto3.client(
-        "s3",
-        region_name="ap-south-1",
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name="ap-south-1",
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
 
     for file_name, exporter_id in expired_exporter_history:
         # Delete object from S3
         if file_name:
-            s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
+            if settings.DOCKERIZED and settings.USE_MINIO:
+                s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
+            else:
+                s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
 
             ExporterHistory.objects.filter(id=exporter_id).update(url=None)
```
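A condensed sketch of the cleanup flow after this change, assuming the same settings flags; the `ExporterHistory` import path is assumed:

```python
from django.conf import settings
from plane.db.models import ExporterHistory  # assumed import path


def delete_expired_export(s3, file_name, exporter_id):
    """Delete one expired export object and clear its stored download URL."""
    bucket = (
        settings.AWS_STORAGE_BUCKET_NAME
        if settings.DOCKERIZED and settings.USE_MINIO
        else settings.AWS_S3_BUCKET_NAME
    )
    s3.delete_object(Bucket=bucket, Key=file_name)
    ExporterHistory.objects.filter(id=exporter_id).update(url=None)
```

Note that the non-MinIO branch still hard-codes `region_name="ap-south-1"`, unlike `upload_to_s3`, which reads `settings.AWS_REGION`.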