From 8e9f9cf6df7fde49f42796711d03aaef4a4419b5 Mon Sep 17 00:00:00 2001 From: Bavisetti Narayan <72156168+NarayanBavisetti@users.noreply.github.com> Date: Mon, 20 Nov 2023 16:34:57 +0530 Subject: [PATCH] chore: aws url name changed (#2808) --- apiserver/plane/bgtasks/export_task.py | 4 ++-- apiserver/plane/bgtasks/exporter_expired_task.py | 2 +- apiserver/plane/db/management/commands/create_bucket.py | 4 ++-- apiserver/plane/settings/common.py | 5 +++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index 1329697e9..7941344ef 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -81,13 +81,13 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug): ) s3.upload_fileobj( zip_file, - settings.AWS_STORAGE_BUCKET_NAME, + settings.AWS_S3_BUCKET_NAME, file_name, ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, ) presigned_url = s3.generate_presigned_url( "get_object", - Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, + Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name}, ExpiresIn=expires_in, ) # Create the new url with updated domain and protocol diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index 45c53eaca..34b254d95 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -42,7 +42,7 @@ def delete_old_s3_link(): # Delete object from S3 if file_name: if settings.DOCKERIZED and settings.USE_MINIO: - s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) + s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) else: s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) diff --git a/apiserver/plane/db/management/commands/create_bucket.py b/apiserver/plane/db/management/commands/create_bucket.py index 
054523bf9..fbda34f77 100644 --- a/apiserver/plane/db/management/commands/create_bucket.py +++ b/apiserver/plane/db/management/commands/create_bucket.py @@ -40,7 +40,7 @@ class Command(BaseCommand): ) # Create an S3 client using the session s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL) - bucket_name = settings.AWS_STORAGE_BUCKET_NAME + bucket_name = settings.AWS_S3_BUCKET_NAME self.stdout.write(self.style.NOTICE("Checking bucket...")) @@ -50,7 +50,7 @@ class Command(BaseCommand): self.set_bucket_public_policy(s3_client, bucket_name) except ClientError as e: error_code = int(e.response['Error']['Code']) - bucket_name = settings.AWS_STORAGE_BUCKET_NAME + bucket_name = settings.AWS_S3_BUCKET_NAME if error_code == 404: # Bucket does not exist, create it self.stdout.write(self.style.WARNING(f"Bucket '{bucket_name}' does not exist. Creating bucket...")) diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index b461697f1..46ef090ab 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -224,7 +224,8 @@ STORAGES["default"] = { } AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") -AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_REGION = os.environ.get("AWS_REGION", "") AWS_DEFAULT_ACL = "public-read" AWS_QUERYSTRING_AUTH = False AWS_S3_FILE_OVERWRITE = False @@ -233,7 +234,7 @@ AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ. ) if AWS_S3_ENDPOINT_URL: parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" + AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_S3_BUCKET_NAME}" AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"