From d46d70fcd518d0932b485983b18d9cce0b75be8a Mon Sep 17 00:00:00 2001
From: Bavisetti Narayan <72156168+NarayanBavisetti@users.noreply.github.com>
Date: Mon, 20 Nov 2023 21:32:00 +0530
Subject: [PATCH] chore: removed DOCKERIZED value and changed REDIS_SSL (#2813)

* chore: removed DOCKERIZED value

* chore: changed redis ssl
---
 apiserver/.env.example                           | 3 ++-
 apiserver/plane/bgtasks/export_task.py           | 2 +-
 apiserver/plane/bgtasks/exporter_expired_task.py | 4 ++--
 apiserver/plane/settings/common.py               | 2 +-
 4 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/apiserver/.env.example b/apiserver/.env.example
index 88a9c17f5..ace1e07b1 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -38,7 +38,8 @@ GPT_ENGINE="gpt-3.5-turbo" # deprecated
 GITHUB_CLIENT_SECRET="" # For fetching release notes
 
 # Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
+
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py
index 7941344ef..a49f8bb86 100644
--- a/apiserver/plane/bgtasks/export_task.py
+++ b/apiserver/plane/bgtasks/export_task.py
@@ -71,7 +71,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
     file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
     expires_in = 7 * 24 * 60 * 60
 
-    if settings.DOCKERIZED and settings.USE_MINIO:
+    if settings.USE_MINIO:
         s3 = boto3.client(
             "s3",
             endpoint_url=settings.AWS_S3_ENDPOINT_URL,
diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py
index 34b254d95..aef4408d4 100644
--- a/apiserver/plane/bgtasks/exporter_expired_task.py
+++ b/apiserver/plane/bgtasks/exporter_expired_task.py
@@ -21,7 +21,7 @@ def delete_old_s3_link():
     expired_exporter_history = ExporterHistory.objects.filter(
         Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
     ).values_list("key", "id")
-    if settings.DOCKERIZED and settings.USE_MINIO:
+    if settings.USE_MINIO:
         s3 = boto3.client(
             "s3",
             endpoint_url=settings.AWS_S3_ENDPOINT_URL,
@@ -41,7 +41,7 @@ def delete_old_s3_link():
     for file_name, exporter_id in expired_exporter_history:
         # Delete object from S3
         if file_name:
-            if settings.DOCKERIZED and settings.USE_MINIO:
+            if settings.USE_MINIO:
                 s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
             else:
                 s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index 46ef090ab..4fa761f06 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -149,7 +149,7 @@ else:
 
 # Redis Config
 REDIS_URL = os.environ.get("REDIS_URL")
-REDIS_SSL = "rediss" in REDIS_URL
+REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL
 
 if REDIS_SSL:
     CACHES = {
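
Note (not part of the patch): the REDIS_SSL change guards against REDIS_URL being unset. os.environ.get("REDIS_URL") returns None in that case, and evaluating "rediss" in None raises a TypeError at import time, so the membership test is now short-circuited behind a truthiness check. A minimal, standalone sketch of that behaviour, using only names that appear in the diff:

import os

# Sketch only: mirrors the patched expression in apiserver/plane/settings/common.py.
REDIS_URL = os.environ.get("REDIS_URL")  # None when the variable is unset

# Old form raised TypeError when REDIS_URL was None:
#     REDIS_SSL = "rediss" in REDIS_URL
# Patched form short-circuits to a falsy value instead:
REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL

print(bool(REDIS_SSL))  # False when unset or for plain redis:// URLs, True for rediss:// URLs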
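
Similarly, with DOCKERIZED deprecated, USE_MINIO alone now decides whether the exporter tasks build their S3 client against the bundled MinIO endpoint. A rough sketch of that selection under the patched logic; the helper name make_export_client is hypothetical, and only the settings visible in the diff are used:

import boto3
from django.conf import settings

def make_export_client():
    # Hypothetical helper: USE_MINIO is the only gate after this patch,
    # DOCKERIZED is no longer consulted.
    if settings.USE_MINIO:
        return boto3.client(
            "s3",
            endpoint_url=settings.AWS_S3_ENDPOINT_URL,  # MinIO endpoint, as in the diff
        )
    # Otherwise fall back to boto3's default AWS credential/region resolution.
    return boto3.client("s3")

# Usage as in exporter_expired_task.py:
#     s3 = make_export_client()
#     s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)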