dev: back migration for urls

pablohashescobar 2024-02-04 11:27:50 +05:30
parent e1f0da5e6c
commit 6c97bcefbf
6 changed files with 263 additions and 69 deletions

View File

@@ -1,6 +1,5 @@
# Python imports
import json
from itertools import chain
# Django imports
from django.db import IntegrityError

View File

@@ -6,7 +6,7 @@ import django.db.models
import plane.db.models.asset
def update_urls(apps, schema_editor):
def update_user_urls(apps, schema_editor):
# Check if the app is using minio or s3
if settings.USE_MINIO:
prefix1 = (
@@ -27,28 +27,44 @@ def update_urls(apps, schema_editor):
# prefix 1
if user.avatar and (user.avatar.startswith(prefix1)):
avatar_key = user.avatar
user.avatar = "/api/users/avatar/" + avatar_key[len(prefix1) :] + "/"
user.avatar = (
"/api/users/avatar/" + avatar_key[len(prefix1) :] + "/"
)
bulk_users.append(user)
# prefix 2
if not settings.USE_MINIO and user.avatar and user.avatar.startswith(prefix2):
if (
not settings.USE_MINIO
and user.avatar
and user.avatar.startswith(prefix2)
):
avatar_key = user.avatar
user.avatar = "/api/users/avatar/" + avatar_key[len(prefix2) :] + "/"
user.avatar = (
"/api/users/avatar/" + avatar_key[len(prefix2) :] + "/"
)
bulk_users.append(user)
# prefix 1
if user.cover_image and (user.cover_image.startswith(prefix1)):
cover_image_key = user.cover_image
user.cover_image = (
"/api/users/cover-image/" + cover_image_key[len(prefix1) :] + "/"
"/api/users/cover-image/"
+ cover_image_key[len(prefix1) :]
+ "/"
)
bulk_users.append(user)
# prefix 2
if not settings.USE_MINIO and user.cover_image and user.cover_image.startswith(prefix2):
if (
not settings.USE_MINIO
and user.cover_image
and user.cover_image.startswith(prefix2)
):
cover_image_key = user.cover_image
user.cover_image = (
"/api/users/cover-image/" + cover_image_key[len(prefix2) :] + "/"
"/api/users/cover-image/"
+ cover_image_key[len(prefix2) :]
+ "/"
)
bulk_users.append(user)
@@ -57,6 +73,80 @@ def update_urls(apps, schema_editor):
)
def update_workspace_urls(apps, schema_editor):
# Check if the app is using minio or s3
if settings.USE_MINIO:
prefix1 = (
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/"
)
prefix2 = prefix1
else:
prefix1 = f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/"
prefix2 = (
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
)
Workspace = apps.get_model("db", "Workspace")
bulk_workspaces = []
# Loop through all the workspaces and update the logo
for workspace in Workspace.objects.all():
# prefix 1
if workspace.logo and (workspace.logo.startswith(prefix1)):
logo_key = workspace.logo
workspace.logo = f"/api/workspaces/{workspace.slug}/logo/{logo_key[len(prefix1) :]}/"
bulk_workspaces.append(workspace)
# prefix 2
if (
not settings.USE_MINIO
and workspace.logo
and (workspace.logo.startswith(prefix2))
):
logo_key = workspace.logo
workspace.logo = f"/api/workspaces/{workspace.slug}/logo/{logo_key[len(prefix2) :]}/"
bulk_workspaces.append(workspace)
Workspace.objects.bulk_update(bulk_workspaces, ["logo"], batch_size=100)
def update_project_urls(apps, schema_editor):
# Check if the app is using minio or s3
if settings.USE_MINIO:
prefix1 = (
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/"
)
prefix2 = prefix1
else:
prefix1 = f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/"
prefix2 = (
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
)
Project = apps.get_model("db", "Project")
bulk_projects = []
# Loop through all the projects and update the cover image
for project in Project.objects.all():
# prefix 1
if project.cover_image and (project.cover_image.startswith(prefix1)):
cover_image_key = project.cover_image
project.cover_image = f"/api/workspaces/{project.workspace.slug}/projects/{project.id}/cover-image/{cover_image_key[len(prefix1) :]}/"
bulk_projects.append(project)
# prefix 2
if (
not settings.USE_MINIO
and project.cover_image
and (project.cover_image.startswith(prefix2))
):
cover_image_key = project.cover_image
project.cover_image = f"/api/workspaces/{project.workspace.slug}/projects/{project.id}/cover-image/{cover_image_key[len(prefix2) :]}/"
bulk_projects.append(project)
Project.objects.bulk_update(bulk_projects, ["cover_image"], batch_size=100)
class Migration(migrations.Migration):
dependencies = [
("db", "0058_alter_moduleissue_issue_and_more"),
@@ -100,5 +190,12 @@ class Migration(migrations.Migration):
name="logo",
field=models.CharField(blank=True, null=True, verbose_name="Logo"),
),
migrations.RunPython(update_urls),
migrations.AddField(
model_name="fileasset",
name="size",
field=models.PositiveBigIntegerField(null=True),
),
migrations.RunPython(update_user_urls),
migrations.RunPython(update_workspace_urls),
migrations.RunPython(update_project_urls),
]
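
The three data migrations in this file share one pattern: compute the storage prefix (the MinIO custom domain, or the regional and legacy S3 bucket hosts), strip it from the stored absolute URL, and persist an API-relative path via bulk_update. A minimal sketch of that shared step, with a hypothetical helper name:

from django.conf import settings

def strip_storage_prefix(url, api_base):
    # Hypothetical helper distilling the pattern above: return the
    # API-relative path for a stored absolute storage URL, or None
    # when the URL matches no known storage prefix.
    if settings.USE_MINIO:
        prefixes = [
            f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
        ]
    else:
        prefixes = [
            f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/",
            f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/",
        ]
    for prefix in prefixes:
        if url and url.startswith(prefix):
            return f"{api_base}{url[len(prefix):]}/"
    return None

With such a helper, each loop body reduces to, e.g., user.avatar = strip_storage_prefix(user.avatar, "/api/users/avatar/") or user.avatar.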

View File

@@ -2,10 +2,14 @@
from django.db import migrations, models
from django.conf import settings
import django.db.models.deletion
# Third party imports
from bs4 import BeautifulSoup
def update_workspace_urls(apps, schema_editor):
# Check if the app is using minio or s3
def convert_image_sources(apps, schema_editor):
if settings.USE_MINIO:
prefix1 = (
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/"
@@ -17,56 +21,36 @@ def update_workspace_urls(apps, schema_editor):
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
)
Workspace = apps.get_model("db", "Workspace")
bulk_workspaces = []
Issue = apps.get_model("db", "Issue")
# Loop through all the issues and update the image sources in the description
for workspace in Workspace.objects.all():
# prefix 1
if workspace.logo and (workspace.logo.startswith(prefix1)):
logo_key = workspace.logo
workspace.logo = f"/api/workspaces/{workspace.slug}/logo/{logo_key[len(prefix1) :]}/"
bulk_workspaces.append(workspace)
bulk_issues = []
# prefix 2
if not settings.USE_MINIO and workspace.logo and (workspace.logo.startswith(prefix2)):
logo_key = workspace.logo
workspace.logo = f"/api/workspaces/{workspace.slug}/logo/{logo_key[len(prefix2) :]}/"
bulk_workspaces.append(workspace)
for issue in Issue.objects.all():
# Parse the html
soup = BeautifulSoup(issue.description_html, "lxml")
img_tags = soup.find_all("img")
for img in img_tags:
src = img.get("src", "")
if src and (src.startswith(prefix1)):
img["src"] = (
f"/api/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{src[len(prefix1): ]}"
)
issue.description_html = str(soup)
bulk_issues.append(issue)
Workspace.objects.bulk_update(bulk_workspaces, ["logo"], batch_size=100)
# prefix 2
if (
not settings.USE_MINIO
and src
and src.startswith(prefix2)
):
img["src"] = (
f"/api/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{src[len(prefix2): ]}"
)
issue.description_html = str(soup)
bulk_issues.append(issue)
def update_project_urls(apps, schema_editor):
# Check if the app is using minio or s3
if settings.USE_MINIO:
prefix1 = (
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/"
)
prefix2 = prefix1
else:
prefix1 = f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/"
prefix2 = (
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
)
Project = apps.get_model("db", "Project")
bulk_projects = []
# Loop through all the projects and update the cover image
for project in Project.objects.all():
# prefix 1
if project.cover_image and (project.cover_image.startswith(prefix1)):
cover_image_key = project.cover_image
project.cover_image = f"/api/workspaces/{project.workspace.slug}/projects/{project.id}/cover-image/{cover_image_key[len(prefix1) :]}/"
bulk_projects.append(project)
# prefix 2
if not settings.USE_MINIO and project.cover_image and (project.cover_image.startswith(prefix2)):
cover_image_key = project.cover_image
project.cover_image = f"/api/workspaces/{project.workspace.slug}/projects/{project.id}/cover-image/{cover_image_key[len(prefix2) :]}/"
bulk_projects.append(project)
Project.objects.bulk_update(bulk_projects, ["cover_image"], batch_size=100)
Issue.objects.bulk_update(bulk_issues, ["description_html"], batch_size=1000)
class Migration(migrations.Migration):
@@ -75,11 +59,32 @@ class Migration(migrations.Migration):
]
operations = [
migrations.AddField(
model_name="fileasset",
name="size",
field=models.PositiveBigIntegerField(null=True),
),
migrations.RunPython(update_workspace_urls),
migrations.RunPython(update_project_urls),
# migrations.AddField(
# model_name="fileasset",
# name="entity_identifier",
# field=models.UUIDField(null=True),
# ),
# migrations.AddField(
# model_name="fileasset",
# name="entity_type",
# field=models.CharField(
# choices=[
# ("issue", "Issue"),
# ("comment", "Comment"),
# ("page", "Page"),
# ],
# null=True,
# ),
# ),
# migrations.AddField(
# model_name="fileasset",
# name="project_id",
# field=models.ForeignKey(
# null=True,
# on_delete=django.db.models.deletion.CASCADE,
# related_name="assets",
# to="db.project",
# ),
# ),
migrations.RunPython(convert_image_sources),
]
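
Each RunPython above is given only a forward function, so Django treats the operation as irreversible and refuses to unapply the migration. If a no-op reverse is acceptable (the rewritten URLs simply stay rewritten), the usual pattern is:

operations = [
    # reverse_code=noop lets "migrate db <previous>" step back over
    # this data migration without trying to undo the URL rewrite.
    migrations.RunPython(
        convert_image_sources, reverse_code=migrations.RunPython.noop
    ),
]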

View File

@@ -0,0 +1,62 @@
# Generated by Django 4.2.7 on 2024-02-02 14:35
from django.db import migrations
from django.conf import settings
# Third party imports
from bs4 import BeautifulSoup
def convert_image_sources(apps, schema_editor):
if settings.USE_MINIO:
prefix1 = (
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/"
)
prefix2 = prefix1
else:
prefix1 = f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/"
prefix2 = (
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
)
Page = apps.get_model("db", "Page")
FileAsset = apps.get_model("db", "FileAsset")
bulk_pages = []
bulk_assets = {}
for page in Page.objects.all():
# Parse the html
soup = BeautifulSoup(page.description_html, "lxml")
img_tags = soup.find_all("img")
for img in img_tags:
src = img.get("src", "")
if src and (src.startswith(prefix1)):
img["src"] = (
f"/api/workspaces/{page.workspace.slug}/projects/{page.project_id}/issues/{page.id}/attachments/{src[len(prefix1): ]}/"
)
bulk_assets[src[len(prefix1): ]] = {"project_id": str(page.project_id)}
page.description_html = str(soup)
bulk_pages.append(page)
# prefix 2
if not settings.USE_MINIO and src and src.startswith(prefix2):
img["src"] = (
f"/api/workspaces/{page.workspace.slug}/projects/{page.project_id}/issues/{page.id}/attachments/{src[len(prefix2): ]}/"
)
page.description_html = str(soup)
bulk_pages.append(page)
Page.objects.bulk_update(bulk_pages, ["description_html"], batch_size=1000)
class Migration(migrations.Migration):
dependencies = [
("db", "0060_fileasset_size"),
]
operations = [
migrations.RunPython(convert_image_sources),
]
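
Page.objects.all() caches every row, including each full description_html document, for the duration of the loop, and page.workspace costs one extra query per page. A hypothetical memory-bounded variant of the same rewrite, streaming rows with iterator() and flushing updates in fixed-size batches:

from bs4 import BeautifulSoup
from django.conf import settings

def convert_image_sources(apps, schema_editor):
    # Same prefixes as in the migration above.
    if settings.USE_MINIO:
        prefixes = [
            f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
        ]
    else:
        prefixes = [
            f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/",
            f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/",
        ]
    Page = apps.get_model("db", "Page")
    batch, batch_size = [], 1000
    # iterator() streams rows instead of caching the whole queryset;
    # select_related() avoids one workspace query per page.
    for page in Page.objects.select_related("workspace").iterator(
        chunk_size=batch_size
    ):
        soup = BeautifulSoup(page.description_html or "", "lxml")
        changed = False
        for img in soup.find_all("img"):
            src = img.get("src", "")
            for prefix in prefixes:
                if src.startswith(prefix):
                    img["src"] = (
                        f"/api/workspaces/{page.workspace.slug}/projects/"
                        f"{page.project_id}/issues/{page.id}/attachments/"
                        f"{src[len(prefix):]}/"
                    )
                    changed = True
                    break
        if changed:
            page.description_html = str(soup)
            batch.append(page)
        if len(batch) >= batch_size:
            Page.objects.bulk_update(batch, ["description_html"])
            batch = []
    if batch:
        Page.objects.bulk_update(batch, ["description_html"])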

View File

@@ -40,6 +40,21 @@ class FileAsset(BaseModel):
null=True,
related_name="assets",
)
project_id = models.ForeignKey(
"db.Project",
on_delete=models.CASCADE,
null=True,
related_name="assets",
)
entity_type = models.CharField(
choices=(
("issue", "Issue"),
("comment", "Comment"),
("page", "Page"),
),
null=True,
)
entity_identifier = models.UUIDField(null=True)
is_deleted = models.BooleanField(default=False)
size = models.PositiveBigIntegerField(null=True)
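
One note on the new ForeignKey: Django derives the database column by appending "_id" to the field name, so a field declared as project_id produces a project_id_id column. The conventional declaration names the field after the related object:

# Conventional naming: the attribute holds the related Project instance,
# and Django exposes the raw key as fileasset.project_id automatically.
project = models.ForeignKey(
    "db.Project",
    on_delete=models.CASCADE,
    null=True,
    related_name="assets",
)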

View File

@@ -3,20 +3,36 @@ from django.conf import settings
def generate_download_presigned_url(object_name, expiration=3600):
"""
Generate a presigned URL to download an object from S3.
:param object_name: The key name of the object in the S3 bucket.
:param expiration: Time in seconds for the presigned URL to remain valid (default is 1 hour).
:return: Presigned URL as a string. If error, returns None.
Generate a presigned URL to download an object from S3, dynamically setting
the Content-Disposition based on the file metadata.
"""
s3_client = boto3.client('s3',
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
region_name=settings.AWS_REGION,
endpoint_url=settings.AWS_S3_ENDPOINT_URL)
# Fetch the object's metadata
metadata = s3_client.head_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=object_name)
# Determine the content type
content_type = metadata.get('ContentType', 'application/octet-stream')
# Example logic to determine Content-Disposition based on content_type or other criteria
if content_type.startswith('image/'):
disposition = 'inline'
else:
disposition = 'attachment'
# Derive the file name from the object key (metadata could supply the original name if stored)
file_name = object_name.split('/')[-1] # Basic way to extract file name
disposition += f'; filename="{file_name}"'
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
'Key': object_name},
'Key': object_name,
'ResponseContentDisposition': disposition,
'ResponseContentType': content_type},
ExpiresIn=expiration)
return response
except Exception as e:
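
Since head_object runs outside the try block, a missing or inaccessible key raises botocore's ClientError to the caller rather than returning None. A sketch of the same flow with the metadata lookup inside the guarded region:

import boto3
from botocore.exceptions import ClientError
from django.conf import settings

def generate_download_presigned_url(object_name, expiration=3600):
    # Sketch: identical flow to the above, but head_object sits inside
    # the try block so a missing key yields None, not an exception.
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name=settings.AWS_REGION,
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
    )
    try:
        metadata = s3_client.head_object(
            Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=object_name
        )
        content_type = metadata.get("ContentType", "application/octet-stream")
        disposition = "inline" if content_type.startswith("image/") else "attachment"
        file_name = object_name.split("/")[-1]
        disposition += f'; filename="{file_name}"'
        return s3_client.generate_presigned_url(
            "get_object",
            Params={
                "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
                "Key": object_name,
                "ResponseContentDisposition": disposition,
                "ResponseContentType": content_type,
            },
            ExpiresIn=expiration,
        )
    except ClientError:
        return None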