dev: improve performance for cycle apis

This commit is contained in:
pablohashescobar 2024-02-19 11:35:35 +05:30
parent ce9ed6b25e
commit bac8aeb4ad
2 changed files with 202 additions and 202 deletions

View File

@ -3,10 +3,7 @@ from rest_framework import serializers
# Module imports # Module imports
from .base import BaseSerializer from .base import BaseSerializer
from .user import UserLiteSerializer
from .issue import IssueStateSerializer from .issue import IssueStateSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import ( from plane.db.models import (
Cycle, Cycle,
CycleIssue, CycleIssue,
@ -14,7 +11,6 @@ from plane.db.models import (
CycleUserProperties, CycleUserProperties,
) )
class CycleWriteSerializer(BaseSerializer): class CycleWriteSerializer(BaseSerializer):
def validate(self, data): def validate(self, data):
if ( if (
@ -33,62 +29,80 @@ class CycleWriteSerializer(BaseSerializer):
class CycleSerializer(BaseSerializer): class CycleSerializer(BaseSerializer):
# workspace and project ids
workspace_id = serializers.PrimaryKeyRelatedField(read_only=True)
project_id = serializers.PrimaryKeyRelatedField(read_only=True)
owned_by_id = serializers.PrimaryKeyRelatedField(read_only=True)
# favorite
is_favorite = serializers.BooleanField(read_only=True) is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True) total_issues = serializers.IntegerField(read_only=True)
# state group wise distribution
cancelled_issues = serializers.IntegerField(read_only=True) cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True) completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True) started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True) unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True) backlog_issues = serializers.IntegerField(read_only=True)
# TODO: Remove once confirmed # estimates
# total_estimates = serializers.IntegerField(read_only=True)
# completed_estimates = serializers.IntegerField(read_only=True)
# started_estimates = serializers.IntegerField(read_only=True)
# method fields
assignees = serializers.SerializerMethodField(read_only=True) assignees = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True) # active | draft | upcoming | completed
started_estimates = serializers.IntegerField(read_only=True)
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project")
status = serializers.CharField(read_only=True) status = serializers.CharField(read_only=True)
def validate(self, data):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError(
"Start date cannot exceed end date"
)
return data
def get_assignees(self, obj): def get_assignees(self, obj):
# Get all the members
members = [ members = [
{ {
"avatar": assignee.avatar,
"display_name": assignee.display_name,
"id": assignee.id, "id": assignee.id,
"display_name": assignee.display_name,
"avatar": assignee.avatar,
} }
for issue_cycle in obj.issue_cycle.prefetch_related( for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees" "issue__assignees"
).all() ).all()
for assignee in issue_cycle.issue.assignees.all() for assignee in issue_cycle.issue.assignees.all()
] ]
# Use a set comprehension to return only the unique objects
unique_objects = {frozenset(item.items()) for item in members}
# Convert the set back to a list of dictionaries # Convert the set back to a list of dictionaries
unique_list = [dict(item) for item in unique_objects] unique_list = [dict(item) for item in {frozenset(item.items()) for item in members}]
return unique_list return unique_list
class Meta: class Meta:
model = Cycle model = Cycle
fields = "__all__" fields = [
read_only_fields = [ # necessary fields
"workspace", "id",
"project", "workspace_id",
"owned_by", "project_id",
# model fields
"name",
"description",
"start_date",
"end_date",
"owned_by_id",
"view_props",
"sort_order",
"external_source",
"external_id",
"progress_snapshot",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
# "total_estimates",
# "completed_estimates",
# "started_estimates",
"assignees",
"status",
] ]
read_only_fields = fields
class CycleIssueSerializer(BaseSerializer): class CycleIssueSerializer(BaseSerializer):

View File

@ -33,7 +33,6 @@ from plane.app.serializers import (
CycleIssueSerializer, CycleIssueSerializer,
CycleFavoriteSerializer, CycleFavoriteSerializer,
IssueSerializer, IssueSerializer,
IssueStateSerializer,
CycleWriteSerializer, CycleWriteSerializer,
CycleUserPropertiesSerializer, CycleUserPropertiesSerializer,
) )
@ -51,7 +50,6 @@ from plane.db.models import (
IssueAttachment, IssueAttachment,
Label, Label,
CycleUserProperties, CycleUserProperties,
IssueSubscriber,
) )
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
@ -73,7 +71,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
) )
def get_queryset(self): def get_queryset(self):
subquery = CycleFavorite.objects.filter( favorite_subquery = CycleFavorite.objects.filter(
user=self.request.user, user=self.request.user,
cycle_id=OuterRef("pk"), cycle_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"), project_id=self.kwargs.get("project_id"),
@ -85,10 +83,24 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
.filter(workspace__slug=self.kwargs.get("slug")) .filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id")) .filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user) .filter(project__project_projectmember__member=self.request.user)
.select_related("project") .select_related("project", "workspace", "owned_by")
.select_related("workspace") .prefetch_related(
.select_related("owned_by") Prefetch(
.annotate(is_favorite=Exists(subquery)) "issue_cycle__issue__assignees",
queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__labels",
queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
)
)
.annotate(is_favorite=Exists(favorite_subquery))
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"issue_cycle", "issue_cycle",
@ -148,29 +160,29 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
), ),
) )
) )
.annotate( # .annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point") # total_estimates=Sum("issue_cycle__issue__estimate_point")
) # )
.annotate( # .annotate(
completed_estimates=Sum( # completed_estimates=Sum(
"issue_cycle__issue__estimate_point", # "issue_cycle__issue__estimate_point",
filter=Q( # filter=Q(
issue_cycle__issue__state__group="completed", # issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True, # issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False, # issue_cycle__issue__is_draft=False,
), # ),
) # )
) # )
.annotate( # .annotate(
started_estimates=Sum( # started_estimates=Sum(
"issue_cycle__issue__estimate_point", # "issue_cycle__issue__estimate_point",
filter=Q( # filter=Q(
issue_cycle__issue__state__group="started", # issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True, # issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False, # issue_cycle__issue__is_draft=False,
), # ),
) # )
) # )
.annotate( .annotate(
status=Case( status=Case(
When( When(
@ -190,22 +202,6 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
output_field=CharField(), output_field=CharField(),
) )
) )
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__labels",
queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
)
)
.order_by("-is_favorite", "name") .order_by("-is_favorite", "name")
.distinct() .distinct()
) )
@ -213,12 +209,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
queryset = self.get_queryset() queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all") cycle_view = request.GET.get("cycle_view", "all")
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
# Update the order by
queryset = queryset.order_by("-is_favorite", "-created_at") queryset = queryset.order_by("-is_favorite", "-created_at")
# Current Cycle # Current Cycle
@ -230,7 +222,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
data = CycleSerializer(queryset, many=True).data data = CycleSerializer(queryset, many=True).data
if len(data): if data:
assignee_distribution = ( assignee_distribution = (
Issue.objects.filter( Issue.objects.filter(
issue_cycle__cycle_id=data[0]["id"], issue_cycle__cycle_id=data[0]["id"],
@ -315,14 +307,14 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
} }
if data[0]["start_date"] and data[0]["end_date"]: if data[0]["start_date"] and data[0]["end_date"]:
data[0]["distribution"][ data[0]["distribution"]["completion_chart"] = (
"completion_chart" burndown_plot(
] = burndown_plot(
queryset=queryset.first(), queryset=queryset.first(),
slug=slug, slug=slug,
project_id=project_id, project_id=project_id,
cycle_id=data[0]["id"], cycle_id=data[0]["id"],
) )
)
return Response(data, status=status.HTTP_200_OK) return Response(data, status=status.HTTP_200_OK)
@ -375,7 +367,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
and cycle.end_date < timezone.now().date() and cycle.end_date < timezone.now().date()
): ):
if "sort_order" in request_data: if "sort_order" in request_data:
# Can only change sort order # Can only change sort order for a completed cycle
request_data = { request_data = {
"sort_order": request_data.get( "sort_order": request_data.get(
"sort_order", cycle.sort_order "sort_order", cycle.sort_order
@ -591,20 +583,18 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
issues = ( issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.filter(project_id=project_id) .filter(project_id=project_id)
.filter(workspace__slug=slug) .filter(workspace__slug=slug)
.select_related("workspace", "project", "state", "parent") .select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module") .prefetch_related(
"assignees",
"labels",
"issue_module__module",
"issue_cycle__cycle",
)
.order_by(order_by) .order_by(order_by)
.filter(**filters) .filter(**filters)
.annotate(module_ids=F("issue_module__module_id"))
.annotate(cycle_id=F("issue_cycle__cycle_id")) .annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate( .annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id")) link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@ -621,11 +611,12 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
is_subscribed=Exists( sub_issues_count=Issue.issue_objects.filter(
IssueSubscriber.objects.filter( parent=OuterRef("id")
subscriber=self.request.user, issue_id=OuterRef("id")
)
) )
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
) )
) )
serializer = IssueSerializer( serializer = IssueSerializer(
@ -636,7 +627,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
def create(self, request, slug, project_id, cycle_id): def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
if not len(issues): if not issues:
return Response( return Response(
{"error": "Issues are required"}, {"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
@ -658,50 +649,48 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
) )
# Get all CycleIssues already created # Get all CycleIssues already created
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) cycle_issues = list(
update_cycle_issue_activity = [] CycleIssue.objects.filter(
record_to_create = [] ~Q(cycle_id=cycle_id), issue_id__in=issues
records_to_update = []
for issue in issues:
cycle_issue = [
cycle_issue
for cycle_issue in cycle_issues
if str(cycle_issue.issue_id) in issues
]
# Update only when cycle changes
if len(cycle_issue):
if cycle_issue[0].cycle_id != cycle_id:
update_cycle_issue_activity.append(
{
"old_cycle_id": str(cycle_issue[0].cycle_id),
"new_cycle_id": str(cycle_id),
"issue_id": str(cycle_issue[0].issue_id),
}
) )
cycle_issue[0].cycle_id = cycle_id )
records_to_update.append(cycle_issue[0]) existing_issues = [
else: str(cycle_issue.issue_id) for cycle_issue in cycle_issues
record_to_create.append( ]
new_issues = list(set(issues) - set(existing_issues))
# New issues to create
created_records = CycleIssue.objects.bulk_create(
[
CycleIssue( CycleIssue(
project_id=project_id, project_id=project_id,
workspace=cycle.workspace, workspace_id=cycle.workspace_id,
created_by=request.user, created_by_id=request.user.id,
updated_by=request.user, updated_by_id=request.user.id,
cycle=cycle, cycle_id=cycle_id,
issue_id=issue, issue_id=issue,
) )
for issue in new_issues
],
batch_size=10,
) )
CycleIssue.objects.bulk_create( # Updated Issues
record_to_create, updated_records = []
batch_size=10, update_cycle_issue_activity = []
ignore_conflicts=True, # Iterate over each cycle_issue in cycle_issues
) for cycle_issue in cycle_issues:
CycleIssue.objects.bulk_update( # Update the cycle_issue's cycle_id
records_to_update, cycle_issue.cycle_id = cycle_id
["cycle"], # Add the modified cycle_issue to the records_to_update list
batch_size=10, updated_records.append(cycle_issue)
# Record the update activity
update_cycle_issue_activity.append(
{
"old_cycle_id": str(cycle_issue.cycle_id),
"new_cycle_id": str(cycle_id),
"issue_id": str(cycle_issue.issue_id),
}
) )
# Capture Issue Activity # Capture Issue Activity
@ -715,7 +704,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
{ {
"updated_cycle_issues": update_cycle_issue_activity, "updated_cycle_issues": update_cycle_issue_activity,
"created_cycle_issues": serializers.serialize( "created_cycle_issues": serializers.serialize(
"json", record_to_create "json", created_records
), ),
} }
), ),
@ -723,16 +712,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
notification=True, notification=True,
origin=request.META.get("HTTP_ORIGIN"), origin=request.META.get("HTTP_ORIGIN"),
) )
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
# Return all Cycle Issues
issues = self.get_queryset().values_list("issue_id", flat=True)
return Response(
IssueSerializer(
Issue.objects.filter(pk__in=issues), many=True
).data,
status=status.HTTP_200_OK,
)
def destroy(self, request, slug, project_id, cycle_id, issue_id): def destroy(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get( cycle_issue = CycleIssue.objects.get(
@ -776,6 +756,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
# Check if any cycle intersects in the given interval
cycles = Cycle.objects.filter( cycles = Cycle.objects.filter(
Q(workspace__slug=slug) Q(workspace__slug=slug)
& Q(project_id=project_id) & Q(project_id=project_id)
@ -785,7 +766,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
| Q(start_date__gte=start_date, end_date__lte=end_date) | Q(start_date__gte=start_date, end_date__lte=end_date)
) )
).exclude(pk=cycle_id) ).exclude(pk=cycle_id)
if cycles.exists(): if cycles.exists():
return Response( return Response(
{ {
@ -909,29 +889,29 @@ class TransferCycleIssueEndpoint(BaseAPIView):
), ),
) )
) )
.annotate( # .annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point") # total_estimates=Sum("issue_cycle__issue__estimate_point")
) # )
.annotate( # .annotate(
completed_estimates=Sum( # completed_estimates=Sum(
"issue_cycle__issue__estimate_point", # "issue_cycle__issue__estimate_point",
filter=Q( # filter=Q(
issue_cycle__issue__state__group="completed", # issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True, # issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False, # issue_cycle__issue__is_draft=False,
), # ),
) # )
) # )
.annotate( # .annotate(
started_estimates=Sum( # started_estimates=Sum(
"issue_cycle__issue__estimate_point", # "issue_cycle__issue__estimate_point",
filter=Q( # filter=Q(
issue_cycle__issue__state__group="started", # issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True, # issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False, # issue_cycle__issue__is_draft=False,
), # ),
) # )
) # )
) )
# Pass the new_cycle queryset to burndown_plot # Pass the new_cycle queryset to burndown_plot
@ -942,6 +922,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
cycle_id=cycle_id, cycle_id=cycle_id,
) )
# Get the assignee distribution
assignee_distribution = ( assignee_distribution = (
Issue.objects.filter( Issue.objects.filter(
issue_cycle__cycle_id=cycle_id, issue_cycle__cycle_id=cycle_id,
@ -980,7 +961,22 @@ class TransferCycleIssueEndpoint(BaseAPIView):
) )
.order_by("display_name") .order_by("display_name")
) )
# assignee distribution serialized
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Get the label distribution
label_distribution = ( label_distribution = (
Issue.objects.filter( Issue.objects.filter(
issue_cycle__cycle_id=cycle_id, issue_cycle__cycle_id=cycle_id,
@ -1019,24 +1015,14 @@ class TransferCycleIssueEndpoint(BaseAPIView):
) )
.order_by("label_name") .order_by("label_name")
) )
# Label distribution serialization
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None,
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
label_distribution_data = [ label_distribution_data = [
{ {
"label_name": item["label_name"], "label_name": item["label_name"],
"color": item["color"], "color": item["color"],
"label_id": str(item["label_id"]) if item["label_id"] else None, "label_id": (
str(item["label_id"]) if item["label_id"] else None
),
"total_issues": item["total_issues"], "total_issues": item["total_issues"],
"completed_issues": item["completed_issues"], "completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"], "pending_issues": item["pending_issues"],
@ -1058,7 +1044,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
"total_estimates": old_cycle.first().total_estimates, "total_estimates": old_cycle.first().total_estimates,
"completed_estimates": old_cycle.first().completed_estimates, "completed_estimates": old_cycle.first().completed_estimates,
"started_estimates": old_cycle.first().started_estimates, "started_estimates": old_cycle.first().started_estimates,
"distribution":{ "distribution": {
"labels": label_distribution_data, "labels": label_distribution_data,
"assignees": assignee_distribution_data, "assignees": assignee_distribution_data,
"completion_chart": completion_chart, "completion_chart": completion_chart,