dev: improve performance for cycle apis

pablohashescobar 2024-02-19 11:35:35 +05:30
parent ce9ed6b25e
commit bac8aeb4ad
2 changed files with 202 additions and 202 deletions

View File

@@ -3,10 +3,7 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .issue import IssueStateSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import (
Cycle,
CycleIssue,
@@ -14,7 +11,6 @@ from plane.db.models import (
CycleUserProperties,
)
class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
if (
@@ -33,62 +29,80 @@ class CycleWriteSerializer(BaseSerializer):
class CycleSerializer(BaseSerializer):
# workspace and project ids
workspace_id = serializers.PrimaryKeyRelatedField(read_only=True)
project_id = serializers.PrimaryKeyRelatedField(read_only=True)
owned_by_id = serializers.PrimaryKeyRelatedField(read_only=True)
# favorite
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
# state group wise distribution
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
# TODO: Remove once confirmed  # estimates
# total_estimates = serializers.IntegerField(read_only=True)
# completed_estimates = serializers.IntegerField(read_only=True)
# started_estimates = serializers.IntegerField(read_only=True)
# method fields
assignees = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project")
# active | draft | upcoming | completed
status = serializers.CharField(read_only=True)
def validate(self, data):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError(
"Start date cannot exceed end date"
)
return data
def get_assignees(self, obj):
# Get all the members
members = [
{
"avatar": assignee.avatar,
"display_name": assignee.display_name,
"id": assignee.id,
"display_name": assignee.display_name,
"avatar": assignee.avatar,
}
for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees"
).all()
for assignee in issue_cycle.issue.assignees.all()
]
# Use a set comprehension to return only the unique objects
unique_objects = {frozenset(item.items()) for item in members}
# Convert the set back to a list of dictionaries
unique_list = [dict(item) for item in unique_objects]
unique_list = [dict(item) for item in {frozenset(item.items()) for item in members}]
return unique_list
class Meta:
model = Cycle
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"owned_by",
fields = [
# necessary fields
"id",
"workspace_id",
"project_id",
# model fields
"name",
"description",
"start_date",
"end_date",
"owned_by_id",
"view_props",
"sort_order",
"external_source",
"external_id",
"progress_snapshot",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
# "total_estimates",
# "completed_estimates",
# "started_estimates",
"assignees",
"status",
]
read_only_fields = fields
class CycleIssueSerializer(BaseSerializer):
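A note on the rewritten CycleSerializer above: nested workspace/project/owned_by serializers are replaced by flat read-only *_id fields, the field list is whitelisted instead of "__all__", and get_assignees de-duplicates plain dicts built from the prefetched assignees. A minimal standalone sketch of that de-duplication pattern (the sample data below is hypothetical; the real input is built from issue_cycle__issue__assignees):

# De-duplicate a list of dicts by turning each dict into a hashable
# frozenset of its items; this only works when every value is hashable.
members = [
    {"id": 1, "display_name": "ana", "avatar": ""},
    {"id": 1, "display_name": "ana", "avatar": ""},  # duplicate
    {"id": 2, "display_name": "bo", "avatar": ""},
]
unique_members = [dict(item) for item in {frozenset(m.items()) for m in members}]
# Set iteration order is arbitrary, so sort the result if the API
# response needs a stable ordering.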

View File

@@ -33,7 +33,6 @@ from plane.app.serializers import (
CycleIssueSerializer,
CycleFavoriteSerializer,
IssueSerializer,
IssueStateSerializer,
CycleWriteSerializer,
CycleUserPropertiesSerializer,
)
@@ -51,7 +50,6 @@ from plane.db.models import (
IssueAttachment,
Label,
CycleUserProperties,
IssueSubscriber,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
@@ -73,7 +71,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
def get_queryset(self):
subquery = CycleFavorite.objects.filter(
favorite_subquery = CycleFavorite.objects.filter(
user=self.request.user,
cycle_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
@@ -85,10 +83,24 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
.annotate(is_favorite=Exists(subquery))
.select_related("project", "workspace", "owned_by")
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__labels",
queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
)
)
.annotate(is_favorite=Exists(favorite_subquery))
.annotate(
total_issues=Count(
"issue_cycle",
@@ -148,29 +160,29 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
),
)
)
.annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate(
completed_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
# .annotate(
# total_estimates=Sum("issue_cycle__issue__estimate_point")
# )
# .annotate(
# completed_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="completed",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
# .annotate(
# started_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="started",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
.annotate(
status=Case(
When(
@@ -190,22 +202,6 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
output_field=CharField(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__labels",
queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
)
)
.order_by("-is_favorite", "name")
.distinct()
)
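The get_queryset above relies on two patterns worth calling out: the favorite flag is annotated with a correlated EXISTS subquery, and the assignee/label relations are prefetched with inner querysets restricted via .only() to the columns the serializer reads. A condensed sketch of the same idea, reusing the model names from this diff (request, project_id, and the model imports are assumed to be in scope as in the view):

from django.db.models import Exists, OuterRef, Prefetch

favorite_subquery = CycleFavorite.objects.filter(
    user=request.user,
    cycle_id=OuterRef("pk"),
    project_id=project_id,
)
cycles = (
    Cycle.objects.filter(project_id=project_id)
    # single-valued relations: joined into the same SELECT
    .select_related("project", "workspace", "owned_by")
    # many-valued relations: batched secondary queries limited to the
    # columns the serializer actually needs
    .prefetch_related(
        Prefetch(
            "issue_cycle__issue__assignees",
            queryset=User.objects.only("avatar", "first_name", "id"),
        )
    )
    # correlated EXISTS instead of a join + distinct for the flag
    .annotate(is_favorite=Exists(favorite_subquery))
)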
@@ -213,12 +209,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def list(self, request, slug, project_id):
queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all")
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
# Update the order by
queryset = queryset.order_by("-is_favorite", "-created_at")
# Current Cycle
@@ -230,7 +222,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
data = CycleSerializer(queryset, many=True).data
if len(data):
if data:
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=data[0]["id"],
@@ -315,14 +307,14 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
}
if data[0]["start_date"] and data[0]["end_date"]:
data[0]["distribution"][
"completion_chart"
] = burndown_plot(
data[0]["distribution"]["completion_chart"] = (
burndown_plot(
queryset=queryset.first(),
slug=slug,
project_id=project_id,
cycle_id=data[0]["id"],
)
)
return Response(data, status=status.HTTP_200_OK)
@@ -375,7 +367,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
and cycle.end_date < timezone.now().date()
):
if "sort_order" in request_data:
# Can only change sort order
# Can only change sort order for a completed cycle
request_data = {
"sort_order": request_data.get(
"sort_order", cycle.sort_order
@@ -591,20 +583,18 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.prefetch_related(
"assignees",
"labels",
"issue_module__module",
"issue_cycle__cycle",
)
.order_by(order_by)
.filter(**filters)
.annotate(module_ids=F("issue_module__module_id"))
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@@ -621,11 +611,12 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.values("count")
)
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
subscriber=self.request.user, issue_id=OuterRef("id")
)
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
serializer = IssueSerializer(
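The link_count, attachment_count and sub_issues_count annotations above all use the same correlated-subquery counting trick, avoiding extra joins and GROUP BY on the outer queryset. A trimmed sketch of one of them with the names from this diff (a queryset passed to annotate() is implicitly treated as a subquery by Django):

from django.db.models import F, Func, OuterRef

issues = Issue.issue_objects.annotate(
    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
    .order_by()  # clear any default ordering inside the subquery
    .annotate(count=Func(F("id"), function="Count"))  # COUNT(id) per outer issue
    .values("count")  # single-column result usable as a scalar subquery
)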
@@ -636,7 +627,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])
if not len(issues):
if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
@@ -658,50 +649,48 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
)
# Get all CycleIssues already created
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
update_cycle_issue_activity = []
record_to_create = []
records_to_update = []
for issue in issues:
cycle_issue = [
cycle_issue
for cycle_issue in cycle_issues
if str(cycle_issue.issue_id) in issues
]
# Update only when cycle changes
if len(cycle_issue):
if cycle_issue[0].cycle_id != cycle_id:
update_cycle_issue_activity.append(
{
"old_cycle_id": str(cycle_issue[0].cycle_id),
"new_cycle_id": str(cycle_id),
"issue_id": str(cycle_issue[0].issue_id),
}
cycle_issues = list(
CycleIssue.objects.filter(
~Q(cycle_id=cycle_id), issue_id__in=issues
)
cycle_issue[0].cycle_id = cycle_id
records_to_update.append(cycle_issue[0])
else:
record_to_create.append(
)
existing_issues = [
str(cycle_issue.issue_id) for cycle_issue in cycle_issues
]
new_issues = list(set(issues) - set(existing_issues))
# New issues to create
created_records = CycleIssue.objects.bulk_create(
[
CycleIssue(
project_id=project_id,
workspace=cycle.workspace,
created_by=request.user,
updated_by=request.user,
cycle=cycle,
workspace_id=cycle.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
cycle_id=cycle_id,
issue_id=issue,
)
for issue in new_issues
],
batch_size=10,
)
CycleIssue.objects.bulk_create(
record_to_create,
batch_size=10,
ignore_conflicts=True,
)
CycleIssue.objects.bulk_update(
records_to_update,
["cycle"],
batch_size=10,
# Updated Issues
updated_records = []
update_cycle_issue_activity = []
# Iterate over each cycle_issue that needs to move into this cycle
for cycle_issue in cycle_issues:
# Record the update activity first, while cycle_id still holds the old cycle
update_cycle_issue_activity.append(
{
"old_cycle_id": str(cycle_issue.cycle_id),
"new_cycle_id": str(cycle_id),
"issue_id": str(cycle_issue.issue_id),
}
)
# Now point the link at the new cycle and queue it for bulk update
cycle_issue.cycle_id = cycle_id
updated_records.append(cycle_issue)
# Capture Issue Activity
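The rewritten create() above replaces the per-issue loop-and-compare with set arithmetic plus bulk writes: one query fetches links that already exist in other cycles, a set difference yields the genuinely new issue ids, and those are inserted with a single bulk_create. A reduced sketch of that split, reusing the names from the diff; the final bulk_update is an assumption added only to make the sketch self-contained, since the hunk shown above ends before any persistence of the re-pointed rows:

from django.db.models import Q

# Links that already exist but currently point at a different cycle
existing_links = list(
    CycleIssue.objects.filter(~Q(cycle_id=cycle_id), issue_id__in=issues)
)
existing_ids = {str(link.issue_id) for link in existing_links}
new_issue_ids = set(issues) - existing_ids

# One INSERT (per batch) for all genuinely new links
created_records = CycleIssue.objects.bulk_create(
    [
        CycleIssue(
            project_id=project_id,
            workspace_id=cycle.workspace_id,
            created_by_id=request.user.id,
            updated_by_id=request.user.id,
            cycle_id=cycle_id,
            issue_id=issue_id,
        )
        for issue_id in new_issue_ids
    ],
    batch_size=10,
)

# Assumed follow-up (not visible in the hunk above): re-point the
# existing links and persist them in batched UPDATEs.
for link in existing_links:
    link.cycle_id = cycle_id
CycleIssue.objects.bulk_update(existing_links, ["cycle_id"], batch_size=10)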
@@ -715,7 +704,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
{
"updated_cycle_issues": update_cycle_issue_activity,
"created_cycle_issues": serializers.serialize(
"json", record_to_create
"json", created_records
),
}
),
@@ -723,16 +712,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
# Return all Cycle Issues
issues = self.get_queryset().values_list("issue_id", flat=True)
return Response(
IssueSerializer(
Issue.objects.filter(pk__in=issues), many=True
).data,
status=status.HTTP_200_OK,
)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get(
@@ -776,6 +756,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
# Check if any cycle intersects in the given interval
cycles = Cycle.objects.filter(
Q(workspace__slug=slug)
& Q(project_id=project_id)
@@ -785,7 +766,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
| Q(start_date__gte=start_date, end_date__lte=end_date)
)
).exclude(pk=cycle_id)
if cycles.exists():
return Response(
{
@@ -909,29 +889,29 @@ class TransferCycleIssueEndpoint(BaseAPIView):
),
)
)
.annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate(
completed_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
# .annotate(
# total_estimates=Sum("issue_cycle__issue__estimate_point")
# )
# .annotate(
# completed_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="completed",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
# .annotate(
# started_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="started",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
)
# Pass the new_cycle queryset to burndown_plot
@@ -942,6 +922,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
cycle_id=cycle_id,
)
# Get the assignee distribution
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@@ -980,7 +961,22 @@ class TransferCycleIssueEndpoint(BaseAPIView):
)
.order_by("display_name")
)
# Assignee distribution serialization
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Get the label distribution
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@@ -1019,24 +1015,14 @@ class TransferCycleIssueEndpoint(BaseAPIView):
)
.order_by("label_name")
)
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None,
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Label distribution serialization
label_distribution_data = [
{
"label_name": item["label_name"],
"color": item["color"],
"label_id": str(item["label_id"]) if item["label_id"] else None,
"label_id": (
str(item["label_id"]) if item["label_id"] else None
),
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],