dev: optimizing cycle and module api views

This commit is contained in:
pablohashescobar 2024-02-16 11:32:13 +05:30
parent 86acd2af04
commit 92663ee778
4 changed files with 103 additions and 136 deletions

View File

@ -42,10 +42,10 @@ class CycleSerializer(BaseSerializer):
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
# estimates
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
#TODO: Remove once confirmed # estimates
# total_estimates = serializers.IntegerField(read_only=True)
# completed_estimates = serializers.IntegerField(read_only=True)
# started_estimates = serializers.IntegerField(read_only=True)
# method fields
assignees = serializers.SerializerMethodField(read_only=True)
@ -55,11 +55,11 @@ class CycleSerializer(BaseSerializer):
def get_assignees(self, obj):
# Get all the members
members = [
{
"id": assignee.id,
"display_name": assignee.display_name,
"avatar": assignee.avatar,
}
{
"id": assignee.id,
"display_name": assignee.display_name,
"avatar": assignee.avatar,
}
for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees"
).all()
@ -95,9 +95,9 @@ class CycleSerializer(BaseSerializer):
"started_issues",
"unstarted_issues",
"backlog_issues",
"total_estimates",
"completed_estimates",
"started_estimates",
# "total_estimates",
# "completed_estimates",
# "started_estimates",
"assignees",
"status",
]
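
Note: the serializer hunk above comments out the estimate IntegerFields together with their matching queryset annotations in the view below. In DRF, a read-only IntegerField simply surfaces a queryset annotation of the same name. A minimal sketch of that pairing, with illustrative relation and field names (not the project's actual code):

from django.db.models import Count, Q
from rest_framework import serializers

class CycleStatsSerializer(serializers.Serializer):
    # Read-only fields surface values computed by same-named queryset annotations.
    total_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)

def with_issue_counts(queryset):
    # Each annotation name matches a read-only field on the serializer above.
    return queryset.annotate(
        total_issues=Count("issue_cycle__issue", distinct=True),
        completed_issues=Count(
            "issue_cycle__issue",
            filter=Q(issue_cycle__issue__state__group="completed"),
            distinct=True,
        ),
    )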

View File

@ -160,29 +160,29 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
),
)
)
.annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate(
completed_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
# .annotate(
# total_estimates=Sum("issue_cycle__issue__estimate_point")
# )
# .annotate(
# completed_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="completed",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
# .annotate(
# started_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="started",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
.annotate(
status=Case(
When(
@ -367,7 +367,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
and cycle.end_date < timezone.now().date()
):
if "sort_order" in request_data:
# Can only change sort order
# Can only change sort order for a completed cycle
request_data = {
"sort_order": request_data.get(
"sort_order", cycle.sort_order
@ -586,7 +586,12 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module", "issue_cycle__cycle")
.prefetch_related(
"assignees",
"labels",
"issue_module__module",
"issue_cycle__cycle",
)
.order_by(order_by)
.filter(**filters)
.annotate(module_ids=F("issue_module__module_id"))
@ -644,7 +649,11 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
)
# Get all CycleIssues already created
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
cycle_issues = list(
CycleIssue.objects.filter(
~Q(cycle_id=cycle_id), issue_id__in=issues
)
)
existing_issues = [
str(cycle_issue.issue_id) for cycle_issue in cycle_issues
]
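
Note: the new ~Q(cycle_id=cycle_id) lookup above pre-filters out links that already point at the target cycle, so the loop further down no longer has to compare cycle ids in Python. A minimal sketch of the negated-Q pattern, with hypothetical ids (CycleIssue assumed from plane.db.models, as in the imports of this file):

from django.db.models import Q
from plane.db.models import CycleIssue

issue_ids = ["7f1e...", "9c2a..."]   # hypothetical issue ids from the payload
target_cycle_id = "c3d4..."          # hypothetical target cycle id

# Links for the requested issues that point at some *other* cycle; links that
# already belong to the target cycle are excluded by the negated Q object.
links_to_move = CycleIssue.objects.filter(
    ~Q(cycle_id=target_cycle_id), issue_id__in=issue_ids
)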
@ -670,9 +679,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
updated_records = []
update_cycle_issue_activity = []
# Iterate over each cycle_issue in cycle_issues
for cycle_issue in [
ci for ci in cycle_issues if str(ci.cycle_id) != str(cycle_id)
]:
for cycle_issue in cycle_issues:
# Update the cycle_issue's cycle_id
cycle_issue.cycle_id = cycle_id
# Add the modified cycle_issue to the records_to_update list
@ -685,7 +692,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
"issue_id": str(cycle_issue.issue_id),
}
)
# Capture Issue Activity
issue_activity.delay(
type="cycle.activity.created",
@ -705,41 +712,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
# Get all the issues for cycle
issue_queryset = (
Issue.issue_objects.filter(pk__in=self.get_queryset().values_list("issue_id", flat=True))
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
return Response(
IssueSerializer(issue_queryset, many=True).data,
status=status.HTTP_200_OK,
)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get(
@ -783,6 +756,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
# Check if any cycle intersects in the given interval
cycles = Cycle.objects.filter(
Q(workspace__slug=slug)
& Q(project_id=project_id)
@ -792,7 +766,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
| Q(start_date__gte=start_date, end_date__lte=end_date)
)
).exclude(pk=cycle_id)
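
Note: the ORed Q branches above enumerate the ways an existing cycle can intersect the requested window. The underlying rule is the usual interval-overlap check; a minimal plain-Python sketch with hypothetical dates:

from datetime import date

def overlaps(start_a, end_a, start_b, end_b):
    # Two closed intervals overlap when each one starts before the other ends.
    return start_a <= end_b and start_b <= end_a

# Example: a cycle from Feb 10-20 intersects a proposed Feb 1-14 window.
assert overlaps(date(2024, 2, 1), date(2024, 2, 14),
                date(2024, 2, 10), date(2024, 2, 20))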
if cycles.exists():
return Response(
{
@ -916,29 +889,29 @@ class TransferCycleIssueEndpoint(BaseAPIView):
),
)
)
.annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate(
completed_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_estimates=Sum(
"issue_cycle__issue__estimate_point",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
# .annotate(
# total_estimates=Sum("issue_cycle__issue__estimate_point")
# )
# .annotate(
# completed_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="completed",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
# .annotate(
# started_estimates=Sum(
# "issue_cycle__issue__estimate_point",
# filter=Q(
# issue_cycle__issue__state__group="started",
# issue_cycle__issue__archived_at__isnull=True,
# issue_cycle__issue__is_draft=False,
# ),
# )
# )
)
# Pass the new_cycle queryset to burndown_plot
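
Note: the annotations commented out above (here and in CycleViewSet) rely on Django's conditional aggregation, i.e. Sum with a filter argument. For reference, a minimal sketch of that pattern, assuming the Cycle model from plane.db.models and the same relation names:

from django.db.models import Q, Sum
from plane.db.models import Cycle

cycles_with_estimates = Cycle.objects.annotate(
    total_estimates=Sum("issue_cycle__issue__estimate_point"),
    completed_estimates=Sum(
        "issue_cycle__issue__estimate_point",
        # Only rows matching the filter contribute to this aggregate.
        filter=Q(issue_cycle__issue__state__group="completed"),
    ),
)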
@ -949,6 +922,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
cycle_id=cycle_id,
)
# Get the assignee distribution
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@ -987,7 +961,22 @@ class TransferCycleIssueEndpoint(BaseAPIView):
)
.order_by("display_name")
)
# assignee distribution serialized
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Get the label distribution
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@ -1026,21 +1015,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
)
.order_by("label_name")
)
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Label distribution serialization
label_distribution_data = [
{
"label_name": item["label_name"],

View File

@ -4,10 +4,8 @@ import json
# Django Imports
from django.utils import timezone
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
from django.core import serializers
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.core.serializers.json import DjangoJSONEncoder
# Third party imports
@ -38,11 +36,9 @@ from plane.db.models import (
ModuleFavorite,
IssueLink,
IssueAttachment,
IssueSubscriber,
ModuleUserProperties,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot
@ -62,7 +58,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
)
def get_queryset(self):
subquery = ModuleFavorite.objects.filter(
favorite_subquery = ModuleFavorite.objects.filter(
user=self.request.user,
module_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
@ -73,7 +69,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
.get_queryset()
.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
.annotate(is_favorite=Exists(subquery))
.annotate(is_favorite=Exists(favorite_subquery))
.select_related("project")
.select_related("workspace")
.select_related("lead")
@ -331,17 +327,16 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
ProjectEntityPermission,
]
def get_queryset(self):
return (
Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
issue_module__module_id=self.kwargs.get("module_id")
issue_module__module_id=self.kwargs.get("module_id"),
)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("labels", "assignees")
.prefetch_related('issue_module__module')
.prefetch_related("issue_module__module")
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@ -384,7 +379,7 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
# create multiple issues inside a module
def create_module_issues(self, request, slug, project_id, module_id):
issues = request.data.get("issues", [])
if not len(issues):
if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
@ -420,15 +415,12 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
)
for issue in issues
]
issues = (self.get_queryset().filter(pk__in=issues))
serializer = IssueSerializer(issues , many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
# create multiple module inside an issue
def create_issue_modules(self, request, slug, project_id, issue_id):
modules = request.data.get("modules", [])
if not len(modules):
if not modules:
return Response(
{"error": "Modules are required"},
status=status.HTTP_400_BAD_REQUEST,
@ -466,10 +458,7 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
for module in modules
]
issue = (self.get_queryset().filter(pk=issue_id).first())
serializer = IssueSerializer(issue)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, module_id, issue_id):
module_issue = ModuleIssue.objects.get(
@ -484,7 +473,9 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=json.dumps({"module_name": module_issue.module.name}),
current_instance=json.dumps(
{"module_name": module_issue.module.name}
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),

View File

@ -160,6 +160,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
.order_by("date")
)
# Burndown plot
for date in date_range:
cumulative_pending_issues = total_issues
total_completed = 0
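
Note: the final hunk only adds a clarifying comment before the per-day loop in burndown_plot; the loop walks the date range and derives the pending count for each day from the running completed total. A minimal sketch of one way such a series can be accumulated, with hypothetical data (the real helper aggregates completed counts per day from a queryset):

from datetime import date, timedelta

total_issues = 10
completed_by_date = {date(2024, 2, 2): 3, date(2024, 2, 4): 2}   # hypothetical

start, end = date(2024, 2, 1), date(2024, 2, 5)
date_range = [start + timedelta(days=i) for i in range((end - start).days + 1)]

chart_data = {}
total_completed = 0
for day in date_range:
    total_completed += completed_by_date.get(day, 0)
    # Issues still pending at the end of this day.
    chart_data[str(day)] = total_issues - total_completed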