dev: refactor pagination

pablohashescobar 2024-02-28 16:54:46 +05:30
parent e82d7a2aa8
commit 84160e3d8d
3 changed files with 191 additions and 47 deletions

View File

@@ -72,7 +72,7 @@ from plane.db.models import (
     IssueRelation,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import issue_grouper
+from plane.utils.grouper import issue_grouper, issue_queryset_grouper
 from plane.utils.issue_filters import issue_filters
 from plane.utils.order_queryset import order_issue_queryset
 from plane.utils.paginator import GroupedOffsetPaginator
@@ -264,32 +264,6 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
             )
-            .annotate(
-                label_ids=Coalesce(
-                    ArrayAgg(
-                        "labels__id",
-                        distinct=True,
-                        filter=~Q(labels__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                assignee_ids=Coalesce(
-                    ArrayAgg(
-                        "assignees__id",
-                        distinct=True,
-                        filter=~Q(assignees__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                module_ids=Coalesce(
-                    ArrayAgg(
-                        "issue_module__module_id",
-                        distinct=True,
-                        filter=~Q(issue_module__module_id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-            )
         ).distinct()

     @method_decorator(gzip_page)
@@ -307,12 +281,7 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
         )

         def on_results(issues):
-            if self.expand or self.fields:
-                return IssueSerializer(
-                    issues, many=True, expand=self.expand, fields=self.fields
-                ).data
-            return issues.values(
+            required_fields = [
                 "id",
                 "name",
                 "state_id",
@@ -326,9 +295,6 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
                 "project_id",
                 "parent_id",
                 "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
                 "sub_issues_count",
                 "created_at",
                 "updated_at",
@@ -338,7 +304,20 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
                 "link_count",
                 "is_draft",
                 "archived_at",
-            )
+            ]
+            if group_by == "assignees__id":
+                required_fields.extend(
+                    ["label_ids", "module_ids", "assignees__id"]
+                )
+            if group_by == "labels__id":
+                required_fields.extend(
+                    ["assignee_ids", "module_ids", "labels__id"]
+                )
+            if group_by == "modules__id":
+                required_fields.extend(
+                    ["assignee_ids", "label_ids", "modules__id"]
+                )
+            return issues.values(*required_fields)

         # Group by
         group_by = request.GET.get("group_by", False)
@@ -349,6 +328,9 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
                 request=request, queryset=issue_queryset, on_results=on_results
             )

+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset, field=group_by
+        )
         # Group paginate
         return self.paginate(
             request=request,
@@ -359,6 +341,14 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
             group_by_fields=issue_grouper(
                 field=group_by, slug=slug, project_id=project_id
            ),
+            count_filter=Q(
+                Q(issue_inbox__status=1)
+                | Q(issue_inbox__status=-1)
+                | Q(issue_inbox__status=2)
+                | Q(issue_inbox__isnull=True),
+                archived_at__isnull=False,
+                is_draft=True,
+            ),
         )

     def create(self, request, slug, project_id):
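Note: the field-selection branch added to on_results reads as follows when pulled out on its own (a minimal runnable sketch; build_required_fields is a hypothetical stand-in, and the base list is abridged to a few of the columns shown in this diff):

# Hypothetical helper mirroring the required_fields logic added above;
# the real view builds this list inline inside on_results.
def build_required_fields(group_by):
    required_fields = [
        "id",
        "name",
        "state_id",
        # ... remaining scalar columns listed in the hunks above ...
        "sub_issues_count",
        "is_draft",
        "archived_at",
    ]
    # Only the id-array annotations that are NOT the grouping key are selected,
    # plus the raw grouping column itself.
    if group_by == "assignees__id":
        required_fields.extend(["label_ids", "module_ids", "assignees__id"])
    if group_by == "labels__id":
        required_fields.extend(["assignee_ids", "module_ids", "labels__id"])
    if group_by == "modules__id":
        required_fields.extend(["assignee_ids", "label_ids", "modules__id"])
    return required_fields


print(build_required_fields("labels__id"))
# base columns plus ['assignee_ids', 'module_ids', 'labels__id'],
# which the view then passes to issues.values(*required_fields)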

View File

@@ -1,5 +1,9 @@
 # Django imports
-from django.db.models import Q
+from django.db.models import Q, F
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce

 # Module imports
 from plane.db.models import State, Label, ProjectMember, Cycle, Module
@@ -256,13 +260,13 @@ def issue_grouper(field, slug, project_id):
                 project_id=project_id,
             ).values_list("id", flat=True)
         )
-    if field == "label_ids":
+    if field == "labels__id":
         return list(
             Label.objects.filter(
                 workspace__slug=slug, project_id=project_id
             ).values_list("id", flat=True)
         )
-    if field == "assignee_ids":
+    if field == "assignees__id":
         return list(
             ProjectMember.objects.filter(
                 workspace__slug=slug, project_id=project_id
@@ -283,9 +287,72 @@ def issue_grouper(field, slug, project_id):
                 workspace__slug=slug, project_id=project_id
             ).values_list("id", flat=True)
         )
-    if field == "module_ids":
+    if field == "modules__id":
         return list(
             Module.objects.filter(
                 workspace__slug=slug, project_id=project_id
             ).values_list("id", flat=True)
         )
+
+
+def issue_queryset_grouper(field, queryset):
+    if field == "assignees__id":
+        return queryset.annotate(
+            label_ids=Coalesce(
+                ArrayAgg(
+                    "labels__id",
+                    distinct=True,
+                    filter=~Q(labels__id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+            module_ids=Coalesce(
+                ArrayAgg(
+                    "issue_module__module_id",
+                    distinct=True,
+                    filter=~Q(issue_module__module_id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+        )
+
+    if field == "labels__id":
+        return queryset.annotate(
+            assignee_ids=Coalesce(
+                ArrayAgg(
+                    "assignees__id",
+                    distinct=True,
+                    filter=~Q(assignees__id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+            module_ids=Coalesce(
+                ArrayAgg(
+                    "issue_module__module_id",
+                    distinct=True,
+                    filter=~Q(issue_module__module_id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+        )
+
+    if field == "modules__id":
+        return queryset.annotate(modules__id=F("issue_module__module_id")).annotate(
+            label_ids=Coalesce(
+                ArrayAgg(
+                    "labels__id",
+                    distinct=True,
+                    filter=~Q(labels__id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+            assignee_ids=Coalesce(
+                ArrayAgg(
+                    "assignees__id",
+                    distinct=True,
+                    filter=~Q(assignees__id__isnull=True),
+                ),
+                Value([], output_field=ArrayField(UUIDField())),
+            ),
+        )
+    return queryset
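Note: the new issue_queryset_grouper only annotates the id arrays that are not the grouping key; the grouping column itself stays a join field, so an issue repeats once per group value. A hedged usage sketch follows (the Issue import, the filter, and project_id are illustrative and not part of this commit):

# Illustrative only: annotate a project's issues for label-based grouping.
# Issue is assumed to be the issue model exported by plane.db.models.
from plane.db.models import Issue
from plane.utils.grouper import issue_queryset_grouper

issues = Issue.objects.filter(project_id=project_id)  # project_id: placeholder
issues = issue_queryset_grouper(field="labels__id", queryset=issues)

# Each row now carries assignee_ids / module_ids arrays (empty lists via
# Coalesce when nothing is assigned); labels__id stays a join column, so an
# issue with three labels yields three rows here.
rows = issues.values("id", "name", "assignee_ids", "module_ids", "labels__id")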

View File

@@ -1,6 +1,7 @@
 # Python imports
 import math
 from collections.abc import Sequence
+from collections import defaultdict

 # Django imports
 from django.db.models import Window, F, Count, Q
@@ -154,12 +155,26 @@

 class GroupedOffsetPaginator(OffsetPaginator):
+    FIELD_MAPPER = {
+        "labels__id": "label_ids",
+        "assignees__id": "assignee_ids",
+        "modules__id": "module_ids",
+    }
+
     def __init__(
-        self, queryset, group_by_field_name, group_by_fields, *args, **kwargs
+        self,
+        queryset,
+        group_by_field_name,
+        group_by_fields,
+        count_filter,
+        *args,
+        **kwargs,
     ):
         super().__init__(queryset, *args, **kwargs)
         self.group_by_field_name = group_by_field_name
         self.group_by_fields = group_by_fields
+        self.count_filter = count_filter

     def get_result(self, limit=100, cursor=None):
         # offset is page #
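Note: FIELD_MAPPER links each raw grouping column to the annotated id-array key the paginator writes back onto result rows; a tiny standalone illustration (not commit code):

FIELD_MAPPER = {
    "labels__id": "label_ids",
    "assignees__id": "assignee_ids",
    "modules__id": "module_ids",
}

group_by_field_name = "labels__id"
# process_results picks the multi-group path for any mapped field...
print(group_by_field_name in FIELD_MAPPER)    # True
# ...and stores the collected group ids under the mapped key on each row.
print(FIELD_MAPPER.get(group_by_field_name))  # label_ids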
@@ -189,10 +204,6 @@ class GroupedOffsetPaginator(OffsetPaginator):
         # Compute the results
         results = {}
         queryset = queryset.annotate(
-            # group_rank=Window(
-            #     expression=DenseRank(),
-            #     order_by=F(self.group_by_field_name).asc()
-            # ),
             row_number=Window(
                 expression=RowNumber(),
                 partition_by=[F(self.group_by_field_name)],
@@ -237,17 +248,91 @@ class GroupedOffsetPaginator(OffsetPaginator):
             max_hits=max_hits,
         )

-    def process_results(self, results):
+    def __get_total_queryset(self):
+        return self.queryset.values(self.group_by_field_name).annotate(
+            count=Count(
+                self.group_by_field_name,
+                filter=self.count_filter,
+            )
+        )
+
+    def __get_total_dict(self):
+        total_group_dict = {}
+        for group in self.__get_total_queryset():
+            total_group_dict[str(group.get(self.group_by_field_name))] = (
+                total_group_dict.get(
+                    str(group.get(self.group_by_field_name)), 0
+                )
+                + (1 if group.get("count") == 0 else group.get("count"))
+            )
+        return total_group_dict
+
+    def __query_multi_grouper(self, results):
+        total_group_dict = self.__get_total_dict()
+
+        # Preparing a dict to keep track of group IDs associated with each label ID
+        result_group_mapping = defaultdict(set)
+        # Preparing a dict to group result by group ID
+        grouped_by_field_name = defaultdict(list)
+
+        # Iterate over results to fill the above dictionaries
+        for result in results:
+            result_id = result["id"]
+            group_id = result[self.group_by_field_name]
+            result_group_mapping[str(result_id)].add(str(group_id))
+
+        def result_already_added(result, group):
+            for existing_issue in group:
+                if existing_issue["id"] == result["id"]:
+                    return True
+            return False
+
+        # Adding group_ids key to each issue and grouping by group_name
+        for result in results:
+            result_id = result["id"]
+            group_ids = list(result_group_mapping[str(result_id)])
+            result[self.FIELD_MAPPER.get(self.group_by_field_name)] = (
+                [] if "None" in group_ids else group_ids
+            )
+            # If a result belongs to multiple groups, add it to each group
+            for group_id in group_ids:
+                if not result_already_added(
+                    result, grouped_by_field_name[group_id]
+                ):
+                    grouped_by_field_name[group_id].append(result)
+
+        # Convert grouped_by_field_name back to a list for each group
+        processed_results = {
+            str(group_id): {
+                "results": issues,
+                "total_results": total_group_dict.get(str(group_id)),
+            }
+            for group_id, issues in grouped_by_field_name.items()
+        }
+        return processed_results
+
+    def __query_grouper(self, results):
+        total_group_dict = self.__get_total_dict()
         processed_results = {}
         for result in results:
             group_value = str(result.get(self.group_by_field_name))
             if group_value not in processed_results:
                 processed_results[str(group_value)] = {
                     "results": [],
+                    "total_results": total_group_dict.get(group_value),
                 }
             processed_results[str(group_value)]["results"].append(result)
         return processed_results

+    def process_results(self, results):
+        if self.group_by_field_name in self.FIELD_MAPPER:
+            processed_results = self.__query_multi_grouper(results=results)
+        else:
+            processed_results = self.__query_grouper(results=results)
+        return processed_results
+

 class BasePaginator:
     """BasePaginator class can be inherited by any View to return a paginated view"""
@@ -283,6 +368,7 @@ class BasePaginator:
         controller=None,
         group_by_fields=None,
         group_by_field_name=None,
+        count_filter=None,
         **paginator_kwargs,
     ):
         """Paginate the request"""
@@ -301,6 +387,7 @@ class BasePaginator:
         if group_by_fields and group_by_field_name:
             paginator_kwargs["group_by_fields"] = group_by_fields
             paginator_kwargs["group_by_field_name"] = group_by_field_name
+            paginator_kwargs["count_filter"] = count_filter

         paginator = paginator_cls(**paginator_kwargs)
         try: