dev: grouped pagination for empty groups

pablohashescobar 2024-02-29 12:28:03 +05:30
parent 3929f97167
commit db31644313
4 changed files with 116 additions and 399 deletions
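
Grouped pagination previously built its response dict lazily from the rows on the current page, so a group with no issues never appeared in the payload at all. This change has the views compute every possible group value up front (the new issue_group_values helper) and hand that list to GroupedOffsetPaginator, which seeds one bucket per value. A sketch of the resulting payload shape — the "results"/"total_results" keys come from the diff below, the issue dicts are made up:

# Empty groups now survive pagination:
{
    "urgent": {"results": [], "total_results": 0},
    "high": {"results": [{"id": 1}, {"id": 2}], "total_results": 2},
}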

View File

@@ -75,6 +75,7 @@ from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import (
issue_queryset_grouper,
issue_on_results,
issue_group_values,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
@@ -273,6 +274,11 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
@@ -1849,11 +1855,6 @@ class IssueDraftViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET")
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
order_by_param = request.GET.get("order_by", "-created_at")
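
Condensed, the new wiring in the grouped paginate call above looks like this (a sketch using the names from the hunk; the count_filter here is a simplified stand-in for the real inbox filter):

return self.paginate(
    request=request,
    queryset=issue_queryset,
    on_results=lambda issues: issue_on_results(group_by=group_by, issues=issues),
    paginator_cls=GroupedOffsetPaginator,
    group_by_fields=issue_group_values(
        field=group_by, slug=slug, project_id=project_id
    ),
    group_by_field_name=group_by,
    count_filter=Q(issue_inbox__isnull=True),  # stand-in; see the hunk above
)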

View File

@@ -49,10 +49,14 @@ from plane.db.models import (
IssueVote,
ProjectPublicMember,
)
from plane.utils.grouper import (
issue_queryset_grouper,
issue_on_results,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
class IssueCommentPublicViewSet(BaseViewSet):
serializer_class = IssueCommentSerializer
@@ -520,46 +524,15 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = (
Issue.issue_objects.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.select_related("project", "workspace", "state", "parent")
.prefetch_related("assignees", "labels")
.prefetch_related(
Prefetch(
"issue_reactions",
queryset=IssueReaction.objects.select_related("actor"),
)
)
.prefetch_related(
Prefetch(
"votes",
queryset=IssueVote.objects.select_related("actor"),
)
)
.filter(**filters)
Issue.objects.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(is_draft=True)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
@@ -574,112 +547,57 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
priority_order = (
priority_order
if order_by_param == "priority"
else priority_order[::-1]
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
).order_by("priority_order")
# State Ordering
elif order_by_param in [
"state__name",
"state__group",
"-state__name",
"-state__group",
]:
state_order = (
state_order
if order_by_param in ["state__name", "state__group"]
else state_order[::-1]
)
issue_queryset = issue_queryset.annotate(
state_order=Case(
*[
When(state__group=state_group, then=Value(i))
for i, state_group in enumerate(state_order)
],
default=Value(len(state_order)),
output_field=CharField(),
)
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
).order_by(
"-max_values"
if order_by_param.startswith("-")
else "max_values"
)
else:
issue_queryset = issue_queryset.order_by(order_by_param)
issues = IssuePublicSerializer(issue_queryset, many=True).data
state_group_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
states = (
State.objects.filter(
~Q(name="Triage"),
workspace__slug=slug,
project_id=project_id,
)
.annotate(
custom_order=Case(
*[
When(group=value, then=Value(index))
for index, value in enumerate(state_group_order)
],
default=Value(len(state_group_order)),
output_field=IntegerField(),
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
).distinct()
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = self.get_queryset().filter(**filters)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
# List Paginate
if not group_by:
return self.paginate(
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
)
.values("name", "group", "color", "id")
.order_by("custom_order", "sequence")
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
)
labels = Label.objects.filter(
workspace__slug=slug, project_id=project_id
).values("id", "name", "color", "parent")
## Grouping the results
group_by = request.GET.get("group_by", False)
if group_by:
issues = group_results(issues, group_by)
return Response(
{
"issues": issues,
"states": states,
"labels": labels,
},
status=status.HTTP_200_OK,
# Group paginate
return self.paginate(
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
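
The public endpoint drops its hand-rolled priority/state ordering, the one-shot serialization, and the Python-side group_results pass in favor of the shared helpers: order_issue_queryset for ordering, issue_queryset_grouper for grouping, and GroupedOffsetPaginator for the grouped page. A hedged before/after sketch of the response shape (keys and data are illustrative):

# Before: everything serialized at once, then grouped in Python.
{"issues": {"high": [...]}, "states": [...], "labels": [...]}
# After: a paginated grouped payload; empty groups survive when the
# caller supplies group_by_fields.
{"high": {"results": [...], "total_results": 2},
 "none": {"results": [], "total_results": 0}}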

View File

@@ -1,5 +1,5 @@
# Django imports
from django.db.models import Q, F
from django.db.models import Q, F, QuerySet
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Value, UUIDField
@@ -9,248 +9,6 @@ from django.db.models.functions import Coalesce
from plane.db.models import State, Label, ProjectMember, Cycle, Module
def resolve_keys(group_keys, value):
"""resolve keys to a key which will be used for
grouping
Args:
group_keys (string): key which will be used for grouping
value (obj): data value
Returns:
string: the key which will be used for grouping
"""
keys = group_keys.split(".")
for key in keys:
value = value.get(key, None)
return value
def group_results(results_data, group_by, sub_group_by=False):
"""group results data into certain group_by
Args:
results_data (obj): complete results data
group_by (key): string
Returns:
obj: grouped results
"""
if sub_group_by:
main_responsive_dict = dict()
if sub_group_by == "priority":
main_responsive_dict = {
"urgent": {},
"high": {},
"medium": {},
"low": {},
"none": {},
}
for value in results_data:
main_group_attribute = resolve_keys(sub_group_by, value)
group_attribute = resolve_keys(group_by, value)
if isinstance(main_group_attribute, list) and not isinstance(
group_attribute, list
):
if len(main_group_attribute):
for attrib in main_group_attribute:
if str(attrib) not in main_responsive_dict:
main_responsive_dict[str(attrib)] = {}
if (
str(group_attribute)
in main_responsive_dict[str(attrib)]
):
main_responsive_dict[str(attrib)][
str(group_attribute)
].append(value)
else:
main_responsive_dict[str(attrib)][
str(group_attribute)
] = []
main_responsive_dict[str(attrib)][
str(group_attribute)
].append(value)
else:
if str(None) not in main_responsive_dict:
main_responsive_dict[str(None)] = {}
if str(group_attribute) in main_responsive_dict[str(None)]:
main_responsive_dict[str(None)][
str(group_attribute)
].append(value)
else:
main_responsive_dict[str(None)][
str(group_attribute)
] = []
main_responsive_dict[str(None)][
str(group_attribute)
].append(value)
elif isinstance(group_attribute, list) and not isinstance(
main_group_attribute, list
):
if str(main_group_attribute) not in main_responsive_dict:
main_responsive_dict[str(main_group_attribute)] = {}
if len(group_attribute):
for attrib in group_attribute:
if (
str(attrib)
in main_responsive_dict[str(main_group_attribute)]
):
main_responsive_dict[str(main_group_attribute)][
str(attrib)
].append(value)
else:
main_responsive_dict[str(main_group_attribute)][
str(attrib)
] = []
main_responsive_dict[str(main_group_attribute)][
str(attrib)
].append(value)
else:
if (
str(None)
in main_responsive_dict[str(main_group_attribute)]
):
main_responsive_dict[str(main_group_attribute)][
str(None)
].append(value)
else:
main_responsive_dict[str(main_group_attribute)][
str(None)
] = []
main_responsive_dict[str(main_group_attribute)][
str(None)
].append(value)
elif isinstance(group_attribute, list) and isinstance(
main_group_attribute, list
):
if len(main_group_attribute):
for main_attrib in main_group_attribute:
if str(main_attrib) not in main_responsive_dict:
main_responsive_dict[str(main_attrib)] = {}
if len(group_attribute):
for attrib in group_attribute:
if (
str(attrib)
in main_responsive_dict[str(main_attrib)]
):
main_responsive_dict[str(main_attrib)][
str(attrib)
].append(value)
else:
main_responsive_dict[str(main_attrib)][
str(attrib)
] = []
main_responsive_dict[str(main_attrib)][
str(attrib)
].append(value)
else:
if (
str(None)
in main_responsive_dict[str(main_attrib)]
):
main_responsive_dict[str(main_attrib)][
str(None)
].append(value)
else:
main_responsive_dict[str(main_attrib)][
str(None)
] = []
main_responsive_dict[str(main_attrib)][
str(None)
].append(value)
else:
if str(None) not in main_responsive_dict:
main_responsive_dict[str(None)] = {}
if len(group_attribute):
for attrib in group_attribute:
if str(attrib) in main_responsive_dict[str(None)]:
main_responsive_dict[str(None)][
str(attrib)
].append(value)
else:
main_responsive_dict[str(None)][
str(attrib)
] = []
main_responsive_dict[str(None)][
str(attrib)
].append(value)
else:
if str(None) in main_responsive_dict[str(None)]:
main_responsive_dict[str(None)][str(None)].append(
value
)
else:
main_responsive_dict[str(None)][str(None)] = []
main_responsive_dict[str(None)][str(None)].append(
value
)
else:
main_group_attribute = resolve_keys(sub_group_by, value)
group_attribute = resolve_keys(group_by, value)
if str(main_group_attribute) not in main_responsive_dict:
main_responsive_dict[str(main_group_attribute)] = {}
if (
str(group_attribute)
in main_responsive_dict[str(main_group_attribute)]
):
main_responsive_dict[str(main_group_attribute)][
str(group_attribute)
].append(value)
else:
main_responsive_dict[str(main_group_attribute)][
str(group_attribute)
] = []
main_responsive_dict[str(main_group_attribute)][
str(group_attribute)
].append(value)
return main_responsive_dict
else:
response_dict = {}
if group_by == "priority":
response_dict = {
"urgent": [],
"high": [],
"medium": [],
"low": [],
"none": [],
}
for value in results_data:
group_attribute = resolve_keys(group_by, value)
if isinstance(group_attribute, list):
if len(group_attribute):
for attrib in group_attribute:
if str(attrib) in response_dict:
response_dict[str(attrib)].append(value)
else:
response_dict[str(attrib)] = []
response_dict[str(attrib)].append(value)
else:
if str(None) in response_dict:
response_dict[str(None)].append(value)
else:
response_dict[str(None)] = []
response_dict[str(None)].append(value)
else:
if str(group_attribute) in response_dict:
response_dict[str(group_attribute)].append(value)
else:
response_dict[str(group_attribute)] = []
response_dict[str(group_attribute)].append(value)
return response_dict
def issue_queryset_grouper(field, queryset):
if field == "assignees__id":
return queryset.annotate(
@@ -376,3 +134,28 @@ def issue_on_results(issues, group_by):
else:
required_fields.extend(["assignee_ids", "label_ids", "module_ids"])
return issues.values(*required_fields)
def issue_group_values(field, slug, project_id):
if field == "state_id":
return list(State.objects.filter(
workspace__slug=slug, project_id=project_id
).values_list("id", flat=True))
if field == "labels__id":
return list(Label.objects.filter(
workspace__slug=slug, project_id=project_id
).values_list("id", flat=True)) + ["None"]
if field == "assignees__id":
return list(ProjectMember.objects.filter(
workspace__slug=slug, project_id=project_id, is_active=True,
).values_list("member_id", flat=True)) + ["None"]
if field == "modules__id":
return list(Module.objects.filter(
workspace__slug=slug, project_id=project_id
).values_list("id", flat=True)) + ["None"]
if field == "cycle_id":
return list(Cycle.objects.filter(
workspace__slug=slug, project_id=project_id
).values_list("id", flat=True)) + ["None"]
if field == "priority":
return ["low", "medium", "high", "urgent", "none"]
return []
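
Hypothetical return values for the helper above ("acme" and pid are placeholders):

issue_group_values(field="priority", slug="acme", project_id=pid)
# -> ["low", "medium", "high", "urgent", "none"]
issue_group_values(field="labels__id", slug="acme", project_id=pid)
# -> [<label ids for the project>, "None"]  # "None" buckets unlabeled issues
issue_group_values(field="team", slug="acme", project_id=pid)
# -> []  # unrecognized fields fall back to an empty list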

View File

@@ -166,12 +166,14 @@ class GroupedOffsetPaginator(OffsetPaginator):
self,
queryset,
group_by_field_name,
group_by_fields,
count_filter,
*args,
**kwargs,
):
super().__init__(queryset, *args, **kwargs)
self.group_by_field_name = group_by_field_name
self.group_by_fields = group_by_fields
self.count_filter = count_filter
def get_result(self, limit=100, cursor=None):
@@ -266,6 +268,22 @@ class GroupedOffsetPaginator(OffsetPaginator):
return total_group_dict
def __get_field_dict(self):
total_group_dict = self.__get_total_dict()
return {
str(field): {
"results": [],
"total_results": total_group_dict.get(str(field), 0),
}
for field in self.group_by_fields
}
def __result_already_added(self, result, group):
for existing_issue in group:
if existing_issue["id"] == result["id"]:
return True
return False
def __query_multi_grouper(self, results):
total_group_dict = self.__get_total_dict()
@@ -281,12 +299,6 @@ class GroupedOffsetPaginator(OffsetPaginator):
group_id = result[self.group_by_field_name]
result_group_mapping[str(result_id)].add(str(group_id))
def result_already_added(result, group):
for existing_issue in group:
if existing_issue["id"] == result["id"]:
return True
return False
# Adding group_ids key to each issue and grouping by group_name
for result in results:
result_id = result["id"]
@@ -296,7 +308,7 @@ class GroupedOffsetPaginator(OffsetPaginator):
)
# If a result belongs to multiple groups, add it to each group
for group_id in group_ids:
if not result_already_added(
if not self.__result_already_added(
result, grouped_by_field_name[group_id]
):
grouped_by_field_name[group_id].append(result)
@@ -312,14 +324,15 @@ class GroupedOffsetPaginator(OffsetPaginator):
return processed_results
def __query_grouper(self, results):
total_group_dict = self.__get_total_dict()
processed_results = {}
processed_results = self.__get_field_dict()
for result in results:
group_value = str(result.get(self.group_by_field_name))
if group_value not in processed_results:
if group_value in processed_results:
processed_results[str(group_value)] = {
"results": [],
"total_results": total_group_dict.get(group_value),
"total_results": processed_results[str(group_value)][
"total_results"
],
}
processed_results[str(group_value)]["results"].append(result)
return processed_results
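
The net effect of the __query_grouper change: the result dict is pre-seeded from group_by_fields via __get_field_dict instead of being created lazily per row, so groups absent from the current page stay in the output. A self-contained sketch of that seed-then-fill pattern (not Plane's code; the real totals come from a count aggregation, not from the page):

def seed_then_fill(rows, key, group_values):
    # Seed every known group so empty ones survive.
    grouped = {str(v): {"results": [], "total_results": 0} for v in group_values}
    for row in rows:
        value = str(row.get(key))
        if value in grouped:
            grouped[value]["results"].append(row)
    return grouped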
@@ -365,6 +378,7 @@ class BasePaginator:
extra_stats=None,
controller=None,
group_by_field_name=None,
group_by_fields=None,
count_filter=None,
**paginator_kwargs,
):
@@ -383,6 +397,7 @@
if not paginator:
if group_by_field_name:
paginator_kwargs["group_by_field_name"] = group_by_field_name
paginator_kwargs["group_by_fields"] = group_by_fields
paginator_kwargs["count_filter"] = count_filter
paginator = paginator_cls(**paginator_kwargs)
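
Taken together: BasePaginator.paginate now forwards group_by_fields to whatever paginator class it instantiates, alongside group_by_field_name and count_filter. Roughly how a grouped paginator gets built (a sketch; the Q filter is a stand-in for the view's own):

paginator = GroupedOffsetPaginator(
    queryset,
    group_by_field_name="priority",
    group_by_fields=["low", "medium", "high", "urgent", "none"],
    count_filter=Q(issue_inbox__isnull=True),  # stand-in
)
results = paginator.get_result(limit=100, cursor=cursor)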