dev: group pagination
parent 2efa21e8f4
commit 32f2719ca0
@@ -1,6 +1,6 @@
# Python imports
import zoneinfo
import json
+import traceback

# Django imports
from django.urls import resolve
@@ -104,7 +104,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
            response = super().handle_exception(exc)
            return response
        except Exception as e:
-            print(e) if settings.DEBUG else print("Server Error")
+            print(e, traceback.format_exc()) if settings.DEBUG else print("Server Error")
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
@@ -58,6 +58,7 @@ from plane.app.permissions import (
from plane.db.models import (
    Project,
    Issue,
    State,
    IssueActivity,
    IssueComment,
    IssueProperty,
@@ -71,10 +72,10 @@ from plane.db.models import (
    IssueRelation,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
+from plane.utils.grouper import issue_grouper
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset

+from plane.utils.paginator import GroupedOffsetPaginator


class IssueListEndpoint(BaseAPIView):
@@ -202,9 +203,7 @@ class IssueListEndpoint(BaseAPIView):
            "spreadsheet",
        ]:
            return self.paginate(
-                request=request,
-                queryset=issue_queryset,
-                on_results=on_results
+                request=request, queryset=issue_queryset, on_results=on_results
            )
        return on_results(issues=issue_queryset)
@@ -311,6 +310,7 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data

            return issues.values(
                "id",
                "name",
@@ -340,16 +340,23 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
        )

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "gantt",
            "spreadsheet",
        ]:
            return self.paginate(
-                request=request,
-                queryset=issue_queryset,
-                on_results=on_results
+                request=request, queryset=issue_queryset, on_results=on_results
            )
-        return on_results(issues=issue_queryset)
+        return self.paginate(
+            request=request,
+            queryset=issue_queryset,
+            on_results=on_results,
+            paginator_cls=GroupedOffsetPaginator,
+            group_by_field_name="priority",
+            group_by_fields=issue_grouper(
+                field="priority", slug=slug, project_id=project_id
+            ),
+        )

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)
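For the layouts that fall through to the grouped branch, the view resolves the full set of group keys up front and hands them to the paginator. A minimal sketch of how these arguments appear to fit together, assuming a configured Django environment; the workspace slug and project id below are hypothetical placeholders, not values from this commit:

from plane.utils.grouper import issue_grouper
from plane.utils.paginator import GroupedOffsetPaginator

group_by_field_name = "priority"
# For "priority" this is the static list ["urgent", "high", "medium", "low", "none"];
# for "state", "labels", etc. it would be a list of ids scoped to the project.
group_by_fields = issue_grouper(
    field=group_by_field_name, slug="my-workspace", project_id=1
)

# Inside the view, self.paginate(...) forwards paginator_cls, group_by_field_name and
# group_by_fields to BasePaginator.paginate, which (when no ready-made paginator is
# supplied) folds them into paginator_kwargs and instantiates paginator_cls(**paginator_kwargs).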
@@ -576,12 +583,12 @@ class UserWorkSpaceIssues(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

-        if group_by:
-            grouped_results = group_results(issues, group_by, sub_group_by)
-            return Response(
-                grouped_results,
-                status=status.HTTP_200_OK,
-            )
+        # if group_by:
+        #     grouped_results = group_results(issues, group_by, sub_group_by)
+        #     return Response(
+        #         grouped_results,
+        #         status=status.HTTP_200_OK,
+        #     )

        return Response(issues, status=status.HTTP_200_OK)
@@ -1350,13 +1357,10 @@ class IssueArchiveViewSet(BaseViewSet):
            "spreadsheet",
        ]:
            return self.paginate(
-                request=request,
-                queryset=issue_queryset,
-                on_results=on_results
+                request=request, queryset=issue_queryset, on_results=on_results
            )
        return on_results(issues=issue_queryset)

    def retrieve(self, request, slug, project_id, pk=None):
        issue = (
            self.get_queryset()
@@ -1925,6 +1929,7 @@ class IssueDraftViewSet(BaseViewSet):
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset, order_by_param=order_by_param
        )

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
@@ -1963,9 +1968,7 @@ class IssueDraftViewSet(BaseViewSet):
            "spreadsheet",
        ]:
            return self.paginate(
-                request=request,
-                queryset=issue_queryset,
-                on_results=on_results
+                request=request, queryset=issue_queryset, on_results=on_results
            )
        return on_results(issues=issue_queryset)
@@ -1,3 +1,6 @@
+from plane.db.models import State, Label, ProjectMember, Cycle, Module
+
+
def resolve_keys(group_keys, value):
    """resolve keys to a key which will be used for
    grouping
@@ -238,3 +241,45 @@ def group_results(results_data, group_by, sub_group_by=False):
            response_dict[str(group_attribute)].append(value)

    return response_dict
+
+
+def issue_grouper(field, slug, project_id):
+    if field == "state":
+        return list(
+            State.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("id", flat=True)
+        )
+    if field == "labels":
+        return list(
+            Label.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("id", flat=True)
+        )
+    if field == "assignees":
+        return list(
+            ProjectMember.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("member_id", flat=True)
+        )
+    if field == "priority":
+        return ["urgent", "high", "medium", "low", "none"]
+
+    if field == "created_by":
+        return list(
+            ProjectMember.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("member_id", flat=True)
+        )
+    if field == "cycle":
+        return list(
+            Cycle.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("id", flat=True)
+        )
+    if field == "module":
+        return list(
+            Module.objects.filter(
+                workspace__slug=slug, project_id=project_id
+            ).values_list("id", flat=True)
+        )
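issue_grouper returns the full universe of group keys for a field, independent of which page is being fetched. A hedged usage sketch, assuming a configured Django environment with existing rows; the slug and project id are invented:

priorities = issue_grouper(field="priority", slug="my-workspace", project_id=1)
# -> ["urgent", "high", "medium", "low", "none"]

states = issue_grouper(field="state", slug="my-workspace", project_id=1)
# -> the State ids for that workspace/project, e.g. [UUID("..."), UUID("..."), ...]

Note that a field outside the handled branches falls through the function and implicitly returns None, so callers are expected to pass one of the supported fields.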
@@ -1,7 +1,17 @@
+# Python imports
+import math
+from collections.abc import Sequence
+
+# Django imports
+from django.db.models import Window, F, Count, Q
+from django.db.models.functions import RowNumber
+
# Third party imports
from rest_framework.response import Response
from rest_framework.exceptions import ParseError, ValidationError
-from collections.abc import Sequence
-import math
+
+# Module imports
+from plane.db.models import Issue


class Cursor:
@@ -140,6 +150,87 @@ class OffsetPaginator:
        )


+class GroupedOffsetPaginator(OffsetPaginator):
+    def __init__(
+        self, queryset, group_by_field_name, group_by_fields, *args, **kwargs
+    ):
+        super().__init__(queryset, *args, **kwargs)
+        self.group_by_field_name = group_by_field_name
+        self.group_by_fields = group_by_fields
+
+    def get_result(self, limit=100, cursor=None):
+        # offset is page #
+        # value is page limit
+        if cursor is None:
+            cursor = Cursor(0, 0, 0)
+
+        limit = min(limit, self.max_limit)
+
+        # Adjust the initial offset and stop based on the cursor and limit
+        queryset = self.queryset
+        if self.key:
+            queryset = queryset.order_by(*self.key)
+
+        page = cursor.offset
+        offset = cursor.offset * cursor.value
+        stop = offset + (cursor.value or limit) + 1
+
+        if self.max_offset is not None and offset >= self.max_offset:
+            raise BadPaginationError("Pagination offset too large")
+        if offset < 0:
+            raise BadPaginationError("Pagination offset cannot be negative")
+
+        # Get the queryset
+        queryset = self.queryset
+
+        # Compute the results
+        results = {}
+        queryset = queryset.annotate(
+            row_number=Window(
+                expression=RowNumber(),
+                partition_by=[F(self.group_by_field_name)],
+                order_by=F(self.group_by_field_name).asc(),
+            )
+        )
+
+        results = queryset.filter(row_number__gte=offset, row_number__lt=stop)
+
+        # Adjust cursors based on the grouped results for pagination
+        next_cursor = Cursor(
+            limit,
+            page + 1,
+            False,
+            queryset.filter(row_number__gte=stop).exists(),
+        )
+        prev_cursor = Cursor(
+            limit,
+            page - 1,
+            True,
+            page > 0,
+        )
+
+        # Optionally, calculate the total count and max_hits if needed
+        # This might require adjustments based on specific use cases
+        max_hits = math.ceil(
+            self.queryset.values(self.group_by_field_name)
+            .annotate(
+                count=Count(
+                    self.group_by_field_name,
+                )
+            )
+            .order_by("-count")[0]["count"]
+            / limit
+        )
+
+        return CursorResult(
+            results=results,
+            next=next_cursor,
+            prev=prev_cursor,
+            hits=None,
+            max_hits=max_hits,
+        )


class BasePaginator:
    """BasePaginator class can be inherited by any View to return a paginated view"""
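The core idea of get_result appears to be: number the rows within each group via a window partitioned by the group field, then take the same row_number slice from every group, so one page carries up to roughly `limit` issues per group rather than `limit` issues overall. A standalone, pure-Python analogue of that slicing, with made-up rows, purely for illustration:

from itertools import groupby

rows = [
    {"id": 1, "priority": "high"},
    {"id": 2, "priority": "high"},
    {"id": 3, "priority": "low"},
    {"id": 4, "priority": "low"},
    {"id": 5, "priority": "low"},
]

def slice_per_group(rows, group_field, offset, stop):
    rows = sorted(rows, key=lambda r: r[group_field])
    page = []
    for _, members in groupby(rows, key=lambda r: r[group_field]):
        for row_number, row in enumerate(members, start=1):  # per-group row number
            if offset <= row_number < stop:
                page.append(row)
    return page

# First page, two rows per group (offset=0, stop=3 mirrors row_number >= 0 and < 3):
print(slice_per_group(rows, "priority", offset=0, stop=3))
# -> rows with ids 1, 2 (high) and 3, 4 (low); id 5 waits for a later page

In the real paginator, max_hits is ceil(size of the largest group / limit), so the number of pages is driven by the biggest group.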
@@ -184,11 +275,12 @@ class BasePaginator:
        cursor_cls=Cursor,
        extra_stats=None,
        controller=None,
+        group_by_fields=None,
+        group_by_field_name=None,
        **paginator_kwargs,
    ):
        """Paginate the request"""
        per_page = self.get_per_page(request, default_per_page, max_per_page)
        layout = self.get_layout(request=request)

        # Convert the cursor value to integer and float from string
        input_cursor = None
@@ -200,6 +292,8 @@ class BasePaginator:
            raise ParseError(detail="Invalid cursor parameter.")

        if not paginator:
+            paginator_kwargs["group_by_fields"] = group_by_fields
+            paginator_kwargs["group_by_field_name"] = group_by_field_name
            paginator = paginator_cls(**paginator_kwargs)

        try:
@@ -209,12 +303,20 @@ class BasePaginator:
        except BadPaginationError as e:
            raise ParseError(detail="Error in parsing")

        # Serialize result according to the on_result function
        if on_results:
            results = on_results(cursor_result.results)
        else:
            results = cursor_result.results

+        processed_results = {}
+        if group_by_field_name and group_by_fields:
+            for result in results:
+                group_value = str(result.get(group_by_field_name))
+                if group_value not in processed_results:
+                    processed_results[str(group_value)] = {
+                        "results": [],
+                    }
+                processed_results[str(group_value)]["results"].append(result)
+
+            results = processed_results
        # Add Manipulation functions to the response
        if controller is not None:
            results = controller(results)
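When group_by_field_name and group_by_fields are supplied, the flat list coming back from on_results (serializer .data or .values() rows in these views) is re-bucketed into a dict keyed by the group value. A self-contained illustration of that step with invented rows:

results = [
    {"id": 1, "priority": "high"},
    {"id": 2, "priority": "low"},
    {"id": 3, "priority": "high"},
]
group_by_field_name = "priority"

processed_results = {}
for result in results:
    group_value = str(result.get(group_by_field_name))
    if group_value not in processed_results:
        processed_results[group_value] = {"results": []}
    processed_results[group_value]["results"].append(result)

# processed_results == {
#     "high": {"results": [{"id": 1, "priority": "high"}, {"id": 3, "priority": "high"}]},
#     "low": {"results": [{"id": 2, "priority": "low"}]},
# }

This assumes each result is dict-like (supports .get()); in the hunk above, group_by_fields acts only as an on/off switch for the regrouping and is not used to pre-seed empty groups.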
@@ -224,6 +326,7 @@ class BasePaginator:
        # Return the response
        response = Response(
            {
+                "grouped_by": group_by_field_name,
                "next_cursor": str(cursor_result.next),
                "prev_cursor": str(cursor_result.prev),
                "next_page_results": cursor_result.next.has_results,
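Put together, a grouped page response appears to start with the shape below; the cursor strings come from Cursor's string form, and the remaining keys, including the now-grouped results, sit in the part of the response dict not shown in this hunk, so the values here are placeholders only:

{
    "grouped_by": "priority",
    "next_cursor": "<str(cursor_result.next)>",
    "prev_cursor": "<str(cursor_result.prev)>",
    "next_page_results": True,
    # ... remaining keys, including the grouped results, are outside this hunk
}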