Merge branch 'feat/pagination' of github.com:makeplane/plane into feat/pagination

rahulramesha 2024-03-20 16:44:57 +05:30
commit 322d434f7e
9 changed files with 981 additions and 343 deletions

View File

@@ -60,7 +60,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
@@ -781,7 +784,10 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.values("count")
)
)
filters = issue_filters(request.query_params, "GET")
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = issue_queryset.filter(**filters)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
@@ -790,48 +796,100 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
field=group_by,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])
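Every issue list endpoint touched by this commit applies the same dispatch before paginating: plain offset pagination when no group_by is passed, GroupedOffsetPaginator when only group_by is passed, SubGroupedOffsetPaginator when both group_by and sub_group_by are passed, and a 400 response when the two parameters are identical. A minimal stand-alone sketch of that decision follows; choose_paginator is a hypothetical helper written only for illustration, since the real views inline this branching around self.paginate(...).

# Hypothetical helper, illustration only: the views inline this branching and
# pass the chosen class to self.paginate(...) via paginator_cls.
def choose_paginator(group_by, sub_group_by):
    if not group_by:
        return "OffsetPaginator"            # flat list pagination
    if sub_group_by:
        if group_by == sub_group_by:
            raise ValueError(
                "Group by and sub group by cannot have same parameters"
            )
        return "SubGroupedOffsetPaginator"  # grouped + sub-grouped pagination
    return "GroupedOffsetPaginator"         # grouped pagination only

assert choose_paginator(False, False) == "OffsetPaginator"
assert choose_paginator("priority", False) == "GroupedOffsetPaginator"
assert choose_paginator("priority", "labels__id") == "SubGroupedOffsetPaginator"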

View File

@@ -43,7 +43,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseViewSet
@@ -116,49 +119,100 @@ class IssueArchiveViewSet(BaseViewSet):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
field=group_by,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
def retrieve(self, request, slug, project_id, pk=None):
issue = (
self.get_queryset()

View File

@@ -47,7 +47,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
@@ -116,47 +119,100 @@ class IssueListEndpoint(BaseAPIView):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
class IssueViewSet(WebhookMixin, BaseViewSet):
def get_serializer_class(self):
@@ -231,49 +287,96 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)

View File

@@ -43,7 +43,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseViewSet
@@ -95,7 +98,6 @@ class IssueDraftViewSet(BaseViewSet):
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = self.get_queryset().filter(**filters)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
@@ -104,48 +106,100 @@ class IssueDraftViewSet(BaseViewSet):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)

View File

@@ -58,7 +58,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
@@ -517,47 +520,100 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
# create multiple issues inside a module
def create_module_issues(self, request, slug, project_id, module_id):
issues = request.data.get("issues", [])

View File

@@ -41,7 +41,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseViewSet
@@ -153,52 +156,109 @@ class GlobalViewIssuesViewSet(BaseViewSet):
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset, order_by_param=order_by_param
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=None,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=None,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=None,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=request.GET.get("order_by", "-created_at"),
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
order_by=request.GET.get("order_by", "-created_at"),
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
filters=filters,
),
paginator_cls=GroupedOffsetPaginator,
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)
class IssueViewViewSet(BaseViewSet):
serializer_class = IssueViewSerializer

View File

@@ -45,7 +45,10 @@ from plane.utils.grouper import (
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .base import BaseAPIView, BaseViewSet
@@ -566,40 +569,96 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset, field=group_by
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
# List Paginate
if not group_by:
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# Group paginate
return self.paginate(
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=False,
is_draft=True,
),
)

View File

@@ -1,7 +1,7 @@
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import F, Q, UUIDField, Value
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce
# Module imports
@@ -17,98 +17,49 @@ from plane.db.models import (
)
def issue_queryset_grouper(field, queryset):
if field == "assignees__id":
return queryset.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
def issue_queryset_grouper(queryset, group_by, sub_group_by):
elif field == "labels__id":
return queryset.annotate(
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
FIELD_MAPPER = {
"label_ids": "labels__id",
"assignee_ids": "assignees__id",
"module_ids": "modules__id",
}
elif field == "modules__id":
return queryset.annotate(
modules__id=F("issue_module__module_id")
).annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
else:
return queryset.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
annotations_map = {
"label_ids": ("assignees__id", ~Q(assignees__id__isnull=True)),
"assignee_ids": ("labels__id", ~Q(labels__id__isnull=True)),
"module_ids": (
"issue_module__module_id",
~Q(issue_module__module_id__isnull=True),
),
}
default_annotations = {
key: Coalesce(
ArrayAgg(
field,
distinct=True,
filter=condition,
),
Value([], output_field=ArrayField(UUIDField())),
)
for key, (field, condition) in annotations_map.items()
if FIELD_MAPPER.get(key) != group_by
and FIELD_MAPPER.get(key) != sub_group_by
}
return queryset.annotate(**default_annotations)
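The per-field if/elif chain collapses into one annotation map: every *_ids array annotation is attached except the ones whose underlying field is being used as the group or sub-group key. A small stand-in for that comprehension, without the ORM pieces; remaining_array_annotations is a hypothetical name used only here, and it assumes the intended condition is that the key matches neither parameter.

# Stand-in for the comprehension above (illustration only, no ORM).
FIELD_MAPPER = {
    "label_ids": "labels__id",
    "assignee_ids": "assignees__id",
    "module_ids": "modules__id",
}

def remaining_array_annotations(group_by, sub_group_by):
    return [
        key
        for key, field in FIELD_MAPPER.items()
        if field != group_by and field != sub_group_by
    ]

print(remaining_array_annotations(False, False))
# ['label_ids', 'assignee_ids', 'module_ids']  -> ungrouped: keep all arrays
print(remaining_array_annotations("labels__id", False))
# ['assignee_ids', 'module_ids']               -> the grouping key drops its own array
print(remaining_array_annotations("labels__id", "modules__id"))
# ['assignee_ids']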
def issue_on_results(issues, group_by):
def issue_on_results(issues, group_by, sub_group_by):
FIELD_MAPPER = {
"labels__id": "label_ids",
"assignees__id": "assignee_ids",
"modules__id": "module_ids",
}
original_list = ["assignee_ids", "label_ids", "module_ids"]
required_fields = [
"id",
"name",
@@ -134,14 +85,16 @@ def issue_on_results(issues, group_by):
"archived_at",
"state__group",
]
if group_by == "assignees__id":
required_fields.extend(["label_ids", "module_ids", "assignees__id"])
elif group_by == "labels__id":
required_fields.extend(["assignee_ids", "module_ids", "labels__id"])
elif group_by == "modules__id":
required_fields.extend(["assignee_ids", "label_ids", "modules__id"])
else:
required_fields.extend(["assignee_ids", "label_ids", "module_ids"])
if group_by in FIELD_MAPPER:
original_list.remove(FIELD_MAPPER[group_by])
original_list.append(group_by)
if sub_group_by in FIELD_MAPPER:
original_list.remove(FIELD_MAPPER[sub_group_by])
original_list.append(sub_group_by)
required_fields.extend(original_list)
return issues.values(*required_fields)
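issue_on_results makes the mirror-image swap on the output side: the flattened .values() selection keeps the raw relation id for whatever is being grouped or sub-grouped on, and keeps the aggregated *_ids arrays for everything else. A minimal stand-in for the relation-field selection; selected_relation_fields is a hypothetical name for illustration.

# Stand-in for the field-selection logic above (relation fields only).
def selected_relation_fields(group_by, sub_group_by):
    FIELD_MAPPER = {
        "labels__id": "label_ids",
        "assignees__id": "assignee_ids",
        "modules__id": "module_ids",
    }
    fields = ["assignee_ids", "label_ids", "module_ids"]
    for value in (group_by, sub_group_by):
        if value in FIELD_MAPPER:
            fields.remove(FIELD_MAPPER[value])
            fields.append(value)
    return fields

print(selected_relation_fields(False, False))
# ['assignee_ids', 'label_ids', 'module_ids']
print(selected_relation_fields("labels__id", "assignees__id"))
# ['module_ids', 'labels__id', 'assignees__id']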

View File

@@ -175,7 +175,7 @@ class GroupedOffsetPaginator(OffsetPaginator):
self.group_by_fields = group_by_fields
self.count_filter = count_filter
def get_result(self, limit=100, cursor=None):
def get_result(self, limit=50, cursor=None):
# offset is page #
# value is page limit
if cursor is None:
@@ -337,6 +337,235 @@ class GroupedOffsetPaginator(OffsetPaginator):
return processed_results
class SubGroupedOffsetPaginator(OffsetPaginator):
FIELD_MAPPER = {
"labels__id": "label_ids",
"assignees__id": "assignee_ids",
"modules__id": "module_ids",
}
def __init__(
self,
queryset,
group_by_field_name,
sub_group_by_field_name,
group_by_fields,
sub_group_by_fields,
count_filter,
*args,
**kwargs,
):
super().__init__(queryset, *args, **kwargs)
self.group_by_field_name = group_by_field_name
self.group_by_fields = group_by_fields
self.sub_group_by_field_name = sub_group_by_field_name
self.sub_group_by_fields = sub_group_by_fields
self.count_filter = count_filter
def get_result(self, limit=30, cursor=None):
# offset is page #
# value is page limit
if cursor is None:
cursor = Cursor(0, 0, 0)
limit = min(limit, self.max_limit)
# Adjust the initial offset and stop based on the cursor and limit
queryset = self.queryset
if self.key:
queryset = queryset.order_by(*self.key)
page = cursor.offset
offset = cursor.offset * cursor.value
stop = offset + (cursor.value or limit) + 1
if self.max_offset is not None and offset >= self.max_offset:
raise BadPaginationError("Pagination offset too large")
if offset < 0:
raise BadPaginationError("Pagination offset cannot be negative")
# Compute the results
results = {}
queryset = queryset.annotate(
row_number=Window(
expression=RowNumber(),
partition_by=[F(self.sub_group_by_field_name)],
order_by=(*self.key,),
)
)
# Filter the results
results = queryset.filter(row_number__gt=offset, row_number__lt=stop)
# Adjust cursors based on the grouped results for pagination
next_cursor = Cursor(
limit,
page + 1,
False,
queryset.filter(row_number__gte=stop).exists(),
)
prev_cursor = Cursor(
limit,
page - 1,
True,
page > 0,
)
count = queryset.count()
# Optionally, calculate the total count and max_hits if needed
# This might require adjustments based on specific use cases
max_hits = math.ceil(
queryset.values(self.group_by_field_name)
.annotate(
count=Count(
self.group_by_field_name,
)
)
.order_by("-count")[0]["count"]
/ limit
)
return CursorResult(
results=results,
next=next_cursor,
prev=prev_cursor,
hits=count,
max_hits=max_hits,
)
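Row numbers are assigned per sub-group partition, so a single offset/stop window slices every sub-group to the same page at once; the cursor's offset is the page index and its value is the page size. The arithmetic, pulled out of the ORM for illustration (row_number_bounds is a hypothetical helper, not part of the paginator):

# Same formulas as in get_result above, detached from the queryset.
def row_number_bounds(cursor_offset, cursor_value, limit=30):
    offset = cursor_offset * cursor_value
    stop = offset + (cursor_value or limit) + 1
    # kept rows per sub-group: offset < row_number < stop
    return offset, stop

print(row_number_bounds(0, 0))   # (0, 31)  -> first page, default page size
print(row_number_bounds(2, 30))  # (60, 91) -> third page, 30 rows per sub-group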
def __get_group_total_queryset(self):
return (
self.queryset.order_by(self.group_by_field_name)
.values(self.group_by_field_name)
.annotate(
count=Count(
self.group_by_field_name,
filter=self.count_filter,
)
)
.distinct()
)
def __get_subgroup_total_queryset(self):
return self.queryset.values(self.sub_group_by_field_name).annotate(
count=Count(
self.sub_group_by_field_name,
filter=self.count_filter,
)
)
def __get_total_dict(self):
total_group_dict = {}
total_sub_group_dict = {}
for group in self.__get_group_total_queryset():
total_group_dict[str(group.get(self.group_by_field_name))] = (
total_group_dict.get(
str(group.get(self.group_by_field_name)), 0
)
+ (1 if group.get("count") == 0 else group.get("count"))
)
for sub_group in self.__get_subgroup_total_queryset():
total_sub_group_dict[
str(sub_group.get(self.sub_group_by_field_name))
] = total_sub_group_dict.get(
str(sub_group.get(self.sub_group_by_field_name)), 0
) + (
1 if sub_group.get("count") == 0 else sub_group.get("count")
)
return total_group_dict, total_sub_group_dict
def __get_field_dict(self):
total_group_dict, total_sub_group_dict = self.__get_total_dict()
return {
str(group): {
"results": {
str(sub_group): {
"results": [],
"total_results": total_sub_group_dict.get(
str(sub_group), 0
),
}
for sub_group in total_sub_group_dict
},
"total_results": total_group_dict.get(str(group), 0),
}
for group in self.group_by_fields
}
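__get_field_dict pre-builds the response scaffold so that empty groups and sub-groups still appear with their totals; only the inner results lists get filled in later. Roughly, its shape is the following (keys and counts are illustrative):

# Illustrative shape; keys are stringified group / sub-group values.
scaffold = {
    "<group value>": {
        "results": {
            "<sub group value>": {
                "results": [],       # filled by __query_grouper / __query_multi_grouper
                "total_results": 4,  # per-sub-group count under count_filter
            },
        },
        "total_results": 12,         # per-group count under count_filter
    },
}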
def __result_already_added(self, result, group):
for existing_issue in group:
if existing_issue["id"] == result["id"]:
return True
return False
def __query_multi_grouper(self, results):
processed_results = self.__get_field_dict()
# Prepare dicts to keep track of the group and sub-group IDs associated with each result
result_group_mapping = defaultdict(set)
result_sub_group_mapping = defaultdict(set)
# Iterate over results to fill the above dictionaries
if self.group_by_field_name in self.FIELD_MAPPER:
for result in results:
result_id = result["id"]
group_id = result[self.group_by_field_name]
result_group_mapping[str(result_id)].add(str(group_id))
if self.sub_group_by_field_name in self.FIELD_MAPPER:
for result in results:
result_id = result["id"]
sub_group_id = result[self.sub_group_by_field_name]
result_sub_group_mapping[str(result_id)].add(str(sub_group_id))
for result in results:
group_value = str(result.get(self.group_by_field_name))
sub_group_value = str(result.get(self.sub_group_by_field_name))
if (
group_value in processed_results
and sub_group_value
in processed_results[str(group_value)]["results"]
):
if self.group_by_field_name in self.FIELD_MAPPER:
group_ids = list(result_group_mapping[str(result["id"])])
result[self.FIELD_MAPPER.get(self.group_by_field_name)] = (
[] if "None" in group_ids else group_ids
)
if self.sub_group_by_field_name in self.FIELD_MAPPER:
sub_group_ids = list(result_sub_group_mapping[str(result["id"])])
result[self.FIELD_MAPPER.get(self.sub_group_by_field_name)] = (
[] if "None" in sub_group_ids else sub_group_ids
)
processed_results[str(group_value)]["results"][
str(sub_group_value)
]["results"].append(result)
return processed_results
def __query_grouper(self, results):
processed_results = self.__get_field_dict()
for result in results:
group_value = str(result.get(self.group_by_field_name))
sub_group_value = str(result.get(self.sub_group_by_field_name))
if (
group_value in processed_results
and sub_group_value
in processed_results[str(group_value)]["results"]
):
processed_results[str(group_value)]["results"][
str(sub_group_value)
]["results"].append(result)
return processed_results
def process_results(self, results):
if (
self.group_by_field_name in self.FIELD_MAPPER
or self.sub_group_by_field_name in self.FIELD_MAPPER
):
processed_results = self.__query_multi_grouper(results=results)
else:
processed_results = self.__query_grouper(results=results)
return processed_results
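The multi-grouper path exists because a many-to-many grouping key (anything in FIELD_MAPPER, e.g. assignees__id) duplicates an issue row once per related id, so the aggregated array has to be rebuilt from the id mapping collected above before the row is slotted into its group and sub-group. A toy illustration of that collection step (data made up):

# Toy version of the mapping built in __query_multi_grouper.
rows = [
    {"id": "issue-1", "assignees__id": "user-a"},
    {"id": "issue-1", "assignees__id": "user-b"},
    {"id": "issue-2", "assignees__id": None},
]
mapping = {}
for row in rows:
    mapping.setdefault(str(row["id"]), set()).add(str(row["assignees__id"]))

print(mapping)
# {'issue-1': {'user-a', 'user-b'}, 'issue-2': {'None'}}
# 'None' entries are translated to an empty *_ids list on the result.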
class BasePaginator:
"""BasePaginator class can be inherited by any View to return a paginated view"""
@@ -371,6 +600,8 @@ class BasePaginator:
controller=None,
group_by_field_name=None,
group_by_fields=None,
sub_group_by_field_name=None,
sub_group_by_fields=None,
count_filter=None,
**paginator_kwargs,
):
@@ -391,6 +622,15 @@ class BasePaginator:
paginator_kwargs["group_by_field_name"] = group_by_field_name
paginator_kwargs["group_by_fields"] = group_by_fields
paginator_kwargs["count_filter"] = count_filter
if sub_group_by_field_name:
paginator_kwargs["sub_group_by_field_name"] = (
sub_group_by_field_name
)
paginator_kwargs["sub_group_by_fields"] = (
sub_group_by_fields
)
paginator = paginator_cls(**paginator_kwargs)
try:
@@ -416,6 +656,7 @@ class BasePaginator:
response = Response(
{
"grouped_by": group_by_field_name,
"sub_grouped_by": sub_group_by_field_name,
"total_count": (cursor_result.hits),
"next_cursor": str(cursor_result.next),
"prev_cursor": str(cursor_result.prev),