Mirror of https://github.com/makeplane/plane (synced 2024-06-14 14:31:34 +00:00)

Commit: f292ee00a8 - dev: paginating issue apis
Parent: 7060fb712f
@@ -57,7 +57,9 @@ from plane.db.models import (
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot

from plane.utils.grouper import issue_queryset_grouper, issue_on_results
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator


class CycleViewSet(WebhookMixin, BaseViewSet):
    serializer_class = CycleSerializer

@@ -707,12 +709,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, cycle_id):
        fields = [
            field
            for field in request.GET.get("fields", "").split(",")
            if field
        ]
        order_by = request.GET.get("order_by", "created_at")
        order_by_param = request.GET.get("order_by", "created_at")
        filters = issue_filters(request.query_params, "GET")
        issue_queryset = (
            Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)

@@ -726,7 +723,6 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
                "issue_module__module",
                "issue_cycle__cycle",
            )
            .order_by(order_by)
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(

@@ -751,77 +747,49 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .order_by(order_by)
        )

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data
            return issues.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            )

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "spreadsheet",
        ]:
        # Issue queryset
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # Group by
        group_by = request.GET.get("group_by", False)

        # List Paginate
        if not group_by:
            return self.paginate(
                request=request,
                queryset=issue_queryset,
                on_results=on_results
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )
        return on_results(issues=issue_queryset)

        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset, field=group_by
        )
        # Group paginate
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True),
                archived_at__isnull=False,
                is_draft=True,
            ),
        )


    def create(self, request, slug, project_id, cycle_id):
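Across these list() endpoints the new flow is the same: order the queryset, read group_by from the query string, then either paginate flat or switch to GroupedOffsetPaginator with issue_on_results shaping each page. A minimal pure-Python sketch of that branch over in-memory dicts (paginate_flat and paginate_grouped are illustrative stand-ins, not Plane helpers):

from collections import defaultdict

def paginate_flat(rows, offset=0, limit=2):
    # The `if not group_by` branch: plain offset pagination.
    return rows[offset:offset + limit]

def paginate_grouped(rows, group_by, offset=0, limit=2):
    # The grouped branch: bucket rows by the group value, then slice each bucket.
    buckets = defaultdict(list)
    for row in rows:
        buckets[row.get(group_by)].append(row)
    return {key: members[offset:offset + limit] for key, members in buckets.items()}

def list_issues(rows, group_by=None):
    if not group_by:
        return paginate_flat(rows)
    return paginate_grouped(rows, group_by)

issues = [
    {"id": 1, "priority": "high"},
    {"id": 2, "priority": "low"},
    {"id": 3, "priority": "high"},
]
print(list_issues(issues))                       # one flat page of issues
print(list_issues(issues, group_by="priority"))  # one page per priority value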
@@ -72,7 +72,10 @@ from plane.db.models import (
    IssueRelation,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import issue_grouper, issue_queryset_grouper
from plane.utils.grouper import (
    issue_queryset_grouper,
    issue_on_results,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator

@@ -127,86 +130,52 @@ class IssueListEndpoint(BaseAPIView):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

        filters = issue_filters(request.query_params, "GET")

        order_by_param = request.GET.get("order_by", "-created_at")
        issue_queryset = queryset.filter(**filters)

        # Issue queryset
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data
            return issues.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
        # Group by
        group_by = request.GET.get("group_by", False)

        # List Paginate
        if not group_by:
            return self.paginate(
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "spreadsheet",
        ]:
            return self.paginate(
                request=request, queryset=issue_queryset, on_results=on_results
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset, field=group_by
        )
        # Group paginate
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True),
                archived_at__isnull=False,
                is_draft=True,
            ),
        )
        return on_results(issues=issue_queryset)

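A small detail shared by these endpoints is the fields query parameter, split on commas with empty entries dropped before deciding how much to serialize. A standalone rendering of that one-liner (parse_fields is a name used here only for illustration):

def parse_fields(raw):
    # "id,name,,priority" -> ["id", "name", "priority"]; "" -> []
    return [field for field in raw.split(",") if field]

assert parse_fields("") == []
assert parse_fields("id,name,,priority") == ["id", "name", "priority"]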
class IssueViewSet(WebhookMixin, BaseViewSet):

@@ -280,52 +249,17 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
            order_by_param=order_by_param,
        )

        def on_results(issues):
            required_fields = [
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            ]
            if group_by == "assignees__id":
                required_fields.extend(
                    ["label_ids", "module_ids", "assignees__id"]
                )
            if group_by == "labels__id":
                required_fields.extend(
                    ["assignee_ids", "module_ids", "labels__id"]
                )
            if group_by == "modules__id":
                required_fields.extend(
                    ["assignee_ids", "label_ids", "modules__id"]
                )
            return issues.values(*required_fields)

        # Group by
        group_by = request.GET.get("group_by", False)

        # List Paginate
        if not group_by:
            return self.paginate(
                request=request, queryset=issue_queryset, on_results=on_results
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )

        issue_queryset = issue_queryset_grouper(

@@ -335,12 +269,11 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=on_results,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            group_by_fields=issue_grouper(
                field=group_by, slug=slug, project_id=project_id
            ),
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
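The recurring change in IssueViewSet and the other views is swapping a locally defined on_results function for a lambda that closes over group_by and defers to the shared issue_on_results helper, so the paginator only ever sees a one-argument callback. A sketch of why that shape works, with a generic paginate stand-in rather than the real BasePaginator:

def paginate(queryset, on_results, limit=2):
    # Stand-in for BasePaginator.paginate: slice first, then let the caller
    # shape the page through the single-argument callback.
    page = list(queryset)[:limit]
    return {"results": on_results(page)}

def issue_on_results(issues, group_by):
    # Simplified field selection keyed off group_by, mirroring the helper's intent.
    keys = ["id", "name"] + ([group_by] if group_by else [])
    return [{key: issue.get(key) for key in keys} for issue in issues]

issues = [{"id": 1, "name": "Bug", "labels__id": "a"}, {"id": 2, "name": "Task", "labels__id": "b"}]
group_by = "labels__id"
page = paginate(issues, on_results=lambda batch: issue_on_results(issues=batch, group_by=group_by))
print(page)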
@@ -1279,32 +1212,6 @@ class IssueArchiveViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        )

    @method_decorator(gzip_page)

@@ -1316,57 +1223,51 @@ class IssueArchiveViewSet(BaseViewSet):

        issue_queryset = self.get_queryset().filter(**filters)

        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset, order_by_param=order_by_param
        )

        issue_queryset = (
            issue_queryset
            if show_sub_issues == "true"
            else issue_queryset.filter(parent__isnull=True)
        )

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data
            return issues.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
        # Issue queryset
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "spreadsheet",
        ]:
        # Group by
        group_by = request.GET.get("group_by", False)

        # List Paginate
        if not group_by:
            return self.paginate(
                request=request, queryset=issue_queryset, on_results=on_results
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )

        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset, field=group_by
        )
        # Group paginate
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True),
                archived_at__isnull=False,
                is_draft=True,
            ),
        )
        return on_results(issues=issue_queryset)

    def retrieve(self, request, slug, project_id, pk=None):
        issue = (

@@ -1419,12 +1320,19 @@ class IssueArchiveViewSet(BaseViewSet):
        )
        if issue.state.group not in ["completed", "cancelled"]:
            return Response(
                {"error": "Can only archive completed or cancelled state group issue"},
                {
                    "error": "Can only archive completed or cancelled state group issue"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        issue_activity.delay(
            type="issue.activity.updated",
            requested_data=json.dumps({"archived_at": str(timezone.now().date()), "automation": False}),
            requested_data=json.dumps(
                {
                    "archived_at": str(timezone.now().date()),
                    "automation": False,
                }
            ),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),

@@ -1438,8 +1346,9 @@ class IssueArchiveViewSet(BaseViewSet):
        issue.archived_at = timezone.now().date()
        issue.save()

        return Response({"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK)

        return Response(
            {"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK
        )

    def unarchive(self, request, slug, project_id, pk=None):
        issue = Issue.objects.get(
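The archive hunks also reformat the activity payload and the success response without changing their shape: requested_data is still a JSON string carrying archived_at and automation, and the endpoint now returns the archived date with HTTP 200. A stdlib-only sketch of that payload (archive_payload is a hypothetical helper name, used only for illustration):

import json
from datetime import date

def archive_payload(archived_on):
    # Same shape as the requested_data passed to issue_activity.delay above.
    return json.dumps({"archived_at": str(archived_on), "automation": False})

print(archive_payload(date(2024, 3, 1)))  # {"archived_at": "2024-03-01", "automation": false}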
@@ -1935,32 +1844,6 @@ class IssueDraftViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

    @method_decorator(gzip_page)

@@ -1975,51 +1858,47 @@ class IssueDraftViewSet(BaseViewSet):
        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = self.get_queryset().filter(**filters)

        # Issue queryset
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset, order_by_param=order_by_param
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data
            return issues.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            )
        # Group by
        group_by = request.GET.get("group_by", False)

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "spreadsheet",
        ]:
        # List Paginate
        if not group_by:
            return self.paginate(
                request=request, queryset=issue_queryset, on_results=on_results
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )

        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset, field=group_by
        )
        # Group paginate
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True),
                archived_at__isnull=False,
                is_draft=True,
            ),
        )
        return on_results(issues=issue_queryset)

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)
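Every grouped paginate call in this commit passes the same count_filter. Read as a plain predicate, it accepts issues whose inbox status is 1, -1, or 2, or that have no inbox record at all, combined with the archived_at and is_draft keyword conditions. The sketch below is only one reading of that Q object, expressed over dicts rather than Django filters:

def counts_toward_group(issue):
    # issue_inbox__status in (1, -1, 2) OR issue_inbox is null ...
    inbox_ok = issue.get("issue_inbox__status") in (1, -1, 2) or issue.get("issue_inbox__status") is None
    # ... ANDed with archived_at__isnull=False and is_draft=True.
    return inbox_ok and issue.get("archived_at") is not None and issue.get("is_draft") is True

print(counts_toward_group({"issue_inbox__status": 1, "archived_at": "2024-03-01", "is_draft": True}))  # True
print(counts_toward_group({"issue_inbox__status": None, "archived_at": None, "is_draft": False}))      # False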
@@ -45,7 +45,9 @@ from plane.db.models import (
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot

from plane.utils.grouper import issue_queryset_grouper, issue_on_results
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator


class ModuleViewSet(WebhookMixin, BaseViewSet):
    model = Module

@@ -473,86 +475,55 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, module_id):
        fields = [
            field
            for field in request.GET.get("fields", "").split(",")
            if field
        ]
        filters = issue_filters(request.query_params, "GET")
        issue_queryset = self.get_queryset().filter(**filters)

        def on_results(issues):
            if self.expand or self.fields:
                return IssueSerializer(
                    issues, many=True, expand=self.expand, fields=self.fields
                ).data
            return issues.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
        order_by_param = request.GET.get("order_by", "created_at")

        # Issue queryset
        issue_queryset = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        if request.GET.get("layout", "spreadsheet") in [
            "layout",
            "spreadsheet",
        ]:
        # Group by
        group_by = request.GET.get("group_by", False)

        # List Paginate
        if not group_by:
            return self.paginate(
                request=request,
                queryset=issue_queryset,
                on_results=on_results
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues
                ),
            )
        return on_results(issues=issue_queryset)

        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset, field=group_by
        )
        # Group paginate
        return self.paginate(
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues
            ),
            paginator_cls=GroupedOffsetPaginator,
            group_by_field_name=group_by,
            count_filter=Q(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True),
                archived_at__isnull=False,
                is_draft=True,
            ),
        )


    # create multiple issues inside a module
    def create_module_issues(self, request, slug, project_id, module_id):
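Grouping module issues by modules__id means grouping on a many-to-many relation: one issue can legitimately appear under several modules once module_ids is aggregated. The toy function below illustrates that fan-out over dicts; it is a conceptual analog, not how GroupedOffsetPaginator actually builds its pages:

from collections import defaultdict

def fan_out_by(rows, field):
    # An issue with module_ids ["m1", "m2"] shows up in both buckets;
    # an issue with no modules falls into a None bucket.
    buckets = defaultdict(list)
    for row in rows:
        for value in (row.get(field) or [None]):
            buckets[value].append(row["id"])
    return dict(buckets)

issues = [{"id": 1, "module_ids": ["m1", "m2"]}, {"id": 2, "module_ids": []}]
print(fan_out_by(issues, "module_ids"))  # {'m1': [1], 'm2': [1], None: [2]}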
@@ -251,50 +251,6 @@ def group_results(results_data, group_by, sub_group_by=False):
    return response_dict


def issue_grouper(field, slug, project_id):
    if field == "state_id":
        return list(
            State.objects.filter(
                ~Q(name="Triage"),
                workspace__slug=slug,
                project_id=project_id,
            ).values_list("id", flat=True)
        )
    if field == "labels__id":
        return list(
            Label.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values_list("id", flat=True)
        )
    if field == "assignees__id":
        return list(
            ProjectMember.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values_list("member_id", flat=True)
        )
    if field == "priority":
        return ["urgent", "high", "medium", "low", "none"]

    if field == "created_by":
        return list(
            ProjectMember.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values_list("member_id", flat=True)
        )
    if field == "cycle_id":
        return list(
            Cycle.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values_list("id", flat=True)
        )
    if field == "modules__id":
        return list(
            Module.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values_list("id", flat=True)
        )


def issue_queryset_grouper(field, queryset):
    if field == "assignees__id":
        return queryset.annotate(

@@ -316,7 +272,7 @@ def issue_queryset_grouper(field, queryset):
            ),
        )

    if field == "labels__id":
    elif field == "labels__id":
        return queryset.annotate(
            assignee_ids=Coalesce(
                ArrayAgg(

@@ -336,8 +292,10 @@ def issue_queryset_grouper(field, queryset):
            ),
        )

    if field == "modules__id":
        return queryset.annotate(modules__id=F("issue_module__module_id")).annotate(
    elif field == "modules__id":
        return queryset.annotate(
            modules__id=F("issue_module__module_id")
        ).annotate(
            label_ids=Coalesce(
                ArrayAgg(
                    "labels__id",

@@ -355,4 +313,66 @@ def issue_queryset_grouper(field, queryset):
                Value([], output_field=ArrayField(UUIDField())),
            ),
        )
    return queryset
    else:
        return queryset.annotate(
            label_ids=Coalesce(
                ArrayAgg(
                    "labels__id",
                    distinct=True,
                    filter=~Q(labels__id__isnull=True),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            module_ids=Coalesce(
                ArrayAgg(
                    "issue_module__module_id",
                    distinct=True,
                    filter=~Q(issue_module__module_id__isnull=True),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            assignee_ids=Coalesce(
                ArrayAgg(
                    "assignees__id",
                    distinct=True,
                    filter=~Q(assignees__id__isnull=True),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
        )


def issue_on_results(issues, group_by):
    required_fields = [
        "id",
        "name",
        "state_id",
        "sort_order",
        "completed_at",
        "estimate_point",
        "priority",
        "start_date",
        "target_date",
        "sequence_id",
        "project_id",
        "parent_id",
        "cycle_id",
        "sub_issues_count",
        "created_at",
        "updated_at",
        "created_by",
        "updated_by",
        "attachment_count",
        "link_count",
        "is_draft",
        "archived_at",
    ]
    if group_by == "assignees__id":
        required_fields.extend(["label_ids", "module_ids", "assignees__id"])
    elif group_by == "labels__id":
        required_fields.extend(["assignee_ids", "module_ids", "labels__id"])
    elif group_by == "modules__id":
        required_fields.extend(["assignee_ids", "label_ids", "modules__id"])
    else:
        required_fields.extend(["assignee_ids", "label_ids", "module_ids"])
    return issues.values(*required_fields)
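The new issue_on_results helper centralizes the field selection that each view used to repeat inline: a fixed base list plus the aggregate id arrays, with the grouped-on field included raw instead of its aggregate. A dict-based rendering of that branching (trimmed base list, stand-alone so it runs without Django):

BASE_FIELDS = ["id", "name", "state_id", "priority", "cycle_id"]  # trimmed for the sketch

def issue_on_results(issues, group_by):
    required = list(BASE_FIELDS)
    if group_by == "assignees__id":
        required += ["label_ids", "module_ids", "assignees__id"]
    elif group_by == "labels__id":
        required += ["assignee_ids", "module_ids", "labels__id"]
    elif group_by == "modules__id":
        required += ["assignee_ids", "label_ids", "modules__id"]
    else:
        required += ["assignee_ids", "label_ids", "module_ids"]
    return [{field: issue.get(field) for field in required} for issue in issues]

print(issue_on_results([{"id": 1, "name": "Bug", "labels__id": "a"}], group_by="labels__id"))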
@@ -16,6 +16,7 @@ STATE_ORDER = [
    "cancelled",
]


def order_issue_queryset(issue_queryset, order_by_param="created_at"):
    # Priority Ordering
    if order_by_param == "priority" or order_by_param == "-priority":

@@ -70,9 +71,7 @@ def order_issue_queryset(issue_queryset, order_by_param="created_at"):
                else order_by_param
            )
        ).order_by(
            "-max_values"
            if order_by_param.startswith("-")
            else "max_values"
            "-max_values" if order_by_param.startswith("-") else "max_values"
        )
    else:
        issue_queryset = issue_queryset.order_by(order_by_param)
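order_issue_queryset special-cases priority ordering (and, per STATE_ORDER, state ordering) because those are ranked enumerations rather than naturally sortable values. A simplified analog of the priority branch over dicts; the real implementation works on the queryset, so treat this only as the idea:

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]

def order_by_priority(issues, descending=False):
    # Map each priority to its rank, then sort on that rank.
    rank = {name: index for index, name in enumerate(PRIORITY_ORDER)}
    ordered = sorted(issues, key=lambda issue: rank.get(issue.get("priority"), len(PRIORITY_ORDER)))
    return list(reversed(ordered)) if descending else ordered

issues = [{"id": 1, "priority": "low"}, {"id": 2, "priority": "urgent"}]
print([issue["id"] for issue in order_by_priority(issues)])  # [2, 1]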
@@ -166,14 +166,12 @@ class GroupedOffsetPaginator(OffsetPaginator):
        self,
        queryset,
        group_by_field_name,
        group_by_fields,
        count_filter,
        *args,
        **kwargs,
    ):
        super().__init__(queryset, *args, **kwargs)
        self.group_by_field_name = group_by_field_name
        self.group_by_fields = group_by_fields
        self.count_filter = count_filter

    def get_result(self, limit=100, cursor=None):

@@ -366,7 +364,6 @@ class BasePaginator:
        cursor_cls=Cursor,
        extra_stats=None,
        controller=None,
        group_by_fields=None,
        group_by_field_name=None,
        count_filter=None,
        **paginator_kwargs,

@@ -384,8 +381,7 @@ class BasePaginator:
            raise ParseError(detail="Invalid cursor parameter.")

        if not paginator:
            if group_by_fields and group_by_field_name:
                paginator_kwargs["group_by_fields"] = group_by_fields
            if group_by_field_name:
                paginator_kwargs["group_by_field_name"] = group_by_field_name
                paginator_kwargs["count_filter"] = count_filter
            paginator = paginator_cls(**paginator_kwargs)

@@ -400,7 +396,7 @@ class BasePaginator:
        if on_results:
            results = on_results(cursor_result.results)

        if group_by_field_name and group_by_fields:
        if group_by_field_name:
            results = paginator.process_results(results=results)

        # Add Manipulation functions to the response
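After this change, BasePaginator.paginate forwards group_by_field_name and count_filter into the paginator kwargs whenever a group field is present, and calls process_results on the page; the grouped path no longer depends on a precomputed group_by_fields list. The toy classes below sketch that division of labor over lists of dicts under those assumptions; the real OffsetPaginator and GroupedOffsetPaginator work on querysets and cursors:

from collections import Counter, defaultdict

class OffsetPaginator:
    def __init__(self, queryset, limit=25):
        self.queryset = list(queryset)
        self.limit = limit

    def get_result(self, offset=0):
        return self.queryset[offset:offset + self.limit]

class GroupedOffsetPaginator(OffsetPaginator):
    def __init__(self, queryset, group_by_field_name, count_filter=None, **kwargs):
        super().__init__(queryset, **kwargs)
        self.group_by_field_name = group_by_field_name
        self.count_filter = count_filter or (lambda row: True)

    def process_results(self, results):
        # Bucket a flat page by the group field, the way grouped responses are shaped.
        grouped = defaultdict(list)
        for row in results:
            grouped[row.get(self.group_by_field_name)].append(row)
        return dict(grouped)

    def group_counts(self):
        # Count only rows passing count_filter, per group value.
        return Counter(
            row.get(self.group_by_field_name)
            for row in self.queryset
            if self.count_filter(row)
        )

issues = [{"id": 1, "labels__id": "a"}, {"id": 2, "labels__id": "b"}, {"id": 3, "labels__id": "a"}]
paginator = GroupedOffsetPaginator(issues, group_by_field_name="labels__id", limit=2)
print(paginator.process_results(paginator.get_result()))  # {'a': [...], 'b': [...]}
print(paginator.group_counts())                           # Counter({'a': 2, 'b': 1})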