dev: fix cycle and module issue

pablohashescobar 2024-03-26 17:08:02 +05:30
parent 4341bce9a8
commit 3a07eae192
5 changed files with 292 additions and 780 deletions

View File

@@ -3,7 +3,6 @@ import json
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.core import serializers
# Django imports
from django.db.models import (
@@ -22,8 +21,6 @@ from django.db.models import (
)
from django.db.models.functions import Coalesce
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
# Third party imports
from rest_framework import status
@@ -35,7 +32,6 @@ from plane.app.permissions import (
)
from plane.app.serializers import (
CycleFavoriteSerializer,
CycleIssueSerializer,
CycleSerializer,
CycleUserPropertiesSerializer,
CycleWriteSerializer,
@@ -47,23 +43,10 @@ from plane.db.models import (
CycleIssue,
CycleUserProperties,
Issue,
IssueAttachment,
IssueLink,
Label,
User,
)
from plane.utils.analytics_plot import burndown_plot
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
@@ -700,329 +683,6 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleIssueViewSet(WebhookMixin, BaseViewSet):
serializer_class = CycleIssueSerializer
model = CycleIssue
webhook_event = "cycle_issue"
bulk = True
permission_classes = [
ProjectEntityPermission,
]
filterset_fields = [
"issue__labels__id",
"issue__assignees__id",
]
def get_queryset(self):
return self.filter_queryset(
super()
.get_queryset()
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("issue_id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")
.select_related("cycle")
.select_related("issue", "issue__state", "issue__project")
.prefetch_related("issue__assignees", "issue__labels")
.distinct()
)
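The count annotations in this queryset (`sub_issues_count` here, `link_count` and `attachment_count` in the list view below) all use the same correlated-subquery idiom: an inner queryset filtered with `OuterRef`, stripped of its ordering, collapsed to a single `Count` via `Func`, and exposed through `.values("count")` so the ORM embeds it as a scalar subquery. A minimal sketch of that idiom in isolation, reusing the models from this diff:

# Sketch of the correlated-count idiom used in get_queryset above.
# Issue and CycleIssue come from this diff; the rest is stock Django ORM.
from django.db.models import F, Func, OuterRef

sub_issue_count = (
    Issue.issue_objects.filter(parent=OuterRef("issue_id"))
    .order_by()                                       # drop default ordering inside the subquery
    .annotate(count=Func(F("id"), function="Count"))  # SELECT COUNT(id)
    .values("count")                                  # single column, usable as a scalar subquery
)
annotated = CycleIssue.objects.annotate(sub_issues_count=sub_issue_count)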
@method_decorator(gzip_page)
def list(self, request, slug, project_id, cycle_id):
order_by_param = request.GET.get("order_by", "created_at")
filters = issue_filters(request.query_params, "GET")
issue_queryset = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.filter(**filters)
.select_related("workspace", "project", "state", "parent")
.prefetch_related(
"assignees",
"labels",
"issue_module__module",
"issue_cycle__cycle",
)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
filters = issue_filters(request.query_params, "GET")
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = issue_queryset.filter(**filters)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=(
"-priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
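Taken together, the branches above define the list protocol: `group_by` alone routes to `GroupedOffsetPaginator`, `group_by` plus a different `sub_group_by` routes to `SubGroupedOffsetPaginator`, equal values are rejected with HTTP 400, and no grouping falls through to plain offset pagination. A condensed sketch of that dispatch (bodies elided; query-param values such as ?group_by=priority&sub_group_by=state are illustrative):

# Condensed sketch of the dispatch implemented by list() above; the paginator
# classes are the ones imported from plane.utils.paginator in this diff.
def choose_paginator(group_by, sub_group_by):
    if group_by and sub_group_by:
        if group_by == sub_group_by:
            raise ValueError("group_by and sub_group_by cannot match")  # view returns HTTP 400
        return SubGroupedOffsetPaginator  # two-level buckets
    if group_by:
        return GroupedOffsetPaginator     # one-level buckets
    return None                           # plain offset pagination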
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])
if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
)
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=cycle_id
)
if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
return Response(
{
"error": "The Cycle has already been completed so no new issues can be added"
},
status=status.HTTP_400_BAD_REQUEST,
)
# Get all CycleIssues already created
cycle_issues = list(
CycleIssue.objects.filter(
~Q(cycle_id=cycle_id), issue_id__in=issues
)
)
existing_issues = [
str(cycle_issue.issue_id) for cycle_issue in cycle_issues
]
new_issues = list(set(issues) - set(existing_issues))
# New issues to create
created_records = CycleIssue.objects.bulk_create(
[
CycleIssue(
project_id=project_id,
workspace_id=cycle.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
cycle_id=cycle_id,
issue_id=issue,
)
for issue in new_issues
],
batch_size=10,
)
# Updated Issues
updated_records = []
update_cycle_issue_activity = []
# Iterate over each cycle_issue in cycle_issues
for cycle_issue in cycle_issues:
# Update the cycle_issue's cycle_id
cycle_issue.cycle_id = cycle_id
# Add the modified cycle_issue to the records_to_update list
updated_records.append(cycle_issue)
# Record the update activity
update_cycle_issue_activity.append(
{
"old_cycle_id": str(cycle_issue.cycle_id),
"new_cycle_id": str(cycle_id),
"issue_id": str(cycle_issue.issue_id),
}
)
# Update the cycle issues
CycleIssue.objects.bulk_update(
updated_records, ["cycle_id"], batch_size=100
)
# Capture Issue Activity
issue_activity.delay(
type="cycle.activity.created",
requested_data=json.dumps({"cycles_list": issues}),
actor_id=str(self.request.user.id),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{
"updated_cycle_issues": update_cycle_issue_activity,
"created_cycle_issues": serializers.serialize(
"json", created_records
),
}
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
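create() is therefore a bulk "add or move": incoming ids already linked to another cycle are re-pointed in one `bulk_update`, the remainder are inserted in one `bulk_create`, and a single `issue_activity` task records both sets. An illustrative client call (host and URL shape are assumptions, not taken from this diff):

# Hypothetical request against the endpoint above; host and URL shape are assumed.
import requests

resp = requests.post(
    "https://plane.example.com/api/workspaces/acme/projects/<project_id>"
    "/cycles/<cycle_id>/cycle-issues/",
    json={"issues": ["<issue-uuid-1>", "<issue-uuid-2>"]},
)
# 201 {"message": "success"} on success; 400 when the issues list is empty
# or the cycle's end_date is already in the past.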
def destroy(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get(
issue_id=issue_id,
workspace__slug=slug,
project_id=project_id,
cycle_id=cycle_id,
)
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(issue_id),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
cycle_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleDateCheckEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,

View File

@@ -2,42 +2,49 @@
import json
# Django imports
from django.db.models import (
Func,
F,
Q,
OuterRef,
Value,
UUIDField,
)
from django.core import serializers
from django.db.models import (
F,
Func,
OuterRef,
Q,
)
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models.functions import Coalesce
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.response import Response
# Module imports
from .. import BaseViewSet, WebhookMixin
from plane.app.permissions import (
ProjectEntityPermission,
)
from plane.app.serializers import (
IssueSerializer,
CycleIssueSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
Cycle,
CycleIssue,
Issue,
IssueLink,
IssueAttachment,
IssueLink,
)
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseViewSet, WebhookMixin
class CycleIssueViewSet(WebhookMixin, BaseViewSet):
@@ -85,14 +92,9 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id, cycle_id):
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
order_by = request.GET.get("order_by", "created_at")
order_by_param = request.GET.get("order_by", "created_at")
filters = issue_filters(request.query_params, "GET")
queryset = (
issue_queryset = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.filter(project_id=project_id)
.filter(workspace__slug=slug)
@@ -104,7 +106,6 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
"issue_module__module",
"issue_cycle__cycle",
)
.order_by(order_by)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
@@ -129,67 +130,124 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
.order_by(order_by)
filters = issue_filters(request.query_params, "GET")
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = issue_queryset.filter(**filters)
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
if self.fields:
issues = IssueSerializer(
queryset, many=True, fields=fields if fields else None
).data
else:
issues = queryset.values(
"id",
"name",
"state_id",
"sort_order",
"completed_at",
"estimate_point",
"priority",
"start_date",
"target_date",
"sequence_id",
"project_id",
"parent_id",
"cycle_id",
"module_ids",
"label_ids",
"assignee_ids",
"sub_issues_count",
"created_at",
"updated_at",
"created_by",
"updated_by",
"attachment_count",
"link_count",
"is_draft",
"archived_at",
# group and sub group pagination
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=(
"-priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])
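The `label_ids`, `assignee_ids`, and `module_ids` annotations in this file repeat one idiom three times: `ArrayAgg` with `distinct=True` plus a not-null filter, wrapped in `Coalesce` so rows with no related objects come back as an empty uuid[] rather than NULL or [None]. The idiom in isolation (PostgreSQL-specific; field paths taken from the diff):

# Standalone version of the ArrayAgg/Coalesce annotation pattern above.
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

queryset = Issue.issue_objects.annotate(
    label_ids=Coalesce(
        ArrayAgg(
            "labels__id",
            distinct=True,                       # collapse duplicate join rows
            filter=~Q(labels__id__isnull=True),  # skip NULLs from LEFT JOIN misses
        ),
        Value([], output_field=ArrayField(UUIDField())),  # default: empty array, not NULL
    )
)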

View File

@@ -457,283 +457,6 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
serializer_class = ModuleIssueSerializer
model = ModuleIssue
webhook_event = "module_issue"
bulk = True
filterset_fields = [
"issue__labels__id",
"issue__assignees__id",
]
permission_classes = [
ProjectEntityPermission,
]
def get_queryset(self):
return (
Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
issue_module__module_id=self.kwargs.get("module_id"),
)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
).distinct()
@method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id):
filters = issue_filters(request.query_params, "GET")
issue_queryset = self.get_queryset().filter(**filters)
order_by_param = request.GET.get("order_by", "created_at")
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=(
"-priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
# create multiple issues inside a module
def create_module_issues(self, request, slug, project_id, module_id):
issues = request.data.get("issues", [])
if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
)
project = Project.objects.get(pk=project_id)
_ = ModuleIssue.objects.bulk_create(
[
ModuleIssue(
issue_id=str(issue),
module_id=module_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for issue in issues
],
batch_size=10,
ignore_conflicts=True,
)
# Bulk Update the activity
_ = [
issue_activity.delay(
type="module.activity.created",
requested_data=json.dumps({"module_id": str(module_id)}),
actor_id=str(request.user.id),
issue_id=str(issue),
project_id=project_id,
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
for issue in issues
]
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
# create multiple module inside an issue
def create_issue_modules(self, request, slug, project_id, issue_id):
modules = request.data.get("modules", [])
if not modules:
return Response(
{"error": "Modules are required"},
status=status.HTTP_400_BAD_REQUEST,
)
project = Project.objects.get(pk=project_id)
_ = ModuleIssue.objects.bulk_create(
[
ModuleIssue(
issue_id=issue_id,
module_id=module,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for module in modules
],
batch_size=10,
ignore_conflicts=True,
)
# Bulk Update the activity
_ = [
issue_activity.delay(
type="module.activity.created",
requested_data=json.dumps({"module_id": module}),
actor_id=str(request.user.id),
issue_id=issue_id,
project_id=project_id,
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
for module in modules
]
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
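create_module_issues and create_issue_modules are mirror images of one shape: an idempotent `bulk_create` (`ignore_conflicts=True` makes re-posting an existing issue-module link a database-level no-op) followed by one queued `issue_activity` task per item. Reduced to its skeleton (a unique constraint over (issue, module) on the link table is assumed):

# Skeleton shared by both create_* helpers above; ModuleIssue comes from this
# diff, and a unique constraint on (issue_id, module_id) is assumed to exist.
links = [
    ModuleIssue(
        issue_id=issue_id,
        module_id=module_id,
        project_id=project_id,
        workspace_id=project.workspace_id,
        created_by=request.user,
        updated_by=request.user,
    )
    for module_id in module_ids
]
# ignore_conflicts silently drops rows that violate the unique constraint
# instead of raising, which is what makes repeated POSTs of the same links safe.
ModuleIssue.objects.bulk_create(links, batch_size=10, ignore_conflicts=True)
# ...then enqueue one issue_activity.delay(...) per link, with the kwargs shown in the diff.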
def destroy(self, request, slug, project_id, module_id, issue_id):
module_issue = ModuleIssue.objects.get(
workspace__slug=slug,
project_id=project_id,
module_id=module_id,
issue_id=issue_id,
)
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps({"module_id": str(module_id)}),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=json.dumps(
{"module_name": module_issue.module.name}
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
module_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleLinkViewSet(BaseViewSet):
permission_classes = [
ProjectEntityPermission,

View File

@@ -1,36 +1,50 @@
# Python imports
import json
from django.db.models import (
F,
Func,
OuterRef,
Q,
)
# Django Imports
from django.utils import timezone
from django.db.models import F, OuterRef, Func, Q
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Value, UUIDField
from django.db.models.functions import Coalesce
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.response import Response
# Module imports
from .. import BaseViewSet, WebhookMixin
from plane.app.permissions import (
ProjectEntityPermission,
)
from plane.app.serializers import (
ModuleIssueSerializer,
IssueSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
ModuleIssue,
Project,
Issue,
IssueLink,
IssueAttachment,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
Issue,
IssueAttachment,
IssueLink,
ModuleIssue,
Project,
)
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
@@ -80,76 +94,127 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
).distinct()
@method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id):
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
filters = issue_filters(request.query_params, "GET")
issue_queryset = self.get_queryset().filter(**filters)
if self.fields or self.expand:
issues = IssueSerializer(
issue_queryset, many=True, fields=fields if fields else None
).data
else:
issues = issue_queryset.values(
"id",
"name",
"state_id",
"sort_order",
"completed_at",
"estimate_point",
"priority",
"start_date",
"target_date",
"sequence_id",
"project_id",
"parent_id",
"cycle_id",
"module_ids",
"label_ids",
"assignee_ids",
"sub_issues_count",
"created_at",
"updated_at",
"created_by",
"updated_by",
"attachment_count",
"link_count",
"is_draft",
"archived_at",
order_by_param = request.GET.get("order_by", "created_at")
# Issue queryset
issue_queryset = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=(
"priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=(
"-priority_order"
if order_by_param in ["priority", "-priority"]
else order_by_param
),
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
return Response(issues, status=status.HTTP_200_OK)
# create multiple issues inside a module
def create_module_issues(self, request, slug, project_id, module_id):

View File

@@ -332,6 +332,12 @@ class GroupedOffsetPaginator(OffsetPaginator):
}
for group_id, issues in grouped_by_field_name.items()
}
# ordering
for group_value, data in processed_results.items():
data["results"].sort(
key=lambda x: x.get("created_at"), reverse=True
)
return processed_results
def __query_grouper(self, results):
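The lines added to `GroupedOffsetPaginator` post-sort every group bucket by `created_at`, newest first, since the grouping pass does not preserve the queryset's ordering. The same behavior in isolation:

# Standalone illustration of the per-group sort added above
# (dict shapes are illustrative, not the paginator's exact types).
processed_results = {
    "urgent": {"results": [
        {"id": 1, "created_at": "2024-03-20"},
        {"id": 2, "created_at": "2024-03-25"},
    ]},
    "high": {"results": [
        {"id": 3, "created_at": "2024-03-18"},
    ]},
}
for group_value, data in processed_results.items():
    data["results"].sort(key=lambda x: x.get("created_at"), reverse=True)
# "urgent" now yields id=2 (2024-03-25) before id=1 (2024-03-20).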