Merge pull request #302 from makeplane/feat/issue_sorting_grouping
feat: updated issue grouping and filtering
commit a904c4a7de
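For orientation, the list endpoint touched below reads three query parameters: type (state-group filter), group_by (grouping key) and order_by (sort field, defaulting to created_at). A minimal client-side sketch, assuming a hypothetical base URL, route, token and field values, since none of those appear in this diff:

import requests

# All of the following values are illustrative assumptions; only the query
# parameter names (type, group_by, order_by) come from the view code below.
BASE_URL = "https://plane.example.com/api"
HEADERS = {"Authorization": "Bearer <api-token>"}

params = {
    "type": "active",          # keep only the "unstarted"/"started" state groups
    "group_by": "state",       # key used to bucket the serialized issues
    "order_by": "sort_order",  # falls back to "created_at" when omitted
}

response = requests.get(
    f"{BASE_URL}/workspaces/<slug>/projects/<project-id>/issues/",
    headers=HEADERS,
    params=params,
)
# With group_by set, the payload is a dict keyed by group value
# (see the group_results utility added in this commit).
grouped = response.json()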
@@ -1,8 +1,9 @@
# All the python scripts that are used for back migrations
import uuid
import random
from django.contrib.auth.hashers import make_password
from plane.db.models import ProjectIdentifier
from plane.db.models import Issue, IssueComment, User
from django.contrib.auth.hashers import make_password


# Update description and description html values for old descriptions
@@ -79,3 +80,19 @@ def update_user_empty_password():
    except Exception as e:
        print(e)
        print("Failed")


def updated_issue_sort_order():
    try:
        issues = Issue.objects.all()
        updated_issues = []

        for issue in issues:
            issue.sort_order = issue.sequence_id * random.randint(100, 500)
            updated_issues.append(issue)

        Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
        print("Success")
    except Exception as e:
        print(e)
        print("Failed")
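A brief, hedged note on how a back-migration helper like updated_issue_sort_order is usually run; the import path below is an assumption, since the script's module name is not shown in this diff:

# From a Django shell (`python manage.py shell`); adjust the import to the
# actual location of this back-migration script in the repository.
from back_migration import updated_issue_sort_order

# Assigns sort_order = sequence_id * random.randint(100, 500) to every issue
# and bulk-updates the rows in batches of 100, printing "Success" or "Failed".
updated_issue_sort_order()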
@@ -42,6 +42,7 @@ from plane.db.models import (
    IssueLink,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results


class IssueViewSet(BaseViewSet):
@@ -145,51 +146,33 @@ class IssueViewSet(BaseViewSet):
            )
        )

    def grouper(self, issue, group_by):
        group_by = issue.get(group_by, "")

        if isinstance(group_by, list):
            if len(group_by):
                return group_by[0]
            else:
                return ""

        else:
            return group_by

    def list(self, request, slug, project_id):
        try:
            issue_queryset = self.get_queryset()
            # Issue State groups
            type = request.GET.get("type", "all")
            group = ["backlog", "unstarted", "started", "completed", "cancelled"]
            if type == "backlog":
                group = ["backlog"]
            if type == "active":
                group = ["unstarted", "started"]

            ## Grouping the results
            group_by = request.GET.get("group_by", False)
            # TODO: Move this group by from ittertools to ORM for better performance - nk
            if group_by:
                issue_dict = dict()
            issue_queryset = (
                self.get_queryset()
                .order_by(request.GET.get("order_by", "created_at"))
                .filter(state__group__in=group)
            )

            issues = IssueSerializer(issue_queryset, many=True).data

                for key, value in groupby(
                    issues, lambda issue: self.grouper(issue, group_by)
                ):
                    issue_dict[str(key)] = list(value)

                return Response(issue_dict, status=status.HTTP_200_OK)

            ## Grouping the results
            group_by = request.GET.get("group_by", False)
            if group_by:
                return Response(
                    {
                        "next_cursor": str(0),
                        "prev_cursor": str(0),
                        "next_page_results": False,
                        "prev_page_results": False,
                        "count": issue_queryset.count(),
                        "total_pages": 1,
                        "extra_stats": {},
                        "results": IssueSerializer(issue_queryset, many=True).data,
                    },
                    status=status.HTTP_200_OK,
                    group_results(issues, group_by), status=status.HTTP_200_OK
                )

            return Response(issues, status=status.HTTP_200_OK)

        except Exception as e:
            print(e)
            capture_exception(e)
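The TODO above suggests moving the grouping from itertools into the ORM. As a hedged sketch only (not part of this change), one direction is to let the database aggregate per group; this yields counts per group value rather than fully serialized rows, so it would complement group_results rather than replace it:

from django.db.models import Count

def issue_counts_by_group(issue_queryset, group_by):
    # group_by must be a model field or related lookup, e.g. "state" or
    # "labels"; the choice of field here is an assumption for illustration.
    return (
        issue_queryset
        .values(group_by)              # one row per distinct group value
        .annotate(count=Count("id"))   # counted in SQL, not in Python
        .order_by(group_by)
    )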
@@ -69,16 +69,6 @@ class Issue(ProjectBaseModel):

    def save(self, *args, **kwargs):
        # This means that the model isn't saved to the database yet
        if self._state.adding:
            # Get the maximum display_id value from the database

            last_id = IssueSequence.objects.filter(project=self.project).aggregate(
                largest=models.Max("sequence")
            )["largest"]
            # aggregate can return None! Check it first.
            # If it isn't none, just use the last ID specified (which should be the greatest) and add one to it
            if last_id is not None:
                self.sequence_id = last_id + 1
        if self.state is None:
            try:
                from plane.db.models import State
@@ -109,6 +99,23 @@ class Issue(ProjectBaseModel):

            except ImportError:
                pass
        if self._state.adding:
            # Get the maximum display_id value from the database

            last_id = IssueSequence.objects.filter(project=self.project).aggregate(
                largest=models.Max("sequence")
            )["largest"]
            # aggregate can return None! Check it first.
            # If it isn't none, just use the last ID specified (which should be the greatest) and add one to it
            if last_id is not None:
                self.sequence_id = last_id + 1

            largest_sort_order = Issue.objects.filter(
                project=self.project, state=self.state
            ).aggregate(largest=models.Max("sort_order"))["largest"]
            if largest_sort_order is not None:
                self.sort_order = largest_sort_order + 10000

        # Strip the html tags using html parser
        self.description_stripped = (
            None
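A short aside on the +10000 step in the save() hunk above; the rationale is an inference, not something stated in the diff: spacing new issues' sort_order values far apart leaves room to reorder an issue between two neighbours later without renumbering the rest, for example by averaging.

# Illustrative numbers only: how a widely spaced sort_order allows cheap
# reordering between two existing issues.
issue_a_sort_order = 10000
issue_b_sort_order = 20000

# Drop another issue between A and B by averaging their sort_order values;
# neither A nor B has to be rewritten.
moved_sort_order = (issue_a_sort_order + issue_b_sort_order) / 2  # 15000.0
print(moved_sort_order)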
apiserver/plane/utils/grouper.py (new file, 31 additions)
@@ -0,0 +1,31 @@
def group_results(results_data, group_by):
    """
    Utility function to group data into a given attribute.
    Function can group attributes of string and list type.
    """
    response_dict = dict()

    for value in results_data:
        group_attribute = value.get(group_by, None)
        if isinstance(group_attribute, list):
            if len(group_attribute):
                for attrib in group_attribute:
                    if str(attrib) in response_dict:
                        response_dict[str(attrib)].append(value)
                    else:
                        response_dict[str(attrib)] = []
                        response_dict[str(attrib)].append(value)
            else:
                if str(None) in response_dict:
                    response_dict[str(None)].append(value)
                else:
                    response_dict[str(None)] = []
                    response_dict[str(None)].append(value)
        else:
            if str(group_attribute) in response_dict:
                response_dict[str(group_attribute)].append(value)
            else:
                response_dict[str(group_attribute)] = []
                response_dict[str(group_attribute)].append(value)

    return response_dict
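A small usage sketch of group_results with hand-written dicts (the field names and values are arbitrary sample data), exercising both the string branch and the list branch, including the empty-list fallback to the "None" bucket:

issues = [
    {"id": 1, "state": "backlog", "labels": ["bug"]},
    {"id": 2, "state": "started", "labels": []},
    {"id": 3, "state": "backlog", "labels": ["bug", "ui"]},
]

print(group_results(issues, "state"))
# {"backlog": [<issue 1>, <issue 3>], "started": [<issue 2>]}

print(group_results(issues, "labels"))
# {"bug": [<issue 1>, <issue 3>], "None": [<issue 2>], "ui": [<issue 3>]}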