chore: burn down chart analytics

NarayanBavisetti 2024-06-04 12:24:21 +05:30
parent 0a72adc2a6
commit 7bf8b351bc
6 changed files with 121 additions and 36 deletions

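The commit threads an optional plot_type query parameter ("issues" in most handlers, with "points" selecting estimate-based charts) from the cycle and module endpoints down into burndown_plot, so the burndown data can be computed from either issue counts or estimate points. A minimal sketch of how a client might request the points variant; the host, URL path, and response key are illustrative assumptions, not taken from this diff:

import requests

# Hypothetical request: only the plot_type parameter is introduced by this commit;
# the host, path, and "distribution" key below are placeholders, and auth is omitted.
response = requests.get(
    "https://plane.example.com/api/workspaces/acme/projects/42/cycles/7/",
    params={"plot_type": "points"},  # omit or send "issues" for the count-based chart
)
chart = response.json().get("distribution")  # e.g. {"2024-06-04": 12, ...}; key name assumed
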
View File

@@ -784,6 +784,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
def post(self, request, slug, project_id, cycle_id):
new_cycle_id = request.data.get("new_cycle_id", False)
plot_type = request.GET.get("plot_type", "issues")
if not new_cycle_id:
return Response(
@@ -865,6 +866,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
queryset=old_cycle.first(),
slug=slug,
project_id=project_id,
plot_type=plot_type,
cycle_id=cycle_id,
)

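The transfer endpoint above only applies a default of "issues"; it does not reject unknown values. A small validation helper in the same spirit, shown as an illustrative sketch rather than code from this commit (the helper name and the strict whitelist are assumptions):

# Illustrative query-parameter guard; the endpoints in this commit simply default
# to "issues" and pass the raw value through to burndown_plot.
from rest_framework.exceptions import ValidationError

ALLOWED_PLOT_TYPES = {"issues", "points"}

def get_plot_type(request, default="issues"):
    plot_type = request.GET.get("plot_type", default)  # same lookup as above
    if plot_type not in ALLOWED_PLOT_TYPES:
        raise ValidationError({"plot_type": "must be 'issues' or 'points'"})
    return plot_type
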
View File

@@ -177,6 +177,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
)
def get(self, request, slug, project_id, pk=None):
plot_type = request.GET.get("plot_type", "issues")
if pk is None:
queryset = (
self.get_queryset()
@@ -375,6 +376,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
queryset=queryset,
slug=slug,
project_id=project_id,
plot_type=plot_type,
cycle_id=pk,
)

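Like the other handlers, this endpoint forwards plot_type into burndown_plot. The points chart is only meaningful when the project has a points-type estimate configured, which burndown_plot checks with the Estimate filter shown later in this commit; a caller-side version of the same check, as a sketch (the helper itself is not part of the commit):

# Mirrors the Estimate.objects.filter(..., type="points").exists() check added in
# burndown_plot; this helper is illustrative only.
from plane.db.models import Estimate

def project_uses_point_estimates(slug, project_id):
    return Estimate.objects.filter(
        workspace__slug=slug, project_id=project_id, type="points"
    ).exists()
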
View File

@@ -203,6 +203,7 @@ class CycleViewSet(BaseViewSet):
def list(self, request, slug, project_id):
queryset = self.get_queryset().filter(archived_at__isnull=True)
plot_type = request.GET.get("plot_type", "issues")
cycle_view = request.GET.get("cycle_view", "all")
# Update the order by
@@ -334,6 +335,7 @@ class CycleViewSet(BaseViewSet):
queryset=queryset.first(),
slug=slug,
project_id=project_id,
plot_type=plot_type,
cycle_id=data[0]["id"],
)
)
@@ -523,6 +525,7 @@ class CycleViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk):
plot_type = request.GET.get("plot_type", "issues")
queryset = (
self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk)
)
@@ -677,6 +680,7 @@ class CycleViewSet(BaseViewSet):
queryset=queryset,
slug=slug,
project_id=project_id,
plot_type=plot_type,
cycle_id=pk,
)
@@ -793,6 +797,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
def post(self, request, slug, project_id, cycle_id):
new_cycle_id = request.data.get("new_cycle_id", False)
plot_type = request.GET.get("plot_type", "issues")
if not new_cycle_id:
return Response(
@@ -874,6 +879,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
queryset=old_cycle.first(),
slug=slug,
project_id=project_id,
plot_type=plot_type,
cycle_id=cycle_id,
)

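In the list and retrieve handlers above the chart is computed for the single cycle being returned (queryset.first(), cycle_id=data[0]["id"]); how the result is keyed into the response body is not visible in this diff. A sketch of such a call site, with the "distribution" key, the helper, and the import path as placeholders:

# Placeholder helper showing the call shape; burndown_plot and its keyword
# arguments come from the diff, everything else here is assumed.
from plane.utils.analytics_plot import burndown_plot  # import path assumed

def attach_burndown(cycle_payload, cycle, slug, project_id, plot_type):
    cycle_payload["distribution"] = burndown_plot(
        queryset=cycle,               # annotated cycle object, as resolved by the view
        slug=slug,
        project_id=project_id,
        plot_type=plot_type,          # "issues" or "points"
        cycle_id=cycle_payload["id"],
    )
    return cycle_payload
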
View File

@@ -165,6 +165,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
)
def get(self, request, slug, project_id, pk=None):
plot_type = request.GET.get("plot_type", "issues")
if pk is None:
queryset = self.get_queryset()
modules = queryset.values( # Required fields
@@ -323,6 +324,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
queryset=modules,
slug=slug,
project_id=project_id,
plot_type=plot_type,
module_id=pk,
)

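This endpoint passes plot_type together with module_id into burndown_plot, whose module branch (below) queries completed work via issue_module__module_id while its up-front estimate-point total is written against a cycle id. Purely for illustration, a module-scoped total using the same fields would look like the following sketch; the helper is not part of the commit:

# Module-scoped sum of estimate points, mirroring the values_list/int/sum pattern
# used for cycles in burndown_plot; illustrative only.
from plane.db.models import Issue

def module_estimate_points(slug, project_id, module_id):
    values = Issue.issue_objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        issue_module__module_id=module_id,
        estimate_point__isnull=False,
    ).values_list("estimate_point__value", flat=True)
    return sum(int(value) for value in values)
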
View File

@@ -299,6 +299,7 @@ class ModuleViewSet(BaseViewSet):
return Response(modules, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, pk):
plot_type = request.GET.get("plot_type", "burndown")
queryset = (
self.get_queryset()
.filter(archived_at__isnull=True)
@@ -421,6 +422,7 @@ class ModuleViewSet(BaseViewSet):
queryset=modules,
slug=slug,
project_id=project_id,
plot_type=plot_type,
module_id=pk,
)

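Note that this retrieve handler defaults plot_type to "burndown" while the other endpoints in this commit default to "issues"; since burndown_plot only special-cases the exact string "issues", any other value falls through to the estimate-points branch. A defensive normalization sketch, not part of the commit:

# Collapse anything that is not explicitly "points" onto the issue-count chart;
# the helper name and behaviour are assumptions, shown for illustration.
def normalize_plot_type(raw):
    return "points" if raw == "points" else "issues"

print(normalize_plot_type("burndown"))  # -> "issues"
print(normalize_plot_type("points"))    # -> "points"
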
View File

@@ -4,7 +4,15 @@ from itertools import groupby
# Django import
from django.db import models
from django.db.models import Case, CharField, Count, F, Sum, Value, When
from django.db.models import (
Case,
CharField,
Count,
F,
Sum,
Value,
When,
)
from django.db.models.functions import (
Coalesce,
Concat,
@@ -15,7 +23,7 @@ from django.db.models.functions import (
from django.utils import timezone
# Module imports
from plane.db.models import Issue
from plane.db.models import Issue, Estimate
def annotate_with_monthly_dimension(queryset, field_name, attribute):
@@ -110,9 +118,30 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
return sort_data(grouped_data, temp_axis)
def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
def burndown_plot(
queryset,
slug,
project_id,
plot_type,
cycle_id=None,
module_id=None,
):
# Total Issues in Cycle or Module
total_issues = queryset.total_issues
# check whether the estimate is a point or not
estimate_type = Estimate.objects.filter(
workspace__slug=slug, project_id=project_id, type="points"
).exists()
if estimate_type and plot_type == "points":
issue_estimates = Issue.objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_cycle__cycle_id=cycle_id,
estimate_point__isnull=False,
).values_list("estimate_point__value", flat=True)
issue_estimates = [int(value) for value in issue_estimates]
total_estimate_points = sum(issue_estimates)
if cycle_id:
if queryset.end_date and queryset.start_date:
@@ -128,18 +157,32 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
chart_data = {str(date): 0 for date in date_range}
completed_issues_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_cycle__cycle_id=cycle_id,
if plot_type == "issues":
completed_issues_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_cycle__cycle_id=cycle_id,
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.annotate(total_completed=Count("id"))
.values("date", "total_completed")
.order_by("date")
)
else:
completed_issues_estimate_point_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_cycle__cycle_id=cycle_id,
estimate_point__isnull=False,
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.values("date", "estimate_point__value")
.order_by("date")
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.annotate(total_completed=Count("id"))
.values("date", "total_completed")
.order_by("date")
)
if module_id:
# Get all dates between the two dates
@@ -152,31 +195,59 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
chart_data = {str(date): 0 for date in date_range}
completed_issues_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_module__module_id=module_id,
if plot_type == "issues":
completed_issues_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_module__module_id=module_id,
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.annotate(total_completed=Count("id"))
.values("date", "total_completed")
.order_by("date")
)
else:
completed_issues_estimate_point_distribution = (
Issue.issue_objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_module__module_id=module_id,
estimate_point__isnull=False,
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.values("date", "estimate_point__value")
.order_by("date")
)
.annotate(date=TruncDate("completed_at"))
.values("date")
.annotate(total_completed=Count("id"))
.values("date", "total_completed")
.order_by("date")
)
for date in date_range:
cumulative_pending_issues = total_issues
total_completed = 0
total_completed = sum(
item["total_completed"]
for item in completed_issues_distribution
if item["date"] is not None and item["date"] <= date
)
cumulative_pending_issues -= total_completed
if date > timezone.now().date():
chart_data[str(date)] = None
if plot_type == "issues":
cumulative_pending_issues = total_issues
total_completed = 0
total_completed = sum(
item["total_completed"]
for item in completed_issues_distribution
if item["date"] is not None and item["date"] <= date
)
cumulative_pending_issues -= total_completed
if date > timezone.now().date():
chart_data[str(date)] = None
else:
chart_data[str(date)] = cumulative_pending_issues
else:
chart_data[str(date)] = cumulative_pending_issues
cumulative_pending_issues = total_estimate_points
total_completed = 0
total_completed = sum(
int(item["estimate_point__value"])
for item in completed_issues_estimate_point_distribution
if item["date"] is not None and item["date"] <= date
)
cumulative_pending_issues -= total_completed
if date > timezone.now().date():
chart_data[str(date)] = None
else:
chart_data[str(date)] = cumulative_pending_issues
return chart_data
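For each date in the range, the loop above subtracts the cumulative completed work (issue counts or estimate points, depending on plot_type) from the starting total and blanks out dates in the future. A standalone, ORM-free sketch of the points variant with made-up values:

# Pure-Python version of the points accumulation; the dates and point values are
# invented, and the function is illustrative rather than code from the commit.
from datetime import date, timedelta

def points_burndown(total_points, completions, start, end, today):
    # completions: list of (completed_date, points) pairs; None dates are skipped,
    # and dates after `today` map to None, matching chart_data above.
    chart = {}
    for offset in range((end - start).days + 1):
        day = start + timedelta(days=offset)
        done = sum(points for d, points in completions if d is not None and d <= day)
        chart[str(day)] = None if day > today else total_points - done
    return chart

print(points_burndown(
    total_points=20,
    completions=[(date(2024, 6, 3), 5), (date(2024, 6, 4), 3)],
    start=date(2024, 6, 1),
    end=date(2024, 6, 7),
    today=date(2024, 6, 4),
))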