feat: analytics (#1018)

* dev: initialize plane analytics

* dev: plane analytics endpoint

* dev: update endpoint to give data with segments as well

* dev: analytics with count and effort parameters

* feat: analytics endpoints

* feat: saved analytics

* dev: remove print logs

* dev: rename x_axis to dimension in response

* dev: remove color queries

* dev: update query for None values

* feat: analytics export

* dev: update code structure, send color when state or label, and fix None count

* dev: uncomment try catch block

* dev: fix segment keyerror

* dev: default analytics endpoint

* dev: fix segmented results

* dev: default analytics endpoint and colors for segment

* dev: total issues and open issues by state

* dev: segment colors

* dev: fix total issue annotate

* dev: effort segmentation

* dev: total estimates and open estimates

* fix: effort when not segmented

* dev: send avatar for default analytics
Author: pablohashescobar
Date: 2023-05-11 16:57:03 +05:30 (committed by GitHub)
Commit: abaa65b4b7 (parent: fb165d080e)
13 changed files with 651 additions and 5 deletions

View File

@@ -70,3 +70,5 @@ from .importer import ImporterSerializer
from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
from .estimate import EstimateSerializer, EstimatePointSerializer, EstimateReadSerializer
from .analytic import AnalyticViewSerializer

View File

@@ -0,0 +1,30 @@
from .base import BaseSerializer
from plane.db.models import AnalyticView
from plane.utils.issue_filters import issue_filters
class AnalyticViewSerializer(BaseSerializer):
class Meta:
model = AnalyticView
fields = "__all__"
read_only_fields = [
"workspace",
"query",
]
def create(self, validated_data):
query_params = validated_data.get("query_dict", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
return AnalyticView.objects.create(**validated_data)
    def update(self, instance, validated_data):
        query_params = validated_data.get("query_dict", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "PATCH")
        else:
            validated_data["query"] = dict()
        return super().update(instance, validated_data)
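For reference, a minimal usage sketch of the serializer above: the client supplies raw filter params in query_dict and create() persists the processed ORM filters in query. The workspace slug, payload values and project UUID are illustrative assumptions.

# Hypothetical usage sketch; slug, payload values and UUIDs are placeholders.
from plane.api.serializers import AnalyticViewSerializer
from plane.db.models import Workspace

workspace = Workspace.objects.get(slug="my-workspace")
payload = {
    "name": "Issues per project",
    "description": "Saved analytics view",
    "query_dict": {"project": ["<project-uuid>"]},  # raw client-side filters
}
serializer = AnalyticViewSerializer(data=payload)
serializer.is_valid(raise_exception=True)
# create() runs issue_filters(query_dict, "POST") and stores the result in `query`
analytic_view = serializer.save(workspace_id=workspace.id)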

View File

@@ -148,6 +148,13 @@ from plane.api.views import (
    # Release Notes
    ReleaseNotesEndpoint,
    ## End Release Notes
    # Analytics
    AnalyticsEndpoint,
    AnalyticViewViewset,
    SavedAnalyticEndpoint,
    ExportAnalyticsEndpoint,
    DefaultAnalyticsEndpoint,
    ## End Analytics
)
@@ -1285,4 +1292,38 @@ urlpatterns = [
        name="release-notes",
    ),
    ## End Release Notes
    # Analytics
    path(
        "workspaces/<str:slug>/analytics/",
        AnalyticsEndpoint.as_view(),
        name="plane-analytics",
    ),
    path(
        "workspaces/<str:slug>/analytic-view/",
        AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/analytic-view/<uuid:pk>/",
        AnalyticViewViewset.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
        SavedAnalyticEndpoint.as_view(),
        name="saved-analytic-view",
    ),
    path(
        "workspaces/<str:slug>/export-analytics/",
        ExportAnalyticsEndpoint.as_view(),
        name="export-analytics",
    ),
    path(
        "workspaces/<str:slug>/default-analytics/",
        DefaultAnalyticsEndpoint.as_view(),
        name="default-analytics",
    ),
    ## End Analytics
]
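For orientation, a sketch of calling the main analytics route above; the host, API prefix, workspace slug, token and parameter values are assumptions for illustration.

# Illustrative request; host, URL prefix, slug and token are placeholders.
import requests

response = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/analytics/",
    params={"x_axis": "state__group", "y_axis": "issue_count", "segment": "labels__name"},
    headers={"Authorization": "Bearer <access-token>"},
)
payload = response.json()
# AnalyticsEndpoint responds with "total", "distribution" and "extras" -> {"colors": ...}
print(payload["total"], payload["distribution"])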

View File

@@ -140,3 +140,11 @@ from .estimate import (
from .release import ReleaseNotesEndpoint
from .analytic import (
    AnalyticsEndpoint,
    AnalyticViewViewset,
    SavedAnalyticEndpoint,
    ExportAnalyticsEndpoint,
    DefaultAnalyticsEndpoint,
)

View File

@@ -0,0 +1,308 @@
# Django imports
from django.db.models import (
Q,
Count,
Sum,
Value,
Case,
When,
FloatField,
Subquery,
OuterRef,
F,
ExpressionWrapper,
)
from django.db.models.functions import ExtractMonth
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from plane.api.views import BaseAPIView, BaseViewSet
from plane.api.permissions import WorkSpaceAdminPermission
from plane.db.models import Issue, AnalyticView, Workspace, State, Label
from plane.api.serializers import AnalyticViewSerializer
from plane.utils.analytics_plot import build_graph_plot
from plane.bgtasks.analytic_plot_export import analytic_export_task
class AnalyticsEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def get(self, request, slug):
try:
x_axis = request.GET.get("x_axis", False)
y_axis = request.GET.get("y_axis", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
project_ids = request.GET.getlist("project")
cycle_ids = request.GET.getlist("cycle")
module_ids = request.GET.getlist("module")
segment = request.GET.get("segment", False)
queryset = Issue.objects.filter(workspace__slug=slug)
if project_ids:
queryset = queryset.filter(project_id__in=project_ids)
if cycle_ids:
queryset = queryset.filter(issue_cycle__cycle_id__in=cycle_ids)
if module_ids:
queryset = queryset.filter(issue_module__module_id__in=module_ids)
total_issues = queryset.count()
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
colors = dict()
if x_axis in ["state__name", "state__group"] or segment in [
"state__name",
"state__group",
]:
if x_axis in ["state__name", "state__group"]:
key = "name" if x_axis == "state__name" else "group"
else:
key = "name" if segment == "state__name" else "group"
colors = (
State.objects.filter(
workspace__slug=slug, project_id__in=project_ids
).values(key, "color")
if project_ids
else State.objects.filter(workspace__slug=slug).values(key, "color")
)
if x_axis in ["labels__name"] or segment in ["labels__name"]:
colors = (
Label.objects.filter(
workspace__slug=slug, project_id__in=project_ids
).values("name", "color")
if project_ids
else Label.objects.filter(workspace__slug=slug).values(
"name", "color"
)
)
return Response(
{
"total": total_issues,
"distribution": distribution,
"extras": {"colors": colors},
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class AnalyticViewViewset(BaseViewSet):
permission_classes = [
WorkSpaceAdminPermission,
]
model = AnalyticView
serializer_class = AnalyticViewSerializer
def perform_create(self, serializer):
workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
serializer.save(workspace_id=workspace.id)
def get_queryset(self):
return self.filter_queryset(
super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
)
class SavedAnalyticEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def get(self, request, slug, analytic_id):
try:
analytic_view = AnalyticView.objects.get(
pk=analytic_id, workspace__slug=slug
)
filter = analytic_view.query
queryset = Issue.objects.filter(**filter)
x_axis = analytic_view.query_dict.get("x_axis", False)
y_axis = analytic_view.query_dict.get("y_axis", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
segment = request.GET.get("segment", False)
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
total_issues = queryset.count()
return Response(
{"total": total_issues, "distribution": distribution},
status=status.HTTP_200_OK,
)
except AnalyticView.DoesNotExist:
return Response(
{"error": "Analytic View Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ExportAnalyticsEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug):
try:
x_axis = request.data.get("x_axis", False)
y_axis = request.data.get("y_axis", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
analytic_export_task.delay(
email=request.user.email, data=request.data, slug=slug
)
return Response(
{
"message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class DefaultAnalyticsEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def get(self, request, slug):
try:
queryset = Issue.objects.filter(workspace__slug=slug)
project_ids = request.GET.getlist("project")
cycle_ids = request.GET.getlist("cycle")
module_ids = request.GET.getlist("module")
if project_ids:
queryset = queryset.filter(project_id__in=project_ids)
if cycle_ids:
queryset = queryset.filter(issue_cycle__cycle_id__in=cycle_ids)
if module_ids:
queryset = queryset.filter(issue_module__module_id__in=module_ids)
total_issues = queryset.count()
total_issues_classified = (
queryset.annotate(state_group=F("state__group"))
.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
open_issues = queryset.filter(
state__group__in=["backlog", "unstarted", "started"]
).count()
open_issues_classified = (
queryset.filter(state__group__in=["backlog", "unstarted", "started"])
.annotate(state_group=F("state__group"))
.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
issue_completed_month_wise = (
queryset.filter(completed_at__isnull=False)
.annotate(month=ExtractMonth("completed_at"))
.values("month")
.annotate(count=Count("*"))
.order_by("month")
)
            most_issue_created_user = (
                queryset.filter(created_by__isnull=False)
                .values("created_by__email", "created_by__avatar")
                .annotate(count=Count("id"))
                .order_by("-count")
            )[:5]
most_issue_closed_user = (
queryset.filter(completed_at__isnull=False, assignees__isnull=False)
.values("assignees__email", "assignees__avatar")
.annotate(count=Count("id"))
.order_by("-count")
)[:5]
pending_issue_user = (
queryset.filter(completed_at__isnull=True)
.values("assignees__email", "assignees__avatar")
.annotate(count=Count("id"))
.order_by("-count")
)
            # Scope the estimate sums to the filtered queryset so project/cycle/module filters apply
            open_estimate_sum = queryset.filter(
                state__group__in=["backlog", "unstarted", "started"]
            ).aggregate(open_estimate_sum=Sum("estimate_point"))["open_estimate_sum"]
            total_estimate_sum = queryset.aggregate(
                total_estimate_sum=Sum("estimate_point")
            )["total_estimate_sum"]
return Response(
{
"total_issues": total_issues,
"total_issues_classified": total_issues_classified,
"open_issues": open_issues,
"open_issues_classified": open_issues_classified,
"issue_completed_month_wise": issue_completed_month_wise,
"most_issue_created_user": most_issue_created_user,
"most_issue_closed_user": most_issue_closed_user,
"pending_issue_user": pending_issue_user,
"open_estimate_sum": open_estimate_sum,
"total_estimate_sum": total_estimate_sum,
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
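A sketch of replaying a saved view through SavedAnalyticEndpoint: x_axis and y_axis are read from the stored query_dict, while segment can still be passed per request. Host, slug, view UUID and token below are placeholders.

# Illustrative request; host, slug, UUID and token are placeholders.
import requests

response = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/saved-analytic-view/"
    "<analytic-view-uuid>/",
    params={"segment": "state__group"},  # x_axis / y_axis come from the saved query_dict
    headers={"Authorization": "Bearer <access-token>"},
)
print(response.json()["total"], response.json()["distribution"])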

View File

@@ -18,10 +18,6 @@ from plane.api.permissions import ProjectEntityPermission
from plane.db.models import (
    IssueView,
    Issue,
    IssueBlocker,
    IssueLink,
    CycleIssue,
    ModuleIssue,
    IssueViewFavorite,
)
from plane.utils.issue_filters import issue_filters

View File

@@ -0,0 +1,138 @@
# Python imports
import csv
import io
# Django imports
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Issue
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
row_mapping = {
"state__name": "State",
"state__group": "State Group",
"labels__name": "Label",
"assignees__email": "Assignee Email",
"start_date": "Start Date",
"target_date": "Due Date",
"completed_at": "Completed At",
"created_at": "Created At",
"issue_count": "Issue Count",
"effort": "Effort",
}
@shared_task
def analytic_export_task(email, data, slug):
try:
filters = issue_filters(data, "POST")
queryset = Issue.objects.filter(**filters, workspace__slug=slug)
x_axis = data.get("x_axis", False)
y_axis = data.get("y_axis", False)
segment = data.get("segment", False)
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
key = "count" if y_axis == "issue_count" else "effort"
if segment:
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
]
segment_zero = []
for item in distribution:
current_dict = distribution.get(item)
for current in current_dict:
segment_zero.append(current.get("segment"))
segment_zero = list(set(segment_zero))
row_zero = row_zero + segment_zero
rows = []
            for item in distribution:
                item_rows = distribution.get(item)
                # Start each row with the x-axis value, then one cell per segment
                generated_row = [item]
                for segment_value in segment_zero:
                    value = [x for x in item_rows if x.get("segment") == segment_value]
                    if len(value):
                        generated_row.append(value[0].get(key))
                    else:
                        generated_row.append("")
                rows.append(tuple(generated_row))
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
else:
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
]
rows = []
for item in distribution:
rows.append(
tuple(
[
item,
distribution.get(item)[0].get("count")
if y_axis == "issue_count"
else distribution.get(item)[0].get("effort"),
]
)
)
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
except Exception as e:
print(e)
capture_exception(e)
return
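For reference, a sketch of the CSV layout the segmented branch aims to produce: a header row with the x-axis label followed by one column per segment value, then one row per dimension value. All values below are illustrative.

# Illustrative rows for x_axis="state__name", y_axis="issue_count", segment="labels__name"
rows = [
    ("State", "bug", "frontend", "backend"),  # row_zero + segment_zero
    ("Backlog", 4, 2, ""),                    # blank cell when a segment has no data
    ("In Progress", 1, "", 3),
]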

View File

@@ -136,7 +136,6 @@ def track_priority(
                    comment=f"{actor.email} updated the priority to {requested_data.get('priority')}",
                )
            )
print(issue_activities)
# Track changes in state of the issue

View File

@@ -67,3 +67,5 @@ from .importer import Importer
from .page import Page, PageBlock, PageFavorite, PageLabel
from .estimate import Estimate, EstimatePoint
from .analytic import AnalyticView

View File

@@ -0,0 +1,25 @@
# Django models
from django.db import models
from django.conf import settings
from .base import BaseModel
class AnalyticView(BaseModel):
workspace = models.ForeignKey(
"db.Workspace", related_name="analytics", on_delete=models.CASCADE
)
name = models.CharField(max_length=255)
description = models.TextField(blank=True)
query = models.JSONField()
query_dict = models.JSONField(default=dict)
class Meta:
verbose_name = "Analytic"
verbose_name_plural = "Analytics"
db_table = "analytic_views"
ordering = ("-created_at",)
def __str__(self):
"""Return name of the analytic view"""
return f"{self.name} <{self.workspace.name}>"

View File

@@ -0,0 +1,57 @@
# Python imports
from itertools import groupby
# Django import
from django.db import models
from django.db.models import Count, DateField, F, Sum, Value, Case, When, Q
from django.db.models.functions import Cast, Concat, Coalesce
def build_graph_plot(queryset, x_axis, y_axis, segment=None):
    # Annotate the grouping field as "dimension" so the rest of the pipeline is uniform
    if x_axis in ["created_at", "completed_at"]:
        queryset = queryset.annotate(dimension=Cast(x_axis, DateField()))
    else:
        queryset = queryset.annotate(dimension=F(x_axis))
    # Drop rows where a date dimension is not set
    if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
        queryset = queryset.exclude(dimension__isnull=True)
    x_axis = "dimension"
    queryset = queryset.values(x_axis)
# Group queryset by x_axis field
if y_axis == "issue_count":
queryset = (
queryset.annotate(
is_null=Case(
When(dimension__isnull=True, then=Value("None")),
default=Value("not_null"),
output_field=models.CharField(max_length=8),
),
dimension_ex=Coalesce("dimension", Value("null")),
)
.values("dimension")
)
if segment:
queryset = queryset.annotate(segment=F(segment)).values("dimension", "segment")
else:
queryset = queryset.values("dimension")
queryset = queryset.annotate(count=Count("*")).order_by("dimension")
if y_axis == "effort":
queryset = queryset.annotate(effort=Sum("estimate_point")).order_by(x_axis)
if segment:
queryset = queryset.annotate(segment=F(segment)).values("dimension", "segment", "effort")
else:
queryset = queryset.values("dimension", "effort")
result_values = list(queryset)
grouped_data = {}
    for date, items in groupby(result_values, key=lambda x: x["dimension"]):
grouped_data[str(date)] = list(items)
return grouped_data
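For reference, a sketch of the structure build_graph_plot returns, assuming x_axis="state__group", y_axis="issue_count" and segment="labels__name"; each key is the stringified dimension value and maps to the grouped rows for that value (counts are made up).

# Illustrative return value of build_graph_plot
distribution = {
    "backlog": [
        {"dimension": "backlog", "segment": "bug", "count": 3},
        {"dimension": "backlog", "segment": "frontend", "count": 1},
    ],
    "started": [
        {"dimension": "started", "segment": "bug", "count": 2},
    ],
}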

View File

@@ -198,6 +198,39 @@ def filter_issue_state_type(params, filter, method):
    return filter
def filter_project(params, filter, method):
if method == "GET":
projects = params.get("project").split(",")
if len(projects) and "" not in projects:
filter["project__in"] = projects
else:
if params.get("project", None) and len(params.get("project")):
filter["project__in"] = params.get("project")
return filter
def filter_cycle(params, filter, method):
if method == "GET":
cycles = params.get("cycle").split(",")
if len(cycles) and "" not in cycles:
filter["cycle__in"] = cycles
else:
if params.get("cycle", None) and len(params.get("cycle")):
filter["issue_cycle__cycle_id__in"] = params.get("cycle")
return filter
def filter_module(params, filter, method):
if method == "GET":
modules = params.get("module").split(",")
if len(modules) and "" not in modules:
filter["module__in"] = modules
else:
if params.get("module", None) and len(params.get("module")):
filter["issue_module__module_id__in"] = params.get("module")
return filter
def issue_filters(query_params, method):
    filter = dict()
@@ -216,6 +249,9 @@ def issue_filters(query_params, method):
        "target_date": filter_target_date,
        "completed_at": filter_completed_at,
        "type": filter_issue_state_type,
        "project": filter_project,
        "cycle": filter_cycle,
        "module": filter_module,
    }
    for key, value in ISSUE_FILTER.items():
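A sketch of how the new project/cycle/module helpers translate incoming params, assuming issue_filters applies a helper only when its key is present; the IDs are placeholders.

from plane.utils.issue_filters import issue_filters

# GET passes comma-separated id strings in the query string
issue_filters({"project": "id-1,id-2"}, "GET")
# -> expected to include {"project__in": ["id-1", "id-2"]}

# POST / PATCH pass lists in the request body
issue_filters({"cycle": ["<cycle-uuid>"]}, "POST")
# -> expected to include {"issue_cycle__cycle_id__in": ["<cycle-uuid>"]}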

View File

@@ -0,0 +1,4 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
Your Export is ready
</html>