diff --git a/.env.example b/.env.example
index 118a94883..727ea0806 100644
--- a/.env.example
+++ b/.env.example
@@ -1,20 +1,41 @@
-# Replace with your instance Public IP
+# Frontend
+# Extra image domains that need to be added for Next Image
NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
+# Google Client ID for Google OAuth
NEXT_PUBLIC_GOOGLE_CLIENTID=""
-NEXT_PUBLIC_GITHUB_APP_NAME=""
+# GitHub Client ID for GitHub OAuth
NEXT_PUBLIC_GITHUB_ID=""
+# Github App Name for GitHub Integration
+NEXT_PUBLIC_GITHUB_APP_NAME=""
+# Sentry DSN for error monitoring
NEXT_PUBLIC_SENTRY_DSN=""
+# Enable/Disable OAuth - default 0 for self-hosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
+# Enable/Disable sentry
NEXT_PUBLIC_ENABLE_SENTRY=0
+# Enable/Disable session recording
NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
+# Enable/Disable event tracking
NEXT_PUBLIC_TRACK_EVENTS=0
+# Slack Client ID for Slack Integration
NEXT_PUBLIC_SLACK_CLIENT_ID=""
+
+# Backend
+# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+
+# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID=""
AWS_SECRET_ACCESS_KEY=""
AWS_S3_BUCKET_NAME=""
+AWS_S3_ENDPOINT_URL=""
+
+# GPT settings
OPENAI_API_KEY=""
-GPT_ENGINE=""
\ No newline at end of file
+GPT_ENGINE=""
+
+# Auto generated and Required
\ No newline at end of file
diff --git a/README.md b/README.md
index 13cc060c1..879be2c56 100644
--- a/README.md
+++ b/README.md
@@ -15,11 +15,18 @@
+
+
## 📚Documentation
@@ -131,7 +159,7 @@ To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/m
The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
-To chat with other community members you can join the [Plane Discord](https://discord.com/invite/q9HKAdau).
+To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py
index 0f272755f..c04ee7771 100644
--- a/apiserver/back_migration.py
+++ b/apiserver/back_migration.py
@@ -204,7 +204,21 @@ def update_integration_verified():
Integration.objects.bulk_update(
updated_integrations, ["verified"], batch_size=10
)
- print("Sucess")
+ print("Success")
+ except Exception as e:
+ print(e)
+ print("Failed")
+
+
+def update_start_date():
+ try:
+ issues = Issue.objects.filter(state__group__in=["started", "completed"])
+ updated_issues = []
+ for issue in issues:
+ issue.start_date = issue.created_at.date()
+ updated_issues.append(issue)
+ Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
+ print("Success")
except Exception as e:
print(e)
print("Failed")
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index 79014c53d..505a9978d 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -70,3 +70,5 @@ from .importer import ImporterSerializer
from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
from .estimate import EstimateSerializer, EstimatePointSerializer, EstimateReadSerializer
+
+from .analytic import AnalyticViewSerializer
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py
new file mode 100644
index 000000000..5f35e1117
--- /dev/null
+++ b/apiserver/plane/api/serializers/analytic.py
@@ -0,0 +1,30 @@
+from .base import BaseSerializer
+from plane.db.models import AnalyticView
+from plane.utils.issue_filters import issue_filters
+
+
+class AnalyticViewSerializer(BaseSerializer):
+ class Meta:
+ model = AnalyticView
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "query",
+ ]
+
+ def create(self, validated_data):
+ query_params = validated_data.get("query_dict", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = dict()
+ return AnalyticView.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        # Read the same "query_dict" key used by create(); was "query_data".
+        query_params = validated_data.get("query_dict", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "PATCH")
+        else:
+            validated_data["query"] = dict()
+        return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index d6d281357..5c1c68fb8 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -19,7 +19,10 @@ class CycleSerializer(BaseSerializer):
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
- assignees = serializers.SerializerMethodField()
+ assignees = serializers.SerializerMethodField(read_only=True)
+ total_estimates = serializers.IntegerField(read_only=True)
+ completed_estimates = serializers.IntegerField(read_only=True)
+ started_estimates = serializers.IntegerField(read_only=True)
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index bf06b3fa2..0a8ad1cf8 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -82,6 +82,9 @@ class ProjectDetailSerializer(BaseSerializer):
default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
class Meta:
model = Project
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index a88744b4a..93af9d762 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -148,6 +148,13 @@ from plane.api.views import (
# Release Notes
ReleaseNotesEndpoint,
## End Release Notes
+ # Analytics
+ AnalyticsEndpoint,
+ AnalyticViewViewset,
+ SavedAnalyticEndpoint,
+ ExportAnalyticsEndpoint,
+ DefaultAnalyticsEndpoint,
+ ## End Analytics
)
@@ -308,7 +315,6 @@ urlpatterns = [
"workspaces//members//",
WorkSpaceMemberViewSet.as_view(
{
- "put": "update",
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
@@ -418,7 +424,6 @@ urlpatterns = [
ProjectMemberViewSet.as_view(
{
"get": "retrieve",
- "put": "update",
"patch": "partial_update",
"delete": "destroy",
}
@@ -1285,4 +1290,38 @@ urlpatterns = [
name="release-notes",
),
## End Release Notes
+ # Analytics
+ path(
+ "workspaces//analytics/",
+ AnalyticsEndpoint.as_view(),
+ name="plane-analytics",
+ ),
+ path(
+ "workspaces//analytic-view/",
+ AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//analytic-view//",
+ AnalyticViewViewset.as_view(
+ {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
+ ),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//saved-analytic-view//",
+ SavedAnalyticEndpoint.as_view(),
+ name="saved-analytic-view",
+ ),
+ path(
+ "workspaces//export-analytics/",
+ ExportAnalyticsEndpoint.as_view(),
+ name="export-analytics",
+ ),
+ path(
+ "workspaces//default-analytics/",
+ DefaultAnalyticsEndpoint.as_view(),
+ name="default-analytics",
+ ),
+ ## End Analytics
]
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 536fd83bf..65554f529 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -140,3 +140,11 @@ from .estimate import (
from .release import ReleaseNotesEndpoint
+
+from .analytic import (
+ AnalyticsEndpoint,
+ AnalyticViewViewset,
+ SavedAnalyticEndpoint,
+ ExportAnalyticsEndpoint,
+ DefaultAnalyticsEndpoint,
+)
diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py
new file mode 100644
index 000000000..56ca12bae
--- /dev/null
+++ b/apiserver/plane/api/views/analytic.py
@@ -0,0 +1,295 @@
+# Django imports
+from django.db.models import (
+ Count,
+ Sum,
+ F,
+)
+from django.db.models.functions import ExtractMonth
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.api.views import BaseAPIView, BaseViewSet
+from plane.api.permissions import WorkSpaceAdminPermission
+from plane.db.models import Issue, AnalyticView, Workspace, State, Label
+from plane.api.serializers import AnalyticViewSerializer
+from plane.utils.analytics_plot import build_graph_plot
+from plane.bgtasks.analytic_plot_export import analytic_export_task
+from plane.utils.issue_filters import issue_filters
+
+
+class AnalyticsEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkSpaceAdminPermission,
+ ]
+
+ def get(self, request, slug):
+ try:
+ x_axis = request.GET.get("x_axis", False)
+ y_axis = request.GET.get("y_axis", False)
+
+ if not x_axis or not y_axis:
+ return Response(
+ {"error": "x-axis and y-axis dimensions are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ segment = request.GET.get("segment", False)
+ filters = issue_filters(request.GET, "GET")
+
+ queryset = Issue.objects.filter(workspace__slug=slug, **filters)
+
+ total_issues = queryset.count()
+ distribution = build_graph_plot(
+ queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+ )
+
+ colors = dict()
+ if x_axis in ["state__name", "state__group"] or segment in [
+ "state__name",
+ "state__group",
+ ]:
+ if x_axis in ["state__name", "state__group"]:
+ key = "name" if x_axis == "state__name" else "group"
+ else:
+ key = "name" if segment == "state__name" else "group"
+
+ colors = (
+ State.objects.filter(
+ workspace__slug=slug, project_id__in=filters.get("project__in")
+ ).values(key, "color")
+ if filters.get("project__in", False)
+ else State.objects.filter(workspace__slug=slug).values(key, "color")
+ )
+
+ if x_axis in ["labels__name"] or segment in ["labels__name"]:
+ colors = (
+ Label.objects.filter(
+ workspace__slug=slug, project_id__in=filters.get("project__in")
+ ).values("name", "color")
+ if filters.get("project__in", False)
+ else Label.objects.filter(workspace__slug=slug).values(
+ "name", "color"
+ )
+ )
+
+ assignee_details = {}
+ if x_axis in ["assignees__email"] or segment in ["assignees__email"]:
+ assignee_details = (
+ Issue.objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
+ .order_by("assignees__id")
+ .distinct("assignees__id")
+ .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name")
+ )
+
+
+ return Response(
+ {
+ "total": total_issues,
+ "distribution": distribution,
+ "extras": {"colors": colors, "assignee_details": assignee_details},
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class AnalyticViewViewset(BaseViewSet):
+ permission_classes = [
+ WorkSpaceAdminPermission,
+ ]
+ model = AnalyticView
+ serializer_class = AnalyticViewSerializer
+
+ def perform_create(self, serializer):
+ workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
+ serializer.save(workspace_id=workspace.id)
+
+ def get_queryset(self):
+ return self.filter_queryset(
+ super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
+ )
+
+
+class SavedAnalyticEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkSpaceAdminPermission,
+ ]
+
+ def get(self, request, slug, analytic_id):
+ try:
+ analytic_view = AnalyticView.objects.get(
+ pk=analytic_id, workspace__slug=slug
+ )
+
+ filter = analytic_view.query
+ queryset = Issue.objects.filter(**filter)
+
+ x_axis = analytic_view.query_dict.get("x_axis", False)
+ y_axis = analytic_view.query_dict.get("y_axis", False)
+
+ if not x_axis or not y_axis:
+ return Response(
+ {"error": "x-axis and y-axis dimensions are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ segment = request.GET.get("segment", False)
+ distribution = build_graph_plot(
+ queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+ )
+ total_issues = queryset.count()
+ return Response(
+ {"total": total_issues, "distribution": distribution},
+ status=status.HTTP_200_OK,
+ )
+
+ except AnalyticView.DoesNotExist:
+ return Response(
+ {"error": "Analytic View Does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class ExportAnalyticsEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkSpaceAdminPermission,
+ ]
+
+ def post(self, request, slug):
+ try:
+ x_axis = request.data.get("x_axis", False)
+ y_axis = request.data.get("y_axis", False)
+
+ if not x_axis or not y_axis:
+ return Response(
+ {"error": "x-axis and y-axis dimensions are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ analytic_export_task.delay(
+ email=request.user.email, data=request.data, slug=slug
+ )
+
+ return Response(
+ {
+ "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
+ },
+ status=status.HTTP_200_OK,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class DefaultAnalyticsEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkSpaceAdminPermission,
+ ]
+
+ def get(self, request, slug):
+ try:
+ filters = issue_filters(request.GET, "GET")
+
+ queryset = Issue.objects.filter(workspace__slug=slug, **filters)
+
+ total_issues = queryset.count()
+
+ total_issues_classified = (
+ queryset.annotate(state_group=F("state__group"))
+ .values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
+
+ open_issues = queryset.filter(
+ state__group__in=["backlog", "unstarted", "started"]
+ ).count()
+
+ open_issues_classified = (
+ queryset.filter(state__group__in=["backlog", "unstarted", "started"])
+ .annotate(state_group=F("state__group"))
+ .values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
+
+ issue_completed_month_wise = (
+ queryset.filter(completed_at__isnull=False)
+ .annotate(month=ExtractMonth("completed_at"))
+ .values("month")
+ .annotate(count=Count("*"))
+ .order_by("month")
+ )
+ most_issue_created_user = (
+ queryset.exclude(created_by=None)
+ .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__email")
+ .annotate(count=Count("id"))
+ .order_by("-count")
+ )[:5]
+
+ most_issue_closed_user = (
+ queryset.filter(completed_at__isnull=False, assignees__isnull=False)
+ .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
+ .annotate(count=Count("id"))
+ .order_by("-count")
+ )[:5]
+
+ pending_issue_user = (
+ queryset.filter(completed_at__isnull=True)
+ .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
+ .annotate(count=Count("id"))
+ .order_by("-count")
+ )
+
+ open_estimate_sum = (
+ queryset.filter(
+ state__group__in=["backlog", "unstarted", "started"]
+ ).aggregate(open_estimate_sum=Sum("estimate_point"))
+ )["open_estimate_sum"]
+
+
+ total_estimate_sum = queryset.aggregate(
+ total_estimate_sum=Sum("estimate_point")
+ )["total_estimate_sum"]
+
+ return Response(
+ {
+ "total_issues": total_issues,
+ "total_issues_classified": total_issues_classified,
+ "open_issues": open_issues,
+ "open_issues_classified": open_issues_classified,
+ "issue_completed_month_wise": issue_completed_month_wise,
+ "most_issue_created_user": most_issue_created_user,
+ "most_issue_closed_user": most_issue_closed_user,
+ "pending_issue_user": pending_issue_user,
+ "open_estimate_sum": open_estimate_sum,
+ "total_estimate_sum": total_estimate_sum,
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py
index 98c9f9caf..705735e51 100644
--- a/apiserver/plane/api/views/asset.py
+++ b/apiserver/plane/api/views/asset.py
@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser
from sentry_sdk import capture_exception
-
+from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.db.models import FileAsset
@@ -34,7 +34,10 @@ class FileAssetEndpoint(BaseAPIView):
)
serializer.save(workspace_id=request.user.last_workspace_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ response_data = serializer.data
+ if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
+ response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
+ return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
@@ -82,7 +85,10 @@ class UserAssetsEndpoint(BaseAPIView):
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ response_data = serializer.data
+ if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
+ response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
+ return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index f61a93487..b12b49b2f 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -3,7 +3,17 @@ import json
# Django imports
from django.db import IntegrityError
-from django.db.models import OuterRef, Func, F, Q, Exists, OuterRef, Count, Prefetch
+from django.db.models import (
+ OuterRef,
+ Func,
+ F,
+ Q,
+ Exists,
+ OuterRef,
+ Count,
+ Prefetch,
+ Sum,
+)
from django.core import serializers
from django.utils import timezone
from django.utils.decorators import method_decorator
@@ -24,13 +34,13 @@ from plane.api.serializers import (
)
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import (
+ User,
Cycle,
CycleIssue,
Issue,
CycleFavorite,
IssueLink,
IssueAttachment,
- User,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
@@ -119,6 +129,25 @@ class CycleViewSet(BaseViewSet):
filter=Q(issue_cycle__issue__state__group="backlog"),
)
)
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="completed"),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="started"),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_cycle__issue__assignees",
+ queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+ )
+ )
.order_by("-is_favorite", "name")
.distinct()
)
@@ -414,26 +443,22 @@ class CycleDateCheckEndpoint(BaseAPIView):
try:
start_date = request.data.get("start_date", False)
end_date = request.data.get("end_date", False)
- cycle_id = request.data.get("cycle_id", False)
-
+ cycle_id = request.data.get("cycle_id")
if not start_date or not end_date:
return Response(
- {"error": "Start date and end date are required"},
+ {"error": "Start date and end date both are required"},
status=status.HTTP_400_BAD_REQUEST,
)
cycles = Cycle.objects.filter(
- Q(start_date__lte=start_date, end_date__gte=start_date)
- | Q(start_date__lte=end_date, end_date__gte=end_date)
- | Q(start_date__gte=start_date, end_date__lte=end_date),
- workspace__slug=slug,
- project_id=project_id,
- )
-
- if cycle_id:
- cycles = cycles.filter(
- ~Q(pk=cycle_id),
+ Q(workspace__slug=slug)
+ & Q(project_id=project_id)
+ & (
+ Q(start_date__lte=start_date, end_date__gte=start_date)
+ | Q(start_date__lte=end_date, end_date__gte=end_date)
+ | Q(start_date__gte=start_date, end_date__lte=end_date)
)
+ ).exclude(pk=cycle_id)
if cycles.exists():
return Response(
@@ -508,10 +533,25 @@ class CurrentUpcomingCyclesEndpoint(BaseAPIView):
filter=Q(issue_cycle__issue__state__group="backlog"),
)
)
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="completed"),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="started"),
+ )
+ )
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
- queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
)
)
.order_by("name", "-is_favorite")
@@ -558,10 +598,25 @@ class CurrentUpcomingCyclesEndpoint(BaseAPIView):
filter=Q(issue_cycle__issue__state__group="backlog"),
)
)
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="completed"),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="started"),
+ )
+ )
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
- queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
)
)
.order_by("name", "-is_favorite")
@@ -576,7 +631,7 @@ class CurrentUpcomingCyclesEndpoint(BaseAPIView):
)
except Exception as e:
- print(e)
+ capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
@@ -637,10 +692,25 @@ class CompletedCyclesEndpoint(BaseAPIView):
filter=Q(issue_cycle__issue__state__group="backlog"),
)
)
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="completed"),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="started"),
+ )
+ )
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
- queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
)
)
.order_by("name", "-is_favorite")
@@ -718,10 +788,25 @@ class DraftCyclesEndpoint(BaseAPIView):
filter=Q(issue_cycle__issue__state__group="backlog"),
)
)
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="completed"),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(issue_cycle__issue__state__group="started"),
+ )
+ )
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
- queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
)
)
.order_by("name", "-is_favorite")
diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py
index b9a7fe0c5..2e0f1cec0 100644
--- a/apiserver/plane/api/views/importer.py
+++ b/apiserver/plane/api/views/importer.py
@@ -363,6 +363,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
start_date=issue_data.get("start_date", None),
target_date=issue_data.get("target_date", None),
priority=issue_data.get("priority", None),
+ created_by=request.user,
)
)
@@ -400,7 +401,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for label_id in labels_list
]
@@ -420,7 +420,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for assignee_id in assignees_list
]
@@ -439,6 +438,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
workspace_id=project.workspace_id,
comment=f"{request.user.email} importer the issue from {service}",
verb="created",
+ created_by=request.user,
)
for issue in issues
],
@@ -457,7 +457,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for comment in comments_list
]
@@ -474,7 +473,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for issue, issue_data in zip(issues, issues_data)
]
@@ -512,7 +510,6 @@ class BulkImportModulesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for module in modules_data
],
@@ -536,7 +533,6 @@ class BulkImportModulesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for module, module_data in zip(modules, modules_data)
],
@@ -554,7 +550,6 @@ class BulkImportModulesEndpoint(BaseAPIView):
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
- updated_by=request.user,
)
for issue in module_issues_list
]
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index 987677bb2..4f519ce69 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -9,7 +9,7 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db.models.functions import Coalesce
-
+from django.conf import settings
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
@@ -788,6 +788,9 @@ class IssueAttachmentEndpoint(BaseAPIView):
serializer = IssueAttachmentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id, issue_id=issue_id)
+ response_data = serializer.data
+ if settings.DOCKERIZED and "minio:9000" in response_data["asset"]:
+ response_data["asset"] = response_data["asset"].replace("minio:9000", settings.WEB_URL)
issue_activity.delay(
type="attachment.activity.created",
requested_data=None,
@@ -799,7 +802,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
cls=DjangoJSONEncoder,
),
)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index 869bd15c9..f6c4ed87d 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -5,7 +5,7 @@ from datetime import datetime
# Django imports
from django.core.exceptions import ValidationError
from django.db import IntegrityError
-from django.db.models import Q, Exists, OuterRef
+from django.db.models import Q, Exists, OuterRef, Func, F
from django.core.validators import validate_email
from django.conf import settings
@@ -46,6 +46,8 @@ from plane.db.models import (
ProjectMemberInvite,
User,
ProjectIdentifier,
+ Cycle,
+ Module,
)
from plane.bgtasks.project_invitation_task import project_invitation
@@ -92,6 +94,26 @@ class ProjectViewSet(BaseViewSet):
self.get_queryset()
.annotate(is_favorite=Exists(subquery))
.order_by("-is_favorite", "name")
+ .annotate(
+ total_members=ProjectMember.objects.filter(
+ project_id=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_modules=Module.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
)
return Response(ProjectDetailSerializer(projects, many=True).data)
except Exception as e:
@@ -161,6 +183,7 @@ class ProjectViewSet(BaseViewSet):
workspace=serializer.instance.workspace,
group=state["group"],
default=state.get("default", False),
+ created_by=request.user,
)
for state in states
]
@@ -344,6 +367,7 @@ class UserProjectInvitationsViewset(BaseViewSet):
workspace=invitation.project.workspace,
member=request.user,
role=invitation.role,
+ created_by=request.user,
)
for invitation in project_invitations
]
@@ -385,6 +409,41 @@ class ProjectMemberViewSet(BaseViewSet):
.select_related("workspace", "workspace__owner")
)
+ def partial_update(self, request, slug, project_id, pk):
+ try:
+ project_member = ProjectMember.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
+ if request.user.id == project_member.member_id:
+ return Response(
+ {"error": "You cannot update your own role"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ if request.data.get("role", 10) > project_member.role:
+ return Response(
+ {
+ "error": "You cannot update a role that is higher than your own role"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serializer = ProjectMemberSerializer(
+ project_member, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except ProjectMember.DoesNotExist:
+ return Response(
+ {"error": "Project Member does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
+
+
class AddMemberToProjectEndpoint(BaseAPIView):
permission_classes = [
@@ -465,6 +524,7 @@ class AddTeamToProjectEndpoint(BaseAPIView):
project_id=project_id,
member_id=member,
workspace=workspace,
+ created_by=request.user,
)
)
@@ -612,6 +672,7 @@ class ProjectJoinEndpoint(BaseAPIView):
if workspace_role >= 15
else (15 if workspace_role == 10 else workspace_role),
workspace=workspace,
+ created_by=request.user,
)
for project_id in project_ids
],
diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py
index b4e300dcb..1b6fb42cc 100644
--- a/apiserver/plane/api/views/view.py
+++ b/apiserver/plane/api/views/view.py
@@ -18,10 +18,6 @@ from plane.api.permissions import ProjectEntityPermission
from plane.db.models import (
IssueView,
Issue,
- IssueBlocker,
- IssueLink,
- CycleIssue,
- ModuleIssue,
IssueViewFavorite,
)
from plane.utils.issue_filters import issue_filters
diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py
index 8a2791e3b..dcb8941a1 100644
--- a/apiserver/plane/api/views/workspace.py
+++ b/apiserver/plane/api/views/workspace.py
@@ -223,6 +223,7 @@ class InviteWorkspaceEndpoint(BaseAPIView):
algorithm="HS256",
),
role=email.get("role", 10),
+ created_by=request.user,
)
)
except ValidationError:
@@ -381,6 +382,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
workspace=invitation.workspace,
member=request.user,
role=invitation.role,
+ created_by=request.user,
)
for invitation in workspace_invitations
],
@@ -421,6 +423,43 @@ class WorkSpaceMemberViewSet(BaseViewSet):
.select_related("member")
)
+ def partial_update(self, request, slug, pk):
+ try:
+ workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
+ if request.user.id == workspace_member.member_id:
+ return Response(
+ {"error": "You cannot update your own role"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+            if request.data.get("role", 10) > WorkspaceMember.objects.get(workspace__slug=slug, member=request.user).role:
+ return Response(
+ {
+ "error": "You cannot update a role that is higher than your own role"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serializer = WorkSpaceMemberSerializer(
+ workspace_member, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except WorkspaceMember.DoesNotExist:
+ return Response(
+ {"error": "Workspace Member does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
class TeamMemberViewSet(BaseViewSet):
serializer_class = TeamSerializer
@@ -783,4 +822,3 @@ class WorkspaceThemeViewSet(BaseViewSet):
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
-
diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py
new file mode 100644
index 000000000..7f276be82
--- /dev/null
+++ b/apiserver/plane/bgtasks/analytic_plot_export.py
@@ -0,0 +1,174 @@
+# Python imports
+import csv
+import io
+
+# Django imports
+from django.core.mail import EmailMultiAlternatives
+from django.template.loader import render_to_string
+from django.utils.html import strip_tags
+from django.conf import settings
+
+# Third party imports
+from celery import shared_task
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.db.models import Issue
+from plane.utils.analytics_plot import build_graph_plot
+from plane.utils.issue_filters import issue_filters
+
+row_mapping = {
+ "state__name": "State",
+ "state__group": "State Group",
+ "labels__name": "Label",
+ "assignees__email": "Assignee Name",
+ "start_date": "Start Date",
+ "target_date": "Due Date",
+ "completed_at": "Completed At",
+ "created_at": "Created At",
+ "issue_count": "Issue Count",
+ "priority": "Priority",
+ "estimate": "Estimate",
+}
+
+
+@shared_task
+def analytic_export_task(email, data, slug):
+ try:
+ filters = issue_filters(data, "POST")
+ queryset = Issue.objects.filter(**filters, workspace__slug=slug)
+
+ x_axis = data.get("x_axis", False)
+ y_axis = data.get("y_axis", False)
+ segment = data.get("segment", False)
+
+ distribution = build_graph_plot(
+ queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+ )
+
+ key = "count" if y_axis == "issue_count" else "estimate"
+
+ segmented = segment
+
+ assignee_details = {}
+ if x_axis in ["assignees__email"] or segment in ["assignees__email"]:
+ assignee_details = (
+ Issue.objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
+ .order_by("assignees__id")
+ .distinct("assignees__id")
+ .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name")
+ )
+
+ if segment:
+ segment_zero = []
+ for item in distribution:
+ current_dict = distribution.get(item)
+ for current in current_dict:
+ segment_zero.append(current.get("segment"))
+
+ segment_zero = list(set(segment_zero))
+ row_zero = (
+ [
+ row_mapping.get(x_axis, "X-Axis"),
+ ]
+ + [
+ row_mapping.get(y_axis, "Y-Axis"),
+ ]
+ + segment_zero
+ )
+ rows = []
+ for item in distribution:
+ generated_row = [
+ item,
+ ]
+
+ data = distribution.get(item)
+ # Add y axis values
+ generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))
+
+ for segment in segment_zero:
+ value = [x for x in data if x.get("segment") == segment]
+ if len(value):
+ generated_row.append(value[0].get(key))
+ else:
+ generated_row.append("0")
+ # x-axis replacement for names
+ if x_axis in ["assignees__email"]:
+ assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)]
+ if len(assignee):
+ generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
+ rows.append(tuple(generated_row))
+
+ # If segment is ["assignees__email"] then replace segment_zero rows with first and last names
+ if segmented in ["assignees__email"]:
+ for index, segm in enumerate(row_zero[2:]):
+ # find the name of the user
+ assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(segm)]
+ if len(assignee):
+                        row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
+
+ rows = [tuple(row_zero)] + rows
+ csv_buffer = io.StringIO()
+ writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
+
+ # Write CSV data to the buffer
+ for row in rows:
+ writer.writerow(row)
+
+ subject = "Your Export is ready"
+
+ html_content = render_to_string("emails/exports/analytics.html", {})
+
+ text_content = strip_tags(html_content)
+ csv_buffer.seek(0)
+ msg = EmailMultiAlternatives(
+ subject, text_content, settings.EMAIL_FROM, [email]
+ )
+ msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
+ msg.send(fail_silently=False)
+
+ else:
+ row_zero = [
+ row_mapping.get(x_axis, "X-Axis"),
+ row_mapping.get(y_axis, "Y-Axis"),
+ ]
+ rows = []
+ for item in distribution:
+ row = [
+ item,
+ distribution.get(item)[0].get("count")
+ if y_axis == "issue_count"
+                    else distribution.get(item)[0].get("estimate"),
+ ]
+ # x-axis replacement to names
+ if x_axis in ["assignees__email"]:
+ assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)]
+ if len(assignee):
+ row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
+
+ rows.append(tuple(row))
+ rows = [tuple(row_zero)] + rows
+ csv_buffer = io.StringIO()
+ writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
+
+ # Write CSV data to the buffer
+ for row in rows:
+ writer.writerow(row)
+
+ subject = "Your Export is ready"
+
+ html_content = render_to_string("emails/exports/analytics.html", {})
+
+ text_content = strip_tags(html_content)
+
+ csv_buffer.seek(0)
+ msg = EmailMultiAlternatives(
+ subject, text_content, settings.EMAIL_FROM, [email]
+ )
+ msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
+ msg.send(fail_silently=False)
+
+ except Exception as e:
+        # Report the failure to Sentry below; avoid noisy stdout prints in the worker
+ capture_exception(e)
+ return
diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py
index 291b71be3..85ac1c89b 100644
--- a/apiserver/plane/bgtasks/importer_task.py
+++ b/apiserver/plane/bgtasks/importer_task.py
@@ -27,7 +27,7 @@ from plane.db.models import (
User,
)
from .workspace_invitation_task import workspace_invitation
-from plane.bgtasks.user_welcome_task import send_welcome_email
+from plane.bgtasks.user_welcome_task import send_welcome_slack
@shared_task
@@ -58,7 +58,7 @@ def service_importer(service, importer_id):
)
[
- send_welcome_email.delay(
+ send_welcome_slack.delay(
str(user.id),
True,
f"{user.email} was imported to Plane from {service}",
@@ -78,7 +78,11 @@ def service_importer(service, importer_id):
# Add new users to Workspace and project automatically
WorkspaceMember.objects.bulk_create(
[
- WorkspaceMember(member=user, workspace_id=importer.workspace_id)
+ WorkspaceMember(
+ member=user,
+ workspace_id=importer.workspace_id,
+ created_by=importer.created_by,
+ )
for user in workspace_users
],
batch_size=100,
@@ -91,6 +95,7 @@ def service_importer(service, importer_id):
project_id=importer.project_id,
workspace_id=importer.workspace_id,
member=user,
+ created_by=importer.created_by,
)
for user in workspace_users
],
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index c749d9c15..417fe2324 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -136,7 +136,6 @@ def track_priority(
comment=f"{actor.email} updated the priority to {requested_data.get('priority')}",
)
)
- print(issue_activities)
# Track chnages in state of the issue
diff --git a/apiserver/plane/bgtasks/user_welcome_task.py b/apiserver/plane/bgtasks/user_welcome_task.py
index c042d0a0b..bea2ee33d 100644
--- a/apiserver/plane/bgtasks/user_welcome_task.py
+++ b/apiserver/plane/bgtasks/user_welcome_task.py
@@ -1,8 +1,5 @@
# Django imports
from django.conf import settings
-from django.core.mail import EmailMultiAlternatives
-from django.template.loader import render_to_string
-from django.utils.html import strip_tags
# Third party imports
from celery import shared_task
@@ -15,31 +12,11 @@ from plane.db.models import User
@shared_task
-def send_welcome_email(user_id, created, message):
+def send_welcome_slack(user_id, created, message):
try:
instance = User.objects.get(pk=user_id)
if created and not instance.is_bot:
- first_name = instance.first_name.capitalize()
- to_email = instance.email
- from_email_string = settings.EMAIL_FROM
-
- subject = f"Welcome to Plane ✈️!"
-
- context = {"first_name": first_name, "email": instance.email}
-
- html_content = render_to_string(
- "emails/auth/user_welcome_email.html", context
- )
-
- text_content = strip_tags(html_content)
-
- msg = EmailMultiAlternatives(
- subject, text_content, from_email_string, [to_email]
- )
- msg.attach_alternative(html_content, "text/html")
- msg.send()
-
# Send message on slack as well
if settings.SLACK_BOT_TOKEN:
client = WebClient(token=settings.SLACK_BOT_TOKEN)
diff --git a/apiserver/plane/db/migrations/0031_analyticview.py b/apiserver/plane/db/migrations/0031_analyticview.py
new file mode 100644
index 000000000..7e02b78b2
--- /dev/null
+++ b/apiserver/plane/db/migrations/0031_analyticview.py
@@ -0,0 +1,37 @@
+# Generated by Django 3.2.18 on 2023-05-12 11:31
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import uuid
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0030_alter_estimatepoint_unique_together'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='AnalyticView',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('name', models.CharField(max_length=255)),
+ ('description', models.TextField(blank=True)),
+ ('query', models.JSONField()),
+ ('query_dict', models.JSONField(default=dict)),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Analytic',
+ 'verbose_name_plural': 'Analytics',
+ 'db_table': 'analytic_views',
+ 'ordering': ('-created_at',),
+ },
+ ),
+ ]
diff --git a/apiserver/plane/db/migrations/0032_auto_20230520_2015.py b/apiserver/plane/db/migrations/0032_auto_20230520_2015.py
new file mode 100644
index 000000000..27c13537e
--- /dev/null
+++ b/apiserver/plane/db/migrations/0032_auto_20230520_2015.py
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.19 on 2023-05-20 14:45
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0031_analyticview'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='project',
+ old_name='icon',
+ new_name='emoji',
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='icon_prop',
+ field=models.JSONField(null=True),
+ ),
+ ]
diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py
index e32d768e0..53b501716 100644
--- a/apiserver/plane/db/models/__init__.py
+++ b/apiserver/plane/db/models/__init__.py
@@ -67,3 +67,5 @@ from .importer import Importer
from .page import Page, PageBlock, PageFavorite, PageLabel
from .estimate import Estimate, EstimatePoint
+
+from .analytic import AnalyticView
\ No newline at end of file
diff --git a/apiserver/plane/db/models/analytic.py b/apiserver/plane/db/models/analytic.py
new file mode 100644
index 000000000..d097051af
--- /dev/null
+++ b/apiserver/plane/db/models/analytic.py
@@ -0,0 +1,25 @@
+# Django models
+from django.db import models
+from django.conf import settings
+
+from .base import BaseModel
+
+
+class AnalyticView(BaseModel):
+ workspace = models.ForeignKey(
+ "db.Workspace", related_name="analytics", on_delete=models.CASCADE
+ )
+ name = models.CharField(max_length=255)
+ description = models.TextField(blank=True)
+ query = models.JSONField()
+ query_dict = models.JSONField(default=dict)
+
+ class Meta:
+ verbose_name = "Analytic"
+ verbose_name_plural = "Analytics"
+ db_table = "analytic_views"
+ ordering = ("-created_at",)
+
+ def __str__(self):
+ """Return name of the analytic view"""
+ return f"{self.name} <{self.workspace.name}>"
diff --git a/apiserver/plane/db/models/asset.py b/apiserver/plane/db/models/asset.py
index acbb9428f..e37f2c0b0 100644
--- a/apiserver/plane/db/models/asset.py
+++ b/apiserver/plane/db/models/asset.py
@@ -4,6 +4,7 @@ from uuid import uuid4
# Django import
from django.db import models
from django.core.exceptions import ValidationError
+from django.conf import settings
# Module import
from . import BaseModel
@@ -16,9 +17,11 @@ def get_upload_path(instance, filename):
def file_size(value):
- limit = 5 * 1024 * 1024
- if value.size > limit:
- raise ValidationError("File too large. Size should not exceed 5 MB.")
+ # File limit check is only for cloud hosted
+ if not settings.DOCKERIZED:
+ limit = 5 * 1024 * 1024
+ if value.size > limit:
+ raise ValidationError("File too large. Size should not exceed 5 MB.")
class FileAsset(BaseModel):
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index fed946a61..f58d4ac13 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -85,8 +85,13 @@ class Issue(ProjectBaseModel):
).first()
# if there is no default state assign any random state
if default_state is None:
- self.state = State.objects.filter(project=self.project).first()
+ random_state = State.objects.filter(project=self.project).first()
+ self.state = random_state
+ if random_state.group == "started":
+ self.start_date = timezone.now().date()
else:
+ if default_state.group == "started":
+ self.start_date = timezone.now().date()
self.state = default_state
except ImportError:
pass
@@ -94,18 +99,15 @@ class Issue(ProjectBaseModel):
try:
from plane.db.models import State, PageBlock
- # Get the completed states of the project
- completed_states = State.objects.filter(
- group="completed", project=self.project
- ).values_list("pk", flat=True)
# Check if the current issue state and completed state id are same
- if self.state.id in completed_states:
+ if self.state.group == "completed":
self.completed_at = timezone.now()
# check if there are any page blocks
PageBlock.objects.filter(issue_id=self.id).filter().update(
completed_at=timezone.now()
)
-
+ elif self.state.group == "started":
+ self.start_date = timezone.now().date()
else:
PageBlock.objects.filter(issue_id=self.id).filter().update(
completed_at=None
@@ -116,7 +118,6 @@ class Issue(ProjectBaseModel):
pass
if self._state.adding:
# Get the maximum display_id value from the database
-
last_id = IssueSequence.objects.filter(project=self.project).aggregate(
largest=models.Max("sequence")
)["largest"]
@@ -131,6 +132,9 @@ class Issue(ProjectBaseModel):
if largest_sort_order is not None:
self.sort_order = largest_sort_order + 10000
+ # If adding it to started state
+ if self.state.group == "started":
+ self.start_date = timezone.now().date()
# Strip the html tags using html parser
self.description_stripped = (
None
@@ -206,9 +210,11 @@ def get_upload_path(instance, filename):
def file_size(value):
- limit = 5 * 1024 * 1024
- if value.size > limit:
- raise ValidationError("File too large. Size should not exceed 5 MB.")
+ # File limit check is only for cloud hosted
+ if not settings.DOCKERIZED:
+ limit = 5 * 1024 * 1024
+ if value.size > limit:
+ raise ValidationError("File too large. Size should not exceed 5 MB.")
class IssueAttachment(ProjectBaseModel):
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index 04435cadf..41b1ac654 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -63,7 +63,8 @@ class Project(BaseModel):
null=True,
blank=True,
)
- icon = models.CharField(max_length=255, null=True, blank=True)
+ emoji = models.CharField(max_length=255, null=True, blank=True)
+ icon_prop = models.JSONField(null=True)
module_view = models.BooleanField(default=True)
cycle_view = models.BooleanField(default=True)
issue_views_view = models.BooleanField(default=True)
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index 5a4f487c1..b0ab72159 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -104,29 +104,9 @@ class User(AbstractBaseUser, PermissionsMixin):
@receiver(post_save, sender=User)
-def send_welcome_email(sender, instance, created, **kwargs):
+def send_welcome_slack(sender, instance, created, **kwargs):
try:
if created and not instance.is_bot:
- first_name = instance.first_name.capitalize()
- to_email = instance.email
- from_email_string = settings.EMAIL_FROM
-
- subject = f"Welcome to Plane ✈️!"
-
- context = {"first_name": first_name, "email": instance.email}
-
- html_content = render_to_string(
- "emails/auth/user_welcome_email.html", context
- )
-
- text_content = strip_tags(html_content)
-
- msg = EmailMultiAlternatives(
- subject, text_content, from_email_string, [to_email]
- )
- msg.attach_alternative(html_content, "text/html")
- msg.send()
-
# Send message on slack as well
if settings.SLACK_BOT_TOKEN:
client = WebClient(token=settings.SLACK_BOT_TOKEN)
diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py
index e03a0b822..87e04e1ed 100644
--- a/apiserver/plane/settings/local.py
+++ b/apiserver/plane/settings/local.py
@@ -25,7 +25,9 @@ DATABASES = {
}
}
-DOCKERIZED = os.environ.get("DOCKERIZED", False)
+DOCKERIZED = int(os.environ.get(
+ "DOCKERIZED", 0
+)) == 1
if DOCKERIZED:
DATABASES["default"] = dj_database_url.config()
diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py
index e58736472..4d7da6ce3 100644
--- a/apiserver/plane/settings/production.py
+++ b/apiserver/plane/settings/production.py
@@ -29,9 +29,10 @@ DATABASES = {
DATABASES["default"] = dj_database_url.config()
SITE_ID = 1
-DOCKERIZED = os.environ.get(
- "DOCKERIZED", False
-) # Set the variable true if running in docker-compose environment
+# Set the variable true if running in docker environment
+DOCKERIZED = int(os.environ.get(
+ "DOCKERIZED", 0
+)) == 1
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -69,7 +70,7 @@ CORS_ALLOW_CREDENTIALS = True
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
-if os.environ.get("SENTRY_DSN", False):
+if bool(os.environ.get("SENTRY_DSN", False)):
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN", ""),
integrations=[DjangoIntegration(), RedisIntegration()],
@@ -80,12 +81,21 @@ if os.environ.get("SENTRY_DSN", False):
environment="production",
)
-if (
- os.environ.get("AWS_REGION", False)
- and os.environ.get("AWS_ACCESS_KEY_ID", False)
- and os.environ.get("AWS_SECRET_ACCESS_KEY", False)
- and os.environ.get("AWS_S3_BUCKET_NAME", False)
-):
+if DOCKERIZED:
+ DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+ # The AWS access key to use.
+ AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
+ # The AWS secret access key to use.
+ AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
+ # The name of the bucket to store files in.
+ AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
+ # The full URL to the S3 endpoint. Leave blank to use the default region URL.
+ AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://minio:9000")
+ # Default permissions
+ AWS_DEFAULT_ACL = "public-read"
+ AWS_QUERYSTRING_AUTH = False
+ AWS_S3_FILE_OVERWRITE = False
+else:
# The AWS region to connect to.
AWS_REGION = os.environ.get("AWS_REGION", "")
@@ -99,7 +109,7 @@ if (
# AWS_SESSION_TOKEN = ""
# The name of the bucket to store files in.
- AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "")
+ AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
# How to construct S3 URLs ("auto", "path", "virtual").
AWS_S3_ADDRESSING_STYLE = "auto"
@@ -166,14 +176,8 @@ if (
# extra characters appended.
AWS_S3_FILE_OVERWRITE = False
- # AWS Settings End
-
DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
-
-else:
- MEDIA_URL = "/uploads/"
- MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
-
+# AWS Settings End
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -218,12 +222,6 @@ else:
}
}
-RQ_QUEUES = {
- "default": {
- "USE_REDIS_CACHE": "default",
- }
-}
-
WEB_URL = os.environ.get("WEB_URL")
diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py
index d4d0e5e12..d1d8e1749 100644
--- a/apiserver/plane/settings/staging.py
+++ b/apiserver/plane/settings/staging.py
@@ -49,6 +49,10 @@ CORS_ALLOW_ALL_ORIGINS = True
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
+# Make true if running in a docker environment
+DOCKERIZED = int(os.environ.get(
+ "DOCKERIZED", 0
+)) == 1
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN"),
diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py
index 3dfde38bd..a2244ffe0 100644
--- a/apiserver/plane/urls.py
+++ b/apiserver/plane/urls.py
@@ -7,7 +7,7 @@ from django.urls import path
from django.views.generic import TemplateView
from django.conf import settings
-from django.conf.urls import include, url
+from django.conf.urls import include, url, static
# from django.conf.urls.static import static
@@ -17,9 +17,8 @@ urlpatterns = [
path("api/", include("plane.api.urls")),
path("", include("plane.web.urls")),
]
-# + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
-# + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
+urlpatterns = urlpatterns + static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
new file mode 100644
index 000000000..161f6497e
--- /dev/null
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -0,0 +1,76 @@
+# Python imports
+from itertools import groupby
+
+# Django import
+from django.db import models
+from django.db.models import Count, F, Sum, Value, Case, When, CharField
+from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Concat
+
+
+def build_graph_plot(queryset, x_axis, y_axis, segment=None):
+
+ temp_axis = x_axis
+
+ if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+ year = ExtractYear(x_axis)
+ month = ExtractMonth(x_axis)
+ dimension = Concat(year, Value("-"), month, output_field=CharField())
+ queryset = queryset.annotate(dimension=dimension)
+ x_axis = "dimension"
+ else:
+ queryset = queryset.annotate(dimension=F(x_axis))
+ x_axis = "dimension"
+
+    if temp_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+        queryset = queryset.exclude(dimension__isnull=True)
+
+ if segment in ["created_at", "start_date", "target_date", "completed_at"]:
+ year = ExtractYear(segment)
+ month = ExtractMonth(segment)
+ dimension = Concat(year, Value("-"), month, output_field=CharField())
+ queryset = queryset.annotate(segmented=dimension)
+ segment = "segmented"
+
+ queryset = queryset.values(x_axis)
+
+ # Group queryset by x_axis field
+
+ if y_axis == "issue_count":
+ queryset = queryset.annotate(
+ is_null=Case(
+ When(dimension__isnull=True, then=Value("None")),
+ default=Value("not_null"),
+ output_field=models.CharField(max_length=8),
+ ),
+ dimension_ex=Coalesce("dimension", Value("null")),
+ ).values("dimension")
+ if segment:
+ queryset = queryset.annotate(segment=F(segment)).values(
+ "dimension", "segment"
+ )
+ else:
+ queryset = queryset.values("dimension")
+
+ queryset = queryset.annotate(count=Count("*")).order_by("dimension")
+
+ if y_axis == "estimate":
+ queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
+ if segment:
+ queryset = queryset.annotate(segment=F(segment)).values(
+ "dimension", "segment", "estimate"
+ )
+ else:
+ queryset = queryset.values("dimension", "estimate")
+
+ result_values = list(queryset)
+ grouped_data = {}
+ for key, items in groupby(result_values, key=lambda x: x[str("dimension")]):
+ grouped_data[str(key)] = list(items)
+
+ sorted_data = grouped_data
+ if temp_axis == "priority":
+ order = ["low", "medium", "high", "urgent", "None"]
+ sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
+ else:
+ sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0])))
+ return sorted_data
diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py
index 8b62da722..944906f92 100644
--- a/apiserver/plane/utils/issue_filters.py
+++ b/apiserver/plane/utils/issue_filters.py
@@ -198,6 +198,39 @@ def filter_issue_state_type(params, filter, method):
return filter
+def filter_project(params, filter, method):
+ if method == "GET":
+ projects = params.get("project").split(",")
+ if len(projects) and "" not in projects:
+ filter["project__in"] = projects
+ else:
+ if params.get("project", None) and len(params.get("project")):
+ filter["project__in"] = params.get("project")
+ return filter
+
+
+def filter_cycle(params, filter, method):
+ if method == "GET":
+ cycles = params.get("cycle").split(",")
+ if len(cycles) and "" not in cycles:
+ filter["issue_cycle__cycle_id__in"] = cycles
+ else:
+ if params.get("cycle", None) and len(params.get("cycle")):
+ filter["issue_cycle__cycle_id__in"] = params.get("cycle")
+ return filter
+
+
+def filter_module(params, filter, method):
+ if method == "GET":
+ modules = params.get("module").split(",")
+ if len(modules) and "" not in modules:
+ filter["issue_module__module_id__in"] = modules
+ else:
+ if params.get("module", None) and len(params.get("module")):
+ filter["issue_module__module_id__in"] = params.get("module")
+ return filter
+
+
def issue_filters(query_params, method):
filter = dict()
@@ -216,6 +249,9 @@ def issue_filters(query_params, method):
"target_date": filter_target_date,
"completed_at": filter_completed_at,
"type": filter_issue_state_type,
+ "project": filter_project,
+ "cycle": filter_cycle,
+ "module": filter_module,
}
for key, value in ISSUE_FILTER.items():
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index e3e58450c..2bc109968 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,6 +1,6 @@
# base requirements
-Django==3.2.18
+Django==3.2.19
django-braces==1.15.0
django-taggit==3.1.0
psycopg2==2.9.5
diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt
index 2547ce255..c37e98ffd 100644
--- a/apiserver/requirements/production.txt
+++ b/apiserver/requirements/production.txt
@@ -4,7 +4,7 @@ dj-database-url==1.2.0
gunicorn==20.1.0
whitenoise==6.3.0
django-storages==1.13.2
-boto==2.49.0
+boto3==1.26.136
django-anymail==9.0
twilio==7.16.2
django-debug-toolbar==3.8.1
diff --git a/apiserver/templates/emails/auth/user_welcome_email.html b/apiserver/templates/emails/auth/user_welcome_email.html
deleted file mode 100644
index af4e60d99..000000000
--- a/apiserver/templates/emails/auth/user_welcome_email.html
+++ /dev/null
@@ -1,481 +0,0 @@
-
-
-
-
-
-
-
-
- Welcome to Plane ✈️!
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Welcome to Plane!
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
We're thrilled you're here. We know this is the beginning of a long and exciting journey, and we want to be there every step of the way.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Plane is an open-source issue planning and tracking tool that allows teams to collaborate on projects and prioritize tasks. With Plane, you can easily create and assign issues, set deadlines, and track progress.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
We have put together some resources to help you get started. Please find them below:
Note: Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our Discord or GitHub, and we will use your feedback to improve on our upcoming releases.