diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 169b0c319..36fa6ecca 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -1,6 +1,3 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework import serializers
diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py
index ad8a15c4f..c29a4b692 100644
--- a/apiserver/plane/api/views/analytic.py
+++ b/apiserver/plane/api/views/analytic.py
@@ -23,166 +23,156 @@ class AnalyticsEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- x_axis = request.GET.get("x_axis", False)
- y_axis = request.GET.get("y_axis", False)
- segment = request.GET.get("segment", False)
+ x_axis = request.GET.get("x_axis", False)
+ y_axis = request.GET.get("y_axis", False)
+ segment = request.GET.get("segment", False)
- valid_xaxis_segment = [
- "state_id",
- "state__group",
- "labels__id",
- "assignees__id",
- "estimate_point",
- "issue_cycle__cycle_id",
- "issue_module__module_id",
- "priority",
- "start_date",
- "target_date",
- "created_at",
- "completed_at",
- ]
+ valid_xaxis_segment = [
+ "state_id",
+ "state__group",
+ "labels__id",
+ "assignees__id",
+ "estimate_point",
+ "issue_cycle__cycle_id",
+ "issue_module__module_id",
+ "priority",
+ "start_date",
+ "target_date",
+ "created_at",
+ "completed_at",
+ ]
- valid_yaxis = [
- "issue_count",
- "estimate",
- ]
-
- # Check for x-axis and y-axis as thery are required parameters
- if (
- not x_axis
- or not y_axis
- or not x_axis in valid_xaxis_segment
- or not y_axis in valid_yaxis
- ):
- return Response(
- {
- "error": "x-axis and y-axis dimensions are required and the values should be valid"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # If segment is present it cannot be same as x-axis
- if segment and (segment not in valid_xaxis_segment or x_axis == segment):
- return Response(
- {
- "error": "Both segment and x axis cannot be same and segment should be valid"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Additional filters that need to be applied
- filters = issue_filters(request.GET, "GET")
-
- # Get the issues for the workspace with the additional filters applied
- queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
-
- # Get the total issue count
- total_issues = queryset.count()
-
- # Build the graph payload
- distribution = build_graph_plot(
- queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
- )
-
- state_details = {}
- if x_axis in ["state_id"] or segment in ["state_id"]:
- state_details = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- **filters,
- )
- .distinct("state_id")
- .order_by("state_id")
- .values("state_id", "state__name", "state__color")
- )
-
- label_details = {}
- if x_axis in ["labels__id"] or segment in ["labels__id"]:
- label_details = (
- Issue.objects.filter(
- workspace__slug=slug, **filters, labels__id__isnull=False
- )
- .distinct("labels__id")
- .order_by("labels__id")
- .values("labels__id", "labels__color", "labels__name")
- )
-
- assignee_details = {}
- if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
- assignee_details = (
- Issue.issue_objects.filter(
- workspace__slug=slug, **filters, assignees__avatar__isnull=False
- )
- .order_by("assignees__id")
- .distinct("assignees__id")
- .values(
- "assignees__avatar",
- "assignees__display_name",
- "assignees__first_name",
- "assignees__last_name",
- "assignees__id",
- )
- )
-
- cycle_details = {}
- if x_axis in ["issue_cycle__cycle_id"] or segment in [
- "issue_cycle__cycle_id"
- ]:
- cycle_details = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- **filters,
- issue_cycle__cycle_id__isnull=False,
- )
- .distinct("issue_cycle__cycle_id")
- .order_by("issue_cycle__cycle_id")
- .values(
- "issue_cycle__cycle_id",
- "issue_cycle__cycle__name",
- )
- )
-
- module_details = {}
- if x_axis in ["issue_module__module_id"] or segment in [
- "issue_module__module_id"
- ]:
- module_details = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- **filters,
- issue_module__module_id__isnull=False,
- )
- .distinct("issue_module__module_id")
- .order_by("issue_module__module_id")
- .values(
- "issue_module__module_id",
- "issue_module__module__name",
- )
- )
+ valid_yaxis = [
+ "issue_count",
+ "estimate",
+ ]
+        # Check for x-axis and y-axis as they are required parameters
+ if (
+ not x_axis
+ or not y_axis
+            or x_axis not in valid_xaxis_segment
+            or y_axis not in valid_yaxis
+ ):
return Response(
{
- "total": total_issues,
- "distribution": distribution,
- "extras": {
- "state_details": state_details,
- "assignee_details": assignee_details,
- "label_details": label_details,
- "cycle_details": cycle_details,
- "module_details": module_details,
- },
+ "error": "x-axis and y-axis dimensions are required and the values should be valid"
},
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
+ # If segment is present it cannot be same as x-axis
+ if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+ return Response(
+ {
+ "error": "Both segment and x axis cannot be same and segment should be valid"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Additional filters that need to be applied
+ filters = issue_filters(request.GET, "GET")
+
+ # Get the issues for the workspace with the additional filters applied
+ queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
+
+ # Get the total issue count
+ total_issues = queryset.count()
+
+ # Build the graph payload
+ distribution = build_graph_plot(
+ queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+ )
+
+ state_details = {}
+ if x_axis in ["state_id"] or segment in ["state_id"]:
+ state_details = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ **filters,
+ )
+ .distinct("state_id")
+ .order_by("state_id")
+ .values("state_id", "state__name", "state__color")
+ )
+
+ label_details = {}
+ if x_axis in ["labels__id"] or segment in ["labels__id"]:
+ label_details = (
+ Issue.objects.filter(
+ workspace__slug=slug, **filters, labels__id__isnull=False
+ )
+ .distinct("labels__id")
+ .order_by("labels__id")
+ .values("labels__id", "labels__color", "labels__name")
+ )
+
+ assignee_details = {}
+ if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
+ assignee_details = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug, **filters, assignees__avatar__isnull=False
+ )
+ .order_by("assignees__id")
+ .distinct("assignees__id")
+ .values(
+ "assignees__avatar",
+ "assignees__display_name",
+ "assignees__first_name",
+ "assignees__last_name",
+ "assignees__id",
+ )
+ )
+
+ cycle_details = {}
+ if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
+ cycle_details = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ **filters,
+ issue_cycle__cycle_id__isnull=False,
+ )
+ .distinct("issue_cycle__cycle_id")
+ .order_by("issue_cycle__cycle_id")
+ .values(
+ "issue_cycle__cycle_id",
+ "issue_cycle__cycle__name",
+ )
+ )
+
+ module_details = {}
+ if x_axis in ["issue_module__module_id"] or segment in [
+ "issue_module__module_id"
+ ]:
+ module_details = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ **filters,
+ issue_module__module_id__isnull=False,
+ )
+ .distinct("issue_module__module_id")
+ .order_by("issue_module__module_id")
+ .values(
+ "issue_module__module_id",
+ "issue_module__module__name",
+ )
+ )
+
+ return Response(
+ {
+ "total": total_issues,
+ "distribution": distribution,
+ "extras": {
+ "state_details": state_details,
+ "assignee_details": assignee_details,
+ "label_details": label_details,
+ "cycle_details": cycle_details,
+ "module_details": module_details,
+ },
+ },
+ status=status.HTTP_200_OK,
+ )
+
class AnalyticViewViewset(BaseViewSet):
permission_classes = [
@@ -207,45 +197,30 @@ class SavedAnalyticEndpoint(BaseAPIView):
]
def get(self, request, slug, analytic_id):
- try:
- analytic_view = AnalyticView.objects.get(
- pk=analytic_id, workspace__slug=slug
- )
+ analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)
- filter = analytic_view.query
- queryset = Issue.issue_objects.filter(**filter)
+ filter = analytic_view.query
+ queryset = Issue.issue_objects.filter(**filter)
- x_axis = analytic_view.query_dict.get("x_axis", False)
- y_axis = analytic_view.query_dict.get("y_axis", False)
+ x_axis = analytic_view.query_dict.get("x_axis", False)
+ y_axis = analytic_view.query_dict.get("y_axis", False)
- if not x_axis or not y_axis:
- return Response(
- {"error": "x-axis and y-axis dimensions are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- segment = request.GET.get("segment", False)
- distribution = build_graph_plot(
- queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
- )
- total_issues = queryset.count()
+ if not x_axis or not y_axis:
return Response(
- {"total": total_issues, "distribution": distribution},
- status=status.HTTP_200_OK,
- )
-
- except AnalyticView.DoesNotExist:
- return Response(
- {"error": "Analytic View Does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ segment = request.GET.get("segment", False)
+ distribution = build_graph_plot(
+ queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+ )
+ total_issues = queryset.count()
+ return Response(
+ {"total": total_issues, "distribution": distribution},
+ status=status.HTTP_200_OK,
+ )
+
class ExportAnalyticsEndpoint(BaseAPIView):
permission_classes = [
@@ -253,73 +228,64 @@ class ExportAnalyticsEndpoint(BaseAPIView):
]
def post(self, request, slug):
- try:
- x_axis = request.data.get("x_axis", False)
- y_axis = request.data.get("y_axis", False)
- segment = request.data.get("segment", False)
+ x_axis = request.data.get("x_axis", False)
+ y_axis = request.data.get("y_axis", False)
+ segment = request.data.get("segment", False)
-
- valid_xaxis_segment = [
- "state_id",
- "state__group",
- "labels__id",
- "assignees__id",
- "estimate_point",
- "issue_cycle__cycle_id",
- "issue_module__module_id",
- "priority",
- "start_date",
- "target_date",
- "created_at",
- "completed_at",
- ]
+ valid_xaxis_segment = [
+ "state_id",
+ "state__group",
+ "labels__id",
+ "assignees__id",
+ "estimate_point",
+ "issue_cycle__cycle_id",
+ "issue_module__module_id",
+ "priority",
+ "start_date",
+ "target_date",
+ "created_at",
+ "completed_at",
+ ]
- valid_yaxis = [
- "issue_count",
- "estimate",
- ]
-
- # Check for x-axis and y-axis as thery are required parameters
- if (
- not x_axis
- or not y_axis
- or not x_axis in valid_xaxis_segment
- or not y_axis in valid_yaxis
- ):
- return Response(
- {
- "error": "x-axis and y-axis dimensions are required and the values should be valid"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # If segment is present it cannot be same as x-axis
- if segment and (segment not in valid_xaxis_segment or x_axis == segment):
- return Response(
- {
- "error": "Both segment and x axis cannot be same and segment should be valid"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
- analytic_export_task.delay(
- email=request.user.email, data=request.data, slug=slug
- )
+ valid_yaxis = [
+ "issue_count",
+ "estimate",
+ ]
+        # Check for x-axis and y-axis as they are required parameters
+ if (
+ not x_axis
+ or not y_axis
+            or x_axis not in valid_xaxis_segment
+            or y_axis not in valid_yaxis
+ ):
return Response(
{
- "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
+ "error": "x-axis and y-axis dimensions are required and the values should be valid"
},
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
+ # If segment is present it cannot be same as x-axis
+ if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+ return Response(
+ {
+ "error": "Both segment and x axis cannot be same and segment should be valid"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ analytic_export_task.delay(
+ email=request.user.email, data=request.data, slug=slug
+ )
+
+ return Response(
+ {
+ "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
+ },
+ status=status.HTTP_200_OK,
+ )
+
class DefaultAnalyticsEndpoint(BaseAPIView):
permission_classes = [
@@ -327,102 +293,92 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- filters = issue_filters(request.GET, "GET")
- base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
+ filters = issue_filters(request.GET, "GET")
+ base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
- total_issues = base_issues.count()
+ total_issues = base_issues.count()
- state_groups = base_issues.annotate(state_group=F("state__group"))
+ state_groups = base_issues.annotate(state_group=F("state__group"))
- total_issues_classified = (
- state_groups.values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
+ total_issues_classified = (
+ state_groups.values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
- open_issues_groups = ["backlog", "unstarted", "started"]
- open_issues_queryset = state_groups.filter(
- state__group__in=open_issues_groups
- )
+ open_issues_groups = ["backlog", "unstarted", "started"]
+ open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)
- open_issues = open_issues_queryset.count()
- open_issues_classified = (
- open_issues_queryset.values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
+ open_issues = open_issues_queryset.count()
+ open_issues_classified = (
+ open_issues_queryset.values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
- issue_completed_month_wise = (
- base_issues.filter(completed_at__isnull=False)
- .annotate(month=ExtractMonth("completed_at"))
- .values("month")
- .annotate(count=Count("*"))
- .order_by("month")
- )
+ issue_completed_month_wise = (
+ base_issues.filter(completed_at__isnull=False)
+ .annotate(month=ExtractMonth("completed_at"))
+ .values("month")
+ .annotate(count=Count("*"))
+ .order_by("month")
+ )
- user_details = [
- "created_by__first_name",
- "created_by__last_name",
- "created_by__avatar",
- "created_by__display_name",
- "created_by__id",
- ]
+ user_details = [
+ "created_by__first_name",
+ "created_by__last_name",
+ "created_by__avatar",
+ "created_by__display_name",
+ "created_by__id",
+ ]
- most_issue_created_user = (
- base_issues.exclude(created_by=None)
- .values(*user_details)
- .annotate(count=Count("id"))
- .order_by("-count")[:5]
- )
+ most_issue_created_user = (
+ base_issues.exclude(created_by=None)
+ .values(*user_details)
+ .annotate(count=Count("id"))
+ .order_by("-count")[:5]
+ )
- user_assignee_details = [
- "assignees__first_name",
- "assignees__last_name",
- "assignees__avatar",
- "assignees__display_name",
- "assignees__id",
- ]
+ user_assignee_details = [
+ "assignees__first_name",
+ "assignees__last_name",
+ "assignees__avatar",
+ "assignees__display_name",
+ "assignees__id",
+ ]
- most_issue_closed_user = (
- base_issues.filter(completed_at__isnull=False)
- .exclude(assignees=None)
- .values(*user_assignee_details)
- .annotate(count=Count("id"))
- .order_by("-count")[:5]
- )
+ most_issue_closed_user = (
+ base_issues.filter(completed_at__isnull=False)
+ .exclude(assignees=None)
+ .values(*user_assignee_details)
+ .annotate(count=Count("id"))
+ .order_by("-count")[:5]
+ )
- pending_issue_user = (
- base_issues.filter(completed_at__isnull=True)
- .values(*user_assignee_details)
- .annotate(count=Count("id"))
- .order_by("-count")
- )
+ pending_issue_user = (
+ base_issues.filter(completed_at__isnull=True)
+ .values(*user_assignee_details)
+ .annotate(count=Count("id"))
+ .order_by("-count")
+ )
- open_estimate_sum = open_issues_queryset.aggregate(
- sum=Sum("estimate_point")
- )["sum"]
- total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
+ open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
+ "sum"
+ ]
+ total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
- return Response(
- {
- "total_issues": total_issues,
- "total_issues_classified": total_issues_classified,
- "open_issues": open_issues,
- "open_issues_classified": open_issues_classified,
- "issue_completed_month_wise": issue_completed_month_wise,
- "most_issue_created_user": most_issue_created_user,
- "most_issue_closed_user": most_issue_closed_user,
- "pending_issue_user": pending_issue_user,
- "open_estimate_sum": open_estimate_sum,
- "total_estimate_sum": total_estimate_sum,
- },
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong. Please try again later."},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {
+ "total_issues": total_issues,
+ "total_issues_classified": total_issues_classified,
+ "open_issues": open_issues,
+ "open_issues_classified": open_issues_classified,
+ "issue_completed_month_wise": issue_completed_month_wise,
+ "most_issue_created_user": most_issue_created_user,
+ "most_issue_closed_user": most_issue_closed_user,
+ "pending_issue_user": pending_issue_user,
+ "open_estimate_sum": open_estimate_sum,
+ "total_estimate_sum": total_estimate_sum,
+ },
+ status=status.HTTP_200_OK,
+ )
diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py
index a94ffb45c..2253903a9 100644
--- a/apiserver/plane/api/views/api_token.py
+++ b/apiserver/plane/api/views/api_token.py
@@ -14,57 +14,34 @@ from plane.api.serializers import APITokenSerializer
class ApiTokenEndpoint(BaseAPIView):
def post(self, request):
- try:
- label = request.data.get("label", str(uuid4().hex))
- workspace = request.data.get("workspace", False)
+ label = request.data.get("label", str(uuid4().hex))
+ workspace = request.data.get("workspace", False)
- if not workspace:
- return Response(
- {"error": "Workspace is required"}, status=status.HTTP_200_OK
- )
-
- api_token = APIToken.objects.create(
- label=label, user=request.user, workspace_id=workspace
- )
-
- serializer = APITokenSerializer(api_token)
- # Token will be only vissible while creating
+ if not workspace:
return Response(
- {"api_token": serializer.data, "token": api_token.token},
- status=status.HTTP_201_CREATED,
+                {"error": "Workspace is required"}, status=status.HTTP_400_BAD_REQUEST
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ api_token = APIToken.objects.create(
+ label=label, user=request.user, workspace_id=workspace
+ )
+
+ serializer = APITokenSerializer(api_token)
+        # Token will only be visible while creating
+ return Response(
+ {"api_token": serializer.data, "token": api_token.token},
+ status=status.HTTP_201_CREATED,
+ )
+
def get(self, request):
- try:
- api_tokens = APIToken.objects.filter(user=request.user)
- serializer = APITokenSerializer(api_tokens, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ api_tokens = APIToken.objects.filter(user=request.user)
+ serializer = APITokenSerializer(api_tokens, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
def delete(self, request, pk):
- try:
- api_token = APIToken.objects.get(pk=pk)
- api_token.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except APIToken.DoesNotExist:
- return Response(
- {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ api_token = APIToken.objects.get(pk=pk)
+ api_token.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py
index d9b6e502d..3f5dcceac 100644
--- a/apiserver/plane/api/views/asset.py
+++ b/apiserver/plane/api/views/asset.py
@@ -18,108 +18,58 @@ class FileAssetEndpoint(BaseAPIView):
"""
def get(self, request, workspace_id, asset_key):
- try:
- asset_key = str(workspace_id) + "/" + asset_key
- files = FileAsset.objects.filter(asset=asset_key)
- if files.exists():
- serializer = FileAssetSerializer(files, context={"request": request}, many=True)
- return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
- else:
- return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ asset_key = str(workspace_id) + "/" + asset_key
+ files = FileAsset.objects.filter(asset=asset_key)
+ if files.exists():
+ serializer = FileAssetSerializer(files, context={"request": request}, many=True)
+ return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
+ else:
+ return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
def post(self, request, slug):
- try:
- serializer = FileAssetSerializer(data=request.data)
- if serializer.is_valid():
- # Get the workspace
- workspace = Workspace.objects.get(slug=slug)
- serializer.save(workspace_id=workspace.id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Workspace.DoesNotExist:
- return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = FileAssetSerializer(data=request.data)
+ if serializer.is_valid():
+ # Get the workspace
+ workspace = Workspace.objects.get(slug=slug)
+ serializer.save(workspace_id=workspace.id)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
def delete(self, request, workspace_id, asset_key):
- try:
- asset_key = str(workspace_id) + "/" + asset_key
- file_asset = FileAsset.objects.get(asset=asset_key)
- # Delete the file from storage
- file_asset.asset.delete(save=False)
- # Delete the file object
- file_asset.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except FileAsset.DoesNotExist:
- return Response(
- {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ asset_key = str(workspace_id) + "/" + asset_key
+ file_asset = FileAsset.objects.get(asset=asset_key)
+ # Delete the file from storage
+ file_asset.asset.delete(save=False)
+ # Delete the file object
+ file_asset.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class UserAssetsEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser)
def get(self, request, asset_key):
- try:
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
if files.exists():
serializer = FileAssetSerializer(files, context={"request": request})
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
def post(self, request):
- try:
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+
def delete(self, request, asset_key):
- try:
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
# Delete the file from storage
file_asset.asset.delete(save=False)
# Delete the file object
file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
- except FileAsset.DoesNotExist:
- return Response(
- {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py
index 161314294..c7107ecfa 100644
--- a/apiserver/plane/api/views/auth_extended.py
+++ b/apiserver/plane/api/views/auth_extended.py
@@ -127,32 +127,25 @@ class ResetPasswordEndpoint(BaseAPIView):
class ChangePasswordEndpoint(BaseAPIView):
def post(self, request):
- try:
- serializer = ChangePasswordSerializer(data=request.data)
+ serializer = ChangePasswordSerializer(data=request.data)
- user = User.objects.get(pk=request.user.id)
- if serializer.is_valid():
- # Check old password
- if not user.object.check_password(serializer.data.get("old_password")):
- return Response(
- {"old_password": ["Wrong password."]},
- status=status.HTTP_400_BAD_REQUEST,
- )
- # set_password also hashes the password that the user will get
- self.object.set_password(serializer.data.get("new_password"))
- self.object.save()
- response = {
- "status": "success",
- "code": status.HTTP_200_OK,
- "message": "Password updated successfully",
- }
+ user = User.objects.get(pk=request.user.id)
+ if serializer.is_valid():
+ # Check old password
+            if not user.check_password(serializer.data.get("old_password")):
+ return Response(
+ {"old_password": ["Wrong password."]},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ # set_password also hashes the password that the user will get
+            user.set_password(serializer.data.get("new_password"))
+            user.save()
+ response = {
+ "status": "success",
+ "code": status.HTTP_200_OK,
+ "message": "Password updated successfully",
+ }
- return Response(response)
+ return Response(response)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py
index 19466a8ff..2f01abb0c 100644
--- a/apiserver/plane/api/views/authentication.py
+++ b/apiserver/plane/api/views/authentication.py
@@ -40,223 +40,194 @@ class SignUpEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
- try:
- if not settings.ENABLE_SIGNUP:
- return Response(
- {
- "error": "New account creation is disabled. Please contact your site administrator"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = request.data.get("email", False)
- password = request.data.get("password", False)
-
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = email.strip().lower()
-
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Check if the user already exists
- if User.objects.filter(email=email).exists():
- return Response(
- {"error": "User with this email already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.create(email=email, username=uuid.uuid4().hex)
- user.set_password(password)
-
- # settings last actives for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- access_token, refresh_token = get_tokens_for_user(user)
-
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- }
-
- # Send Analytics
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "email",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_UP",
- },
- )
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
+ if not settings.ENABLE_SIGNUP:
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "New account creation is disabled. Please contact your site administrator"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ email = request.data.get("email", False)
+ password = request.data.get("password", False)
+
+ ## Raise exception if any of the above are missing
+ if not email or not password:
+ return Response(
+ {"error": "Both email and password are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ email = email.strip().lower()
+
+ try:
+ validate_email(email)
+ except ValidationError as e:
+ return Response(
+ {"error": "Please provide a valid email address."},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Check if the user already exists
+ if User.objects.filter(email=email).exists():
+ return Response(
+ {"error": "User with this email already exists"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ user = User.objects.create(email=email, username=uuid.uuid4().hex)
+ user.set_password(password)
+
+ # settings last actives for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
+
+ access_token, refresh_token = get_tokens_for_user(user)
+
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ }
+
+ # Send Analytics
+ if settings.ANALYTICS_BASE_API:
+ _ = requests.post(
+ settings.ANALYTICS_BASE_API,
+ headers={
+ "Content-Type": "application/json",
+ "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
+ },
+ json={
+ "event_id": uuid.uuid4().hex,
+ "event_data": {
+ "medium": "email",
+ },
+ "user": {"email": email, "id": str(user.id)},
+ "device_ctx": {
+ "ip": request.META.get("REMOTE_ADDR"),
+ "user_agent": request.META.get("HTTP_USER_AGENT"),
+ },
+ "event_type": "SIGN_UP",
+ },
+ )
+
+ return Response(data, status=status.HTTP_200_OK)
+
class SignInEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
- try:
- email = request.data.get("email", False)
- password = request.data.get("password", False)
+ email = request.data.get("email", False)
+ password = request.data.get("password", False)
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = email.strip().lower()
-
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.filter(email=email).first()
-
- if user is None:
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
-
- # Sign up Process
- if not user.check_password(password):
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
- if not user.is_active:
- return Response(
- {
- "error": "Your account has been deactivated. Please contact your site administrator."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
-
- # settings last active for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- access_token, refresh_token = get_tokens_for_user(user)
- # Send Analytics
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "email",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_IN",
- },
- )
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
+ ## Raise exception if any of the above are missing
+ if not email or not password:
return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
+ {"error": "Both email and password are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ email = email.strip().lower()
+
+ try:
+ validate_email(email)
+ except ValidationError as e:
+ return Response(
+ {"error": "Please provide a valid email address."},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ user = User.objects.filter(email=email).first()
+
+ if user is None:
+ return Response(
+ {
+ "error": "Sorry, we could not find a user with the provided credentials. Please try again."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ # Sign up Process
+ if not user.check_password(password):
+ return Response(
+ {
+ "error": "Sorry, we could not find a user with the provided credentials. Please try again."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
+ if not user.is_active:
+ return Response(
+ {
+ "error": "Your account has been deactivated. Please contact your site administrator."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+        # set last-active tracking fields for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
+
+ access_token, refresh_token = get_tokens_for_user(user)
+ # Send Analytics
+ if settings.ANALYTICS_BASE_API:
+ _ = requests.post(
+ settings.ANALYTICS_BASE_API,
+ headers={
+ "Content-Type": "application/json",
+ "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
+ },
+ json={
+ "event_id": uuid.uuid4().hex,
+ "event_data": {
+ "medium": "email",
+ },
+ "user": {"email": email, "id": str(user.id)},
+ "device_ctx": {
+ "ip": request.META.get("REMOTE_ADDR"),
+ "user_agent": request.META.get("HTTP_USER_AGENT"),
+ },
+ "event_type": "SIGN_IN",
+ },
+ )
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ }
+
+ return Response(data, status=status.HTTP_200_OK)
+
class SignOutEndpoint(BaseAPIView):
def post(self, request):
- try:
- refresh_token = request.data.get("refresh_token", False)
+ refresh_token = request.data.get("refresh_token", False)
- if not refresh_token:
- capture_message("No refresh token provided")
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.get(pk=request.user.id)
-
- user.last_logout_time = timezone.now()
- user.last_logout_ip = request.META.get("REMOTE_ADDR")
-
- user.save()
-
- token = RefreshToken(refresh_token)
- token.blacklist()
- return Response({"message": "success"}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ if not refresh_token:
+ capture_message("No refresh token provided")
return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
+ {"error": "No refresh token provided"},
status=status.HTTP_400_BAD_REQUEST,
)
+ user = User.objects.get(pk=request.user.id)
+
+ user.last_logout_time = timezone.now()
+ user.last_logout_ip = request.META.get("REMOTE_ADDR")
+
+ user.save()
+
+ token = RefreshToken(refresh_token)
+ token.blacklist()
+ return Response({"message": "success"}, status=status.HTTP_200_OK)
+
class MagicSignInGenerateEndpoint(BaseAPIView):
permission_classes = [
@@ -264,74 +235,62 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
]
def post(self, request):
- try:
- email = request.data.get("email", False)
+ email = request.data.get("email", False)
- if not email:
+ if not email:
+ return Response(
+ {"error": "Please provide a valid email address"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Clean up
+ email = email.strip().lower()
+ validate_email(email)
+
+ ## Generate a random token
+ token = (
+ "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ + "-"
+ + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ + "-"
+ + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ )
+
+ ri = redis_instance()
+
+ key = "magic_" + str(email)
+
+        # Check if the key already exists in redis
+ if ri.exists(key):
+ data = json.loads(ri.get(key))
+
+ current_attempt = data["current_attempt"] + 1
+
+ if data["current_attempt"] > 2:
return Response(
- {"error": "Please provide a valid email address"},
+ {"error": "Max attempts exhausted. Please try again later."},
status=status.HTTP_400_BAD_REQUEST,
)
- # Clean up
- email = email.strip().lower()
- validate_email(email)
+ value = {
+ "current_attempt": current_attempt,
+ "email": email,
+ "token": token,
+ }
+ expiry = 600
- ## Generate a random token
- token = (
- "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- + "-"
- + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- + "-"
- + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- )
+ ri.set(key, json.dumps(value), ex=expiry)
- ri = redis_instance()
+ else:
+ value = {"current_attempt": 0, "email": email, "token": token}
+ expiry = 600
- key = "magic_" + str(email)
+ ri.set(key, json.dumps(value), ex=expiry)
- # Check if the key already exists in python
- if ri.exists(key):
- data = json.loads(ri.get(key))
+ current_site = settings.WEB_URL
+ magic_link.delay(email, key, token, current_site)
- current_attempt = data["current_attempt"] + 1
-
- if data["current_attempt"] > 2:
- return Response(
- {"error": "Max attempts exhausted. Please try again later."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- value = {
- "current_attempt": current_attempt,
- "email": email,
- "token": token,
- }
- expiry = 600
-
- ri.set(key, json.dumps(value), ex=expiry)
-
- else:
- value = {"current_attempt": 0, "email": email, "token": token}
- expiry = 600
-
- ri.set(key, json.dumps(value), ex=expiry)
-
- current_site = settings.WEB_URL
- magic_link.delay(email, key, token, current_site)
-
- return Response({"key": key}, status=status.HTTP_200_OK)
- except ValidationError:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response({"key": key}, status=status.HTTP_200_OK)
class MagicSignInEndpoint(BaseAPIView):
@@ -340,111 +299,99 @@ class MagicSignInEndpoint(BaseAPIView):
]
def post(self, request):
- try:
- user_token = request.data.get("token", "").strip()
- key = request.data.get("key", False).strip().lower()
+ user_token = request.data.get("token", "").strip()
+ key = request.data.get("key", False).strip().lower()
- if not key or user_token == "":
- return Response(
- {"error": "User token and key are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if not key or user_token == "":
+ return Response(
+ {"error": "User token and key are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- ri = redis_instance()
+ ri = redis_instance()
- if ri.exists(key):
- data = json.loads(ri.get(key))
+ if ri.exists(key):
+ data = json.loads(ri.get(key))
- token = data["token"]
- email = data["email"]
+ token = data["token"]
+ email = data["email"]
- if str(token) == str(user_token):
- if User.objects.filter(email=email).exists():
- user = User.objects.get(email=email)
- # Send event to Jitsu for tracking
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
+ if str(token) == str(user_token):
+ if User.objects.filter(email=email).exists():
+ user = User.objects.get(email=email)
+ # Send event to Jitsu for tracking
+ if settings.ANALYTICS_BASE_API:
+ _ = requests.post(
+ settings.ANALYTICS_BASE_API,
+ headers={
+ "Content-Type": "application/json",
+ "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
+ },
+ json={
+ "event_id": uuid.uuid4().hex,
+ "event_data": {
+ "medium": "code",
},
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "code",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get(
- "HTTP_USER_AGENT"
- ),
- },
- "event_type": "SIGN_IN",
+ "user": {"email": email, "id": str(user.id)},
+ "device_ctx": {
+ "ip": request.META.get("REMOTE_ADDR"),
+ "user_agent": request.META.get("HTTP_USER_AGENT"),
},
- )
- else:
- user = User.objects.create(
- email=email,
- username=uuid.uuid4().hex,
- password=make_password(uuid.uuid4().hex),
- is_password_autoset=True,
+ "event_type": "SIGN_IN",
+ },
)
- # Send event to Jitsu for tracking
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "code",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get(
- "HTTP_USER_AGENT"
- ),
- },
- "event_type": "SIGN_UP",
- },
- )
-
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- access_token, refresh_token = get_tokens_for_user(user)
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
else:
- return Response(
- {"error": "Your login code was incorrect. Please try again."},
- status=status.HTTP_400_BAD_REQUEST,
+ user = User.objects.create(
+ email=email,
+ username=uuid.uuid4().hex,
+ password=make_password(uuid.uuid4().hex),
+ is_password_autoset=True,
)
+ # Send event to Jitsu for tracking
+ if settings.ANALYTICS_BASE_API:
+ _ = requests.post(
+ settings.ANALYTICS_BASE_API,
+ headers={
+ "Content-Type": "application/json",
+ "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
+ },
+ json={
+ "event_id": uuid.uuid4().hex,
+ "event_data": {
+ "medium": "code",
+ },
+ "user": {"email": email, "id": str(user.id)},
+ "device_ctx": {
+ "ip": request.META.get("REMOTE_ADDR"),
+ "user_agent": request.META.get("HTTP_USER_AGENT"),
+ },
+ "event_type": "SIGN_UP",
+ },
+ )
+
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
+
+ access_token, refresh_token = get_tokens_for_user(user)
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ }
+
+ return Response(data, status=status.HTTP_200_OK)
else:
return Response(
- {"error": "The magic code/link has expired please try again"},
+ {"error": "Your login code was incorrect. Please try again."},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+ else:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "The magic code/link has expired please try again"},
status=status.HTTP_400_BAD_REQUEST,
)
diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py
index 60b0ec0c6..538c8e484 100644
--- a/apiserver/plane/api/views/base.py
+++ b/apiserver/plane/api/views/base.py
@@ -5,10 +5,14 @@ import zoneinfo
from django.urls import resolve
from django.conf import settings
from django.utils import timezone
-# Third part imports
+from django.db import IntegrityError
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+# Third party imports
+from rest_framework import status
from rest_framework import status
from rest_framework.viewsets import ModelViewSet
+from rest_framework.response import Response
from rest_framework.exceptions import APIException
from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
@@ -33,8 +37,6 @@ class TimezoneMixin:
timezone.deactivate()
-
-
class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
model = None
@@ -58,17 +60,49 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
except Exception as e:
capture_exception(e)
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
+
+ def handle_exception(self, exc):
+ """
+ Handle any exception that occurs, by returning an appropriate response,
+ or re-raising the error.
+ """
+ try:
+ response = super().handle_exception(exc)
+ return response
+ except Exception as e:
+ if isinstance(e, IntegrityError):
+ return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+
+ if isinstance(e, ValidationError):
+ return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
+
+ if isinstance(e, ObjectDoesNotExist):
+ model_name = str(exc).split(" matching query does not exist.")[0]
+ return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
+
+ if isinstance(e, KeyError):
+ capture_exception(e)
+ return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
+
+ capture_exception(e)
+ return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
def dispatch(self, request, *args, **kwargs):
- response = super().dispatch(request, *args, **kwargs)
+ try:
+ response = super().dispatch(request, *args, **kwargs)
- if settings.DEBUG:
- from django.db import connection
+ if settings.DEBUG:
+ from django.db import connection
- print(
- f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
- )
- return response
+ print(
+ f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+ )
+ return response
+
+ except Exception as exc:
+ response = self.handle_exception(exc)
+            return response
@property
def workspace_slug(self):
@@ -104,16 +138,48 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset
+
+ def handle_exception(self, exc):
+ """
+ Handle any exception that occurs, by returning an appropriate response,
+ or re-raising the error.
+ """
+ try:
+ response = super().handle_exception(exc)
+ return response
+ except Exception as e:
+ if isinstance(e, IntegrityError):
+ return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+
+ if isinstance(e, ValidationError):
+ return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
+
+ if isinstance(e, ObjectDoesNotExist):
+ model_name = str(exc).split(" matching query does not exist.")[0]
+ return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
+
+ if isinstance(e, KeyError):
+ return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
+
+ capture_exception(e)
+ return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
def dispatch(self, request, *args, **kwargs):
- response = super().dispatch(request, *args, **kwargs)
+ try:
+ response = super().dispatch(request, *args, **kwargs)
- if settings.DEBUG:
- from django.db import connection
+ if settings.DEBUG:
+ from django.db import connection
- print(
- f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
- )
- return response
+ print(
+ f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+ )
+ return response
+
+ except Exception as exc:
+ response = self.handle_exception(exc)
+            return response
@property
def workspace_slug(self):
diff --git a/apiserver/plane/api/views/config.py b/apiserver/plane/api/views/config.py
index ea1b39d9c..f59ca04a0 100644
--- a/apiserver/plane/api/views/config.py
+++ b/apiserver/plane/api/views/config.py
@@ -20,21 +20,14 @@ class ConfigurationEndpoint(BaseAPIView):
]
def get(self, request):
- try:
- data = {}
- data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
- data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
- data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
- data["magic_login"] = (
- bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
- ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
- data["email_password_login"] = (
- os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
- )
- return Response(data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ data = {}
+ data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
+ data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
+ data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
+ data["magic_login"] = (
+ bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
+ ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
+ data["email_password_login"] = (
+ os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
+ )
+ return Response(data, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index e59b59b5c..7b14af4a2 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -2,7 +2,6 @@
import json
# Django imports
-from django.db import IntegrityError
from django.db.models import (
OuterRef,
Func,
@@ -62,7 +61,6 @@ class CycleViewSet(BaseViewSet):
project_id=self.kwargs.get("project_id"), owned_by=self.request.user
)
-
def get_queryset(self):
subquery = CycleFavorite.objects.filter(
user=self.request.user,
@@ -177,378 +175,328 @@ class CycleViewSet(BaseViewSet):
)
def list(self, request, slug, project_id):
- try:
- queryset = self.get_queryset()
- cycle_view = request.GET.get("cycle_view", "all")
- order_by = request.GET.get("order_by", "sort_order")
+ queryset = self.get_queryset()
+ cycle_view = request.GET.get("cycle_view", "all")
+ order_by = request.GET.get("order_by", "sort_order")
- queryset = queryset.order_by(order_by)
+ queryset = queryset.order_by(order_by)
- # Current Cycle
- if cycle_view == "current":
- queryset = queryset.filter(
- start_date__lte=timezone.now(),
- end_date__gte=timezone.now(),
- )
+ # Current Cycle
+ if cycle_view == "current":
+ queryset = queryset.filter(
+ start_date__lte=timezone.now(),
+ end_date__gte=timezone.now(),
+ )
- data = CycleSerializer(queryset, many=True).data
+ data = CycleSerializer(queryset, many=True).data
- if len(data):
- assignee_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=data[0]["id"],
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(display_name=F("assignees__display_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(avatar=F("assignees__avatar"))
- .values("display_name", "assignee_id", "avatar")
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
+ if len(data):
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=data[0]["id"],
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(display_name=F("assignees__display_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(avatar=F("assignees__avatar"))
+ .values("display_name", "assignee_id", "avatar")
+ .annotate(
+ total_issues=Count(
+ "assignee_id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
),
)
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .annotate(
+ pending_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
)
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .order_by("display_name")
+ )
+
+ label_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=data[0]["id"],
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "label_id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
)
- .order_by("display_name")
+ )
+ .annotate(
+ completed_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
+ )
+ data[0]["distribution"] = {
+ "assignees": assignee_distribution,
+ "labels": label_distribution,
+ "completion_chart": {},
+ }
+ if data[0]["start_date"] and data[0]["end_date"]:
+ data[0]["distribution"]["completion_chart"] = burndown_plot(
+ queryset=queryset.first(),
+ slug=slug,
+ project_id=project_id,
+ cycle_id=data[0]["id"],
)
- label_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=data[0]["id"],
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
- )
- )
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("label_name")
- )
- data[0]["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
- if data[0]["start_date"] and data[0]["end_date"]:
- data[0]["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset.first(),
- slug=slug,
- project_id=project_id,
- cycle_id=data[0]["id"],
- )
+ return Response(data, status=status.HTTP_200_OK)
- return Response(data, status=status.HTTP_200_OK)
-
- # Upcoming Cycles
- if cycle_view == "upcoming":
- queryset = queryset.filter(start_date__gt=timezone.now())
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Completed Cycles
- if cycle_view == "completed":
- queryset = queryset.filter(end_date__lt=timezone.now())
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Draft Cycles
- if cycle_view == "draft":
- queryset = queryset.filter(
- end_date=None,
- start_date=None,
- )
-
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Incomplete Cycles
- if cycle_view == "incomplete":
- queryset = queryset.filter(
- Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
- )
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # If no matching view is found return all cycles
+ # Upcoming Cycles
+ if cycle_view == "upcoming":
+ queryset = queryset.filter(start_date__gt=timezone.now())
return Response(
CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
)
- except Exception as e:
- capture_exception(e)
+ # Completed Cycles
+ if cycle_view == "completed":
+ queryset = queryset.filter(end_date__lt=timezone.now())
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+ )
+
+ # Draft Cycles
+ if cycle_view == "draft":
+ queryset = queryset.filter(
+ end_date=None,
+ start_date=None,
)
- def create(self, request, slug, project_id):
- try:
- if (
- request.data.get("start_date", None) is None
- and request.data.get("end_date", None) is None
- ) or (
- request.data.get("start_date", None) is not None
- and request.data.get("end_date", None) is not None
- ):
- serializer = CycleSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- owned_by=request.user,
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(
- {
- "error": "Both start date and end date are either required or are to be null"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
return Response(
- {"error": "Something went wrong please try again later"},
+ CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+ )
+
+ # Incomplete Cycles
+ if cycle_view == "incomplete":
+ queryset = queryset.filter(
+ Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
+ )
+ return Response(
+ CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+ )
+
+ # If no matching view is found return all cycles
+ return Response(
+ CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+ )
+
+ def create(self, request, slug, project_id):
+ if (
+ request.data.get("start_date", None) is None
+ and request.data.get("end_date", None) is None
+ ) or (
+ request.data.get("start_date", None) is not None
+ and request.data.get("end_date", None) is not None
+ ):
+ serializer = CycleSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ owned_by=request.user,
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(
+ {
+ "error": "Both start date and end date are either required or are to be null"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, pk):
- try:
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
- request_data = request.data
+ request_data = request.data
- if cycle.end_date is not None and cycle.end_date < timezone.now().date():
- if "sort_order" in request_data:
- # Can only change sort order
- request_data = {
- "sort_order": request_data.get("sort_order", cycle.sort_order)
- }
- else:
- return Response(
- {
- "error": "The Cycle has already been completed so it cannot be edited"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if cycle.end_date is not None and cycle.end_date < timezone.now().date():
+ if "sort_order" in request_data:
+ # Can only change sort order
+ request_data = {
+ "sort_order": request_data.get("sort_order", cycle.sort_order)
+ }
+ else:
+ return Response(
+ {
+ "error": "The Cycle has already been completed so it cannot be edited"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Cycle.DoesNotExist:
- return Response(
- {"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk):
- try:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = self.get_queryset().get(pk=pk)
- # Assignee Distribution
- assignee_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(first_name=F("assignees__first_name"))
- .annotate(last_name=F("assignees__last_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(avatar=F("assignees__avatar"))
- .annotate(display_name=F("assignees__display_name"))
- .values(
- "first_name", "last_name", "assignee_id", "avatar", "display_name"
- )
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
+ # Assignee Distribution
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(first_name=F("assignees__first_name"))
+ .annotate(last_name=F("assignees__last_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(avatar=F("assignees__avatar"))
+ .annotate(display_name=F("assignees__display_name"))
+ .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
+ .annotate(
+ total_issues=Count(
+ "assignee_id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
),
)
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("first_name", "last_name")
)
-
- # Label Distribution
- label_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
+ .annotate(
+ pending_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
),
)
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .order_by("first_name", "last_name")
+ )
+
+ # Label Distribution
+ label_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "label_id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
)
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .annotate(
+ pending_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
)
- .order_by("label_name")
+ )
+ .order_by("label_name")
+ )
+
+ data = CycleSerializer(queryset).data
+ data["distribution"] = {
+ "assignees": assignee_distribution,
+ "labels": label_distribution,
+ "completion_chart": {},
+ }
+
+ if queryset.start_date and queryset.end_date:
+ data["distribution"]["completion_chart"] = burndown_plot(
+ queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk
)
- data = CycleSerializer(queryset).data
- data["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
-
- if queryset.start_date and queryset.end_date:
- data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk
- )
-
- return Response(
- data,
- status=status.HTTP_200_OK,
- )
- except Cycle.DoesNotExist:
- return Response(
- {"error": "Cycle Does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ data,
+ status=status.HTTP_200_OK,
+ )
def destroy(self, request, slug, project_id, pk):
- try:
- cycle_issues = list(
- CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
- "issue", flat=True
- )
- )
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- # Delete the cycle
- cycle.delete()
- issue_activity.delay(
- type="cycle.activity.deleted",
- requested_data=json.dumps(
- {
- "cycle_id": str(pk),
- "issues": [str(issue_id) for issue_id in cycle_issues],
- }
- ),
- actor_id=str(request.user.id),
- issue_id=str(pk),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ cycle_issues = list(
+ CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
+ "issue", flat=True
)
+ )
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ # Delete the cycle
+ cycle.delete()
+ issue_activity.delay(
+ type="cycle.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "cycle_id": str(pk),
+ "issues": [str(issue_id) for issue_id in cycle_issues],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class CycleIssueViewSet(BaseViewSet):
@@ -594,197 +542,174 @@ class CycleIssueViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id, cycle_id):
- try:
- order_by = request.GET.get("order_by", "created_at")
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(bridge_id=F("issue_cycle__id"))
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by)
- .filter(**filters)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ order_by = request.GET.get("order_by", "created_at")
+ group_by = request.GET.get("group_by", False)
+ sub_group_by = request.GET.get("sub_group_by", False)
+ filters = issue_filters(request.query_params, "GET")
+ issues = (
+ Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- issues_data = IssueStateSerializer(issues, many=True).data
-
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues_data, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
+ .annotate(bridge_id=F("issue_cycle__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .filter(**filters)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
- except Exception as e:
- capture_exception(e)
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+
+ issues_data = IssueStateSerializer(issues, many=True).data
+
+ if sub_group_by and sub_group_by == group_by:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
+ if group_by:
+ return Response(
+ group_results(issues_data, group_by, sub_group_by),
+ status=status.HTTP_200_OK,
+ )
+
+ return Response(
+ issues_data,
+ status=status.HTTP_200_OK,
+ )
+
def create(self, request, slug, project_id, cycle_id):
- try:
- issues = request.data.get("issues", [])
+ issues = request.data.get("issues", [])
- if not len(issues):
- return Response(
- {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=cycle_id
- )
-
- if cycle.end_date is not None and cycle.end_date < timezone.now().date():
- return Response(
- {
- "error": "The Cycle has already been completed so no new issues can be added"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get all CycleIssues already created
- cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
- update_cycle_issue_activity = []
- record_to_create = []
- records_to_update = []
-
- for issue in issues:
- cycle_issue = [
- cycle_issue
- for cycle_issue in cycle_issues
- if str(cycle_issue.issue_id) in issues
- ]
- # Update only when cycle changes
- if len(cycle_issue):
- if cycle_issue[0].cycle_id != cycle_id:
- update_cycle_issue_activity.append(
- {
- "old_cycle_id": str(cycle_issue[0].cycle_id),
- "new_cycle_id": str(cycle_id),
- "issue_id": str(cycle_issue[0].issue_id),
- }
- )
- cycle_issue[0].cycle_id = cycle_id
- records_to_update.append(cycle_issue[0])
- else:
- record_to_create.append(
- CycleIssue(
- project_id=project_id,
- workspace=cycle.workspace,
- created_by=request.user,
- updated_by=request.user,
- cycle=cycle,
- issue_id=issue,
- )
- )
-
- CycleIssue.objects.bulk_create(
- record_to_create,
- batch_size=10,
- ignore_conflicts=True,
- )
- CycleIssue.objects.bulk_update(
- records_to_update,
- ["cycle"],
- batch_size=10,
- )
-
- # Capture Issue Activity
- issue_activity.delay(
- type="cycle.activity.created",
- requested_data=json.dumps({"cycles_list": issues}),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "updated_cycle_issues": update_cycle_issue_activity,
- "created_cycle_issues": serializers.serialize(
- "json", record_to_create
- ),
- }
- ),
- epoch=int(timezone.now().timestamp()),
- )
-
- # Return all Cycle Issues
+ if not len(issues):
return Response(
- CycleIssueSerializer(self.get_queryset(), many=True).data,
- status=status.HTTP_200_OK,
+ {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
)
- except Cycle.DoesNotExist:
+ cycle = Cycle.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ )
+
+ if cycle.end_date is not None and cycle.end_date < timezone.now().date():
return Response(
- {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "The Cycle has already been completed so no new issues can be added"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ # Get all CycleIssues already created
+ cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
+ update_cycle_issue_activity = []
+ record_to_create = []
+ records_to_update = []
+
+ for issue in issues:
+ cycle_issue = [
+ cycle_issue
+ for cycle_issue in cycle_issues
+ if str(cycle_issue.issue_id) in issues
+ ]
+ # Update only when cycle changes
+ if len(cycle_issue):
+ if cycle_issue[0].cycle_id != cycle_id:
+ update_cycle_issue_activity.append(
+ {
+ "old_cycle_id": str(cycle_issue[0].cycle_id),
+ "new_cycle_id": str(cycle_id),
+ "issue_id": str(cycle_issue[0].issue_id),
+ }
+ )
+ cycle_issue[0].cycle_id = cycle_id
+ records_to_update.append(cycle_issue[0])
+ else:
+ record_to_create.append(
+ CycleIssue(
+ project_id=project_id,
+ workspace=cycle.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ cycle=cycle,
+ issue_id=issue,
+ )
+ )
+
+ CycleIssue.objects.bulk_create(
+ record_to_create,
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ CycleIssue.objects.bulk_update(
+ records_to_update,
+ ["cycle"],
+ batch_size=10,
+ )
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="cycle.activity.created",
+ requested_data=json.dumps({"cycles_list": issues}),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("pk", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_cycle_issues": update_cycle_issue_activity,
+ "created_cycle_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ # Return all Cycle Issues
+ return Response(
+ CycleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
def destroy(self, request, slug, project_id, cycle_id, pk):
- try:
- cycle_issue = CycleIssue.objects.get(pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id)
- issue_id = cycle_issue.issue_id
- cycle_issue.delete()
- issue_activity.delay(
- type="cycle.activity.deleted",
- requested_data=json.dumps(
- {
- "cycle_id": str(self.kwargs.get("cycle_id")),
- "issues": [str(issue_id)],
- }
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
+ cycle_issue = CycleIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
+ )
+ issue_id = cycle_issue.issue_id
+ cycle_issue.delete()
+ issue_activity.delay(
+ type="cycle.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "cycle_id": str(self.kwargs.get("cycle_id")),
+ "issues": [str(issue_id)],
+ }
+ ),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("pk", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class CycleDateCheckEndpoint(BaseAPIView):
@@ -793,42 +718,35 @@ class CycleDateCheckEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id):
- try:
- start_date = request.data.get("start_date", False)
- end_date = request.data.get("end_date", False)
- cycle_id = request.data.get("cycle_id")
- if not start_date or not end_date:
- return Response(
- {"error": "Start date and end date both are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- cycles = Cycle.objects.filter(
- Q(workspace__slug=slug)
- & Q(project_id=project_id)
- & (
- Q(start_date__lte=start_date, end_date__gte=start_date)
- | Q(start_date__lte=end_date, end_date__gte=end_date)
- | Q(start_date__gte=start_date, end_date__lte=end_date)
- )
- ).exclude(pk=cycle_id)
-
- if cycles.exists():
- return Response(
- {
- "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates",
- "status": False,
- }
- )
- else:
- return Response({"status": True}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ start_date = request.data.get("start_date", False)
+ end_date = request.data.get("end_date", False)
+ cycle_id = request.data.get("cycle_id")
+ if not start_date or not end_date:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Start date and end date both are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ cycles = Cycle.objects.filter(
+ Q(workspace__slug=slug)
+ & Q(project_id=project_id)
+ & (
+ Q(start_date__lte=start_date, end_date__gte=start_date)
+ | Q(start_date__lte=end_date, end_date__gte=end_date)
+ | Q(start_date__gte=start_date, end_date__lte=end_date)
+ )
+ ).exclude(pk=cycle_id)
+
+ if cycles.exists():
+ return Response(
+ {
+ "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates",
+ "status": False,
+ }
+ )
+ else:
+ return Response({"status": True}, status=status.HTTP_200_OK)
+
class CycleFavoriteViewSet(BaseViewSet):
serializer_class = CycleFavoriteSerializer
@@ -844,52 +762,21 @@ class CycleFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = CycleFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The cycle is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = CycleFavoriteSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(user=request.user, project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, cycle_id):
- try:
- cycle_favorite = CycleFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- cycle_id=cycle_id,
- )
- cycle_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CycleFavorite.DoesNotExist:
- return Response(
- {"error": "Cycle is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ cycle_favorite = CycleFavorite.objects.get(
+ project=project_id,
+ user=request.user,
+ workspace__slug=slug,
+ cycle_id=cycle_id,
+ )
+ cycle_favorite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class TransferCycleIssueEndpoint(BaseAPIView):
@@ -898,55 +785,43 @@ class TransferCycleIssueEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id, cycle_id):
- try:
- new_cycle_id = request.data.get("new_cycle_id", False)
+ new_cycle_id = request.data.get("new_cycle_id", False)
- if not new_cycle_id:
- return Response(
- {"error": "New Cycle Id is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- new_cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=new_cycle_id
- )
-
- if (
- new_cycle.end_date is not None
- and new_cycle.end_date < timezone.now().date()
- ):
- return Response(
- {
- "error": "The cycle where the issues are transferred is already completed"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- cycle_issues = CycleIssue.objects.filter(
- cycle_id=cycle_id,
- project_id=project_id,
- workspace__slug=slug,
- issue__state__group__in=["backlog", "unstarted", "started"],
- )
-
- updated_cycles = []
- for cycle_issue in cycle_issues:
- cycle_issue.cycle_id = new_cycle_id
- updated_cycles.append(cycle_issue)
-
- cycle_issues = CycleIssue.objects.bulk_update(
- updated_cycles, ["cycle_id"], batch_size=100
- )
-
- return Response({"message": "Success"}, status=status.HTTP_200_OK)
- except Cycle.DoesNotExist:
+ if not new_cycle_id:
return Response(
- {"error": "New Cycle Does not exist"},
+ {"error": "New Cycle Id is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ new_cycle = Cycle.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=new_cycle_id
+ )
+
+ if (
+ new_cycle.end_date is not None
+ and new_cycle.end_date < timezone.now().date()
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "The cycle where the issues are transferred is already completed"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+
+ cycle_issues = CycleIssue.objects.filter(
+ cycle_id=cycle_id,
+ project_id=project_id,
+ workspace__slug=slug,
+ issue__state__group__in=["backlog", "unstarted", "started"],
+ )
+
+ updated_cycles = []
+ for cycle_issue in cycle_issues:
+ cycle_issue.cycle_id = new_cycle_id
+ updated_cycles.append(cycle_issue)
+
+ cycle_issues = CycleIssue.objects.bulk_update(
+ updated_cycles, ["cycle_id"], batch_size=100
+ )
+
+ return Response({"message": "Success"}, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/estimate.py b/apiserver/plane/api/views/estimate.py
index 68de54d7a..3c2cca4d5 100644
--- a/apiserver/plane/api/views/estimate.py
+++ b/apiserver/plane/api/views/estimate.py
@@ -1,6 +1,3 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework.response import Response
from rest_framework import status
@@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
]
def get(self, request, slug, project_id):
- try:
project = Project.objects.get(workspace__slug=slug, pk=project_id)
if project.estimate_id is not None:
estimate_points = EstimatePoint.objects.filter(
@@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
serializer = EstimatePointSerializer(estimate_points, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response([], status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
class BulkEstimatePointEndpoint(BaseViewSet):
@@ -50,204 +40,139 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer_class = EstimateSerializer
def list(self, request, slug, project_id):
- try:
- estimates = Estimate.objects.filter(
- workspace__slug=slug, project_id=project_id
- ).prefetch_related("points").select_related("workspace", "project")
- serializer = EstimateReadSerializer(estimates, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ estimates = Estimate.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).prefetch_related("points").select_related("workspace", "project")
+ serializer = EstimateReadSerializer(estimates, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
- try:
- if not request.data.get("estimate", False):
- return Response(
- {"error": "Estimate is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_points = request.data.get("estimate_points", [])
-
- if not len(estimate_points) or len(estimate_points) > 8:
- return Response(
- {"error": "Estimate points are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
- if not estimate_serializer.is_valid():
- return Response(
- estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
- )
- try:
- estimate = estimate_serializer.save(project_id=project_id)
- except IntegrityError:
- return Response(
- {"errror": "Estimate with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- estimate_points = EstimatePoint.objects.bulk_create(
- [
- EstimatePoint(
- estimate=estimate,
- key=estimate_point.get("key", 0),
- value=estimate_point.get("value", ""),
- description=estimate_point.get("description", ""),
- project_id=project_id,
- workspace_id=estimate.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for estimate_point in estimate_points
- ],
- batch_size=10,
- ignore_conflicts=True,
- )
-
- estimate_point_serializer = EstimatePointSerializer(
- estimate_points, many=True
- )
-
+ if not request.data.get("estimate", False):
return Response(
- {
- "estimate": estimate_serializer.data,
- "estimate_points": estimate_point_serializer.data,
- },
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"},
+ {"error": "Estimate is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ estimate_points = request.data.get("estimate_points", [])
+
+ if not len(estimate_points) or len(estimate_points) > 8:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Estimate points are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
+ if not estimate_serializer.is_valid():
+ return Response(
+ estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+ estimate = estimate_serializer.save(project_id=project_id)
+ estimate_points = EstimatePoint.objects.bulk_create(
+ [
+ EstimatePoint(
+ estimate=estimate,
+ key=estimate_point.get("key", 0),
+ value=estimate_point.get("value", ""),
+ description=estimate_point.get("description", ""),
+ project_id=project_id,
+ workspace_id=estimate.workspace_id,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ for estimate_point in estimate_points
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ estimate_point_serializer = EstimatePointSerializer(
+ estimate_points, many=True
+ )
+
+ return Response(
+ {
+ "estimate": estimate_serializer.data,
+ "estimate_points": estimate_point_serializer.data,
+ },
+ status=status.HTTP_200_OK,
+ )
+
def retrieve(self, request, slug, project_id, estimate_id):
- try:
- estimate = Estimate.objects.get(
- pk=estimate_id, workspace__slug=slug, project_id=project_id
- )
- serializer = EstimateReadSerializer(estimate)
- return Response(
- serializer.data,
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ estimate = Estimate.objects.get(
+ pk=estimate_id, workspace__slug=slug, project_id=project_id
+ )
+ serializer = EstimateReadSerializer(estimate)
+ return Response(
+ serializer.data,
+ status=status.HTTP_200_OK,
+ )
def partial_update(self, request, slug, project_id, estimate_id):
- try:
- if not request.data.get("estimate", False):
- return Response(
- {"error": "Estimate is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if not len(request.data.get("estimate_points", [])):
- return Response(
- {"error": "Estimate points are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate = Estimate.objects.get(pk=estimate_id)
-
- estimate_serializer = EstimateSerializer(
- estimate, data=request.data.get("estimate"), partial=True
- )
- if not estimate_serializer.is_valid():
- return Response(
- estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
- )
- try:
- estimate = estimate_serializer.save()
- except IntegrityError:
- return Response(
- {"errror": "Estimate with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_points_data = request.data.get("estimate_points", [])
-
- estimate_points = EstimatePoint.objects.filter(
- pk__in=[
- estimate_point.get("id") for estimate_point in estimate_points_data
- ],
- workspace__slug=slug,
- project_id=project_id,
- estimate_id=estimate_id,
- )
-
- updated_estimate_points = []
- for estimate_point in estimate_points:
- # Find the data for that estimate point
- estimate_point_data = [
- point
- for point in estimate_points_data
- if point.get("id") == str(estimate_point.id)
- ]
- if len(estimate_point_data):
- estimate_point.value = estimate_point_data[0].get(
- "value", estimate_point.value
- )
- updated_estimate_points.append(estimate_point)
-
- try:
- EstimatePoint.objects.bulk_update(
- updated_estimate_points, ["value"], batch_size=10,
- )
- except IntegrityError as e:
- return Response(
- {"error": "Values need to be unique for each key"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
+ if not request.data.get("estimate", False):
return Response(
- {
- "estimate": estimate_serializer.data,
- "estimate_points": estimate_point_serializer.data,
- },
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Estimate is required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ if not len(request.data.get("estimate_points", [])):
+ return Response(
+ {"error": "Estimate points are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ estimate = Estimate.objects.get(pk=estimate_id)
+
+ estimate_serializer = EstimateSerializer(
+ estimate, data=request.data.get("estimate"), partial=True
+ )
+ if not estimate_serializer.is_valid():
+ return Response(
+ estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ estimate = estimate_serializer.save()
+
+ estimate_points_data = request.data.get("estimate_points", [])
+
+ estimate_points = EstimatePoint.objects.filter(
+ pk__in=[
+ estimate_point.get("id") for estimate_point in estimate_points_data
+ ],
+ workspace__slug=slug,
+ project_id=project_id,
+ estimate_id=estimate_id,
+ )
+
+ updated_estimate_points = []
+ for estimate_point in estimate_points:
+ # Find the data for that estimate point
+ estimate_point_data = [
+ point
+ for point in estimate_points_data
+ if point.get("id") == str(estimate_point.id)
+ ]
+ if len(estimate_point_data):
+ estimate_point.value = estimate_point_data[0].get(
+ "value", estimate_point.value
+ )
+ updated_estimate_points.append(estimate_point)
+
+ EstimatePoint.objects.bulk_update(
+ updated_estimate_points, ["value"], batch_size=10,
+ )
+
+ estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
+ return Response(
+ {
+ "estimate": estimate_serializer.data,
+ "estimate_points": estimate_point_serializer.data,
+ },
+ status=status.HTTP_200_OK,
+ )
+
def destroy(self, request, slug, project_id, estimate_id):
- try:
- estimate = Estimate.objects.get(
- pk=estimate_id, workspace__slug=slug, project_id=project_id
- )
- estimate.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ estimate = Estimate.objects.get(
+ pk=estimate_id, workspace__slug=slug, project_id=project_id
+ )
+ estimate.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/exporter.py b/apiserver/plane/api/views/exporter.py
index 7e14aa82f..03da8932f 100644
--- a/apiserver/plane/api/views/exporter.py
+++ b/apiserver/plane/api/views/exporter.py
@@ -20,81 +20,62 @@ class ExportIssuesEndpoint(BaseAPIView):
serializer_class = ExporterHistorySerializer
def post(self, request, slug):
- try:
- # Get the workspace
- workspace = Workspace.objects.get(slug=slug)
-
- provider = request.data.get("provider", False)
- multiple = request.data.get("multiple", False)
- project_ids = request.data.get("project", [])
-
- if provider in ["csv", "xlsx", "json"]:
- if not project_ids:
- project_ids = Project.objects.filter(
- workspace__slug=slug
- ).values_list("id", flat=True)
- project_ids = [str(project_id) for project_id in project_ids]
+ # Get the workspace
+ workspace = Workspace.objects.get(slug=slug)
+
+ provider = request.data.get("provider", False)
+ multiple = request.data.get("multiple", False)
+ project_ids = request.data.get("project", [])
+
+ if provider in ["csv", "xlsx", "json"]:
+ if not project_ids:
+ project_ids = Project.objects.filter(
+ workspace__slug=slug
+ ).values_list("id", flat=True)
+ project_ids = [str(project_id) for project_id in project_ids]
- exporter = ExporterHistory.objects.create(
- workspace=workspace,
- project=project_ids,
- initiated_by=request.user,
- provider=provider,
- )
-
- issue_export_task.delay(
- provider=exporter.provider,
- workspace_id=workspace.id,
- project_ids=project_ids,
- token_id=exporter.token,
- multiple=multiple,
- slug=slug,
- )
- return Response(
- {
- "message": f"Once the export is ready you will be able to download it"
- },
- status=status.HTTP_200_OK,
- )
- else:
- return Response(
- {"error": f"Provider '{provider}' not found."},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
+ exporter = ExporterHistory.objects.create(
+ workspace=workspace,
+ project=project_ids,
+ initiated_by=request.user,
+ provider=provider,
+ )
+
+ issue_export_task.delay(
+ provider=exporter.provider,
+ workspace_id=workspace.id,
+ project_ids=project_ids,
+ token_id=exporter.token,
+ multiple=multiple,
+ slug=slug,
)
- except Exception as e:
- capture_exception(e)
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "message": "Once the export is ready you will be able to download it"
+ },
+ status=status.HTTP_200_OK,
+ )
+ else:
+ return Response(
+ {"error": f"Provider '{provider}' not found."},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request, slug):
- try:
- exporter_history = ExporterHistory.objects.filter(
- workspace__slug=slug
- ).select_related("workspace","initiated_by")
+ exporter_history = ExporterHistory.objects.filter(
+ workspace__slug=slug
+ ).select_related("workspace","initiated_by")
- if request.GET.get("per_page", False) and request.GET.get("cursor", False):
- return self.paginate(
- request=request,
- queryset=exporter_history,
- on_results=lambda exporter_history: ExporterHistorySerializer(
- exporter_history, many=True
- ).data,
- )
- else:
- return Response(
- {"error": "per_page and cursor are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
+ if request.GET.get("per_page", False) and request.GET.get("cursor", False):
+ return self.paginate(
+ request=request,
+ queryset=exporter_history,
+ on_results=lambda exporter_history: ExporterHistorySerializer(
+ exporter_history, many=True
+ ).data,
+ )
+ else:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "per_page and cursor are required"},
status=status.HTTP_400_BAD_REQUEST,
)
diff --git a/apiserver/plane/api/views/external.py b/apiserver/plane/api/views/external.py
index 00a0270e4..755879dc6 100644
--- a/apiserver/plane/api/views/external.py
+++ b/apiserver/plane/api/views/external.py
@@ -25,94 +25,68 @@ class GPTIntegrationEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id):
- try:
- if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
- return Response(
- {"error": "OpenAI API key and engine is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- prompt = request.data.get("prompt", False)
- task = request.data.get("task", False)
-
- if not task:
- return Response(
- {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- final_text = task + "\n" + prompt
-
- openai.api_key = settings.OPENAI_API_KEY
- response = openai.ChatCompletion.create(
- model=settings.GPT_ENGINE,
- messages=[{"role": "user", "content": final_text}],
- temperature=0.7,
- max_tokens=1024,
- )
-
- workspace = Workspace.objects.get(slug=slug)
- project = Project.objects.get(pk=project_id)
-
- text = response.choices[0].message.content.strip()
- text_html = text.replace("\n", "<br/>")
+ if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
return Response(
- {
- "response": text,
- "response_html": text_html,
- "project_detail": ProjectLiteSerializer(project).data,
- "workspace_detail": WorkspaceLiteSerializer(workspace).data,
- },
- status=status.HTTP_200_OK,
- )
- except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
- return Response(
- {"error": "Workspace or Project Does not exist"},
+ {"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ prompt = request.data.get("prompt", False)
+ task = request.data.get("task", False)
+
+ if not task:
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
+ final_text = task + "\n" + prompt
+
+ openai.api_key = settings.OPENAI_API_KEY
+ response = openai.ChatCompletion.create(
+ model=settings.GPT_ENGINE,
+ messages=[{"role": "user", "content": final_text}],
+ temperature=0.7,
+ max_tokens=1024,
+ )
+
+ workspace = Workspace.objects.get(slug=slug)
+ project = Project.objects.get(pk=project_id)
+
+ text = response.choices[0].message.content.strip()
+ text_html = text.replace("\n", "<br/>")
+ return Response(
+ {
+ "response": text,
+ "response_html": text_html,
+ "project_detail": ProjectLiteSerializer(project).data,
+ "workspace_detail": WorkspaceLiteSerializer(workspace).data,
+ },
+ status=status.HTTP_200_OK,
+ )
+
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
- try:
- release_notes = get_release_notes()
- return Response(release_notes, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ release_notes = get_release_notes()
+ return Response(release_notes, status=status.HTTP_200_OK)
class UnsplashEndpoint(BaseAPIView):
def get(self, request):
- try:
- query = request.GET.get("query", False)
- page = request.GET.get("page", 1)
- per_page = request.GET.get("per_page", 20)
+ query = request.GET.get("query", False)
+ page = request.GET.get("page", 1)
+ per_page = request.GET.get("per_page", 20)
- url = (
- f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
- if query
- else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
- )
+ url = (
+ f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
+ if query
+ else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
+ )
- headers = {
- "Content-Type": "application/json",
- }
+ headers = {
+ "Content-Type": "application/json",
+ }
- resp = requests.get(url=url, headers=headers)
- return Response(resp.json(), status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ resp = requests.get(url=url, headers=headers)
+ return Response(resp.json(), status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py
index 18d9a1d69..373324d5d 100644
--- a/apiserver/plane/api/views/importer.py
+++ b/apiserver/plane/api/views/importer.py
@@ -44,559 +44,479 @@ from plane.utils.html_processor import strip_tags
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service):
- try:
- if service == "github":
- owner = request.GET.get("owner", False)
- repo = request.GET.get("repo", False)
+ if service == "github":
+ owner = request.GET.get("owner", False)
+ repo = request.GET.get("repo", False)
- if not owner or not repo:
- return Response(
- {"error": "Owner and repo are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace_integration = WorkspaceIntegration.objects.get(
- integration__provider="github", workspace__slug=slug
+ if not owner or not repo:
+ return Response(
+ {"error": "Owner and repo are required"},
+ status=status.HTTP_400_BAD_REQUEST,
)
- access_tokens_url = workspace_integration.metadata.get(
- "access_tokens_url", False
- )
+ workspace_integration = WorkspaceIntegration.objects.get(
+ integration__provider="github", workspace__slug=slug
+ )
- if not access_tokens_url:
- return Response(
- {
- "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
+ access_tokens_url = workspace_integration.metadata.get(
+ "access_tokens_url", False
+ )
- issue_count, labels, collaborators = get_github_repo_details(
- access_tokens_url, owner, repo
- )
+ if not access_tokens_url:
return Response(
{
- "issue_count": issue_count,
- "labels": labels,
- "collaborators": collaborators,
+ "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ issue_count, labels, collaborators = get_github_repo_details(
+ access_tokens_url, owner, repo
+ )
+ return Response(
+ {
+ "issue_count": issue_count,
+ "labels": labels,
+ "collaborators": collaborators,
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ if service == "jira":
+ # Check for all the keys
+ params = {
+ "project_key": "Project key is required",
+ "api_token": "API token is required",
+ "email": "Email is required",
+ "cloud_hostname": "Cloud hostname is required",
+ }
+
+ for key, error_message in params.items():
+ if not request.GET.get(key, False):
+ return Response(
+ {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ project_key = request.GET.get("project_key", "")
+ api_token = request.GET.get("api_token", "")
+ email = request.GET.get("email", "")
+ cloud_hostname = request.GET.get("cloud_hostname", "")
+
+ response = jira_project_issue_summary(
+ email, api_token, project_key, cloud_hostname
+ )
+ if "error" in response:
+ return Response(response, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(
+ response,
status=status.HTTP_200_OK,
)
-
- if service == "jira":
- # Check for all the keys
- params = {
- "project_key": "Project key is required",
- "api_token": "API token is required",
- "email": "Email is required",
- "cloud_hostname": "Cloud hostname is required",
- }
-
- for key, error_message in params.items():
- if not request.GET.get(key, False):
- return Response(
- {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
- )
-
- project_key = request.GET.get("project_key", "")
- api_token = request.GET.get("api_token", "")
- email = request.GET.get("email", "")
- cloud_hostname = request.GET.get("cloud_hostname", "")
-
- response = jira_project_issue_summary(
- email, api_token, project_key, cloud_hostname
- )
- if "error" in response:
- return Response(response, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(
- response,
- status=status.HTTP_200_OK,
- )
- return Response(
- {"error": "Service not supported yet"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Requested integration was not installed in the workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {"error": "Service not supported yet"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
class ImportServiceEndpoint(BaseAPIView):
def post(self, request, slug, service):
- try:
- project_id = request.data.get("project_id", False)
+ project_id = request.data.get("project_id", False)
- if not project_id:
+ if not project_id:
+ return Response(
+ {"error": "Project ID is required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ workspace = Workspace.objects.get(slug=slug)
+
+ if service == "github":
+ data = request.data.get("data", False)
+ metadata = request.data.get("metadata", False)
+ config = request.data.get("config", False)
+ if not data or not metadata or not config:
return Response(
- {"error": "Project ID is required"},
+ {"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
- workspace = Workspace.objects.get(slug=slug)
-
- if service == "github":
- data = request.data.get("data", False)
- metadata = request.data.get("metadata", False)
- config = request.data.get("config", False)
- if not data or not metadata or not config:
- return Response(
- {"error": "Data, config and metadata are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- api_token = APIToken.objects.filter(
- user=request.user, workspace=workspace
- ).first()
- if api_token is None:
- api_token = APIToken.objects.create(
- user=request.user,
- label="Importer",
- workspace=workspace,
- )
-
- importer = Importer.objects.create(
- service=service,
- project_id=project_id,
- status="queued",
- initiated_by=request.user,
- data=data,
- metadata=metadata,
- token=api_token,
- config=config,
- created_by=request.user,
- updated_by=request.user,
+ api_token = APIToken.objects.filter(
+ user=request.user, workspace=workspace
+ ).first()
+ if api_token is None:
+ api_token = APIToken.objects.create(
+ user=request.user,
+ label="Importer",
+ workspace=workspace,
)
- service_importer.delay(service, importer.id)
- serializer = ImporterSerializer(importer)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ importer = Importer.objects.create(
+ service=service,
+ project_id=project_id,
+ status="queued",
+ initiated_by=request.user,
+ data=data,
+ metadata=metadata,
+ token=api_token,
+ config=config,
+ created_by=request.user,
+ updated_by=request.user,
+ )
- if service == "jira":
- data = request.data.get("data", False)
- metadata = request.data.get("metadata", False)
- config = request.data.get("config", False)
- if not data or not metadata:
- return Response(
- {"error": "Data, config and metadata are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- api_token = APIToken.objects.filter(
- user=request.user, workspace=workspace
- ).first()
- if api_token is None:
- api_token = APIToken.objects.create(
- user=request.user,
- label="Importer",
- workspace=workspace,
- )
+ service_importer.delay(service, importer.id)
+ serializer = ImporterSerializer(importer)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
- importer = Importer.objects.create(
- service=service,
- project_id=project_id,
- status="queued",
- initiated_by=request.user,
- data=data,
- metadata=metadata,
- token=api_token,
- config=config,
- created_by=request.user,
- updated_by=request.user,
+ if service == "jira":
+ data = request.data.get("data", False)
+ metadata = request.data.get("metadata", False)
+ config = request.data.get("config", False)
+ if not data or not metadata:
+ return Response(
+ {"error": "Data, config and metadata are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ api_token = APIToken.objects.filter(
+ user=request.user, workspace=workspace
+ ).first()
+ if api_token is None:
+ api_token = APIToken.objects.create(
+ user=request.user,
+ label="Importer",
+ workspace=workspace,
)
- service_importer.delay(service, importer.id)
- serializer = ImporterSerializer(importer)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ importer = Importer.objects.create(
+ service=service,
+ project_id=project_id,
+ status="queued",
+ initiated_by=request.user,
+ data=data,
+ metadata=metadata,
+ token=api_token,
+ config=config,
+ created_by=request.user,
+ updated_by=request.user,
+ )
- return Response(
- {"error": "Servivce not supported yet"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except (
- Workspace.DoesNotExist,
- WorkspaceIntegration.DoesNotExist,
- Project.DoesNotExist,
- ) as e:
- return Response(
- {"error": "Workspace Integration or Project does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ service_importer.delay(service, importer.id)
+ serializer = ImporterSerializer(importer)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+
+ return Response(
+ {"error": "Service not supported yet"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
def get(self, request, slug):
- try:
- imports = (
- Importer.objects.filter(workspace__slug=slug)
- .order_by("-created_at")
- .select_related("initiated_by", "project", "workspace")
- )
- serializer = ImporterSerializer(imports, many=True)
- return Response(serializer.data)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ imports = (
+ Importer.objects.filter(workspace__slug=slug)
+ .order_by("-created_at")
+ .select_related("initiated_by", "project", "workspace")
+ )
+ serializer = ImporterSerializer(imports, many=True)
+ return Response(serializer.data)
def delete(self, request, slug, service, pk):
- try:
- importer = Importer.objects.get(
- pk=pk, service=service, workspace__slug=slug
- )
+ importer = Importer.objects.get(
+ pk=pk, service=service, workspace__slug=slug
+ )
- if importer.imported_data is not None:
- # Delete all imported Issues
- imported_issues = importer.imported_data.get("issues", [])
- Issue.issue_objects.filter(id__in=imported_issues).delete()
+ if importer.imported_data is not None:
+ # Delete all imported Issues
+ imported_issues = importer.imported_data.get("issues", [])
+ Issue.issue_objects.filter(id__in=imported_issues).delete()
- # Delete all imported Labels
- imported_labels = importer.imported_data.get("labels", [])
- Label.objects.filter(id__in=imported_labels).delete()
+ # Delete all imported Labels
+ imported_labels = importer.imported_data.get("labels", [])
+ Label.objects.filter(id__in=imported_labels).delete()
- if importer.service == "jira":
- imported_modules = importer.imported_data.get("modules", [])
- Module.objects.filter(id__in=imported_modules).delete()
- importer.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if importer.service == "jira":
+ imported_modules = importer.imported_data.get("modules", [])
+ Module.objects.filter(id__in=imported_modules).delete()
+ importer.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, service, pk):
- try:
- importer = Importer.objects.get(
- pk=pk, service=service, workspace__slug=slug
- )
- serializer = ImporterSerializer(importer, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Importer.DoesNotExist:
- return Response(
- {"error": "Importer Does not exists"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ importer = Importer.objects.get(
+ pk=pk, service=service, workspace__slug=slug
+ )
+ serializer = ImporterSerializer(importer, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service, importer_id):
- try:
- importer = Importer.objects.get(
- pk=importer_id,
- workspace__slug=slug,
- project_id=project_id,
- service=service,
- )
- importer.status = request.data.get("status", "processing")
- importer.save()
- return Response(status.HTTP_200_OK)
- except Importer.DoesNotExist:
- return Response(
- {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
+ importer = Importer.objects.get(
+ pk=importer_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ service=service,
+ )
+ importer.status = request.data.get("status", "processing")
+ importer.save()
+ return Response(status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
- try:
- # Get the project
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ # Get the project
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
- # Get the default state
+ # Get the default state
+ default_state = State.objects.filter(
+ ~Q(name="Triage"), project_id=project_id, default=True
+ ).first()
+ # if there is no default state assign any random state
+ if default_state is None:
default_state = State.objects.filter(
- ~Q(name="Triage"), project_id=project_id, default=True
+ ~Q(name="Triage"), project_id=project_id
).first()
- # if there is no default state assign any random state
- if default_state is None:
- default_state = State.objects.filter(
- ~Q(name="Triage"), project_id=project_id
- ).first()
- # Get the maximum sequence_id
- last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
- largest=Max("sequence")
- )["largest"]
+ # Get the maximum sequence_id
+ last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
+ largest=Max("sequence")
+ )["largest"]
- last_id = 1 if last_id is None else last_id + 1
+ last_id = 1 if last_id is None else last_id + 1
- # Get the maximum sort order
- largest_sort_order = Issue.objects.filter(
- project_id=project_id, state=default_state
- ).aggregate(largest=Max("sort_order"))["largest"]
+ # Get the maximum sort order
+ largest_sort_order = Issue.objects.filter(
+ project_id=project_id, state=default_state
+ ).aggregate(largest=Max("sort_order"))["largest"]
- largest_sort_order = (
- 65535 if largest_sort_order is None else largest_sort_order + 10000
- )
+ largest_sort_order = (
+ 65535 if largest_sort_order is None else largest_sort_order + 10000
+ )
- # Get the issues_data
- issues_data = request.data.get("issues_data", [])
-
- if not len(issues_data):
- return Response(
- {"error": "Issue data is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Issues
- bulk_issues = []
- for issue_data in issues_data:
- bulk_issues.append(
- Issue(
- project_id=project_id,
- workspace_id=project.workspace_id,
- state_id=issue_data.get("state")
- if issue_data.get("state", False)
- else default_state.id,
- name=issue_data.get("name", "Issue Created through Bulk"),
- description_html=issue_data.get("description_html", "<p></p>"),
- description_stripped=(
- None
- if (
- issue_data.get("description_html") == ""
- or issue_data.get("description_html") is None
- )
- else strip_tags(issue_data.get("description_html"))
- ),
- sequence_id=last_id,
- sort_order=largest_sort_order,
- start_date=issue_data.get("start_date", None),
- target_date=issue_data.get("target_date", None),
- priority=issue_data.get("priority", "none"),
- created_by=request.user,
- )
- )
-
- largest_sort_order = largest_sort_order + 10000
- last_id = last_id + 1
-
- issues = Issue.objects.bulk_create(
- bulk_issues,
- batch_size=100,
- ignore_conflicts=True,
- )
-
- # Sequences
- _ = IssueSequence.objects.bulk_create(
- [
- IssueSequence(
- issue=issue,
- sequence=issue.sequence_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- )
- for issue in issues
- ],
- batch_size=100,
- )
-
- # Attach Labels
- bulk_issue_labels = []
- for issue, issue_data in zip(issues, issues_data):
- labels_list = issue_data.get("labels_list", [])
- bulk_issue_labels = bulk_issue_labels + [
- IssueLabel(
- issue=issue,
- label_id=label_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for label_id in labels_list
- ]
-
- _ = IssueLabel.objects.bulk_create(
- bulk_issue_labels, batch_size=100, ignore_conflicts=True
- )
-
- # Attach Assignees
- bulk_issue_assignees = []
- for issue, issue_data in zip(issues, issues_data):
- assignees_list = issue_data.get("assignees_list", [])
- bulk_issue_assignees = bulk_issue_assignees + [
- IssueAssignee(
- issue=issue,
- assignee_id=assignee_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for assignee_id in assignees_list
- ]
-
- _ = IssueAssignee.objects.bulk_create(
- bulk_issue_assignees, batch_size=100, ignore_conflicts=True
- )
-
- # Track the issue activities
- IssueActivity.objects.bulk_create(
- [
- IssueActivity(
- issue=issue,
- actor=request.user,
- project_id=project_id,
- workspace_id=project.workspace_id,
- comment=f"imported the issue from {service}",
- verb="created",
- created_by=request.user,
- )
- for issue in issues
- ],
- batch_size=100,
- )
-
- # Create Comments
- bulk_issue_comments = []
- for issue, issue_data in zip(issues, issues_data):
- comments_list = issue_data.get("comments_list", [])
- bulk_issue_comments = bulk_issue_comments + [
- IssueComment(
- issue=issue,
- comment_html=comment.get("comment_html", ""),
- actor=request.user,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for comment in comments_list
- ]
-
- _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)
-
- # Attach Links
- _ = IssueLink.objects.bulk_create(
- [
- IssueLink(
- issue=issue,
- url=issue_data.get("link", {}).get("url", "https://github.com"),
- title=issue_data.get("link", {}).get("title", "Original Issue"),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for issue, issue_data in zip(issues, issues_data)
- ]
- )
+ # Get the issues_data
+ issues_data = request.data.get("issues_data", [])
+ if not len(issues_data):
return Response(
- {"issues": IssueFlatSerializer(issues, many=True).data},
- status=status.HTTP_201_CREATED,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Issue data is required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ # Issues
+ bulk_issues = []
+ for issue_data in issues_data:
+ bulk_issues.append(
+ Issue(
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ state_id=issue_data.get("state")
+ if issue_data.get("state", False)
+ else default_state.id,
+ name=issue_data.get("name", "Issue Created through Bulk"),
+ description_html=issue_data.get("description_html", ""),
+ description_stripped=(
+ None
+ if (
+ issue_data.get("description_html") == ""
+ or issue_data.get("description_html") is None
+ )
+ else strip_tags(issue_data.get("description_html"))
+ ),
+ sequence_id=last_id,
+ sort_order=largest_sort_order,
+ start_date=issue_data.get("start_date", None),
+ target_date=issue_data.get("target_date", None),
+ priority=issue_data.get("priority", "none"),
+ created_by=request.user,
+ )
+ )
+
+ largest_sort_order = largest_sort_order + 10000
+ last_id = last_id + 1
+
+ issues = Issue.objects.bulk_create(
+ bulk_issues,
+ batch_size=100,
+ ignore_conflicts=True,
+ )
+
+ # Sequences
+ _ = IssueSequence.objects.bulk_create(
+ [
+ IssueSequence(
+ issue=issue,
+ sequence=issue.sequence_id,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ )
+ for issue in issues
+ ],
+ batch_size=100,
+ )
+
+ # Attach Labels
+ bulk_issue_labels = []
+ for issue, issue_data in zip(issues, issues_data):
+ labels_list = issue_data.get("labels_list", [])
+ bulk_issue_labels = bulk_issue_labels + [
+ IssueLabel(
+ issue=issue,
+ label_id=label_id,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for label_id in labels_list
+ ]
+
+ _ = IssueLabel.objects.bulk_create(
+ bulk_issue_labels, batch_size=100, ignore_conflicts=True
+ )
+
+ # Attach Assignees
+ bulk_issue_assignees = []
+ for issue, issue_data in zip(issues, issues_data):
+ assignees_list = issue_data.get("assignees_list", [])
+ bulk_issue_assignees = bulk_issue_assignees + [
+ IssueAssignee(
+ issue=issue,
+ assignee_id=assignee_id,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for assignee_id in assignees_list
+ ]
+
+ _ = IssueAssignee.objects.bulk_create(
+ bulk_issue_assignees, batch_size=100, ignore_conflicts=True
+ )
+
+ # Track the issue activities
+ IssueActivity.objects.bulk_create(
+ [
+ IssueActivity(
+ issue=issue,
+ actor=request.user,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ comment=f"imported the issue from {service}",
+ verb="created",
+ created_by=request.user,
+ )
+ for issue in issues
+ ],
+ batch_size=100,
+ )
+
+ # Create Comments
+ bulk_issue_comments = []
+ for issue, issue_data in zip(issues, issues_data):
+ comments_list = issue_data.get("comments_list", [])
+ bulk_issue_comments = bulk_issue_comments + [
+ IssueComment(
+ issue=issue,
+ comment_html=comment.get("comment_html", ""),
+ actor=request.user,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for comment in comments_list
+ ]
+
+ _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)
+
+ # Attach Links
+ _ = IssueLink.objects.bulk_create(
+ [
+ IssueLink(
+ issue=issue,
+ url=issue_data.get("link", {}).get("url", "https://github.com"),
+ title=issue_data.get("link", {}).get("title", "Original Issue"),
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for issue, issue_data in zip(issues, issues_data)
+ ]
+ )
+
+ return Response(
+ {"issues": IssueFlatSerializer(issues, many=True).data},
+ status=status.HTTP_201_CREATED,
+ )
+
class BulkImportModulesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
- try:
- modules_data = request.data.get("modules_data", [])
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ modules_data = request.data.get("modules_data", [])
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
- modules = Module.objects.bulk_create(
+ modules = Module.objects.bulk_create(
+ [
+ Module(
+ name=module.get("name", uuid.uuid4().hex),
+ description=module.get("description", ""),
+ start_date=module.get("start_date", None),
+ target_date=module.get("target_date", None),
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for module in modules_data
+ ],
+ batch_size=100,
+ ignore_conflicts=True,
+ )
+
+ modules = Module.objects.filter(id__in=[module.id for module in modules])
+
+ if len(modules) == len(modules_data):
+ _ = ModuleLink.objects.bulk_create(
[
- Module(
- name=module.get("name", uuid.uuid4().hex),
- description=module.get("description", ""),
- start_date=module.get("start_date", None),
- target_date=module.get("target_date", None),
+ ModuleLink(
+ module=module,
+ url=module_data.get("link", {}).get(
+ "url", "https://plane.so"
+ ),
+ title=module_data.get("link", {}).get(
+ "title", "Original Issue"
+ ),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
- for module in modules_data
+ for module, module_data in zip(modules, modules_data)
],
batch_size=100,
ignore_conflicts=True,
)
- modules = Module.objects.filter(id__in=[module.id for module in modules])
+ bulk_module_issues = []
+ for module, module_data in zip(modules, modules_data):
+ module_issues_list = module_data.get("module_issues_list", [])
+ bulk_module_issues = bulk_module_issues + [
+ ModuleIssue(
+ issue_id=issue,
+ module=module,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ )
+ for issue in module_issues_list
+ ]
- if len(modules) == len(modules_data):
- _ = ModuleLink.objects.bulk_create(
- [
- ModuleLink(
- module=module,
- url=module_data.get("link", {}).get(
- "url", "https://plane.so"
- ),
- title=module_data.get("link", {}).get(
- "title", "Original Issue"
- ),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for module, module_data in zip(modules, modules_data)
- ],
- batch_size=100,
- ignore_conflicts=True,
- )
-
- bulk_module_issues = []
- for module, module_data in zip(modules, modules_data):
- module_issues_list = module_data.get("module_issues_list", [])
- bulk_module_issues = bulk_module_issues + [
- ModuleIssue(
- issue_id=issue,
- module=module,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for issue in module_issues_list
- ]
-
- _ = ModuleIssue.objects.bulk_create(
- bulk_module_issues, batch_size=100, ignore_conflicts=True
- )
-
- serializer = ModuleSerializer(modules, many=True)
- return Response(
- {"modules": serializer.data}, status=status.HTTP_201_CREATED
- )
-
- else:
- return Response(
- {"message": "Modules created but issues could not be imported"},
- status=status.HTTP_200_OK,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
+ _ = ModuleIssue.objects.bulk_create(
+ bulk_module_issues, batch_size=100, ignore_conflicts=True
)
- except Exception as e:
- capture_exception(e)
+
+ serializer = ModuleSerializer(modules, many=True)
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"modules": serializer.data}, status=status.HTTP_201_CREATED
+ )
+
+ else:
+ return Response(
+ {"message": "Modules created but issues could not be imported"},
+ status=status.HTTP_200_OK,
)
diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py
index 4bfc32f01..632da0d95 100644
--- a/apiserver/plane/api/views/inbox.py
+++ b/apiserver/plane/api/views/inbox.py
@@ -64,24 +64,17 @@ class InboxViewSet(BaseViewSet):
serializer.save(project_id=self.kwargs.get("project_id"))
def destroy(self, request, slug, project_id, pk):
- try:
- inbox = Inbox.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- # Handle default inbox delete
- if inbox.is_default:
- return Response(
- {"error": "You cannot delete the default inbox"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- inbox.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
+ inbox = Inbox.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=pk
+ )
+ # Handle default inbox delete
+ if inbox.is_default:
return Response(
- {"error": "Something went wronf please try again later"},
+ {"error": "You cannot delete the default inbox"},
status=status.HTTP_400_BAD_REQUEST,
)
+ inbox.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class InboxIssueViewSet(BaseViewSet):
@@ -110,281 +103,239 @@ class InboxIssueViewSet(BaseViewSet):
)
def list(self, request, slug, project_id, inbox_id):
- try:
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.objects.filter(
- issue_inbox__inbox_id=inbox_id,
- workspace__slug=slug,
- project_id=project_id,
+ filters = issue_filters(request.query_params, "GET")
+ issues = (
+ Issue.objects.filter(
+ issue_inbox__inbox_id=inbox_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .filter(**filters)
+ .annotate(bridge_id=F("issue_inbox__id"))
+ .select_related("workspace", "project", "state", "parent")
+ .prefetch_related("assignees", "labels")
+ .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
)
- .filter(**filters)
- .annotate(bridge_id=F("issue_inbox__id"))
- .select_related("workspace", "project", "state", "parent")
- .prefetch_related("assignees", "labels")
- .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_inbox",
- queryset=InboxIssue.objects.only(
- "status", "duplicate_to", "snoozed_till", "source"
- ),
- )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_inbox",
+ queryset=InboxIssue.objects.only(
+ "status", "duplicate_to", "snoozed_till", "source"
+ ),
)
)
- issues_data = IssueStateInboxSerializer(issues, many=True).data
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
- )
+ )
+ issues_data = IssueStateInboxSerializer(issues, many=True).data
+ return Response(
+ issues_data,
+ status=status.HTTP_200_OK,
+ )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
def create(self, request, slug, project_id, inbox_id):
- try:
- if not request.data.get("issue", {}).get("name", False):
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Check for valid priority
- if not request.data.get("issue", {}).get("priority", "none") in [
- "low",
- "medium",
- "high",
- "urgent",
- "none",
- ]:
- return Response(
- {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Create or get state
- state, _ = State.objects.get_or_create(
- name="Triage",
- group="backlog",
- description="Default state for managing all Inbox Issues",
- project_id=project_id,
- color="#ff7700",
- )
-
- # create an issue
- issue = Issue.objects.create(
- name=request.data.get("issue", {}).get("name"),
- description=request.data.get("issue", {}).get("description", {}),
- description_html=request.data.get("issue", {}).get(
- "description_html", ""
- ),
- priority=request.data.get("issue", {}).get("priority", "low"),
- project_id=project_id,
- state=state,
- )
-
- # Create an Issue Activity
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- # create an inbox issue
- InboxIssue.objects.create(
- inbox_id=inbox_id,
- project_id=project_id,
- issue=issue,
- source=request.data.get("source", "in-app"),
- )
-
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ if not request.data.get("issue", {}).get("name", False):
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
+ # Check for valid priority
+ if not request.data.get("issue", {}).get("priority", "none") in [
+ "low",
+ "medium",
+ "high",
+ "urgent",
+ "none",
+ ]:
+ return Response(
+ {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # Create or get state
+ state, _ = State.objects.get_or_create(
+ name="Triage",
+ group="backlog",
+ description="Default state for managing all Inbox Issues",
+ project_id=project_id,
+ color="#ff7700",
+ )
+
+ # create an issue
+ issue = Issue.objects.create(
+ name=request.data.get("issue", {}).get("name"),
+ description=request.data.get("issue", {}).get("description", {}),
+ description_html=request.data.get("issue", {}).get(
+ "description_html", ""
+ ),
+ priority=request.data.get("issue", {}).get("priority", "low"),
+ project_id=project_id,
+ state=state,
+ )
+
+ # Create an Issue Activity
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp())
+ )
+ # create an inbox issue
+ InboxIssue.objects.create(
+ inbox_id=inbox_id,
+ project_id=project_id,
+ issue=issue,
+ source=request.data.get("source", "in-app"),
+ )
+
+ serializer = IssueStateInboxSerializer(issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
def partial_update(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
+ # Get the project member
+ project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
+ # Only project members admins and created_by users can access this endpoint
+ if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
+ return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
+
+ # Get issue data
+ issue_data = request.data.pop("issue", False)
+
+ if bool(issue_data):
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
)
- # Get the project member
- project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
- # Only project members admins and created_by users can access this endpoint
- if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
+ # Only allow guests and viewers to edit name and description
+ if project_member.role <= 10:
+ # viewers and guests since only viewers and guests
+ issue_data = {
+ "name": issue_data.get("name", issue.name),
+ "description_html": issue_data.get("description_html", issue.description_html),
+ "description": issue_data.get("description", issue.description)
+ }
- # Get issue data
- issue_data = request.data.pop("issue", False)
+ issue_serializer = IssueCreateSerializer(
+ issue, data=issue_data, partial=True
+ )
- if bool(issue_data):
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- # Only allow guests and viewers to edit name and description
- if project_member.role <= 10:
- # viewers and guests since only viewers and guests
- issue_data = {
- "name": issue_data.get("name", issue.name),
- "description_html": issue_data.get("description_html", issue.description_html),
- "description": issue_data.get("description", issue.description)
- }
-
- issue_serializer = IssueCreateSerializer(
- issue, data=issue_data, partial=True
+ if issue_serializer.is_valid():
+ current_instance = issue
+ # Log all the updates
+ requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
+ if issue is not None:
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ IssueSerializer(current_instance).data,
+ cls=DjangoJSONEncoder,
+ ),
+ epoch=int(timezone.now().timestamp())
+ )
+ issue_serializer.save()
+ else:
+ return Response(
+ issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
- if issue_serializer.is_valid():
- current_instance = issue
- # Log all the updates
- requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
- if issue is not None:
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_serializer.save()
- else:
- return Response(
- issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ # Only project admins and members can edit inbox issue attributes
+ if project_member.role > 10:
+ serializer = InboxIssueSerializer(
+ inbox_issue, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ # Update the issue state if the issue is rejected or marked as duplicate
+ if serializer.data["status"] in [-1, 2]:
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ state = State.objects.filter(
+ group="cancelled", workspace__slug=slug, project_id=project_id
+ ).first()
+ if state is not None:
+ issue.state = state
+ issue.save()
+
+ # Update the issue state if it is accepted
+ if serializer.data["status"] in [1]:
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
)
- # Only project admins and members can edit inbox issue attributes
- if project_member.role > 10:
- serializer = InboxIssueSerializer(
- inbox_issue, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- # Update the issue state if the issue is rejected or marked as duplicate
- if serializer.data["status"] in [-1, 2]:
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id,
- workspace__slug=slug,
- project_id=project_id,
- )
+ # Update the issue state only if it is in triage state
+ if issue.state.name == "Triage":
+ # Move to default state
state = State.objects.filter(
- group="cancelled", workspace__slug=slug, project_id=project_id
+ workspace__slug=slug, project_id=project_id, default=True
).first()
if state is not None:
issue.state = state
issue.save()
- # Update the issue state if it is accepted
- if serializer.data["status"] in [1]:
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id,
- workspace__slug=slug,
- project_id=project_id,
- )
-
- # Update the issue state only if it is in triage state
- if issue.state.name == "Triage":
- # Move to default state
- state = State.objects.filter(
- workspace__slug=slug, project_id=project_id, default=True
- ).first()
- if state is not None:
- issue.state = state
- issue.save()
-
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
- except InboxIssue.DoesNotExist:
- return Response(
- {"error": "Inbox Issue does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+ )
+ serializer = IssueStateInboxSerializer(issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- # Get the project member
- project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
+ # Get the project member
+ project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
- if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
+ if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
+ return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
- # Check the issue status
- if inbox_issue.status in [-2, -1, 0, 2]:
- # Delete the issue also
- Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
+ # Check the issue status
+ if inbox_issue.status in [-2, -1, 0, 2]:
+ # Delete the issue also
+ Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
- inbox_issue.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except InboxIssue.DoesNotExist:
- return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ inbox_issue.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class InboxIssuePublicViewSet(BaseViewSet):
@@ -413,242 +364,197 @@ class InboxIssuePublicViewSet(BaseViewSet):
return InboxIssue.objects.none()
def list(self, request, slug, project_id, inbox_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
+ project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
+ if project_deploy_board.inbox is None:
+ return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.objects.filter(
- issue_inbox__inbox_id=inbox_id,
- workspace__slug=slug,
- project_id=project_id,
+ filters = issue_filters(request.query_params, "GET")
+ issues = (
+ Issue.objects.filter(
+ issue_inbox__inbox_id=inbox_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .filter(**filters)
+ .annotate(bridge_id=F("issue_inbox__id"))
+ .select_related("workspace", "project", "state", "parent")
+ .prefetch_related("assignees", "labels")
+ .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
)
- .filter(**filters)
- .annotate(bridge_id=F("issue_inbox__id"))
- .select_related("workspace", "project", "state", "parent")
- .prefetch_related("assignees", "labels")
- .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_inbox",
- queryset=InboxIssue.objects.only(
- "status", "duplicate_to", "snoozed_till", "source"
- ),
- )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_inbox",
+ queryset=InboxIssue.objects.only(
+ "status", "duplicate_to", "snoozed_till", "source"
+ ),
)
)
- issues_data = IssueStateInboxSerializer(issues, many=True).data
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
- )
- except ProjectDeployBoard.DoesNotExist:
- return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ )
+ issues_data = IssueStateInboxSerializer(issues, many=True).data
+ return Response(
+ issues_data,
+ status=status.HTTP_200_OK,
+ )
def create(self, request, slug, project_id, inbox_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
+ project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
+ if project_deploy_board.inbox is None:
+ return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
- if not request.data.get("issue", {}).get("name", False):
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Check for valid priority
- if not request.data.get("issue", {}).get("priority", "none") in [
- "low",
- "medium",
- "high",
- "urgent",
- "none",
- ]:
- return Response(
- {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Create or get state
- state, _ = State.objects.get_or_create(
- name="Triage",
- group="backlog",
- description="Default state for managing all Inbox Issues",
- project_id=project_id,
- color="#ff7700",
- )
-
- # create an issue
- issue = Issue.objects.create(
- name=request.data.get("issue", {}).get("name"),
- description=request.data.get("issue", {}).get("description", {}),
- description_html=request.data.get("issue", {}).get(
- "description_html", ""
- ),
- priority=request.data.get("issue", {}).get("priority", "low"),
- project_id=project_id,
- state=state,
- )
-
- # Create an Issue Activity
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- # create an inbox issue
- InboxIssue.objects.create(
- inbox_id=inbox_id,
- project_id=project_id,
- issue=issue,
- source=request.data.get("source", "in-app"),
- )
-
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ if not request.data.get("issue", {}).get("name", False):
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
+ # Check for valid priority
+ if not request.data.get("issue", {}).get("priority", "none") in [
+ "low",
+ "medium",
+ "high",
+ "urgent",
+ "none",
+ ]:
+ return Response(
+ {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # Create or get state
+ state, _ = State.objects.get_or_create(
+ name="Triage",
+ group="backlog",
+ description="Default state for managing all Inbox Issues",
+ project_id=project_id,
+ color="#ff7700",
+ )
+
+ # create an issue
+ issue = Issue.objects.create(
+ name=request.data.get("issue", {}).get("name"),
+ description=request.data.get("issue", {}).get("description", {}),
+ description_html=request.data.get("issue", {}).get(
+ "description_html", ""
+ ),
+ priority=request.data.get("issue", {}).get("priority", "low"),
+ project_id=project_id,
+ state=state,
+ )
+
+ # Create an Issue Activity
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp())
+ )
+ # create an inbox issue
+ InboxIssue.objects.create(
+ inbox_id=inbox_id,
+ project_id=project_id,
+ issue=issue,
+ source=request.data.get("source", "in-app"),
+ )
+
+ serializer = IssueStateInboxSerializer(issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
def partial_update(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
+ project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
+ if project_deploy_board.inbox is None:
+ return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- # Get the project member
- if str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
+ # Get the project member
+ if str(inbox_issue.created_by_id) != str(request.user.id):
+ return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
- # Get issue data
- issue_data = request.data.pop("issue", False)
+ # Get issue data
+ issue_data = request.data.pop("issue", False)
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- # viewers and guests since only viewers and guests
- issue_data = {
- "name": issue_data.get("name", issue.name),
- "description_html": issue_data.get("description_html", issue.description_html),
- "description": issue_data.get("description", issue.description)
- }
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+ )
+ # viewers and guests since only viewers and guests
+ issue_data = {
+ "name": issue_data.get("name", issue.name),
+ "description_html": issue_data.get("description_html", issue.description_html),
+ "description": issue_data.get("description", issue.description)
+ }
- issue_serializer = IssueCreateSerializer(
- issue, data=issue_data, partial=True
- )
+ issue_serializer = IssueCreateSerializer(
+ issue, data=issue_data, partial=True
+ )
- if issue_serializer.is_valid():
- current_instance = issue
- # Log all the updates
- requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
- if issue is not None:
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_serializer.save()
- return Response(issue_serializer.data, status=status.HTTP_200_OK)
- return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except InboxIssue.DoesNotExist:
- return Response(
- {"error": "Inbox Issue does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if issue_serializer.is_valid():
+ current_instance = issue
+ # Log all the updates
+ requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
+ if issue is not None:
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ IssueSerializer(current_instance).data,
+ cls=DjangoJSONEncoder,
+ ),
+ epoch=int(timezone.now().timestamp())
+ )
+ issue_serializer.save()
+ return Response(issue_serializer.data, status=status.HTTP_200_OK)
+ return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
+ if project_deploy_board.inbox is None:
+ return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
+
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+ )
+ serializer = IssueStateInboxSerializer(issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
+ project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
+ if project_deploy_board.inbox is None:
+ return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
+ inbox_issue = InboxIssue.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+ )
- if str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except InboxIssue.DoesNotExist:
- return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if str(inbox_issue.created_by_id) != str(request.user.id):
+ return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
+ inbox_issue.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/integration/base.py b/apiserver/plane/api/views/integration/base.py
index 5213baf63..65b94d0a1 100644
--- a/apiserver/plane/api/views/integration/base.py
+++ b/apiserver/plane/api/views/integration/base.py
@@ -2,7 +2,6 @@
import uuid
# Django imports
-from django.db import IntegrityError
from django.contrib.auth.hashers import make_password
# Third party imports
@@ -33,66 +32,40 @@ class IntegrationViewSet(BaseViewSet):
model = Integration
def create(self, request):
- try:
- serializer = IntegrationSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = IntegrationSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, pk):
- try:
- integration = Integration.objects.get(pk=pk)
- if integration.verified:
- return Response(
- {"error": "Verified integrations cannot be updated"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IntegrationSerializer(
- integration, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Integration.DoesNotExist:
+ integration = Integration.objects.get(pk=pk)
+ if integration.verified:
return Response(
- {"error": "Integration Does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
- def destroy(self, request, pk):
- try:
- integration = Integration.objects.get(pk=pk)
- if integration.verified:
- return Response(
- {"error": "Verified integrations cannot be updated"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = IntegrationSerializer(
+ integration, data=request.data, partial=True
+ )
- integration.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Integration.DoesNotExist:
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, pk):
+ integration = Integration.objects.get(pk=pk)
+ if integration.verified:
return Response(
- {"error": "Integration Does not exist"},
- status=status.HTTP_404_NOT_FOUND,
+ {"error": "Verified integrations cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
)
+ integration.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
class WorkspaceIntegrationViewSet(BaseViewSet):
serializer_class = WorkspaceIntegrationSerializer
@@ -111,119 +84,81 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
)
def create(self, request, slug, provider):
- try:
- workspace = Workspace.objects.get(slug=slug)
- integration = Integration.objects.get(provider=provider)
- config = {}
- if provider == "github":
- installation_id = request.data.get("installation_id", None)
- if not installation_id:
- return Response(
- {"error": "Installation ID is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- metadata = get_github_metadata(installation_id)
- config = {"installation_id": installation_id}
-
- if provider == "slack":
- metadata = request.data.get("metadata", {})
- access_token = metadata.get("access_token", False)
- team_id = metadata.get("team", {}).get("id", False)
- if not metadata or not access_token or not team_id:
- return Response(
- {"error": "Access token and team id is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- config = {"team_id": team_id, "access_token": access_token}
-
- # Create a bot user
- bot_user = User.objects.create(
- email=f"{uuid.uuid4().hex}@plane.so",
- username=uuid.uuid4().hex,
- password=make_password(uuid.uuid4().hex),
- is_password_autoset=True,
- is_bot=True,
- first_name=integration.title,
- avatar=integration.avatar_url
- if integration.avatar_url is not None
- else "",
- )
-
- # Create an API Token for the bot user
- api_token = APIToken.objects.create(
- user=bot_user,
- user_type=1, # bot user
- workspace=workspace,
- )
-
- workspace_integration = WorkspaceIntegration.objects.create(
- workspace=workspace,
- integration=integration,
- actor=bot_user,
- api_token=api_token,
- metadata=metadata,
- config=config,
- )
-
- # Add bot user as a member of workspace
- _ = WorkspaceMember.objects.create(
- workspace=workspace_integration.workspace,
- member=bot_user,
- role=20,
- )
- return Response(
- WorkspaceIntegrationSerializer(workspace_integration).data,
- status=status.HTTP_201_CREATED,
- )
- except IntegrityError as e:
- if "already exists" in str(e):
+ workspace = Workspace.objects.get(slug=slug)
+ integration = Integration.objects.get(provider=provider)
+ config = {}
+ if provider == "github":
+ installation_id = request.data.get("installation_id", None)
+ if not installation_id:
return Response(
- {"error": "Integration is already active in the workspace"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Installation ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except (Workspace.DoesNotExist, Integration.DoesNotExist) as e:
- capture_exception(e)
- return Response(
- {"error": "Workspace or Integration not found"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ metadata = get_github_metadata(installation_id)
+ config = {"installation_id": installation_id}
+
+ if provider == "slack":
+ metadata = request.data.get("metadata", {})
+ access_token = metadata.get("access_token", False)
+ team_id = metadata.get("team", {}).get("id", False)
+ if not metadata or not access_token or not team_id:
+ return Response(
+ {"error": "Access token and team id is required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ config = {"team_id": team_id, "access_token": access_token}
+
+ # Create a bot user
+ bot_user = User.objects.create(
+ email=f"{uuid.uuid4().hex}@plane.so",
+ username=uuid.uuid4().hex,
+ password=make_password(uuid.uuid4().hex),
+ is_password_autoset=True,
+ is_bot=True,
+ first_name=integration.title,
+ avatar=integration.avatar_url
+ if integration.avatar_url is not None
+ else "",
+ )
+
+ # Create an API Token for the bot user
+ api_token = APIToken.objects.create(
+ user=bot_user,
+ user_type=1, # bot user
+ workspace=workspace,
+ )
+
+ workspace_integration = WorkspaceIntegration.objects.create(
+ workspace=workspace,
+ integration=integration,
+ actor=bot_user,
+ api_token=api_token,
+ metadata=metadata,
+ config=config,
+ )
+
+ # Add bot user as a member of workspace
+ _ = WorkspaceMember.objects.create(
+ workspace=workspace_integration.workspace,
+ member=bot_user,
+ role=20,
+ )
+ return Response(
+ WorkspaceIntegrationSerializer(workspace_integration).data,
+ status=status.HTTP_201_CREATED,
+ )
def destroy(self, request, slug, pk):
- try:
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=pk, workspace__slug=slug
- )
+ workspace_integration = WorkspaceIntegration.objects.get(
+ pk=pk, workspace__slug=slug
+ )
- if workspace_integration.integration.provider == "github":
- installation_id = workspace_integration.config.get(
- "installation_id", False
- )
- if installation_id:
- delete_github_installation(installation_id=installation_id)
-
- workspace_integration.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
-
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ if workspace_integration.integration.provider == "github":
+ installation_id = workspace_integration.config.get(
+ "installation_id", False
)
+ if installation_id:
+ delete_github_installation(installation_id=installation_id)
+
+ workspace_integration.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py
index 4cf07c705..f2035639e 100644
--- a/apiserver/plane/api/views/integration/github.py
+++ b/apiserver/plane/api/views/integration/github.py
@@ -30,31 +30,25 @@ class GithubRepositoriesEndpoint(BaseAPIView):
]
def get(self, request, slug, workspace_integration_id):
- try:
- page = request.GET.get("page", 1)
- workspace_integration = WorkspaceIntegration.objects.get(
- workspace__slug=slug, pk=workspace_integration_id
- )
+ page = request.GET.get("page", 1)
+ workspace_integration = WorkspaceIntegration.objects.get(
+ workspace__slug=slug, pk=workspace_integration_id
+ )
- if workspace_integration.integration.provider != "github":
- return Response(
- {"error": "Not a github integration"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- access_tokens_url = workspace_integration.metadata["access_tokens_url"]
- repositories_url = (
- workspace_integration.metadata["repositories_url"]
- + f"?per_page=100&page={page}"
- )
- repositories = get_github_repos(access_tokens_url, repositories_url)
- return Response(repositories, status=status.HTTP_200_OK)
- except WorkspaceIntegration.DoesNotExist:
+ if workspace_integration.integration.provider != "github":
return Response(
- {"error": "Workspace Integration Does not exists"},
+ {"error": "Not a github integration"},
status=status.HTTP_400_BAD_REQUEST,
)
+ access_tokens_url = workspace_integration.metadata["access_tokens_url"]
+ repositories_url = (
+ workspace_integration.metadata["repositories_url"]
+ + f"?per_page=100&page={page}"
+ )
+ repositories = get_github_repos(access_tokens_url, repositories_url)
+ return Response(repositories, status=status.HTTP_200_OK)
+
class GithubRepositorySyncViewSet(BaseViewSet):
permission_classes = [
@@ -76,89 +70,76 @@ class GithubRepositorySyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
- try:
- name = request.data.get("name", False)
- url = request.data.get("url", False)
- config = request.data.get("config", {})
- repository_id = request.data.get("repository_id", False)
- owner = request.data.get("owner", False)
+ name = request.data.get("name", False)
+ url = request.data.get("url", False)
+ config = request.data.get("config", {})
+ repository_id = request.data.get("repository_id", False)
+ owner = request.data.get("owner", False)
- if not name or not url or not repository_id or not owner:
- return Response(
- {"error": "Name, url, repository_id and owner are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get the workspace integration
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=workspace_integration_id
- )
-
- # Delete the old repository object
- GithubRepositorySync.objects.filter(
- project_id=project_id, workspace__slug=slug
- ).delete()
- GithubRepository.objects.filter(
- project_id=project_id, workspace__slug=slug
- ).delete()
-
- # Create repository
- repo = GithubRepository.objects.create(
- name=name,
- url=url,
- config=config,
- repository_id=repository_id,
- owner=owner,
- project_id=project_id,
- )
-
- # Create a Label for github
- label = Label.objects.filter(
- name="GitHub",
- project_id=project_id,
- ).first()
-
- if label is None:
- label = Label.objects.create(
- name="GitHub",
- project_id=project_id,
- description="Label to sync Plane issues with GitHub issues",
- color="#003773",
- )
-
- # Create repo sync
- repo_sync = GithubRepositorySync.objects.create(
- repository=repo,
- workspace_integration=workspace_integration,
- actor=workspace_integration.actor,
- credentials=request.data.get("credentials", {}),
- project_id=project_id,
- label=label,
- )
-
- # Add bot as a member in the project
- _ = ProjectMember.objects.get_or_create(
- member=workspace_integration.actor, role=20, project_id=project_id
- )
-
- # Return Response
+ if not name or not url or not repository_id or not owner:
return Response(
- GithubRepositorySyncSerializer(repo_sync).data,
- status=status.HTTP_201_CREATED,
- )
-
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Name, url, repository_id and owner are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ # Get the workspace integration
+ workspace_integration = WorkspaceIntegration.objects.get(
+ pk=workspace_integration_id
+ )
+
+ # Delete the old repository object
+ GithubRepositorySync.objects.filter(
+ project_id=project_id, workspace__slug=slug
+ ).delete()
+ GithubRepository.objects.filter(
+ project_id=project_id, workspace__slug=slug
+ ).delete()
+
+ # Create repository
+ repo = GithubRepository.objects.create(
+ name=name,
+ url=url,
+ config=config,
+ repository_id=repository_id,
+ owner=owner,
+ project_id=project_id,
+ )
+
+ # Create a Label for github
+ label = Label.objects.filter(
+ name="GitHub",
+ project_id=project_id,
+ ).first()
+
+ if label is None:
+ label = Label.objects.create(
+ name="GitHub",
+ project_id=project_id,
+ description="Label to sync Plane issues with GitHub issues",
+ color="#003773",
+ )
+
+ # Create repo sync
+ repo_sync = GithubRepositorySync.objects.create(
+ repository=repo,
+ workspace_integration=workspace_integration,
+ actor=workspace_integration.actor,
+ credentials=request.data.get("credentials", {}),
+ project_id=project_id,
+ label=label,
+ )
+
+ # Add bot as a member in the project
+ _ = ProjectMember.objects.get_or_create(
+ member=workspace_integration.actor, role=20, project_id=project_id
+ )
+
+ # Return Response
+ return Response(
+ GithubRepositorySyncSerializer(repo_sync).data,
+ status=status.HTTP_201_CREATED,
+ )
+
class GithubIssueSyncViewSet(BaseViewSet):
permission_classes = [
@@ -177,42 +158,30 @@ class GithubIssueSyncViewSet(BaseViewSet):
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
def post(self, request, slug, project_id, repo_sync_id):
- try:
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
- github_issue_syncs = request.data.get("github_issue_syncs", [])
- github_issue_syncs = GithubIssueSync.objects.bulk_create(
- [
- GithubIssueSync(
- issue_id=github_issue_sync.get("issue"),
- repo_issue_id=github_issue_sync.get("repo_issue_id"),
- issue_url=github_issue_sync.get("issue_url"),
- github_issue_id=github_issue_sync.get("github_issue_id"),
- repository_sync_id=repo_sync_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for github_issue_sync in github_issue_syncs
- ],
- batch_size=100,
- ignore_conflicts=True,
- )
+ github_issue_syncs = request.data.get("github_issue_syncs", [])
+ github_issue_syncs = GithubIssueSync.objects.bulk_create(
+ [
+ GithubIssueSync(
+ issue_id=github_issue_sync.get("issue"),
+ repo_issue_id=github_issue_sync.get("repo_issue_id"),
+ issue_url=github_issue_sync.get("issue_url"),
+ github_issue_id=github_issue_sync.get("github_issue_id"),
+ repository_sync_id=repo_sync_id,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ for github_issue_sync in github_issue_syncs
+ ],
+ batch_size=100,
+ ignore_conflicts=True,
+ )
- serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
class GithubCommentSyncViewSet(BaseViewSet):
diff --git a/apiserver/plane/api/views/integration/slack.py b/apiserver/plane/api/views/integration/slack.py
index 498dd0607..83aa951ba 100644
--- a/apiserver/plane/api/views/integration/slack.py
+++ b/apiserver/plane/api/views/integration/slack.py
@@ -32,42 +32,25 @@ class SlackProjectSyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
- try:
- serializer = SlackProjectSyncSerializer(data=request.data)
+ serializer = SlackProjectSyncSerializer(data=request.data)
+
+ workspace_integration = WorkspaceIntegration.objects.get(
+ workspace__slug=slug, pk=workspace_integration_id
+ )
+
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ workspace_integration_id=workspace_integration_id,
+ )
workspace_integration = WorkspaceIntegration.objects.get(
- workspace__slug=slug, pk=workspace_integration_id
+ pk=workspace_integration_id, workspace__slug=slug
)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- workspace_integration_id=workspace_integration_id,
- )
-
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=workspace_integration_id, workspace__slug=slug
- )
-
- _ = ProjectMember.objects.get_or_create(
- member=workspace_integration.actor, role=20, project_id=project_id
- )
-
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError:
- return Response(
- {"error": "Slack is already enabled for the project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- print(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ _ = ProjectMember.objects.get_or_create(
+ member=workspace_integration.actor, role=20, project_id=project_id
)
+
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index 2ca1ec014..be95c304e 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -134,364 +134,312 @@ class IssueViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
+ filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
+ order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id)
-
- serializer = IssueCreateSerializer(
- data=request.data,
- context={
- "project_id": project_id,
- "workspace_id": project.workspace_id,
- "default_assignee_id": project.default_assignee_id,
- },
- )
-
- if serializer.is_valid():
- serializer.save()
-
- # Track the issue
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(serializer.data.get("id", None)),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
-
- def retrieve(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.issue_objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ issue_queryset = (
+ self.get_queryset()
+ .filter(**filters)
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
- ).get(workspace__slug=slug, project_id=project_id, pk=pk)
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
)
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
- def partial_update(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
)
- current_instance = json.dumps(
- IssueSerializer(issue).data, cls=DjangoJSONEncoder
- )
- requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
- serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(pk),
- project_id=str(project_id),
- current_instance=current_instance,
- epoch=int(timezone.now().timestamp()),
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
)
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ sub_group_by = request.GET.get("sub_group_by", False)
+ if sub_group_by and sub_group_by == group_by:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
- def destroy(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
+ if group_by:
+ return Response(
+ group_results(issues, group_by, sub_group_by),
+ status=status.HTTP_200_OK,
)
- current_instance = json.dumps(
- IssueSerializer(issue).data, cls=DjangoJSONEncoder
- )
- issue.delete()
+
+ return Response(issues, status=status.HTTP_200_OK)
+
+ def create(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id)
+
+ serializer = IssueCreateSerializer(
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+
+ # Track the issue
issue_activity.delay(
- type="issue.activity.deleted",
- requested_data=json.dumps({"issue_id": str(pk)}),
+ type="issue.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(serializer.data.get("id", None)),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def retrieve(self, request, slug, project_id, pk=None):
+ issue = Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ ).get(workspace__slug=slug, project_id=project_id, pk=pk)
+ return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+
+ def partial_update(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ current_instance = json.dumps(
+ IssueSerializer(issue).data, cls=DjangoJSONEncoder
+ )
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ current_instance = json.dumps(
+ IssueSerializer(issue).data, cls=DjangoJSONEncoder
+ )
+ issue.delete()
+ issue_activity.delay(
+ type="issue.activity.deleted",
+ requested_data=json.dumps({"issue_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class UserWorkSpaceIssues(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug):
- try:
- filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ filters = issue_filters(request.query_params, "GET")
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
+ order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- Issue.issue_objects.filter(
- (
- Q(assignees__in=[request.user])
- | Q(created_by=request.user)
- | Q(issue_subscribers__subscriber=request.user)
- ),
- workspace__slug=slug,
+ issue_queryset = (
+ Issue.issue_objects.filter(
+ (
+ Q(assignees__in=[request.user])
+ | Q(created_by=request.user)
+ | Q(issue_subscribers__subscriber=request.user)
+ ),
+ workspace__slug=slug,
+ )
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by_param)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related("actor"),
)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by_param)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .filter(**filters)
- ).distinct()
+ )
+ .filter(**filters)
+ ).distinct()
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
)
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
+ ).order_by("priority_order")
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
)
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
)
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
- issues = IssueLiteSerializer(issue_queryset, many=True).data
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ sub_group_by = request.GET.get("sub_group_by", False)
+ if sub_group_by and sub_group_by == group_by:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
+ if group_by:
+ return Response(
+ group_results(issues, group_by, sub_group_by),
+ status=status.HTTP_200_OK,
+ )
+
+ return Response(issues, status=status.HTTP_200_OK)
+
class WorkSpaceIssuesEndpoint(BaseAPIView):
permission_classes = [
@@ -500,20 +448,13 @@ class WorkSpaceIssuesEndpoint(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug):
- try:
- issues = (
- Issue.issue_objects.filter(workspace__slug=slug)
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- )
- serializer = IssueSerializer(issues, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issues = (
+ Issue.issue_objects.filter(workspace__slug=slug)
+ .filter(project__project_projectmember__member=self.request.user)
+ .order_by("-created_at")
+ )
+ serializer = IssueSerializer(issues, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class IssueActivityEndpoint(BaseAPIView):
@@ -523,42 +464,35 @@ class IssueActivityEndpoint(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug, project_id, issue_id):
- try:
- issue_activities = (
- IssueActivity.objects.filter(issue_id=issue_id)
- .filter(
- ~Q(field__in=["comment", "vote", "reaction", "draft"]),
- project__project_projectmember__member=self.request.user,
- )
- .select_related("actor", "workspace", "issue", "project")
- ).order_by("created_at")
- issue_comments = (
- IssueComment.objects.filter(issue_id=issue_id)
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("created_at")
- .select_related("actor", "issue", "project", "workspace")
- .prefetch_related(
- Prefetch(
- "comment_reactions",
- queryset=CommentReaction.objects.select_related("actor"),
- )
+ issue_activities = (
+ IssueActivity.objects.filter(issue_id=issue_id)
+ .filter(
+ ~Q(field__in=["comment", "vote", "reaction", "draft"]),
+ project__project_projectmember__member=self.request.user,
+ )
+ .select_related("actor", "workspace", "issue", "project")
+ ).order_by("created_at")
+ issue_comments = (
+ IssueComment.objects.filter(issue_id=issue_id)
+ .filter(project__project_projectmember__member=self.request.user)
+ .order_by("created_at")
+ .select_related("actor", "issue", "project", "workspace")
+ .prefetch_related(
+ Prefetch(
+ "comment_reactions",
+ queryset=CommentReaction.objects.select_related("actor"),
)
)
- issue_activities = IssueActivitySerializer(issue_activities, many=True).data
- issue_comments = IssueCommentSerializer(issue_comments, many=True).data
+ )
+ issue_activities = IssueActivitySerializer(issue_activities, many=True).data
+ issue_comments = IssueCommentSerializer(issue_comments, many=True).data
- result_list = sorted(
- chain(issue_activities, issue_comments),
- key=lambda instance: instance["created_at"],
- )
+ result_list = sorted(
+ chain(issue_activities, issue_comments),
+ key=lambda instance: instance["created_at"],
+ )
- return Response(result_list, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(result_list, status=status.HTTP_200_OK)
class IssueCommentViewSet(BaseViewSet):
@@ -597,91 +531,70 @@ class IssueCommentViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, issue_id):
- try:
- serializer = IssueCommentSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- issue_id=issue_id,
- actor=request.user,
- )
- issue_activity.delay(
- type="comment.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id")),
- project_id=str(self.kwargs.get("project_id")),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ serializer = IssueCommentSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ issue_id=issue_id,
+ actor=request.user,
)
+ issue_activity.delay(
+ type="comment.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, issue_id, pk):
- try:
- issue_comment = IssueComment.objects.get(
- workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
- )
- requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
- current_instance = json.dumps(
- IssueCommentSerializer(issue_comment).data,
- cls=DjangoJSONEncoder,
- )
- serializer = IssueCommentSerializer(
- issue_comment, data=request.data, partial=True
- )
- if serializer.is_valid():
- serializer.save()
- issue_activity.delay(
- type="comment.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=current_instance,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, pk):
- try:
- issue_comment = IssueComment.objects.get(
- workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
- )
- current_instance = json.dumps(
- IssueCommentSerializer(issue_comment).data,
- cls=DjangoJSONEncoder,
- )
- issue_comment.delete()
+ issue_comment = IssueComment.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(
+ IssueCommentSerializer(issue_comment).data,
+ cls=DjangoJSONEncoder,
+ )
+ serializer = IssueCommentSerializer(
+ issue_comment, data=request.data, partial=True
+ )
+ if serializer.is_valid():
+ serializer.save()
issue_activity.delay(
- type="comment.activity.deleted",
- requested_data=json.dumps({"comment_id": str(pk)}),
+ type="comment.activity.updated",
+ requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, issue_id, pk):
+ issue_comment = IssueComment.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ current_instance = json.dumps(
+ IssueCommentSerializer(issue_comment).data,
+ cls=DjangoJSONEncoder,
+ )
+ issue_comment.delete()
+ issue_activity.delay(
+ type="comment.activity.deleted",
+ requested_data=json.dumps({"comment_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class IssuePropertyViewSet(BaseViewSet):
@@ -719,30 +632,22 @@ class IssuePropertyViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- issue_property, created = IssueProperty.objects.get_or_create(
- user=request.user,
- project_id=project_id,
- )
-
- if not created:
- issue_property.properties = request.data.get("properties", {})
- issue_property.save()
-
- serializer = IssuePropertySerializer(issue_property)
- return Response(serializer.data, status=status.HTTP_200_OK)
+ issue_property, created = IssueProperty.objects.get_or_create(
+ user=request.user,
+ project_id=project_id,
+ )
+ if not created:
issue_property.properties = request.data.get("properties", {})
issue_property.save()
- serializer = IssuePropertySerializer(issue_property)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = IssuePropertySerializer(issue_property)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ issue_property.properties = request.data.get("properties", {})
+ issue_property.save()
+ serializer = IssuePropertySerializer(issue_property)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
class LabelViewSet(BaseViewSet):
@@ -764,12 +669,6 @@ class LabelViewSet(BaseViewSet):
{"error": "Label with the same name already exists in the project"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
def get_queryset(self):
return self.filter_queryset(
@@ -792,34 +691,27 @@ class BulkDeleteIssuesEndpoint(BaseAPIView):
]
def delete(self, request, slug, project_id):
- try:
- issue_ids = request.data.get("issue_ids", [])
-
- if not len(issue_ids):
- return Response(
- {"error": "Issue IDs are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- issues = Issue.issue_objects.filter(
- workspace__slug=slug, project_id=project_id, pk__in=issue_ids
- )
-
- total_issues = len(issues)
-
- issues.delete()
+ issue_ids = request.data.get("issue_ids", [])
+ if not len(issue_ids):
return Response(
- {"message": f"{total_issues} issues were deleted"},
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Issue IDs are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ issues = Issue.issue_objects.filter(
+ workspace__slug=slug, project_id=project_id, pk__in=issue_ids
+ )
+
+ total_issues = len(issues)
+
+ issues.delete()
+
+ return Response(
+ {"message": f"{total_issues} issues were deleted"},
+ status=status.HTTP_200_OK,
+ )
+
class SubIssuesEndpoint(BaseAPIView):
permission_classes = [
@@ -828,111 +720,89 @@ class SubIssuesEndpoint(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug, project_id, issue_id):
- try:
- sub_issues = (
- Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
+ sub_issues = (
+ Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related("actor"),
)
)
+ )
- state_distribution = (
- State.objects.filter(
- workspace__slug=slug, state_issue__parent_id=issue_id
- )
- .annotate(state_group=F("group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
+ state_distribution = (
+ State.objects.filter(workspace__slug=slug, state_issue__parent_id=issue_id)
+ .annotate(state_group=F("group"))
+ .values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
- result = {
- item["state_group"]: item["state_count"] for item in state_distribution
- }
+ result = {
+ item["state_group"]: item["state_count"] for item in state_distribution
+ }
- serializer = IssueLiteSerializer(
- sub_issues,
- many=True,
- )
- return Response(
- {
- "sub_issues": serializer.data,
- "state_distribution": result,
- },
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = IssueLiteSerializer(
+ sub_issues,
+ many=True,
+ )
+ return Response(
+ {
+ "sub_issues": serializer.data,
+ "state_distribution": result,
+ },
+ status=status.HTTP_200_OK,
+ )
# Assign multiple sub issues
def post(self, request, slug, project_id, issue_id):
- try:
- parent_issue = Issue.issue_objects.get(pk=issue_id)
- sub_issue_ids = request.data.get("sub_issue_ids", [])
-
- if not len(sub_issue_ids):
- return Response(
- {"error": "Sub Issue IDs are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
-
- for sub_issue in sub_issues:
- sub_issue.parent = parent_issue
-
- _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)
-
- updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
+ parent_issue = Issue.issue_objects.get(pk=issue_id)
+ sub_issue_ids = request.data.get("sub_issue_ids", [])
+ if not len(sub_issue_ids):
return Response(
- IssueFlatSerializer(updated_sub_issues, many=True).data,
- status=status.HTTP_200_OK,
- )
- except Issue.DoesNotExist:
- return Response(
- {"Parent Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Sub Issue IDs are required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
+
+ for sub_issue in sub_issues:
+ sub_issue.parent = parent_issue
+
+ _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)
+
+ updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
+
+ return Response(
+ IssueFlatSerializer(updated_sub_issues, many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
class IssueLinkViewSet(BaseViewSet):
permission_classes = [
@@ -955,129 +825,95 @@ class IssueLinkViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, issue_id):
- try:
- serializer = IssueLinkSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- issue_id=issue_id,
- )
- issue_activity.delay(
- type="link.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id")),
- project_id=str(self.kwargs.get("project_id")),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, issue_id, pk):
- try:
- issue_link = IssueLink.objects.get(
- workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
- )
- requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
- current_instance = json.dumps(
- IssueLinkSerializer(issue_link).data,
- cls=DjangoJSONEncoder,
- )
- serializer = IssueLinkSerializer(
- issue_link, data=request.data, partial=True
- )
- if serializer.is_valid():
- serializer.save()
- issue_activity.delay(
- type="link.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=current_instance,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, pk):
- try:
- issue_link = IssueLink.objects.get(
- workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
- )
- current_instance = json.dumps(
- IssueLinkSerializer(issue_link).data,
- cls=DjangoJSONEncoder,
+ serializer = IssueLinkSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ issue_id=issue_id,
)
issue_activity.delay(
- type="link.activity.deleted",
- requested_data=json.dumps({"link_id": str(pk)}),
+ type="link.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def partial_update(self, request, slug, project_id, issue_id, pk):
+ issue_link = IssueLink.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(
+ IssueLinkSerializer(issue_link).data,
+ cls=DjangoJSONEncoder,
+ )
+ serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="link.activity.updated",
+ requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
- issue_link.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, issue_id, pk):
+ issue_link = IssueLink.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ current_instance = json.dumps(
+ IssueLinkSerializer(issue_link).data,
+ cls=DjangoJSONEncoder,
+ )
+ issue_activity.delay(
+ type="link.activity.deleted",
+ requested_data=json.dumps({"link_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ issue_link.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class BulkCreateIssueLabelsEndpoint(BaseAPIView):
def post(self, request, slug, project_id):
- try:
- label_data = request.data.get("label_data", [])
- project = Project.objects.get(pk=project_id)
+ label_data = request.data.get("label_data", [])
+ project = Project.objects.get(pk=project_id)
- labels = Label.objects.bulk_create(
- [
- Label(
- name=label.get("name", "Migrated"),
- description=label.get("description", "Migrated Issue"),
- color="#" + "%06x" % random.randint(0, 0xFFFFFF),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for label in label_data
- ],
- batch_size=50,
- ignore_conflicts=True,
- )
+ labels = Label.objects.bulk_create(
+ [
+ Label(
+ name=label.get("name", "Migrated"),
+ description=label.get("description", "Migrated Issue"),
+ color="#" + "%06x" % random.randint(0, 0xFFFFFF),
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ for label in label_data
+ ],
+ batch_size=50,
+ ignore_conflicts=True,
+ )
- return Response(
- {"labels": LabelSerializer(labels, many=True).data},
- status=status.HTTP_201_CREATED,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {"labels": LabelSerializer(labels, many=True).data},
+ status=status.HTTP_201_CREATED,
+ )
class IssueAttachmentEndpoint(BaseAPIView):
@@ -1089,66 +925,46 @@ class IssueAttachmentEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser)
def post(self, request, slug, project_id, issue_id):
- try:
- serializer = IssueAttachmentSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(project_id=project_id, issue_id=issue_id)
- issue_activity.delay(
- type="attachment.activity.created",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- serializer.data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, slug, project_id, issue_id, pk):
- try:
- issue_attachment = IssueAttachment.objects.get(pk=pk)
- issue_attachment.asset.delete(save=False)
- issue_attachment.delete()
+ serializer = IssueAttachmentSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(project_id=project_id, issue_id=issue_id)
issue_activity.delay(
- type="attachment.activity.deleted",
+ type="attachment.activity.created",
requested_data=None,
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
+ current_instance=json.dumps(
+ serializer.data,
+ cls=DjangoJSONEncoder,
+ ),
epoch=int(timezone.now().timestamp()),
)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueAttachment.DoesNotExist:
- return Response(
- {"error": "Issue Attachment does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ def delete(self, request, slug, project_id, issue_id, pk):
+ issue_attachment = IssueAttachment.objects.get(pk=pk)
+ issue_attachment.asset.delete(save=False)
+ issue_attachment.delete()
+ issue_activity.delay(
+ type="attachment.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ return Response(status=status.HTTP_204_NO_CONTENT)
def get(self, request, slug, project_id, issue_id):
- try:
- issue_attachments = IssueAttachment.objects.filter(
- issue_id=issue_id, workspace__slug=slug, project_id=project_id
- )
- serilaizer = IssueAttachmentSerializer(issue_attachments, many=True)
- return Response(serilaizer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_attachments = IssueAttachment.objects.filter(
+ issue_id=issue_id, workspace__slug=slug, project_id=project_id
+ )
+ serilaizer = IssueAttachmentSerializer(issue_attachments, many=True)
+ return Response(serilaizer.data, status=status.HTTP_200_OK)
class IssueArchiveViewSet(BaseViewSet):
@@ -1179,170 +995,134 @@ class IssueArchiveViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
- show_sub_issues = request.GET.get("show_sub_issues", "true")
+ filters = issue_filters(request.query_params, "GET")
+ show_sub_issues = request.GET.get("show_sub_issues", "true")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
+ order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ issue_queryset = (
+ self.get_queryset()
+ .filter(**filters)
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issue_queryset = (
- issue_queryset
- if show_sub_issues == "true"
- else issue_queryset.filter(parent__isnull=True)
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
+ )
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
)
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ issue_queryset = (
+ issue_queryset
+ if show_sub_issues == "true"
+ else issue_queryset.filter(parent__isnull=True)
+ )
+
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ if group_by:
+ return Response(group_results(issues, group_by), status=status.HTTP_200_OK)
+
+ return Response(issues, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- archived_at__isnull=False,
- pk=pk,
- )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue = Issue.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ archived_at__isnull=False,
+ pk=pk,
+ )
+ return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
def unarchive(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- archived_at__isnull=False,
- pk=pk,
- )
- issue.archived_at = None
- issue.save()
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=json.dumps({"archived_at": None}),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
+ issue = Issue.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ archived_at__isnull=False,
+ pk=pk,
+ )
+ issue.archived_at = None
+ issue.save()
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=json.dumps({"archived_at": None}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong, please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
class IssueSubscriberViewSet(BaseViewSet):
@@ -1384,122 +1164,77 @@ class IssueSubscriberViewSet(BaseViewSet):
)
def list(self, request, slug, project_id, issue_id):
- try:
- members = (
- ProjectMember.objects.filter(
- workspace__slug=slug, project_id=project_id
- )
- .annotate(
- is_subscribed=Exists(
- IssueSubscriber.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=issue_id,
- subscriber=OuterRef("member"),
- )
+ members = (
+ ProjectMember.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ )
+ .annotate(
+ is_subscribed=Exists(
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=issue_id,
+ subscriber=OuterRef("member"),
)
)
- .select_related("member")
- )
- serializer = ProjectMemberLiteSerializer(members, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": e},
- status=status.HTTP_400_BAD_REQUEST,
)
+ .select_related("member")
+ )
+ serializer = ProjectMemberLiteSerializer(members, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, issue_id, subscriber_id):
- try:
- issue_subscriber = IssueSubscriber.objects.get(
- project=project_id,
- subscriber=subscriber_id,
- workspace__slug=slug,
- issue=issue_id,
- )
- issue_subscriber.delete()
- return Response(
- status=status.HTTP_204_NO_CONTENT,
- )
- except IssueSubscriber.DoesNotExist:
- return Response(
- {"error": "User is not subscribed to this issue"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_subscriber = IssueSubscriber.objects.get(
+ project=project_id,
+ subscriber=subscriber_id,
+ workspace__slug=slug,
+ issue=issue_id,
+ )
+ issue_subscriber.delete()
+ return Response(
+ status=status.HTTP_204_NO_CONTENT,
+ )
def subscribe(self, request, slug, project_id, issue_id):
- try:
- if IssueSubscriber.objects.filter(
- issue_id=issue_id,
- subscriber=request.user,
- workspace__slug=slug,
- project=project_id,
- ).exists():
- return Response(
- {"message": "User already subscribed to the issue."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- subscriber = IssueSubscriber.objects.create(
- issue_id=issue_id,
- subscriber_id=request.user.id,
- project_id=project_id,
- )
- serilaizer = IssueSubscriberSerializer(subscriber)
- return Response(serilaizer.data, status=status.HTTP_201_CREATED)
- except Exception as e:
- capture_exception(e)
+ if IssueSubscriber.objects.filter(
+ issue_id=issue_id,
+ subscriber=request.user,
+ workspace__slug=slug,
+ project=project_id,
+ ).exists():
return Response(
- {"error": "Something went wrong, please try again later"},
+ {"message": "User already subscribed to the issue."},
status=status.HTTP_400_BAD_REQUEST,
)
+ subscriber = IssueSubscriber.objects.create(
+ issue_id=issue_id,
+ subscriber_id=request.user.id,
+ project_id=project_id,
+ )
+ serializer = IssueSubscriberSerializer(subscriber)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+
def unsubscribe(self, request, slug, project_id, issue_id):
- try:
- issue_subscriber = IssueSubscriber.objects.get(
- project=project_id,
- subscriber=request.user,
- workspace__slug=slug,
- issue=issue_id,
- )
- issue_subscriber.delete()
- return Response(
- status=status.HTTP_204_NO_CONTENT,
- )
- except IssueSubscriber.DoesNotExist:
- return Response(
- {"error": "User subscribed to this issue"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_subscriber = IssueSubscriber.objects.get(
+ project=project_id,
+ subscriber=request.user,
+ workspace__slug=slug,
+ issue=issue_id,
+ )
+ issue_subscriber.delete()
+ return Response(
+ status=status.HTTP_204_NO_CONTENT,
+ )
def subscription_status(self, request, slug, project_id, issue_id):
- try:
- issue_subscriber = IssueSubscriber.objects.filter(
- issue=issue_id,
- subscriber=request.user,
- workspace__slug=slug,
- project=project_id,
- ).exists()
- return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong, please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_subscriber = IssueSubscriber.objects.filter(
+ issue=issue_id,
+ subscriber=request.user,
+ workspace__slug=slug,
+ project=project_id,
+ ).exists()
+ return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK)
class IssueReactionViewSet(BaseViewSet):
@@ -1522,68 +1257,49 @@ class IssueReactionViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, issue_id):
- try:
- serializer = IssueReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- issue_id=issue_id,
- project_id=project_id,
- actor=request.user,
- )
- issue_activity.delay(
- type="issue_reaction.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, reaction_code):
- try:
- issue_reaction = IssueReaction.objects.get(
- workspace__slug=slug,
- project_id=project_id,
+ serializer = IssueReactionSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
issue_id=issue_id,
- reaction=reaction_code,
+ project_id=project_id,
actor=request.user,
)
issue_activity.delay(
- type="issue_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(issue_reaction.id),
- }
- ),
+ type="issue_reaction.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
epoch=int(timezone.now().timestamp()),
)
- issue_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueReaction.DoesNotExist:
- return Response(
- {"error": "Issue reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, issue_id, reaction_code):
+ issue_reaction = IssueReaction.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=issue_id,
+ reaction=reaction_code,
+ actor=request.user,
+ )
+ issue_activity.delay(
+ type="issue_reaction.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "reaction": str(reaction_code),
+ "identifier": str(issue_reaction.id),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ issue_reaction.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class CommentReactionViewSet(BaseViewSet):
@@ -1606,69 +1322,50 @@ class CommentReactionViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, comment_id):
- try:
- serializer = CommentReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- actor_id=request.user.id,
- comment_id=comment_id,
- )
- issue_activity.delay(
- type="comment_reaction.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=None,
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, comment_id, reaction_code):
- try:
- comment_reaction = CommentReaction.objects.get(
- workspace__slug=slug,
+ serializer = CommentReactionSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
project_id=project_id,
+ actor_id=request.user.id,
comment_id=comment_id,
- reaction=reaction_code,
- actor=request.user,
)
issue_activity.delay(
- type="comment_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
+ type="comment_reaction.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(comment_reaction.id),
- "comment_id": str(comment_id),
- }
- ),
+ project_id=str(project_id),
+ current_instance=None,
epoch=int(timezone.now().timestamp()),
)
- comment_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CommentReaction.DoesNotExist:
- return Response(
- {"error": "Comment reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, comment_id, reaction_code):
+ comment_reaction = CommentReaction.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ comment_id=comment_id,
+ reaction=reaction_code,
+ actor=request.user,
+ )
+ issue_activity.delay(
+ type="comment_reaction.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "reaction": str(reaction_code),
+ "identifier": str(comment_reaction.id),
+ "comment_id": str(comment_id),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ comment_reaction.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class IssueCommentPublicViewSet(BaseViewSet):
@@ -1725,109 +1422,65 @@ class IssueCommentPublicViewSet(BaseViewSet):
return IssueComment.objects.none()
def create(self, request, slug, project_id, issue_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IssueCommentSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- issue_id=issue_id,
- actor=request.user,
- access="EXTERNAL",
- )
- issue_activity.delay(
- type="comment.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- if not ProjectMember.objects.filter(
- project_id=project_id,
- member=request.user,
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
-
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
+ if not project_deploy_board.comments:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Comments are not enabled for this project"},
status=status.HTTP_400_BAD_REQUEST,
)
- def partial_update(self, request, slug, project_id, issue_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- comment = IssueComment.objects.get(
- workspace__slug=slug, pk=pk, actor=request.user
- )
- serializer = IssueCommentSerializer(
- comment, data=request.data, partial=True
- )
- if serializer.is_valid():
- serializer.save()
- issue_activity.delay(
- type="comment.activity.updated",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueCommentSerializer(comment).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist):
- return Response(
- {"error": "IssueComent Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- comment = IssueComment.objects.get(
- workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user
+ serializer = IssueCommentSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ issue_id=issue_id,
+ actor=request.user,
+ access="EXTERNAL",
)
issue_activity.delay(
- type="comment.activity.deleted",
- requested_data=json.dumps({"comment_id": str(pk)}),
+ type="comment.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ if not ProjectMember.objects.filter(
+ project_id=project_id,
+ member=request.user,
+ ).exists():
+ # Add the user for workspace tracking
+ _ = ProjectPublicMember.objects.get_or_create(
+ project_id=project_id,
+ member=request.user,
+ )
+
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def partial_update(self, request, slug, project_id, issue_id, pk):
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+
+ if not project_deploy_board.comments:
+ return Response(
+ {"error": "Comments are not enabled for this project"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ comment = IssueComment.objects.get(
+ workspace__slug=slug, pk=pk, actor=request.user
+ )
+ serializer = IssueCommentSerializer(comment, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="comment.activity.updated",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
@@ -1837,19 +1490,36 @@ class IssueCommentPublicViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
)
- comment.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist):
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, issue_id, pk):
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+
+ if not project_deploy_board.comments:
return Response(
- {"error": "IssueComent Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Comments are not enabled for this project"},
status=status.HTTP_400_BAD_REQUEST,
)
+ comment = IssueComment.objects.get(
+ workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user
+ )
+ issue_activity.delay(
+ type="comment.activity.deleted",
+ requested_data=json.dumps({"comment_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ IssueCommentSerializer(comment).data,
+ cls=DjangoJSONEncoder,
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ comment.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class IssueReactionPublicViewSet(BaseViewSet):
@@ -1878,98 +1548,74 @@ class IssueReactionPublicViewSet(BaseViewSet):
return IssueReaction.objects.none()
def create(self, request, slug, project_id, issue_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+
+ if not project_deploy_board.reactions:
+ return Response(
+ {"error": "Reactions are not enabled for this project board"},
+ status=status.HTTP_400_BAD_REQUEST,
)
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this project board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IssueReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id, issue_id=issue_id, actor=request.user
- )
- if not ProjectMember.objects.filter(
+ serializer = IssueReactionSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id, issue_id=issue_id, actor=request.user
+ )
+ if not ProjectMember.objects.filter(
+ project_id=project_id,
+ member=request.user,
+ ).exists():
+ # Add the user for workspace tracking
+ _ = ProjectPublicMember.objects.get_or_create(
project_id=project_id,
member=request.user,
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_activity.delay(
- type="issue_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project board does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, reaction_code):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this project board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- issue_reaction = IssueReaction.objects.get(
- workspace__slug=slug,
- issue_id=issue_id,
- reaction=reaction_code,
- actor=request.user,
- )
issue_activity.delay(
- type="issue_reaction.activity.deleted",
- requested_data=None,
+ type="issue_reaction.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(issue_reaction.id),
- }
- ),
+ current_instance=None,
epoch=int(timezone.now().timestamp()),
)
- issue_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueReaction.DoesNotExist:
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, issue_id, reaction_code):
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+
+ if not project_deploy_board.reactions:
return Response(
- {"error": "Issue reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Reactions are not enabled for this project board"},
status=status.HTTP_400_BAD_REQUEST,
)
+ issue_reaction = IssueReaction.objects.get(
+ workspace__slug=slug,
+ issue_id=issue_id,
+ reaction=reaction_code,
+ actor=request.user,
+ )
+ issue_activity.delay(
+ type="issue_reaction.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "reaction": str(reaction_code),
+ "identifier": str(issue_reaction.id),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ issue_reaction.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class CommentReactionPublicViewSet(BaseViewSet):
@@ -1998,105 +1644,76 @@ class CommentReactionPublicViewSet(BaseViewSet):
return CommentReaction.objects.none()
def create(self, request, slug, project_id, comment_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = CommentReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id, comment_id=comment_id, actor=request.user
- )
- if not ProjectMember.objects.filter(
- project_id=project_id, member=request.user
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_activity.delay(
- type="comment_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IssueComment.DoesNotExist:
+ if not project_deploy_board.reactions:
return Response(
- {"error": "Comment does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project board does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Reactions are not enabled for this board"},
status=status.HTTP_400_BAD_REQUEST,
)
- def destroy(self, request, slug, project_id, comment_id, reaction_code):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
+ serializer = CommentReactionSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id, comment_id=comment_id, actor=request.user
)
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this board"},
- status=status.HTTP_400_BAD_REQUEST,
+ if not ProjectMember.objects.filter(
+ project_id=project_id, member=request.user
+ ).exists():
+ # Add the user for workspace tracking
+ _ = ProjectPublicMember.objects.get_or_create(
+ project_id=project_id,
+ member=request.user,
)
-
- comment_reaction = CommentReaction.objects.get(
- project_id=project_id,
- workspace__slug=slug,
- comment_id=comment_id,
- reaction=reaction_code,
- actor=request.user,
- )
issue_activity.delay(
- type="comment_reaction.activity.deleted",
- requested_data=None,
+ type="comment_reaction.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(comment_reaction.id),
- "comment_id": str(comment_id),
- }
- ),
+ current_instance=None,
epoch=int(timezone.now().timestamp()),
)
- comment_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CommentReaction.DoesNotExist:
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, slug, project_id, comment_id, reaction_code):
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+ if not project_deploy_board.reactions:
return Response(
- {"error": "Comment reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Reactions are not enabled for this board"},
status=status.HTTP_400_BAD_REQUEST,
)
+ comment_reaction = CommentReaction.objects.get(
+ project_id=project_id,
+ workspace__slug=slug,
+ comment_id=comment_id,
+ reaction=reaction_code,
+ actor=request.user,
+ )
+ issue_activity.delay(
+ type="comment_reaction.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "reaction": str(reaction_code),
+ "identifier": str(comment_reaction.id),
+ "comment_id": str(comment_id),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ comment_reaction.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
class IssueVotePublicViewSet(BaseViewSet):
model = IssueVote
@@ -2122,74 +1739,56 @@ class IssueVotePublicViewSet(BaseViewSet):
return IssueVote.objects.none()
def create(self, request, slug, project_id, issue_id):
- try:
- issue_vote, _ = IssueVote.objects.get_or_create(
- actor_id=request.user.id,
+ issue_vote, _ = IssueVote.objects.get_or_create(
+ actor_id=request.user.id,
+ project_id=project_id,
+ issue_id=issue_id,
+ )
+ # Add the user for workspace tracking
+ if not ProjectMember.objects.filter(
+ project_id=project_id, member=request.user
+ ).exists():
+ _ = ProjectPublicMember.objects.get_or_create(
project_id=project_id,
- issue_id=issue_id,
- )
- # Add the user for workspace tracking
- if not ProjectMember.objects.filter(
- project_id=project_id, member=request.user
- ).exists():
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_vote.vote = request.data.get("vote", 1)
- issue_vote.save()
- issue_activity.delay(
- type="issue_vote.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- serializer = IssueVoteSerializer(issue_vote)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except IntegrityError:
- return Response(
- {"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ member=request.user,
)
+ issue_vote.vote = request.data.get("vote", 1)
+ issue_vote.save()
+ issue_activity.delay(
+ type="issue_vote.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ serializer = IssueVoteSerializer(issue_vote)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, issue_id):
- try:
- issue_vote = IssueVote.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=issue_id,
- actor_id=request.user.id,
- )
- issue_activity.delay(
- type="issue_vote.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "vote": str(issue_vote.vote),
- "identifier": str(issue_vote.id),
- }
- ),
- epoch=int(timezone.now().timestamp()),
- )
- issue_vote.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_vote = IssueVote.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=issue_id,
+ actor_id=request.user.id,
+ )
+ issue_activity.delay(
+ type="issue_vote.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "vote": str(issue_vote.vote),
+ "identifier": str(issue_vote.id),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ issue_vote.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class IssueRelationViewSet(BaseViewSet):
@@ -2214,93 +1813,67 @@ class IssueRelationViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, issue_id):
- try:
- related_list = request.data.get("related_list", [])
- relation = request.data.get("relation", None)
- project = Project.objects.get(pk=project_id)
+ related_list = request.data.get("related_list", [])
+ relation = request.data.get("relation", None)
+ project = Project.objects.get(pk=project_id)
- issue_relation = IssueRelation.objects.bulk_create(
- [
- IssueRelation(
- issue_id=related_issue["issue"],
- related_issue_id=related_issue["related_issue"],
- relation_type=related_issue["relation_type"],
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for related_issue in related_list
- ],
- batch_size=10,
- ignore_conflicts=True,
- )
+ issue_relation = IssueRelation.objects.bulk_create(
+ [
+ IssueRelation(
+ issue_id=related_issue["issue"],
+ related_issue_id=related_issue["related_issue"],
+ relation_type=related_issue["relation_type"],
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ for related_issue in related_list
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
- issue_activity.delay(
- type="issue_relation.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
+ issue_activity.delay(
+ type="issue_relation.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
- if relation == "blocking":
- return Response(
- RelatedIssueSerializer(issue_relation, many=True).data,
- status=status.HTTP_201_CREATED,
- )
- else:
- return Response(
- IssueRelationSerializer(issue_relation, many=True).data,
- status=status.HTTP_201_CREATED,
- )
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"name": "The issue is already taken"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
+ if relation == "blocking":
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ RelatedIssueSerializer(issue_relation, many=True).data,
+ status=status.HTTP_201_CREATED,
+ )
+ else:
+ return Response(
+ IssueRelationSerializer(issue_relation, many=True).data,
+ status=status.HTTP_201_CREATED,
)
def destroy(self, request, slug, project_id, issue_id, pk):
- try:
- issue_relation = IssueRelation.objects.get(
- workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
- )
- current_instance = json.dumps(
- IssueRelationSerializer(issue_relation).data,
- cls=DjangoJSONEncoder,
- )
- issue_relation.delete()
- issue_activity.delay(
- type="issue_relation.activity.deleted",
- requested_data=json.dumps({"related_list": None}),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=current_instance,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_relation = IssueRelation.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ current_instance = json.dumps(
+ IssueRelationSerializer(issue_relation).data,
+ cls=DjangoJSONEncoder,
+ )
+ issue_relation.delete()
+ issue_activity.delay(
+ type="issue_relation.activity.deleted",
+ requested_data=json.dumps({"related_list": None}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class IssueRetrievePublicEndpoint(BaseAPIView):
@@ -2309,22 +1882,11 @@ class IssueRetrievePublicEndpoint(BaseAPIView):
]
def get(self, request, slug, project_id, issue_id):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=issue_id
- )
- serializer = IssuePublicSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- print(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue = Issue.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=issue_id
+ )
+ serializer = IssuePublicSerializer(issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectIssuesPublicEndpoint(BaseAPIView):
@@ -2333,176 +1895,161 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
]
def get(self, request, slug, project_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+
+ filters = issue_filters(request.query_params, "GET")
+
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
+ order_by_param = request.GET.get("order_by", "-created_at")
+
+ issue_queryset = (
+ Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- Issue.issue_objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project", "workspace", "state", "parent")
- .prefetch_related("assignees", "labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .prefetch_related(
- Prefetch(
- "votes",
- queryset=IssueVote.objects.select_related("actor"),
- )
- )
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project", "workspace", "state", "parent")
+ .prefetch_related("assignees", "labels")
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related("actor"),
)
)
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
+ .prefetch_related(
+ Prefetch(
+ "votes",
+ queryset=IssueVote.objects.select_related("actor"),
)
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssuePublicSerializer(issue_queryset, many=True).data
-
- state_group_order = [
- "backlog",
- "unstarted",
- "started",
- "completed",
- "cancelled",
- ]
-
- states = (
- State.objects.filter(
- ~Q(name="Triage"),
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(
- custom_order=Case(
- *[
- When(group=value, then=Value(index))
- for index, value in enumerate(state_group_order)
- ],
- default=Value(len(state_group_order)),
- output_field=IntegerField(),
- ),
- )
- .values("name", "group", "color", "id")
- .order_by("custom_order", "sequence")
)
-
- labels = Label.objects.filter(
- workspace__slug=slug, project_id=project_id
- ).values("id", "name", "color", "parent")
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- issues = group_results(issues, group_by)
-
- return Response(
- {
- "issues": issues,
- "states": states,
- "labels": labels,
- },
- status=status.HTTP_200_OK,
+ .filter(**filters)
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Board does not exists"}, status=status.HTTP_404_NOT_FOUND
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
)
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ issues = IssuePublicSerializer(issue_queryset, many=True).data
+
+ state_group_order = [
+ "backlog",
+ "unstarted",
+ "started",
+ "completed",
+ "cancelled",
+ ]
+
+ states = (
+ State.objects.filter(
+ ~Q(name="Triage"),
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(
+ custom_order=Case(
+ *[
+ When(group=value, then=Value(index))
+ for index, value in enumerate(state_group_order)
+ ],
+ default=Value(len(state_group_order)),
+ output_field=IntegerField(),
+ ),
+ )
+ .values("name", "group", "color", "id")
+ .order_by("custom_order", "sequence")
+ )
+
+ labels = Label.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).values("id", "name", "color", "parent")
+
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ if group_by:
+ issues = group_results(issues, group_by)
+
+ return Response(
+ {
+ "issues": issues,
+ "states": states,
+ "labels": labels,
+ },
+ status=status.HTTP_200_OK,
+ )
class IssueDraftViewSet(BaseViewSet):
@@ -2539,218 +2086,171 @@ class IssueDraftViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
+ filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
+ order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ issue_queryset = (
+ self.get_queryset()
+ .filter(**filters)
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
+ )
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ if group_by:
+ return Response(group_results(issues, group_by), status=status.HTTP_200_OK)
+
+ return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id)
+ project = Project.objects.get(pk=project_id)
- serializer = IssueCreateSerializer(
- data=request.data,
- context={
- "project_id": project_id,
- "workspace_id": project.workspace_id,
- "default_assignee_id": project.default_assignee_id,
- },
- )
+ serializer = IssueCreateSerializer(
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
+ )
- if serializer.is_valid():
- serializer.save(is_draft=True)
+ if serializer.is_valid():
+ serializer.save(is_draft=True)
- # Track the issue
- issue_activity.delay(
- type="issue_draft.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(serializer.data.get("id", None)),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
-
- def partial_update(self, request, slug, project_id, pk):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- serializer = IssueSerializer(issue, data=request.data, partial=True)
-
- if serializer.is_valid():
- if request.data.get("is_draft") is not None and not request.data.get(
- "is_draft"
- ):
- serializer.save(
- created_at=timezone.now(), updated_at=timezone.now()
- )
- else:
- serializer.save()
- issue_activity.delay(
- type="issue_draft.activity.updated",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueSerializer(issue).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp()),
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def retrieve(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
- )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
-
- def destroy(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
- current_instance=json.dumps(
- IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
- )
- issue.delete()
+ # Track the issue
issue_activity.delay(
- type="issue_draft.activity.deleted",
- requested_data=json.dumps(
- {"issue_id": str(pk)}
- ),
+ type="issue_draft.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
- issue_id=str(pk),
+ issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id),
- current_instance=current_instance,
+ current_instance=None,
epoch=int(timezone.now().timestamp()),
)
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def partial_update(self, request, slug, project_id, pk):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ serializer = IssueSerializer(issue, data=request.data, partial=True)
+
+ if serializer.is_valid():
+ if request.data.get("is_draft") is not None and not request.data.get(
+ "is_draft"
+ ):
+ serializer.save(created_at=timezone.now(), updated_at=timezone.now())
+ else:
+ serializer.save()
+ issue_activity.delay(
+ type="issue_draft.activity.updated",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("pk", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ IssueSerializer(issue).data,
+ cls=DjangoJSONEncoder,
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def retrieve(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
+ )
+ return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+
+ def destroy(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+        current_instance = json.dumps(
+            IssueSerializer(issue).data, cls=DjangoJSONEncoder
+        )
+ issue.delete()
+ issue_activity.delay(
+ type="issue_draft.activity.deleted",
+ requested_data=json.dumps({"issue_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 8dda63968..ba088ea9c 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -142,180 +142,143 @@ class ModuleViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(workspace__slug=slug, pk=project_id)
- serializer = ModuleWriteSerializer(
- data=request.data, context={"project": project}
- )
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+ serializer = ModuleWriteSerializer(
+ data=request.data, context={"project": project}
+ )
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"name": "The module name is already taken"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk):
- try:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = self.get_queryset().get(pk=pk)
- assignee_distribution = (
- Issue.objects.filter(
- issue_module__module_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(first_name=F("assignees__first_name"))
- .annotate(last_name=F("assignees__last_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(display_name=F("assignees__display_name"))
- .annotate(avatar=F("assignees__avatar"))
- .values(
- "first_name", "last_name", "assignee_id", "avatar", "display_name"
- )
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("first_name", "last_name")
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_module__module_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
)
-
- label_distribution = (
- Issue.objects.filter(
- issue_module__module_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(
- archived_at__isnull=True,
- is_draft=False,
- ),
+ .annotate(first_name=F("assignees__first_name"))
+ .annotate(last_name=F("assignees__last_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(display_name=F("assignees__display_name"))
+ .annotate(avatar=F("assignees__avatar"))
+ .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
+ .annotate(
+ total_issues=Count(
+ "assignee_id",
+ filter=Q(
+ archived_at__isnull=True,
+ is_draft=False,
),
)
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .annotate(
+ completed_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
)
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
+ )
+ .annotate(
+ pending_issues=Count(
+ "assignee_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
)
- .order_by("label_name")
+ )
+ .order_by("first_name", "last_name")
+ )
+
+ label_distribution = (
+ Issue.objects.filter(
+ issue_module__module_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "label_id",
+ filter=Q(
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "label_id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
+ )
+
+ data = ModuleSerializer(queryset).data
+ data["distribution"] = {
+ "assignees": assignee_distribution,
+ "labels": label_distribution,
+ "completion_chart": {},
+ }
+
+ if queryset.start_date and queryset.target_date:
+ data["distribution"]["completion_chart"] = burndown_plot(
+ queryset=queryset, slug=slug, project_id=project_id, module_id=pk
)
- data = ModuleSerializer(queryset).data
- data["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
-
- if queryset.start_date and queryset.target_date:
- data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset, slug=slug, project_id=project_id, module_id=pk
- )
-
- return Response(
- data,
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ data,
+ status=status.HTTP_200_OK,
+ )
def destroy(self, request, slug, project_id, pk):
- try:
- module = Module.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- module_issues = list(
- ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
- )
- module.delete()
- issue_activity.delay(
- type="module.activity.deleted",
- requested_data=json.dumps(
- {
- "module_id": str(pk),
- "issues": [str(issue_id) for issue_id in module_issues],
- }
- ),
- actor_id=str(request.user.id),
- issue_id=str(pk),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ module_issues = list(
+ ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
+ )
+ module.delete()
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "module_id": str(pk),
+ "issues": [str(issue_id) for issue_id in module_issues],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleIssueViewSet(BaseViewSet):
@@ -337,7 +300,6 @@ class ModuleIssueViewSet(BaseViewSet):
module_id=self.kwargs.get("module_id"),
)
-
def get_queryset(self):
return self.filter_queryset(
super()
@@ -363,190 +325,163 @@ class ModuleIssueViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id):
- try:
- order_by = request.GET.get("order_by", "created_at")
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.issue_objects.filter(issue_module__module_id=module_id)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(bridge_id=F("issue_module__id"))
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by)
- .filter(**filters)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ order_by = request.GET.get("order_by", "created_at")
+ group_by = request.GET.get("group_by", False)
+ sub_group_by = request.GET.get("sub_group_by", False)
+ filters = issue_filters(request.query_params, "GET")
+ issues = (
+ Issue.issue_objects.filter(issue_module__module_id=module_id)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- issues_data = IssueStateSerializer(issues, many=True).data
-
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues_data, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
+ .annotate(bridge_id=F("issue_module__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .filter(**filters)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
- except Exception as e:
- capture_exception(e)
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+
+ issues_data = IssueStateSerializer(issues, many=True).data
+
+ if sub_group_by and sub_group_by == group_by:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
+ if group_by:
+ return Response(
+ group_results(issues_data, group_by, sub_group_by),
+ status=status.HTTP_200_OK,
+ )
+
+ return Response(
+ issues_data,
+ status=status.HTTP_200_OK,
+ )
+
def create(self, request, slug, project_id, module_id):
- try:
- issues = request.data.get("issues", [])
- if not len(issues):
- return Response(
- {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
- )
- module = Module.objects.get(
- workspace__slug=slug, project_id=project_id, pk=module_id
+ issues = request.data.get("issues", [])
+ if not len(issues):
+ return Response(
+ {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
)
+ module = Module.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=module_id
+ )
- module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
+ module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
- update_module_issue_activity = []
- records_to_update = []
- record_to_create = []
+ update_module_issue_activity = []
+ records_to_update = []
+ record_to_create = []
- for issue in issues:
- module_issue = [
- module_issue
- for module_issue in module_issues
- if str(module_issue.issue_id) in issues
- ]
+ for issue in issues:
+ module_issue = [
+ module_issue
+ for module_issue in module_issues
+ if str(module_issue.issue_id) in issues
+ ]
- if len(module_issue):
- if module_issue[0].module_id != module_id:
- update_module_issue_activity.append(
- {
- "old_module_id": str(module_issue[0].module_id),
- "new_module_id": str(module_id),
- "issue_id": str(module_issue[0].issue_id),
- }
- )
- module_issue[0].module_id = module_id
- records_to_update.append(module_issue[0])
- else:
- record_to_create.append(
- ModuleIssue(
- module=module,
- issue_id=issue,
- project_id=project_id,
- workspace=module.workspace,
- created_by=request.user,
- updated_by=request.user,
- )
+ if len(module_issue):
+ if module_issue[0].module_id != module_id:
+ update_module_issue_activity.append(
+ {
+ "old_module_id": str(module_issue[0].module_id),
+ "new_module_id": str(module_id),
+ "issue_id": str(module_issue[0].issue_id),
+ }
)
+ module_issue[0].module_id = module_id
+ records_to_update.append(module_issue[0])
+ else:
+ record_to_create.append(
+ ModuleIssue(
+ module=module,
+ issue_id=issue,
+ project_id=project_id,
+ workspace=module.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ )
- ModuleIssue.objects.bulk_create(
- record_to_create,
- batch_size=10,
- ignore_conflicts=True,
- )
+ ModuleIssue.objects.bulk_create(
+ record_to_create,
+ batch_size=10,
+ ignore_conflicts=True,
+ )
- ModuleIssue.objects.bulk_update(
- records_to_update,
- ["module"],
- batch_size=10,
- )
+ ModuleIssue.objects.bulk_update(
+ records_to_update,
+ ["module"],
+ batch_size=10,
+ )
- # Capture Issue Activity
- issue_activity.delay(
- type="module.activity.created",
- requested_data=json.dumps({"modules_list": issues}),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "updated_module_issues": update_module_issue_activity,
- "created_module_issues": serializers.serialize(
- "json", record_to_create
- ),
- }
- ),
- epoch=int(timezone.now().timestamp()),
- )
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="module.activity.created",
+ requested_data=json.dumps({"modules_list": issues}),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("pk", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_module_issues": update_module_issue_activity,
+ "created_module_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
- return Response(
- ModuleIssueSerializer(self.get_queryset(), many=True).data,
- status=status.HTTP_200_OK,
- )
- except Module.DoesNotExist:
- return Response(
- {"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ ModuleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
def destroy(self, request, slug, project_id, module_id, pk):
- try:
- module_issue = ModuleIssue.objects.get(
- workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
- )
- module_issue.delete()
- issue_activity.delay(
- type="module.activity.deleted",
- requested_data=json.dumps(
- {
- "module_id": str(module_id),
- "issues": [str(module_issue.issue_id)],
- }
- ),
- actor_id=str(request.user.id),
- issue_id=str(pk),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- )
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ module_issue = ModuleIssue.objects.get(
+ workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
+ )
+ module_issue.delete()
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "module_id": str(module_id),
+ "issues": [str(module_issue.issue_id)],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleLinkViewSet(BaseViewSet):
@@ -590,49 +525,18 @@ class ModuleFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = ModuleFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The module is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = ModuleFavoriteSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(user=request.user, project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, module_id):
- try:
- module_favorite = ModuleFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- module_id=module_id,
- )
- module_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ModuleFavorite.DoesNotExist:
- return Response(
- {"error": "Module is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ module_favorite = ModuleFavorite.objects.get(
+ project=project_id,
+ user=request.user,
+ workspace__slug=slug,
+ module_id=module_id,
+ )
+ module_favorite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py
index 6feca2fb2..978c01bac 100644
--- a/apiserver/plane/api/views/notification.py
+++ b/apiserver/plane/api/views/notification.py
@@ -36,320 +36,239 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
)
def list(self, request, slug):
- try:
- # Get query parameters
- snoozed = request.GET.get("snoozed", "false")
- archived = request.GET.get("archived", "false")
- read = request.GET.get("read", "true")
- type = request.GET.get("type", "all")
+ # Get query parameters
+ snoozed = request.GET.get("snoozed", "false")
+ archived = request.GET.get("archived", "false")
+ read = request.GET.get("read", "true")
+ type = request.GET.get("type", "all")
- notifications = Notification.objects.filter(workspace__slug=slug, receiver_id=request.user.id) \
- .select_related("workspace", "project", "triggered_by", "receiver") \
- .order_by("snoozed_till", "-created_at")
-
- # Filters based on query parameters
- snoozed_filters = {
- "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
- "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
- }
-
- notifications = notifications.filter(snoozed_filters[snoozed])
-
- archived_filters = {
- "true": Q(archived_at__isnull=False),
- "false": Q(archived_at__isnull=True),
- }
-
- notifications = notifications.filter(archived_filters[archived])
-
- if read == "false":
- notifications = notifications.filter(read_at__isnull=True)
-
- # Subscribed issues
- if type == "watching":
- issue_ids = IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Assigned Issues
- if type == "assigned":
- issue_ids = IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Created issues
- if type == "created":
- if WorkspaceMember.objects.filter(
- workspace__slug=slug, member=request.user, role__lt=15
- ).exists():
- notifications = Notification.objects.none()
- else:
- issue_ids = Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True)
- notifications = notifications.filter(
- entity_identifier__in=issue_ids
- )
-
- # Pagination
- if request.GET.get("per_page", False) and request.GET.get("cursor", False):
- return self.paginate(
- request=request,
- queryset=(notifications),
- on_results=lambda notifications: NotificationSerializer(
- notifications, many=True
- ).data,
- )
-
- serializer = NotificationSerializer(notifications, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ notifications = (
+ Notification.objects.filter(
+ workspace__slug=slug, receiver_id=request.user.id
)
+ .select_related("workspace", "project", "triggered_by", "receiver")
+ .order_by("snoozed_till", "-created_at")
+ )
+
+ # Filters based on query parameters
+ snoozed_filters = {
+ "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
+ "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+ }
+
+ notifications = notifications.filter(snoozed_filters[snoozed])
+
+ archived_filters = {
+ "true": Q(archived_at__isnull=False),
+ "false": Q(archived_at__isnull=True),
+ }
+
+ notifications = notifications.filter(archived_filters[archived])
+
+ if read == "false":
+ notifications = notifications.filter(read_at__isnull=True)
+
+ # Subscribed issues
+ if type == "watching":
+ issue_ids = IssueSubscriber.objects.filter(
+ workspace__slug=slug, subscriber_id=request.user.id
+ ).values_list("issue_id", flat=True)
+ notifications = notifications.filter(entity_identifier__in=issue_ids)
+
+ # Assigned Issues
+ if type == "assigned":
+ issue_ids = IssueAssignee.objects.filter(
+ workspace__slug=slug, assignee_id=request.user.id
+ ).values_list("issue_id", flat=True)
+ notifications = notifications.filter(entity_identifier__in=issue_ids)
+
+ # Created issues
+ if type == "created":
+ if WorkspaceMember.objects.filter(
+ workspace__slug=slug, member=request.user, role__lt=15
+ ).exists():
+ notifications = Notification.objects.none()
+ else:
+ issue_ids = Issue.objects.filter(
+ workspace__slug=slug, created_by=request.user
+ ).values_list("pk", flat=True)
+ notifications = notifications.filter(entity_identifier__in=issue_ids)
+
+ # Pagination
+ if request.GET.get("per_page", False) and request.GET.get("cursor", False):
+ return self.paginate(
+ request=request,
+ queryset=(notifications),
+ on_results=lambda notifications: NotificationSerializer(
+ notifications, many=True
+ ).data,
+ )
+
+ serializer = NotificationSerializer(notifications, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- workspace__slug=slug, pk=pk, receiver=request.user
- )
- # Only read_at and snoozed_till can be updated
- notification_data = {
- "snoozed_till": request.data.get("snoozed_till", None),
- }
- serializer = NotificationSerializer(
- notification, data=notification_data, partial=True
- )
+ notification = Notification.objects.get(
+ workspace__slug=slug, pk=pk, receiver=request.user
+ )
+ # Only read_at and snoozed_till can be updated
+ notification_data = {
+ "snoozed_till": request.data.get("snoozed_till", None),
+ }
+ serializer = NotificationSerializer(
+ notification, data=notification_data, partial=True
+ )
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def mark_read(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.read_at = timezone.now()
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ notification = Notification.objects.get(
+ receiver=request.user, workspace__slug=slug, pk=pk
+ )
+ notification.read_at = timezone.now()
+ notification.save()
+ serializer = NotificationSerializer(notification)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def mark_unread(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.read_at = None
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ notification = Notification.objects.get(
+ receiver=request.user, workspace__slug=slug, pk=pk
+ )
+ notification.read_at = None
+ notification.save()
+ serializer = NotificationSerializer(notification)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def archive(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.archived_at = timezone.now()
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ notification = Notification.objects.get(
+ receiver=request.user, workspace__slug=slug, pk=pk
+ )
+ notification.archived_at = timezone.now()
+ notification.save()
+ serializer = NotificationSerializer(notification)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def unarchive(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.archived_at = None
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ notification = Notification.objects.get(
+ receiver=request.user, workspace__slug=slug, pk=pk
+ )
+ notification.archived_at = None
+ notification.save()
+ serializer = NotificationSerializer(notification)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class UnreadNotificationEndpoint(BaseAPIView):
def get(self, request, slug):
- try:
- # Watching Issues Count
- watching_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True),
- ).count()
+ # Watching Issues Count
+ watching_issues_count = Notification.objects.filter(
+ workspace__slug=slug,
+ receiver_id=request.user.id,
+ read_at__isnull=True,
+ archived_at__isnull=True,
+ entity_identifier__in=IssueSubscriber.objects.filter(
+ workspace__slug=slug, subscriber_id=request.user.id
+ ).values_list("issue_id", flat=True),
+ ).count()
- # My Issues Count
- my_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True),
- ).count()
+ # My Issues Count
+ my_issues_count = Notification.objects.filter(
+ workspace__slug=slug,
+ receiver_id=request.user.id,
+ read_at__isnull=True,
+ archived_at__isnull=True,
+ entity_identifier__in=IssueAssignee.objects.filter(
+ workspace__slug=slug, assignee_id=request.user.id
+ ).values_list("issue_id", flat=True),
+ ).count()
- # Created Issues Count
- created_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True),
- ).count()
+ # Created Issues Count
+ created_issues_count = Notification.objects.filter(
+ workspace__slug=slug,
+ receiver_id=request.user.id,
+ read_at__isnull=True,
+ archived_at__isnull=True,
+ entity_identifier__in=Issue.objects.filter(
+ workspace__slug=slug, created_by=request.user
+ ).values_list("pk", flat=True),
+ ).count()
- return Response(
- {
- "watching_issues": watching_issues_count,
- "my_issues": my_issues_count,
- "created_issues": created_issues_count,
- },
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {
+ "watching_issues": watching_issues_count,
+ "my_issues": my_issues_count,
+ "created_issues": created_issues_count,
+ },
+ status=status.HTTP_200_OK,
+ )
class MarkAllReadNotificationViewSet(BaseViewSet):
def create(self, request, slug):
- try:
- snoozed = request.data.get("snoozed", False)
- archived = request.data.get("archived", False)
- type = request.data.get("type", "all")
+ snoozed = request.data.get("snoozed", False)
+ archived = request.data.get("archived", False)
+ type = request.data.get("type", "all")
- notifications = (
- Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- )
- .select_related("workspace", "project", "triggered_by", "receiver")
- .order_by("snoozed_till", "-created_at")
+ notifications = (
+ Notification.objects.filter(
+ workspace__slug=slug,
+ receiver_id=request.user.id,
+ read_at__isnull=True,
+ )
+ .select_related("workspace", "project", "triggered_by", "receiver")
+ .order_by("snoozed_till", "-created_at")
+ )
+
+ # Filter for snoozed notifications
+ if snoozed:
+ notifications = notifications.filter(
+ Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
+ )
+ else:
+ notifications = notifications.filter(
+ Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
)
- # Filter for snoozed notifications
- if snoozed:
- notifications = notifications.filter(
- Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
- )
- else:
- notifications = notifications.filter(
- Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
- )
+        # Filter for archived or unarchived
+ if archived:
+ notifications = notifications.filter(archived_at__isnull=False)
+ else:
+ notifications = notifications.filter(archived_at__isnull=True)
- # Filter for archived or unarchive
- if archived:
- notifications = notifications.filter(archived_at__isnull=False)
- else:
- notifications = notifications.filter(archived_at__isnull=True)
+ # Subscribed issues
+ if type == "watching":
+ issue_ids = IssueSubscriber.objects.filter(
+ workspace__slug=slug, subscriber_id=request.user.id
+ ).values_list("issue_id", flat=True)
+ notifications = notifications.filter(entity_identifier__in=issue_ids)
- # Subscribed issues
- if type == "watching":
- issue_ids = IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True)
+ # Assigned Issues
+ if type == "assigned":
+ issue_ids = IssueAssignee.objects.filter(
+ workspace__slug=slug, assignee_id=request.user.id
+ ).values_list("issue_id", flat=True)
+ notifications = notifications.filter(entity_identifier__in=issue_ids)
+
+ # Created issues
+ if type == "created":
+ if WorkspaceMember.objects.filter(
+ workspace__slug=slug, member=request.user, role__lt=15
+ ).exists():
+ notifications = Notification.objects.none()
+ else:
+ issue_ids = Issue.objects.filter(
+ workspace__slug=slug, created_by=request.user
+ ).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
- # Assigned Issues
- if type == "assigned":
- issue_ids = IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Created issues
- if type == "created":
- if WorkspaceMember.objects.filter(
- workspace__slug=slug, member=request.user, role__lt=15
- ).exists():
- notifications = Notification.objects.none()
- else:
- issue_ids = Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True)
- notifications = notifications.filter(
- entity_identifier__in=issue_ids
- )
-
- updated_notifications = []
- for notification in notifications:
- notification.read_at = timezone.now()
- updated_notifications.append(notification)
- Notification.objects.bulk_update(
- updated_notifications, ["read_at"], batch_size=100
- )
- return Response({"message": "Successful"}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ updated_notifications = []
+ for notification in notifications:
+ notification.read_at = timezone.now()
+ updated_notifications.append(notification)
+ Notification.objects.bulk_update(
+ updated_notifications, ["read_at"], batch_size=100
+ )
+ return Response({"message": "Successful"}, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py
index 4603229f4..04c83813a 100644
--- a/apiserver/plane/api/views/oauth.py
+++ b/apiserver/plane/api/views/oauth.py
@@ -15,6 +15,7 @@ from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status
from sentry_sdk import capture_exception
+
# sso authentication
from google.oauth2 import id_token
from google.auth.transport import requests as google_auth_request
@@ -298,11 +299,3 @@ class OauthEndpoint(BaseAPIView):
},
)
return Response(data, status=status.HTTP_201_CREATED)
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/page.py b/apiserver/plane/api/views/page.py
index d9fad9eaa..fd31cdf14 100644
--- a/apiserver/plane/api/views/page.py
+++ b/apiserver/plane/api/views/page.py
@@ -2,7 +2,6 @@
from datetime import timedelta, datetime, date
# Django imports
-from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, Prefetch
from django.utils import timezone
@@ -78,104 +77,82 @@ class PageViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = PageSerializer(
- data=request.data,
- context={"project_id": project_id, "owned_by_id": request.user.id},
- )
+ serializer = PageSerializer(
+ data=request.data,
+ context={"project_id": project_id, "owned_by_id": request.user.id},
+ )
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, pk):
- try:
- page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
- # Only update access if the page owner is the requesting user
- if (
- page.access != request.data.get("access", page.access)
- and page.owned_by_id != request.user.id
- ):
- return Response(
- {
- "error": "Access cannot be updated since this page is owned by someone else"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- serializer = PageSerializer(page, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Page.DoesNotExist:
+ page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
+ # Only update access if the page owner is the requesting user
+ if (
+ page.access != request.data.get("access", page.access)
+ and page.owned_by_id != request.user.id
+ ):
return Response(
- {"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "Access cannot be updated since this page is owned by someone else"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ serializer = PageSerializer(page, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
- try:
- queryset = self.get_queryset()
- page_view = request.GET.get("page_view", False)
+ queryset = self.get_queryset()
+ page_view = request.GET.get("page_view", False)
- if not page_view:
- return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
+ if not page_view:
+ return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
- # All Pages
- if page_view == "all":
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
+ # All Pages
+ if page_view == "all":
+ return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
- # Recent pages
- if page_view == "recent":
- current_time = date.today()
- day_before = current_time - timedelta(days=1)
- todays_pages = queryset.filter(updated_at__date=date.today())
- yesterdays_pages = queryset.filter(updated_at__date=day_before)
- earlier_this_week = queryset.filter( updated_at__date__range=(
- (timezone.now() - timedelta(days=7)),
- (timezone.now() - timedelta(days=2)),
- ))
- return Response(
- {
- "today": PageSerializer(todays_pages, many=True).data,
- "yesterday": PageSerializer(yesterdays_pages, many=True).data,
- "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
- },
- status=status.HTTP_200_OK,
- )
+ # Recent pages
+ if page_view == "recent":
+ current_time = date.today()
+ day_before = current_time - timedelta(days=1)
+ todays_pages = queryset.filter(updated_at__date=date.today())
+ yesterdays_pages = queryset.filter(updated_at__date=day_before)
+ earlier_this_week = queryset.filter( updated_at__date__range=(
+ (timezone.now() - timedelta(days=7)),
+ (timezone.now() - timedelta(days=2)),
+ ))
+ return Response(
+ {
+ "today": PageSerializer(todays_pages, many=True).data,
+ "yesterday": PageSerializer(yesterdays_pages, many=True).data,
+ "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
+ },
+ status=status.HTTP_200_OK,
+ )
- # Favorite Pages
- if page_view == "favorite":
- queryset = queryset.filter(is_favorite=True)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
-
- # My pages
- if page_view == "created_by_me":
- queryset = queryset.filter(owned_by=request.user)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
+ # Favorite Pages
+ if page_view == "favorite":
+ queryset = queryset.filter(is_favorite=True)
+ return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
+
+ # My pages
+ if page_view == "created_by_me":
+ queryset = queryset.filter(owned_by=request.user)
+ return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
- # Created by other Pages
- if page_view == "created_by_other":
- queryset = queryset.filter(~Q(owned_by=request.user), access=0)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
+ # Created by other Pages
+ if page_view == "created_by_other":
+ queryset = queryset.filter(~Q(owned_by=request.user), access=0)
+ return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
+
+ return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
- return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
class PageBlockViewSet(BaseViewSet):
serializer_class = PageBlockSerializer
@@ -225,53 +202,21 @@ class PageFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = PageFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The page is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = PageFavoriteSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(user=request.user, project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, page_id):
- try:
- page_favorite = PageFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- page_id=page_id,
- )
- page_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except PageFavorite.DoesNotExist:
- return Response(
- {"error": "Page is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
+ page_favorite = PageFavorite.objects.get(
+ project=project_id,
+ user=request.user,
+ workspace__slug=slug,
+ page_id=page_id,
+ )
+ page_favorite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class CreateIssueFromPageBlockEndpoint(BaseAPIView):
permission_classes = [
@@ -279,43 +224,32 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id, page_id, page_block_id):
- try:
- page_block = PageBlock.objects.get(
- pk=page_block_id,
- workspace__slug=slug,
- project_id=project_id,
- page_id=page_id,
- )
- issue = Issue.objects.create(
- name=page_block.name,
- project_id=project_id,
- description=page_block.description,
- description_html=page_block.description_html,
- description_stripped=page_block.description_stripped,
- )
- _ = IssueAssignee.objects.create(
- issue=issue, assignee=request.user, project_id=project_id
- )
+ page_block = PageBlock.objects.get(
+ pk=page_block_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ page_id=page_id,
+ )
+ issue = Issue.objects.create(
+ name=page_block.name,
+ project_id=project_id,
+ description=page_block.description,
+ description_html=page_block.description_html,
+ description_stripped=page_block.description_stripped,
+ )
+ _ = IssueAssignee.objects.create(
+ issue=issue, assignee=request.user, project_id=project_id
+ )
- _ = IssueActivity.objects.create(
- issue=issue,
- actor=request.user,
- project_id=project_id,
- comment=f"created the issue from {page_block.name} block",
- verb="created",
- )
+ _ = IssueActivity.objects.create(
+ issue=issue,
+ actor=request.user,
+ project_id=project_id,
+ comment=f"created the issue from {page_block.name} block",
+ verb="created",
+ )
- page_block.issue = issue
- page_block.save()
+ page_block.issue = issue
+ page_block.save()
- return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
- except PageBlock.DoesNotExist:
- return Response(
- {"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index 4545b5376..1058ac593 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -152,47 +152,40 @@ class ProjectViewSet(BaseViewSet):
)
def list(self, request, slug):
- try:
- fields = [field for field in request.GET.get("fields", "").split(",") if field]
+ fields = [field for field in request.GET.get("fields", "").split(",") if field]
- sort_order_query = ProjectMember.objects.filter(
- member=request.user,
- project_id=OuterRef("pk"),
- workspace__slug=self.kwargs.get("slug"),
- ).values("sort_order")
- projects = (
- self.get_queryset()
- .annotate(sort_order=Subquery(sort_order_query))
- .prefetch_related(
- Prefetch(
- "project_projectmember",
- queryset=ProjectMember.objects.filter(
- workspace__slug=slug,
- ).select_related("member"),
- )
+ sort_order_query = ProjectMember.objects.filter(
+ member=request.user,
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ ).values("sort_order")
+ projects = (
+ self.get_queryset()
+ .annotate(sort_order=Subquery(sort_order_query))
+ .prefetch_related(
+ Prefetch(
+ "project_projectmember",
+ queryset=ProjectMember.objects.filter(
+ workspace__slug=slug,
+ ).select_related("member"),
)
- .order_by("sort_order", "name")
)
- if request.GET.get("per_page", False) and request.GET.get("cursor", False):
- return self.paginate(
- request=request,
- queryset=(projects),
- on_results=lambda projects: ProjectListSerializer(
- projects, many=True
- ).data,
- )
+ .order_by("sort_order", "name")
+ )
+ if request.GET.get("per_page", False) and request.GET.get("cursor", False):
+ return self.paginate(
+ request=request,
+ queryset=(projects),
+ on_results=lambda projects: ProjectListSerializer(
+ projects, many=True
+ ).data,
+ )
- return Response(
- ProjectListSerializer(
- projects, many=True, fields=fields if fields else None
- ).data
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ ProjectListSerializer(
+ projects, many=True, fields=fields if fields else None
+ ).data
+ )
def create(self, request, slug):
try:
@@ -285,12 +278,6 @@ class ProjectViewSet(BaseViewSet):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_410_GONE,
- )
except Workspace.DoesNotExist as e:
return Response(
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
@@ -300,12 +287,6 @@ class ProjectViewSet(BaseViewSet):
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
def partial_update(self, request, slug, pk=None):
try:
@@ -354,12 +335,6 @@ class ProjectViewSet(BaseViewSet):
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
class InviteProjectEndpoint(BaseAPIView):
@@ -368,80 +343,60 @@ class InviteProjectEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id):
- try:
- email = request.data.get("email", False)
- role = request.data.get("role", False)
-
- # Check if email is provided
- if not email:
- return Response(
- {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- validate_email(email)
- # Check if user is already a member of workspace
- if ProjectMember.objects.filter(
- project_id=project_id,
- member__email=email,
- member__is_bot=False,
- ).exists():
- return Response(
- {"error": "User is already member of workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.filter(email=email).first()
-
- if user is None:
- token = jwt.encode(
- {"email": email, "timestamp": datetime.now().timestamp()},
- settings.SECRET_KEY,
- algorithm="HS256",
- )
- project_invitation_obj = ProjectMemberInvite.objects.create(
- email=email.strip().lower(),
- project_id=project_id,
- token=token,
- role=role,
- )
- domain = settings.WEB_URL
- project_invitation.delay(email, project_id, token, domain)
-
- return Response(
- {
- "message": "Email sent successfully",
- "id": project_invitation_obj.id,
- },
- status=status.HTTP_200_OK,
- )
-
- project_member = ProjectMember.objects.create(
- member=user, project_id=project_id, role=role
- )
+ email = request.data.get("email", False)
+ role = request.data.get("role", False)
+ # Check if email is provided
+ if not email:
return Response(
- ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK
+ {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST
)
- except ValidationError:
+ validate_email(email)
+ # Check if user is already a member of workspace
+ if ProjectMember.objects.filter(
+ project_id=project_id,
+ member__email=email,
+ member__is_bot=False,
+ ).exists():
+ return Response(
+ {"error": "User is already member of workspace"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ user = User.objects.filter(email=email).first()
+
+ if user is None:
+ token = jwt.encode(
+ {"email": email, "timestamp": datetime.now().timestamp()},
+ settings.SECRET_KEY,
+ algorithm="HS256",
+ )
+ project_invitation_obj = ProjectMemberInvite.objects.create(
+ email=email.strip().lower(),
+ project_id=project_id,
+ token=token,
+ role=role,
+ )
+ domain = settings.WEB_URL
+ project_invitation.delay(email, project_id, token, domain)
+
return Response(
{
- "error": "Invalid email address provided a valid email address is required to send the invite"
+ "message": "Email sent successfully",
+ "id": project_invitation_obj.id,
},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
- return Response(
- {"error": "Workspace or Project does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ status=status.HTTP_200_OK,
)
+ project_member = ProjectMember.objects.create(
+ member=user, project_id=project_id, role=role
+ )
+
+ return Response(
+ ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK
+ )
+
class UserProjectInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer
@@ -456,34 +411,27 @@ class UserProjectInvitationsViewset(BaseViewSet):
)
def create(self, request):
- try:
- invitations = request.data.get("invitations")
- project_invitations = ProjectMemberInvite.objects.filter(
- pk__in=invitations, accepted=True
- )
- ProjectMember.objects.bulk_create(
- [
- ProjectMember(
- project=invitation.project,
- workspace=invitation.project.workspace,
- member=request.user,
- role=invitation.role,
- created_by=request.user,
- )
- for invitation in project_invitations
- ]
- )
+ invitations = request.data.get("invitations")
+ project_invitations = ProjectMemberInvite.objects.filter(
+ pk__in=invitations, accepted=True
+ )
+ ProjectMember.objects.bulk_create(
+ [
+ ProjectMember(
+ project=invitation.project,
+ workspace=invitation.project.workspace,
+ member=request.user,
+ role=invitation.role,
+ created_by=request.user,
+ )
+ for invitation in project_invitations
+ ]
+ )
- # Delete joined project invites
- project_invitations.delete()
+ # Delete joined project invites
+ project_invitations.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectMemberViewSet(BaseViewSet):
@@ -511,113 +459,88 @@ class ProjectMemberViewSet(BaseViewSet):
)
def partial_update(self, request, slug, project_id, pk):
- try:
- project_member = ProjectMember.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id
- )
- if request.user.id == project_member.member_id:
- return Response(
- {"error": "You cannot update your own role"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Check while updating user roles
- requested_project_member = ProjectMember.objects.get(
- project_id=project_id, workspace__slug=slug, member=request.user
- )
- if (
- "role" in request.data
- and int(request.data.get("role", project_member.role))
- > requested_project_member.role
- ):
- return Response(
- {
- "error": "You cannot update a role that is higher than your own role"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = ProjectMemberSerializer(
- project_member, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except ProjectMember.DoesNotExist:
+ project_member = ProjectMember.objects.get(
+ pk=pk, workspace__slug=slug, project_id=project_id
+ )
+ if request.user.id == project_member.member_id:
return Response(
- {"error": "Project Member does not exist"},
+ {"error": "You cannot update your own role"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+ # Check while updating user roles
+ requested_project_member = ProjectMember.objects.get(
+ project_id=project_id, workspace__slug=slug, member=request.user
+ )
+ if (
+ "role" in request.data
+ and int(request.data.get("role", project_member.role))
+ > requested_project_member.role
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "You cannot update a role that is higher than your own role"},
status=status.HTTP_400_BAD_REQUEST,
)
+ serializer = ProjectMemberSerializer(
+ project_member, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
def destroy(self, request, slug, project_id, pk):
- try:
- project_member = ProjectMember.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- # check requesting user role
- requesting_project_member = ProjectMember.objects.get(
- workspace__slug=slug, member=request.user, project_id=project_id
- )
- if requesting_project_member.role < project_member.role:
- return Response(
- {
- "error": "You cannot remove a user having role higher than yourself"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Remove all favorites
- ProjectFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- CycleFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- ModuleFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- PageFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- IssueViewFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- # Also remove issue from issue assigned
- IssueAssignee.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- assignee=project_member.member,
- ).delete()
-
- # Remove if module member
- ModuleMember.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- member=project_member.member,
- ).delete()
- # Delete owned Pages
- Page.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- owned_by=project_member.member,
- ).delete()
- project_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectMember.DoesNotExist:
+ project_member = ProjectMember.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=pk
+ )
+ # check requesting user role
+ requesting_project_member = ProjectMember.objects.get(
+ workspace__slug=slug, member=request.user, project_id=project_id
+ )
+ if requesting_project_member.role < project_member.role:
return Response(
- {"error": "Project Member does not exist"},
+ {"error": "You cannot remove a user having role higher than yourself"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"})
+
+ # Remove all favorites
+ ProjectFavorite.objects.filter(
+ workspace__slug=slug, project_id=project_id, user=project_member.member
+ ).delete()
+ CycleFavorite.objects.filter(
+ workspace__slug=slug, project_id=project_id, user=project_member.member
+ ).delete()
+ ModuleFavorite.objects.filter(
+ workspace__slug=slug, project_id=project_id, user=project_member.member
+ ).delete()
+ PageFavorite.objects.filter(
+ workspace__slug=slug, project_id=project_id, user=project_member.member
+ ).delete()
+ IssueViewFavorite.objects.filter(
+ workspace__slug=slug, project_id=project_id, user=project_member.member
+ ).delete()
+ # Also remove issue from issue assigned
+ IssueAssignee.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ assignee=project_member.member,
+ ).delete()
+
+ # Remove if module member
+ ModuleMember.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ member=project_member.member,
+ ).delete()
+ # Delete owned Pages
+ Page.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ owned_by=project_member.member,
+ ).delete()
+ project_member.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class AddMemberToProjectEndpoint(BaseAPIView):
@@ -626,73 +549,51 @@ class AddMemberToProjectEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id):
- try:
- members = request.data.get("members", [])
+ members = request.data.get("members", [])
- # get the project
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ # get the project
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
- if not len(members):
- return Response(
- {"error": "Atleast one member is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- bulk_project_members = []
-
- project_members = (
- ProjectMember.objects.filter(
- workspace__slug=slug,
- member_id__in=[member.get("member_id") for member in members],
- )
- .values("member_id", "sort_order")
- .order_by("sort_order")
- )
-
- for member in members:
- sort_order = [
- project_member.get("sort_order")
- for project_member in project_members
- if str(project_member.get("member_id"))
- == str(member.get("member_id"))
- ]
- bulk_project_members.append(
- ProjectMember(
- member_id=member.get("member_id"),
- role=member.get("role", 10),
- project_id=project_id,
- workspace_id=project.workspace_id,
- sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
- )
- )
-
- project_members = ProjectMember.objects.bulk_create(
- bulk_project_members,
- batch_size=10,
- ignore_conflicts=True,
- )
-
- serializer = ProjectMemberSerializer(project_members, many=True)
-
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except KeyError:
+ if not len(members):
return Response(
- {"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except IntegrityError:
- return Response(
- {"error": "User not member of the workspace"},
+ {"error": "Atleast one member is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ bulk_project_members = []
+
+ project_members = (
+ ProjectMember.objects.filter(
+ workspace__slug=slug,
+ member_id__in=[member.get("member_id") for member in members],
)
+ .values("member_id", "sort_order")
+ .order_by("sort_order")
+ )
+
+ for member in members:
+ sort_order = [
+ project_member.get("sort_order")
+ for project_member in project_members
+ if str(project_member.get("member_id")) == str(member.get("member_id"))
+ ]
+ bulk_project_members.append(
+ ProjectMember(
+ member_id=member.get("member_id"),
+ role=member.get("role", 10),
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
+ )
+ )
+
+ project_members = ProjectMember.objects.bulk_create(
+ bulk_project_members,
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ serializer = ProjectMemberSerializer(project_members, many=True)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
class AddTeamToProjectEndpoint(BaseAPIView):
@@ -701,53 +602,35 @@ class AddTeamToProjectEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id):
- try:
- team_members = TeamMember.objects.filter(
- workspace__slug=slug, team__in=request.data.get("teams", [])
- ).values_list("member", flat=True)
+ team_members = TeamMember.objects.filter(
+ workspace__slug=slug, team__in=request.data.get("teams", [])
+ ).values_list("member", flat=True)
- if len(team_members) == 0:
- return Response(
- {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- project_members = []
- for member in team_members:
- project_members.append(
- ProjectMember(
- project_id=project_id,
- member_id=member,
- workspace=workspace,
- created_by=request.user,
- )
- )
-
- ProjectMember.objects.bulk_create(
- project_members, batch_size=10, ignore_conflicts=True
- )
-
- serializer = ProjectMemberSerializer(project_members, many=True)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The team with the name already exists"},
- status=status.HTTP_410_GONE,
- )
- except Workspace.DoesNotExist:
+ if len(team_members) == 0:
return Response(
- {"error": "The requested workspace could not be found"},
- status=status.HTTP_404_NOT_FOUND,
+ {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+
+ workspace = Workspace.objects.get(slug=slug)
+
+ project_members = []
+ for member in team_members:
+ project_members.append(
+ ProjectMember(
+ project_id=project_id,
+ member_id=member,
+ workspace=workspace,
+ created_by=request.user,
+ )
)
+ ProjectMember.objects.bulk_create(
+ project_members, batch_size=10, ignore_conflicts=True
+ )
+
+ serializer = ProjectMemberSerializer(project_members, many=True)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+
class ProjectMemberInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer
@@ -795,165 +678,111 @@ class ProjectIdentifierEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- name = request.GET.get("name", "").strip().upper()
-
- if name == "":
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- exists = ProjectIdentifier.objects.filter(
- name=name, workspace__slug=slug
- ).values("id", "name", "project")
+ name = request.GET.get("name", "").strip().upper()
+ if name == "":
return Response(
- {"exists": len(exists), "identifiers": exists},
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
+ exists = ProjectIdentifier.objects.filter(
+ name=name, workspace__slug=slug
+ ).values("id", "name", "project")
+
+ return Response(
+ {"exists": len(exists), "identifiers": exists},
+ status=status.HTTP_200_OK,
+ )
+
def delete(self, request, slug):
- try:
- name = request.data.get("name", "").strip().upper()
-
- if name == "":
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- if Project.objects.filter(identifier=name, workspace__slug=slug).exists():
- return Response(
- {"error": "Cannot delete an identifier of an existing project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete()
+ name = request.data.get("name", "").strip().upper()
+ if name == "":
return Response(
- status=status.HTTP_204_NO_CONTENT,
+ {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
- except Exception as e:
- capture_exception(e)
+
+ if Project.objects.filter(identifier=name, workspace__slug=slug).exists():
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Cannot delete an identifier of an existing project"},
status=status.HTTP_400_BAD_REQUEST,
)
+ ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete()
+
+ return Response(
+ status=status.HTTP_204_NO_CONTENT,
+ )
+
class ProjectJoinEndpoint(BaseAPIView):
def post(self, request, slug):
- try:
- project_ids = request.data.get("project_ids", [])
+ project_ids = request.data.get("project_ids", [])
- # Get the workspace user role
- workspace_member = WorkspaceMember.objects.get(
- member=request.user, workspace__slug=slug
- )
+ # Get the workspace user role
+ workspace_member = WorkspaceMember.objects.get(
+ member=request.user, workspace__slug=slug
+ )
- workspace_role = workspace_member.role
- workspace = workspace_member.workspace
+ workspace_role = workspace_member.role
+ workspace = workspace_member.workspace
- ProjectMember.objects.bulk_create(
- [
- ProjectMember(
- project_id=project_id,
- member=request.user,
- role=20
- if workspace_role >= 15
- else (15 if workspace_role == 10 else workspace_role),
- workspace=workspace,
- created_by=request.user,
- )
- for project_id in project_ids
- ],
- ignore_conflicts=True,
- )
+ ProjectMember.objects.bulk_create(
+ [
+ ProjectMember(
+ project_id=project_id,
+ member=request.user,
+ role=20
+ if workspace_role >= 15
+ else (15 if workspace_role == 10 else workspace_role),
+ workspace=workspace,
+ created_by=request.user,
+ )
+ for project_id in project_ids
+ ],
+ ignore_conflicts=True,
+ )
- return Response(
- {"message": "Projects joined successfully"},
- status=status.HTTP_201_CREATED,
- )
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "User is not a member of workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {"message": "Projects joined successfully"},
+ status=status.HTTP_201_CREATED,
+ )
class ProjectUserViewsEndpoint(BaseAPIView):
def post(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
- project_member = ProjectMember.objects.filter(
- member=request.user, project=project
- ).first()
+ project_member = ProjectMember.objects.filter(
+ member=request.user, project=project
+ ).first()
- if project_member is None:
- return Response(
- {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN
- )
+ if project_member is None:
+ return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- view_props = project_member.view_props
- default_props = project_member.default_props
- preferences = project_member.preferences
- sort_order = project_member.sort_order
+ view_props = project_member.view_props
+ default_props = project_member.default_props
+ preferences = project_member.preferences
+ sort_order = project_member.sort_order
- project_member.view_props = request.data.get("view_props", view_props)
- project_member.default_props = request.data.get(
- "default_props", default_props
- )
- project_member.preferences = request.data.get("preferences", preferences)
- project_member.sort_order = request.data.get("sort_order", sort_order)
+ project_member.view_props = request.data.get("view_props", view_props)
+ project_member.default_props = request.data.get("default_props", default_props)
+ project_member.preferences = request.data.get("preferences", preferences)
+ project_member.sort_order = request.data.get("sort_order", sort_order)
- project_member.save()
+ project_member.save()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Project.DoesNotExist:
- return Response(
- {"error": "The requested resource does not exists"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectMemberUserEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
- try:
- project_member = ProjectMember.objects.get(
- project_id=project_id, workspace__slug=slug, member=request.user
- )
- serializer = ProjectMemberSerializer(project_member)
+ project_member = ProjectMember.objects.get(
+ project_id=project_id, workspace__slug=slug, member=request.user
+ )
+ serializer = ProjectMemberSerializer(project_member)
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- except ProjectMember.DoesNotExist:
- return Response(
- {"error": "User not a member of the project"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectFavoritesViewSet(BaseViewSet):
@@ -976,50 +805,18 @@ class ProjectFavoritesViewSet(BaseViewSet):
serializer.save(user=self.request.user)
def create(self, request, slug):
- try:
- serializer = ProjectFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- print(str(e))
- if "already exists" in str(e):
- return Response(
- {"error": "The project is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = ProjectFavoriteSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(user=request.user)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id):
- try:
- project_favorite = ProjectFavorite.objects.get(
- project=project_id, user=request.user, workspace__slug=slug
- )
- project_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectFavorite.DoesNotExist:
- return Response(
- {"error": "Project is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ project_favorite = ProjectFavorite.objects.get(
+ project=project_id, user=request.user, workspace__slug=slug
+ )
+ project_favorite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectDeployBoardViewSet(BaseViewSet):
@@ -1041,42 +838,35 @@ class ProjectDeployBoardViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- comments = request.data.get("comments", False)
- reactions = request.data.get("reactions", False)
- inbox = request.data.get("inbox", None)
- votes = request.data.get("votes", False)
- views = request.data.get(
- "views",
- {
- "list": True,
- "kanban": True,
- "calendar": True,
- "gantt": True,
- "spreadsheet": True,
- },
- )
+ comments = request.data.get("comments", False)
+ reactions = request.data.get("reactions", False)
+ inbox = request.data.get("inbox", None)
+ votes = request.data.get("votes", False)
+ views = request.data.get(
+ "views",
+ {
+ "list": True,
+ "kanban": True,
+ "calendar": True,
+ "gantt": True,
+ "spreadsheet": True,
+ },
+ )
- project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
- anchor=f"{slug}/{project_id}",
- project_id=project_id,
- )
- project_deploy_board.comments = comments
- project_deploy_board.reactions = reactions
- project_deploy_board.inbox = inbox
- project_deploy_board.votes = votes
- project_deploy_board.views = views
+ project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
+ anchor=f"{slug}/{project_id}",
+ project_id=project_id,
+ )
+ project_deploy_board.comments = comments
+ project_deploy_board.reactions = reactions
+ project_deploy_board.inbox = inbox
+ project_deploy_board.votes = votes
+ project_deploy_board.views = views
- project_deploy_board.save()
+ project_deploy_board.save()
- serializer = ProjectDeployBoardSerializer(project_deploy_board)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = ProjectDeployBoardSerializer(project_deploy_board)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectMemberEndpoint(BaseAPIView):
@@ -1085,20 +875,13 @@ class ProjectMemberEndpoint(BaseAPIView):
]
def get(self, request, slug, project_id):
- try:
- project_members = ProjectMember.objects.filter(
- project_id=project_id,
- workspace__slug=slug,
- member__is_bot=False,
- ).select_related("project", "member", "workspace")
- serializer = ProjectMemberSerializer(project_members, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ project_members = ProjectMember.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ member__is_bot=False,
+ ).select_related("project", "member", "workspace")
+ serializer = ProjectMemberSerializer(project_members, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
@@ -1107,23 +890,11 @@ class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
]
def get(self, request, slug, project_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
- serializer = ProjectDeployBoardSerializer(project_deploy_board)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project Deploy Board does not exists"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ project_deploy_board = ProjectDeployBoard.objects.get(
+ workspace__slug=slug, project_id=project_id
+ )
+ serializer = ProjectDeployBoardSerializer(project_deploy_board)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
@@ -1132,34 +903,27 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- projects = (
- Project.objects.filter(workspace__slug=slug)
- .annotate(
- is_public=Exists(
- ProjectDeployBoard.objects.filter(
- workspace__slug=slug, project_id=OuterRef("pk")
- )
+ projects = (
+ Project.objects.filter(workspace__slug=slug)
+ .annotate(
+ is_public=Exists(
+ ProjectDeployBoard.objects.filter(
+ workspace__slug=slug, project_id=OuterRef("pk")
)
)
- .filter(is_public=True)
- ).values(
- "id",
- "identifier",
- "name",
- "description",
- "emoji",
- "icon_prop",
- "cover_image",
)
+ .filter(is_public=True)
+ ).values(
+ "id",
+ "identifier",
+ "name",
+ "description",
+ "emoji",
+ "icon_prop",
+ "cover_image",
+ )
- return Response(projects, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(projects, status=status.HTTP_200_OK)
class LeaveProjectEndpoint(BaseAPIView):
@@ -1168,43 +932,31 @@ class LeaveProjectEndpoint(BaseAPIView):
]
def delete(self, request, slug, project_id):
- try:
- project_member = ProjectMember.objects.get(
- workspace__slug=slug,
- member=request.user,
- project_id=project_id,
- )
+ project_member = ProjectMember.objects.get(
+ workspace__slug=slug,
+ member=request.user,
+ project_id=project_id,
+ )
- # Only Admin case
- if (
- project_member.role == 20
- and ProjectMember.objects.filter(
- workspace__slug=slug,
- role=20,
- project_id=project_id,
- ).count()
- == 1
- ):
- return Response(
- {
- "error": "You cannot leave the project since you are the only admin of the project you should delete the project"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Delete the member from workspace
- project_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectMember.DoesNotExist:
+ # Only Admin case
+ if (
+ project_member.role == 20
+ and ProjectMember.objects.filter(
+ workspace__slug=slug,
+ role=20,
+ project_id=project_id,
+ ).count()
+ == 1
+ ):
return Response(
- {"error": "Workspace member does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "You cannot leave the project since you are the only admin of the project you should delete the project"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ # Delete the member from workspace
+ project_member.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectPublicCoverImagesEndpoint(BaseAPIView):
@@ -1213,30 +965,26 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
]
def get(self, request):
- try:
- files = []
- s3 = boto3.client(
- "s3",
- aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
- aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
- )
- params = {
- "Bucket": settings.AWS_S3_BUCKET_NAME,
- "Prefix": "static/project-cover/",
- }
+ files = []
+ s3 = boto3.client(
+ "s3",
+ aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+ )
+ params = {
+ "Bucket": settings.AWS_S3_BUCKET_NAME,
+ "Prefix": "static/project-cover/",
+ }
- response = s3.list_objects_v2(**params)
- # Extracting file keys from the response
- if "Contents" in response:
- for content in response["Contents"]:
- if not content["Key"].endswith(
- "/"
- ): # This line ensures we're only getting files, not "sub-folders"
- files.append(
- f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
- )
+ response = s3.list_objects_v2(**params)
+ # Extracting file keys from the response
+ if "Contents" in response:
+ for content in response["Contents"]:
+ if not content["Key"].endswith(
+ "/"
+ ): # This line ensures we're only getting files, not "sub-folders"
+ files.append(
+ f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
+ )
- return Response(files, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response([], status=status.HTTP_200_OK)
+ return Response(files, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/search.py b/apiserver/plane/api/views/search.py
index 35b75ce67..ff7431543 100644
--- a/apiserver/plane/api/views/search.py
+++ b/apiserver/plane/api/views/search.py
@@ -168,126 +168,107 @@ class GlobalSearchEndpoint(BaseAPIView):
)
def get(self, request, slug):
- try:
- query = request.query_params.get("search", False)
- workspace_search = request.query_params.get("workspace_search", "false")
- project_id = request.query_params.get("project_id", False)
+ query = request.query_params.get("search", False)
+ workspace_search = request.query_params.get("workspace_search", "false")
+ project_id = request.query_params.get("project_id", False)
- if not query:
- return Response(
- {
- "results": {
- "workspace": [],
- "project": [],
- "issue": [],
- "cycle": [],
- "module": [],
- "issue_view": [],
- "page": [],
- }
- },
- status=status.HTTP_200_OK,
- )
-
- MODELS_MAPPER = {
- "workspace": self.filter_workspaces,
- "project": self.filter_projects,
- "issue": self.filter_issues,
- "cycle": self.filter_cycles,
- "module": self.filter_modules,
- "issue_view": self.filter_views,
- "page": self.filter_pages,
- }
-
- results = {}
-
- for model in MODELS_MAPPER.keys():
- func = MODELS_MAPPER.get(model, None)
- results[model] = func(query, slug, project_id, workspace_search)
- return Response({"results": results}, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
+ if not query:
return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ {
+ "results": {
+ "workspace": [],
+ "project": [],
+ "issue": [],
+ "cycle": [],
+ "module": [],
+ "issue_view": [],
+ "page": [],
+ }
+ },
+ status=status.HTTP_200_OK,
)
+ MODELS_MAPPER = {
+ "workspace": self.filter_workspaces,
+ "project": self.filter_projects,
+ "issue": self.filter_issues,
+ "cycle": self.filter_cycles,
+ "module": self.filter_modules,
+ "issue_view": self.filter_views,
+ "page": self.filter_pages,
+ }
+
+ results = {}
+
+ for model in MODELS_MAPPER.keys():
+ func = MODELS_MAPPER.get(model, None)
+ results[model] = func(query, slug, project_id, workspace_search)
+ return Response({"results": results}, status=status.HTTP_200_OK)
+
class IssueSearchEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
- try:
- query = request.query_params.get("search", False)
- workspace_search = request.query_params.get("workspace_search", "false")
- parent = request.query_params.get("parent", "false")
- issue_relation = request.query_params.get("issue_relation", "false")
- cycle = request.query_params.get("cycle", "false")
- module = request.query_params.get("module", "false")
- sub_issue = request.query_params.get("sub_issue", "false")
+ query = request.query_params.get("search", False)
+ workspace_search = request.query_params.get("workspace_search", "false")
+ parent = request.query_params.get("parent", "false")
+ issue_relation = request.query_params.get("issue_relation", "false")
+ cycle = request.query_params.get("cycle", "false")
+ module = request.query_params.get("module", "false")
+ sub_issue = request.query_params.get("sub_issue", "false")
- issue_id = request.query_params.get("issue_id", False)
+ issue_id = request.query_params.get("issue_id", False)
- issues = Issue.issue_objects.filter(
- workspace__slug=slug,
- project__project_projectmember__member=self.request.user,
- )
+ issues = Issue.issue_objects.filter(
+ workspace__slug=slug,
+ project__project_projectmember__member=self.request.user,
+ )
- if workspace_search == "false":
- issues = issues.filter(project_id=project_id)
+ if workspace_search == "false":
+ issues = issues.filter(project_id=project_id)
- if query:
- issues = search_issues(query, issues)
+ if query:
+ issues = search_issues(query, issues)
- if parent == "true" and issue_id:
- issue = Issue.issue_objects.get(pk=issue_id)
- issues = issues.filter(
- ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
- ).exclude(
- pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
- "parent_id", flat=True
- )
+ if parent == "true" and issue_id:
+ issue = Issue.issue_objects.get(pk=issue_id)
+ issues = issues.filter(
+ ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
+ ).exclude(
+ pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
+ "parent_id", flat=True
)
- if issue_relation == "true" and issue_id:
- issue = Issue.issue_objects.get(pk=issue_id)
- issues = issues.filter(
- ~Q(pk=issue_id),
- ~Q(issue_related__issue=issue),
- ~Q(issue_relation__related_issue=issue),
- )
- if sub_issue == "true" and issue_id:
- issue = Issue.issue_objects.get(pk=issue_id)
- issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
- if issue.parent:
- issues = issues.filter(~Q(pk=issue.parent_id))
-
- if cycle == "true":
- issues = issues.exclude(issue_cycle__isnull=False)
-
- if module == "true":
- issues = issues.exclude(issue_module__isnull=False)
-
- return Response(
- issues.values(
- "name",
- "id",
- "sequence_id",
- "project__name",
- "project__identifier",
- "project_id",
- "workspace__slug",
- "state__name",
- "state__group",
- "state__color",
- ),
- status=status.HTTP_200_OK,
)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- print(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ if issue_relation == "true" and issue_id:
+ issue = Issue.issue_objects.get(pk=issue_id)
+ issues = issues.filter(
+ ~Q(pk=issue_id),
+ ~Q(issue_related__issue=issue),
+ ~Q(issue_relation__related_issue=issue),
)
+ if sub_issue == "true" and issue_id:
+ issue = Issue.issue_objects.get(pk=issue_id)
+ issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
+ if issue.parent:
+ issues = issues.filter(~Q(pk=issue.parent_id))
+
+ if cycle == "true":
+ issues = issues.exclude(issue_cycle__isnull=False)
+
+ if module == "true":
+ issues = issues.exclude(issue_module__isnull=False)
+
+ return Response(
+ issues.values(
+ "name",
+ "id",
+ "sequence_id",
+ "project__name",
+ "project__identifier",
+ "project_id",
+ "workspace__slug",
+ "state__name",
+ "state__group",
+ "state__color",
+ ),
+ status=status.HTTP_200_OK,
+ )
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 4fe0c8260..063abf0e3 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -2,7 +2,6 @@
from itertools import groupby
# Django imports
-from django.db import IntegrityError
from django.db.models import Q
# Third party imports
@@ -41,67 +40,45 @@ class StateViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = StateSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(project_id=project_id)
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError:
- return Response(
- {"error": "State with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = StateSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
- try:
- state_dict = dict()
- states = StateSerializer(self.get_queryset(), many=True).data
+ state_dict = dict()
+ states = StateSerializer(self.get_queryset(), many=True).data
- for key, value in groupby(
- sorted(states, key=lambda state: state["group"]),
- lambda state: state.get("group"),
- ):
- state_dict[str(key)] = list(value)
+ for key, value in groupby(
+ sorted(states, key=lambda state: state["group"]),
+ lambda state: state.get("group"),
+ ):
+ state_dict[str(key)] = list(value)
- return Response(state_dict, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ return Response(state_dict, status=status.HTTP_200_OK)
+
+ def destroy(self, request, slug, project_id, pk):
+ state = State.objects.get(
+ ~Q(name="Triage"),
+ pk=pk, project_id=project_id, workspace__slug=slug,
+ )
+
+ if state.default:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # Check for any issues in the state
+ issue_exist = Issue.issue_objects.filter(state=pk).exists()
+
+ if issue_exist:
+ return Response(
+ {
+ "error": "The state is not empty, only empty states can be deleted"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
- def destroy(self, request, slug, project_id, pk):
- try:
- state = State.objects.get(
- ~Q(name="Triage"),
- pk=pk, project_id=project_id, workspace__slug=slug,
- )
-
- if state.default:
- return Response(
- {"error": "Default state cannot be deleted"}, status=False
- )
-
- # Check for any issues in the state
- issue_exist = Issue.issue_objects.filter(state=pk).exists()
-
- if issue_exist:
- return Response(
- {
- "error": "The state is not empty, only empty states can be deleted"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- state.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except State.DoesNotExist:
- return Response({"error": "State does not exists"}, status=status.HTTP_404)
+ state.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py
index adb7a0eab..f17b176ba 100644
--- a/apiserver/plane/api/views/user.py
+++ b/apiserver/plane/api/views/user.py
@@ -32,82 +32,43 @@ class UserEndpoint(BaseViewSet):
return self.request.user
def retrieve(self, request):
- try:
- serialized_data = UserMeSerializer(request.user).data
- return Response(
- serialized_data,
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serialized_data = UserMeSerializer(request.user).data
+ return Response(
+ serialized_data,
+ status=status.HTTP_200_OK,
+ )
def retrieve_user_settings(self, request):
- try:
- serialized_data = UserMeSettingsSerializer(request.user).data
- return Response(serialized_data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serialized_data = UserMeSettingsSerializer(request.user).data
+ return Response(serialized_data, status=status.HTTP_200_OK)
class UpdateUserOnBoardedEndpoint(BaseAPIView):
def patch(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
- user.is_onboarded = request.data.get("is_onboarded", False)
- user.save()
- return Response(
- {"message": "Updated successfully"}, status=status.HTTP_200_OK
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ user = User.objects.get(pk=request.user.id)
+ user.is_onboarded = request.data.get("is_onboarded", False)
+ user.save()
+ return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
class UpdateUserTourCompletedEndpoint(BaseAPIView):
def patch(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
- user.is_tour_completed = request.data.get("is_tour_completed", False)
- user.save()
- return Response(
- {"message": "Updated successfully"}, status=status.HTTP_200_OK
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ user = User.objects.get(pk=request.user.id)
+ user.is_tour_completed = request.data.get("is_tour_completed", False)
+ user.save()
+ return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
class UserActivityEndpoint(BaseAPIView, BasePaginator):
def get(self, request, slug):
- try:
- queryset = IssueActivity.objects.filter(
- actor=request.user, workspace__slug=slug
- ).select_related("actor", "workspace", "issue", "project")
+ queryset = IssueActivity.objects.filter(
+ actor=request.user, workspace__slug=slug
+ ).select_related("actor", "workspace", "issue", "project")
- return self.paginate(
- request=request,
- queryset=queryset,
- on_results=lambda issue_activities: IssueActivitySerializer(
- issue_activities, many=True
- ).data,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return self.paginate(
+ request=request,
+ queryset=queryset,
+ on_results=lambda issue_activities: IssueActivitySerializer(
+ issue_activities, many=True
+ ).data,
+ )
diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py
index 938dc1382..0e4b074c6 100644
--- a/apiserver/plane/api/views/view.py
+++ b/apiserver/plane/api/views/view.py
@@ -13,7 +13,6 @@ from django.db.models import (
)
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
from django.db.models import Prefetch, OuterRef, Exists
# Third party imports
@@ -97,119 +96,111 @@ class GlobalViewIssuesViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug):
- try:
- filters = issue_filters(request.query_params, "GET")
+ filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
+ order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .filter(project__project_projectmember__member=self.request.user)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ issue_queryset = (
+ self.get_queryset()
+ .filter(**filters)
+ .filter(project__project_projectmember__member=self.request.user)
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
)
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order
+ if order_by_param == "priority"
+ else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
)
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
)
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
)
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
- return Response(issues, status=status.HTTP_200_OK)
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
- except Exception as e:
- capture_exception(e)
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ sub_group_by = request.GET.get("sub_group_by", False)
+ if sub_group_by and sub_group_by == group_by:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
+
+ if group_by:
+ return Response(
+ group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+ )
+
+ return Response(issues, status=status.HTTP_200_OK)
class IssueViewViewSet(BaseViewSet):
@@ -257,49 +248,18 @@ class IssueViewFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
- try:
- serializer = IssueViewFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The view is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = IssueViewFavoriteSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(user=request.user, project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, view_id):
- try:
- view_favourite = IssueViewFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- view_id=view_id,
- )
- view_favourite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueViewFavorite.DoesNotExist:
- return Response(
- {"error": "View is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ view_favourite = IssueViewFavorite.objects.get(
+ project=project_id,
+ user=request.user,
+ workspace__slug=slug,
+ view_id=view_id,
+ )
+ view_favourite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py
index b692dc345..9aa0ebcd9 100644
--- a/apiserver/plane/api/views/workspace.py
+++ b/apiserver/plane/api/views/workspace.py
@@ -165,23 +165,12 @@ class WorkSpaceViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
- ## Handling unique integrity error for now
- ## TODO: Extend this to handle other common errors which are not automatically handled by APIException
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong please try again later",
- "identifier": None,
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
class UserWorkSpacesEndpoint(BaseAPIView):
@@ -193,70 +182,53 @@ class UserWorkSpacesEndpoint(BaseAPIView):
]
def get(self, request):
- try:
- member_count = (
- WorkspaceMember.objects.filter(
- workspace=OuterRef("id"), member__is_bot=False
+ member_count = (
+ WorkspaceMember.objects.filter(
+ workspace=OuterRef("id"), member__is_bot=False
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+
+ issue_count = (
+ Issue.issue_objects.filter(workspace=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+
+ workspace = (
+ (
+ Workspace.objects.prefetch_related(
+ Prefetch("workspace_member", queryset=WorkspaceMember.objects.all())
)
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
-
- issue_count = (
- Issue.issue_objects.filter(workspace=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
-
- workspace = (
- (
- Workspace.objects.prefetch_related(
- Prefetch(
- "workspace_member", queryset=WorkspaceMember.objects.all()
- )
- )
- .filter(
- workspace_member__member=request.user,
- )
- .select_related("owner")
+ .filter(
+ workspace_member__member=request.user,
)
- .annotate(total_members=member_count)
- .annotate(total_issues=issue_count)
+ .select_related("owner")
)
+ .annotate(total_members=member_count)
+ .annotate(total_issues=issue_count)
+ )
- serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView):
def get(self, request):
- try:
- slug = request.GET.get("slug", False)
+ slug = request.GET.get("slug", False)
- if not slug or slug == "":
- return Response(
- {"error": "Workspace Slug is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.filter(slug=slug).exists()
- return Response({"status": not workspace}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ if not slug or slug == "":
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Workspace Slug is required"},
status=status.HTTP_400_BAD_REQUEST,
)
+ workspace = Workspace.objects.filter(slug=slug).exists()
+ return Response({"status": not workspace}, status=status.HTTP_200_OK)
+
class InviteWorkspaceEndpoint(BaseAPIView):
permission_classes = [
@@ -264,126 +236,113 @@ class InviteWorkspaceEndpoint(BaseAPIView):
]
def post(self, request, slug):
- try:
- emails = request.data.get("emails", False)
- # Check if email is provided
- if not emails or not len(emails):
- return Response(
- {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # check for role level
- requesting_user = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
+ emails = request.data.get("emails", False)
+ # Check if email is provided
+ if not emails or not len(emails):
+ return Response(
+ {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST
)
- if len(
- [
- email
- for email in emails
- if int(email.get("role", 10)) > requesting_user.role
- ]
- ):
- return Response(
- {"error": "You cannot invite a user with higher role"},
- status=status.HTTP_400_BAD_REQUEST,
+
+ # check for role level
+ requesting_user = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
+ if len(
+ [
+ email
+ for email in emails
+ if int(email.get("role", 10)) > requesting_user.role
+ ]
+ ):
+ return Response(
+ {"error": "You cannot invite a user with higher role"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ workspace = Workspace.objects.get(slug=slug)
+
+ # Check if user is already a member of workspace
+ workspace_members = WorkspaceMember.objects.filter(
+ workspace_id=workspace.id,
+ member__email__in=[email.get("email") for email in emails],
+ ).select_related("member", "workspace", "workspace__owner")
+
+ if len(workspace_members):
+ return Response(
+ {
+ "error": "Some users are already member of workspace",
+ "workspace_users": WorkSpaceMemberSerializer(
+ workspace_members, many=True
+ ).data,
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ workspace_invitations = []
+ for email in emails:
+ try:
+ validate_email(email.get("email"))
+ workspace_invitations.append(
+ WorkspaceMemberInvite(
+ email=email.get("email").strip().lower(),
+ workspace_id=workspace.id,
+ token=jwt.encode(
+ {
+ "email": email,
+ "timestamp": datetime.now().timestamp(),
+ },
+ settings.SECRET_KEY,
+ algorithm="HS256",
+ ),
+ role=email.get("role", 10),
+ created_by=request.user,
+ )
)
-
- workspace = Workspace.objects.get(slug=slug)
-
- # Check if user is already a member of workspace
- workspace_members = WorkspaceMember.objects.filter(
- workspace_id=workspace.id,
- member__email__in=[email.get("email") for email in emails],
- ).select_related("member", "workspace", "workspace__owner")
-
- if len(workspace_members):
+ except ValidationError:
return Response(
{
- "error": "Some users are already member of workspace",
- "workspace_users": WorkSpaceMemberSerializer(
- workspace_members, many=True
- ).data,
+ "error": f"Invalid email - {email} provided a valid email address is required to send the invite"
},
status=status.HTTP_400_BAD_REQUEST,
)
+ WorkspaceMemberInvite.objects.bulk_create(
+ workspace_invitations, batch_size=10, ignore_conflicts=True
+ )
- workspace_invitations = []
- for email in emails:
- try:
- validate_email(email.get("email"))
- workspace_invitations.append(
- WorkspaceMemberInvite(
- email=email.get("email").strip().lower(),
- workspace_id=workspace.id,
- token=jwt.encode(
- {
- "email": email,
- "timestamp": datetime.now().timestamp(),
- },
- settings.SECRET_KEY,
- algorithm="HS256",
- ),
- role=email.get("role", 10),
- created_by=request.user,
- )
+ workspace_invitations = WorkspaceMemberInvite.objects.filter(
+ email__in=[email.get("email") for email in emails]
+ ).select_related("workspace")
+
+ # create the user if signup is disabled
+ if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
+ _ = User.objects.bulk_create(
+ [
+ User(
+ username=str(uuid4().hex),
+ email=invitation.email,
+ password=make_password(uuid4().hex),
+ is_password_autoset=True,
)
- except ValidationError:
- return Response(
- {
- "error": f"Invalid email - {email} provided a valid email address is required to send the invite"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- WorkspaceMemberInvite.objects.bulk_create(
- workspace_invitations, batch_size=10, ignore_conflicts=True
+ for invitation in workspace_invitations
+ ],
+ batch_size=100,
)
- workspace_invitations = WorkspaceMemberInvite.objects.filter(
- email__in=[email.get("email") for email in emails]
- ).select_related("workspace")
-
- # create the user if signup is disabled
- if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
- _ = User.objects.bulk_create(
- [
- User(
- username=str(uuid4().hex),
- email=invitation.email,
- password=make_password(uuid4().hex),
- is_password_autoset=True,
- )
- for invitation in workspace_invitations
- ],
- batch_size=100,
- )
-
- for invitation in workspace_invitations:
- workspace_invitation.delay(
- invitation.email,
- workspace.id,
- invitation.token,
- settings.WEB_URL,
- request.user.email,
- )
-
- return Response(
- {
- "message": "Emails sent successfully",
- },
- status=status.HTTP_200_OK,
+ for invitation in workspace_invitations:
+ workspace_invitation.delay(
+ invitation.email,
+ workspace.id,
+ invitation.token,
+ settings.WEB_URL,
+ request.user.email,
)
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {
+ "message": "Emails sent successfully",
+ },
+ status=status.HTTP_200_OK,
+ )
class JoinWorkspaceEndpoint(BaseAPIView):
@@ -392,68 +351,55 @@ class JoinWorkspaceEndpoint(BaseAPIView):
]
def post(self, request, slug, pk):
- try:
- workspace_invite = WorkspaceMemberInvite.objects.get(
- pk=pk, workspace__slug=slug
+ workspace_invite = WorkspaceMemberInvite.objects.get(
+ pk=pk, workspace__slug=slug
+ )
+
+ email = request.data.get("email", "")
+
+ if email == "" or workspace_invite.email != email:
+ return Response(
+ {"error": "You do not have permission to join the workspace"},
+ status=status.HTTP_403_FORBIDDEN,
)
- email = request.data.get("email", "")
+ if workspace_invite.responded_at is None:
+ workspace_invite.accepted = request.data.get("accepted", False)
+ workspace_invite.responded_at = timezone.now()
+ workspace_invite.save()
- if email == "" or workspace_invite.email != email:
- return Response(
- {"error": "You do not have permission to join the workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
+ if workspace_invite.accepted:
+            # Check if the user created an account after the invitation was sent
+ user = User.objects.filter(email=email).first()
- if workspace_invite.responded_at is None:
- workspace_invite.accepted = request.data.get("accepted", False)
- workspace_invite.responded_at = timezone.now()
- workspace_invite.save()
-
- if workspace_invite.accepted:
- # Check if the user created account after invitation
- user = User.objects.filter(email=email).first()
-
- # If the user is present then create the workspace member
- if user is not None:
- WorkspaceMember.objects.create(
- workspace=workspace_invite.workspace,
- member=user,
- role=workspace_invite.role,
- )
-
- user.last_workspace_id = workspace_invite.workspace.id
- user.save()
-
- # Delete the invitation
- workspace_invite.delete()
-
- return Response(
- {"message": "Workspace Invitation Accepted"},
- status=status.HTTP_200_OK,
+ # If the user is present then create the workspace member
+ if user is not None:
+ WorkspaceMember.objects.create(
+ workspace=workspace_invite.workspace,
+ member=user,
+ role=workspace_invite.role,
)
+ user.last_workspace_id = workspace_invite.workspace.id
+ user.save()
+
+ # Delete the invitation
+ workspace_invite.delete()
+
return Response(
- {"message": "Workspace Invitation was not accepted"},
+ {"message": "Workspace Invitation Accepted"},
status=status.HTTP_200_OK,
)
return Response(
- {"error": "You have already responded to the invitation request"},
- status=status.HTTP_400_BAD_REQUEST,
+ {"message": "Workspace Invitation was not accepted"},
+ status=status.HTTP_200_OK,
)
- except WorkspaceMemberInvite.DoesNotExist:
- return Response(
- {"error": "The invitation either got expired or could not be found"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {"error": "You have already responded to the invitation request"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
class WorkspaceInvitationsViewset(BaseViewSet):
@@ -473,28 +419,16 @@ class WorkspaceInvitationsViewset(BaseViewSet):
)
def destroy(self, request, slug, pk):
- try:
- workspace_member_invite = WorkspaceMemberInvite.objects.get(
- pk=pk, workspace__slug=slug
- )
- # delete the user if signup is disabled
- if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
- user = User.objects.filter(email=workspace_member_invite.email).first()
- if user is not None:
- user.delete()
- workspace_member_invite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMemberInvite.DoesNotExist:
- return Response(
- {"error": "Workspace member invite does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ workspace_member_invite = WorkspaceMemberInvite.objects.get(
+ pk=pk, workspace__slug=slug
+ )
+ # delete the user if signup is disabled
+ if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
+ user = User.objects.filter(email=workspace_member_invite.email).first()
+ if user is not None:
+ user.delete()
+ workspace_member_invite.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
class UserWorkspaceInvitationsEndpoint(BaseViewSet):
@@ -511,35 +445,26 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
)
def create(self, request):
- try:
- invitations = request.data.get("invitations")
- workspace_invitations = WorkspaceMemberInvite.objects.filter(
- pk__in=invitations
- )
+ invitations = request.data.get("invitations")
+ workspace_invitations = WorkspaceMemberInvite.objects.filter(pk__in=invitations)
- WorkspaceMember.objects.bulk_create(
- [
- WorkspaceMember(
- workspace=invitation.workspace,
- member=request.user,
- role=invitation.role,
- created_by=request.user,
- )
- for invitation in workspace_invitations
- ],
- ignore_conflicts=True,
- )
+ WorkspaceMember.objects.bulk_create(
+ [
+ WorkspaceMember(
+ workspace=invitation.workspace,
+ member=request.user,
+ role=invitation.role,
+ created_by=request.user,
+ )
+ for invitation in workspace_invitations
+ ],
+ ignore_conflicts=True,
+ )
- # Delete joined workspace invites
- workspace_invitations.delete()
+ # Delete joined workspace invites
+ workspace_invitations.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class WorkSpaceMemberViewSet(BaseViewSet):
@@ -565,130 +490,104 @@ class WorkSpaceMemberViewSet(BaseViewSet):
)
def partial_update(self, request, slug, pk):
- try:
- workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
- if request.user.id == workspace_member.member_id:
- return Response(
- {"error": "You cannot update your own role"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get the requested user role
- requested_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- # Check if role is being updated
- # One cannot update role higher than his own role
- if (
- "role" in request.data
- and int(request.data.get("role", workspace_member.role))
- > requested_workspace_member.role
- ):
- return Response(
- {
- "error": "You cannot update a role that is higher than your own role"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = WorkSpaceMemberSerializer(
- workspace_member, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except WorkspaceMember.DoesNotExist:
+ workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
+ if request.user.id == workspace_member.member_id:
return Response(
- {"error": "Workspace Member does not exist"},
+ {"error": "You cannot update your own role"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ # Get the requested user role
+ requested_workspace_member = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
+        # Check if the role is being updated
+        # A member cannot assign a role higher than their own
+ if (
+ "role" in request.data
+ and int(request.data.get("role", workspace_member.role))
+ > requested_workspace_member.role
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "You cannot update a role that is higher than your own role"},
status=status.HTTP_400_BAD_REQUEST,
)
+ serializer = WorkSpaceMemberSerializer(
+ workspace_member, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
def destroy(self, request, slug, pk):
- try:
- # Check the user role who is deleting the user
- workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk)
+        # Fetch the workspace member that is being removed
+ workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk)
- # check requesting user role
- requesting_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- if requesting_workspace_member.role < workspace_member.role:
- return Response(
- {"error": "You cannot remove a user having role higher than you"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Check for the only member in the workspace
- if (
- workspace_member.role == 20
- and WorkspaceMember.objects.filter(
- workspace__slug=slug,
- role=20,
- member__is_bot=False,
- ).count()
- == 1
- ):
- return Response(
- {"error": "Cannot delete the only Admin for the workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Delete the user also from all the projects
- ProjectMember.objects.filter(
- workspace__slug=slug, member=workspace_member.member
- ).delete()
- # Remove all favorites
- ProjectFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- CycleFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- ModuleFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- PageFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- IssueViewFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- # Also remove issue from issue assigned
- IssueAssignee.objects.filter(
- workspace__slug=slug, assignee=workspace_member.member
- ).delete()
-
- # Remove if module member
- ModuleMember.objects.filter(
- workspace__slug=slug, member=workspace_member.member
- ).delete()
- # Delete owned Pages
- Page.objects.filter(
- workspace__slug=slug, owned_by=workspace_member.member
- ).delete()
-
- workspace_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
+ # check requesting user role
+ requesting_workspace_member = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
+ if requesting_workspace_member.role < workspace_member.role:
return Response(
- {"error": "Workspace Member does not exists"},
+ {"error": "You cannot remove a user having role higher than you"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ # Check for the only member in the workspace
+ if (
+ workspace_member.role == 20
+ and WorkspaceMember.objects.filter(
+ workspace__slug=slug,
+ role=20,
+ member__is_bot=False,
+ ).count()
+ == 1
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Cannot delete the only Admin for the workspace"},
status=status.HTTP_400_BAD_REQUEST,
)
+        # Also remove the user from all projects in the workspace
+ ProjectMember.objects.filter(
+ workspace__slug=slug, member=workspace_member.member
+ ).delete()
+ # Remove all favorites
+ ProjectFavorite.objects.filter(
+ workspace__slug=slug, user=workspace_member.member
+ ).delete()
+ CycleFavorite.objects.filter(
+ workspace__slug=slug, user=workspace_member.member
+ ).delete()
+ ModuleFavorite.objects.filter(
+ workspace__slug=slug, user=workspace_member.member
+ ).delete()
+ PageFavorite.objects.filter(
+ workspace__slug=slug, user=workspace_member.member
+ ).delete()
+ IssueViewFavorite.objects.filter(
+ workspace__slug=slug, user=workspace_member.member
+ ).delete()
+ # Also remove issue from issue assigned
+ IssueAssignee.objects.filter(
+ workspace__slug=slug, assignee=workspace_member.member
+ ).delete()
+
+ # Remove if module member
+ ModuleMember.objects.filter(
+ workspace__slug=slug, member=workspace_member.member
+ ).delete()
+ # Delete owned Pages
+ Page.objects.filter(
+ workspace__slug=slug, owned_by=workspace_member.member
+ ).delete()
+
+ workspace_member.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
class TeamMemberViewSet(BaseViewSet):
serializer_class = TeamSerializer
@@ -712,51 +611,36 @@ class TeamMemberViewSet(BaseViewSet):
)
def create(self, request, slug):
- try:
- members = list(
- WorkspaceMember.objects.filter(
- workspace__slug=slug, member__id__in=request.data.get("members", [])
- )
- .annotate(member_str_id=Cast("member", output_field=CharField()))
- .distinct()
- .values_list("member_str_id", flat=True)
+ members = list(
+ WorkspaceMember.objects.filter(
+ workspace__slug=slug, member__id__in=request.data.get("members", [])
)
+ .annotate(member_str_id=Cast("member", output_field=CharField()))
+ .distinct()
+ .values_list("member_str_id", flat=True)
+ )
- if len(members) != len(request.data.get("members", [])):
- users = list(set(request.data.get("members", [])).difference(members))
- users = User.objects.filter(pk__in=users)
+ if len(members) != len(request.data.get("members", [])):
+ users = list(set(request.data.get("members", [])).difference(members))
+ users = User.objects.filter(pk__in=users)
- serializer = UserLiteSerializer(users, many=True)
- return Response(
- {
- "error": f"{len(users)} of the member(s) are not a part of the workspace",
- "members": serializer.data,
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- serializer = TeamSerializer(
- data=request.data, context={"workspace": workspace}
- )
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The team with the name already exists"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
+ serializer = UserLiteSerializer(users, many=True)
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": f"{len(users)} of the member(s) are not a part of the workspace",
+ "members": serializer.data,
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ workspace = Workspace.objects.get(slug=slug)
+
+ serializer = TeamSerializer(data=request.data, context={"workspace": workspace})
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
class UserWorkspaceInvitationEndpoint(BaseViewSet):
model = WorkspaceMemberInvite
@@ -777,140 +661,93 @@ class UserWorkspaceInvitationEndpoint(BaseViewSet):
class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
def get(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
+ user = User.objects.get(pk=request.user.id)
- last_workspace_id = user.last_workspace_id
-
- if last_workspace_id is None:
- return Response(
- {
- "project_details": [],
- "workspace_details": {},
- },
- status=status.HTTP_200_OK,
- )
-
- workspace = Workspace.objects.get(pk=last_workspace_id)
- workspace_serializer = WorkSpaceSerializer(workspace)
-
- project_member = ProjectMember.objects.filter(
- workspace_id=last_workspace_id, member=request.user
- ).select_related("workspace", "project", "member", "workspace__owner")
-
- project_member_serializer = ProjectMemberSerializer(
- project_member, many=True
- )
+ last_workspace_id = user.last_workspace_id
+ if last_workspace_id is None:
return Response(
{
- "workspace_details": workspace_serializer.data,
- "project_details": project_member_serializer.data,
+ "project_details": [],
+ "workspace_details": {},
},
status=status.HTTP_200_OK,
)
- except User.DoesNotExist:
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ workspace = Workspace.objects.get(pk=last_workspace_id)
+ workspace_serializer = WorkSpaceSerializer(workspace)
+
+ project_member = ProjectMember.objects.filter(
+ workspace_id=last_workspace_id, member=request.user
+ ).select_related("workspace", "project", "member", "workspace__owner")
+
+ project_member_serializer = ProjectMemberSerializer(project_member, many=True)
+
+ return Response(
+ {
+ "workspace_details": workspace_serializer.data,
+ "project_details": project_member_serializer.data,
+ },
+ status=status.HTTP_200_OK,
+ )
class WorkspaceMemberUserEndpoint(BaseAPIView):
def get(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- member=request.user, workspace__slug=slug
- )
- serializer = WorkspaceMemberMeSerializer(workspace_member)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist):
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ workspace_member = WorkspaceMember.objects.get(
+ member=request.user, workspace__slug=slug
+ )
+ serializer = WorkspaceMemberMeSerializer(workspace_member)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class WorkspaceMemberUserViewsEndpoint(BaseAPIView):
def post(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- workspace_member.view_props = request.data.get("view_props", {})
- workspace_member.save()
+ workspace_member = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
+ workspace_member.view_props = request.data.get("view_props", {})
+ workspace_member.save()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "User not a member of workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(status=status.HTTP_204_NO_CONTENT)
class UserActivityGraphEndpoint(BaseAPIView):
def get(self, request, slug):
- try:
- issue_activities = (
- IssueActivity.objects.filter(
- actor=request.user,
- workspace__slug=slug,
- created_at__date__gte=date.today() + relativedelta(months=-6),
- )
- .annotate(created_date=Cast("created_at", DateField()))
- .values("created_date")
- .annotate(activity_count=Count("created_date"))
- .order_by("created_date")
+ issue_activities = (
+ IssueActivity.objects.filter(
+ actor=request.user,
+ workspace__slug=slug,
+ created_at__date__gte=date.today() + relativedelta(months=-6),
)
+ .annotate(created_date=Cast("created_at", DateField()))
+ .values("created_date")
+ .annotate(activity_count=Count("created_date"))
+ .order_by("created_date")
+ )
- return Response(issue_activities, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(issue_activities, status=status.HTTP_200_OK)
class UserIssueCompletedGraphEndpoint(BaseAPIView):
def get(self, request, slug):
- try:
- month = request.GET.get("month", 1)
+ month = request.GET.get("month", 1)
- issues = (
- Issue.issue_objects.filter(
- assignees__in=[request.user],
- workspace__slug=slug,
- completed_at__month=month,
- completed_at__isnull=False,
- )
- .annotate(completed_week=ExtractWeek("completed_at"))
- .annotate(week=F("completed_week") % 4)
- .values("week")
- .annotate(completed_count=Count("completed_week"))
- .order_by("week")
+ issues = (
+ Issue.issue_objects.filter(
+ assignees__in=[request.user],
+ workspace__slug=slug,
+ completed_at__month=month,
+ completed_at__isnull=False,
)
+ .annotate(completed_week=ExtractWeek("completed_at"))
+ .annotate(week=F("completed_week") % 4)
+ .values("week")
+ .annotate(completed_count=Count("completed_week"))
+ .order_by("week")
+ )
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(issues, status=status.HTTP_200_OK)
class WeekInMonth(Func):
@@ -920,108 +757,100 @@ class WeekInMonth(Func):
class UserWorkspaceDashboardEndpoint(BaseAPIView):
def get(self, request, slug):
- try:
- issue_activities = (
- IssueActivity.objects.filter(
- actor=request.user,
- workspace__slug=slug,
- created_at__date__gte=date.today() + relativedelta(months=-3),
- )
- .annotate(created_date=Cast("created_at", DateField()))
- .values("created_date")
- .annotate(activity_count=Count("created_date"))
- .order_by("created_date")
+ issue_activities = (
+ IssueActivity.objects.filter(
+ actor=request.user,
+ workspace__slug=slug,
+ created_at__date__gte=date.today() + relativedelta(months=-3),
)
+ .annotate(created_date=Cast("created_at", DateField()))
+ .values("created_date")
+ .annotate(activity_count=Count("created_date"))
+ .order_by("created_date")
+ )
- month = request.GET.get("month", 1)
+ month = request.GET.get("month", 1)
- completed_issues = (
- Issue.issue_objects.filter(
- assignees__in=[request.user],
- workspace__slug=slug,
- completed_at__month=month,
- completed_at__isnull=False,
- )
- .annotate(day_of_month=ExtractDay("completed_at"))
- .annotate(week_in_month=WeekInMonth(F("day_of_month")))
- .values("week_in_month")
- .annotate(completed_count=Count("id"))
- .order_by("week_in_month")
+ completed_issues = (
+ Issue.issue_objects.filter(
+ assignees__in=[request.user],
+ workspace__slug=slug,
+ completed_at__month=month,
+ completed_at__isnull=False,
)
+ .annotate(day_of_month=ExtractDay("completed_at"))
+ .annotate(week_in_month=WeekInMonth(F("day_of_month")))
+ .values("week_in_month")
+ .annotate(completed_count=Count("id"))
+ .order_by("week_in_month")
+ )
- assigned_issues = Issue.issue_objects.filter(
+ assigned_issues = Issue.issue_objects.filter(
+ workspace__slug=slug, assignees__in=[request.user]
+ ).count()
+
+ pending_issues_count = Issue.issue_objects.filter(
+ ~Q(state__group__in=["completed", "cancelled"]),
+ workspace__slug=slug,
+ assignees__in=[request.user],
+ ).count()
+
+ completed_issues_count = Issue.issue_objects.filter(
+ workspace__slug=slug,
+ assignees__in=[request.user],
+ state__group="completed",
+ ).count()
+
+ issues_due_week = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ assignees__in=[request.user],
+ )
+ .annotate(target_week=ExtractWeek("target_date"))
+ .filter(target_week=timezone.now().date().isocalendar()[1])
+ .count()
+ )
+
+ state_distribution = (
+ Issue.issue_objects.filter(
workspace__slug=slug, assignees__in=[request.user]
- ).count()
-
- pending_issues_count = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[request.user],
- ).count()
-
- completed_issues_count = Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[request.user],
- state__group="completed",
- ).count()
-
- issues_due_week = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[request.user],
- )
- .annotate(target_week=ExtractWeek("target_date"))
- .filter(target_week=timezone.now().date().isocalendar()[1])
- .count()
)
+ .annotate(state_group=F("state__group"))
+ .values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
- state_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug, assignees__in=[request.user]
- )
- .annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
+ overdue_issues = Issue.issue_objects.filter(
+ ~Q(state__group__in=["completed", "cancelled"]),
+ workspace__slug=slug,
+ assignees__in=[request.user],
+ target_date__lt=timezone.now(),
+ completed_at__isnull=True,
+ ).values("id", "name", "workspace__slug", "project_id", "target_date")
- overdue_issues = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[request.user],
- target_date__lt=timezone.now(),
- completed_at__isnull=True,
- ).values("id", "name", "workspace__slug", "project_id", "target_date")
+ upcoming_issues = Issue.issue_objects.filter(
+ ~Q(state__group__in=["completed", "cancelled"]),
+ start_date__gte=timezone.now(),
+ workspace__slug=slug,
+ assignees__in=[request.user],
+ completed_at__isnull=True,
+ ).values("id", "name", "workspace__slug", "project_id", "start_date")
- upcoming_issues = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- start_date__gte=timezone.now(),
- workspace__slug=slug,
- assignees__in=[request.user],
- completed_at__isnull=True,
- ).values("id", "name", "workspace__slug", "project_id", "start_date")
-
- return Response(
- {
- "issue_activities": issue_activities,
- "completed_issues": completed_issues,
- "assigned_issues_count": assigned_issues,
- "pending_issues_count": pending_issues_count,
- "completed_issues_count": completed_issues_count,
- "issues_due_week_count": issues_due_week,
- "state_distribution": state_distribution,
- "overdue_issues": overdue_issues,
- "upcoming_issues": upcoming_issues,
- },
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return Response(
+ {
+ "issue_activities": issue_activities,
+ "completed_issues": completed_issues,
+ "assigned_issues_count": assigned_issues,
+ "pending_issues_count": pending_issues_count,
+ "completed_issues_count": completed_issues_count,
+ "issues_due_week_count": issues_due_week,
+ "state_distribution": state_distribution,
+ "overdue_issues": overdue_issues,
+ "upcoming_issues": upcoming_issues,
+ },
+ status=status.HTTP_200_OK,
+ )
class WorkspaceThemeViewSet(BaseViewSet):
@@ -1035,157 +864,138 @@ class WorkspaceThemeViewSet(BaseViewSet):
return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
def create(self, request, slug):
- try:
- workspace = Workspace.objects.get(slug=slug)
- serializer = WorkspaceThemeSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(workspace=workspace, actor=request.user)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ workspace = Workspace.objects.get(slug=slug)
+ serializer = WorkspaceThemeSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(workspace=workspace, actor=request.user)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
def get(self, request, slug, user_id):
- try:
- filters = issue_filters(request.query_params, "GET")
+ filters = issue_filters(request.query_params, "GET")
- state_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
-
- priority_order = ["urgent", "high", "medium", "low", "none"]
-
- priority_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .values("priority")
- .annotate(priority_count=Count("priority"))
- .filter(priority_count__gte=1)
- .annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- default=Value(len(priority_order)),
- output_field=IntegerField(),
- )
- )
- .order_by("priority_order")
- )
-
- created_issues = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- created_by_id=user_id,
- )
- .filter(**filters)
- .count()
- )
-
- assigned_issues_count = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- pending_issues_count = (
- Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- completed_issues_count = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- state__group="completed",
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- subscribed_issues_count = (
- IssueSubscriber.objects.filter(
- workspace__slug=slug,
- subscriber_id=user_id,
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- upcoming_cycles = CycleIssue.objects.filter(
+ state_distribution = (
+ Issue.issue_objects.filter(
workspace__slug=slug,
- cycle__start_date__gt=timezone.now().date(),
- issue__assignees__in=[
- user_id,
- ],
- ).values("cycle__name", "cycle__id", "cycle__project_id")
+ assignees__in=[user_id],
+ project__project_projectmember__member=request.user,
+ )
+ .filter(**filters)
+ .annotate(state_group=F("state__group"))
+ .values("state_group")
+ .annotate(state_count=Count("state_group"))
+ .order_by("state_group")
+ )
- present_cycle = CycleIssue.objects.filter(
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+
+ priority_distribution = (
+ Issue.issue_objects.filter(
workspace__slug=slug,
- cycle__start_date__lt=timezone.now().date(),
- cycle__end_date__gt=timezone.now().date(),
- issue__assignees__in=[
- user_id,
- ],
- ).values("cycle__name", "cycle__id", "cycle__project_id")
+ assignees__in=[user_id],
+ project__project_projectmember__member=request.user,
+ )
+ .filter(**filters)
+ .values("priority")
+ .annotate(priority_count=Count("priority"))
+ .filter(priority_count__gte=1)
+ .annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ default=Value(len(priority_order)),
+ output_field=IntegerField(),
+ )
+ )
+ .order_by("priority_order")
+ )
- return Response(
- {
- "state_distribution": state_distribution,
- "priority_distribution": priority_distribution,
- "created_issues": created_issues,
- "assigned_issues": assigned_issues_count,
- "completed_issues": completed_issues_count,
- "pending_issues": pending_issues_count,
- "subscribed_issues": subscribed_issues_count,
- "present_cycles": present_cycle,
- "upcoming_cycles": upcoming_cycles,
- }
+ created_issues = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ project__project_projectmember__member=request.user,
+ created_by_id=user_id,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ .filter(**filters)
+ .count()
+ )
+
+ assigned_issues_count = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ assignees__in=[user_id],
+ project__project_projectmember__member=request.user,
)
+ .filter(**filters)
+ .count()
+ )
+
+ pending_issues_count = (
+ Issue.issue_objects.filter(
+ ~Q(state__group__in=["completed", "cancelled"]),
+ workspace__slug=slug,
+ assignees__in=[user_id],
+ project__project_projectmember__member=request.user,
+ )
+ .filter(**filters)
+ .count()
+ )
+
+ completed_issues_count = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug,
+ assignees__in=[user_id],
+ state__group="completed",
+ project__project_projectmember__member=request.user,
+ )
+ .filter(**filters)
+ .count()
+ )
+
+ subscribed_issues_count = (
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ subscriber_id=user_id,
+ project__project_projectmember__member=request.user,
+ )
+ .filter(**filters)
+ .count()
+ )
+
+ upcoming_cycles = CycleIssue.objects.filter(
+ workspace__slug=slug,
+ cycle__start_date__gt=timezone.now().date(),
+ issue__assignees__in=[
+ user_id,
+ ],
+ ).values("cycle__name", "cycle__id", "cycle__project_id")
+
+ present_cycle = CycleIssue.objects.filter(
+ workspace__slug=slug,
+ cycle__start_date__lt=timezone.now().date(),
+ cycle__end_date__gt=timezone.now().date(),
+ issue__assignees__in=[
+ user_id,
+ ],
+ ).values("cycle__name", "cycle__id", "cycle__project_id")
+
+ return Response(
+ {
+ "state_distribution": state_distribution,
+ "priority_distribution": priority_distribution,
+ "created_issues": created_issues,
+ "assigned_issues": assigned_issues_count,
+ "completed_issues": completed_issues_count,
+ "pending_issues": pending_issues_count,
+ "subscribed_issues": subscribed_issues_count,
+ "present_cycles": present_cycle,
+ "upcoming_cycles": upcoming_cycles,
+ }
+ )
class WorkspaceUserActivityEndpoint(BaseAPIView):
@@ -1194,132 +1004,116 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
]
def get(self, request, slug, user_id):
- try:
- projects = request.query_params.getlist("project", [])
+ projects = request.query_params.getlist("project", [])
- queryset = IssueActivity.objects.filter(
- ~Q(field__in=["comment", "vote", "reaction", "draft"]),
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- actor=user_id,
- ).select_related("actor", "workspace", "issue", "project")
+ queryset = IssueActivity.objects.filter(
+ ~Q(field__in=["comment", "vote", "reaction", "draft"]),
+ workspace__slug=slug,
+ project__project_projectmember__member=request.user,
+ actor=user_id,
+ ).select_related("actor", "workspace", "issue", "project")
- if projects:
- queryset = queryset.filter(project__in=projects)
+ if projects:
+ queryset = queryset.filter(project__in=projects)
- return self.paginate(
- request=request,
- queryset=queryset,
- on_results=lambda issue_activities: IssueActivitySerializer(
- issue_activities, many=True
- ).data,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ return self.paginate(
+ request=request,
+ queryset=queryset,
+ on_results=lambda issue_activities: IssueActivitySerializer(
+ issue_activities, many=True
+ ).data,
+ )
class WorkspaceUserProfileEndpoint(BaseAPIView):
def get(self, request, slug, user_id):
- try:
- user_data = User.objects.get(pk=user_id)
+ user_data = User.objects.get(pk=user_id)
- requesting_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- projects = []
- if requesting_workspace_member.role >= 10:
- projects = (
- Project.objects.filter(
- workspace__slug=slug,
- project_projectmember__member=request.user,
- )
- .annotate(
- created_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__created_by_id=user_id,
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- assigned_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- completed_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__completed_at__isnull=False,
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__state__group__in=[
- "backlog",
- "unstarted",
- "started",
- ],
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .values(
- "id",
- "name",
- "identifier",
- "emoji",
- "icon_prop",
- "created_issues",
- "assigned_issues",
- "completed_issues",
- "pending_issues",
+ requesting_workspace_member = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
+ projects = []
+ if requesting_workspace_member.role >= 10:
+ projects = (
+ Project.objects.filter(
+ workspace__slug=slug,
+ project_projectmember__member=request.user,
+ )
+ .annotate(
+ created_issues=Count(
+ "project_issue",
+ filter=Q(
+ project_issue__created_by_id=user_id,
+ project_issue__archived_at__isnull=True,
+ project_issue__is_draft=False,
+ ),
)
)
+ .annotate(
+ assigned_issues=Count(
+ "project_issue",
+ filter=Q(
+ project_issue__assignees__in=[user_id],
+ project_issue__archived_at__isnull=True,
+ project_issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "project_issue",
+ filter=Q(
+ project_issue__completed_at__isnull=False,
+ project_issue__assignees__in=[user_id],
+ project_issue__archived_at__isnull=True,
+ project_issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "project_issue",
+ filter=Q(
+ project_issue__state__group__in=[
+ "backlog",
+ "unstarted",
+ "started",
+ ],
+ project_issue__assignees__in=[user_id],
+ project_issue__archived_at__isnull=True,
+ project_issue__is_draft=False,
+ ),
+ )
+ )
+ .values(
+ "id",
+ "name",
+ "identifier",
+ "emoji",
+ "icon_prop",
+ "created_issues",
+ "assigned_issues",
+ "completed_issues",
+ "pending_issues",
+ )
+ )
- return Response(
- {
- "project_data": projects,
- "user_data": {
- "email": user_data.email,
- "first_name": user_data.first_name,
- "last_name": user_data.last_name,
- "avatar": user_data.avatar,
- "cover_image": user_data.cover_image,
- "date_joined": user_data.date_joined,
- "user_timezone": user_data.user_timezone,
- "display_name": user_data.display_name,
- },
+ return Response(
+ {
+ "project_data": projects,
+ "user_data": {
+ "email": user_data.email,
+ "first_name": user_data.first_name,
+ "last_name": user_data.last_name,
+ "avatar": user_data.avatar,
+ "cover_image": user_data.cover_image,
+ "date_joined": user_data.date_joined,
+ "user_timezone": user_data.user_timezone,
+ "display_name": user_data.display_name,
},
- status=status.HTTP_200_OK,
- )
- except WorkspaceMember.DoesNotExist:
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ },
+ status=status.HTTP_200_OK,
+ )
class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
@@ -1328,128 +1122,115 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
]
def get(self, request, slug, user_id):
- try:
- filters = issue_filters(request.query_params, "GET")
+ filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
- order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- Issue.issue_objects.filter(
- Q(assignees__in=[user_id])
- | Q(created_by_id=user_id)
- | Q(issue_subscribers__subscriber_id=user_id),
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .select_related("project", "workspace", "state", "parent")
- .prefetch_related("assignees", "labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .order_by("-created_at")
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- ).distinct()
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ order_by_param = request.GET.get("order_by", "-created_at")
+ issue_queryset = (
+ Issue.issue_objects.filter(
+ Q(assignees__in=[user_id])
+ | Q(created_by_id=user_id)
+ | Q(issue_subscribers__subscriber_id=user_id),
+ workspace__slug=slug,
+ project__project_projectmember__member=request.user,
)
+ .filter(**filters)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .select_related("project", "workspace", "state", "parent")
+ .prefetch_related("assignees", "labels")
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related("actor"),
+ )
+ )
+ .order_by("-created_at")
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ ).distinct()
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+ ## Grouping the results
+ group_by = request.GET.get("group_by", False)
+ if group_by:
+ return Response(group_results(issues, group_by), status=status.HTTP_200_OK)
+
+ return Response(issues, status=status.HTTP_200_OK)
class WorkspaceLabelsEndpoint(BaseAPIView):
@@ -1458,18 +1239,11 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- labels = Label.objects.filter(
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- ).values("parent", "name", "color", "id", "project_id", "workspace__slug")
- return Response(labels, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ labels = Label.objects.filter(
+ workspace__slug=slug,
+ project__project_projectmember__member=request.user,
+ ).values("parent", "name", "color", "id", "project_id", "workspace__slug")
+ return Response(labels, status=status.HTTP_200_OK)
class WorkspaceMembersEndpoint(BaseAPIView):
@@ -1478,19 +1252,12 @@ class WorkspaceMembersEndpoint(BaseAPIView):
]
def get(self, request, slug):
- try:
- workspace_members = WorkspaceMember.objects.filter(
- workspace__slug=slug,
- member__is_bot=False,
- ).select_related("workspace", "member")
- serialzier = WorkSpaceMemberSerializer(workspace_members, many=True)
- return Response(serialzier.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ workspace_members = WorkspaceMember.objects.filter(
+ workspace__slug=slug,
+ member__is_bot=False,
+ ).select_related("workspace", "member")
+ serializer = WorkSpaceMemberSerializer(workspace_members, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
class LeaveWorkspaceEndpoint(BaseAPIView):
@@ -1499,36 +1266,22 @@ class LeaveWorkspaceEndpoint(BaseAPIView):
]
def delete(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
+ workspace_member = WorkspaceMember.objects.get(
+ workspace__slug=slug, member=request.user
+ )
- # Only Admin case
- if (
- workspace_member.role == 20
- and WorkspaceMember.objects.filter(
- workspace__slug=slug, role=20
- ).count()
- == 1
- ):
- return Response(
- {
- "error": "You cannot leave the workspace since you are the only admin of the workspace you should delete the workspace"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Delete the member from workspace
- workspace_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
+ # Only Admin case
+ if (
+ workspace_member.role == 20
+ and WorkspaceMember.objects.filter(workspace__slug=slug, role=20).count()
+ == 1
+ ):
return Response(
- {"error": "Workspace member does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "You cannot leave the workspace since you are the only admin of the workspace you should delete the workspace"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ # Delete the member from workspace
+ workspace_member.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)