refactor: exception handling (#2454)

* chore: implemented global exception handler

* dev: remove something went wrong

* chore: exception handling cleanup

* chore: changed the status code

* chore: added status 500 internal server error

---------

Co-authored-by: pablohashescobar <nikhilschacko@gmail.com>
Author: Bavisetti Narayan (committed by GitHub)
Date: 2023-10-17 14:38:06 +05:30
parent 46d34263f0
commit 0b79f8687e
28 changed files with 5866 additions and 7889 deletions
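The heart of the change is a handle_exception override on the shared base views (see the base.py hunks below), so individual endpoints no longer wrap every method in try/except. A compressed, illustrative sketch of that pattern follows; it mirrors the approach in this commit but is not the verbatim Plane code, and the class name is only a placeholder:

    # Illustrative sketch of a centralized DRF exception handler (not the exact Plane code).
    from django.core.exceptions import ObjectDoesNotExist, ValidationError
    from django.db import IntegrityError
    from rest_framework import status
    from rest_framework.response import Response
    from rest_framework.views import APIView

    class ExampleBaseAPIView(APIView):
        def handle_exception(self, exc):
            try:
                # Let DRF translate its own exceptions (NotAuthenticated, Throttled, ...) first.
                return super().handle_exception(exc)
            except Exception as e:
                # Translate common Django errors into consistent JSON responses.
                if isinstance(e, IntegrityError):
                    return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
                if isinstance(e, ValidationError):
                    return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
                if isinstance(e, ObjectDoesNotExist):
                    return Response({"error": "Requested object does not exist"}, status=status.HTTP_404_NOT_FOUND)
                # Anything unexpected becomes a 500 instead of leaking a traceback to the client.
                return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)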


@@ -1,6 +1,3 @@
-# Django imports
-from django.db import IntegrityError
 # Third party imports
 from rest_framework import serializers


@@ -23,166 +23,156 @@ class AnalyticsEndpoint(BaseAPIView):
    ]

    def get(self, request, slug):
-        try:
        x_axis = request.GET.get("x_axis", False)
        y_axis = request.GET.get("y_axis", False)
        segment = request.GET.get("segment", False)

        valid_xaxis_segment = [
            "state_id",
            "state__group",
            "labels__id",
            "assignees__id",
            "estimate_point",
            "issue_cycle__cycle_id",
            "issue_module__module_id",
            "priority",
            "start_date",
            "target_date",
            "created_at",
            "completed_at",
        ]

        valid_yaxis = [
            "issue_count",
            "estimate",
        ]

        # Check for x-axis and y-axis as thery are required parameters
        if (
            not x_axis
            or not y_axis
            or not x_axis in valid_xaxis_segment
            or not y_axis in valid_yaxis
        ):
            return Response(
                {
                    "error": "x-axis and y-axis dimensions are required and the values should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # If segment is present it cannot be same as x-axis
        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
            return Response(
                {
                    "error": "Both segment and x axis cannot be same and segment should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Additional filters that need to be applied
        filters = issue_filters(request.GET, "GET")

        # Get the issues for the workspace with the additional filters applied
        queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)

        # Get the total issue count
        total_issues = queryset.count()

        # Build the graph payload
        distribution = build_graph_plot(
            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )

        state_details = {}
        if x_axis in ["state_id"] or segment in ["state_id"]:
            state_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                )
                .distinct("state_id")
                .order_by("state_id")
                .values("state_id", "state__name", "state__color")
            )

        label_details = {}
        if x_axis in ["labels__id"] or segment in ["labels__id"]:
            label_details = (
                Issue.objects.filter(
                    workspace__slug=slug, **filters, labels__id__isnull=False
                )
                .distinct("labels__id")
                .order_by("labels__id")
                .values("labels__id", "labels__color", "labels__name")
            )

        assignee_details = {}
        if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
            assignee_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug, **filters, assignees__avatar__isnull=False
                )
                .order_by("assignees__id")
                .distinct("assignees__id")
                .values(
                    "assignees__avatar",
                    "assignees__display_name",
                    "assignees__first_name",
                    "assignees__last_name",
                    "assignees__id",
                )
            )

        cycle_details = {}
        if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
            cycle_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                    issue_cycle__cycle_id__isnull=False,
                )
                .distinct("issue_cycle__cycle_id")
                .order_by("issue_cycle__cycle_id")
                .values(
                    "issue_cycle__cycle_id",
                    "issue_cycle__cycle__name",
                )
            )

        module_details = {}
        if x_axis in ["issue_module__module_id"] or segment in [
            "issue_module__module_id"
        ]:
            module_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                    issue_module__module_id__isnull=False,
                )
                .distinct("issue_module__module_id")
                .order_by("issue_module__module_id")
                .values(
                    "issue_module__module_id",
                    "issue_module__module__name",
                )
            )

        return Response(
            {
                "total": total_issues,
                "distribution": distribution,
                "extras": {
                    "state_details": state_details,
                    "assignee_details": assignee_details,
                    "label_details": label_details,
                    "cycle_details": cycle_details,
                    "module_details": module_details,
                },
            },
            status=status.HTTP_200_OK,
        )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class AnalyticViewViewset(BaseViewSet):
    permission_classes = [
@@ -207,45 +197,30 @@ class SavedAnalyticEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, analytic_id):
-        try:
        analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)

        filter = analytic_view.query
        queryset = Issue.issue_objects.filter(**filter)

        x_axis = analytic_view.query_dict.get("x_axis", False)
        y_axis = analytic_view.query_dict.get("y_axis", False)

        if not x_axis or not y_axis:
            return Response(
                {"error": "x-axis and y-axis dimensions are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        segment = request.GET.get("segment", False)
        distribution = build_graph_plot(
            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )
        total_issues = queryset.count()
        return Response(
            {"total": total_issues, "distribution": distribution},
            status=status.HTTP_200_OK,
        )
-        except AnalyticView.DoesNotExist:
-            return Response(
-                {"error": "Analytic View Does not exist"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class ExportAnalyticsEndpoint(BaseAPIView):
    permission_classes = [
@@ -253,73 +228,64 @@ class ExportAnalyticsEndpoint(BaseAPIView):
    ]

    def post(self, request, slug):
-        try:
        x_axis = request.data.get("x_axis", False)
        y_axis = request.data.get("y_axis", False)
        segment = request.data.get("segment", False)

        valid_xaxis_segment = [
            "state_id",
            "state__group",
            "labels__id",
            "assignees__id",
            "estimate_point",
            "issue_cycle__cycle_id",
            "issue_module__module_id",
            "priority",
            "start_date",
            "target_date",
            "created_at",
            "completed_at",
        ]

        valid_yaxis = [
            "issue_count",
            "estimate",
        ]

        # Check for x-axis and y-axis as thery are required parameters
        if (
            not x_axis
            or not y_axis
            or not x_axis in valid_xaxis_segment
            or not y_axis in valid_yaxis
        ):
            return Response(
                {
                    "error": "x-axis and y-axis dimensions are required and the values should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # If segment is present it cannot be same as x-axis
        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
            return Response(
                {
                    "error": "Both segment and x axis cannot be same and segment should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        analytic_export_task.delay(
            email=request.user.email, data=request.data, slug=slug
        )

        return Response(
            {
                "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
            },
            status=status.HTTP_200_OK,
        )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class DefaultAnalyticsEndpoint(BaseAPIView):
    permission_classes = [
@@ -327,102 +293,92 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
    ]

    def get(self, request, slug):
-        try:
        filters = issue_filters(request.GET, "GET")
        base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)

        total_issues = base_issues.count()

        state_groups = base_issues.annotate(state_group=F("state__group"))

        total_issues_classified = (
            state_groups.values("state_group")
            .annotate(state_count=Count("state_group"))
            .order_by("state_group")
        )

        open_issues_groups = ["backlog", "unstarted", "started"]
        open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)

        open_issues = open_issues_queryset.count()
        open_issues_classified = (
            open_issues_queryset.values("state_group")
            .annotate(state_count=Count("state_group"))
            .order_by("state_group")
        )

        issue_completed_month_wise = (
            base_issues.filter(completed_at__isnull=False)
            .annotate(month=ExtractMonth("completed_at"))
            .values("month")
            .annotate(count=Count("*"))
            .order_by("month")
        )

        user_details = [
            "created_by__first_name",
            "created_by__last_name",
            "created_by__avatar",
            "created_by__display_name",
            "created_by__id",
        ]

        most_issue_created_user = (
            base_issues.exclude(created_by=None)
            .values(*user_details)
            .annotate(count=Count("id"))
            .order_by("-count")[:5]
        )

        user_assignee_details = [
            "assignees__first_name",
            "assignees__last_name",
            "assignees__avatar",
            "assignees__display_name",
            "assignees__id",
        ]

        most_issue_closed_user = (
            base_issues.filter(completed_at__isnull=False)
            .exclude(assignees=None)
            .values(*user_assignee_details)
            .annotate(count=Count("id"))
            .order_by("-count")[:5]
        )

        pending_issue_user = (
            base_issues.filter(completed_at__isnull=True)
            .values(*user_assignee_details)
            .annotate(count=Count("id"))
            .order_by("-count")
        )

        open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
            "sum"
        ]
        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]

        return Response(
            {
                "total_issues": total_issues,
                "total_issues_classified": total_issues_classified,
                "open_issues": open_issues,
                "open_issues_classified": open_issues_classified,
                "issue_completed_month_wise": issue_completed_month_wise,
                "most_issue_created_user": most_issue_created_user,
                "most_issue_closed_user": most_issue_closed_user,
                "pending_issue_user": pending_issue_user,
                "open_estimate_sum": open_estimate_sum,
                "total_estimate_sum": total_estimate_sum,
            },
            status=status.HTTP_200_OK,
        )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong. Please try again later."},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


@@ -14,57 +14,34 @@ from plane.api.serializers import APITokenSerializer
class ApiTokenEndpoint(BaseAPIView):
    def post(self, request):
-        try:
        label = request.data.get("label", str(uuid4().hex))
        workspace = request.data.get("workspace", False)

        if not workspace:
            return Response(
                {"error": "Workspace is required"}, status=status.HTTP_200_OK
            )

        api_token = APIToken.objects.create(
            label=label, user=request.user, workspace_id=workspace
        )

        serializer = APITokenSerializer(api_token)
        # Token will be only vissible while creating
        return Response(
            {"api_token": serializer.data, "token": api_token.token},
            status=status.HTTP_201_CREATED,
        )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def get(self, request):
-        try:
        api_tokens = APIToken.objects.filter(user=request.user)
        serializer = APITokenSerializer(api_tokens, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def delete(self, request, pk):
-        try:
        api_token = APIToken.objects.get(pk=pk)
        api_token.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
-        except APIToken.DoesNotExist:
-            return Response(
-                {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


@@ -18,108 +18,58 @@ class FileAssetEndpoint(BaseAPIView):
    """

    def get(self, request, workspace_id, asset_key):
-        try:
        asset_key = str(workspace_id) + "/" + asset_key
        files = FileAsset.objects.filter(asset=asset_key)
        if files.exists():
            serializer = FileAssetSerializer(files, context={"request": request}, many=True)
            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
        else:
            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def post(self, request, slug):
-        try:
        serializer = FileAssetSerializer(data=request.data)
        if serializer.is_valid():
            # Get the workspace
            workspace = Workspace.objects.get(slug=slug)
            serializer.save(workspace_id=workspace.id)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except Workspace.DoesNotExist:
-            return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def delete(self, request, workspace_id, asset_key):
-        try:
        asset_key = str(workspace_id) + "/" + asset_key
        file_asset = FileAsset.objects.get(asset=asset_key)
        # Delete the file from storage
        file_asset.asset.delete(save=False)
        # Delete the file object
        file_asset.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
-        except FileAsset.DoesNotExist:
-            return Response(
-                {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class UserAssetsEndpoint(BaseAPIView):
    parser_classes = (MultiPartParser, FormParser)

    def get(self, request, asset_key):
-        try:
        files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
        if files.exists():
            serializer = FileAssetSerializer(files, context={"request": request})
            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
        else:
            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def post(self, request):
-        try:
        serializer = FileAssetSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

    def delete(self, request, asset_key):
-        try:
        file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
        # Delete the file from storage
        file_asset.asset.delete(save=False)
        # Delete the file object
        file_asset.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
-        except FileAsset.DoesNotExist:
-            return Response(
-                {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


@@ -127,32 +127,25 @@ class ResetPasswordEndpoint(BaseAPIView):
class ChangePasswordEndpoint(BaseAPIView):
    def post(self, request):
-        try:
        serializer = ChangePasswordSerializer(data=request.data)

        user = User.objects.get(pk=request.user.id)
        if serializer.is_valid():
            # Check old password
            if not user.object.check_password(serializer.data.get("old_password")):
                return Response(
                    {"old_password": ["Wrong password."]},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # set_password also hashes the password that the user will get
            self.object.set_password(serializer.data.get("new_password"))
            self.object.save()
            response = {
                "status": "success",
                "code": status.HTTP_200_OK,
                "message": "Password updated successfully",
            }
            return Response(response)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
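Note that the unchanged body above reaches check_password and set_password through user.object and self.object; the conventional Django API calls both methods directly on the user instance, which hashes the new password before it is stored. A minimal reference sketch of that conventional flow, assuming the same old_password/new_password fields; this is illustrative only and not part of this diff:

    # Conventional Django change-password flow (illustrative, not part of this commit).
    from rest_framework import status
    from rest_framework.response import Response

    def change_password(user, old_password, new_password):
        # check_password verifies against the stored hash; set_password re-hashes before saving.
        if not user.check_password(old_password):
            return Response({"old_password": ["Wrong password."]}, status=status.HTTP_400_BAD_REQUEST)
        user.set_password(new_password)
        user.save(update_fields=["password"])
        return Response({"message": "Password updated successfully"}, status=status.HTTP_200_OK)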


@@ -40,223 +40,194 @@ class SignUpEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
-        try:
        if not settings.ENABLE_SIGNUP:
            return Response(
                {
                    "error": "New account creation is disabled. Please contact your site administrator"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = request.data.get("email", False)
        password = request.data.get("password", False)

        ## Raise exception if any of the above are missing
        if not email or not password:
            return Response(
                {"error": "Both email and password are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = email.strip().lower()

        try:
            validate_email(email)
        except ValidationError as e:
            return Response(
                {"error": "Please provide a valid email address."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if the user already exists
        if User.objects.filter(email=email).exists():
            return Response(
                {"error": "User with this email already exists"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user = User.objects.create(email=email, username=uuid.uuid4().hex)
        user.set_password(password)

        # settings last actives for the user
        user.last_active = timezone.now()
        user.last_login_time = timezone.now()
        user.last_login_ip = request.META.get("REMOTE_ADDR")
        user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
        user.token_updated_at = timezone.now()
        user.save()

        access_token, refresh_token = get_tokens_for_user(user)

        data = {
            "access_token": access_token,
            "refresh_token": refresh_token,
        }

        # Send Analytics
        if settings.ANALYTICS_BASE_API:
            _ = requests.post(
                settings.ANALYTICS_BASE_API,
                headers={
                    "Content-Type": "application/json",
                    "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                },
                json={
                    "event_id": uuid.uuid4().hex,
                    "event_data": {
                        "medium": "email",
                    },
                    "user": {"email": email, "id": str(user.id)},
                    "device_ctx": {
                        "ip": request.META.get("REMOTE_ADDR"),
                        "user_agent": request.META.get("HTTP_USER_AGENT"),
                    },
                    "event_type": "SIGN_UP",
                },
            )

        return Response(data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class SignInEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
-        try:
        email = request.data.get("email", False)
        password = request.data.get("password", False)

        ## Raise exception if any of the above are missing
        if not email or not password:
            return Response(
                {"error": "Both email and password are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = email.strip().lower()

        try:
            validate_email(email)
        except ValidationError as e:
            return Response(
                {"error": "Please provide a valid email address."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user = User.objects.filter(email=email).first()

        if user is None:
            return Response(
                {
                    "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                },
                status=status.HTTP_403_FORBIDDEN,
            )

        # Sign up Process
        if not user.check_password(password):
            return Response(
                {
                    "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                },
                status=status.HTTP_403_FORBIDDEN,
            )
        if not user.is_active:
            return Response(
                {
                    "error": "Your account has been deactivated. Please contact your site administrator."
                },
                status=status.HTTP_403_FORBIDDEN,
            )

        # settings last active for the user
        user.last_active = timezone.now()
        user.last_login_time = timezone.now()
        user.last_login_ip = request.META.get("REMOTE_ADDR")
        user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
        user.token_updated_at = timezone.now()
        user.save()

        access_token, refresh_token = get_tokens_for_user(user)
        # Send Analytics
        if settings.ANALYTICS_BASE_API:
            _ = requests.post(
                settings.ANALYTICS_BASE_API,
                headers={
                    "Content-Type": "application/json",
                    "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                },
                json={
                    "event_id": uuid.uuid4().hex,
                    "event_data": {
                        "medium": "email",
                    },
                    "user": {"email": email, "id": str(user.id)},
                    "device_ctx": {
                        "ip": request.META.get("REMOTE_ADDR"),
                        "user_agent": request.META.get("HTTP_USER_AGENT"),
                    },
                    "event_type": "SIGN_IN",
                },
            )

        data = {
            "access_token": access_token,
            "refresh_token": refresh_token,
        }
        return Response(data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {
-                    "error": "Something went wrong. Please try again later or contact the support team."
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class SignOutEndpoint(BaseAPIView):
    def post(self, request):
-        try:
        refresh_token = request.data.get("refresh_token", False)

        if not refresh_token:
            capture_message("No refresh token provided")
-            return Response(
-                {
-                    "error": "Something went wrong. Please try again later or contact the support team."
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+            return Response(
+                {"error": "No refresh token provided"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

        user = User.objects.get(pk=request.user.id)

        user.last_logout_time = timezone.now()
        user.last_logout_ip = request.META.get("REMOTE_ADDR")

        user.save()

        token = RefreshToken(refresh_token)
        token.blacklist()
        return Response({"message": "success"}, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {
-                    "error": "Something went wrong. Please try again later or contact the support team."
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class MagicSignInGenerateEndpoint(BaseAPIView):
    permission_classes = [
@@ -264,74 +235,62 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
    ]

    def post(self, request):
-        try:
        email = request.data.get("email", False)

        if not email:
            return Response(
                {"error": "Please provide a valid email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
        )

        ri = redis_instance()

        key = "magic_" + str(email)

        # Check if the key already exists in python
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        current_site = settings.WEB_URL
        magic_link.delay(email, key, token, current_site)

        return Response({"key": key}, status=status.HTTP_200_OK)
-        except ValidationError:
-            return Response(
-                {"error": "Please provide a valid email address."},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class MagicSignInEndpoint(BaseAPIView):
@@ -340,111 +299,99 @@ class MagicSignInEndpoint(BaseAPIView):
    ]

    def post(self, request):
-        try:
        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", False).strip().lower()

        if not key or user_token == "":
            return Response(
                {"error": "User token and key are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        ri = redis_instance()

        if ri.exists(key):
            data = json.loads(ri.get(key))

            token = data["token"]
            email = data["email"]

            if str(token) == str(user_token):
                if User.objects.filter(email=email).exists():
                    user = User.objects.get(email=email)
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_IN",
                            },
                        )
                else:
                    user = User.objects.create(
                        email=email,
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_UP",
                            },
                        )

                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                }

                return Response(data, status=status.HTTP_200_OK)

            else:
                return Response(
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

        else:
            return Response(
                {"error": "The magic code/link has expired please try again"},
                status=status.HTTP_400_BAD_REQUEST,
            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


@@ -5,10 +5,14 @@ import zoneinfo
 from django.urls import resolve
 from django.conf import settings
 from django.utils import timezone
+from django.db import IntegrityError
+from django.core.exceptions import ObjectDoesNotExist, ValidationError

 # Third part imports
 from rest_framework import status
 from rest_framework.viewsets import ModelViewSet
+from rest_framework.response import Response
 from rest_framework.exceptions import APIException
 from rest_framework.views import APIView
 from rest_framework.filters import SearchFilter

@@ -33,8 +37,6 @@ class TimezoneMixin:
            timezone.deactivate()


class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
    model = None

@@ -59,16 +61,48 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
            capture_exception(e)
            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)

+    def handle_exception(self, exc):
+        """
+        Handle any exception that occurs, by returning an appropriate response,
+        or re-raising the error.
+        """
+        try:
+            response = super().handle_exception(exc)
+            return response
+        except Exception as e:
+            if isinstance(e, IntegrityError):
+                return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+
+            if isinstance(e, ValidationError):
+                return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
+
+            if isinstance(e, ObjectDoesNotExist):
+                model_name = str(exc).split(" matching query does not exist.")[0]
+                return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
+
+            if isinstance(e, KeyError):
+                capture_exception(e)
+                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
+
+            capture_exception(e)
+            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def dispatch(self, request, *args, **kwargs):
+        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
+        except Exception as exc:
+            response = self.handle_exception(exc)
+            return exc

    @property
    def workspace_slug(self):

@@ -104,16 +138,48 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

+    def handle_exception(self, exc):
+        """
+        Handle any exception that occurs, by returning an appropriate response,
+        or re-raising the error.
+        """
+        try:
+            response = super().handle_exception(exc)
+            return response
+        except Exception as e:
+            if isinstance(e, IntegrityError):
+                return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+
+            if isinstance(e, ValidationError):
+                return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
+
+            if isinstance(e, ObjectDoesNotExist):
+                model_name = str(exc).split(" matching query does not exist.")[0]
+                return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
+
+            if isinstance(e, KeyError):
+                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
+
+            capture_exception(e)
+            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def dispatch(self, request, *args, **kwargs):
+        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
+        except Exception as exc:
+            response = self.handle_exception(exc)
+            return exc

    @property
    def workspace_slug(self):
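One non-obvious line in the handlers above is how the 404 branch names the missing model: Django's Model.DoesNotExist message ends with "matching query does not exist.", so splitting on that suffix recovers the model name. A tiny standalone illustration in plain Python ("Issue" is just an example name, not taken from this diff):

    # How the 404 branch derives the model name from a DoesNotExist message.
    exc_message = "Issue matching query does not exist."
    model_name = exc_message.split(" matching query does not exist.")[0]
    print(model_name)                        # -> "Issue"
    print(f"{model_name} does not exist.")   # the body returned with HTTP 404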


@@ -20,21 +20,14 @@ class ConfigurationEndpoint(BaseAPIView):
    ]

    def get(self, request):
-        try:
        data = {}
        data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
        data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
        data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
        data["magic_login"] = (
            bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
        ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
        data["email_password_login"] = (
            os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
        )
        return Response(data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

File diff suppressed because it is too large


@@ -1,6 +1,3 @@
-# Django imports
-from django.db import IntegrityError
 # Third party imports
 from rest_framework.response import Response
 from rest_framework import status

@@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, project_id):
-        try:
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        if project.estimate_id is not None:
            estimate_points = EstimatePoint.objects.filter(

@@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
            serializer = EstimatePointSerializer(estimate_points, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response([], status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


class BulkEstimatePointEndpoint(BaseViewSet):
@ -50,204 +40,139 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer_class = EstimateSerializer serializer_class = EstimateSerializer
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try: estimates = Estimate.objects.filter(
estimates = Estimate.objects.filter( workspace__slug=slug, project_id=project_id
workspace__slug=slug, project_id=project_id ).prefetch_related("points").select_related("workspace", "project")
).prefetch_related("points").select_related("workspace", "project") serializer = EstimateReadSerializer(estimates, many=True)
serializer = EstimateReadSerializer(estimates, many=True) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try: if not request.data.get("estimate", False):
if not request.data.get("estimate", False):
return Response(
{"error": "Estimate is required"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_points = request.data.get("estimate_points", [])
if not len(estimate_points) or len(estimate_points) > 8:
return Response(
{"error": "Estimate points are required"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
if not estimate_serializer.is_valid():
return Response(
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
try:
estimate = estimate_serializer.save(project_id=project_id)
except IntegrityError:
return Response(
{"errror": "Estimate with the name already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_points = EstimatePoint.objects.bulk_create(
[
EstimatePoint(
estimate=estimate,
key=estimate_point.get("key", 0),
value=estimate_point.get("value", ""),
description=estimate_point.get("description", ""),
project_id=project_id,
workspace_id=estimate.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for estimate_point in estimate_points
],
batch_size=10,
ignore_conflicts=True,
)
estimate_point_serializer = EstimatePointSerializer(
estimate_points, many=True
)
return Response( return Response(
{ {"error": "Estimate is required"},
"estimate": estimate_serializer.data,
"estimate_points": estimate_point_serializer.data,
},
status=status.HTTP_200_OK,
)
except Estimate.DoesNotExist:
return Response(
{"error": "Estimate does not exist"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except Exception as e:
capture_exception(e) estimate_points = request.data.get("estimate_points", [])
if not len(estimate_points) or len(estimate_points) > 8:
return Response( return Response(
{"error": "Something went wrong please try again later"}, {"error": "Estimate points are required"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
if not estimate_serializer.is_valid():
return Response(
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
estimate = estimate_serializer.save(project_id=project_id)
estimate_points = EstimatePoint.objects.bulk_create(
[
EstimatePoint(
estimate=estimate,
key=estimate_point.get("key", 0),
value=estimate_point.get("value", ""),
description=estimate_point.get("description", ""),
project_id=project_id,
workspace_id=estimate.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for estimate_point in estimate_points
],
batch_size=10,
ignore_conflicts=True,
)
estimate_point_serializer = EstimatePointSerializer(
estimate_points, many=True
)
return Response(
{
"estimate": estimate_serializer.data,
"estimate_points": estimate_point_serializer.data,
},
status=status.HTTP_200_OK,
)
    def retrieve(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        serializer = EstimateReadSerializer(estimate)
        return Response(
            serializer.data,
            status=status.HTTP_200_OK,
        )
    def partial_update(self, request, slug, project_id, estimate_id):
        if not request.data.get("estimate", False):
            return Response(
                {"error": "Estimate is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not len(request.data.get("estimate_points", [])):
            return Response(
                {"error": "Estimate points are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate = Estimate.objects.get(pk=estimate_id)

        estimate_serializer = EstimateSerializer(
            estimate, data=request.data.get("estimate"), partial=True
        )
        if not estimate_serializer.is_valid():
            return Response(
                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

        estimate = estimate_serializer.save()

        estimate_points_data = request.data.get("estimate_points", [])

        estimate_points = EstimatePoint.objects.filter(
            pk__in=[
                estimate_point.get("id") for estimate_point in estimate_points_data
            ],
            workspace__slug=slug,
            project_id=project_id,
            estimate_id=estimate_id,
        )

        updated_estimate_points = []
        for estimate_point in estimate_points:
            # Find the data for that estimate point
            estimate_point_data = [
                point
                for point in estimate_points_data
                if point.get("id") == str(estimate_point.id)
            ]
            if len(estimate_point_data):
                estimate_point.value = estimate_point_data[0].get(
                    "value", estimate_point.value
                )
                updated_estimate_points.append(estimate_point)

        EstimatePoint.objects.bulk_update(
            updated_estimate_points, ["value"], batch_size=10,
        )

        estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
        return Response(
            {
                "estimate": estimate_serializer.data,
                "estimate_points": estimate_point_serializer.data,
            },
            status=status.HTTP_200_OK,
        )
    def destroy(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        estimate.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
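
Every handler touched by this commit drops its local try/except, so unhandled exceptions now bubble up to the shared base view and the global exception handler mentioned in the commit messages. That handler is not part of this excerpt; the sketch below only illustrates the usual DRF pattern such a refactor relies on, with illustrative names and messages:

    # Minimal sketch of a centralized DRF exception handler; the real one lives
    # outside this excerpt, so everything here is illustrative only.
    from django.core.exceptions import ObjectDoesNotExist, ValidationError
    from rest_framework import status
    from rest_framework.response import Response
    from rest_framework.views import exception_handler
    from sentry_sdk import capture_exception


    def plane_exception_handler(exc, context):
        # Let DRF build responses for its own APIException subclasses first
        response = exception_handler(exc, context)
        if response is not None:
            return response

        # Map common ORM errors to 4xx instead of a blanket failure
        if isinstance(exc, ObjectDoesNotExist):
            return Response(
                {"error": "The requested resource does not exist."},
                status=status.HTTP_404_NOT_FOUND,
            )
        if isinstance(exc, ValidationError):
            return Response(
                {"error": "Invalid data provided."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Anything unexpected is reported and surfaced as a 500
        capture_exception(exc)
        return Response(
            {"error": "Something went wrong. Please try again later."},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

Such a handler is wired up either through REST_FRAMEWORK["EXCEPTION_HANDLER"] in settings or an overridden handle_exception on the shared base view; this excerpt does not show which approach the commit takes.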


@@ -20,81 +20,62 @@ class ExportIssuesEndpoint(BaseAPIView):
    serializer_class = ExporterHistorySerializer
    def post(self, request, slug):
        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        provider = request.data.get("provider", False)
        multiple = request.data.get("multiple", False)
        project_ids = request.data.get("project", [])

        if provider in ["csv", "xlsx", "json"]:
            if not project_ids:
                project_ids = Project.objects.filter(
                    workspace__slug=slug
                ).values_list("id", flat=True)
                project_ids = [str(project_id) for project_id in project_ids]

            exporter = ExporterHistory.objects.create(
                workspace=workspace,
                project=project_ids,
                initiated_by=request.user,
                provider=provider,
            )

            issue_export_task.delay(
                provider=exporter.provider,
                workspace_id=workspace.id,
                project_ids=project_ids,
                token_id=exporter.token,
                multiple=multiple,
                slug=slug,
            )
            return Response(
                {
                    "message": f"Once the export is ready you will be able to download it"
                },
                status=status.HTTP_200_OK,
            )
        else:
            return Response(
                {"error": f"Provider '{provider}' not found."},
                status=status.HTTP_400_BAD_REQUEST,
            )
    def get(self, request, slug):
        exporter_history = ExporterHistory.objects.filter(
            workspace__slug=slug
        ).select_related("workspace","initiated_by")

        if request.GET.get("per_page", False) and request.GET.get("cursor", False):
            return self.paginate(
                request=request,
                queryset=exporter_history,
                on_results=lambda exporter_history: ExporterHistorySerializer(
                    exporter_history, many=True
                ).data,
            )
        else:
            return Response(
                {"error": "per_page and cursor are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
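
Listing export history is cursor-paginated, and the view rejects requests that omit either query parameter. A request sketch; the route is illustrative and the cursor format is whatever the shared paginator defines, which is outside this excerpt:

    import requests

    resp = requests.get(
        "https://plane.example.com/api/workspaces/my-workspace/export-issues/",  # hypothetical route
        params={"per_page": 10, "cursor": "<cursor>"},  # both are required by the view above
        headers={"Authorization": "Bearer <api_token>"},
    )
    print(resp.json())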


@@ -25,94 +25,68 @@ class GPTIntegrationEndpoint(BaseAPIView):
    ]
    def post(self, request, slug, project_id):
        if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
            return Response(
                {"error": "OpenAI API key and engine is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        prompt = request.data.get("prompt", False)
        task = request.data.get("task", False)

        if not task:
            return Response(
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        final_text = task + "\n" + prompt

        openai.api_key = settings.OPENAI_API_KEY
        response = openai.ChatCompletion.create(
            model=settings.GPT_ENGINE,
            messages=[{"role": "user", "content": final_text}],
            temperature=0.7,
            max_tokens=1024,
        )

        workspace = Workspace.objects.get(slug=slug)
        project = Project.objects.get(pk=project_id)

        text = response.choices[0].message.content.strip()
        text_html = text.replace("\n", "<br/>")
        return Response(
            {
                "response": text,
                "response_html": text_html,
                "project_detail": ProjectLiteSerializer(project).data,
                "workspace_detail": WorkspaceLiteSerializer(workspace).data,
            },
            status=status.HTTP_200_OK,
        )
class ReleaseNotesEndpoint(BaseAPIView):
    def get(self, request):
        release_notes = get_release_notes()
        return Response(release_notes, status=status.HTTP_200_OK)
class UnsplashEndpoint(BaseAPIView):
    def get(self, request):
        query = request.GET.get("query", False)
        page = request.GET.get("page", 1)
        per_page = request.GET.get("per_page", 20)

        url = (
            f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
            if query
            else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
        )

        headers = {
            "Content-Type": "application/json",
        }

        resp = requests.get(url=url, headers=headers)
        return Response(resp.json(), status=status.HTTP_200_OK)
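
One detail worth flagging in the search branch above: the f-string interpolates page=${page}, so a literal "$" lands in the query string and the page number is not passed the way it reads. It looks like a leftover from a JavaScript template literal. The presumed intent is:

    # Presumed intent for the search branch; the stray "$" in the original reads
    # like a carry-over from JS template-literal syntax.
    url = (
        f"https://api.unsplash.com/search/photos/"
        f"?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page={page}&per_page={per_page}"
    )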


@@ -44,559 +44,479 @@ from plane.utils.html_processor import strip_tags
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
    def get(self, request, slug, service):
        if service == "github":
            owner = request.GET.get("owner", False)
            repo = request.GET.get("repo", False)

            if not owner or not repo:
                return Response(
                    {"error": "Owner and repo are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace_integration = WorkspaceIntegration.objects.get(
                integration__provider="github", workspace__slug=slug
            )

            access_tokens_url = workspace_integration.metadata.get(
                "access_tokens_url", False
            )

            if not access_tokens_url:
                return Response(
                    {
                        "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            issue_count, labels, collaborators = get_github_repo_details(
                access_tokens_url, owner, repo
            )
            return Response(
                {
                    "issue_count": issue_count,
                    "labels": labels,
                    "collaborators": collaborators,
                },
                status=status.HTTP_200_OK,
            )

        if service == "jira":
            # Check for all the keys
            params = {
                "project_key": "Project key is required",
                "api_token": "API token is required",
                "email": "Email is required",
                "cloud_hostname": "Cloud hostname is required",
            }

            for key, error_message in params.items():
                if not request.GET.get(key, False):
                    return Response(
                        {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
                    )

            project_key = request.GET.get("project_key", "")
            api_token = request.GET.get("api_token", "")
            email = request.GET.get("email", "")
            cloud_hostname = request.GET.get("cloud_hostname", "")

            response = jira_project_issue_summary(
                email, api_token, project_key, cloud_hostname
            )
            if "error" in response:
                return Response(response, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(
                    response,
                    status=status.HTTP_200_OK,
                )
        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
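
For the Jira branch, all four query parameters are mandatory. A request sketch; the route and values are illustrative, not taken from this diff:

    import requests

    resp = requests.get(
        "https://plane.example.com/api/workspaces/my-workspace/importers/jira/",  # hypothetical route
        params={
            "project_key": "PROJ",
            "api_token": "<jira_api_token>",
            "email": "user@example.com",
            "cloud_hostname": "yourteam.atlassian.net",
        },
        headers={"Authorization": "Bearer <plane_api_token>"},
    )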
class ImportServiceEndpoint(BaseAPIView):
    def post(self, request, slug, service):
        project_id = request.data.get("project_id", False)

        if not project_id:
            return Response(
                {"error": "Project ID is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.get(slug=slug)

        if service == "github":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata or not config:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        if service == "jira":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
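
The GitHub and Jira branches only verify that data, metadata and config are present; their internal structure is consumed by the service_importer task, which sits outside this excerpt. A skeleton payload with placeholder values:

    # Placeholder shapes only; the nested contents depend on the service and are
    # interpreted by the background importer, not by this view.
    payload = {
        "project_id": "<project_uuid>",
        "data": {},      # service-specific import data
        "metadata": {},  # e.g. repository or Jira project details
        "config": {},    # sync options
    }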
    def get(self, request, slug):
        imports = (
            Importer.objects.filter(workspace__slug=slug)
            .order_by("-created_at")
            .select_related("initiated_by", "project", "workspace")
        )
        serializer = ImporterSerializer(imports, many=True)
        return Response(serializer.data)
    def delete(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )

        if importer.imported_data is not None:
            # Delete all imported Issues
            imported_issues = importer.imported_data.get("issues", [])
            Issue.issue_objects.filter(id__in=imported_issues).delete()

            # Delete all imported Labels
            imported_labels = importer.imported_data.get("labels", [])
            Label.objects.filter(id__in=imported_labels).delete()

            if importer.service == "jira":
                imported_modules = importer.imported_data.get("modules", [])
                Module.objects.filter(id__in=imported_modules).delete()
        importer.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
    def patch(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        serializer = ImporterSerializer(importer, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service, importer_id):
        importer = Importer.objects.get(
            pk=importer_id,
            workspace__slug=slug,
            project_id=project_id,
            service=service,
        )
        importer.status = request.data.get("status", "processing")
        importer.save()
        return Response(status.HTTP_200_OK)
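
A small wart that survives the refactor: the final line passes status.HTTP_200_OK as the response body (the first positional argument of Response) rather than as the status keyword, so the reply is a 200 whose JSON body is the integer 200. If an empty 200 is intended, the usual spelling would be:

    return Response(status=status.HTTP_200_OK)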
class BulkImportIssuesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        # Get the project
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        # Get the default state
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=project_id, default=True
        ).first()
        # if there is no default state assign any random state
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=project_id
            ).first()

        # Get the maximum sequence_id
        last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
            largest=Max("sequence")
        )["largest"]

        last_id = 1 if last_id is None else last_id + 1

        # Get the maximum sort order
        largest_sort_order = Issue.objects.filter(
            project_id=project_id, state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]

        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )

        # Get the issues_data
        issues_data = request.data.get("issues_data", [])

        if not len(issues_data):
            return Response(
                {"error": "Issue data is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Issues
        bulk_issues = []
        for issue_data in issues_data:
            bulk_issues.append(
                Issue(
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    state_id=issue_data.get("state")
                    if issue_data.get("state", False)
                    else default_state.id,
                    name=issue_data.get("name", "Issue Created through Bulk"),
                    description_html=issue_data.get("description_html", "<p></p>"),
                    description_stripped=(
                        None
                        if (
                            issue_data.get("description_html") == ""
                            or issue_data.get("description_html") is None
                        )
                        else strip_tags(issue_data.get("description_html"))
                    ),
                    sequence_id=last_id,
                    sort_order=largest_sort_order,
                    start_date=issue_data.get("start_date", None),
                    target_date=issue_data.get("target_date", None),
                    priority=issue_data.get("priority", "none"),
                    created_by=request.user,
                )
            )

            largest_sort_order = largest_sort_order + 10000
            last_id = last_id + 1

        issues = Issue.objects.bulk_create(
            bulk_issues,
            batch_size=100,
            ignore_conflicts=True,
        )

        # Sequences
        _ = IssueSequence.objects.bulk_create(
            [
                IssueSequence(
                    issue=issue,
                    sequence=issue.sequence_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Attach Labels
        bulk_issue_labels = []
        for issue, issue_data in zip(issues, issues_data):
            labels_list = issue_data.get("labels_list", [])
            bulk_issue_labels = bulk_issue_labels + [
                IssueLabel(
                    issue=issue,
                    label_id=label_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for label_id in labels_list
            ]

        _ = IssueLabel.objects.bulk_create(
            bulk_issue_labels, batch_size=100, ignore_conflicts=True
        )

        # Attach Assignees
        bulk_issue_assignees = []
        for issue, issue_data in zip(issues, issues_data):
            assignees_list = issue_data.get("assignees_list", [])
            bulk_issue_assignees = bulk_issue_assignees + [
                IssueAssignee(
                    issue=issue,
                    assignee_id=assignee_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for assignee_id in assignees_list
            ]

        _ = IssueAssignee.objects.bulk_create(
            bulk_issue_assignees, batch_size=100, ignore_conflicts=True
        )

        # Track the issue activities
        IssueActivity.objects.bulk_create(
            [
                IssueActivity(
                    issue=issue,
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Create Comments
        bulk_issue_comments = []
        for issue, issue_data in zip(issues, issues_data):
            comments_list = issue_data.get("comments_list", [])
            bulk_issue_comments = bulk_issue_comments + [
                IssueComment(
                    issue=issue,
                    comment_html=comment.get("comment_html", "<p></p>"),
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for comment in comments_list
            ]

        _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)

        # Attach Links
        _ = IssueLink.objects.bulk_create(
            [
                IssueLink(
                    issue=issue,
                    url=issue_data.get("link", {}).get("url", "https://github.com"),
                    title=issue_data.get("link", {}).get("title", "Original Issue"),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
        )

        return Response(
            {"issues": IssueFlatSerializer(issues, many=True).data},
            status=status.HTTP_201_CREATED,
        )
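
The keys the handler reads from each entry of issues_data are visible above; a representative payload, with illustrative values:

    issues_data = [
        {
            "name": "Imported from GitHub #42",
            "description_html": "<p>Original description</p>",
            "state": None,                     # falls back to the project's default state
            "priority": "medium",
            "start_date": None,
            "target_date": "2023-10-31",
            "labels_list": ["<label_uuid>"],
            "assignees_list": ["<member_uuid>"],
            "comments_list": [{"comment_html": "<p>First comment</p>"}],
            "link": {"url": "https://github.com/org/repo/issues/42", "title": "Original Issue"},
        },
    ]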
class BulkImportModulesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        modules = Module.objects.bulk_create(
            [
                Module(
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date", None),
                    target_date=module.get("target_date", None),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        modules = Module.objects.filter(id__in=[module.id for module in modules])

        if len(modules) == len(modules_data):
            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get(
                            "url", "https://plane.so"
                        ),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            bulk_module_issues = []
            for module, module_data in zip(modules, modules_data):
                module_issues_list = module_data.get("module_issues_list", [])
                bulk_module_issues = bulk_module_issues + [
                    ModuleIssue(
                        issue_id=issue,
                        module=module,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for issue in module_issues_list
                ]

            _ = ModuleIssue.objects.bulk_create(
                bulk_module_issues, batch_size=100, ignore_conflicts=True
            )

            serializer = ModuleSerializer(modules, many=True)
            return Response(
                {"modules": serializer.data}, status=status.HTTP_201_CREATED
            )

        else:
            return Response(
                {"message": "Modules created but issues could not be imported"},
                status=status.HTTP_200_OK,
            )
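
Likewise for modules_data, the consumed keys are name, description, start_date, target_date, link and module_issues_list. An illustrative entry:

    modules_data = [
        {
            "name": "Sprint 12",
            "description": "Imported from Jira",
            "start_date": "2023-10-01",
            "target_date": "2023-10-14",
            "link": {"url": "https://yourteam.atlassian.net/browse/PROJ", "title": "Original Issue"},
            "module_issues_list": ["<issue_uuid>"],
        },
    ]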


@@ -64,24 +64,17 @@ class InboxViewSet(BaseViewSet):
        serializer.save(project_id=self.kwargs.get("project_id"))
    def destroy(self, request, slug, project_id, pk):
        inbox = Inbox.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        # Handle default inbox delete
        if inbox.is_default:
            return Response(
                {"error": "You cannot delete the default inbox"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        inbox.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class InboxIssueViewSet(BaseViewSet):

@@ -110,281 +103,239 @@ class InboxIssueViewSet(BaseViewSet):
        )
    def list(self, request, slug, project_id, inbox_id):
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.objects.filter(
                issue_inbox__inbox_id=inbox_id,
                workspace__slug=slug,
                project_id=project_id,
            )
            .filter(**filters)
            .annotate(bridge_id=F("issue_inbox__id"))
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels")
            .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(
                    issue=OuterRef("id")
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .prefetch_related(
                Prefetch(
                    "issue_inbox",
                    queryset=InboxIssue.objects.only(
                        "status", "duplicate_to", "snoozed_till", "source"
                    ),
                )
            )
        )
        issues_data = IssueStateInboxSerializer(issues, many=True).data
        return Response(
            issues_data,
            status=status.HTTP_200_OK,
        )
    def create(self, request, slug, project_id, inbox_id):
        if not request.data.get("issue", {}).get("name", False):
            return Response(
                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Check for valid priority
        if not request.data.get("issue", {}).get("priority", "none") in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response(
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create or get state
        state, _ = State.objects.get_or_create(
            name="Triage",
            group="backlog",
            description="Default state for managing all Inbox Issues",
            project_id=project_id,
            color="#ff7700",
        )

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get(
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "low"),
            project_id=project_id,
            state=state,
        )

        # Create an Issue Activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp())
        )
        # create an inbox issue
        InboxIssue.objects.create(
            inbox_id=inbox_id,
            project_id=project_id,
            issue=issue,
            source=request.data.get("source", "in-app"),
        )

        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
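
The handler only reads a few fields from the request; a representative body, with illustrative values:

    request_body = {
        "issue": {
            "name": "Bug: export button unresponsive",
            "description_html": "<p>Steps to reproduce...</p>",
            "priority": "high",  # must be one of: low, medium, high, urgent, none
        },
        "source": "in-app",
    }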
    def partial_update(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Get the project member
        project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
        # Only project members admins and created_by users can access this endpoint
        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
            return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)

        # Get issue data
        issue_data = request.data.pop("issue", False)

        if bool(issue_data):
            issue = Issue.objects.get(
                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
            )
            # Only allow guests and viewers to edit name and description
            if project_member.role <= 10:
                # viewers and guests since only viewers and guests
                issue_data = {
                    "name": issue_data.get("name", issue.name),
                    "description_html": issue_data.get("description_html", issue.description_html),
                    "description": issue_data.get("description", issue.description)
                }

            issue_serializer = IssueCreateSerializer(
                issue, data=issue_data, partial=True
            )

            if issue_serializer.is_valid():
                current_instance = issue
                # Log all the updates
                requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                if issue is not None:
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=requested_data,
                        actor_id=str(request.user.id),
                        issue_id=str(issue.id),
                        project_id=str(project_id),
                        current_instance=json.dumps(
                            IssueSerializer(current_instance).data,
                            cls=DjangoJSONEncoder,
                        ),
                        epoch=int(timezone.now().timestamp())
                    )
                issue_serializer.save()
            else:
                return Response(
                    issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
                )

        # Only project admins and members can edit inbox issue attributes
        if project_member.role > 10:
            serializer = InboxIssueSerializer(
                inbox_issue, data=request.data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                # Update the issue state if the issue is rejected or marked as duplicate
                if serializer.data["status"] in [-1, 2]:
                    issue = Issue.objects.get(
                        pk=inbox_issue.issue_id,
                        workspace__slug=slug,
                        project_id=project_id,
                    )
                    state = State.objects.filter(
                        group="cancelled", workspace__slug=slug, project_id=project_id
                    ).first()
                    if state is not None:
                        issue.state = state
                        issue.save()

                # Update the issue state if it is accepted
                if serializer.data["status"] in [1]:
                    issue = Issue.objects.get(
                        pk=inbox_issue.issue_id,
                        workspace__slug=slug,
                        project_id=project_id,
                    )

                    # Update the issue state only if it is in triage state
                    if issue.state.name == "Triage":
                        # Move to default state
                        state = State.objects.filter(
                            workspace__slug=slug, project_id=project_id, default=True
                        ).first()
                        if state is not None:
                            issue.state = state
                            issue.save()

                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
    def retrieve(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def destroy(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Get the project member
        project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)

        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
            return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)

        # Check the issue status
        if inbox_issue.status in [-2, -1, 0, 2]:
            # Delete the issue also
            Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()

        inbox_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class InboxIssuePublicViewSet(BaseViewSet):

@@ -413,242 +364,197 @@ class InboxIssuePublicViewSet(BaseViewSet):
            return InboxIssue.objects.none()
    def list(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
        if project_deploy_board.inbox is None:
            return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.objects.filter(
                issue_inbox__inbox_id=inbox_id,
                workspace__slug=slug,
                project_id=project_id,
            )
            .filter(**filters)
            .annotate(bridge_id=F("issue_inbox__id"))
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels")
            .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(
                    issue=OuterRef("id")
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .prefetch_related(
                Prefetch(
                    "issue_inbox",
                    queryset=InboxIssue.objects.only(
                        "status", "duplicate_to", "snoozed_till", "source"
                    ),
                )
            )
        )
        issues_data = IssueStateInboxSerializer(issues, many=True).data
        return Response(
            issues_data,
            status=status.HTTP_200_OK,
        )
    def create(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
        if project_deploy_board.inbox is None:
            return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

        if not request.data.get("issue", {}).get("name", False):
            return Response(
                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Check for valid priority
        if not request.data.get("issue", {}).get("priority", "none") in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response(
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create or get state
        state, _ = State.objects.get_or_create(
            name="Triage",
            group="backlog",
            description="Default state for managing all Inbox Issues",
            project_id=project_id,
            color="#ff7700",
        )

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get(
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "low"),
            project_id=project_id,
            state=state,
        )

        # Create an Issue Activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp())
        )

        # create an inbox issue
        InboxIssue.objects.create(
            inbox_id=inbox_id,
            project_id=project_id,
            issue=issue,
            source=request.data.get("source", "in-app"),
        )

        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def partial_update(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
        if project_deploy_board.inbox is None:
            return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Get the project member
        if str(inbox_issue.created_by_id) != str(request.user.id):
            return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)

        # Get issue data
        issue_data = request.data.pop("issue", False)

        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        # viewers and guests since only viewers and guests
        issue_data = {
            "name": issue_data.get("name", issue.name),
            "description_html": issue_data.get("description_html", issue.description_html),
            "description": issue_data.get("description", issue.description)
        }

        issue_serializer = IssueCreateSerializer(
            issue, data=issue_data, partial=True
        )

        if issue_serializer.is_valid():
            current_instance = issue
            # Log all the updates
            requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
            if issue is not None:
                issue_activity.delay(
                    type="issue.activity.updated",
                    requested_data=requested_data,
                    actor_id=str(request.user.id),
                    issue_id=str(issue.id),
                    project_id=str(project_id),
                    current_instance=json.dumps(
                        IssueSerializer(current_instance).data,
                        cls=DjangoJSONEncoder,
                    ),
                    epoch=int(timezone.now().timestamp())
                )
            issue_serializer.save()
            return Response(issue_serializer.data, status=status.HTTP_200_OK)
        return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def retrieve(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
        if project_deploy_board.inbox is None:
            return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def destroy(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
        if project_deploy_board.inbox is None:
            return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        if str(inbox_issue.created_by_id) != str(request.user.id):
            return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)

        inbox_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
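With the per-view try/except blocks removed, lookups such as InboxIssue.objects.get(...) and ProjectDeployBoard.objects.get(...) now surface their exceptions to a project-wide DRF exception handler instead of being caught locally. A minimal sketch of such a handler, assuming an illustrative function name and module (the handler itself is not part of this hunk):

from django.core.exceptions import ObjectDoesNotExist, ValidationError

from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler


def custom_exception_handler(exc, context):
    # Let DRF build responses for its own exceptions first (NotFound, PermissionDenied, ...).
    response = exception_handler(exc, context)
    if response is not None:
        return response

    # Model lookups such as InboxIssue.objects.get(...) raise <Model>.DoesNotExist,
    # which subclasses ObjectDoesNotExist.
    if isinstance(exc, ObjectDoesNotExist):
        return Response(
            {"error": "The requested resource does not exist."},
            status=status.HTTP_404_NOT_FOUND,
        )

    if isinstance(exc, ValidationError):
        return Response({"error": str(exc)}, status=status.HTTP_400_BAD_REQUEST)

    # Anything unexpected becomes an explicit 500 instead of a masked 400.
    return Response(
        {"error": "Something went wrong. Please try again later."},
        status=status.HTTP_500_INTERNAL_SERVER_ERROR,
    )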

View File

@@ -2,7 +2,6 @@
import uuid

# Django imports
from django.db import IntegrityError
from django.contrib.auth.hashers import make_password

# Third party imports
@@ -33,66 +32,40 @@ class IntegrationViewSet(BaseViewSet):
    model = Integration

    def create(self, request):
        serializer = IntegrationSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = IntegrationSerializer(
            integration, data=request.data, partial=True
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class WorkspaceIntegrationViewSet(BaseViewSet):
    serializer_class = WorkspaceIntegrationSerializer
@@ -111,119 +84,81 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
    )

    def create(self, request, slug, provider):
        workspace = Workspace.objects.get(slug=slug)
        integration = Integration.objects.get(provider=provider)
        config = {}
        if provider == "github":
            installation_id = request.data.get("installation_id", None)
            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            metadata = get_github_metadata(installation_id)
            config = {"installation_id": installation_id}

        if provider == "slack":
            metadata = request.data.get("metadata", {})
            access_token = metadata.get("access_token", False)
            team_id = metadata.get("team", {}).get("id", False)
            if not metadata or not access_token or not team_id:
                return Response(
                    {"error": "Access token and team id is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            config = {"team_id": team_id, "access_token": access_token}

        # Create a bot user
        bot_user = User.objects.create(
            email=f"{uuid.uuid4().hex}@plane.so",
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
            is_bot=True,
            first_name=integration.title,
            avatar=integration.avatar_url
            if integration.avatar_url is not None
            else "",
        )

        # Create an API Token for the bot user
        api_token = APIToken.objects.create(
            user=bot_user,
            user_type=1,  # bot user
            workspace=workspace,
        )

        workspace_integration = WorkspaceIntegration.objects.create(
            workspace=workspace,
            integration=integration,
            actor=bot_user,
            api_token=api_token,
            metadata=metadata,
            config=config,
        )

        # Add bot user as a member of workspace
        _ = WorkspaceMember.objects.create(
            workspace=workspace_integration.workspace,
            member=bot_user,
            role=20,
        )
        return Response(
            WorkspaceIntegrationSerializer(workspace_integration).data,
            status=status.HTTP_201_CREATED,
        )
    def destroy(self, request, slug, pk):
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=pk, workspace__slug=slug
        )

        if workspace_integration.integration.provider == "github":
            installation_id = workspace_integration.config.get(
                "installation_id", False
            )
            if installation_id:
                delete_github_installation(installation_id=installation_id)

        workspace_integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
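DRF only invokes a handler like the sketch above if it is registered in settings. The wiring is a single key; the dotted path below is illustrative, since this commit does not show the settings module:

# settings.py
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "plane.api.exceptions.custom_exception_handler",
}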

View File

@@ -30,31 +30,25 @@ class GithubRepositoriesEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, workspace_integration_id):
        page = request.GET.get("page", 1)
        workspace_integration = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )

        if workspace_integration.integration.provider != "github":
            return Response(
                {"error": "Not a github integration"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        access_tokens_url = workspace_integration.metadata["access_tokens_url"]
        repositories_url = (
            workspace_integration.metadata["repositories_url"]
            + f"?per_page=100&page={page}"
        )
        repositories = get_github_repos(access_tokens_url, repositories_url)
        return Response(repositories, status=status.HTTP_200_OK)
class GithubRepositorySyncViewSet(BaseViewSet):
    permission_classes = [
@@ -76,89 +70,76 @@ class GithubRepositorySyncViewSet(BaseViewSet):
    )

    def create(self, request, slug, project_id, workspace_integration_id):
        name = request.data.get("name", False)
        url = request.data.get("url", False)
        config = request.data.get("config", {})
        repository_id = request.data.get("repository_id", False)
        owner = request.data.get("owner", False)

        if not name or not url or not repository_id or not owner:
            return Response(
                {"error": "Name, url, repository_id and owner are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace integration
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=workspace_integration_id
        )

        # Delete the old repository object
        GithubRepositorySync.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        GithubRepository.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()

        # Create repository
        repo = GithubRepository.objects.create(
            name=name,
            url=url,
            config=config,
            repository_id=repository_id,
            owner=owner,
            project_id=project_id,
        )

        # Create a Label for github
        label = Label.objects.filter(
            name="GitHub",
            project_id=project_id,
        ).first()

        if label is None:
            label = Label.objects.create(
                name="GitHub",
                project_id=project_id,
                description="Label to sync Plane issues with GitHub issues",
                color="#003773",
            )

        # Create repo sync
        repo_sync = GithubRepositorySync.objects.create(
            repository=repo,
            workspace_integration=workspace_integration,
            actor=workspace_integration.actor,
            credentials=request.data.get("credentials", {}),
            project_id=project_id,
            label=label,
        )

        # Add bot as a member in the project
        _ = ProjectMember.objects.get_or_create(
            member=workspace_integration.actor, role=20, project_id=project_id
        )

        # Return Response
        return Response(
            GithubRepositorySyncSerializer(repo_sync).data,
            status=status.HTTP_201_CREATED,
        )
class GithubIssueSyncViewSet(BaseViewSet):
    permission_classes = [
@@ -177,42 +158,30 @@ class GithubIssueSyncViewSet(BaseViewSet):
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, repo_sync_id):
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        github_issue_syncs = request.data.get("github_issue_syncs", [])
        github_issue_syncs = GithubIssueSync.objects.bulk_create(
            [
                GithubIssueSync(
                    issue_id=github_issue_sync.get("issue"),
                    repo_issue_id=github_issue_sync.get("repo_issue_id"),
                    issue_url=github_issue_sync.get("issue_url"),
                    github_issue_id=github_issue_sync.get("github_issue_id"),
                    repository_sync_id=repo_sync_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for github_issue_sync in github_issue_syncs
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
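The endpoint above leans on bulk_create(..., ignore_conflicts=True) so that re-posting the same sync payload skips rows that already exist instead of aborting with an IntegrityError. A small, self-contained sketch of the same idiom on a stand-in model (the model and app label are illustrative, not part of this commit):

from django.db import models


class ExampleSync(models.Model):
    external_id = models.CharField(max_length=64, unique=True)

    class Meta:
        app_label = "example"  # stand-in app label


def bulk_insert_ignoring_duplicates(external_ids):
    # Rows that violate the unique constraint are silently skipped; note that with
    # ignore_conflicts=True Django does not set primary keys on the returned objects
    # on most backends.
    return ExampleSync.objects.bulk_create(
        [ExampleSync(external_id=eid) for eid in external_ids],
        batch_size=100,
        ignore_conflicts=True,
    )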
class GithubCommentSyncViewSet(BaseViewSet):

View File

@@ -32,42 +32,25 @@ class SlackProjectSyncViewSet(BaseViewSet):
    )

    def create(self, request, slug, project_id, workspace_integration_id):
        serializer = SlackProjectSyncSerializer(data=request.data)

        workspace_integration = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )

        if serializer.is_valid():
            serializer.save(
                project_id=project_id,
                workspace_integration_id=workspace_integration_id,
            )

            workspace_integration = WorkspaceIntegration.objects.get(
                pk=workspace_integration_id, workspace__slug=slug
            )

            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor, role=20, project_id=project_id
            )

            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

File diff suppressed because it is too large

View File

@@ -142,180 +142,143 @@ class ModuleViewSet(BaseViewSet):
    )

    def create(self, request, slug, project_id):
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        serializer = ModuleWriteSerializer(
            data=request.data, context={"project": project}
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def retrieve(self, request, slug, project_id, pk):
        queryset = self.get_queryset().get(pk=pk)

        assignee_distribution = (
            Issue.objects.filter(
                issue_module__module_id=pk,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(first_name=F("assignees__first_name"))
            .annotate(last_name=F("assignees__last_name"))
            .annotate(assignee_id=F("assignees__id"))
            .annotate(display_name=F("assignees__display_name"))
            .annotate(avatar=F("assignees__avatar"))
            .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
            .annotate(
                total_issues=Count(
                    "assignee_id",
                    filter=Q(
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                completed_issues=Count(
                    "assignee_id",
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_issues=Count(
                    "assignee_id",
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("first_name", "last_name")
        )

        label_distribution = (
            Issue.objects.filter(
                issue_module__module_id=pk,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(label_name=F("labels__name"))
            .annotate(color=F("labels__color"))
            .annotate(label_id=F("labels__id"))
            .values("label_name", "color", "label_id")
            .annotate(
                total_issues=Count(
                    "label_id",
                    filter=Q(
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                ),
            )
            .annotate(
                completed_issues=Count(
                    "label_id",
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_issues=Count(
                    "label_id",
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("label_name")
        )

        data = ModuleSerializer(queryset).data
        data["distribution"] = {
            "assignees": assignee_distribution,
            "labels": label_distribution,
            "completion_chart": {},
        }

        if queryset.start_date and queryset.target_date:
            data["distribution"]["completion_chart"] = burndown_plot(
                queryset=queryset, slug=slug, project_id=project_id, module_id=pk
            )

        return Response(
            data,
            status=status.HTTP_200_OK,
        )
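The assignee and label distributions above rely on conditional aggregation: each Count(..., filter=Q(...)) restricts one bucket, so the total, completed, and pending figures come back from a single GROUP BY query. A minimal sketch of the idiom against a generic issue queryset (the function name is illustrative):

from django.db.models import Count, Q


def per_assignee_progress(issue_queryset):
    # One grouped query; each Count only counts rows matching its filter condition.
    return issue_queryset.values("assignees__id").annotate(
        total_issues=Count("id", filter=Q(archived_at__isnull=True)),
        completed_issues=Count(
            "id", filter=Q(completed_at__isnull=False, archived_at__isnull=True)
        ),
        pending_issues=Count(
            "id", filter=Q(completed_at__isnull=True, archived_at__isnull=True)
        ),
    )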
    def destroy(self, request, slug, project_id, pk):
        module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
        module_issues = list(
            ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
        )
        module.delete()
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(pk),
                    "issues": [str(issue_id) for issue_id in module_issues],
                }
            ),
            actor_id=str(request.user.id),
            issue_id=str(pk),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleIssueViewSet(BaseViewSet):
@@ -337,7 +300,6 @@ class ModuleIssueViewSet(BaseViewSet):
            module_id=self.kwargs.get("module_id"),
        )

    def get_queryset(self):
        return self.filter_queryset(
            super()
@@ -363,190 +325,163 @@ class ModuleIssueViewSet(BaseViewSet):
    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, module_id):
        order_by = request.GET.get("order_by", "created_at")
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.issue_objects.filter(issue_module__module_id=module_id)
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(bridge_id=F("issue_module__id"))
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .order_by(order_by)
            .filter(**filters)
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )

        issues_data = IssueStateSerializer(issues, many=True).data

        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            return Response(
                group_results(issues_data, group_by, sub_group_by),
                status=status.HTTP_200_OK,
            )

        return Response(
            issues_data,
            status=status.HTTP_200_OK,
        )
    def create(self, request, slug, project_id, module_id):
        issues = request.data.get("issues", [])
        if not len(issues):
            return Response(
                {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
            )
        module = Module.objects.get(
            workspace__slug=slug, project_id=project_id, pk=module_id
        )

        module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))

        update_module_issue_activity = []
        records_to_update = []
        record_to_create = []

        for issue in issues:
            module_issue = [
                module_issue
                for module_issue in module_issues
                if str(module_issue.issue_id) in issues
            ]

            if len(module_issue):
                if module_issue[0].module_id != module_id:
                    update_module_issue_activity.append(
                        {
                            "old_module_id": str(module_issue[0].module_id),
                            "new_module_id": str(module_id),
                            "issue_id": str(module_issue[0].issue_id),
                        }
                    )
                    module_issue[0].module_id = module_id
                    records_to_update.append(module_issue[0])
            else:
                record_to_create.append(
                    ModuleIssue(
                        module=module,
                        issue_id=issue,
                        project_id=project_id,
                        workspace=module.workspace,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                )

        ModuleIssue.objects.bulk_create(
            record_to_create,
            batch_size=10,
            ignore_conflicts=True,
        )

        ModuleIssue.objects.bulk_update(
            records_to_update,
            ["module"],
            batch_size=10,
        )

        # Capture Issue Activity
        issue_activity.delay(
            type="module.activity.created",
            requested_data=json.dumps({"modules_list": issues}),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=json.dumps(
                {
                    "updated_module_issues": update_module_issue_activity,
                    "created_module_issues": serializers.serialize(
                        "json", record_to_create
                    ),
                }
            ),
            epoch=int(timezone.now().timestamp()),
        )

        return Response(
            ModuleIssueSerializer(self.get_queryset(), many=True).data,
            status=status.HTTP_200_OK,
        )
    def destroy(self, request, slug, project_id, module_id, pk):
        module_issue = ModuleIssue.objects.get(
            workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
        )
        module_issue.delete()
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(module_id),
                    "issues": [str(module_issue.issue_id)],
                }
            ),
            actor_id=str(request.user.id),
            issue_id=str(pk),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleLinkViewSet(BaseViewSet):
@@ -590,49 +525,18 @@ class ModuleFavoriteViewSet(BaseViewSet):
    )
    def create(self, request, slug, project_id):
        serializer = ModuleFavoriteSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=request.user, project_id=project_id)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def destroy(self, request, slug, project_id, module_id):
        module_favorite = ModuleFavorite.objects.get(
            project=project_id,
            user=request.user,
            workspace__slug=slug,
            module_id=module_id,
        )
        module_favorite.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -36,320 +36,239 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
    )

    def list(self, request, slug):
        # Get query parameters
        snoozed = request.GET.get("snoozed", "false")
        archived = request.GET.get("archived", "false")
        read = request.GET.get("read", "true")
        type = request.GET.get("type", "all")

        notifications = (
            Notification.objects.filter(
                workspace__slug=slug, receiver_id=request.user.id
            )
            .select_related("workspace", "project", "triggered_by", "receiver")
            .order_by("snoozed_till", "-created_at")
        )

        # Filters based on query parameters
        snoozed_filters = {
            "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
            "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
        }

        notifications = notifications.filter(snoozed_filters[snoozed])

        archived_filters = {
            "true": Q(archived_at__isnull=False),
            "false": Q(archived_at__isnull=True),
        }

        notifications = notifications.filter(archived_filters[archived])

        if read == "false":
            notifications = notifications.filter(read_at__isnull=True)

        # Subscribed issues
        if type == "watching":
            issue_ids = IssueSubscriber.objects.filter(
                workspace__slug=slug, subscriber_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Assigned Issues
        if type == "assigned":
            issue_ids = IssueAssignee.objects.filter(
                workspace__slug=slug, assignee_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Created issues
        if type == "created":
            if WorkspaceMember.objects.filter(
                workspace__slug=slug, member=request.user, role__lt=15
            ).exists():
                notifications = Notification.objects.none()
            else:
                issue_ids = Issue.objects.filter(
                    workspace__slug=slug, created_by=request.user
                ).values_list("pk", flat=True)
                notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Pagination
        if request.GET.get("per_page", False) and request.GET.get("cursor", False):
            return self.paginate(
                request=request,
                queryset=(notifications),
                on_results=lambda notifications: NotificationSerializer(
                    notifications, many=True
                ).data,
            )

        serializer = NotificationSerializer(notifications, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
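The list view above is driven entirely by query parameters (snoozed, archived, read, type, plus per_page and cursor for pagination). A hedged example of calling it with the requests library; the URL path, token, and cursor value are placeholders, since the URL configuration is not shown in this diff:

import requests

response = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/users/notifications/",  # placeholder path
    headers={"Authorization": "Bearer <api-token>"},  # placeholder credential
    params={
        "snoozed": "false",   # hide currently snoozed notifications
        "archived": "false",  # exclude archived notifications
        "read": "false",      # only unread
        "type": "assigned",   # limit to issues assigned to the caller
        "per_page": 25,       # together with "cursor", takes the paginated branch
        "cursor": "<cursor>",  # placeholder cursor token
    },
)
response.raise_for_status()
print(response.json())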
    def partial_update(self, request, slug, pk):
        notification = Notification.objects.get(
            workspace__slug=slug, pk=pk, receiver=request.user
        )
        # Only read_at and snoozed_till can be updated
        notification_data = {
            "snoozed_till": request.data.get("snoozed_till", None),
        }
        serializer = NotificationSerializer(
            notification, data=notification_data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def mark_read(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.read_at = timezone.now()
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def mark_unread(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.read_at = None
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def archive(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.archived_at = timezone.now()
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def unarchive(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.archived_at = None
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
class UnreadNotificationEndpoint(BaseAPIView):
    def get(self, request, slug):
        # Watching Issues Count
        watching_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=IssueSubscriber.objects.filter(
                workspace__slug=slug, subscriber_id=request.user.id
            ).values_list("issue_id", flat=True),
        ).count()

        # My Issues Count
        my_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=IssueAssignee.objects.filter(
                workspace__slug=slug, assignee_id=request.user.id
            ).values_list("issue_id", flat=True),
        ).count()

        # Created Issues Count
        created_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=Issue.objects.filter(
                workspace__slug=slug, created_by=request.user
            ).values_list("pk", flat=True),
        ).count()

        return Response(
            {
                "watching_issues": watching_issues_count,
                "my_issues": my_issues_count,
                "created_issues": created_issues_count,
            },
            status=status.HTTP_200_OK,
        )
class MarkAllReadNotificationViewSet(BaseViewSet): class MarkAllReadNotificationViewSet(BaseViewSet):
def create(self, request, slug): def create(self, request, slug):
try: snoozed = request.data.get("snoozed", False)
snoozed = request.data.get("snoozed", False) archived = request.data.get("archived", False)
archived = request.data.get("archived", False) type = request.data.get("type", "all")
type = request.data.get("type", "all")
notifications = ( notifications = (
Notification.objects.filter( Notification.objects.filter(
workspace__slug=slug, workspace__slug=slug,
receiver_id=request.user.id, receiver_id=request.user.id,
read_at__isnull=True, read_at__isnull=True,
) )
.select_related("workspace", "project", "triggered_by", "receiver") .select_related("workspace", "project", "triggered_by", "receiver")
.order_by("snoozed_till", "-created_at") .order_by("snoozed_till", "-created_at")
)
# Filter for snoozed notifications
if snoozed:
notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
)
else:
notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
) )
# Filter for snoozed notifications # Filter for archived or unarchive
if snoozed: if archived:
notifications = notifications.filter( notifications = notifications.filter(archived_at__isnull=False)
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) else:
) notifications = notifications.filter(archived_at__isnull=True)
else:
notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
)
# Filter for archived or unarchive # Subscribed issues
if archived: if type == "watching":
notifications = notifications.filter(archived_at__isnull=False) issue_ids = IssueSubscriber.objects.filter(
else: workspace__slug=slug, subscriber_id=request.user.id
notifications = notifications.filter(archived_at__isnull=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Subscribed issues # Assigned Issues
if type == "watching": if type == "assigned":
issue_ids = IssueSubscriber.objects.filter( issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Created issues
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(entity_identifier__in=issue_ids)
# Assigned Issues updated_notifications = []
if type == "assigned": for notification in notifications:
issue_ids = IssueAssignee.objects.filter( notification.read_at = timezone.now()
workspace__slug=slug, assignee_id=request.user.id updated_notifications.append(notification)
).values_list("issue_id", flat=True) Notification.objects.bulk_update(
notifications = notifications.filter(entity_identifier__in=issue_ids) updated_notifications, ["read_at"], batch_size=100
)
# Created issues return Response({"message": "Successful"}, status=status.HTTP_200_OK)
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(
entity_identifier__in=issue_ids
)
updated_notifications = []
for notification in notifications:
notification.read_at = timezone.now()
updated_notifications.append(notification)
Notification.objects.bulk_update(
updated_notifications, ["read_at"], batch_size=100
)
return Response({"message": "Successful"}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
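Note: the try/except blocks removed above are not replaced inside the views; unhandled exceptions now propagate to the global exception handler introduced by this commit, which is defined in files outside this excerpt. As a rough, illustrative sketch only — the module path, function name, and exact status mapping below are assumptions, not the handler shipped in this commit — a DRF-style global handler could look like this:

# plane/api/exception_handlers.py (hypothetical path, for illustration only)
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from sentry_sdk import capture_exception


def global_exception_handler(exc, context):
    # First let DRF handle the exceptions it already understands
    # (ValidationError, NotAuthenticated, PermissionDenied, Http404, ...).
    response = exception_handler(exc, context)
    if response is not None:
        return response

    # Model lookups that each view used to guard individually
    # (Notification.DoesNotExist, Page.DoesNotExist, ...) all inherit
    # from ObjectDoesNotExist, so they can be mapped to 404 in one place.
    if isinstance(exc, ObjectDoesNotExist):
        return Response(
            {"error": "The requested resource does not exist."},
            status=status.HTTP_404_NOT_FOUND,
        )

    # Anything unexpected is reported and surfaced as a 500 (per the
    # "added status 500 internal server error" commit note) instead of
    # the old blanket 400 "Something went wrong" responses.
    capture_exception(exc)
    return Response(
        {"error": "Something went wrong. Please try again later."},
        status=status.HTTP_500_INTERNAL_SERVER_ERROR,
    )

Such a handler would typically be wired up either through the REST_FRAMEWORK["EXCEPTION_HANDLER"] setting or an overridden handle_exception() on the shared base views; which mechanism this commit actually uses is not visible in this excerpt.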

View File

@@ -15,6 +15,7 @@ from rest_framework.views import APIView
 from rest_framework_simplejwt.tokens import RefreshToken
 from rest_framework import status
 from sentry_sdk import capture_exception
 # sso authentication
 from google.oauth2 import id_token
 from google.auth.transport import requests as google_auth_request
@@ -298,11 +299,3 @@ class OauthEndpoint(BaseAPIView):
                 },
             )
             return Response(data, status=status.HTTP_201_CREATED)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {
-                    "error": "Something went wrong. Please try again later or contact the support team."
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )

View File

@@ -2,7 +2,6 @@
 from datetime import timedelta, datetime, date

 # Django imports
-from django.db import IntegrityError
 from django.db.models import Exists, OuterRef, Q, Prefetch
 from django.utils import timezone
@@ -78,104 +77,82 @@ class PageViewSet(BaseViewSet):
         )

     def create(self, request, slug, project_id):
-        try:
             serializer = PageSerializer(
                 data=request.data,
                 context={"project_id": project_id, "owned_by_id": request.user.id},
             )

             if serializer.is_valid():
                 serializer.save()
                 return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def partial_update(self, request, slug, project_id, pk):
-        try:
             page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
             # Only update access if the page owner is the requesting user
             if (
                 page.access != request.data.get("access", page.access)
                 and page.owned_by_id != request.user.id
             ):
                 return Response(
                     {
                         "error": "Access cannot be updated since this page is owned by someone else"
                     },
                     status=status.HTTP_400_BAD_REQUEST,
                 )
             serializer = PageSerializer(page, data=request.data, partial=True)
             if serializer.is_valid():
                 serializer.save()
                 return Response(serializer.data, status=status.HTTP_200_OK)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except Page.DoesNotExist:
-            return Response(
-                {"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def list(self, request, slug, project_id):
-        try:
             queryset = self.get_queryset()
             page_view = request.GET.get("page_view", False)

             if not page_view:
                 return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)

             # All Pages
             if page_view == "all":
                 return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

             # Recent pages
             if page_view == "recent":
                 current_time = date.today()
                 day_before = current_time - timedelta(days=1)
                 todays_pages = queryset.filter(updated_at__date=date.today())
                 yesterdays_pages = queryset.filter(updated_at__date=day_before)
                 earlier_this_week = queryset.filter(updated_at__date__range=(
                     (timezone.now() - timedelta(days=7)),
                     (timezone.now() - timedelta(days=2)),
                 ))
                 return Response(
                     {
                         "today": PageSerializer(todays_pages, many=True).data,
                         "yesterday": PageSerializer(yesterdays_pages, many=True).data,
                         "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
                     },
                     status=status.HTTP_200_OK,
                 )

             # Favorite Pages
             if page_view == "favorite":
                 queryset = queryset.filter(is_favorite=True)
                 return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

             # My pages
             if page_view == "created_by_me":
                 queryset = queryset.filter(owned_by=request.user)
                 return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

             # Created by other Pages
             if page_view == "created_by_other":
                 queryset = queryset.filter(~Q(owned_by=request.user), access=0)
                 return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

             return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
-        except Exception as e:
-            capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)


 class PageBlockViewSet(BaseViewSet):
     serializer_class = PageBlockSerializer
@@ -225,53 +202,21 @@ class PageFavoriteViewSet(BaseViewSet):
         )

     def create(self, request, slug, project_id):
-        try:
             serializer = PageFavoriteSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save(user=request.user, project_id=project_id)
                 return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except IntegrityError as e:
-            if "already exists" in str(e):
-                return Response(
-                    {"error": "The page is already added to favorites"},
-                    status=status.HTTP_410_GONE,
-                )
-            else:
-                capture_exception(e)
-                return Response(
-                    {"error": "Something went wrong please try again later"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def destroy(self, request, slug, project_id, page_id):
-        try:
             page_favorite = PageFavorite.objects.get(
                 project=project_id,
                 user=request.user,
                 workspace__slug=slug,
                 page_id=page_id,
             )
             page_favorite.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
-        except PageFavorite.DoesNotExist:
-            return Response(
-                {"error": "Page is not in favorites"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class CreateIssueFromPageBlockEndpoint(BaseAPIView):
     permission_classes = [
@@ -279,43 +224,32 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
     ]

     def post(self, request, slug, project_id, page_id, page_block_id):
-        try:
             page_block = PageBlock.objects.get(
                 pk=page_block_id,
                 workspace__slug=slug,
                 project_id=project_id,
                 page_id=page_id,
             )
             issue = Issue.objects.create(
                 name=page_block.name,
                 project_id=project_id,
                 description=page_block.description,
                 description_html=page_block.description_html,
                 description_stripped=page_block.description_stripped,
             )
             _ = IssueAssignee.objects.create(
                 issue=issue, assignee=request.user, project_id=project_id
             )

             _ = IssueActivity.objects.create(
                 issue=issue,
                 actor=request.user,
                 project_id=project_id,
                 comment=f"created the issue from {page_block.name} block",
                 verb="created",
             )

             page_block.issue = issue
             page_block.save()

             return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
-        except PageBlock.DoesNotExist:
-            return Response(
-                {"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

File diff suppressed because it is too large

View File

@@ -168,126 +168,107 @@ class GlobalSearchEndpoint(BaseAPIView):
         )

     def get(self, request, slug):
-        try:
             query = request.query_params.get("search", False)
             workspace_search = request.query_params.get("workspace_search", "false")
             project_id = request.query_params.get("project_id", False)

             if not query:
                 return Response(
                     {
                         "results": {
                             "workspace": [],
                             "project": [],
                             "issue": [],
                             "cycle": [],
                             "module": [],
                             "issue_view": [],
                             "page": [],
                         }
                     },
                     status=status.HTTP_200_OK,
                 )

             MODELS_MAPPER = {
                 "workspace": self.filter_workspaces,
                 "project": self.filter_projects,
                 "issue": self.filter_issues,
                 "cycle": self.filter_cycles,
                 "module": self.filter_modules,
                 "issue_view": self.filter_views,
                 "page": self.filter_pages,
             }

             results = {}

             for model in MODELS_MAPPER.keys():
                 func = MODELS_MAPPER.get(model, None)
                 results[model] = func(query, slug, project_id, workspace_search)

             return Response({"results": results}, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class IssueSearchEndpoint(BaseAPIView):
     def get(self, request, slug, project_id):
-        try:
             query = request.query_params.get("search", False)
             workspace_search = request.query_params.get("workspace_search", "false")
             parent = request.query_params.get("parent", "false")
             issue_relation = request.query_params.get("issue_relation", "false")
             cycle = request.query_params.get("cycle", "false")
             module = request.query_params.get("module", "false")
             sub_issue = request.query_params.get("sub_issue", "false")
             issue_id = request.query_params.get("issue_id", False)

             issues = Issue.issue_objects.filter(
                 workspace__slug=slug,
                 project__project_projectmember__member=self.request.user,
             )

             if workspace_search == "false":
                 issues = issues.filter(project_id=project_id)

             if query:
                 issues = search_issues(query, issues)

             if parent == "true" and issue_id:
                 issue = Issue.issue_objects.get(pk=issue_id)
                 issues = issues.filter(
                     ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
                 ).exclude(
                     pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
                         "parent_id", flat=True
                     )
                 )

             if issue_relation == "true" and issue_id:
                 issue = Issue.issue_objects.get(pk=issue_id)
                 issues = issues.filter(
                     ~Q(pk=issue_id),
                     ~Q(issue_related__issue=issue),
                     ~Q(issue_relation__related_issue=issue),
                 )

             if sub_issue == "true" and issue_id:
                 issue = Issue.issue_objects.get(pk=issue_id)
                 issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
                 if issue.parent:
                     issues = issues.filter(~Q(pk=issue.parent_id))

             if cycle == "true":
                 issues = issues.exclude(issue_cycle__isnull=False)

             if module == "true":
                 issues = issues.exclude(issue_module__isnull=False)

             return Response(
                 issues.values(
                     "name",
                     "id",
                     "sequence_id",
                     "project__name",
                     "project__identifier",
                     "project_id",
                     "workspace__slug",
                     "state__name",
                     "state__group",
                     "state__color",
                 ),
                 status=status.HTTP_200_OK,
             )
-        except Issue.DoesNotExist:
-            return Response(
-                {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
-            )
-        except Exception as e:
-            print(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

View File

@@ -2,7 +2,6 @@
 from itertools import groupby

 # Django imports
-from django.db import IntegrityError
 from django.db.models import Q

 # Third party imports
@@ -41,67 +40,45 @@ class StateViewSet(BaseViewSet):
         )

     def create(self, request, slug, project_id):
-        try:
             serializer = StateSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save(project_id=project_id)
                 return Response(serializer.data, status=status.HTTP_200_OK)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except IntegrityError:
-            return Response(
-                {"error": "State with the name already exists"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def list(self, request, slug, project_id):
-        try:
             state_dict = dict()
             states = StateSerializer(self.get_queryset(), many=True).data

             for key, value in groupby(
                 sorted(states, key=lambda state: state["group"]),
                 lambda state: state.get("group"),
             ):
                 state_dict[str(key)] = list(value)
             return Response(state_dict, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def destroy(self, request, slug, project_id, pk):
-        try:
             state = State.objects.get(
                 ~Q(name="Triage"),
                 pk=pk, project_id=project_id, workspace__slug=slug,
             )
             if state.default:
                 return Response(
                     {"error": "Default state cannot be deleted"}, status=False
                 )

             # Check for any issues in the state
             issue_exist = Issue.issue_objects.filter(state=pk).exists()

             if issue_exist:
                 return Response(
                     {
                         "error": "The state is not empty, only empty states can be deleted"
                     },
                     status=status.HTTP_400_BAD_REQUEST,
                 )

             state.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
-        except State.DoesNotExist:
-            return Response({"error": "State does not exists"}, status=status.HTTP_404)

View File

@@ -32,82 +32,43 @@ class UserEndpoint(BaseViewSet):
         return self.request.user

     def retrieve(self, request):
-        try:
             serialized_data = UserMeSerializer(request.user).data
             return Response(
                 serialized_data,
                 status=status.HTTP_200_OK,
             )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def retrieve_user_settings(self, request):
-        try:
             serialized_data = UserMeSettingsSerializer(request.user).data
             return Response(serialized_data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class UpdateUserOnBoardedEndpoint(BaseAPIView):
     def patch(self, request):
-        try:
             user = User.objects.get(pk=request.user.id)
             user.is_onboarded = request.data.get("is_onboarded", False)
             user.save()
             return Response(
                 {"message": "Updated successfully"}, status=status.HTTP_200_OK
             )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class UpdateUserTourCompletedEndpoint(BaseAPIView):
     def patch(self, request):
-        try:
             user = User.objects.get(pk=request.user.id)
             user.is_tour_completed = request.data.get("is_tour_completed", False)
             user.save()
             return Response(
                 {"message": "Updated successfully"}, status=status.HTTP_200_OK
             )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class UserActivityEndpoint(BaseAPIView, BasePaginator):
     def get(self, request, slug):
-        try:
             queryset = IssueActivity.objects.filter(
                 actor=request.user, workspace__slug=slug
             ).select_related("actor", "workspace", "issue", "project")

             return self.paginate(
                 request=request,
                 queryset=queryset,
                 on_results=lambda issue_activities: IssueActivitySerializer(
                     issue_activities, many=True
                 ).data,
             )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

View File

@@ -13,7 +13,6 @@ from django.db.models import (
 )
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
 from django.db.models import Prefetch, OuterRef, Exists

 # Third party imports
@@ -97,120 +96,112 @@ class GlobalViewIssuesViewSet(BaseViewSet):
     @method_decorator(gzip_page)
     def list(self, request, slug):
-        try:
             filters = issue_filters(request.query_params, "GET")

             # Custom ordering for priority and state
             priority_order = ["urgent", "high", "medium", "low", "none"]
             state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

             order_by_param = request.GET.get("order_by", "-created_at")

             issue_queryset = (
                 self.get_queryset()
                 .filter(**filters)
                 .filter(project__project_projectmember__member=self.request.user)
                 .annotate(cycle_id=F("issue_cycle__cycle_id"))
                 .annotate(module_id=F("issue_module__module_id"))
                 .annotate(
                     link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                     .order_by()
                     .annotate(count=Func(F("id"), function="Count"))
                     .values("count")
                 )
                 .annotate(
                     attachment_count=IssueAttachment.objects.filter(
                         issue=OuterRef("id")
                     )
                     .order_by()
                     .annotate(count=Func(F("id"), function="Count"))
                     .values("count")
                 )
             )

             # Priority Ordering
             if order_by_param == "priority" or order_by_param == "-priority":
                 priority_order = (
                     priority_order
                     if order_by_param == "priority"
                     else priority_order[::-1]
                 )
                 issue_queryset = issue_queryset.annotate(
                     priority_order=Case(
                         *[
                             When(priority=p, then=Value(i))
                             for i, p in enumerate(priority_order)
                         ],
                         output_field=CharField(),
                     )
                 ).order_by("priority_order")

             # State Ordering
             elif order_by_param in [
                 "state__name",
                 "state__group",
                 "-state__name",
                 "-state__group",
             ]:
                 state_order = (
                     state_order
                     if order_by_param in ["state__name", "state__group"]
                     else state_order[::-1]
                 )
                 issue_queryset = issue_queryset.annotate(
                     state_order=Case(
                         *[
                             When(state__group=state_group, then=Value(i))
                             for i, state_group in enumerate(state_order)
                         ],
                         default=Value(len(state_order)),
                         output_field=CharField(),
                     )
                 ).order_by("state_order")

             # assignee and label ordering
             elif order_by_param in [
                 "labels__name",
                 "-labels__name",
                 "assignees__first_name",
                 "-assignees__first_name",
             ]:
                 issue_queryset = issue_queryset.annotate(
                     max_values=Max(
                         order_by_param[1::]
                         if order_by_param.startswith("-")
                         else order_by_param
                     )
                 ).order_by(
                     "-max_values" if order_by_param.startswith("-") else "max_values"
                 )
             else:
                 issue_queryset = issue_queryset.order_by(order_by_param)

             issues = IssueLiteSerializer(issue_queryset, many=True).data

             ## Grouping the results
             group_by = request.GET.get("group_by", False)
             sub_group_by = request.GET.get("sub_group_by", False)
             if sub_group_by and sub_group_by == group_by:
                 return Response(
                     {"error": "Group by and sub group by cannot be same"},
                     status=status.HTTP_400_BAD_REQUEST,
                 )

             if group_by:
                 return Response(
                     group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
                 )

             return Response(issues, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )


 class IssueViewViewSet(BaseViewSet):
     serializer_class = IssueViewSerializer
@@ -257,49 +248,18 @@ class IssueViewFavoriteViewSet(BaseViewSet):
         )

     def create(self, request, slug, project_id):
-        try:
             serializer = IssueViewFavoriteSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save(user=request.user, project_id=project_id)
                 return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except IntegrityError as e:
-            if "already exists" in str(e):
-                return Response(
-                    {"error": "The view is already added to favorites"},
-                    status=status.HTTP_410_GONE,
-                )
-            else:
-                capture_exception(e)
-                return Response(
-                    {"error": "Something went wrong please try again later"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

     def destroy(self, request, slug, project_id, view_id):
-        try:
             view_favourite = IssueViewFavorite.objects.get(
                 project=project_id,
                 user=request.user,
                 workspace__slug=slug,
                 view_id=view_id,
             )
             view_favourite.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
-        except IssueViewFavorite.DoesNotExist:
-            return Response(
-                {"error": "View is not in favorites"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

File diff suppressed because it is too large