chore: api endpoints (#2407)

* refactor: folder structure for urls

* chore: deleted the urls file

* chore: proper naming for urls

* chore: reset password url

* dev: create refresh token endpoint and endpoint to get settings for user

* dev: workspace member me serializer

* dev: remove extra fields from project list and retrieve endpoints

* dev: update the project list endpoint with member details and deploy boolean

* dev: enable user favorite project endpoint and remove is_favorite from project list

* dev: analytics refactoring

* dev: revert is_favorite settings

* dev: create new serializer for project list and add pagination for projects

* dev: fix analytics api

* dev: module and cycle

* dev: update error message, fix module analytics and add null check for labels

* dev: member serializer

* dev: dynamic base serializer

* dev: remove view issues endpoint

* dev: url pattern updates

* dev: add comments to delete this file

* dev: last workspace id

* dev: analytics export

* dev: export analytics validation

* dev: update python runtime

* dev: update notification endpoints

* dev: cycle and validation fix

* dev: issue activity validation when creating, updating, and deleting issues and comments

* dev: update issue activity logging for link and reactions

* dev: update module issue activity logging

* dev: update module issue activity

---------

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
Authored by Nikhil on 2023-10-16 19:18:45 +05:30, committed by GitHub
parent cb80c98413
commit 1fc5d2bd45
47 changed files with 3461 additions and 925 deletions

View File

@@ -1,5 +1,13 @@
from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
+from .user import (
+    UserSerializer,
+    UserLiteSerializer,
+    ChangePasswordSerializer,
+    ResetPasswordSerializer,
+    UserAdminLiteSerializer,
+    UserMeSerializer,
+    UserMeSettingsSerializer,
+)
from .workspace import (
    WorkSpaceSerializer,
    WorkSpaceMemberSerializer,
@@ -8,9 +16,11 @@ from .workspace import (
    WorkspaceLiteSerializer,
    WorkspaceThemeSerializer,
    WorkspaceMemberAdminSerializer,
+    WorkspaceMemberMeSerializer,
)
from .project import (
    ProjectSerializer,
+    ProjectListSerializer,
    ProjectDetailSerializer,
    ProjectMemberSerializer,
    ProjectMemberInviteSerializer,
@@ -20,11 +30,16 @@ from .project import (
    ProjectMemberLiteSerializer,
    ProjectDeployBoardSerializer,
    ProjectMemberAdminSerializer,
-    ProjectPublicMemberSerializer
+    ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
+from .cycle import (
+    CycleSerializer,
+    CycleIssueSerializer,
+    CycleFavoriteSerializer,
+    CycleWriteSerializer,
+)
from .asset import FileAssetSerializer
from .issue import (
    IssueCreateSerializer,

View File

@@ -3,3 +3,56 @@ from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
    id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
def __init__(self, *args, **kwargs):
# If 'fields' is provided in the arguments, remove it and store it separately.
# This is done so as not to pass this custom argument up to the superclass.
fields = kwargs.pop("fields", None)
# Call the initialization of the superclass.
super().__init__(*args, **kwargs)
# If 'fields' was provided, filter the fields of the serializer accordingly.
if fields is not None:
self.fields = self._filter_fields(fields)
def _filter_fields(self, fields):
"""
Adjust the serializer's fields based on the provided 'fields' list.
:param fields: List or dictionary specifying which fields to include in the serializer.
:return: The updated fields for the serializer.
"""
# Check each field_name in the provided fields.
for field_name in fields:
# If the field is a dictionary (indicating nested fields),
# loop through its keys and values.
if isinstance(field_name, dict):
for key, value in field_name.items():
# If the value of this nested field is a list,
# perform a recursive filter on it.
if isinstance(value, list):
self._filter_fields(self.fields[key], value)
# Create a list to store allowed fields.
allowed = []
for item in fields:
# If the item is a string, it directly represents a field's name.
if isinstance(item, str):
allowed.append(item)
# If the item is a dictionary, it represents a nested field.
# Add the key of this dictionary to the allowed list.
elif isinstance(item, dict):
allowed.append(list(item.keys())[0])
# Convert the current serializer's fields and the allowed fields to sets.
existing = set(self.fields)
allowed = set(allowed)
# Remove fields from the serializer that aren't in the 'allowed' list.
for field_name in (existing - allowed):
self.fields.pop(field_name)
return self.fields
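
A minimal usage sketch of the new "fields" kwarg, shown here through ProjectListSerializer (which subclasses DynamicBaseSerializer later in this commit). The helper name and the annotated queryset are assumptions for illustration, not part of the diff:

# Hypothetical usage of DynamicBaseSerializer's "fields" kwarg.
from plane.api.serializers import ProjectListSerializer

def serialize_projects(projects):
    # Only the listed top-level fields survive _filter_fields; every other field is popped.
    serializer = ProjectListSerializer(
        projects,  # assumed: a Project queryset annotated as in the project list view
        many=True,
        fields=["id", "name", "identifier", "members", "is_favorite"],
    )
    return serializer.data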

View File

@@ -12,10 +12,14 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite
class CycleWriteSerializer(BaseSerializer):
    def validate(self, data):
-        if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+        if (
+            data.get("start_date", None) is not None
+            and data.get("end_date", None) is not None
+            and data.get("start_date", None) > data.get("end_date", None)
+        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data
@@ -41,7 +45,11 @@ class CycleSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    def validate(self, data):
-        if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+        if (
+            data.get("start_date", None) is not None
+            and data.get("end_date", None) is not None
+            and data.get("start_date", None) > data.get("end_date", None)
+        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data
@@ -52,7 +60,9 @@ class CycleSerializer(BaseSerializer):
                "display_name": assignee.display_name,
                "id": assignee.id,
            }
-            for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
+            for issue_cycle in obj.issue_cycle.prefetch_related(
+                "issue__assignees"
+            ).all()
            for assignee in issue_cycle.issue.assignees.all()
        ]
        # Use a set comprehension to return only the unique objects

View File

@@ -5,7 +5,7 @@ from django.db import IntegrityError
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
@@ -94,8 +94,33 @@ class ProjectLiteSerializer(BaseSerializer):
        read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True)
is_member = serializers.BooleanField(read_only=True)
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
members = serializers.SerializerMethodField()
def get_members(self, obj):
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
"id",
"member_id",
"member__display_name",
"member__avatar",
)
return project_members
class Meta:
model = Project
fields = "__all__"
class ProjectDetailSerializer(BaseSerializer):
-    workspace = WorkSpaceSerializer(read_only=True)
+    # workspace = WorkSpaceSerializer(read_only=True)
    default_assignee = UserLiteSerializer(read_only=True)
    project_lead = UserLiteSerializer(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
@@ -148,8 +173,6 @@ class ProjectIdentifierSerializer(BaseSerializer):
class ProjectFavoriteSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    class Meta:
        model = ProjectFavorite
        fields = "__all__"
@@ -178,12 +201,12 @@ class ProjectDeployBoardSerializer(BaseSerializer):
        fields = "__all__"
        read_only_fields = [
            "workspace",
-            "project", "anchor",
+            "project",
+            "anchor",
        ]
class ProjectPublicMemberSerializer(BaseSerializer):
    class Meta:
        model = ProjectPublicMember
        fields = "__all__"

View File

@@ -3,7 +3,7 @@ from rest_framework import serializers
# Module import
from .base import BaseSerializer
-from plane.db.models import User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer):
@@ -33,6 +33,81 @@ class UserSerializer(BaseSerializer):
        return bool(obj.first_name) or bool(obj.last_name)
class UserMeSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"avatar",
"cover_image",
"date_joined",
"display_name",
"email",
"first_name",
"last_name",
"is_active",
"is_bot",
"is_email_verified",
"is_managed",
"is_onboarded",
"is_tour_completed",
"mobile_number",
"role",
"onboarding_step",
"user_timezone",
"username",
"theme",
"last_workspace_id",
]
read_only_fields = fields
class UserMeSettingsSerializer(BaseSerializer):
workspace = serializers.SerializerMethodField()
class Meta:
model = User
fields = [
"id",
"email",
"workspace",
]
read_only_fields = fields
def get_workspace(self, obj):
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.get(
pk=obj.last_workspace_id, workspace_member__member=obj.id
)
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug,
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"invites": workspace_invites,
}
else:
fallback_workspace = (
Workspace.objects.filter(workspace_member__member_id=obj.id)
.order_by("created_at")
.first()
)
return {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
class UserLiteSerializer(BaseSerializer):
    class Meta:
        model = User
@@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer):
class UserAdminLiteSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = [

View File

@@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer):
        fields = "__all__"
+class WorkspaceMemberMeSerializer(BaseSerializer):
+    class Meta:
+        model = WorkspaceMember
+        fields = "__all__"
class WorkspaceMemberAdminSerializer(BaseSerializer):
    member = UserAdminLiteSerializer(read_only=True)
    workspace = WorkspaceLiteSerializer(read_only=True)

View File

@@ -0,0 +1,50 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .release_note import urlpatterns as release_note_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .unsplash import urlpatterns as unsplash_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
*authentication_urls,
*configuration_urls,
*cycle_urls,
*estimate_urls,
*gpt_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
*issue_urls,
*module_urls,
*notification_urls,
*page_urls,
*project_urls,
*public_board_urls,
*release_note_urls,
*search_urls,
*state_urls,
*unsplash_urls,
*user_urls,
*view_urls,
*workspace_urls,
]
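
A minimal sketch of how this aggregated list is expected to be mounted; the project-level URLconf and the "api/" prefix are assumptions, not shown in this diff:

# Hypothetical root URLconf wiring for the new plane.api.urls package.
from django.urls import include, path

urlpatterns = [
    # Mounts every pattern re-exported by plane/api/urls/__init__.py under /api/.
    path("api/", include("plane.api.urls")),
]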

View File

@@ -0,0 +1,46 @@
from django.urls import path
from plane.api.views import (
AnalyticsEndpoint,
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
DefaultAnalyticsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/analytics/",
AnalyticsEndpoint.as_view(),
name="plane-analytics",
),
path(
"workspaces/<str:slug>/analytic-view/",
AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
name="analytic-view",
),
path(
"workspaces/<str:slug>/analytic-view/<uuid:pk>/",
AnalyticViewViewset.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="analytic-view",
),
path(
"workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
SavedAnalyticEndpoint.as_view(),
name="saved-analytic-view",
),
path(
"workspaces/<str:slug>/export-analytics/",
ExportAnalyticsEndpoint.as_view(),
name="export-analytics",
),
path(
"workspaces/<str:slug>/default-analytics/",
DefaultAnalyticsEndpoint.as_view(),
name="default-analytics",
),
]
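
For reference, a hedged request sketch against the analytics endpoint registered above. The host, auth header, and "api/" prefix are placeholders; the x_axis, y_axis, and segment values mirror the valid_xaxis_segment and valid_yaxis lists enforced in AnalyticsEndpoint later in this diff:

# Hypothetical client call; adjust host, prefix, and credentials for a real deployment.
import requests

response = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/analytics/",
    params={"x_axis": "priority", "y_axis": "issue_count", "segment": "state_id"},
    headers={"Authorization": "Bearer <access-token>"},
)
payload = response.json()
print(payload["total"], payload["distribution"])  # keys returned by AnalyticsEndpoint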

View File

@@ -0,0 +1,31 @@
from django.urls import path
from plane.api.views import (
FileAssetEndpoint,
UserAssetsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/file-assets/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"users/file-assets/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
path(
"users/file-assets/<str:asset_key>/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
]

View File

@@ -0,0 +1,68 @@
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
from plane.api.views import (
# Authentication
SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
MagicSignInGenerateEndpoint,
OauthEndpoint,
## End Authentication
# Auth Extended
ForgotPasswordEndpoint,
VerifyEmailEndpoint,
ResetPasswordEndpoint,
RequestEmailVerificationEndpoint,
ChangePasswordEndpoint,
    ## End Auth Extended
# API Tokens
ApiTokenEndpoint,
## End API Tokens
)
urlpatterns = [
# Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path(
"request-email-verify/",
RequestEmailVerificationEndpoint.as_view(),
name="request-reset-email",
),
# Password Manipulation
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"reset-password/<uidb64>/<token>/",
ResetPasswordEndpoint.as_view(),
name="password-reset",
),
path(
"forgot-password/",
ForgotPasswordEndpoint.as_view(),
name="forgot-password",
),
# API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
## End API Tokens
]
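
Since token/refresh/ is wired to rest_framework_simplejwt's TokenRefreshView, a client exchanges a refresh token for a new access token. A minimal sketch, with the host and prefix as assumptions:

# Hypothetical refresh call; TokenRefreshView expects {"refresh": ...} and returns {"access": ...}.
import requests

response = requests.post(
    "https://plane.example.com/api/token/refresh/",
    json={"refresh": "<refresh-token>"},
)
new_access_token = response.json()["access"]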

View File

@@ -0,0 +1,12 @@
from django.urls import path
from plane.api.views import ConfigurationEndpoint
urlpatterns = [
path(
"configs/",
ConfigurationEndpoint.as_view(),
name="configuration",
),
]

View File

@@ -0,0 +1,87 @@
from django.urls import path
from plane.api.views import (
CycleViewSet,
CycleIssueViewSet,
CycleDateCheckEndpoint,
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
CycleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
CycleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
CycleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
CycleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
CycleDateCheckEndpoint.as_view(),
name="project-cycle-date",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
CycleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
CycleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
TransferCycleIssueEndpoint.as_view(),
name="transfer-issues",
),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
ProjectEstimatePointEndpoint.as_view(),
name="project-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
BulkEstimatePointEndpoint.as_view(
{
"get": "list",
"post": "create",
}
),
name="bulk-create-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
BulkEstimatePointEndpoint.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="bulk-create-estimate-points",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import GPTIntegrationEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/importers/<str:service>/",
ServiceIssueImportSummaryEndpoint.as_view(),
name="importer-summary",
),
path(
"workspaces/<str:slug>/projects/importers/<str:service>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
UpdateServiceImportStatusEndpoint.as_view(),
name="importer-status",
),
]

View File

@@ -0,0 +1,53 @@
from django.urls import path
from plane.api.views import (
InboxViewSet,
InboxIssueViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
InboxViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
InboxViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssueViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
]

View File

@@ -0,0 +1,150 @@
from django.urls import path
from plane.api.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
urlpatterns = [
path(
"integrations/",
IntegrationViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="integrations",
),
path(
"integrations/<uuid:pk>/",
IntegrationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "list",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
WorkspaceIntegrationViewSet.as_view(
{
"post": "create",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="workspace-integrations",
),
# Github Integrations
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
GithubRepositoriesEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
GithubRepositorySyncViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
GithubRepositorySyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
GithubIssueSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
BulkCreateGithubIssueSyncEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
GithubIssueSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
GithubCommentSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
GithubCommentSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
## End Github Integrations
# Slack Integration
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
SlackProjectSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
SlackProjectSyncViewSet.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
),
## End Slack Integration
]

View File

@@ -0,0 +1,332 @@
from django.urls import path
from plane.api.views import (
IssueViewSet,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
BulkImportIssuesEndpoint,
UserWorkSpaceIssues,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
ExportIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
IssueSubscriberViewSet,
IssueReactionViewSet,
CommentReactionViewSet,
IssuePropertyViewSet,
IssueArchiveViewSet,
IssueRelationViewSet,
IssueDraftViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
IssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
IssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
LabelViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
LabelViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
BulkCreateIssueLabelsEndpoint.as_view(),
name="project-bulk-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
BulkImportIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/my-issues/",
UserWorkSpaceIssues.as_view(),
name="workspace-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
SubIssuesEndpoint.as_view(),
name="sub-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
IssueLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
IssueLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/export-issues/",
ExportIssuesEndpoint.as_view(),
name="export-issues",
),
## End Issues
## Issue Activity
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
IssueActivityEndpoint.as_view(),
name="project-issue-history",
),
## Issue Activity
## IssueComments
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-comment",
),
## End IssueComments
# Issue Subscribers
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
IssueSubscriberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
IssueSubscriberViewSet.as_view(
{
"get": "subscription_status",
"post": "subscribe",
"delete": "unsubscribe",
}
),
name="project-issue-subscribers",
),
## End Issue Subscribers
# Issue Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-reactions",
),
## End Issue Reactions
# Comment Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-comment-reactions",
),
## End Comment Reactions
## IssueProperty
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
IssuePropertyViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-roadmap",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/<uuid:pk>/",
IssuePropertyViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-roadmap",
),
    ## IssueProperty End
## Issue Archives
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
IssueArchiveViewSet.as_view(
{
"get": "list",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"post": "unarchive",
}
),
name="project-issue-archive",
),
## End Issue Archives
## Issue Relation
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
IssueRelationViewSet.as_view(
{
"post": "create",
}
),
name="issue-relation",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
IssueRelationViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-relation",
),
## End Issue Relation
## Issue Drafts
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
IssueDraftViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-draft",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
IssueDraftViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-draft",
),
]

View File

@@ -0,0 +1,104 @@
from django.urls import path
from plane.api.views import (
ModuleViewSet,
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
BulkImportModulesEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
ModuleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
ModuleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
ModuleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
ModuleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
ModuleLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
ModuleLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
ModuleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
ModuleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
BulkImportModulesEndpoint.as_view(),
name="bulk-modules-create",
),
]

View File

@@ -0,0 +1,66 @@
from django.urls import path
from plane.api.views import (
NotificationViewSet,
UnreadNotificationEndpoint,
MarkAllReadNotificationViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/users/notifications/",
NotificationViewSet.as_view(
{
"get": "list",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/",
NotificationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
NotificationViewSet.as_view(
{
"post": "mark_read",
"delete": "mark_unread",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
NotificationViewSet.as_view(
{
"post": "archive",
"delete": "unarchive",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/unread/",
UnreadNotificationEndpoint.as_view(),
name="unread-notifications",
),
path(
"workspaces/<str:slug>/users/notifications/mark-all-read/",
MarkAllReadNotificationViewSet.as_view(
{
"post": "create",
}
),
name="mark-all-read-notifications",
),
]

View File

@@ -0,0 +1,79 @@
from django.urls import path
from plane.api.views import (
PageViewSet,
PageBlockViewSet,
PageFavoriteViewSet,
CreateIssueFromPageBlockEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
PageViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
PageViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
PageBlockViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
PageBlockViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
PageFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
PageFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
CreateIssueFromPageBlockEndpoint.as_view(),
name="page-block-issues",
),
]

View File

@@ -0,0 +1,144 @@
from django.urls import path
from plane.api.views import (
ProjectViewSet,
InviteProjectEndpoint,
ProjectMemberViewSet,
ProjectMemberEndpoint,
ProjectMemberInvitationsViewset,
ProjectMemberUserEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
AddTeamToProjectEndpoint,
ProjectUserViewsEndpoint,
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/",
ProjectViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project",
),
path(
"workspaces/<str:slug>/projects/<uuid:pk>/",
ProjectViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project",
),
path(
"workspaces/<str:slug>/project-identifiers/",
ProjectIdentifierEndpoint.as_view(),
name="project-identifiers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
InviteProjectEndpoint.as_view(),
name="invite-project",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
ProjectMemberViewSet.as_view({"get": "list"}),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
ProjectMemberViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
ProjectMemberEndpoint.as_view(),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
AddMemberToProjectEndpoint.as_view(),
name="project",
),
path(
"workspaces/<str:slug>/projects/join/",
ProjectJoinEndpoint.as_view(),
name="project-join",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
AddTeamToProjectEndpoint.as_view(),
name="projects",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
ProjectMemberInvitationsViewset.as_view({"get": "list"}),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
ProjectMemberInvitationsViewset.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
ProjectUserViewsEndpoint.as_view(),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
ProjectMemberUserEndpoint.as_view(),
name="project-member-view",
),
path(
"workspaces/<str:slug>/user-favorite-projects/",
ProjectFavoritesViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
ProjectFavoritesViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
LeaveProjectEndpoint.as_view(),
name="leave-project",
),
path(
"project-covers/",
ProjectPublicCoverImagesEndpoint.as_view(),
name="project-covers",
),
]

View File

@@ -0,0 +1,151 @@
from django.urls import path
from plane.api.views import (
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectIssuesPublicEndpoint,
IssueRetrievePublicEndpoint,
IssueCommentPublicViewSet,
IssueReactionPublicViewSet,
CommentReactionPublicViewSet,
InboxIssuePublicViewSet,
IssueVotePublicViewSet,
WorkspaceProjectDeployBoardEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
ProjectDeployBoardViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-deploy-board",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
ProjectDeployBoardViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
name="project-deploy-board-settings",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
ProjectIssuesPublicEndpoint.as_view(),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
IssueRetrievePublicEndpoint.as_view(),
name="workspace-project-boards",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentPublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssuePublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssuePublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
IssueVotePublicViewSet.as_view(
{
"get": "list",
"post": "create",
"delete": "destroy",
}
),
name="issue-vote-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/",
WorkspaceProjectDeployBoardEndpoint.as_view(),
name="workspace-project-boards",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import ReleaseNotesEndpoint
urlpatterns = [
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
]

View File

@@ -0,0 +1,21 @@
from django.urls import path
from plane.api.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/search/",
GlobalSearchEndpoint.as_view(),
name="global-search",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/search-issues/",
IssueSearchEndpoint.as_view(),
name="project-issue-search",
),
]

View File

@@ -0,0 +1,30 @@
from django.urls import path
from plane.api.views import StateViewSet
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
StateViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-states",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
StateViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-state",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import UnsplashEndpoint
urlpatterns = [
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="unsplash",
),
]

View File

@@ -0,0 +1,113 @@
from django.urls import path
from plane.api.views import (
## User
UserEndpoint,
UpdateUserOnBoardedEndpoint,
UpdateUserTourCompletedEndpoint,
UserActivityEndpoint,
ChangePasswordEndpoint,
## End User
## Workspaces
UserWorkspaceInvitationsEndpoint,
UserWorkSpacesEndpoint,
JoinWorkspaceEndpoint,
UserWorkspaceInvitationsEndpoint,
UserWorkspaceInvitationEndpoint,
UserActivityGraphEndpoint,
UserIssueCompletedGraphEndpoint,
UserWorkspaceDashboardEndpoint,
UserProjectInvitationsViewset,
## End Workspaces
)
urlpatterns = [
# User Profile
path(
"users/me/",
UserEndpoint.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="users",
),
path(
"users/me/settings/",
UserEndpoint.as_view(
{
"get": "retrieve_user_settings",
}
),
name="users",
),
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"users/me/onboard/",
UpdateUserOnBoardedEndpoint.as_view(),
name="user-onboard",
),
path(
"users/me/tour-completed/",
UpdateUserTourCompletedEndpoint.as_view(),
name="user-tour",
),
path(
"users/workspaces/<str:slug>/activities/",
UserActivityEndpoint.as_view(),
name="user-activities",
),
# user workspaces
path(
"users/me/workspaces/",
UserWorkSpacesEndpoint.as_view(),
name="user-workspace",
),
# user workspace invitations
path(
"users/me/invitations/workspaces/",
UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
name="user-workspace-invitations",
),
# user workspace invitation
path(
"users/me/invitations/<uuid:pk>/",
UserWorkspaceInvitationEndpoint.as_view(
{
"get": "retrieve",
}
),
name="user-workspace-invitation",
),
# user join workspace
# User Graphs
path(
"users/me/workspaces/<str:slug>/activity-graph/",
UserActivityGraphEndpoint.as_view(),
name="user-activity-graph",
),
path(
"users/me/workspaces/<str:slug>/issues-completed-graph/",
UserIssueCompletedGraphEndpoint.as_view(),
name="completed-graph",
),
path(
"users/me/workspaces/<str:slug>/dashboard/",
UserWorkspaceDashboardEndpoint.as_view(),
name="user-workspace-dashboard",
),
## End User Graph
path(
"users/me/invitations/workspaces/<str:slug>/<uuid:pk>/join/",
JoinWorkspaceEndpoint.as_view(),
name="user-join-workspace",
),
# user project invitations
path(
"users/me/invitations/projects/",
UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
name="user-project-invitations",
),
]
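
The users/me/settings/ route added here maps to UserEndpoint's retrieve_user_settings action, presumably backed by the UserMeSettingsSerializer defined earlier in this diff. A hedged sketch of the call and its response shape, with the host, prefix, and token as placeholders:

# Hypothetical call to the new settings endpoint.
import requests

response = requests.get(
    "https://plane.example.com/api/users/me/settings/",
    headers={"Authorization": "Bearer <access-token>"},
)
settings = response.json()
# Expected keys per UserMeSettingsSerializer: "id", "email", and a "workspace" dict with
# last_workspace_id/slug, fallback_workspace_id/slug, and the pending "invites" count.
workspace_info = settings["workspace"]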

View File

@@ -0,0 +1,85 @@
from django.urls import path
from plane.api.views import (
IssueViewViewSet,
GlobalViewViewSet,
GlobalViewIssuesViewSet,
IssueViewFavoriteViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/",
IssueViewViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:pk>/",
IssueViewViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-view",
),
path(
"workspaces/<str:slug>/views/",
GlobalViewViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="global-view",
),
path(
"workspaces/<str:slug>/views/<uuid:pk>/",
GlobalViewViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="global-view",
),
path(
"workspaces/<str:slug>/issues/",
GlobalViewIssuesViewSet.as_view(
{
"get": "list",
}
),
name="global-view-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
IssueViewFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/<uuid:view_id>/",
IssueViewFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-view",
),
]

View File

@@ -0,0 +1,182 @@
from django.urls import path
from plane.api.views import (
WorkSpaceViewSet,
InviteWorkspaceEndpoint,
WorkSpaceMemberViewSet,
WorkspaceMembersEndpoint,
WorkspaceInvitationsViewset,
WorkspaceMemberUserEndpoint,
WorkspaceMemberUserViewsEndpoint,
WorkSpaceAvailabilityCheckEndpoint,
TeamMemberViewSet,
UserLastProjectWithWorkspaceEndpoint,
WorkspaceThemeViewSet,
WorkspaceUserProfileStatsEndpoint,
WorkspaceUserActivityEndpoint,
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
LeaveWorkspaceEndpoint,
)
urlpatterns = [
path(
"workspace-slug-check/",
WorkSpaceAvailabilityCheckEndpoint.as_view(),
name="workspace-availability",
),
path(
"workspaces/",
WorkSpaceViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/",
WorkSpaceViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/invite/",
InviteWorkspaceEndpoint.as_view(),
name="invite-workspace",
),
path(
"workspaces/<str:slug>/invitations/",
WorkspaceInvitationsViewset.as_view({"get": "list"}),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/invitations/<uuid:pk>/",
WorkspaceInvitationsViewset.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/members/",
WorkSpaceMemberViewSet.as_view({"get": "list"}),
name="workspace-member",
),
path(
"workspaces/<str:slug>/members/<uuid:pk>/",
WorkSpaceMemberViewSet.as_view(
{
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-member",
),
path(
"workspaces/<str:slug>/workspace-members/",
WorkspaceMembersEndpoint.as_view(),
name="workspace-members",
),
path(
"workspaces/<str:slug>/teams/",
TeamMemberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-team-members",
),
path(
"workspaces/<str:slug>/teams/<uuid:pk>/",
TeamMemberViewSet.as_view(
{
"put": "update",
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-team-members",
),
path(
"users/last-visited-workspace/",
UserLastProjectWithWorkspaceEndpoint.as_view(),
name="workspace-project-details",
),
path(
"workspaces/<str:slug>/workspace-members/me/",
WorkspaceMemberUserEndpoint.as_view(),
name="workspace-member-details",
),
path(
"workspaces/<str:slug>/workspace-views/",
WorkspaceMemberUserViewsEndpoint.as_view(),
name="workspace-member-views-details",
),
path(
"workspaces/<str:slug>/workspace-themes/",
WorkspaceThemeViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/workspace-themes/<uuid:pk>/",
WorkspaceThemeViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/user-stats/<uuid:user_id>/",
WorkspaceUserProfileStatsEndpoint.as_view(),
name="workspace-user-stats",
),
path(
"workspaces/<str:slug>/user-activity/<uuid:user_id>/",
WorkspaceUserActivityEndpoint.as_view(),
name="workspace-user-activity",
),
path(
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
WorkspaceUserProfileEndpoint.as_view(),
name="workspace-user-profile-page",
),
path(
"workspaces/<str:slug>/user-issues/<uuid:user_id>/",
WorkspaceUserProfileIssuesEndpoint.as_view(),
name="workspace-user-profile-issues",
),
path(
"workspaces/<str:slug>/labels/",
WorkspaceLabelsEndpoint.as_view(),
name="workspace-labels",
),
path(
"workspaces/<str:slug>/members/leave/",
LeaveWorkspaceEndpoint.as_view(),
name="leave-workspace-members",
),
]

View File

@@ -1,5 +1,6 @@
from django.urls import path
+from rest_framework_simplejwt.views import TokenRefreshView
# Create your urls here.
@@ -191,6 +192,9 @@ from plane.api.views import (
)
+#TODO: Delete this file
+# This url file has been deprecated use apiserver/plane/urls folder to create new urls
urlpatterns = [
    # Social Auth
    path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
@@ -203,6 +207,7 @@ urlpatterns = [
        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
    ),
    path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
+    path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    # Email verification
    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
    path(
@@ -229,6 +234,15 @@ urlpatterns = [
        ),
        name="users",
    ),
path(
"users/me/settings/",
UserEndpoint.as_view(
{
"get": "retrieve_user_settings",
}
),
name="users",
),
    path(
        "users/me/change-password/",
        ChangePasswordEndpoint.as_view(),
@@ -556,6 +570,7 @@ urlpatterns = [
        "workspaces/<str:slug>/user-favorite-projects/",
        ProjectFavoritesViewSet.as_view(
            {
+                "get": "list",
                "post": "create",
            }
        ),

View File

@ -1,10 +1,5 @@
# Django imports # Django imports
from django.db.models import ( from django.db.models import Count, Sum, F, Q
Count,
Sum,
F,
Q
)
from django.db.models.functions import ExtractMonth from django.db.models.functions import ExtractMonth
# Third party imports # Third party imports
@@ -31,68 +26,152 @@ class AnalyticsEndpoint(BaseAPIView):
         try:
             x_axis = request.GET.get("x_axis", False)
             y_axis = request.GET.get("y_axis", False)
-
-            if not x_axis or not y_axis:
-                return Response(
-                    {"error": "x-axis and y-axis dimensions are required"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-
-            segment = request.GET.get("segment", False)
-            filters = issue_filters(request.GET, "GET")
-
-            queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
-
-            total_issues = queryset.count()
-
-            distribution = build_graph_plot(
-                queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
-            )
-
-            colors = dict()
-            if x_axis in ["state__name", "state__group"] or segment in [
-                "state__name",
-                "state__group",
-            ]:
-                if x_axis in ["state__name", "state__group"]:
-                    key = "name" if x_axis == "state__name" else "group"
-                else:
-                    key = "name" if segment == "state__name" else "group"
-
-                colors = (
-                    State.objects.filter(
-                        ~Q(name="Triage"),
-                        workspace__slug=slug, project_id__in=filters.get("project__in")
-                    ).values(key, "color")
-                    if filters.get("project__in", False)
-                    else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
-                )
-
-            if x_axis in ["labels__name"] or segment in ["labels__name"]:
-                colors = (
-                    Label.objects.filter(
-                        workspace__slug=slug, project_id__in=filters.get("project__in")
-                    ).values("name", "color")
-                    if filters.get("project__in", False)
-                    else Label.objects.filter(workspace__slug=slug).values(
-                        "name", "color"
-                    )
-                )
-
-            assignee_details = {}
-            if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
-                assignee_details = (
-                    Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
-                    .order_by("assignees__id")
-                    .distinct("assignees__id")
-                    .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
-                )
-
-            return Response(
-                {
-                    "total": total_issues,
-                    "distribution": distribution,
-                    "extras": {"colors": colors, "assignee_details": assignee_details},
-                },
-                status=status.HTTP_200_OK,
-            )
+            segment = request.GET.get("segment", False)
+
+            valid_xaxis_segment = [
+                "state_id",
+                "state__group",
+                "labels__id",
+                "assignees__id",
+                "estimate_point",
+                "issue_cycle__cycle_id",
+                "issue_module__module_id",
+                "priority",
+                "start_date",
+                "target_date",
+                "created_at",
+                "completed_at",
+            ]
+
+            valid_yaxis = [
+                "issue_count",
+                "estimate",
+            ]
+
+            # Check for x-axis and y-axis as they are required parameters
+            if (
+                not x_axis
+                or not y_axis
+                or x_axis not in valid_xaxis_segment
+                or y_axis not in valid_yaxis
+            ):
+                return Response(
+                    {
+                        "error": "x-axis and y-axis dimensions are required and the values should be valid"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # If segment is present it cannot be the same as the x-axis
+            if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+                return Response(
+                    {
+                        "error": "Both segment and x axis cannot be the same and segment should be valid"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # Additional filters that need to be applied
+            filters = issue_filters(request.GET, "GET")
+
+            # Get the issues for the workspace with the additional filters applied
+            queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
+
+            # Get the total issue count
+            total_issues = queryset.count()
+
+            # Build the graph payload
+            distribution = build_graph_plot(
+                queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+            )
+
+            state_details = {}
+            if x_axis in ["state_id"] or segment in ["state_id"]:
+                state_details = (
+                    Issue.issue_objects.filter(
+                        workspace__slug=slug,
+                        **filters,
+                    )
+                    .distinct("state_id")
+                    .order_by("state_id")
+                    .values("state_id", "state__name", "state__color")
+                )
+
+            label_details = {}
+            if x_axis in ["labels__id"] or segment in ["labels__id"]:
+                label_details = (
+                    Issue.objects.filter(
+                        workspace__slug=slug, **filters, labels__id__isnull=False
+                    )
+                    .distinct("labels__id")
+                    .order_by("labels__id")
+                    .values("labels__id", "labels__color", "labels__name")
+                )
+
+            assignee_details = {}
+            if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
+                assignee_details = (
+                    Issue.issue_objects.filter(
+                        workspace__slug=slug, **filters, assignees__avatar__isnull=False
+                    )
+                    .order_by("assignees__id")
+                    .distinct("assignees__id")
+                    .values(
+                        "assignees__avatar",
+                        "assignees__display_name",
+                        "assignees__first_name",
+                        "assignees__last_name",
+                        "assignees__id",
+                    )
+                )
+
+            cycle_details = {}
+            if x_axis in ["issue_cycle__cycle_id"] or segment in [
+                "issue_cycle__cycle_id"
+            ]:
+                cycle_details = (
+                    Issue.issue_objects.filter(
+                        workspace__slug=slug,
+                        **filters,
+                        issue_cycle__cycle_id__isnull=False,
+                    )
+                    .distinct("issue_cycle__cycle_id")
+                    .order_by("issue_cycle__cycle_id")
+                    .values(
+                        "issue_cycle__cycle_id",
+                        "issue_cycle__cycle__name",
+                    )
+                )
+
+            module_details = {}
+            if x_axis in ["issue_module__module_id"] or segment in [
+                "issue_module__module_id"
+            ]:
+                module_details = (
+                    Issue.issue_objects.filter(
+                        workspace__slug=slug,
+                        **filters,
+                        issue_module__module_id__isnull=False,
+                    )
+                    .distinct("issue_module__module_id")
+                    .order_by("issue_module__module_id")
+                    .values(
+                        "issue_module__module_id",
+                        "issue_module__module__name",
+                    )
+                )
+
+            return Response(
+                {
+                    "total": total_issues,
+                    "distribution": distribution,
+                    "extras": {
+                        "state_details": state_details,
+                        "assignee_details": assignee_details,
+                        "label_details": label_details,
+                        "cycle_details": cycle_details,
+                        "module_details": module_details,
+                    },
+                },
+                status=status.HTTP_200_OK,
+            )
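
A hedged example of a request that passes the validation added above. The workspace slug, host, and exact analytics route are placeholders; the parameter values are taken from valid_xaxis_segment and valid_yaxis.

import requests

params = {
    "x_axis": "priority",       # must be in valid_xaxis_segment
    "y_axis": "issue_count",    # must be in valid_yaxis
    "segment": "state__group",  # optional; must be valid and different from x_axis
}
response = requests.get(
    "http://localhost:8000/api/workspaces/<slug>/analytics/",  # assumed route
    params=params,
    headers={"Authorization": "Bearer <access_token>"},
)
payload = response.json()
# "extras" now carries state/label/assignee/cycle/module detail maps instead of "colors"
print(payload["total"], list(payload["extras"].keys()))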
@@ -177,13 +256,53 @@ class ExportAnalyticsEndpoint(BaseAPIView):
         try:
             x_axis = request.data.get("x_axis", False)
             y_axis = request.data.get("y_axis", False)
-
-            if not x_axis or not y_axis:
-                return Response(
-                    {"error": "x-axis and y-axis dimensions are required"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
+            segment = request.data.get("segment", False)
+
+            valid_xaxis_segment = [
+                "state_id",
+                "state__group",
+                "labels__id",
+                "assignees__id",
+                "estimate_point",
+                "issue_cycle__cycle_id",
+                "issue_module__module_id",
+                "priority",
+                "start_date",
+                "target_date",
+                "created_at",
+                "completed_at",
+            ]
+
+            valid_yaxis = [
+                "issue_count",
+                "estimate",
+            ]
+
+            # Check for x-axis and y-axis as they are required parameters
+            if (
+                not x_axis
+                or not y_axis
+                or x_axis not in valid_xaxis_segment
+                or y_axis not in valid_yaxis
+            ):
+                return Response(
+                    {
+                        "error": "x-axis and y-axis dimensions are required and the values should be valid"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # If segment is present it cannot be the same as the x-axis
+            if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+                return Response(
+                    {
+                        "error": "Both segment and x axis cannot be the same and segment should be valid"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )

             analytic_export_task.delay(
                 email=request.user.email, data=request.data, slug=slug
             )
@@ -210,68 +329,80 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
     def get(self, request, slug):
         try:
             filters = issue_filters(request.GET, "GET")
-
-            queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
-
-            total_issues = queryset.count()
-
-            total_issues_classified = (
-                queryset.annotate(state_group=F("state__group"))
-                .values("state_group")
-                .annotate(state_count=Count("state_group"))
-                .order_by("state_group")
-            )
-
-            open_issues = queryset.filter(
-                state__group__in=["backlog", "unstarted", "started"]
-            ).count()
-
-            open_issues_classified = (
-                queryset.filter(state__group__in=["backlog", "unstarted", "started"])
-                .annotate(state_group=F("state__group"))
-                .values("state_group")
-                .annotate(state_count=Count("state_group"))
-                .order_by("state_group")
-            )
-
-            issue_completed_month_wise = (
-                queryset.filter(completed_at__isnull=False)
-                .annotate(month=ExtractMonth("completed_at"))
-                .values("month")
-                .annotate(count=Count("*"))
-                .order_by("month")
-            )
-
-            most_issue_created_user = (
-                queryset.exclude(created_by=None)
-                .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id")
-                .annotate(count=Count("id"))
-                .order_by("-count")
-            )[:5]
-
-            most_issue_closed_user = (
-                queryset.filter(completed_at__isnull=False, assignees__isnull=False)
-                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
-                .annotate(count=Count("id"))
-                .order_by("-count")
-            )[:5]
-
-            pending_issue_user = (
-                queryset.filter(completed_at__isnull=True)
-                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
-                .annotate(count=Count("id"))
-                .order_by("-count")
-            )
-
-            open_estimate_sum = (
-                queryset.filter(
-                    state__group__in=["backlog", "unstarted", "started"]
-                ).aggregate(open_estimate_sum=Sum("estimate_point"))
-            )["open_estimate_sum"]
-            print(open_estimate_sum)
-
-            total_estimate_sum = queryset.aggregate(
-                total_estimate_sum=Sum("estimate_point")
-            )["total_estimate_sum"]
+            base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
+
+            total_issues = base_issues.count()
+
+            state_groups = base_issues.annotate(state_group=F("state__group"))
+
+            total_issues_classified = (
+                state_groups.values("state_group")
+                .annotate(state_count=Count("state_group"))
+                .order_by("state_group")
+            )
+
+            open_issues_groups = ["backlog", "unstarted", "started"]
+            open_issues_queryset = state_groups.filter(
+                state__group__in=open_issues_groups
+            )
+
+            open_issues = open_issues_queryset.count()
+            open_issues_classified = (
+                open_issues_queryset.values("state_group")
+                .annotate(state_count=Count("state_group"))
+                .order_by("state_group")
+            )
+
+            issue_completed_month_wise = (
+                base_issues.filter(completed_at__isnull=False)
+                .annotate(month=ExtractMonth("completed_at"))
+                .values("month")
+                .annotate(count=Count("*"))
+                .order_by("month")
+            )
+
+            user_details = [
+                "created_by__first_name",
+                "created_by__last_name",
+                "created_by__avatar",
+                "created_by__display_name",
+                "created_by__id",
+            ]
+
+            most_issue_created_user = (
+                base_issues.exclude(created_by=None)
+                .values(*user_details)
+                .annotate(count=Count("id"))
+                .order_by("-count")[:5]
+            )
+
+            user_assignee_details = [
+                "assignees__first_name",
+                "assignees__last_name",
+                "assignees__avatar",
+                "assignees__display_name",
+                "assignees__id",
+            ]
+
+            most_issue_closed_user = (
+                base_issues.filter(completed_at__isnull=False)
+                .exclude(assignees=None)
+                .values(*user_assignee_details)
+                .annotate(count=Count("id"))
+                .order_by("-count")[:5]
+            )
+
+            pending_issue_user = (
+                base_issues.filter(completed_at__isnull=True)
+                .values(*user_assignee_details)
+                .annotate(count=Count("id"))
+                .order_by("-count")
+            )
+
+            open_estimate_sum = open_issues_queryset.aggregate(
+                sum=Sum("estimate_point")
+            )["sum"]
+            total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]

             return Response(
                 {
@@ -292,6 +423,6 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
         except Exception as e:
             capture_exception(e)
             return Response(
-                {"error": "Something went wrong please try again later"},
+                {"error": "Something went wrong. Please try again later."},
                 status=status.HTTP_400_BAD_REQUEST,
             )
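
The rewritten DefaultAnalyticsEndpoint above builds every metric from a single base_issues queryset instead of refiltering repeatedly. A standalone sketch of that reuse pattern follows; model and field names mirror the view and it assumes the Plane Issue model, so it is an illustration rather than shared code from the commit.

from django.db.models import Count, F, Sum


def default_analytics_summary(base_issues):
    # Annotate once, then reuse the same querysets for counts, group-bys and sums
    state_groups = base_issues.annotate(state_group=F("state__group"))
    open_issues_queryset = state_groups.filter(
        state__group__in=["backlog", "unstarted", "started"]
    )
    return {
        "total_issues": base_issues.count(),
        "open_issues": open_issues_queryset.count(),
        "issues_by_state_group": list(
            state_groups.values("state_group")
            .annotate(state_count=Count("state_group"))
            .order_by("state_group")
        ),
        "open_estimate_sum": open_issues_queryset.aggregate(sum=Sum("estimate_point"))["sum"],
        "total_estimate_sum": base_issues.aggregate(sum=Sum("estimate_point"))["sum"],
    }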

View File

@@ -9,7 +9,6 @@ from django.utils.encoding import (
     DjangoUnicodeDecodeError,
 )
 from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
-from django.contrib.sites.shortcuts import get_current_site
 from django.conf import settings

 ## Third Party Imports

View File

@@ -87,14 +87,11 @@ class SignUpEndpoint(BaseAPIView):
             user.token_updated_at = timezone.now()
             user.save()

-            serialized_user = UserSerializer(user).data
-
             access_token, refresh_token = get_tokens_for_user(user)

             data = {
                 "access_token": access_token,
                 "refresh_token": refresh_token,
-                "user": serialized_user,
             }

             # Send Analytics
@@ -180,8 +177,6 @@ class SignInEndpoint(BaseAPIView):
                     status=status.HTTP_403_FORBIDDEN,
                 )

-            serialized_user = UserSerializer(user).data
-
             # settings last active for the user
             user.last_active = timezone.now()
             user.last_login_time = timezone.now()
@@ -215,7 +210,6 @@ class SignInEndpoint(BaseAPIView):
             data = {
                 "access_token": access_token,
                 "refresh_token": refresh_token,
-                "user": serialized_user,
             }

             return Response(data, status=status.HTTP_200_OK)
@@ -427,13 +421,11 @@ class MagicSignInEndpoint(BaseAPIView):
                     user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                     user.token_updated_at = timezone.now()
                     user.save()
-                    serialized_user = UserSerializer(user).data
                     access_token, refresh_token = get_tokens_for_user(user)
                     data = {
                         "access_token": access_token,
                         "refresh_token": refresh_token,
-                        "user": serialized_user,
                     }

                     return Response(data, status=status.HTTP_200_OK)
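
Since the sign-up, sign-in, and magic sign-in responses above now return only the token pair, a client that previously read response["user"] needs a follow-up call. A hedged sketch of that flow; the host, the "/api" prefix, and the exact sign-in and users/me/ route names are assumptions, while the "access_token"/"refresh_token" keys come from the diff.

import requests

BASE_URL = "http://localhost:8000/api"  # assumed


def sign_in(email: str, password: str) -> dict:
    # Authenticate and receive only the token pair
    tokens = requests.post(
        f"{BASE_URL}/sign-in/",  # assumed route name
        json={"email": email, "password": password},
    ).json()
    # Fetch the user profile separately, since it is no longer embedded in the response
    user = requests.get(
        f"{BASE_URL}/users/me/",  # assumed route name
        headers={"Authorization": f"Bearer {tokens['access_token']}"},
    ).json()
    return {"tokens": tokens, "user": user}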

View File

@ -62,28 +62,6 @@ class CycleViewSet(BaseViewSet):
project_id=self.kwargs.get("project_id"), owned_by=self.request.user project_id=self.kwargs.get("project_id"), owned_by=self.request.user
) )
def perform_destroy(self, instance):
cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
)
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("pk")),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
subquery = CycleFavorite.objects.filter( subquery = CycleFavorite.objects.filter(
@@ -206,12 +184,6 @@ class CycleViewSet(BaseViewSet):
             queryset = queryset.order_by(order_by)

-            # All Cycles
-            if cycle_view == "all":
-                return Response(
-                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
-                )
-
             # Current Cycle
             if cycle_view == "current":
                 queryset = queryset.filter(
@@ -348,8 +320,9 @@ class CycleViewSet(BaseViewSet):
                     CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                 )

+            # If no matching view is found return all cycles
             return Response(
-                {"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST
+                CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
             )
except Exception as e: except Exception as e:
@ -543,6 +516,40 @@ class CycleViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def destroy(self, request, slug, project_id, pk):
try:
cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
)
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
# Delete the cycle
cycle.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(pk),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
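
The new destroy() above follows a pattern repeated throughout this commit: collect whatever the activity log needs, delete the object, then enqueue the activity task. A compact sketch of that ordering follows; the function name and the activity_task parameter are hypothetical, and the payload keys mirror the view code.

import json

from django.utils import timezone


def delete_cycle_with_activity(cycle, cycle_issue_ids, actor_id, project_id, activity_task):
    # Capture identifiers and the payload first; the rows (and cycle.pk) are gone after delete()
    cycle_id = str(cycle.pk)
    requested_data = json.dumps(
        {
            "cycle_id": cycle_id,
            "issues": [str(issue_id) for issue_id in cycle_issue_ids],
        }
    )
    cycle.delete()
    # activity_task is expected to be the issue_activity Celery task used by the view above
    activity_task.delay(
        type="cycle.activity.deleted",
        requested_data=requested_data,
        actor_id=str(actor_id),
        issue_id=cycle_id,
        project_id=str(project_id),
        current_instance=None,
        epoch=int(timezone.now().timestamp()),
    )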
class CycleIssueViewSet(BaseViewSet): class CycleIssueViewSet(BaseViewSet):
serializer_class = CycleIssueSerializer serializer_class = CycleIssueSerializer
@ -563,23 +570,6 @@ class CycleIssueViewSet(BaseViewSet):
cycle_id=self.kwargs.get("cycle_id"), cycle_id=self.kwargs.get("cycle_id"),
) )
def perform_destroy(self, instance):
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(instance.issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
super() super()
@ -752,7 +742,7 @@ class CycleIssueViewSet(BaseViewSet):
), ),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
# Return all Cycle Issues # Return all Cycle Issues
@ -772,6 +762,30 @@ class CycleIssueViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def destroy(self, request, slug, project_id, cycle_id, pk):
try:
cycle_issue = CycleIssue.objects.get(pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id)
issue_id = cycle_issue.issue_id
cycle_issue.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
class CycleDateCheckEndpoint(BaseAPIView): class CycleDateCheckEndpoint(BaseAPIView):
permission_classes = [ permission_classes = [
@@ -802,7 +816,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
             if cycles.exists():
                 return Response(
                     {
-                        "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates",
+                        "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates",
                         "status": False,
                     }
                 )

View File

@ -108,49 +108,6 @@ class IssueViewSet(BaseViewSet):
"workspace__id", "workspace__id",
] ]
def perform_create(self, serializer):
serializer.save(project_id=self.kwargs.get("project_id"))
def perform_update(self, serializer):
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
def perform_destroy(self, instance):
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="issue.activity.deleted",
requested_data=json.dumps(
{"issue_id": str(self.kwargs.get("pk", None))}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.issue_objects.annotate( Issue.issue_objects.annotate(
@@ -278,7 +235,8 @@ class IssueViewSet(BaseViewSet):
             if group_by:
                 return Response(
-                    group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+                    group_results(issues, group_by, sub_group_by),
+                    status=status.HTTP_200_OK,
                 )

             return Response(issues, status=status.HTTP_200_OK)
@ -314,7 +272,7 @@ class IssueViewSet(BaseViewSet):
issue_id=str(serializer.data.get("id", None)), issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -331,15 +289,69 @@ class IssueViewSet(BaseViewSet):
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
-            ).get(
-                workspace__slug=slug, project_id=project_id, pk=pk
-            )
+            ).get(workspace__slug=slug, project_id=project_id, pk=pk)
             return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
         except Issue.DoesNotExist:
             return Response(
                 {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
             )
def partial_update(self, request, slug, project_id, pk=None):
try:
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, pk=None):
try:
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
issue.delete()
issue_activity.delay(
type="issue.activity.deleted",
requested_data=json.dumps({"issue_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserWorkSpaceIssues(BaseAPIView): class UserWorkSpaceIssues(BaseAPIView):
@method_decorator(gzip_page) @method_decorator(gzip_page)
@@ -468,7 +480,8 @@ class UserWorkSpaceIssues(BaseAPIView):
             if group_by:
                 return Response(
-                    group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+                    group_results(issues, group_by, sub_group_by),
+                    status=status.HTTP_200_OK,
                 )

             return Response(issues, status=status.HTTP_200_OK)
@ -560,64 +573,6 @@ class IssueCommentViewSet(BaseViewSet):
"workspace__id", "workspace__id",
] ]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
issue_id=self.kwargs.get("issue_id"),
actor=self.request.user if self.request.user is not None else None,
)
issue_activity.delay(
type="comment.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
def perform_update(self, serializer):
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="comment.activity.updated",
requested_data=requested_data,
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
def perform_destroy(self, instance):
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="comment.activity.deleted",
requested_data=json.dumps(
{"comment_id": str(self.kwargs.get("pk", None))}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
super() super()
@ -641,6 +596,93 @@ class IssueCommentViewSet(BaseViewSet):
.distinct() .distinct()
) )
def create(self, request, slug, project_id, issue_id):
try:
serializer = IssueCommentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
project_id=project_id,
issue_id=issue_id,
actor=request.user,
)
issue_activity.delay(
type="comment.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, issue_id, pk):
try:
issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
)
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps(
IssueCommentSerializer(issue_comment).data,
cls=DjangoJSONEncoder,
)
serializer = IssueCommentSerializer(
issue_comment, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="comment.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, issue_id, pk):
try:
issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
)
current_instance = json.dumps(
IssueCommentSerializer(issue_comment).data,
cls=DjangoJSONEncoder,
)
issue_comment.delete()
issue_activity.delay(
type="comment.activity.deleted",
requested_data=json.dumps({"comment_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class IssuePropertyViewSet(BaseViewSet): class IssuePropertyViewSet(BaseViewSet):
serializer_class = IssuePropertySerializer serializer_class = IssuePropertySerializer
@@ -718,10 +760,16 @@ class LabelViewSet(BaseViewSet):
                 return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
         except IntegrityError:
-            return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                {"error": "Label with the same name already exists in the project"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         except Exception as e:
             capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
@ -894,63 +942,6 @@ class IssueLinkViewSet(BaseViewSet):
model = IssueLink model = IssueLink
serializer_class = IssueLinkSerializer serializer_class = IssueLinkSerializer
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
issue_id=self.kwargs.get("issue_id"),
)
issue_activity.delay(
type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
def perform_update(self, serializer):
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="link.activity.updated",
requested_data=requested_data,
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
def perform_destroy(self, instance):
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="link.activity.deleted",
requested_data=json.dumps(
{"link_id": str(self.kwargs.get("pk", None))}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return ( return (
super() super()
@ -963,6 +954,92 @@ class IssueLinkViewSet(BaseViewSet):
.distinct() .distinct()
) )
def create(self, request, slug, project_id, issue_id):
try:
serializer = IssueLinkSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
project_id=project_id,
issue_id=issue_id,
)
issue_activity.delay(
type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, issue_id, pk):
try:
issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
)
requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps(
IssueLinkSerializer(issue_link).data,
cls=DjangoJSONEncoder,
)
serializer = IssueLinkSerializer(
issue_link, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="link.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, issue_id, pk):
try:
issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
)
current_instance = json.dumps(
IssueLinkSerializer(issue_link).data,
cls=DjangoJSONEncoder,
)
issue_activity.delay(
type="link.activity.deleted",
requested_data=json.dumps({"link_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
issue_link.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class BulkCreateIssueLabelsEndpoint(BaseAPIView): class BulkCreateIssueLabelsEndpoint(BaseAPIView):
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
@ -1026,7 +1103,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
serializer.data, serializer.data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1049,7 +1126,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
issue_id=str(self.kwargs.get("issue_id", None)), issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)), project_id=str(self.kwargs.get("project_id", None)),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@ -1252,7 +1329,7 @@ class IssueArchiveViewSet(BaseViewSet):
issue_id=str(issue.id), issue_id=str(issue.id),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@@ -1444,20 +1521,31 @@ class IssueReactionViewSet(BaseViewSet):
             .distinct()
         )

-    def perform_create(self, serializer):
-        serializer.save(
-            issue_id=self.kwargs.get("issue_id"),
-            project_id=self.kwargs.get("project_id"),
-            actor=self.request.user,
-        )
-        issue_activity.delay(
-            type="issue_reaction.activity.created",
-            requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
-            actor_id=str(self.request.user.id),
-            issue_id=str(self.kwargs.get("issue_id", None)),
-            project_id=str(self.kwargs.get("project_id", None)),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp())
-        )
+    def create(self, request, slug, project_id, issue_id):
+        try:
+            serializer = IssueReactionSerializer(data=request.data)
+            if serializer.is_valid():
+                serializer.save(
+                    issue_id=issue_id,
+                    project_id=project_id,
+                    actor=request.user,
+                )
+                issue_activity.delay(
+                    type="issue_reaction.activity.created",
+                    requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+                    actor_id=str(request.user.id),
+                    issue_id=str(issue_id),
+                    project_id=str(project_id),
+                    current_instance=None,
+                    epoch=int(timezone.now().timestamp()),
+                )
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

     def destroy(self, request, slug, project_id, issue_id, reaction_code):
@ -1481,7 +1569,7 @@ class IssueReactionViewSet(BaseViewSet):
"identifier": str(issue_reaction.id), "identifier": str(issue_reaction.id),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
issue_reaction.delete() issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1517,20 +1605,31 @@ class CommentReactionViewSet(BaseViewSet):
             .distinct()
         )

-    def perform_create(self, serializer):
-        serializer.save(
-            actor=self.request.user,
-            comment_id=self.kwargs.get("comment_id"),
-            project_id=self.kwargs.get("project_id"),
-        )
-        issue_activity.delay(
-            type="comment_reaction.activity.created",
-            requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
-            actor_id=str(self.request.user.id),
-            issue_id=None,
-            project_id=str(self.kwargs.get("project_id", None)),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp())
-        )
+    def create(self, request, slug, project_id, comment_id):
+        try:
+            serializer = CommentReactionSerializer(data=request.data)
+            if serializer.is_valid():
+                serializer.save(
+                    project_id=project_id,
+                    actor_id=request.user.id,
+                    comment_id=comment_id,
+                )
+                issue_activity.delay(
+                    type="comment_reaction.activity.created",
+                    requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+                    actor_id=str(request.user.id),
+                    issue_id=None,
+                    project_id=str(project_id),
+                    current_instance=None,
+                    epoch=int(timezone.now().timestamp()),
+                )
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

     def destroy(self, request, slug, project_id, comment_id, reaction_code):
@ -1555,7 +1654,7 @@ class CommentReactionViewSet(BaseViewSet):
"comment_id": str(comment_id), "comment_id": str(comment_id),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
comment_reaction.delete() comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@ -1652,7 +1751,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
issue_id=str(issue_id), issue_id=str(issue_id),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
if not ProjectMember.objects.filter( if not ProjectMember.objects.filter(
project_id=project_id, project_id=project_id,
@ -1702,7 +1801,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data, IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1736,7 +1835,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data, IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
comment.delete() comment.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@ -1811,7 +1910,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)), issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)), project_id=str(self.kwargs.get("project_id", None)),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1856,7 +1955,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
"identifier": str(issue_reaction.id), "identifier": str(issue_reaction.id),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
issue_reaction.delete() issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@ -1930,7 +2029,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
issue_id=None, issue_id=None,
project_id=str(self.kwargs.get("project_id", None)), project_id=str(self.kwargs.get("project_id", None)),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1982,7 +2081,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
"comment_id": str(comment_id), "comment_id": str(comment_id),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
comment_reaction.delete() comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@ -2046,7 +2145,7 @@ class IssueVotePublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)), issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)), project_id=str(self.kwargs.get("project_id", None)),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
serializer = IssueVoteSerializer(issue_vote) serializer = IssueVoteSerializer(issue_vote)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
@ -2081,7 +2180,7 @@ class IssueVotePublicViewSet(BaseViewSet):
"identifier": str(issue_vote.id), "identifier": str(issue_vote.id),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
issue_vote.delete() issue_vote.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@@ -2100,24 +2199,19 @@ class IssueRelationViewSet(BaseViewSet):
         ProjectEntityPermission,
     ]

-    def perform_destroy(self, instance):
-        current_instance = (
-            self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
-        )
-        if current_instance is not None:
-            issue_activity.delay(
-                type="issue_relation.activity.deleted",
-                requested_data=json.dumps({"related_list": None}),
-                actor_id=str(self.request.user.id),
-                issue_id=str(self.kwargs.get("issue_id", None)),
-                project_id=str(self.kwargs.get("project_id", None)),
-                current_instance=json.dumps(
-                    IssueRelationSerializer(current_instance).data,
-                    cls=DjangoJSONEncoder,
-                ),
-                epoch=int(timezone.now().timestamp())
-            )
-        return super().perform_destroy(instance)
+    def get_queryset(self):
+        return self.filter_queryset(
+            super()
+            .get_queryset()
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(issue_id=self.kwargs.get("issue_id"))
+            .filter(project__project_projectmember__member=self.request.user)
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("issue")
+            .distinct()
+        )

     def create(self, request, slug, project_id, issue_id):
try: try:
@ -2149,7 +2243,7 @@ class IssueRelationViewSet(BaseViewSet):
issue_id=str(issue_id), issue_id=str(issue_id),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
if relation == "blocking": if relation == "blocking":
@@ -2181,18 +2275,31 @@ class IssueRelationViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-    def get_queryset(self):
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(project_id=self.kwargs.get("project_id"))
-            .filter(issue_id=self.kwargs.get("issue_id"))
-            .filter(project__project_projectmember__member=self.request.user)
-            .select_related("project")
-            .select_related("workspace")
-            .select_related("issue")
-            .distinct()
-        )
+    def destroy(self, request, slug, project_id, issue_id, pk):
+        try:
+            issue_relation = IssueRelation.objects.get(
+                workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+            )
+            current_instance = json.dumps(
+                IssueRelationSerializer(issue_relation).data,
+                cls=DjangoJSONEncoder,
+            )
+            issue_relation.delete()
+            issue_activity.delay(
+                type="issue_relation.activity.deleted",
+                requested_data=json.dumps({"related_list": None}),
+                actor_id=str(request.user.id),
+                issue_id=str(issue_id),
+                project_id=str(project_id),
+                current_instance=current_instance,
+                epoch=int(timezone.now().timestamp()),
+            )
+            return Response(status=status.HTTP_204_NO_CONTENT)
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
@ -2405,28 +2512,6 @@ class IssueDraftViewSet(BaseViewSet):
serializer_class = IssueFlatSerializer serializer_class = IssueFlatSerializer
model = Issue model = Issue
def perform_destroy(self, instance):
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="issue_draft.activity.deleted",
requested_data=json.dumps(
{"issue_id": str(self.kwargs.get("pk", None))}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.objects.annotate( Issue.objects.annotate(
@ -2452,7 +2537,6 @@ class IssueDraftViewSet(BaseViewSet):
) )
) )
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try: try:
@ -2561,7 +2645,6 @@ class IssueDraftViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try: try:
project = Project.objects.get(pk=project_id) project = Project.objects.get(pk=project_id)
@ -2586,7 +2669,7 @@ class IssueDraftViewSet(BaseViewSet):
issue_id=str(serializer.data.get("id", None)), issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -2596,19 +2679,20 @@ class IssueDraftViewSet(BaseViewSet):
                 {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
             )

     def partial_update(self, request, slug, project_id, pk):
         try:
             issue = Issue.objects.get(
                 workspace__slug=slug, project_id=project_id, pk=pk
             )
-            serializer = IssueSerializer(
-                issue, data=request.data, partial=True
-            )
+            serializer = IssueSerializer(issue, data=request.data, partial=True)
             if serializer.is_valid():
-                if(request.data.get("is_draft") is not None and not request.data.get("is_draft")):
-                    serializer.save(created_at=timezone.now(), updated_at=timezone.now())
+                if request.data.get("is_draft") is not None and not request.data.get(
+                    "is_draft"
+                ):
+                    serializer.save(
+                        created_at=timezone.now(), updated_at=timezone.now()
+                    )
                 else:
                     serializer.save()
                 issue_activity.delay(
@@ -2621,7 +2705,7 @@ class IssueDraftViewSet(BaseViewSet):
                         IssueSerializer(issue).data,
                         cls=DjangoJSONEncoder,
                     ),
-                    epoch=int(timezone.now().timestamp())
+                    epoch=int(timezone.now().timestamp()),
                 )
                 return Response(serializer.data, status=status.HTTP_200_OK)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -2637,7 +2721,6 @@ class IssueDraftViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def retrieve(self, request, slug, project_id, pk=None): def retrieve(self, request, slug, project_id, pk=None):
try: try:
issue = Issue.objects.get( issue = Issue.objects.get(
@ -2649,3 +2732,25 @@ class IssueDraftViewSet(BaseViewSet):
{"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
) )
    def destroy(self, request, slug, project_id, pk=None):
        try:
            issue = Issue.objects.get(
                workspace__slug=slug, project_id=project_id, pk=pk
            )
            # Snapshot the issue before deleting it so the activity log keeps the previous state
            current_instance = json.dumps(
                IssueSerializer(issue).data, cls=DjangoJSONEncoder
            )
            issue.delete()
            issue_activity.delay(
                type="issue_draft.activity.deleted",
                requested_data=json.dumps({"issue_id": str(pk)}),
                actor_id=str(request.user.id),
                issue_id=str(pk),
                project_id=str(project_id),
                current_instance=current_instance,
                epoch=int(timezone.now().timestamp()),
            )
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

View File

@ -141,29 +141,6 @@ class ModuleViewSet(BaseViewSet):
.order_by(order_by, "name") .order_by(order_by, "name")
) )
def perform_destroy(self, instance):
module_issues = list(
ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
)
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("pk")),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try: try:
project = Project.objects.get(workspace__slug=slug, pk=project_id) project = Project.objects.get(workspace__slug=slug, pk=project_id)
@ -309,6 +286,37 @@ class ModuleViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def destroy(self, request, slug, project_id, pk):
try:
module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
module.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(pk),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ModuleIssueViewSet(BaseViewSet): class ModuleIssueViewSet(BaseViewSet):
serializer_class = ModuleIssueSerializer serializer_class = ModuleIssueSerializer
@ -329,22 +337,6 @@ class ModuleIssueViewSet(BaseViewSet):
module_id=self.kwargs.get("module_id"), module_id=self.kwargs.get("module_id"),
) )
def perform_destroy(self, instance):
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("module_id")),
"issues": [str(instance.issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
@ -510,7 +502,7 @@ class ModuleIssueViewSet(BaseViewSet):
), ),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response( return Response(
@ -528,6 +520,34 @@ class ModuleIssueViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def destroy(self, request, slug, project_id, module_id, pk):
try:
module_issue = ModuleIssue.objects.get(
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
)
module_issue.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(module_id),
"issues": [str(module_issue.issue_id)],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ModuleLinkViewSet(BaseViewSet): class ModuleLinkViewSet(BaseViewSet):
permission_classes = [ permission_classes = [

View File

@@ -37,42 +37,34 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
     def list(self, request, slug):
         try:
+            # Get query parameters
             snoozed = request.GET.get("snoozed", "false")
             archived = request.GET.get("archived", "false")
             read = request.GET.get("read", "true")
+            # Filter type
             type = request.GET.get("type", "all")

-            notifications = (
-                Notification.objects.filter(
-                    workspace__slug=slug, receiver_id=request.user.id
-                )
-                .select_related("workspace", "project", "triggered_by", "receiver")
-                .order_by("snoozed_till", "-created_at")
-            )
-
-            # Filter for snoozed notifications
-            if snoozed == "false":
-                notifications = notifications.filter(
-                    Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
-                )
-
-            if snoozed == "true":
-                notifications = notifications.filter(
-                    Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
-                )
+            notifications = Notification.objects.filter(workspace__slug=slug, receiver_id=request.user.id) \
+                .select_related("workspace", "project", "triggered_by", "receiver") \
+                .order_by("snoozed_till", "-created_at")
+
+            # Filters based on query parameters
+            snoozed_filters = {
+                "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
+                "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+            }
+
+            notifications = notifications.filter(snoozed_filters[snoozed])
+
+            archived_filters = {
+                "true": Q(archived_at__isnull=False),
+                "false": Q(archived_at__isnull=True),
+            }
+
+            notifications = notifications.filter(archived_filters[archived])

             if read == "false":
                 notifications = notifications.filter(read_at__isnull=True)

-            # Filter for archived or unarchive
-            if archived == "false":
-                notifications = notifications.filter(archived_at__isnull=True)
-
-            if archived == "true":
-                notifications = notifications.filter(archived_at__isnull=False)
-
             # Subscribed issues
             if type == "watching":
                 issue_ids = IssueSubscriber.objects.filter(
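
The rewrite above maps the "snoozed" and "archived" query parameters to Q objects through small dicts instead of chained ifs. A minimal sketch of that mapping, with a .get() fallback added for unexpected values (the view itself assumes the parameter is "true" or "false"); the helper name is hypothetical.

from django.db.models import Q
from django.utils import timezone


def snoozed_filter(param: str) -> Q:
    # Mirrors snoozed_filters above; .get() falls back to the "false" branch instead of
    # raising KeyError if a client sends an unexpected value.
    snoozed_filters = {
        "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
        "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
    }
    return snoozed_filters.get(param, snoozed_filters["false"])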

View File

@@ -186,14 +186,11 @@ class OauthEndpoint(BaseAPIView):
             user.is_email_verified = email_verified
             user.save()

-            serialized_user = UserSerializer(user).data
-
             access_token, refresh_token = get_tokens_for_user(user)

             data = {
                 "access_token": access_token,
                 "refresh_token": refresh_token,
-                "user": serialized_user,
             }

             SocialLoginConnection.objects.update_or_create(
@@ -264,14 +261,11 @@ class OauthEndpoint(BaseAPIView):
             user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
             user.token_updated_at = timezone.now()
             user.save()

-            serialized_user = UserSerializer(user).data
-
             access_token, refresh_token = get_tokens_for_user(user)
             data = {
                 "access_token": access_token,
                 "refresh_token": refresh_token,
-                "user": serialized_user,
-                "permissions": [],
             }

             if settings.ANALYTICS_BASE_API:
                 _ = requests.post(
View File
@ -7,6 +7,7 @@ from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.db.models import (
+   Prefetch,
    Q,
    Exists,
    OuterRef,
@ -29,6 +30,7 @@ from sentry_sdk import capture_exception
from .base import BaseViewSet, BaseAPIView
from plane.api.serializers import (
    ProjectSerializer,
+   ProjectListSerializer,
    ProjectMemberSerializer,
    ProjectDetailSerializer,
    ProjectMemberInviteSerializer,
@ -86,12 +88,6 @@ class ProjectViewSet(BaseViewSet):
            return ProjectDetailSerializer

    def get_queryset(self):
-       subquery = ProjectFavorite.objects.filter(
-           user=self.request.user,
-           project_id=OuterRef("pk"),
-           workspace__slug=self.kwargs.get("slug"),
-       )
        return self.filter_queryset(
            super()
            .get_queryset()
@ -100,7 +96,15 @@ class ProjectViewSet(BaseViewSet):
            .select_related(
                "workspace", "workspace__owner", "default_assignee", "project_lead"
            )
-           .annotate(is_favorite=Exists(subquery))
+           .annotate(
+               is_favorite=Exists(
+                   ProjectFavorite.objects.filter(
+                       user=self.request.user,
+                       project_id=OuterRef("pk"),
+                       workspace__slug=self.kwargs.get("slug"),
+                   )
+               )
+           )
            .annotate(
                is_member=Exists(
                    ProjectMember.objects.filter(
@ -149,12 +153,8 @@ class ProjectViewSet(BaseViewSet):
    def list(self, request, slug):
        try:
-           is_favorite = request.GET.get("is_favorite", "all")
-           subquery = ProjectFavorite.objects.filter(
-               user=self.request.user,
-               project_id=OuterRef("pk"),
-               workspace__slug=self.kwargs.get("slug"),
-           )
+           fields = [field for field in request.GET.get("fields", "").split(",") if field]
            sort_order_query = ProjectMember.objects.filter(
                member=request.user,
                project_id=OuterRef("pk"),
@ -162,37 +162,31 @@ class ProjectViewSet(BaseViewSet):
            ).values("sort_order")
            projects = (
                self.get_queryset()
-               .annotate(is_favorite=Exists(subquery))
                .annotate(sort_order=Subquery(sort_order_query))
+               .prefetch_related(
+                   Prefetch(
+                       "project_projectmember",
+                       queryset=ProjectMember.objects.filter(
+                           workspace__slug=slug,
+                       ).select_related("member"),
+                   )
+               )
                .order_by("sort_order", "name")
-               .annotate(
-                   total_members=ProjectMember.objects.filter(
-                       project_id=OuterRef("id")
-                   )
-                   .order_by()
-                   .annotate(count=Func(F("id"), function="Count"))
-                   .values("count")
-               )
-               .annotate(
-                   total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
-                   .order_by()
-                   .annotate(count=Func(F("id"), function="Count"))
-                   .values("count")
-               )
-               .annotate(
-                   total_modules=Module.objects.filter(project_id=OuterRef("id"))
-                   .order_by()
-                   .annotate(count=Func(F("id"), function="Count"))
-                   .values("count")
-               )
            )
-           if is_favorite == "true":
-               projects = projects.filter(is_favorite=True)
-           if is_favorite == "false":
-               projects = projects.filter(is_favorite=False)
-           return Response(ProjectDetailSerializer(projects, many=True).data)
+           if request.GET.get("per_page", False) and request.GET.get("cursor", False):
+               return self.paginate(
+                   request=request,
+                   queryset=(projects),
+                   on_results=lambda projects: ProjectListSerializer(
+                       projects, many=True
+                   ).data,
+               )
+           return Response(
+               ProjectListSerializer(
+                   projects, many=True, fields=fields if fields else None
+               ).data
+           )
        except Exception as e:
            capture_exception(e)
            return Response(
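Illustrative sketch (not from this commit): the list endpoint now reads an optional `fields` query parameter and only falls back to full serialization when no cursor pagination is requested. The parsing step is a comma-split that drops empty entries; `parse_fields` below is an assumed standalone name for that one line:

    def parse_fields(raw: str) -> list[str]:
        # "id,name,identifier" -> ["id", "name", "identifier"]
        # ""                   -> []  (the view then passes fields=None, i.e. serialize everything)
        return [field for field in raw.split(",") if field]

    assert parse_fields("id,name,") == ["id", "name"]
    assert parse_fields("") == []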
View File
@ -8,6 +8,8 @@ from sentry_sdk import capture_exception
from plane.api.serializers import (
    UserSerializer,
    IssueActivitySerializer,
+   UserMeSerializer,
+   UserMeSettingsSerializer,
)
from plane.api.views.base import BaseViewSet, BaseAPIView
@ -31,69 +33,22 @@ class UserEndpoint(BaseViewSet):
    def retrieve(self, request):
        try:
-           workspace = Workspace.objects.get(
-               pk=request.user.last_workspace_id, workspace_member__member=request.user
-           )
-           workspace_invites = WorkspaceMemberInvite.objects.filter(
-               email=request.user.email
-           ).count()
-           assigned_issues = Issue.issue_objects.filter(
-               assignees__in=[request.user]
-           ).count()
-           serialized_data = UserSerializer(request.user).data
-           serialized_data["workspace"] = {
-               "last_workspace_id": request.user.last_workspace_id,
-               "last_workspace_slug": workspace.slug,
-               "fallback_workspace_id": request.user.last_workspace_id,
-               "fallback_workspace_slug": workspace.slug,
-               "invites": workspace_invites,
-           }
-           serialized_data.setdefault("issues", {})[
-               "assigned_issues"
-           ] = assigned_issues
+           serialized_data = UserMeSerializer(request.user).data
            return Response(
                serialized_data,
                status=status.HTTP_200_OK,
            )
-       except Workspace.DoesNotExist:
-           # This exception will be hit even when the `last_workspace_id` is None
-           workspace_invites = WorkspaceMemberInvite.objects.filter(
-               email=request.user.email
-           ).count()
-           assigned_issues = Issue.issue_objects.filter(
-               assignees__in=[request.user]
-           ).count()
-           fallback_workspace = (
-               Workspace.objects.filter(workspace_member__member=request.user)
-               .order_by("created_at")
-               .first()
-           )
-           serialized_data = UserSerializer(request.user).data
-           serialized_data["workspace"] = {
-               "last_workspace_id": None,
-               "last_workspace_slug": None,
-               "fallback_workspace_id": fallback_workspace.id
-               if fallback_workspace is not None
-               else None,
-               "fallback_workspace_slug": fallback_workspace.slug
-               if fallback_workspace is not None
-               else None,
-               "invites": workspace_invites,
-           }
-           serialized_data.setdefault("issues", {})[
-               "assigned_issues"
-           ] = assigned_issues
-           return Response(
-               serialized_data,
-               status=status.HTTP_200_OK,
-           )
+       except Exception as e:
+           capture_exception(e)
+           return Response(
+               {"error": "Something went wrong please try again later"},
+               status=status.HTTP_400_BAD_REQUEST,
+           )
+
+   def retrieve_user_settings(self, request):
+       try:
+           serialized_data = UserMeSettingsSerializer(request.user).data
+           return Response(serialized_data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
View File
@ -48,6 +48,7 @@ from plane.api.serializers import (
    IssueActivitySerializer,
    IssueLiteSerializer,
    WorkspaceMemberAdminSerializer,
+   WorkspaceMemberMeSerializer,
)
from plane.api.views.base import BaseAPIView
from . import BaseViewSet
@ -825,7 +826,7 @@ class WorkspaceMemberUserEndpoint(BaseAPIView):
            workspace_member = WorkspaceMember.objects.get(
                member=request.user, workspace__slug=slug
            )
-           serializer = WorkSpaceMemberSerializer(workspace_member)
+           serializer = WorkspaceMemberMeSerializer(workspace_member)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist):
            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
View File
@ -20,8 +20,8 @@ from plane.utils.issue_filters import issue_filters
row_mapping = {
    "state__name": "State",
    "state__group": "State Group",
-   "labels__name": "Label",
-   "assignees__display_name": "Assignee Name",
+   "labels__id": "Label",
+   "assignees__id": "Assignee Name",
    "start_date": "Start Date",
    "target_date": "Due Date",
    "completed_at": "Completed At",
@ -29,8 +29,321 @@ row_mapping = {
    "issue_count": "Issue Count",
    "priority": "Priority",
    "estimate": "Estimate",
+   "issue_cycle__cycle_id": "Cycle",
+   "issue_module__module_id": "Module"
}
ASSIGNEE_ID = "assignees__id"
LABEL_ID = "labels__id"
STATE_ID = "state_id"
CYCLE_ID = "issue_cycle__cycle_id"
MODULE_ID = "issue_module__module_id"
def send_export_email(email, slug, csv_buffer):
"""Helper function to send export email."""
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, [email])
msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
msg.send(fail_silently=False)
def get_assignee_details(slug, filters):
"""Fetch assignee details if required."""
return (
Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False
)
.distinct("assignees__id")
.order_by("assignees__id")
.values(
"assignees__avatar",
"assignees__display_name",
"assignees__first_name",
"assignees__last_name",
"assignees__id",
)
)
def get_label_details(slug, filters):
"""Fetch label details if required"""
return (
Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False)
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
)
def get_state_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
)
.distinct("state_id")
.order_by("state_id")
.values("state_id", "state__name", "state__color")
)
def get_module_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_module__module_id__isnull=False,
)
.distinct("issue_module__module_id")
.order_by("issue_module__module_id")
.values(
"issue_module__module_id",
"issue_module__module__name",
)
)
def get_cycle_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_cycle__cycle_id__isnull=False,
)
.distinct("issue_cycle__cycle_id")
.order_by("issue_cycle__cycle_id")
.values(
"issue_cycle__cycle_id",
"issue_cycle__cycle__name",
)
)
def generate_csv_from_rows(rows):
"""Generate CSV buffer from rows."""
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
[writer.writerow(row) for row in rows]
return csv_buffer
def generate_segmented_rows(
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
segment_zero = list(
set(
item.get("segment") for sublist in distribution.values() for item in sublist
)
)
segmented = segment
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
] + segment_zero
rows = []
for item, data in distribution.items():
generated_row = [
item,
sum(obj.get(key) for obj in data if obj.get(key) is not None),
]
for segment in segment_zero:
value = next((x.get(key) for x in data if x.get("segment") == segment), "0")
generated_row.append(value)
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
generated_row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
generated_row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
generated_row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
generated_row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
generated_row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(generated_row))
if segmented == ASSIGNEE_ID:
for index, segm in enumerate(row_zero[2:]):
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(segm)
),
None,
)
if assignee:
row_zero[
index + 2
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)),
None,
)
if label:
row_zero[index + 2] = label["labels__name"]
if segmented == STATE_ID:
for index, segm in enumerate(row_zero[2:]):
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(segm)),
None,
)
if state:
row_zero[index + 2] = state["state__name"]
if segmented == MODULE_ID:
for index, segm in enumerate(row_zero[2:]):
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(segm)),
None,
)
if module:
row_zero[index + 2] = module["issue_module__module__name"]
if segmented == CYCLE_ID:
for index, segm in enumerate(row_zero[2:]):
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)),
None,
)
if cycle:
row_zero[index + 2] = cycle["issue_cycle__cycle__name"]
return [tuple(row_zero)] + rows
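Illustrative sketch (not from this commit): generate_segmented_rows returns a header tuple of the form (x-axis label, y-axis label, *segment labels) followed by one tuple per x-axis bucket; missing segment values come back as the string "0" because of the `next(..., "0")` default above. Example output with made-up data:

    rows = [
        ("State", "Issue Count", "High", "Low"),  # row_zero: x-axis, y-axis, then one column per segment
        ("Backlog", 5, 3, "0"),                   # bucket total, then per-segment counts
        ("Done", 2, "0", 2),
    ]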
def generate_non_segmented_rows(
distribution,
x_axis,
y_axis,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
rows = []
for item, data in distribution.items():
row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")]
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(row))
row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")]
return [tuple(row_zero)] + rows
@shared_task
def analytic_export_task(email, data, slug):
@ -43,134 +356,70 @@ def analytic_export_task(email, data, slug):
        segment = data.get("segment", False)
        distribution = build_graph_plot(
-           queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+           queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )
        key = "count" if y_axis == "issue_count" else "estimate"

-       segmented = segment
-       assignee_details = {}
-       if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
-           assignee_details = (
-               Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
-               .order_by("assignees__id")
-               .distinct("assignees__id")
-               .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
-           )
+       assignee_details = (
+           get_assignee_details(slug, filters)
+           if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID
+           else {}
+       )
+       label_details = (
+           get_label_details(slug, filters)
+           if x_axis == LABEL_ID or segment == LABEL_ID
+           else {}
+       )
+       state_details = (
+           get_state_details(slug, filters)
+           if x_axis == STATE_ID or segment == STATE_ID
+           else {}
+       )
+       cycle_details = (
+           get_cycle_details(slug, filters)
+           if x_axis == CYCLE_ID or segment == CYCLE_ID
+           else {}
+       )
+       module_details = (
+           get_module_details(slug, filters)
+           if x_axis == MODULE_ID or segment == MODULE_ID
+           else {}
+       )

        if segment:
-           segment_zero = []
-           for item in distribution:
-               current_dict = distribution.get(item)
-               for current in current_dict:
-                   segment_zero.append(current.get("segment"))
-
-           segment_zero = list(set(segment_zero))
-           row_zero = (
-               [
-                   row_mapping.get(x_axis, "X-Axis"),
-               ]
-               + [
-                   row_mapping.get(y_axis, "Y-Axis"),
-               ]
-               + segment_zero
-           )
-           rows = []
-           for item in distribution:
-               generated_row = [
-                   item,
-               ]
-               data = distribution.get(item)
-               # Add y axis values
-               generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))
-               for segment in segment_zero:
-                   value = [x for x in data if x.get("segment") == segment]
-                   if len(value):
-                       generated_row.append(value[0].get(key))
-                   else:
-                       generated_row.append("0")
-               # x-axis replacement for names
-               if x_axis in ["assignees__id"]:
-                   assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
-                   if len(assignee):
-                       generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-               rows.append(tuple(generated_row))
-
-           # If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names
-           if segmented in ["assignees__id"]:
-               for index, segm in enumerate(row_zero[2:]):
-                   # find the name of the user
-                   assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
-                   if len(assignee):
-                       row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-           rows = [tuple(row_zero)] + rows
-
-           csv_buffer = io.StringIO()
-           writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
-           # Write CSV data to the buffer
-           for row in rows:
-               writer.writerow(row)
-
-           subject = "Your Export is ready"
-           html_content = render_to_string("emails/exports/analytics.html", {})
-           text_content = strip_tags(html_content)
-           csv_buffer.seek(0)
-           msg = EmailMultiAlternatives(
-               subject, text_content, settings.EMAIL_FROM, [email]
-           )
-           msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
-           msg.send(fail_silently=False)
+           rows = generate_segmented_rows(
+               distribution,
+               x_axis,
+               y_axis,
+               segment,
+               key,
+               assignee_details,
+               label_details,
+               state_details,
+               cycle_details,
+               module_details,
+           )
        else:
-           row_zero = [
-               row_mapping.get(x_axis, "X-Axis"),
-               row_mapping.get(y_axis, "Y-Axis"),
-           ]
-           rows = []
-           for item in distribution:
-               row = [
-                   item,
-                   distribution.get(item)[0].get("count")
-                   if y_axis == "issue_count"
-                   else distribution.get(item)[0].get("estimate "),
-               ]
-               # x-axis replacement to names
-               if x_axis in ["assignees__id"]:
-                   assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
-                   if len(assignee):
-                       row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-               rows.append(tuple(row))
-           rows = [tuple(row_zero)] + rows
-
-           csv_buffer = io.StringIO()
-           writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
-           # Write CSV data to the buffer
-           for row in rows:
-               writer.writerow(row)
-
-           subject = "Your Export is ready"
-           html_content = render_to_string("emails/exports/analytics.html", {})
-           text_content = strip_tags(html_content)
-           csv_buffer.seek(0)
-           msg = EmailMultiAlternatives(
-               subject, text_content, settings.EMAIL_FROM, [email]
-           )
-           msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
-           msg.send(fail_silently=False)
+           # note: unlike generate_segmented_rows, this helper takes no segment argument
+           rows = generate_non_segmented_rows(
+               distribution,
+               x_axis,
+               y_axis,
+               key,
+               assignee_details,
+               label_details,
+               state_details,
+               cycle_details,
+               module_details,
+           )
+
+       csv_buffer = generate_csv_from_rows(rows)
+       send_export_email(email, slug, csv_buffer)
    except Exception as e:
+       # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
+       return
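Illustrative sketch (not from this commit): every branch of the task now ends in the same two helpers defined earlier in this file. A minimal usage example with made-up rows:

    rows = [
        ("State", "Issue Count"),
        ("Backlog", 4),
        ("Done", 7),
    ]
    csv_buffer = generate_csv_from_rows(rows)   # io.StringIO with one quoted CSV line per tuple
    send_export_email("user@example.com", "my-workspace", csv_buffer)  # attaches my-workspace-analytics.csv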
View File
@ -16,7 +16,7 @@ from plane.db.models import User
def forgot_password(first_name, email, uidb64, token, current_site):
    try:
-       realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}"
+       realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}"
        abs_url = current_site + realtivelink
        from_email_string = settings.EMAIL_FROM
View File
@ -33,13 +33,7 @@ from plane.api.serializers import IssueActivitySerializer
# Track Chnages in name
def track_name(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("name") != requested_data.get("name"):
        issue_activities.append(
@ -60,13 +54,7 @@ def track_name(
# Track changes in parent issue
def track_parent(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("parent") != requested_data.get("parent"):
        if requested_data.get("parent") == None:
@ -112,13 +100,7 @@ def track_parent(
# Track changes in priority
def track_priority(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("priority") != requested_data.get("priority"):
        issue_activities.append(
@ -139,13 +121,7 @@ def track_priority(
# Track chnages in state of the issue
def track_state(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("state") != requested_data.get("state"):
        new_state = State.objects.get(pk=requested_data.get("state", None))
@ -171,19 +147,21 @@ def track_state(
# Track issue description
def track_description(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("description_html") != requested_data.get(
        "description_html"
    ):
-       last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first()
-       if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id):
+       last_activity = (
+           IssueActivity.objects.filter(issue_id=issue_id)
+           .order_by("-created_at")
+           .first()
+       )
+       if (
+           last_activity is not None
+           and last_activity.field == "description"
+           and actor.id == last_activity.actor_id
+       ):
            last_activity.created_at = timezone.now()
            last_activity.save(update_fields=["created_at"])
        else:
@ -205,13 +183,7 @@ def track_description(
# Track changes in issue target date
def track_target_date(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("target_date") != requested_data.get("target_date"):
        if requested_data.get("target_date") == None:
@ -248,13 +220,7 @@ def track_target_date(
# Track changes in issue start date
def track_start_date(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("start_date") != requested_data.get("start_date"):
        if requested_data.get("start_date") == None:
@ -291,13 +257,7 @@ def track_start_date(
# Track changes in issue labels
def track_labels(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    # Label Addition
    if len(requested_data.get("labels_list")) > len(current_instance.get("labels")):
@ -346,13 +306,7 @@ def track_labels(
# Track changes in issue assignees
def track_assignees(
-   requested_data,
-   current_instance,
-   issue_id,
-   project,
-   actor,
-   issue_activities,
-   epoch
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    # Assignee Addition
    if len(requested_data.get("assignees_list")) > len(
@ -547,7 +501,7 @@ def update_issue_activity(
            project,
            actor,
            issue_activities,
-           epoch
+           epoch,
        )
@ -868,7 +822,6 @@ def update_link_activity(
def delete_link_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    current_instance = (
        json.loads(current_instance) if current_instance is not None else None
    )
@ -929,12 +882,19 @@ def delete_attachment_activity(
            )
        )

def create_issue_reaction_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    if requested_data and requested_data.get("reaction") is not None:
-       issue_reaction = IssueReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', flat=True).first()
+       issue_reaction = (
+           IssueReaction.objects.filter(
+               reaction=requested_data.get("reaction"), project=project, actor=actor
+           )
+           .values_list("id", flat=True)
+           .first()
+       )
        if issue_reaction is not None:
            issue_activities.append(
                IssueActivity(
@ -984,9 +944,19 @@ def create_comment_reaction_activity(
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    if requested_data and requested_data.get("reaction") is not None:
-       comment_reaction_id, comment_id = CommentReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', 'comment__id').first()
-       comment = IssueComment.objects.get(pk=comment_id,project=project)
-       if comment is not None and comment_reaction_id is not None and comment_id is not None:
+       comment_reaction_id, comment_id = (
+           CommentReaction.objects.filter(
+               reaction=requested_data.get("reaction"), project=project, actor=actor
+           )
+           .values_list("id", "comment__id")
+           .first()
+       )
+       comment = IssueComment.objects.get(pk=comment_id, project=project)
+       if (
+           comment is not None
+           and comment_reaction_id is not None
+           and comment_id is not None
+       ):
            issue_activities.append(
                IssueActivity(
                    issue_id=comment.issue_id,
@ -1012,7 +982,13 @@ def delete_comment_reaction_activity(
        json.loads(current_instance) if current_instance is not None else None
    )
    if current_instance and current_instance.get("reaction") is not None:
-       issue_id = IssueComment.objects.filter(pk=current_instance.get("comment_id"), project=project).values_list('issue_id', flat=True).first()
+       issue_id = (
+           IssueComment.objects.filter(
+               pk=current_instance.get("comment_id"), project=project
+           )
+           .values_list("issue_id", flat=True)
+           .first()
+       )
        if issue_id is not None:
            issue_activities.append(
                IssueActivity(
@ -1104,7 +1080,7 @@ def create_issue_relation_activity(
                    field=relation_type,
                    project=project,
                    workspace=project.workspace,
-                   comment=f'added {relation_type} relation',
+                   comment=f"added {relation_type} relation",
                    old_identifier=issue_relation.get("issue"),
                )
            )
@ -1149,7 +1125,7 @@ def delete_issue_relation_activity(
                field=relation_type,
                project=project,
                workspace=project.workspace,
-               comment=f'deleted {relation_type} relation',
+               comment=f"deleted {relation_type} relation",
                old_identifier=current_instance.get("issue"),
                epoch=epoch,
            )
@ -1196,7 +1172,10 @@ def update_draft_issue_activity(
    current_instance = (
        json.loads(current_instance) if current_instance is not None else None
    )
-   if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False:
+   if (
+       requested_data.get("is_draft") is not None
+       and requested_data.get("is_draft") == False
+   ):
        issue_activities.append(
            IssueActivity(
                issue_id=issue_id,
@ -1223,7 +1202,6 @@ def update_draft_issue_activity(
    )

def delete_draft_issue_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
@ -1239,6 +1217,7 @@ def delete_draft_issue_activity(
            )
        )

# Receive message from room group
@shared_task
def issue_activity(
@ -1252,6 +1231,7 @@ def issue_activity(
    subscriber=True,
):
    try:
        issue_activities = []
        actor = User.objects.get(pk=actor_id)
View File
@ -12,34 +12,47 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc
from plane.db.models import Issue

-def build_graph_plot(queryset, x_axis, y_axis, segment=None):
-   temp_axis = x_axis
-   if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
-       year = ExtractYear(x_axis)
-       month = ExtractMonth(x_axis)
-       dimension = Concat(year, Value("-"), month, output_field=CharField())
-       queryset = queryset.annotate(dimension=dimension)
-       x_axis = "dimension"
-   else:
-       queryset = queryset.annotate(dimension=F(x_axis))
-       x_axis = "dimension"
-   if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
-       queryset = queryset.exclude(x_axis__is_null=True)
+def annotate_with_monthly_dimension(queryset, field_name):
+   # Get the year and the months
+   year = ExtractYear(field_name)
+   month = ExtractMonth(field_name)
+   # Concat the year and month
+   dimension = Concat(year, Value("-"), month, output_field=CharField())
+   # Annotate the dimension
+   return queryset.annotate(dimension=dimension)
+
+def extract_axis(queryset, x_axis):
+   # Format the dimension when the axis is in date
+   if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+       queryset = annotate_with_monthly_dimension(queryset, x_axis)
+       return queryset, "dimension"
+   else:
+       return queryset.annotate(dimension=F(x_axis)), "dimension"
+
+def sort_data(data, temp_axis):
+   # When the axis is in priority order by
+   if temp_axis == "priority":
+       order = ["low", "medium", "high", "urgent", "none"]
+       return {key: data[key] for key in order if key in data}
+   else:
+       return dict(sorted(data.items(), key=lambda x: (x[0] == "none", x[0])))
+
+def build_graph_plot(queryset, x_axis, y_axis, segment=None):
+   # temp x_axis
+   temp_axis = x_axis
+   # Extract the x_axis and queryset
+   queryset, x_axis = extract_axis(queryset, x_axis)
+   if x_axis == "dimension":
+       queryset = queryset.exclude(dimension__isnull=True)
+   #
    if segment in ["created_at", "start_date", "target_date", "completed_at"]:
-       year = ExtractYear(segment)
-       month = ExtractMonth(segment)
-       dimension = Concat(year, Value("-"), month, output_field=CharField())
-       queryset = queryset.annotate(segmented=dimension)
+       queryset = annotate_with_monthly_dimension(queryset, segment)
        segment = "segmented"

    queryset = queryset.values(x_axis)
-   # Group queryset by x_axis field
+   # Issue count
    if y_axis == "issue_count":
        queryset = queryset.annotate(
            is_null=Case(
@ -49,37 +62,20 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
            ),
            dimension_ex=Coalesce("dimension", Value("null")),
        ).values("dimension")
-       if segment:
-           queryset = queryset.annotate(segment=F(segment)).values(
-               "dimension", "segment"
-           )
-       else:
-           queryset = queryset.values("dimension")
+       queryset = queryset.annotate(segment=F(segment)) if segment else queryset
+       queryset = queryset.values("dimension", "segment") if segment else queryset.values("dimension")
        queryset = queryset.annotate(count=Count("*")).order_by("dimension")
-   if y_axis == "estimate":
-       queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
-       if segment:
-           queryset = queryset.annotate(segment=F(segment)).values(
-               "dimension", "segment", "estimate"
-           )
-       else:
-           queryset = queryset.values("dimension", "estimate")
+   # Estimate
+   else:
+       queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
+       queryset = queryset.annotate(segment=F(segment)) if segment else queryset
+       queryset = queryset.values("dimension", "segment", "estimate") if segment else queryset.values("dimension", "estimate")

    result_values = list(queryset)
-   grouped_data = {}
-   for key, items in groupby(result_values, key=lambda x: x[str("dimension")]):
-       grouped_data[str(key)] = list(items)
-
-   sorted_data = grouped_data
-   if temp_axis == "priority":
-       order = ["low", "medium", "high", "urgent", "none"]
-       sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
-   else:
-       sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0])))
-   return sorted_data
+   grouped_data = {str(key): list(items) for key, items in groupby(result_values, key=lambda x: x[str("dimension")])}
+
+   return sort_data(grouped_data, temp_axis)

def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
    # Total Issues in Cycle or Module
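Illustrative sketch (not from this commit): the refactor splits the old inline logic into annotate_with_monthly_dimension, extract_axis and sort_data. A sample call — the workspace slug and the bucket contents shown are assumptions, not output taken from this diff:

    queryset = Issue.issue_objects.filter(workspace__slug="my-workspace")
    distribution = build_graph_plot(queryset, x_axis="priority", y_axis="issue_count")
    # e.g. {"low": [...], "medium": [...], "high": [...], "urgent": [...], "none": [...]}
    # sort_data enforces the fixed priority ordering above; any other x-axis is sorted
    # alphabetically with "none" pushed to the end.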
View File
@ -1 +1 @@
-python-3.11.5
+python-3.11.6