diff --git a/.env.example b/.env.example
index 082aa753b..b98adf171 100644
--- a/.env.example
+++ b/.env.example
@@ -33,3 +33,8 @@ USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
+# Set it to 0, to disable it
+ENABLE_WEBHOOK=1
+
+# Set it to 0, to disable it
+ENABLE_API=1
\ No newline at end of file
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 128f5134f..5b7d1c020 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -1,7 +1,7 @@
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.production"
+CORS_ALLOWED_ORIGINS="http://localhost"
 
 # Error logs
 SENTRY_DSN=""
@@ -58,6 +58,12 @@ ENABLE_MAGIC_LINK_LOGIN="0"
 
 # Email redirections and minio domain settings
 WEB_URL="http://localhost"
 
+# Set it to 0, to disable it
+ENABLE_WEBHOOK=1
+
+# Set it to 0, to disable it
+ENABLE_API=1
 # Gunicorn Workers
 GUNICORN_WORKERS=2
+
diff --git a/apiserver/bin/bucket_script.py b/apiserver/bin/bucket_script.py
index cb2d05540..89717d527 100644
--- a/apiserver/bin/bucket_script.py
+++ b/apiserver/bin/bucket_script.py
@@ -1,5 +1,6 @@
 import os, sys
 import boto3
+import json
 from botocore.exceptions import ClientError
 
 
@@ -10,6 +11,28 @@ import django
 django.setup()
 
 
+def set_bucket_public_policy(s3_client, bucket_name):
+    public_policy = {
+        "Version": "2012-10-17",
+        "Statement": [{
+            "Effect": "Allow",
+            "Principal": "*",
+            "Action": ["s3:GetObject"],
+            "Resource": [f"arn:aws:s3:::{bucket_name}/*"]
+        }]
+    }
+
+    try:
+        s3_client.put_bucket_policy(
+            Bucket=bucket_name,
+            Policy=json.dumps(public_policy)
+        )
+        print(f"Public read access policy set for bucket '{bucket_name}'.")
+    except ClientError as e:
+        print(f"Error setting public read access policy: {e}")
+
+
+
 def create_bucket():
     try:
         from django.conf import settings
@@ -31,6 +54,8 @@ def create_bucket():
 
         # If head_bucket does not raise an exception, the bucket exists
         print(f"Bucket '{bucket_name}' already exists.")
+
+        set_bucket_public_policy(s3_client, bucket_name)
 
     except ClientError as e:
         error_code = int(e.response['Error']['Code'])
@@ -41,6 +66,7 @@ def create_bucket():
             try:
                 s3_client.create_bucket(Bucket=bucket_name)
                 print(f"Bucket '{bucket_name}' created successfully.")
+                set_bucket_public_policy(s3_client, bucket_name)
             except ClientError as create_error:
                 print(f"Failed to create bucket: {create_error}")
         elif error_code == 403:
diff --git a/apiserver/plane/api/permissions/__init__.py b/apiserver/plane/api/permissions/__init__.py
index 8b15a9373..2298f3442 100644
--- a/apiserver/plane/api/permissions/__init__.py
+++ b/apiserver/plane/api/permissions/__init__.py
@@ -1,2 +1,17 @@
-from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
-from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
+
+from .workspace import (
+    WorkSpaceBasePermission,
+    WorkspaceOwnerPermission,
+    WorkSpaceAdminPermission,
+    WorkspaceEntityPermission,
+    WorkspaceViewerPermission,
+    WorkspaceUserPermission,
+)
+from .project import (
+    ProjectBasePermission,
+    ProjectEntityPermission,
+    ProjectMemberPermission,
+    ProjectLitePermission,
+)
+
+
diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/api/permissions/project.py
index 4f907dbd6..80775cbf6 100644
--- a/apiserver/plane/api/permissions/project.py
+++ b/apiserver/plane/api/permissions/project.py
@@ -13,14 +13,15 @@ Guest = 5
 
 
 class ProjectBasePermission(BasePermission):
     def
has_permission(self, request, view): - if request.user.is_anonymous: return False ## Safe Methods -> Handle the filtering logic in queryset if request.method in SAFE_METHODS: return WorkspaceMember.objects.filter( - workspace__slug=view.workspace_slug, member=request.user + workspace__slug=view.workspace_slug, + member=request.user, + is_active=True, ).exists() ## Only workspace owners or admins can create the projects @@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, role__in=[Admin, Member], + is_active=True, ).exists() ## Only Project Admins can update project attributes @@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission): member=request.user, role=Admin, project_id=view.project_id, + is_active=True, ).exists() class ProjectMemberPermission(BasePermission): def has_permission(self, request, view): - if request.user.is_anonymous: return False ## Safe Methods -> Handle the filtering logic in queryset if request.method in SAFE_METHODS: return ProjectMember.objects.filter( - workspace__slug=view.workspace_slug, member=request.user + workspace__slug=view.workspace_slug, + member=request.user, + is_active=True, ).exists() ## Only workspace owners or admins can create the projects if request.method == "POST": @@ -57,6 +61,7 @@ class ProjectMemberPermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, role__in=[Admin, Member], + is_active=True, ).exists() ## Only Project Admins can update project attributes @@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission): member=request.user, role__in=[Admin, Member], project_id=view.project_id, + is_active=True, ).exists() class ProjectEntityPermission(BasePermission): def has_permission(self, request, view): - if request.user.is_anonymous: return False @@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, project_id=view.project_id, + is_active=True, ).exists() ## Only project members or admins can create and edit the project attributes @@ -88,17 +94,18 @@ class ProjectEntityPermission(BasePermission): member=request.user, role__in=[Admin, Member], project_id=view.project_id, + is_active=True, ).exists() class ProjectLitePermission(BasePermission): - def has_permission(self, request, view): if request.user.is_anonymous: return False - + return ProjectMember.objects.filter( workspace__slug=view.workspace_slug, member=request.user, project_id=view.project_id, + is_active=True, ).exists() diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/api/permissions/workspace.py index 66e836614..33bcab31c 100644 --- a/apiserver/plane/api/permissions/workspace.py +++ b/apiserver/plane/api/permissions/workspace.py @@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission): member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() # allow only owner to delete the workspace if request.method == "DELETE": return WorkspaceMember.objects.filter( - member=request.user, workspace__slug=view.workspace_slug, role=Owner + member=request.user, + workspace__slug=view.workspace_slug, + role=Owner, + is_active=True, ).exists() +class WorkspaceOwnerPermission(BasePermission): + def has_permission(self, request, view): + if request.user.is_anonymous: + return False + + return WorkspaceMember.objects.filter( + workspace__slug=view.workspace_slug, + member=request.user, + role=Owner, + ).exists() + + class 
WorkSpaceAdminPermission(BasePermission): def has_permission(self, request, view): if request.user.is_anonymous: @@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission): member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() @@ -63,12 +80,14 @@ class WorkspaceEntityPermission(BasePermission): return WorkspaceMember.objects.filter( workspace__slug=view.workspace_slug, member=request.user, + is_active=True, ).exists() return WorkspaceMember.objects.filter( member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() @@ -78,5 +97,20 @@ class WorkspaceViewerPermission(BasePermission): return False return WorkspaceMember.objects.filter( - member=request.user, workspace__slug=view.workspace_slug, role__gte=10 + member=request.user, + workspace__slug=view.workspace_slug, + role__gte=10, + is_active=True, + ).exists() + + +class WorkspaceUserPermission(BasePermission): + def has_permission(self, request, view): + if request.user.is_anonymous: + return False + + return WorkspaceMember.objects.filter( + member=request.user, + workspace__slug=view.workspace_slug, + is_active=True, ).exists() diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index f1a7de3b8..901f0bc01 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -71,7 +71,7 @@ from .module import ( ModuleFavoriteSerializer, ) -from .api_token import APITokenSerializer +from .api import APITokenSerializer, APITokenReadSerializer from .integration import ( IntegrationSerializer, @@ -100,3 +100,5 @@ from .analytic import AnalyticViewSerializer from .notification import NotificationSerializer from .exporter import ExporterHistorySerializer + +from .webhook import WebhookSerializer, WebhookLogSerializer \ No newline at end of file diff --git a/apiserver/plane/api/serializers/api.py b/apiserver/plane/api/serializers/api.py new file mode 100644 index 000000000..08bb747d9 --- /dev/null +++ b/apiserver/plane/api/serializers/api.py @@ -0,0 +1,31 @@ +from .base import BaseSerializer +from plane.db.models import APIToken, APIActivityLog + + +class APITokenSerializer(BaseSerializer): + + class Meta: + model = APIToken + fields = "__all__" + read_only_fields = [ + "token", + "expired_at", + "created_at", + "updated_at", + "workspace", + "user", + ] + + +class APITokenReadSerializer(BaseSerializer): + + class Meta: + model = APIToken + exclude = ('token',) + + +class APIActivityLogSerializer(BaseSerializer): + + class Meta: + model = APIActivityLog + fields = "__all__" diff --git a/apiserver/plane/api/serializers/api_token.py b/apiserver/plane/api/serializers/api_token.py deleted file mode 100644 index 9c363f895..000000000 --- a/apiserver/plane/api/serializers/api_token.py +++ /dev/null @@ -1,14 +0,0 @@ -from .base import BaseSerializer -from plane.db.models import APIToken - - -class APITokenSerializer(BaseSerializer): - class Meta: - model = APIToken - fields = [ - "label", - "user", - "user_type", - "workspace", - "created_at", - ] diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 36fa6ecca..9ecae555c 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -103,13 +103,16 @@ class ProjectListSerializer(DynamicBaseSerializer): members = serializers.SerializerMethodField() def get_members(self, obj): - project_members = 
ProjectMember.objects.filter(project_id=obj.id).values( + project_members = ProjectMember.objects.filter( + project_id=obj.id, + is_active=True, + ).values( "id", "member_id", "member__display_name", "member__avatar", ) - return project_members + return list(project_members) class Meta: model = Project diff --git a/apiserver/plane/api/serializers/webhook.py b/apiserver/plane/api/serializers/webhook.py new file mode 100644 index 000000000..351b6fe7d --- /dev/null +++ b/apiserver/plane/api/serializers/webhook.py @@ -0,0 +1,30 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import DynamicBaseSerializer +from plane.db.models import Webhook, WebhookLog +from plane.db.models.webhook import validate_domain, validate_schema + +class WebhookSerializer(DynamicBaseSerializer): + url = serializers.URLField(validators=[validate_schema, validate_domain]) + + class Meta: + model = Webhook + fields = "__all__" + read_only_fields = [ + "workspace", + "secret_key", + ] + + +class WebhookLogSerializer(DynamicBaseSerializer): + + class Meta: + model = WebhookLog + fields = "__all__" + read_only_fields = [ + "workspace", + "webhook" + ] + diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py index 957dac24e..1e3c1cbca 100644 --- a/apiserver/plane/api/urls/__init__.py +++ b/apiserver/plane/api/urls/__init__.py @@ -19,6 +19,12 @@ from .state import urlpatterns as state_urls from .user import urlpatterns as user_urls from .views import urlpatterns as view_urls from .workspace import urlpatterns as workspace_urls +from .api import urlpatterns as api_urls +from .webhook import urlpatterns as webhook_urls + + +# Django imports +from django.conf import settings urlpatterns = [ @@ -44,3 +50,9 @@ urlpatterns = [ *view_urls, *workspace_urls, ] + +if settings.ENABLE_WEBHOOK: + urlpatterns += webhook_urls + +if settings.ENABLE_API: + urlpatterns += api_urls diff --git a/apiserver/plane/api/urls/api.py b/apiserver/plane/api/urls/api.py new file mode 100644 index 000000000..1a2862045 --- /dev/null +++ b/apiserver/plane/api/urls/api.py @@ -0,0 +1,17 @@ +from django.urls import path +from plane.api.views import ApiTokenEndpoint + +urlpatterns = [ + # API Tokens + path( + "workspaces//api-tokens/", + ApiTokenEndpoint.as_view(), + name="api-tokens", + ), + path( + "workspaces//api-tokens//", + ApiTokenEndpoint.as_view(), + name="api-tokens", + ), + ## End API Tokens +] diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py index 2d9e513df..83bb765e6 100644 --- a/apiserver/plane/api/urls/project.py +++ b/apiserver/plane/api/urls/project.py @@ -2,17 +2,16 @@ from django.urls import path from plane.api.views import ( ProjectViewSet, - InviteProjectEndpoint, + ProjectInvitationsViewset, ProjectMemberViewSet, - ProjectMemberInvitationsViewset, ProjectMemberUserEndpoint, ProjectJoinEndpoint, AddTeamToProjectEndpoint, ProjectUserViewsEndpoint, ProjectIdentifierEndpoint, ProjectFavoritesViewSet, - LeaveProjectEndpoint, ProjectPublicCoverImagesEndpoint, + UserProjectInvitationsViewset, ) @@ -45,13 +44,48 @@ urlpatterns = [ name="project-identifiers", ), path( - "workspaces//projects//invite/", - InviteProjectEndpoint.as_view(), - name="invite-project", + "workspaces//projects//invitations/", + ProjectInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="project-member-invite", + ), + path( + "workspaces//projects//invitations//", + ProjectInvitationsViewset.as_view( + { + "get": 
"retrieve", + "delete": "destroy", + } + ), + name="project-member-invite", + ), + path( + "users/me/invitations/projects/", + UserProjectInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="user-project-invitations", + ), + path( + "workspaces//projects/join/", + ProjectJoinEndpoint.as_view(), + name="project-join", ), path( "workspaces//projects//members/", - ProjectMemberViewSet.as_view({"get": "list", "post": "create"}), + ProjectMemberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), name="project-member", ), path( @@ -66,30 +100,19 @@ urlpatterns = [ name="project-member", ), path( - "workspaces//projects/join/", - ProjectJoinEndpoint.as_view(), - name="project-join", + "workspaces//projects//members/leave/", + ProjectMemberViewSet.as_view( + { + "post": "leave", + } + ), + name="project-member", ), path( "workspaces//projects//team-invite/", AddTeamToProjectEndpoint.as_view(), name="projects", ), - path( - "workspaces//projects//invitations/", - ProjectMemberInvitationsViewset.as_view({"get": "list"}), - name="project-member-invite", - ), - path( - "workspaces//projects//invitations//", - ProjectMemberInvitationsViewset.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - name="project-member-invite", - ), path( "workspaces//projects//project-views/", ProjectUserViewsEndpoint.as_view(), @@ -119,11 +142,6 @@ urlpatterns = [ ), name="project-favorite", ), - path( - "workspaces//projects//members/leave/", - LeaveProjectEndpoint.as_view(), - name="leave-project", - ), path( "project-covers/", ProjectPublicCoverImagesEndpoint.as_view(), diff --git a/apiserver/plane/api/urls/user.py b/apiserver/plane/api/urls/user.py index 5282a7cf6..00f95cd42 100644 --- a/apiserver/plane/api/urls/user.py +++ b/apiserver/plane/api/urls/user.py @@ -9,15 +9,10 @@ from plane.api.views import ( ChangePasswordEndpoint, ## End User ## Workspaces - UserWorkspaceInvitationsEndpoint, UserWorkSpacesEndpoint, - JoinWorkspaceEndpoint, - UserWorkspaceInvitationsEndpoint, - UserWorkspaceInvitationEndpoint, UserActivityGraphEndpoint, UserIssueCompletedGraphEndpoint, UserWorkspaceDashboardEndpoint, - UserProjectInvitationsViewset, ## End Workspaces ) @@ -26,7 +21,11 @@ urlpatterns = [ path( "users/me/", UserEndpoint.as_view( - {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + { + "get": "retrieve", + "patch": "partial_update", + "delete": "deactivate", + } ), name="users", ), @@ -65,23 +64,6 @@ urlpatterns = [ UserWorkSpacesEndpoint.as_view(), name="user-workspace", ), - # user workspace invitations - path( - "users/me/invitations/workspaces/", - UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}), - name="user-workspace-invitations", - ), - # user workspace invitation - path( - "users/me/invitations//", - UserWorkspaceInvitationEndpoint.as_view( - { - "get": "retrieve", - } - ), - name="user-workspace-invitation", - ), - # user join workspace # User Graphs path( "users/me/workspaces//activity-graph/", @@ -99,15 +81,4 @@ urlpatterns = [ name="user-workspace-dashboard", ), ## End User Graph - path( - "users/me/invitations/workspaces///join/", - JoinWorkspaceEndpoint.as_view(), - name="user-join-workspace", - ), - # user project invitations - path( - "users/me/invitations/projects/", - UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}), - name="user-project-invitations", - ), ] diff --git a/apiserver/plane/api/urls/webhook.py b/apiserver/plane/api/urls/webhook.py new file mode 100644 index 
000000000..74a8da759 --- /dev/null +++ b/apiserver/plane/api/urls/webhook.py @@ -0,0 +1,31 @@ +from django.urls import path + +from plane.api.views import ( + WebhookEndpoint, + WebhookLogsEndpoint, + WebhookSecretRegenerateEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//webhooks/", + WebhookEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhooks//", + WebhookEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhooks//regenerate/", + WebhookSecretRegenerateEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhook-logs//", + WebhookLogsEndpoint.as_view(), + name="webhooks", + ), +] diff --git a/apiserver/plane/api/urls/workspace.py b/apiserver/plane/api/urls/workspace.py index f26730833..64e558f10 100644 --- a/apiserver/plane/api/urls/workspace.py +++ b/apiserver/plane/api/urls/workspace.py @@ -2,8 +2,9 @@ from django.urls import path from plane.api.views import ( + UserWorkspaceInvitationsViewSet, WorkSpaceViewSet, - InviteWorkspaceEndpoint, + WorkspaceJoinEndpoint, WorkSpaceMemberViewSet, WorkspaceInvitationsViewset, WorkspaceMemberUserEndpoint, @@ -17,7 +18,6 @@ from plane.api.views import ( WorkspaceUserProfileEndpoint, WorkspaceUserProfileIssuesEndpoint, WorkspaceLabelsEndpoint, - LeaveWorkspaceEndpoint, ) @@ -49,14 +49,14 @@ urlpatterns = [ ), name="workspace", ), - path( - "workspaces//invite/", - InviteWorkspaceEndpoint.as_view(), - name="invite-workspace", - ), path( "workspaces//invitations/", - WorkspaceInvitationsViewset.as_view({"get": "list"}), + WorkspaceInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), name="workspace-invitations", ), path( @@ -69,6 +69,23 @@ urlpatterns = [ ), name="workspace-invitations", ), + # user workspace invitations + path( + "users/me/workspaces/invitations/", + UserWorkspaceInvitationsViewSet.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="user-workspace-invitations", + ), + path( + "workspaces//invitations//join/", + WorkspaceJoinEndpoint.as_view(), + name="workspace-join", + ), + # user join workspace path( "workspaces//members/", WorkSpaceMemberViewSet.as_view({"get": "list"}), @@ -85,6 +102,15 @@ urlpatterns = [ ), name="workspace-member", ), + path( + "workspaces//members/leave/", + WorkSpaceMemberViewSet.as_view( + { + "post": "leave", + }, + ), + name="leave-workspace-members", + ), path( "workspaces//teams/", TeamMemberViewSet.as_view( @@ -168,9 +194,4 @@ urlpatterns = [ WorkspaceLabelsEndpoint.as_view(), name="workspace-labels", ), - path( - "workspaces//members/leave/", - LeaveWorkspaceEndpoint.as_view(), - name="leave-workspace-members", - ), ] diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index ca66ce48e..787dfb3e2 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -2,10 +2,8 @@ from .project import ( ProjectViewSet, ProjectMemberViewSet, UserProjectInvitationsViewset, - InviteProjectEndpoint, + ProjectInvitationsViewset, AddTeamToProjectEndpoint, - ProjectMemberInvitationsViewset, - ProjectMemberInviteDetailViewSet, ProjectIdentifierEndpoint, ProjectJoinEndpoint, ProjectUserViewsEndpoint, @@ -14,7 +12,6 @@ from .project import ( ProjectDeployBoardViewSet, ProjectDeployBoardPublicSettingsEndpoint, WorkspaceProjectDeployBoardEndpoint, - LeaveProjectEndpoint, ProjectPublicCoverImagesEndpoint, ) from .user import ( @@ -26,19 +23,17 @@ from .user import ( from .oauth import OauthEndpoint -from .base import BaseAPIView, BaseViewSet +from 
.base import BaseAPIView, BaseViewSet, WebhookMixin from .workspace import ( WorkSpaceViewSet, UserWorkSpacesEndpoint, WorkSpaceAvailabilityCheckEndpoint, - InviteWorkspaceEndpoint, - JoinWorkspaceEndpoint, + WorkspaceJoinEndpoint, WorkSpaceMemberViewSet, TeamMemberViewSet, WorkspaceInvitationsViewset, - UserWorkspaceInvitationsEndpoint, - UserWorkspaceInvitationEndpoint, + UserWorkspaceInvitationsViewSet, UserLastProjectWithWorkspaceEndpoint, WorkspaceMemberUserEndpoint, WorkspaceMemberUserViewsEndpoint, @@ -51,7 +46,6 @@ from .workspace import ( WorkspaceUserProfileEndpoint, WorkspaceUserProfileIssuesEndpoint, WorkspaceLabelsEndpoint, - LeaveWorkspaceEndpoint, ) from .state import StateViewSet from .view import ( @@ -121,7 +115,7 @@ from .module import ( ModuleFavoriteViewSet, ) -from .api_token import ApiTokenEndpoint +from .api import ApiTokenEndpoint from .integration import ( WorkspaceIntegrationViewSet, @@ -178,3 +172,5 @@ from .notification import ( from .exporter import ExportIssuesEndpoint from .config import ConfigurationEndpoint + +from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint diff --git a/apiserver/plane/api/views/api.py b/apiserver/plane/api/views/api.py new file mode 100644 index 000000000..59da6d3c4 --- /dev/null +++ b/apiserver/plane/api/views/api.py @@ -0,0 +1,78 @@ +# Python import +from uuid import uuid4 + +# Third party +from rest_framework.response import Response +from rest_framework import status + +# Module import +from .base import BaseAPIView +from plane.db.models import APIToken, Workspace +from plane.api.serializers import APITokenSerializer, APITokenReadSerializer +from plane.api.permissions import WorkspaceOwnerPermission + + +class ApiTokenEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug): + label = request.data.get("label", str(uuid4().hex)) + description = request.data.get("description", "") + workspace = Workspace.objects.get(slug=slug) + expired_at = request.data.get("expired_at", None) + + # Check the user type + user_type = 1 if request.user.is_bot else 0 + + api_token = APIToken.objects.create( + label=label, + description=description, + user=request.user, + workspace=workspace, + user_type=user_type, + expired_at=expired_at, + ) + + serializer = APITokenSerializer(api_token) + # Token will be only visible while creating + return Response( + serializer.data, + status=status.HTTP_201_CREATED, + ) + + def get(self, request, slug, pk=None): + if pk == None: + api_tokens = APIToken.objects.filter( + user=request.user, workspace__slug=slug + ) + serializer = APITokenReadSerializer(api_tokens, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + else: + api_tokens = APIToken.objects.get( + user=request.user, workspace__slug=slug, pk=pk + ) + serializer = APITokenReadSerializer(api_tokens) + return Response(serializer.data, status=status.HTTP_200_OK) + + def delete(self, request, slug, pk): + api_token = APIToken.objects.get( + workspace__slug=slug, + user=request.user, + pk=pk, + ) + api_token.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + def patch(self, request, slug, pk): + api_token = APIToken.objects.get( + workspace__slug=slug, + user=request.user, + pk=pk, + ) + serializer = APITokenSerializer(api_token, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py deleted file mode 100644 index 2253903a9..000000000 --- a/apiserver/plane/api/views/api_token.py +++ /dev/null @@ -1,47 +0,0 @@ -# Python import -from uuid import uuid4 - -# Third party -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module import -from .base import BaseAPIView -from plane.db.models import APIToken -from plane.api.serializers import APITokenSerializer - - -class ApiTokenEndpoint(BaseAPIView): - def post(self, request): - label = request.data.get("label", str(uuid4().hex)) - workspace = request.data.get("workspace", False) - - if not workspace: - return Response( - {"error": "Workspace is required"}, status=status.HTTP_200_OK - ) - - api_token = APIToken.objects.create( - label=label, user=request.user, workspace_id=workspace - ) - - serializer = APITokenSerializer(api_token) - # Token will be only vissible while creating - return Response( - {"api_token": serializer.data, "token": api_token.token}, - status=status.HTTP_201_CREATED, - ) - - - def get(self, request): - api_tokens = APIToken.objects.filter(user=request.user) - serializer = APITokenSerializer(api_tokens, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - - def delete(self, request, pk): - api_token = APIToken.objects.get(pk=pk) - api_token.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py index fbffacff8..e2ec9d5b6 100644 --- a/apiserver/plane/api/views/auth_extended.py +++ b/apiserver/plane/api/views/auth_extended.py @@ -33,7 +33,7 @@ from plane.bgtasks.forgot_password_task import forgot_password class RequestEmailVerificationEndpoint(BaseAPIView): def get(self, request): token = RefreshToken.for_user(request.user).access_token - current_site = settings.WEB_URL + current_site = request.META.get('HTTP_ORIGIN') email_verification.delay( request.user.first_name, request.user.email, token, current_site ) @@ -76,7 +76,7 @@ class ForgotPasswordEndpoint(BaseAPIView): uidb64 = urlsafe_base64_encode(smart_bytes(user.id)) token = PasswordResetTokenGenerator().make_token(user) - current_site = settings.WEB_URL + current_site = request.META.get('HTTP_ORIGIN') forgot_password.delay( user.first_name, user.email, uidb64, token, current_site diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py index eadfeef61..fe7b4c473 100644 --- a/apiserver/plane/api/views/authentication.py +++ b/apiserver/plane/api/views/authentication.py @@ -4,7 +4,7 @@ import random import string import json import requests - +from requests.exceptions import RequestException # Django imports from django.utils import timezone from django.core.exceptions import ValidationError @@ -22,8 +22,13 @@ from sentry_sdk import capture_exception, capture_message # Module imports from . 
import BaseAPIView -from plane.db.models import User -from plane.api.serializers import UserSerializer +from plane.db.models import ( + User, + WorkspaceMemberInvite, + WorkspaceMember, + ProjectMemberInvite, + ProjectMember, +) from plane.settings.redis import redis_instance from plane.bgtasks.magic_link_code_task import magic_link @@ -86,35 +91,93 @@ class SignUpEndpoint(BaseAPIView): user.token_updated_at = timezone.now() user.save() + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() + + try: + # Send Analytics + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "email", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_UP", + }, + ) + except RequestException as e: + capture_exception(e) + access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, } - - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_UP", - }, - ) - return Response(data, status=status.HTTP_200_OK) @@ -176,33 +239,92 @@ class SignInEndpoint(BaseAPIView): user.token_updated_at = timezone.now() user.save() - access_token, refresh_token = get_tokens_for_user(user) - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - 
"event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() + try: + # Send Analytics + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "email", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_IN", }, - "event_type": "SIGN_IN", - }, - ) + ) + except RequestException as e: + capture_exception(e) + data = { "access_token": access_token, "refresh_token": refresh_token, } - + access_token, refresh_token = get_tokens_for_user(user) return Response(data, status=status.HTTP_200_OK) @@ -287,7 +409,8 @@ class MagicSignInGenerateEndpoint(BaseAPIView): ri.set(key, json.dumps(value), ex=expiry) - current_site = settings.WEB_URL + + current_site = request.META.get('HTTP_ORIGIN') magic_link.delay(email, key, token, current_site) return Response({"key": key}, status=status.HTTP_200_OK) @@ -319,27 +442,37 @@ class MagicSignInEndpoint(BaseAPIView): if str(token) == str(user_token): if User.objects.filter(email=email).exists(): user = User.objects.get(email=email) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_IN", + if not user.is_active: + return Response( + { + "error": "Your account has been 
deactivated. Please contact your site administrator." }, + status=status.HTTP_403_FORBIDDEN, ) + try: + # Send event to Jitsu for tracking + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "code", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_IN", + }, + ) + except RequestException as e: + capture_exception(e) else: user = User.objects.create( email=email, @@ -347,27 +480,30 @@ class MagicSignInEndpoint(BaseAPIView): password=make_password(uuid.uuid4().hex), is_password_autoset=True, ) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", + try: + # Send event to Jitsu for tracking + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "code", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_UP", }, - "event_type": "SIGN_UP", - }, - ) + ) + except RequestException as e: + capture_exception(e) user.last_active = timezone.now() user.last_login_time = timezone.now() @@ -376,6 +512,63 @@ class MagicSignInEndpoint(BaseAPIView): user.token_updated_at = timezone.now() user.save() + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Delete all the invites + workspace_member_invites.delete() 
+ project_member_invites.delete() + access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py index 7ab660e81..71f9c1842 100644 --- a/apiserver/plane/api/views/base.py +++ b/apiserver/plane/api/views/base.py @@ -1,5 +1,6 @@ # Python imports import zoneinfo +import json # Django imports from django.urls import resolve @@ -7,6 +8,7 @@ from django.conf import settings from django.utils import timezone from django.db import IntegrityError from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.core.serializers.json import DjangoJSONEncoder # Third part imports from rest_framework import status @@ -22,6 +24,7 @@ from django_filters.rest_framework import DjangoFilterBackend # Module imports from plane.utils.paginator import BasePaginator +from plane.bgtasks.webhook_task import send_webhook class TimezoneMixin: @@ -29,6 +32,7 @@ class TimezoneMixin: This enables timezone conversion according to the user set timezone """ + def initial(self, request, *args, **kwargs): super().initial(request, *args, **kwargs) if request.user.is_authenticated: @@ -37,8 +41,29 @@ class TimezoneMixin: timezone.deactivate() -class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): +class WebhookMixin: + webhook_event = None + def finalize_response(self, request, response, *args, **kwargs): + response = super().finalize_response(request, response, *args, **kwargs) + + if ( + self.webhook_event + and self.request.method in ["POST", "PATCH", "DELETE"] + and response.status_code in [200, 201, 204] + and settings.ENABLE_WEBHOOK + ): + send_webhook.delay( + event=self.webhook_event, + event_data=json.dumps(response.data, cls=DjangoJSONEncoder), + action=self.request.method, + slug=self.workspace_slug, + ) + + return response + + +class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): model = None permission_classes = [ @@ -60,7 +85,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): except Exception as e: capture_exception(e) raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) - + def handle_exception(self, exc): """ Handle any exception that occurs, by returning an appropriate response, @@ -71,18 +96,30 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): return response except Exception as e: if isinstance(e, IntegrityError): - return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST) - + return Response( + {"error": "The payload is not valid"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if isinstance(e, ValidationError): - return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST) - + return Response( + {"error": "Please provide valid detail"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if isinstance(e, ObjectDoesNotExist): model_name = str(exc).split(" matching query does not exist.")[0] - return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND) - + return Response( + {"error": f"{model_name} does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + if isinstance(e, KeyError): capture_exception(e) - return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": f"key {e} does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) print(e) if settings.DEBUG else print("Server Error") capture_exception(e) @@ -99,8 +136,8 @@ class 
BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): print( f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" ) - return response + return response except Exception as exc: response = self.handle_exception(exc) return exc @@ -120,7 +157,6 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): class BaseAPIView(TimezoneMixin, APIView, BasePaginator): - permission_classes = [ IsAuthenticated, ] @@ -139,7 +175,6 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator): queryset = backend().filter_queryset(self.request, queryset, self) return queryset - def handle_exception(self, exc): """ Handle any exception that occurs, by returning an appropriate response, @@ -150,19 +185,29 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator): return response except Exception as e: if isinstance(e, IntegrityError): - return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST) - + return Response( + {"error": "The payload is not valid"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if isinstance(e, ValidationError): - return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST) - + return Response( + {"error": "Please provide valid detail"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if isinstance(e, ObjectDoesNotExist): model_name = str(exc).split(" matching query does not exist.")[0] - return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND) + return Response( + {"error": f"{model_name} does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) if isinstance(e, KeyError): return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) - - print(e) if settings.DEBUG else print("Server Error") + + if settings.DEBUG: + print(e) capture_exception(e) return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/apiserver/plane/api/views/config.py b/apiserver/plane/api/views/config.py index 1d65f3d68..237d8d6bf 100644 --- a/apiserver/plane/api/views/config.py +++ b/apiserver/plane/api/views/config.py @@ -12,8 +12,9 @@ from sentry_sdk import capture_exception # Module imports from .base import BaseAPIView -from plane.license.models import Instance -from plane.license.utils.instance_value import get_configuration_value +from plane.license.models import Instance, InstanceConfiguration +from plane.license.utils.instance_value import get_configuration_value + class ConfigurationEndpoint(BaseAPIView): permission_classes = [ @@ -21,27 +22,75 @@ class ConfigurationEndpoint(BaseAPIView): ] def get(self, request): - instance_configuration = Instance.objects.values("key", "value") + instance_configuration = InstanceConfiguration.objects.values("key", "value") data = {} # Authentication - data["google_client_id"] = get_configuration_value(instance_configuration, "GOOGLE_CLIENT_ID") - data["github_client_id"] = get_configuration_value(instance_configuration,"GITHUB_CLIENT_ID") - data["github_app_name"] = get_configuration_value(instance_configuration, "GITHUB_APP_NAME") + data["google_client_id"] = get_configuration_value( + instance_configuration, + "GOOGLE_CLIENT_ID", + os.environ.get("GOOGLE_CLIENT_ID", None), + ) + data["github_client_id"] = get_configuration_value( + instance_configuration, + "GITHUB_CLIENT_ID", + os.environ.get("GITHUB_CLIENT_ID", None), + ) + data["github_app_name"] = get_configuration_value( + instance_configuration, + "GITHUB_APP_NAME", + 
os.environ.get("GITHUB_APP_NAME", None), + ) data["magic_login"] = ( - bool(get_configuration_value(instance_configuration, "EMAIL_HOST_USER")) and bool(get_configuration_value(instance_configuration, "EMAIL_HOST_PASSWORD")) - ) and get_configuration_value(instance_configuration, "ENABLE_MAGIC_LINK_LOGIN", "0") == "1" + bool( + get_configuration_value( + instance_configuration, + "EMAIL_HOST_USER", + os.environ.get("GITHUB_APP_NAME", None), + ), + ) + and bool( + get_configuration_value( + instance_configuration, + "EMAIL_HOST_PASSWORD", + os.environ.get("GITHUB_APP_NAME", None), + ) + ) + ) and get_configuration_value( + instance_configuration, "ENABLE_MAGIC_LINK_LOGIN", "0" + ) == "1" data["email_password_login"] = ( - get_configuration_value(instance_configuration, "ENABLE_EMAIL_PASSWORD", "0") == "1" + get_configuration_value( + instance_configuration, "ENABLE_EMAIL_PASSWORD", "0" + ) + == "1" ) # Slack client - data["slack_client_id"] = get_configuration_value(instance_configuration, "SLACK_CLIENT_ID") - + data["slack_client_id"] = get_configuration_value( + instance_configuration, + "SLACK_CLIENT_ID", + os.environ.get("SLACK_CLIENT_ID", None), + ) + # Posthog - data["posthog_api_key"] = get_configuration_value(instance_configuration, "POSTHOG_API_KEY") - data["posthog_host"] = get_configuration_value(instance_configuration, "POSTHOG_HOST") + data["posthog_api_key"] = get_configuration_value( + instance_configuration, + "POSTHOG_API_KEY", + os.environ.get("POSTHOG_API_KEY", None), + ) + data["posthog_host"] = get_configuration_value( + instance_configuration, + "POSTHOG_HOST", + os.environ.get("POSTHOG_HOST", None), + ) # Unsplash - data["has_unsplash_configured"] = bool(get_configuration_value(instance_configuration, "UNSPLASH_ACCESS_KEY")) + data["has_unsplash_configured"] = bool( + get_configuration_value( + instance_configuration, + "UNSPLASH_ACCESS_KEY", + os.environ.get("UNSPLASH_ACCESS_KEY", None), + ) + ) return Response(data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index 21defcc13..2a62ab8ac 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -23,7 +23,7 @@ from rest_framework import status from sentry_sdk import capture_exception # Module imports -from . import BaseViewSet, BaseAPIView +from . 
import BaseViewSet, BaseAPIView, WebhookMixin from plane.api.serializers import ( CycleSerializer, CycleIssueSerializer, @@ -48,9 +48,10 @@ from plane.utils.issue_filters import issue_filters from plane.utils.analytics_plot import burndown_plot -class CycleViewSet(BaseViewSet): +class CycleViewSet(WebhookMixin, BaseViewSet): serializer_class = CycleSerializer model = Cycle + webhook_event = "cycle" permission_classes = [ ProjectEntityPermission, ] @@ -499,10 +500,10 @@ class CycleViewSet(BaseViewSet): return Response(status=status.HTTP_204_NO_CONTENT) -class CycleIssueViewSet(BaseViewSet): +class CycleIssueViewSet(WebhookMixin, BaseViewSet): serializer_class = CycleIssueSerializer model = CycleIssue - + webhook_event = "cycle" permission_classes = [ ProjectEntityPermission, ] diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index 517e9b6de..999d0a459 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -64,9 +64,7 @@ class InboxViewSet(BaseViewSet): serializer.save(project_id=self.kwargs.get("project_id")) def destroy(self, request, slug, project_id, pk): - inbox = Inbox.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) + inbox = Inbox.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) # Handle default inbox delete if inbox.is_default: return Response( @@ -128,9 +126,7 @@ class InboxIssueViewSet(BaseViewSet): .values("count") ) .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -150,7 +146,6 @@ class InboxIssueViewSet(BaseViewSet): status=status.HTTP_200_OK, ) - def create(self, request, slug, project_id, inbox_id): if not request.data.get("issue", {}).get("name", False): return Response( @@ -198,7 +193,7 @@ class InboxIssueViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) # create an inbox issue InboxIssue.objects.create( @@ -216,10 +211,20 @@ class InboxIssueViewSet(BaseViewSet): pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id ) # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) # Only project members admins and created_by users can access this endpoint - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot edit inbox issues"}, + status=status.HTTP_400_BAD_REQUEST, + ) # Get issue data issue_data = request.data.pop("issue", False) @@ -230,11 +235,13 @@ class InboxIssueViewSet(BaseViewSet): ) # Only allow guests and viewers to edit name and description if project_member.role <= 10: - # viewers and guests since only viewers and guests + # viewers and guests since only viewers and guests issue_data = { "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": 
issue_data.get("description", issue.description) + "description_html": issue_data.get( + "description_html", issue.description_html + ), + "description": issue_data.get("description", issue.description), } issue_serializer = IssueCreateSerializer( @@ -256,7 +263,7 @@ class InboxIssueViewSet(BaseViewSet): IssueSerializer(current_instance).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_serializer.save() else: @@ -307,7 +314,9 @@ class InboxIssueViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) else: - return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK) + return Response( + InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK + ) def retrieve(self, request, slug, project_id, inbox_id, pk): inbox_issue = InboxIssue.objects.get( @@ -324,15 +333,27 @@ class InboxIssueViewSet(BaseViewSet): pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id ) # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot delete inbox issue"}, + status=status.HTTP_400_BAD_REQUEST, + ) # Check the issue status if inbox_issue.status in [-2, -1, 0, 2]: # Delete the issue also - Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete() + Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id + ).delete() inbox_issue.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -347,7 +368,10 @@ class InboxIssuePublicViewSet(BaseViewSet): ] def get_queryset(self): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id")) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) if project_deploy_board is not None: return self.filter_queryset( super() @@ -363,9 +387,14 @@ class InboxIssuePublicViewSet(BaseViewSet): return InboxIssue.objects.none() def list(self, request, slug, project_id, inbox_id): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Inbox is not enabled for this Project Board"}, + status=status.HTTP_400_BAD_REQUEST, + ) filters = issue_filters(request.query_params, "GET") issues = ( @@ -392,9 +421,7 @@ class InboxIssuePublicViewSet(BaseViewSet): .values("count") ) .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) .order_by() 
.annotate(count=Func(F("id"), function="Count")) .values("count") @@ -415,9 +442,14 @@ class InboxIssuePublicViewSet(BaseViewSet): ) def create(self, request, slug, project_id, inbox_id): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Inbox is not enabled for this Project Board"}, + status=status.HTTP_400_BAD_REQUEST, + ) if not request.data.get("issue", {}).get("name", False): return Response( @@ -465,7 +497,7 @@ class InboxIssuePublicViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) # create an inbox issue InboxIssue.objects.create( @@ -479,34 +511,41 @@ class InboxIssuePublicViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_200_OK) def partial_update(self, request, slug, project_id, inbox_id, pk): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Inbox is not enabled for this Project Board"}, + status=status.HTTP_400_BAD_REQUEST, + ) inbox_issue = InboxIssue.objects.get( pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id ) # Get the project member if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "You cannot edit inbox issues"}, + status=status.HTTP_400_BAD_REQUEST, + ) # Get issue data issue_data = request.data.pop("issue", False) - issue = Issue.objects.get( pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id ) - # viewers and guests since only viewers and guests + # viewers and guests since only viewers and guests issue_data = { "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": issue_data.get("description", issue.description) + "description_html": issue_data.get( + "description_html", issue.description_html + ), + "description": issue_data.get("description", issue.description), } - issue_serializer = IssueCreateSerializer( - issue, data=issue_data, partial=True - ) + issue_serializer = IssueCreateSerializer(issue, data=issue_data, partial=True) if issue_serializer.is_valid(): current_instance = issue @@ -523,17 +562,22 @@ class InboxIssuePublicViewSet(BaseViewSet): IssueSerializer(current_instance).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_serializer.save() return Response(issue_serializer.data, status=status.HTTP_200_OK) return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, inbox_id, pk): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + project_deploy_board = ProjectDeployBoard.objects.get( + 
workspace__slug=slug, project_id=project_id + ) if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - + return Response( + {"error": "Inbox is not enabled for this Project Board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + inbox_issue = InboxIssue.objects.get( pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id ) @@ -544,16 +588,24 @@ class InboxIssuePublicViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_200_OK) def destroy(self, request, slug, project_id, inbox_id, pk): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Inbox is not enabled for this Project Board"}, + status=status.HTTP_400_BAD_REQUEST, + ) inbox_issue = InboxIssue.objects.get( pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id ) if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "You cannot delete inbox issue"}, + status=status.HTTP_400_BAD_REQUEST, + ) inbox_issue.delete() return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index d1cd93e73..072fabe0e 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -33,7 +33,7 @@ from rest_framework.permissions import AllowAny, IsAuthenticated from sentry_sdk import capture_exception # Module imports -from . import BaseViewSet, BaseAPIView +from . 
import BaseViewSet, BaseAPIView, WebhookMixin from plane.api.serializers import ( IssueCreateSerializer, IssueActivitySerializer, @@ -84,7 +84,7 @@ from plane.utils.grouper import group_results from plane.utils.issue_filters import issue_filters -class IssueViewSet(BaseViewSet): +class IssueViewSet(WebhookMixin, BaseViewSet): def get_serializer_class(self): return ( IssueCreateSerializer @@ -93,6 +93,7 @@ class IssueViewSet(BaseViewSet): ) model = Issue + webhook_event = "issue" permission_classes = [ ProjectEntityPermission, ] @@ -594,9 +595,10 @@ class IssueActivityEndpoint(BaseAPIView): return Response(result_list, status=status.HTTP_200_OK) -class IssueCommentViewSet(BaseViewSet): +class IssueCommentViewSet(WebhookMixin, BaseViewSet): serializer_class = IssueCommentSerializer model = IssueComment + webhook_event = "issue-comment" permission_classes = [ ProjectLitePermission, ] @@ -623,6 +625,7 @@ class IssueCommentViewSet(BaseViewSet): workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"), member_id=self.request.user.id, + is_active=True, ) ) ) @@ -753,8 +756,8 @@ class LabelViewSet(BaseViewSet): .select_related("project") .select_related("workspace") .select_related("parent") - .order_by("name") .distinct() + .order_by("sort_order") ) @@ -1254,7 +1257,11 @@ class IssueSubscriberViewSet(BaseViewSet): def list(self, request, slug, project_id, issue_id): members = ( - ProjectMember.objects.filter(workspace__slug=slug, project_id=project_id) + ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + is_active=True, + ) .annotate( is_subscribed=Exists( IssueSubscriber.objects.filter( @@ -1498,6 +1505,7 @@ class IssueCommentPublicViewSet(BaseViewSet): workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"), member_id=self.request.user.id, + is_active=True, ) ) ) @@ -1538,6 +1546,7 @@ class IssueCommentPublicViewSet(BaseViewSet): if not ProjectMember.objects.filter( project_id=project_id, member=request.user, + is_active=True, ).exists(): # Add the user for workspace tracking _ = ProjectPublicMember.objects.get_or_create( @@ -1651,6 +1660,7 @@ class IssueReactionPublicViewSet(BaseViewSet): if not ProjectMember.objects.filter( project_id=project_id, member=request.user, + is_active=True, ).exists(): # Add the user for workspace tracking _ = ProjectPublicMember.objects.get_or_create( @@ -1744,7 +1754,9 @@ class CommentReactionPublicViewSet(BaseViewSet): project_id=project_id, comment_id=comment_id, actor=request.user ) if not ProjectMember.objects.filter( - project_id=project_id, member=request.user + project_id=project_id, + member=request.user, + is_active=True, ).exists(): # Add the user for workspace tracking _ = ProjectPublicMember.objects.get_or_create( @@ -1829,7 +1841,9 @@ class IssueVotePublicViewSet(BaseViewSet): ) # Add the user for workspace tracking if not ProjectMember.objects.filter( - project_id=project_id, member=request.user + project_id=project_id, + member=request.user, + is_active=True, ).exists(): _ = ProjectPublicMember.objects.get_or_create( project_id=project_id, diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 6c2088922..173526a2c 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -15,7 +15,7 @@ from rest_framework import status from sentry_sdk import capture_exception # Module imports -from . import BaseViewSet +from . 
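
# Note: IssueViewSet and IssueCommentViewSet above opt into webhooks by mixing in
# WebhookMixin and declaring a webhook_event. The mixin body lives in
# plane/api/views/base.py and is not part of this hunk, so the class below is only an
# assumed sketch of how such a mixin could hand successful writes to the send_webhook
# task added later in this diff; it is not the actual implementation.
import json

from django.core.serializers.json import DjangoJSONEncoder

from plane.bgtasks.webhook_task import send_webhook


class WebhookMixinSketch:
    webhook_event = None  # e.g. "issue", "issue-comment", "module", "project"

    def finalize_response(self, request, response, *args, **kwargs):
        response = super().finalize_response(request, response, *args, **kwargs)
        # Fan out only for successful state-changing requests on views that opt in.
        if (
            self.webhook_event
            and request.method in ("POST", "PATCH", "PUT", "DELETE")
            and response.status_code in (200, 201, 204)
        ):
            send_webhook.delay(
                event=self.webhook_event,
                event_data=json.dumps(response.data, cls=DjangoJSONEncoder)
                if response.data
                else None,
                action=request.method,
                slug=self.kwargs.get("slug"),
            )
        return response
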
import BaseViewSet, WebhookMixin from plane.api.serializers import ( ModuleWriteSerializer, ModuleSerializer, @@ -41,11 +41,12 @@ from plane.utils.issue_filters import issue_filters from plane.utils.analytics_plot import burndown_plot -class ModuleViewSet(BaseViewSet): +class ModuleViewSet(WebhookMixin, BaseViewSet): model = Module permission_classes = [ ProjectEntityPermission, ] + webhook_event = "module" def get_serializer_class(self): return ( diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py index 978c01bac..19dcba734 100644 --- a/apiserver/plane/api/views/notification.py +++ b/apiserver/plane/api/views/notification.py @@ -85,7 +85,10 @@ class NotificationViewSet(BaseViewSet, BasePaginator): # Created issues if type == "created": if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 + workspace__slug=slug, + member=request.user, + role__lt=15, + is_active=True, ).exists(): notifications = Notification.objects.none() else: @@ -255,7 +258,10 @@ class MarkAllReadNotificationViewSet(BaseViewSet): # Created issues if type == "created": if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 + workspace__slug=slug, + member=request.user, + role__lt=15, + is_active=True, ).exists(): notifications = Notification.objects.none() else: diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py index f0ea9acc9..d2b65d926 100644 --- a/apiserver/plane/api/views/oauth.py +++ b/apiserver/plane/api/views/oauth.py @@ -2,6 +2,7 @@ import uuid import requests import os +from requests.exceptions import RequestException # Django imports from django.utils import timezone @@ -20,7 +21,14 @@ from google.oauth2 import id_token from google.auth.transport import requests as google_auth_request # Module imports -from plane.db.models import SocialLoginConnection, User +from plane.db.models import ( + SocialLoginConnection, + User, + WorkspaceMemberInvite, + WorkspaceMember, + ProjectMemberInvite, + ProjectMember, +) from plane.api.serializers import UserSerializer from .base import BaseAPIView @@ -168,7 +176,6 @@ class OauthEndpoint(BaseAPIView): ) ## Login Case - if not user.is_active: return Response( { @@ -185,12 +192,61 @@ class OauthEndpoint(BaseAPIView): user.is_email_verified = email_verified user.save() - access_token, refresh_token = get_tokens_for_user(user) + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - } + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + 
ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() SocialLoginConnection.objects.update_or_create( medium=medium, @@ -201,26 +257,36 @@ class OauthEndpoint(BaseAPIView): "last_login_at": timezone.now(), }, ) - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": f"oauth-{medium}", + try: + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": f"oauth-{medium}", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_IN", }, - "event_type": "SIGN_IN", - }, - ) + ) + except RequestException as e: + capture_exception(e) + + access_token, refresh_token = get_tokens_for_user(user) + + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } return Response(data, status=status.HTTP_200_OK) except User.DoesNotExist: @@ -260,31 +326,85 @@ class OauthEndpoint(BaseAPIView): user.token_updated_at = timezone.now() user.save() - access_token, refresh_token = get_tokens_for_user(user) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - } - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": f"oauth-{medium}", + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if 
project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() + + try: + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": f"oauth-{medium}", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_UP", }, - "event_type": "SIGN_UP", - }, - ) + ) + except RequestException as e: + capture_exception(e) SocialLoginConnection.objects.update_or_create( medium=medium, @@ -295,4 +415,10 @@ class OauthEndpoint(BaseAPIView): "last_login_at": timezone.now(), }, ) + + access_token, refresh_token = get_tokens_for_user(user) + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } return Response(data, status=status.HTTP_201_CREATED) diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 37e491e83..08c7fee4d 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -17,16 +17,16 @@ from django.db.models import ( ) from django.core.validators import validate_email from django.conf import settings +from django.utils import timezone # Third Party imports from rest_framework.response import Response from rest_framework import status from rest_framework import serializers from rest_framework.permissions import AllowAny -from sentry_sdk import capture_exception # Module imports -from .base import BaseViewSet, BaseAPIView +from .base import BaseViewSet, BaseAPIView, WebhookMixin from plane.api.serializers import ( ProjectSerializer, ProjectListSerializer, @@ -39,6 +39,7 @@ from plane.api.serializers import ( ) from plane.api.permissions import ( + WorkspaceUserPermission, ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, @@ -58,13 +59,6 @@ from plane.db.models import ( ProjectIdentifier, Module, Cycle, - CycleFavorite, - ModuleFavorite, - PageFavorite, - IssueViewFavorite, - Page, - IssueAssignee, - ModuleMember, Inbox, ProjectDeployBoard, IssueProperty, @@ -73,9 +67,10 @@ from plane.db.models import ( from plane.bgtasks.project_invitation_task import project_invitation -class ProjectViewSet(BaseViewSet): +class ProjectViewSet(WebhookMixin, BaseViewSet): serializer_class = ProjectSerializer model = Project + webhook_event = "project" permission_classes = [ ProjectBasePermission, @@ -110,12 +105,15 @@ class ProjectViewSet(BaseViewSet): member=self.request.user, project_id=OuterRef("pk"), workspace__slug=self.kwargs.get("slug"), + is_active=True, ) ) ) .annotate( total_members=ProjectMember.objects.filter( - project_id=OuterRef("id"), member__is_bot=False + project_id=OuterRef("id"), + member__is_bot=False, + is_active=True, ) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -137,6 +135,7 @@ class ProjectViewSet(BaseViewSet): member_role=ProjectMember.objects.filter( project_id=OuterRef("pk"), member_id=self.request.user.id, + 
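
# The ProjectViewSet queryset above annotates each project with is_member through an
# Exists subquery limited to active memberships. The same pattern, extracted so it can
# be run on its own in a Django shell for this codebase:
from django.db.models import Exists, OuterRef

from plane.db.models import Project, ProjectMember


def projects_with_membership_flag(slug, user):
    # is_member is True only when `user` has an active ProjectMember row for the project.
    return Project.objects.filter(workspace__slug=slug).annotate(
        is_member=Exists(
            ProjectMember.objects.filter(
                member=user,
                project_id=OuterRef("pk"),
                workspace__slug=slug,
                is_active=True,
            )
        )
    )
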
is_active=True, ).values("role") ) .annotate( @@ -157,6 +156,7 @@ class ProjectViewSet(BaseViewSet): member=request.user, project_id=OuterRef("pk"), workspace__slug=self.kwargs.get("slug"), + is_active=True, ).values("sort_order") projects = ( self.get_queryset() @@ -166,6 +166,7 @@ class ProjectViewSet(BaseViewSet): "project_projectmember", queryset=ProjectMember.objects.filter( workspace__slug=slug, + is_active=True, ).select_related("member"), ) ) @@ -345,66 +346,104 @@ class ProjectViewSet(BaseViewSet): ) -class InviteProjectEndpoint(BaseAPIView): +class ProjectInvitationsViewset(BaseViewSet): + serializer_class = ProjectMemberInviteSerializer + model = ProjectMemberInvite + + search_fields = [] + permission_classes = [ ProjectBasePermission, ] - def post(self, request, slug, project_id): - email = request.data.get("email", False) - role = request.data.get("role", False) - - # Check if email is provided - if not email: - return Response( - {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - validate_email(email) - # Check if user is already a member of workspace - if ProjectMember.objects.filter( - project_id=project_id, - member__email=email, - member__is_bot=False, - ).exists(): - return Response( - {"error": "User is already member of workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.filter(email=email).first() - - if user is None: - token = jwt.encode( - {"email": email, "timestamp": datetime.now().timestamp()}, - settings.SECRET_KEY, - algorithm="HS256", - ) - project_invitation_obj = ProjectMemberInvite.objects.create( - email=email.strip().lower(), - project_id=project_id, - token=token, - role=role, - ) - domain = settings.WEB_URL - project_invitation.delay(email, project_id, token, domain) - - return Response( - { - "message": "Email sent successfully", - "id": project_invitation_obj.id, - }, - status=status.HTTP_200_OK, - ) - - project_member = ProjectMember.objects.create( - member=user, project_id=project_id, role=role + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .select_related("project") + .select_related("workspace", "workspace__owner") ) - _ = IssueProperty.objects.create(user=user, project_id=project_id) + def create(self, request, slug, project_id): + emails = request.data.get("emails", []) + + # Check if email is provided + if not emails: + return Response( + {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + requesting_user = ProjectMember.objects.get( + workspace__slug=slug, project_id=project_id, member_id=request.user.id + ) + + # Check if any invited user has an higher role + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + project_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + project_invitations.append( + ProjectMemberInvite( + email=email.get("email").strip().lower(), + project_id=project_id, + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) + ) + except ValidationError: + return Response( + { + 
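
# The project list endpoint above prefetches only active memberships with
# Prefetch("project_projectmember", ...) so serializers can read the related members
# without extra queries. A small standalone sketch of the same idea:
from django.db.models import Prefetch

from plane.db.models import Project, ProjectMember


def projects_with_active_members(slug):
    return Project.objects.filter(workspace__slug=slug).prefetch_related(
        Prefetch(
            "project_projectmember",
            queryset=ProjectMember.objects.filter(
                workspace__slug=slug,
                is_active=True,
            ).select_related("member"),
        )
    )
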
"error": f"Invalid email - {email} provided a valid email address is required to send the invite" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Create workspace member invite + project_invitations = ProjectMemberInvite.objects.bulk_create( + project_invitations, batch_size=10, ignore_conflicts=True + ) + current_site = f"{request.scheme}://{request.get_host()}", + + # Send invitations + for invitation in project_invitations: + project_invitations.delay( + invitation.email, + project_id, + invitation.token, + current_site, + request.user.email, + ) return Response( - ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK + { + "message": "Email sent successfully", + }, + status=status.HTTP_200_OK, ) @@ -420,40 +459,134 @@ class UserProjectInvitationsViewset(BaseViewSet): .select_related("workspace", "workspace__owner", "project") ) - def create(self, request): - invitations = request.data.get("invitations") - project_invitations = ProjectMemberInvite.objects.filter( - pk__in=invitations, accepted=True + def create(self, request, slug): + project_ids = request.data.get("project_ids", []) + + # Get the workspace user role + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, ) + + workspace_role = workspace_member.role + workspace = workspace_member.workspace + ProjectMember.objects.bulk_create( [ ProjectMember( - project=invitation.project, - workspace=invitation.project.workspace, + project_id=project_id, member=request.user, - role=invitation.role, + role=15 if workspace_role >= 15 else 10, + workspace=workspace, created_by=request.user, ) - for invitation in project_invitations - ] + for project_id in project_ids + ], + ignore_conflicts=True, ) IssueProperty.objects.bulk_create( [ - ProjectMember( - project=invitation.project, - workspace=invitation.project.workspace, + IssueProperty( + project_id=project_id, user=request.user, + workspace=workspace, created_by=request.user, ) - for invitation in project_invitations - ] + for project_id in project_ids + ], + ignore_conflicts=True, ) - # Delete joined project invites - project_invitations.delete() + return Response( + {"message": "Projects joined successfully"}, + status=status.HTTP_201_CREATED, + ) - return Response(status=status.HTTP_204_NO_CONTENT) + +class ProjectJoinEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request, slug, project_id, pk): + project_invite = ProjectMemberInvite.objects.get( + pk=pk, + project_id=project_id, + workspace__slug=slug, + ) + + email = request.data.get("email", "") + + if email == "" or project_invite.email != email: + return Response( + {"error": "You do not have permission to join the project"}, + status=status.HTTP_403_FORBIDDEN, + ) + + if project_invite.responded_at is None: + project_invite.accepted = request.data.get("accepted", False) + project_invite.responded_at = timezone.now() + project_invite.save() + + if project_invite.accepted: + # Check if the user account exists + user = User.objects.filter(email=email).first() + + # Check if user is a part of workspace + workspace_member = WorkspaceMember.objects.filter( + workspace__slug=slug, member=user + ).first() + # Add him to workspace + if workspace_member is None: + _ = WorkspaceMember.objects.create( + workspace_id=project_invite.workspace_id, + member=user, + role=15 if project_invite.role >= 15 else project_invite.role, + ) + else: + # Else make him active + workspace_member.is_active = True + workspace_member.save() + + 
# Check if the user was already a member of project then activate the user + project_member = ProjectMember.objects.filter( + workspace_id=project_invite.workspace_id, member=user + ).first() + if project_member is None: + # Create a Project Member + _ = ProjectMember.objects.create( + workspace_id=project_invite.workspace_id, + member=user, + role=project_invite.role, + ) + else: + project_member.is_active = True + project_member.role = project_member.role + project_member.save() + + return Response( + {"message": "Project Invitation Accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"message": "Project Invitation was not accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"error": "You have already responded to the invitation request"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug, project_id, pk): + project_invitation = ProjectMemberInvite.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + serializer = ProjectMemberInviteSerializer(project_invitation) + return Response(serializer.data, status=status.HTTP_200_OK) class ProjectMemberViewSet(BaseViewSet): @@ -475,6 +608,7 @@ class ProjectMemberViewSet(BaseViewSet): .filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) .filter(member__is_bot=False) + .filter() .select_related("project") .select_related("member") .select_related("workspace", "workspace__owner") @@ -542,13 +676,17 @@ class ProjectMemberViewSet(BaseViewSet): def list(self, request, slug, project_id): project_member = ProjectMember.objects.get( - member=request.user, workspace__slug=slug, project_id=project_id + member=request.user, + workspace__slug=slug, + project_id=project_id, + is_active=True, ) project_members = ProjectMember.objects.filter( project_id=project_id, workspace__slug=slug, member__is_bot=False, + is_active=True, ).select_related("project", "member", "workspace") if project_member.role > 10: @@ -559,7 +697,10 @@ class ProjectMemberViewSet(BaseViewSet): def partial_update(self, request, slug, project_id, pk): project_member = ProjectMember.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id + pk=pk, + workspace__slug=slug, + project_id=project_id, + is_active=True, ) if request.user.id == project_member.member_id: return Response( @@ -568,7 +709,10 @@ class ProjectMemberViewSet(BaseViewSet): ) # Check while updating user roles requested_project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, ) if ( "role" in request.data @@ -591,54 +735,66 @@ class ProjectMemberViewSet(BaseViewSet): def destroy(self, request, slug, project_id, pk): project_member = ProjectMember.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk + workspace__slug=slug, + project_id=project_id, + pk=pk, + member__is_bot=False, + is_active=True, ) # check requesting user role requesting_project_member = ProjectMember.objects.get( - workspace__slug=slug, member=request.user, project_id=project_id + workspace__slug=slug, + member=request.user, + project_id=project_id, + is_active=True, ) + # User cannot remove himself + if str(project_member.id) == str(requesting_project_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. 
Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # User cannot deactivate higher role if requesting_project_member.role < project_member.role: return Response( - {"error": "You cannot remove a user having role higher than yourself"}, + {"error": "You cannot remove a user having role higher than you"}, status=status.HTTP_400_BAD_REQUEST, ) - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, - project_id=project_id, - assignee=project_member.member, - ).delete() + project_member.is_active = False + project_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) - # Remove if module member - ModuleMember.objects.filter( + def leave(self, request, slug, project_id): + project_member = ProjectMember.objects.get( workspace__slug=slug, project_id=project_id, - member=project_member.member, - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, - project_id=project_id, - owned_by=project_member.member, - ).delete() - project_member.delete() + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the project + if ( + project_member.role == 20 + and not ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the project as your the only admin of the project you will have to either delete the project or create an another admin", + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # Deactivate the user + project_member.is_active = False + project_member.save() return Response(status=status.HTTP_204_NO_CONTENT) @@ -691,46 +847,6 @@ class AddTeamToProjectEndpoint(BaseAPIView): return Response(serializer.data, status=status.HTTP_201_CREATED) -class ProjectMemberInvitationsViewset(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - search_fields = [] - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .select_related("project") - .select_related("workspace", "workspace__owner") - ) - - -class ProjectMemberInviteDetailViewSet(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - search_fields = [] - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .select_related("project") - .select_related("workspace", "workspace__owner") - ) - - class ProjectIdentifierEndpoint(BaseAPIView): permission_classes = [ ProjectBasePermission, @@ -774,59 +890,14 @@ class ProjectIdentifierEndpoint(BaseAPIView): ) -class ProjectJoinEndpoint(BaseAPIView): - def 
post(self, request, slug): - project_ids = request.data.get("project_ids", []) - - # Get the workspace user role - workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug - ) - - workspace_role = workspace_member.role - workspace = workspace_member.workspace - - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project_id=project_id, - member=request.user, - role=20 - if workspace_role >= 15 - else (15 if workspace_role == 10 else workspace_role), - workspace=workspace, - created_by=request.user, - ) - for project_id in project_ids - ], - ignore_conflicts=True, - ) - - IssueProperty.objects.bulk_create( - [ - IssueProperty( - project_id=project_id, - user=request.user, - workspace=workspace, - created_by=request.user, - ) - for project_id in project_ids - ], - ignore_conflicts=True, - ) - - return Response( - {"message": "Projects joined successfully"}, - status=status.HTTP_201_CREATED, - ) - - class ProjectUserViewsEndpoint(BaseAPIView): def post(self, request, slug, project_id): project = Project.objects.get(pk=project_id, workspace__slug=slug) project_member = ProjectMember.objects.filter( - member=request.user, project=project + member=request.user, + project=project, + is_active=True, ).first() if project_member is None: @@ -850,7 +921,10 @@ class ProjectUserViewsEndpoint(BaseAPIView): class ProjectMemberUserEndpoint(BaseAPIView): def get(self, request, slug, project_id): project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, ) serializer = ProjectMemberSerializer(project_member) @@ -983,39 +1057,6 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): return Response(projects, status=status.HTTP_200_OK) -class LeaveProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectLitePermission, - ] - - def delete(self, request, slug, project_id): - project_member = ProjectMember.objects.get( - workspace__slug=slug, - member=request.user, - project_id=project_id, - ) - - # Only Admin case - if ( - project_member.role == 20 - and ProjectMember.objects.filter( - workspace__slug=slug, - role=20, - project_id=project_id, - ).count() - == 1 - ): - return Response( - { - "error": "You cannot leave the project since you are the only admin of the project you should delete the project" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Delete the member from workspace - project_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - class ProjectPublicCoverImagesEndpoint(BaseAPIView): permission_classes = [ AllowAny, diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py index 2e40565b4..9b488489a 100644 --- a/apiserver/plane/api/views/user.py +++ b/apiserver/plane/api/views/user.py @@ -13,13 +13,7 @@ from plane.api.serializers import ( ) from plane.api.views.base import BaseViewSet, BaseAPIView -from plane.db.models import ( - User, - Workspace, - WorkspaceMemberInvite, - Issue, - IssueActivity, -) +from plane.db.models import User, IssueActivity, WorkspaceMember from plane.utils.paginator import BasePaginator @@ -41,10 +35,28 @@ class UserEndpoint(BaseViewSet): serialized_data = UserMeSettingsSerializer(request.user).data return Response(serialized_data, status=status.HTTP_200_OK) + def deactivate(self, request): + # Check all workspace user is active + user = self.get_object() + if WorkspaceMember.objects.filter( + member=request.user, is_active=True + 
).exists(): + return Response( + { + "error": "User cannot deactivate account as user is active in some workspaces" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Deactivate the user + user.is_active = False + user.save() + return Response(status=status.HTTP_204_NO_CONTENT) + class UpdateUserOnBoardedEndpoint(BaseAPIView): def patch(self, request): - user = User.objects.get(pk=request.user.id) + user = User.objects.get(pk=request.user.id, is_active=True) user.is_onboarded = request.data.get("is_onboarded", False) user.save() return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) @@ -52,7 +64,7 @@ class UpdateUserOnBoardedEndpoint(BaseAPIView): class UpdateUserTourCompletedEndpoint(BaseAPIView): def patch(self, request): - user = User.objects.get(pk=request.user.id) + user = User.objects.get(pk=request.user.id, is_active=True) user.is_tour_completed = request.data.get("is_tour_completed", False) user.save() return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/webhook.py b/apiserver/plane/api/views/webhook.py new file mode 100644 index 000000000..91a2f6729 --- /dev/null +++ b/apiserver/plane/api/views/webhook.py @@ -0,0 +1,130 @@ +# Django imports +from django.db import IntegrityError + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.db.models import Webhook, WebhookLog, Workspace +from plane.db.models.webhook import generate_token +from .base import BaseAPIView +from plane.api.permissions import WorkspaceOwnerPermission +from plane.api.serializers import WebhookSerializer, WebhookLogSerializer + + +class WebhookEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug): + workspace = Workspace.objects.get(slug=slug) + + try: + serializer = WebhookSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(workspace_id=workspace.id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"error": "URL already exists for the workspace"}, + status=status.HTTP_410_GONE, + ) + raise IntegrityError + + def get(self, request, slug, pk=None): + if pk == None: + webhooks = Webhook.objects.filter(workspace__slug=slug) + serializer = WebhookSerializer( + webhooks, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + many=True, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + else: + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + serializer = WebhookSerializer( + webhook, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + def patch(self, request, slug, pk): + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + serializer = WebhookSerializer( + webhook, + data=request.data, + partial=True, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, 
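
# WebhookEndpoint above passes fields=(...) to WebhookSerializer so responses can omit
# columns such as the secret_key. The serializer base class is not part of this diff;
# the snippet below is the standard DRF "dynamic fields" pattern that such a call
# implies, shown only as an assumed sketch.
from rest_framework import serializers


class DynamicFieldsModelSerializer(serializers.ModelSerializer):
    def __init__(self, *args, **kwargs):
        fields = kwargs.pop("fields", None)
        super().__init__(*args, **kwargs)
        if fields is not None:
            # Drop every declared field that was not explicitly requested.
            for field_name in set(self.fields) - set(fields):
                self.fields.pop(field_name)
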
status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, pk): + webhook = Webhook.objects.get(pk=pk, workspace__slug=slug) + webhook.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WebhookSecretRegenerateEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug, pk): + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + webhook.secret_key = generate_token() + webhook.save() + serializer = WebhookSerializer(webhook) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WebhookLogsEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def get(self, request, slug, webhook_id): + webhook_logs = WebhookLog.objects.filter( + workspace__slug=slug, webhook_id=webhook_id + ) + serializer = WebhookLogSerializer(webhook_logs, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index c53fbf126..3fc9b7bde 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py @@ -2,7 +2,6 @@ import jwt from datetime import date, datetime from dateutil.relativedelta import relativedelta -from uuid import uuid4 # Django imports from django.db import IntegrityError @@ -26,13 +25,11 @@ from django.db.models import ( ) from django.db.models.functions import ExtractWeek, Cast, ExtractDay from django.db.models.fields import DateField -from django.contrib.auth.hashers import make_password # Third party modules from rest_framework import status from rest_framework.response import Response -from rest_framework.permissions import AllowAny -from sentry_sdk import capture_exception +from rest_framework.permissions import AllowAny, IsAuthenticated # Module imports from plane.api.serializers import ( @@ -59,14 +56,6 @@ from plane.db.models import ( IssueActivity, Issue, WorkspaceTheme, - IssueAssignee, - ProjectFavorite, - CycleFavorite, - ModuleMember, - ModuleFavorite, - PageFavorite, - Page, - IssueViewFavorite, IssueLink, IssueAttachment, IssueSubscriber, @@ -106,7 +95,9 @@ class WorkSpaceViewSet(BaseViewSet): def get_queryset(self): member_count = ( WorkspaceMember.objects.filter( - workspace=OuterRef("id"), member__is_bot=False + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, ) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -181,7 +172,9 @@ class UserWorkSpacesEndpoint(BaseAPIView): def get(self, request): member_count = ( WorkspaceMember.objects.filter( - workspace=OuterRef("id"), member__is_bot=False + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, ) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -227,23 +220,40 @@ class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): return Response({"status": not workspace}, status=status.HTTP_200_OK) -class InviteWorkspaceEndpoint(BaseAPIView): +class WorkspaceInvitationsViewset(BaseViewSet): + """Endpoint for creating, listing and deleting workspaces""" + + serializer_class = WorkSpaceMemberInviteSerializer + model = WorkspaceMemberInvite + permission_classes = [ WorkSpaceAdminPermission, ] - def post(self, request, slug): - emails = request.data.get("emails", False) + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "workspace__owner", "created_by") + ) + + def create(self, request, slug): + 
emails = request.data.get("emails", []) # Check if email is provided - if not emails or not len(emails): + if not emails: return Response( {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST ) - # check for role level + # check for role level of the requesting user requesting_user = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + workspace__slug=slug, + member=request.user, + is_active=True, ) + + # Check if any invited user has an higher role if len( [ email @@ -256,15 +266,17 @@ class InviteWorkspaceEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) + # Get the workspace object workspace = Workspace.objects.get(slug=slug) # Check if user is already a member of workspace workspace_members = WorkspaceMember.objects.filter( workspace_id=workspace.id, member__email__in=[email.get("email") for email in emails], + is_active=True, ).select_related("member", "workspace", "workspace__owner") - if len(workspace_members): + if workspace_members: return Response( { "error": "Some users are already member of workspace", @@ -302,35 +314,20 @@ class InviteWorkspaceEndpoint(BaseAPIView): }, status=status.HTTP_400_BAD_REQUEST, ) - WorkspaceMemberInvite.objects.bulk_create( + # Create workspace member invite + workspace_invitations = WorkspaceMemberInvite.objects.bulk_create( workspace_invitations, batch_size=10, ignore_conflicts=True ) - workspace_invitations = WorkspaceMemberInvite.objects.filter( - email__in=[email.get("email") for email in emails] - ).select_related("workspace") - - # create the user if signup is disabled - if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: - _ = User.objects.bulk_create( - [ - User( - username=str(uuid4().hex), - email=invitation.email, - password=make_password(uuid4().hex), - is_password_autoset=True, - ) - for invitation in workspace_invitations - ], - batch_size=100, - ) + current_site = f"{request.scheme}://{request.get_host()}", + # Send invitations for invitation in workspace_invitations: workspace_invitation.delay( invitation.email, workspace.id, invitation.token, - settings.WEB_URL, + current_site, request.user.email, ) @@ -341,11 +338,19 @@ class InviteWorkspaceEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) + def destroy(self, request, slug, pk): + workspace_member_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + workspace_member_invite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) -class JoinWorkspaceEndpoint(BaseAPIView): + +class WorkspaceJoinEndpoint(BaseAPIView): permission_classes = [ AllowAny, ] + """Invitation response endpoint the user can respond to the invitation""" def post(self, request, slug, pk): workspace_invite = WorkspaceMemberInvite.objects.get( @@ -354,12 +359,14 @@ class JoinWorkspaceEndpoint(BaseAPIView): email = request.data.get("email", "") + # Check the email if email == "" or workspace_invite.email != email: return Response( {"error": "You do not have permission to join the workspace"}, status=status.HTTP_403_FORBIDDEN, ) + # If already responded then return error if workspace_invite.responded_at is None: workspace_invite.accepted = request.data.get("accepted", False) workspace_invite.responded_at = timezone.now() @@ -371,12 +378,23 @@ class JoinWorkspaceEndpoint(BaseAPIView): # If the user is present then create the workspace member if user is not None: - WorkspaceMember.objects.create( - workspace=workspace_invite.workspace, - member=user, - role=workspace_invite.role, - ) + # Check if the user was already a member of 
workspace then activate the user + workspace_member = WorkspaceMember.objects.filter( + workspace=workspace_invite.workspace, member=user + ).first() + if workspace_member is not None: + workspace_member.is_active = True + workspace_member.role = workspace_invite.role + workspace_member.save() + else: + # Create a Workspace + _ = WorkspaceMember.objects.create( + workspace=workspace_invite.workspace, + member=user, + role=workspace_invite.role, + ) + # Set the user last_workspace_id to the accepted workspace user.last_workspace_id = workspace_invite.workspace.id user.save() @@ -388,6 +406,7 @@ class JoinWorkspaceEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) + # Workspace invitation rejected return Response( {"message": "Workspace Invitation was not accepted"}, status=status.HTTP_200_OK, @@ -398,37 +417,13 @@ class JoinWorkspaceEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - -class WorkspaceInvitationsViewset(BaseViewSet): - serializer_class = WorkSpaceMemberInviteSerializer - model = WorkspaceMemberInvite - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "workspace__owner", "created_by") - ) - - def destroy(self, request, slug, pk): - workspace_member_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - # delete the user if signup is disabled - if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: - user = User.objects.filter(email=workspace_member_invite.email).first() - if user is not None: - user.delete() - workspace_member_invite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) + def get(self, request, slug, pk): + workspace_invitation = WorkspaceMemberInvite.objects.get(workspace__slug=slug, pk=pk) + serializer = WorkSpaceMemberInviteSerializer(workspace_invitation) + return Response(serializer.data, status=status.HTTP_200_OK) -class UserWorkspaceInvitationsEndpoint(BaseViewSet): +class UserWorkspaceInvitationsViewSet(BaseViewSet): serializer_class = WorkSpaceMemberInviteSerializer model = WorkspaceMemberInvite @@ -442,9 +437,19 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet): ) def create(self, request): - invitations = request.data.get("invitations") - workspace_invitations = WorkspaceMemberInvite.objects.filter(pk__in=invitations) + invitations = request.data.get("invitations", []) + workspace_invitations = WorkspaceMemberInvite.objects.filter( + pk__in=invitations, email=request.user.email + ).order_by("-created_at") + # If the user is already a member of workspace and was deactivated then activate the user + for invitation in workspace_invitations: + # Update the WorkspaceMember for this specific invitation + WorkspaceMember.objects.filter( + workspace_id=invitation.workspace_id, member=request.user + ).update(is_active=True, role=invitation.role) + + # Bulk create the user for all the workspaces WorkspaceMember.objects.bulk_create( [ WorkspaceMember( @@ -481,20 +486,24 @@ class WorkSpaceMemberViewSet(BaseViewSet): return self.filter_queryset( super() .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug"), member__is_bot=False) + .filter( + workspace__slug=self.kwargs.get("slug"), + member__is_bot=False, + is_active=True, + ) .select_related("workspace", "workspace__owner") .select_related("member") ) def list(self, request, slug): workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug + 
member=request.user, + workspace__slug=slug, + is_active=True, ) - workspace_members = WorkspaceMember.objects.filter( - workspace__slug=slug, - member__is_bot=False, - ).select_related("workspace", "member") + # Get all active workspace members + workspace_members = self.get_queryset() if workspace_member.role > 10: serializer = WorkspaceMemberAdminSerializer(workspace_members, many=True) @@ -506,7 +515,12 @@ class WorkSpaceMemberViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_200_OK) def partial_update(self, request, slug, pk): - workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug) + workspace_member = WorkspaceMember.objects.get( + pk=pk, + workspace__slug=slug, + member__is_bot=False, + is_active=True, + ) if request.user.id == workspace_member.member_id: return Response( {"error": "You cannot update your own role"}, @@ -515,7 +529,9 @@ class WorkSpaceMemberViewSet(BaseViewSet): # Get the requested user role requested_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + workspace__slug=slug, + member=request.user, + is_active=True, ) # Check if role is being updated # One cannot update role higher than his own role @@ -540,68 +556,121 @@ class WorkSpaceMemberViewSet(BaseViewSet): def destroy(self, request, slug, pk): # Check the user role who is deleting the user - workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk) + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + pk=pk, + member__is_bot=False, + is_active=True, + ) # check requesting user role requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + workspace__slug=slug, + member=request.user, + is_active=True, ) + + if str(workspace_member.id) == str(requesting_workspace_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. 
Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + if requesting_workspace_member.role < workspace_member.role: return Response( {"error": "You cannot remove a user having role higher than you"}, status=status.HTTP_400_BAD_REQUEST, ) - # Check for the only member in the workspace if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter( - workspace__slug=slug, - role=20, - member__is_bot=False, - ).count() - == 1 + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=request.user.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() ): return Response( - {"error": "Cannot delete the only Admin for the workspace"}, + { + "error": "User is part of some projects where they are the only admin you should leave that project first" + }, status=status.HTTP_400_BAD_REQUEST, ) - # Delete the user also from all the projects - ProjectMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, assignee=workspace_member.member - ).delete() + # Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) - # Remove if module member - ModuleMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, owned_by=workspace_member.member - ).delete() + workspace_member.is_active = False + workspace_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) - workspace_member.delete() + def leave(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the workspace + if ( + workspace_member.role == 20 + and not WorkspaceMember.objects.filter( + workspace__slug=slug, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the workspace as your the only admin of the workspace you will have to either delete the workspace or create an another admin" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ( + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=request.user.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() + ): + return Response( + { + "error": "User is part of some projects where they are the only admin you should leave that project first" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # 
# Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) + + # # Deactivate the user + workspace_member.is_active = False + workspace_member.save() return Response(status=status.HTTP_204_NO_CONTENT) @@ -629,7 +698,9 @@ class TeamMemberViewSet(BaseViewSet): def create(self, request, slug): members = list( WorkspaceMember.objects.filter( - workspace__slug=slug, member__id__in=request.data.get("members", []) + workspace__slug=slug, + member__id__in=request.data.get("members", []), + is_active=True, ) .annotate(member_str_id=Cast("member", output_field=CharField())) .distinct() @@ -658,23 +729,6 @@ class TeamMemberViewSet(BaseViewSet): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) -class UserWorkspaceInvitationEndpoint(BaseViewSet): - model = WorkspaceMemberInvite - serializer_class = WorkSpaceMemberInviteSerializer - - permission_classes = [ - AllowAny, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(pk=self.kwargs.get("pk")) - .select_related("workspace") - ) - - class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): def get(self, request): user = User.objects.get(pk=request.user.id) @@ -711,7 +765,9 @@ class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): class WorkspaceMemberUserEndpoint(BaseAPIView): def get(self, request, slug): workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug + member=request.user, + workspace__slug=slug, + is_active=True, ) serializer = WorkspaceMemberMeSerializer(workspace_member) return Response(serializer.data, status=status.HTTP_200_OK) @@ -720,7 +776,9 @@ class WorkspaceMemberUserEndpoint(BaseAPIView): class WorkspaceMemberUserViewsEndpoint(BaseAPIView): def post(self, request, slug): workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + workspace__slug=slug, + member=request.user, + is_active=True, ) workspace_member.view_props = request.data.get("view_props", {}) workspace_member.save() @@ -1046,7 +1104,9 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): user_data = User.objects.get(pk=user_id) requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + workspace__slug=slug, + member=request.user, + is_active=True, ) projects = [] if requesting_workspace_member.role >= 10: @@ -1250,9 +1310,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) - return Response( - issues, status=status.HTTP_200_OK - ) + return Response(issues, status=status.HTTP_200_OK) class WorkspaceLabelsEndpoint(BaseAPIView): @@ -1266,30 +1324,3 @@ class WorkspaceLabelsEndpoint(BaseAPIView): project__project_projectmember__member=request.user, ).values("parent", "name", "color", "id", "project_id", "workspace__slug") return Response(labels, status=status.HTTP_200_OK) - - -class LeaveWorkspaceEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def delete(self, request, slug): - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - - # Only Admin case - if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter(workspace__slug=slug, role=20).count() - == 1 - ): - return Response( - { - "error": "You cannot leave the workspace since you are the only admin of the workspace you should delete the workspace" - }, - 
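
# The workspace member destroy and leave flows above refuse to drop a user who is the
# sole admin (role 20) of any project in the workspace, using Count aggregates with a
# filter=Q(...) condition. The same query extracted into a helper so it can be read and
# tested on its own:
from django.db.models import Count, Q

from plane.db.models import Project


def is_sole_project_admin(slug, user_id):
    return (
        Project.objects.annotate(
            total_members=Count("project_projectmember"),
            member_with_role=Count(
                "project_projectmember",
                filter=Q(
                    project_projectmember__member_id=user_id,
                    project_projectmember__role=20,
                ),
            ),
        )
        .filter(total_members=1, member_with_role=1, workspace__slug=slug)
        .exists()
    )
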
status=status.HTTP_400_BAD_REQUEST, - ) - # Delete the member from workspace - workspace_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/authentication/__init__.py b/apiserver/plane/authentication/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/authentication/api_authentication.py b/apiserver/plane/authentication/api_authentication.py new file mode 100644 index 000000000..ddabb4132 --- /dev/null +++ b/apiserver/plane/authentication/api_authentication.py @@ -0,0 +1,47 @@ +# Django imports +from django.utils import timezone +from django.db.models import Q + +# Third party imports +from rest_framework import authentication +from rest_framework.exceptions import AuthenticationFailed + +# Module imports +from plane.db.models import APIToken + + +class APIKeyAuthentication(authentication.BaseAuthentication): + """ + Authentication with an API Key + """ + + www_authenticate_realm = "api" + media_type = "application/json" + auth_header_name = "X-Api-Key" + + def get_api_token(self, request): + return request.headers.get(self.auth_header_name) + + def validate_api_token(self, token): + try: + api_token = APIToken.objects.get( + Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), + token=token, + is_active=True, + ) + except APIToken.DoesNotExist: + raise AuthenticationFailed("Given API token is not valid") + + # save api token last used + api_token.last_used = timezone.now() + api_token.save(update_fields=["last_used"]) + return (api_token.user, api_token.token) + + def authenticate(self, request): + token = self.get_api_token(request=request) + if not token: + return None + + # Validate the API token + user, token = self.validate_api_token(token) + return user, token diff --git a/apiserver/plane/authentication/apps.py b/apiserver/plane/authentication/apps.py new file mode 100644 index 000000000..de6100e0f --- /dev/null +++ b/apiserver/plane/authentication/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ApiConfig(AppConfig): + name = "plane.authentication" diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py index 14bece21b..f9e3df21e 100644 --- a/apiserver/plane/bgtasks/importer_task.py +++ b/apiserver/plane/bgtasks/importer_task.py @@ -73,6 +73,12 @@ def service_importer(service, importer_id): ] ) + # Check if any of the users are already member of workspace + _ = WorkspaceMember.objects.filter( + member__in=[user for user in workspace_users], + workspace_id=importer.workspace_id, + ).update(is_active=True) + # Add new users to Workspace and project automatically WorkspaceMember.objects.bulk_create( [ diff --git a/apiserver/plane/bgtasks/project_invitation_task.py b/apiserver/plane/bgtasks/project_invitation_task.py index a9adad02f..e6571d795 100644 --- a/apiserver/plane/bgtasks/project_invitation_task.py +++ b/apiserver/plane/bgtasks/project_invitation_task.py @@ -14,23 +14,24 @@ from plane.license.models import InstanceConfiguration from plane.license.utils.instance_value import get_configuration_value @shared_task -def project_invitation(email, project_id, token, current_site): +def project_invitation(email, project_id, token, current_site, invitor): try: + user = User.objects.get(email=invitor) project = Project.objects.get(pk=project_id) project_member_invite = ProjectMemberInvite.objects.get( token=token, email=email ) - relativelink = f"/project-member-invitation/{project_member_invite.id}" + relativelink = 
f"/project-invitations/?invitation_id={project_member_invite.id}&email={email}&slug={project.workspace.slug}&project_id={str(project_id)}" abs_url = current_site + relativelink from_email_string = settings.EMAIL_FROM - subject = f"{project.created_by.first_name or project.created_by.email} invited you to join {project.name} on Plane" + subject = f"{user.first_name or user.display_name or user.email} invited you to join {project.name} on Plane" context = { "email": email, - "first_name": project.created_by.first_name, + "first_name": user.first_name, "project_name": project.name, "invitation_url": abs_url, } diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py new file mode 100644 index 000000000..57f94dc03 --- /dev/null +++ b/apiserver/plane/bgtasks/webhook_task.py @@ -0,0 +1,139 @@ +import requests +import uuid +import hashlib +import json + +# Django imports +from django.conf import settings + +# Third party imports +from celery import shared_task +from sentry_sdk import capture_exception + +from plane.db.models import Webhook, WebhookLog + + +@shared_task( + bind=True, + autoretry_for=(requests.RequestException,), + retry_backoff=600, + max_retries=5, + retry_jitter=True, +) +def webhook_task(self, webhook, slug, event, event_data, action): + try: + webhook = Webhook.objects.get(id=webhook, workspace__slug=slug) + + headers = { + "Content-Type": "application/json", + "User-Agent": "Autopilot", + "X-Plane-Delivery": str(uuid.uuid4()), + "X-Plane-Event": event, + } + + # Your secret key + if webhook.secret_key: + # Concatenate the data and the secret key + message = event_data + webhook.secret_key + + # Create a SHA-256 hash of the message + sha256 = hashlib.sha256() + sha256.update(message.encode("utf-8")) + signature = sha256.hexdigest() + headers["X-Plane-Signature"] = signature + + event_data = json.loads(event_data) if event_data is not None else None + + action = { + "POST": "create", + "PATCH": "update", + "PUT": "update", + "DELETE": "delete", + }.get(action, action) + + payload = { + "event": event, + "action": action, + "webhook_id": str(webhook.id), + "workspace_id": str(webhook.workspace_id), + "data": event_data, + } + + # Send the webhook event + response = requests.post( + webhook.url, + headers=headers, + json=payload, + timeout=30, + ) + + # Log the webhook request + WebhookLog.objects.create( + workspace_id=str(webhook.workspace_id), + webhook_id=str(webhook.id), + event_type=str(event), + request_method=str(action), + request_headers=str(headers), + request_body=str(payload), + response_status=str(response.status_code), + response_headers=str(response.headers), + response_body=str(response.text), + retry_count=str(self.request.retries), + ) + + except requests.RequestException as e: + # Log the failed webhook request + WebhookLog.objects.create( + workspace_id=str(webhook.workspace_id), + webhook_id=str(webhook.id), + event_type=str(event), + request_method=str(action), + request_headers=str(headers), + request_body=str(payload), + response_status=500, + response_headers="", + response_body=str(e), + retry_count=str(self.request.retries), + ) + + # Retry logic + if self.request.retries >= self.max_retries: + Webhook.objects.filter(pk=webhook.id).update(is_active=False) + return + raise requests.RequestException() + + except Exception as e: + if settings.DEBUG: + print(e) + capture_exception(e) + return + + +@shared_task() +def send_webhook(event, event_data, action, slug): + try: + webhooks = 
Webhook.objects.filter(workspace__slug=slug, is_active=True) + + if event == "project": + webhooks = webhooks.filter(project=True) + + if event == "issue": + webhooks = webhooks.filter(issue=True) + + if event == "module": + webhooks = webhooks.filter(module=True) + + if event == "cycle": + webhooks = webhooks.filter(cycle=True) + + if event == "issue-comment": + webhooks = webhooks.filter(issue_comment=True) + + for webhook in webhooks: + webhook_task.delay(webhook.id, slug, event, event_data, action) + + except Exception as e: + if settings.DEBUG: + print(e) + capture_exception(e) + return diff --git a/apiserver/plane/bgtasks/workspace_invitation_task.py b/apiserver/plane/bgtasks/workspace_invitation_task.py index d8c40a6a3..477c7f6fa 100644 --- a/apiserver/plane/bgtasks/workspace_invitation_task.py +++ b/apiserver/plane/bgtasks/workspace_invitation_task.py @@ -11,26 +11,32 @@ from slack_sdk import WebClient from slack_sdk.errors import SlackApiError # Module imports -from plane.db.models import Workspace, WorkspaceMemberInvite +from plane.db.models import Workspace, WorkspaceMemberInvite, User from plane.license.models import InstanceConfiguration from plane.license.utils.instance_value import get_configuration_value + @shared_task def workspace_invitation(email, workspace_id, token, current_site, invitor): try: + user = User.objects.get(email=invitor) + workspace = Workspace.objects.get(pk=workspace_id) workspace_member_invite = WorkspaceMemberInvite.objects.get( token=token, email=email ) - realtivelink = ( - f"/workspace-member-invitation/?invitation_id={workspace_member_invite.id}&email={email}" - ) - abs_url = current_site + realtivelink + # Relative link + relative_link = f"/workspace-invitations/?invitation_id={workspace_member_invite.id}&email={email}&slug={workspace.slug}" + # The complete url including the domain + abs_url = current_site + relative_link + + # The email from from_email_string = settings.EMAIL_FROM - subject = f"{invitor or email} invited you to join {workspace.name} on Plane" + # Subject of the email + subject = f"{user.first_name or user.display_name or user.email} invited you to join {workspace.name} on Plane" context = { "email": email, @@ -48,17 +54,33 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): workspace_member_invite.message = text_content workspace_member_invite.save() - instance_configuration = InstanceConfiguration.objects.filter(key__startswith='EMAIL_').values("key", "value") + instance_configuration = InstanceConfiguration.objects.filter( + key__startswith="EMAIL_" + ).values("key", "value") connection = get_connection( host=get_configuration_value(instance_configuration, "EMAIL_HOST"), - port=int(get_configuration_value(instance_configuration, "EMAIL_PORT", "587")), + port=int( + get_configuration_value(instance_configuration, "EMAIL_PORT", "587") + ), username=get_configuration_value(instance_configuration, "EMAIL_HOST_USER"), - password=get_configuration_value(instance_configuration, "EMAIL_HOST_PASSWORD"), - use_tls=bool(get_configuration_value(instance_configuration, "EMAIL_USE_TLS", "1")), - use_ssl=bool(get_configuration_value(instance_configuration, "EMAIL_USE_SSL", "0")), + password=get_configuration_value( + instance_configuration, "EMAIL_HOST_PASSWORD" + ), + use_tls=bool( + get_configuration_value(instance_configuration, "EMAIL_USE_TLS", "1") + ), + use_ssl=bool( + get_configuration_value(instance_configuration, "EMAIL_USE_SSL", "0") + ), ) # Initiate email alternatives - msg = 
EmailMultiAlternatives(subject=subject, body=text_content, from_email=get_configuration_value(instance_configuration, "EMAIL_FROM"), to=[email], connection=connection) + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=get_configuration_value(instance_configuration, "EMAIL_FROM"), + to=[email], + connection=connection, + ) msg.attach_alternative(html_content, "text/html") msg.send() diff --git a/apiserver/plane/db/migrations/0018_auto_20230130_0119.py b/apiserver/plane/db/migrations/0018_auto_20230130_0119.py index 500bc3b28..03eaeacd7 100644 --- a/apiserver/plane/db/migrations/0018_auto_20230130_0119.py +++ b/apiserver/plane/db/migrations/0018_auto_20230130_0119.py @@ -3,7 +3,7 @@ from django.conf import settings from django.db import migrations, models import django.db.models.deletion -import plane.db.models.api_token +import plane.db.models.api import uuid @@ -40,8 +40,8 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), - ('token', models.CharField(default=plane.db.models.api_token.generate_token, max_length=255, unique=True)), - ('label', models.CharField(default=plane.db.models.api_token.generate_label_token, max_length=255)), + ('token', models.CharField(default=plane.db.models.api.generate_token, max_length=255, unique=True)), + ('label', models.CharField(default=plane.db.models.api.generate_label_token, max_length=255)), ('user_type', models.PositiveSmallIntegerField(choices=[(0, 'Human'), (1, 'Bot')], default=0)), ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='apitoken_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='apitoken_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), diff --git a/apiserver/plane/db/migrations/0047_auto_20231030_0833.py b/apiserver/plane/db/migrations/0047_auto_20231030_0833.py new file mode 100644 index 000000000..0005e683c --- /dev/null +++ b/apiserver/plane/db/migrations/0047_auto_20231030_0833.py @@ -0,0 +1,116 @@ +# Generated by Django 4.2.5 on 2023-10-20 12:16 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.api +import plane.db.models.webhook +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0046_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='Webhook', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('url', models.URLField(validators=[plane.db.models.webhook.validate_schema, plane.db.models.webhook.validate_domain])), + ('is_active', models.BooleanField(default=True)), + ('secret_key', models.CharField(default=plane.db.models.webhook.generate_token, max_length=255)), + ('project', models.BooleanField(default=False)), + ('issue', models.BooleanField(default=False)), + ('module', 
models.BooleanField(default=False)), + ('cycle', models.BooleanField(default=False)), + ('issue_comment', models.BooleanField(default=False)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_webhooks', to='db.workspace')), + ], + options={ + 'verbose_name': 'Webhook', + 'verbose_name_plural': 'Webhooks', + 'db_table': 'webhooks', + 'ordering': ('-created_at',), + 'unique_together': {('workspace', 'url')}, + }, + ), + migrations.AddField( + model_name='apitoken', + name='description', + field=models.TextField(blank=True), + ), + migrations.AddField( + model_name='apitoken', + name='expired_at', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='apitoken', + name='is_active', + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name='apitoken', + name='last_used', + field=models.DateTimeField(null=True), + ), + migrations.CreateModel( + name='WebhookLog', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('event_type', models.CharField(blank=True, max_length=255, null=True)), + ('request_method', models.CharField(blank=True, max_length=10, null=True)), + ('request_headers', models.TextField(blank=True, null=True)), + ('request_body', models.TextField(blank=True, null=True)), + ('response_status', models.TextField(blank=True, null=True)), + ('response_headers', models.TextField(blank=True, null=True)), + ('response_body', models.TextField(blank=True, null=True)), + ('retry_count', models.PositiveSmallIntegerField(default=0)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('webhook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='logs', to='db.webhook')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='webhook_logs', to='db.workspace')), + ], + options={ + 'verbose_name': 'Webhook Log', + 'verbose_name_plural': 'Webhook Logs', + 'db_table': 'webhook_logs', + 'ordering': ('-created_at',), + }, + ), + migrations.CreateModel( + name='APIActivityLog', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('token_identifier', models.CharField(max_length=255)), + ('path', models.CharField(max_length=255)), + ('method', models.CharField(max_length=10)), + ('query_params', 
models.TextField(blank=True, null=True)), + ('headers', models.TextField(blank=True, null=True)), + ('body', models.TextField(blank=True, null=True)), + ('response_code', models.PositiveIntegerField()), + ('response_body', models.TextField(blank=True, null=True)), + ('ip_address', models.GenericIPAddressField(blank=True, null=True)), + ('user_agent', models.CharField(blank=True, max_length=512, null=True)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ], + options={ + 'verbose_name': 'API Activity Log', + 'verbose_name_plural': 'API Activity Logs', + 'db_table': 'api_activity_logs', + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/apiserver/plane/db/migrations/0047_issuemention_label_sort_order_and_more.py b/apiserver/plane/db/migrations/0047_issuemention_label_sort_order_and_more.py new file mode 100644 index 000000000..de7d6ed0f --- /dev/null +++ b/apiserver/plane/db/migrations/0047_issuemention_label_sort_order_and_more.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.5 on 2023-11-13 15:07 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid +import random + + +def random_sort_ordering(apps, schema_editor): + Label = apps.get_model("db", "Label") + + bulk_labels = [] + for label in Label.objects.all(): + label.sort_order = random.randint(0,65535) + bulk_labels.append(label) + + Label.objects.bulk_update(bulk_labels, ["sort_order"], batch_size=1000) + + +class Migration(migrations.Migration): + dependencies = [ + ("db", "0046_alter_analyticview_created_by_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="label", + name="sort_order", + field=models.FloatField(default=65535), + ), + migrations.RunPython(random_sort_ordering) + ] diff --git a/apiserver/plane/db/migrations/0047_issuemention_projectmember_is_deactivated_and_more.py b/apiserver/plane/db/migrations/0047_issuemention_projectmember_is_deactivated_and_more.py new file mode 100644 index 000000000..922bb16a7 --- /dev/null +++ b/apiserver/plane/db/migrations/0047_issuemention_projectmember_is_deactivated_and_more.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.5 on 2023-11-09 11:19 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0046_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='projectmember', + name='is_deactivated', + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name='workspacemember', + name='is_deactivated', + field=models.BooleanField(default=False), + ), + ] diff --git a/apiserver/plane/db/migrations/0048_issuemention_remove_projectmember_is_deactivated_and_more.py b/apiserver/plane/db/migrations/0048_issuemention_remove_projectmember_is_deactivated_and_more.py new file mode 100644 index 000000000..4ac133ada --- /dev/null +++ b/apiserver/plane/db/migrations/0048_issuemention_remove_projectmember_is_deactivated_and_more.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.5 on 2023-11-10 09:41 + +from django.conf import settings +from django.db import migrations, 
models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0047_issuemention_projectmember_is_deactivated_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='projectmember', + name='is_active', + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name='workspacemember', + name='is_active', + field=models.BooleanField(default=True), + ), + ] diff --git a/apiserver/plane/db/migrations/0049_issuemention_remove_projectmember_is_deactivated_and_more.py b/apiserver/plane/db/migrations/0049_issuemention_remove_projectmember_is_deactivated_and_more.py new file mode 100644 index 000000000..060b970dc --- /dev/null +++ b/apiserver/plane/db/migrations/0049_issuemention_remove_projectmember_is_deactivated_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.5 on 2023-11-11 17:57 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0048_issuemention_remove_projectmember_is_deactivated_and_more'), + ] + + operations = [ + migrations.RemoveField( + model_name='projectmember', + name='is_deactivated', + ), + migrations.RemoveField( + model_name='workspacemember', + name='is_deactivated', + ), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index d8286f8f8..37ac6dfb5 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -54,7 +54,7 @@ from .view import GlobalView, IssueView, IssueViewFavorite from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite -from .api_token import APIToken +from .api import APIToken, APIActivityLog from .integration import ( WorkspaceIntegration, @@ -79,3 +79,5 @@ from .analytic import AnalyticView from .notification import Notification from .exporter import ExporterHistory + +from .webhook import Webhook, WebhookLog diff --git a/apiserver/plane/db/models/api.py b/apiserver/plane/db/models/api.py new file mode 100644 index 000000000..0fa1d4aba --- /dev/null +++ b/apiserver/plane/db/models/api.py @@ -0,0 +1,80 @@ +# Python imports +from uuid import uuid4 + +# Django imports +from django.db import models +from django.conf import settings + +from .base import BaseModel + + +def generate_label_token(): + return uuid4().hex + + +def generate_token(): + return "plane_api_" + uuid4().hex + + +class APIToken(BaseModel): + # Meta information + label = models.CharField(max_length=255, default=generate_label_token) + description = models.TextField(blank=True) + is_active = models.BooleanField(default=True) + last_used = models.DateTimeField(null=True) + + # Token + token = models.CharField( + max_length=255, unique=True, default=generate_token, db_index=True + ) + + # User Information + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="bot_tokens", + ) + user_type = models.PositiveSmallIntegerField( + choices=((0, "Human"), (1, "Bot")), default=0 + ) + workspace = models.ForeignKey( + "db.Workspace", related_name="api_tokens", on_delete=models.CASCADE, null=True + ) + expired_at = models.DateTimeField(blank=True, null=True) + + class Meta: + verbose_name = "API Token" + verbose_name_plural = "API Tokens" + db_table = "api_tokens" + ordering = ("-created_at",) + + def __str__(self): + return str(self.user.id) + + +class APIActivityLog(BaseModel): + token_identifier = 
models.CharField(max_length=255) + + # Request Info + path = models.CharField(max_length=255) + method = models.CharField(max_length=10) + query_params = models.TextField(null=True, blank=True) + headers = models.TextField(null=True, blank=True) + body = models.TextField(null=True, blank=True) + + # Response info + response_code = models.PositiveIntegerField() + response_body = models.TextField(null=True, blank=True) + + # Meta information + ip_address = models.GenericIPAddressField(null=True, blank=True) + user_agent = models.CharField(max_length=512, null=True, blank=True) + + class Meta: + verbose_name = "API Activity Log" + verbose_name_plural = "API Activity Logs" + db_table = "api_activity_logs" + ordering = ("-created_at",) + + def __str__(self): + return str(self.token_identifier) diff --git a/apiserver/plane/db/models/api_token.py b/apiserver/plane/db/models/api_token.py deleted file mode 100644 index b4009e6eb..000000000 --- a/apiserver/plane/db/models/api_token.py +++ /dev/null @@ -1,41 +0,0 @@ -# Python imports -from uuid import uuid4 - -# Django imports -from django.db import models -from django.conf import settings - -from .base import BaseModel - - -def generate_label_token(): - return uuid4().hex - - -def generate_token(): - return uuid4().hex + uuid4().hex - - -class APIToken(BaseModel): - token = models.CharField(max_length=255, unique=True, default=generate_token) - label = models.CharField(max_length=255, default=generate_label_token) - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - related_name="bot_tokens", - ) - user_type = models.PositiveSmallIntegerField( - choices=((0, "Human"), (1, "Bot")), default=0 - ) - workspace = models.ForeignKey( - "db.Workspace", related_name="api_tokens", on_delete=models.CASCADE, null=True - ) - - class Meta: - verbose_name = "API Token" - verbose_name_plural = "API Tokems" - db_table = "api_tokens" - ordering = ("-created_at",) - - def __str__(self): - return str(self.user.name) diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index 0c227a158..a951e5c11 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -431,6 +431,7 @@ class Label(ProjectBaseModel): name = models.CharField(max_length=255) description = models.TextField(blank=True) color = models.CharField(max_length=255, blank=True) + sort_order = models.FloatField(default=65535) class Meta: unique_together = ["name", "project"] @@ -439,6 +440,18 @@ class Label(ProjectBaseModel): db_table = "labels" ordering = ("-created_at",) + def save(self, *args, **kwargs): + if self._state.adding: + # Get the maximum sequence value from the database + last_id = Label.objects.filter(project=self.project).aggregate( + largest=models.Max("sort_order") + )["largest"] + # if last_id is not None + if last_id is not None: + self.sort_order = last_id + 10000 + + super(Label, self).save(*args, **kwargs) + def __str__(self): return str(self.name) diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index f4ace65e5..fe72c260b 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -166,6 +166,7 @@ class ProjectMember(ProjectBaseModel): default_props = models.JSONField(default=get_default_props) preferences = models.JSONField(default=get_default_preferences) sort_order = models.FloatField(default=65535) + is_active = models.BooleanField(default=True) def save(self, *args, **kwargs): if self._state.adding: diff --git 
a/apiserver/plane/db/models/webhook.py b/apiserver/plane/db/models/webhook.py new file mode 100644 index 000000000..6698ec5b0 --- /dev/null +++ b/apiserver/plane/db/models/webhook.py @@ -0,0 +1,89 @@ +# Python imports +from uuid import uuid4 +from urllib.parse import urlparse + +# Django imports +from django.db import models +from django.core.exceptions import ValidationError + +# Module imports +from plane.db.models import BaseModel + + +def generate_token(): + return "plane_wh_" + uuid4().hex + + +def validate_schema(value): + parsed_url = urlparse(value) + if parsed_url.scheme not in ["http", "https"]: + raise ValidationError("Invalid schema. Only HTTP and HTTPS are allowed.") + + +def validate_domain(value): + parsed_url = urlparse(value) + domain = parsed_url.netloc + if domain in ["localhost", "127.0.0.1"]: + raise ValidationError("Local URLs are not allowed.") + + +class Webhook(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", + on_delete=models.CASCADE, + related_name="workspace_webhooks", + ) + url = models.URLField( + validators=[ + validate_schema, + validate_domain, + ] + ) + is_active = models.BooleanField(default=True) + secret_key = models.CharField(max_length=255, default=generate_token) + project = models.BooleanField(default=False) + issue = models.BooleanField(default=False) + module = models.BooleanField(default=False) + cycle = models.BooleanField(default=False) + issue_comment = models.BooleanField(default=False) + + def __str__(self): + return f"{self.workspace.slug} {self.url}" + + class Meta: + unique_together = ["workspace", "url"] + verbose_name = "Webhook" + verbose_name_plural = "Webhooks" + db_table = "webhooks" + ordering = ("-created_at",) + + +class WebhookLog(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", on_delete=models.CASCADE, related_name="webhook_logs" + ) + # Associated webhook + webhook = models.ForeignKey(Webhook, on_delete=models.CASCADE, related_name="logs") + + # Basic request details + event_type = models.CharField(max_length=255, blank=True, null=True) + request_method = models.CharField(max_length=10, blank=True, null=True) + request_headers = models.TextField(blank=True, null=True) + request_body = models.TextField(blank=True, null=True) + + # Response details + response_status = models.TextField(blank=True, null=True) + response_headers = models.TextField(blank=True, null=True) + response_body = models.TextField(blank=True, null=True) + + # Retry Count + retry_count = models.PositiveSmallIntegerField(default=0) + + class Meta: + verbose_name = "Webhook Log" + verbose_name_plural = "Webhook Logs" + db_table = "webhook_logs" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.event_type} {str(self.webhook.url)}" diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index d1012f549..3b694062b 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -99,6 +99,7 @@ class WorkspaceMember(BaseModel): view_props = models.JSONField(default=get_default_props) default_props = models.JSONField(default=get_default_props) issue_props = models.JSONField(default=get_issue_props) + is_active = models.BooleanField(default=True) class Meta: unique_together = ["workspace", "member"] diff --git a/apiserver/plane/middleware/api_log_middleware.py b/apiserver/plane/middleware/api_log_middleware.py new file mode 100644 index 000000000..a1894fad5 --- /dev/null +++ b/apiserver/plane/middleware/api_log_middleware.py @@ 
-0,0 +1,40 @@ +from plane.db.models import APIToken, APIActivityLog + + +class APITokenLogMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + request_body = request.body + response = self.get_response(request) + self.process_request(request, response, request_body) + return response + + def process_request(self, request, response, request_body): + api_key_header = "X-Api-Key" + api_key = request.headers.get(api_key_header) + # If the API key is present, log the request + if api_key: + try: + APIActivityLog.objects.create( + token_identifier=api_key, + path=request.path, + method=request.method, + query_params=request.META.get("QUERY_STRING", ""), + headers=str(request.headers), + body=(request_body.decode('utf-8') if request_body else None), + response_body=( + response.content.decode("utf-8") if response.content else None + ), + response_code=response.status_code, + ip_address=request.META.get("REMOTE_ADDR", None), + user_agent=request.META.get("HTTP_USER_AGENT", None), + ) + + except Exception as e: + print(e) + # If the token does not exist, you can decide whether to log this as an invalid attempt + pass + + return None diff --git a/apiserver/plane/proxy/__init__.py b/apiserver/plane/proxy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/proxy/apps.py b/apiserver/plane/proxy/apps.py new file mode 100644 index 000000000..e5a5a80ef --- /dev/null +++ b/apiserver/plane/proxy/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ProxyConfig(AppConfig): + name = "plane.proxy" diff --git a/apiserver/plane/proxy/rate_limit.py b/apiserver/plane/proxy/rate_limit.py new file mode 100644 index 000000000..16fce639d --- /dev/null +++ b/apiserver/plane/proxy/rate_limit.py @@ -0,0 +1,45 @@ +from django.utils import timezone +from rest_framework.throttling import SimpleRateThrottle + + +class ApiKeyRateThrottle(SimpleRateThrottle): + scope = 'api_key' + + def get_cache_key(self, request, view): + # Retrieve the API key from the request header + api_key = request.headers.get('X-Api-Key') + if not api_key: + return None # Allow the request if there's no API key + + # Use the API key as part of the cache key + return f'{self.scope}:{api_key}' + + def allow_request(self, request, view): + # Calculate the current time as a Unix timestamp + now = timezone.now().timestamp() + + # Use the parent class's method to check if the request is allowed + allowed = super().allow_request(request, view) + + if allowed: + # Calculate the remaining limit and reset time + history = self.cache.get(self.key, []) + + # Remove old histories + while history and history[-1] <= now - self.duration: + history.pop() + + # Calculate the requests + num_requests = len(history) + + # Check available requests + available = self.num_requests - num_requests + + # Unix timestamp for when the rate limit will reset + reset_time = int(now + self.duration) + + # Add headers + request.META['X-RateLimit-Remaining'] = max(0, available) + request.META['X-RateLimit-Reset'] = reset_time + + return allowed diff --git a/apiserver/plane/proxy/urls/__init__.py b/apiserver/plane/proxy/urls/__init__.py new file mode 100644 index 000000000..2ba6385d5 --- /dev/null +++ b/apiserver/plane/proxy/urls/__init__.py @@ -0,0 +1,13 @@ +from .cycle import urlpatterns as cycle_patterns +from .inbox import urlpatterns as inbox_patterns +from .issue import urlpatterns as issue_patterns +from .module import urlpatterns as module_patterns +from .project 
import urlpatterns as project_patterns + +urlpatterns = [ + *cycle_patterns, + *inbox_patterns, + *issue_patterns, + *module_patterns, + *project_patterns, +] diff --git a/apiserver/plane/proxy/urls/cycle.py b/apiserver/plane/proxy/urls/cycle.py new file mode 100644 index 000000000..e4f7cfe78 --- /dev/null +++ b/apiserver/plane/proxy/urls/cycle.py @@ -0,0 +1,35 @@ +from django.urls import path + +from plane.proxy.views.cycle import ( + CycleAPIEndpoint, + CycleIssueAPIEndpoint, + TransferCycleIssueAPIEndpoint, +) + +urlpatterns = [ + path( + "workspaces//projects//cycles/", + CycleAPIEndpoint.as_view(), + name="cycles", + ), + path( + "workspaces//projects//cycles//", + CycleAPIEndpoint.as_view(), + name="cycles", + ), + path( + "workspaces//projects//cycles//cycle-issues/", + CycleIssueAPIEndpoint.as_view(), + name="cycle-issues", + ), + path( + "workspaces//projects//cycles//cycle-issues//", + CycleIssueAPIEndpoint.as_view(), + name="cycle-issues", + ), + path( + "workspaces//projects//cycles//transfer-issues/", + TransferCycleIssueAPIEndpoint.as_view(), + name="transfer-issues", + ), +] diff --git a/apiserver/plane/proxy/urls/inbox.py b/apiserver/plane/proxy/urls/inbox.py new file mode 100644 index 000000000..39a630ee8 --- /dev/null +++ b/apiserver/plane/proxy/urls/inbox.py @@ -0,0 +1,17 @@ +from django.urls import path + +from plane.proxy.views import InboxIssueAPIEndpoint + + +urlpatterns = [ + path( + "workspaces//projects//inboxes//inbox-issues/", + InboxIssueAPIEndpoint.as_view(), + name="inbox-issue", + ), + path( + "workspaces//projects//inboxes//inbox-issues//", + InboxIssueAPIEndpoint.as_view(), + name="inbox-issue", + ), +] diff --git a/apiserver/plane/proxy/urls/issue.py b/apiserver/plane/proxy/urls/issue.py new file mode 100644 index 000000000..0fb236521 --- /dev/null +++ b/apiserver/plane/proxy/urls/issue.py @@ -0,0 +1,51 @@ +from django.urls import path + +from plane.proxy.views import ( + IssueAPIEndpoint, + LabelAPIEndpoint, + IssueLinkAPIEndpoint, + IssueCommentAPIEndpoint, +) + +urlpatterns = [ + path( + "workspaces//projects//issues/", + IssueAPIEndpoint.as_view(), + name="issues", + ), + path( + "workspaces//projects//issues//", + IssueAPIEndpoint.as_view(), + name="issues", + ), + path( + "workspaces//projects//issue-labels/", + LabelAPIEndpoint.as_view(), + name="labels", + ), + path( + "workspaces//projects//issue-labels//", + LabelAPIEndpoint.as_view(), + name="labels", + ), + path( + "workspaces//projects//issues//issue-links/", + IssueLinkAPIEndpoint.as_view(), + name="issue-links", + ), + path( + "workspaces//projects//issues//issue-links//", + IssueLinkAPIEndpoint.as_view(), + name="issue-links", + ), + path( + "workspaces//projects//issues//comments/", + IssueCommentAPIEndpoint.as_view(), + name="project-issue-comment", + ), + path( + "workspaces//projects//issues//comments//", + IssueCommentAPIEndpoint.as_view(), + name="project-issue-comment", + ), +] diff --git a/apiserver/plane/proxy/urls/module.py b/apiserver/plane/proxy/urls/module.py new file mode 100644 index 000000000..289c8596b --- /dev/null +++ b/apiserver/plane/proxy/urls/module.py @@ -0,0 +1,26 @@ +from django.urls import path + +from plane.proxy.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects//modules/", + ModuleAPIEndpoint.as_view(), + name="modules", + ), + path( + "workspaces//projects//modules//", + ModuleAPIEndpoint.as_view(), + name="modules", + ), + path( + "workspaces//projects//modules//module-issues/", + 
ModuleIssueAPIEndpoint.as_view(), + name="module-issues", + ), + path( + "workspaces//projects//modules//module-issues//", + ModuleIssueAPIEndpoint.as_view(), + name="module-issues", + ), +] diff --git a/apiserver/plane/proxy/urls/project.py b/apiserver/plane/proxy/urls/project.py new file mode 100644 index 000000000..c97625197 --- /dev/null +++ b/apiserver/plane/proxy/urls/project.py @@ -0,0 +1,16 @@ +from django.urls import path + +from plane.proxy.views import ProjectAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects/", + ProjectAPIEndpoint.as_view(), + name="project", + ), + path( + "workspaces//projects//", + ProjectAPIEndpoint.as_view(), + name="project", + ), +] diff --git a/apiserver/plane/proxy/views/__init__.py b/apiserver/plane/proxy/views/__init__.py new file mode 100644 index 000000000..fcbd5182b --- /dev/null +++ b/apiserver/plane/proxy/views/__init__.py @@ -0,0 +1,18 @@ +from .project import ProjectAPIEndpoint + +from .issue import ( + IssueAPIEndpoint, + LabelAPIEndpoint, + IssueLinkAPIEndpoint, + IssueCommentAPIEndpoint, +) + +from .cycle import ( + CycleAPIEndpoint, + CycleIssueAPIEndpoint, + TransferCycleIssueAPIEndpoint, +) + +from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint + +from .inbox import InboxIssueAPIEndpoint \ No newline at end of file diff --git a/apiserver/plane/proxy/views/base.py b/apiserver/plane/proxy/views/base.py new file mode 100644 index 000000000..d5dc9fc4c --- /dev/null +++ b/apiserver/plane/proxy/views/base.py @@ -0,0 +1,101 @@ +# Python imports +import re +import json +import requests + +# Django imports +from django.conf import settings + +# Third party imports +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.permissions import IsAuthenticated +from rest_framework_simplejwt.tokens import RefreshToken + +# Module imports +from plane.authentication.api_authentication import APIKeyAuthentication +from plane.proxy.rate_limit import ApiKeyRateThrottle + + +class BaseAPIView(APIView): + authentication_classes = [ + APIKeyAuthentication, + ] + + permission_classes = [ + IsAuthenticated, + ] + + throttle_classes = [ + ApiKeyRateThrottle, + ] + + def _get_jwt_token(self, request): + refresh = RefreshToken.for_user(request.user) + return str(refresh.access_token) + + def _get_url_path(self, request): + match = re.search(r"/v1/(.*)", request.path) + return match.group(1) if match else "" + + def _get_headers(self, request): + return { + "Authorization": f"Bearer {self._get_jwt_token(request=request)}", + "Content-Type": request.headers.get("Content-Type", "application/json"), + } + + def _get_url(self, request): + path = self._get_url_path(request=request) + url = request.build_absolute_uri("/api/" + path) + return url + + def _get_query_params(self, request): + query_params = request.GET + return query_params + + def _get_payload(self, request): + content_type = request.headers.get("Content-Type", "application/json") + if content_type.startswith("multipart/form-data"): + files_dict = {k: v[0] for k, v in request.FILES.lists()} + return (None, files_dict) + else: + return (json.dumps(request.data), None) + + def _make_request(self, request, method="GET"): + data_payload, files_payload = self._get_payload(request=request) + response = requests.request( + method=method, + url=self._get_url(request=request), + headers=self._get_headers(request=request), + params=self._get_query_params(request=request), + data=data_payload, + files=files_payload, + ) + return response.json(), 
response.status_code + + def finalize_response(self, request, response, *args, **kwargs): + # Call super to get the default response + response = super().finalize_response(request, response, *args, **kwargs) + + # Add custom headers if they exist in the request META + ratelimit_remaining = request.META.get('X-RateLimit-Remaining') + if ratelimit_remaining is not None: + response['X-RateLimit-Remaining'] = ratelimit_remaining + + ratelimit_reset = request.META.get('X-RateLimit-Reset') + if ratelimit_reset is not None: + response['X-RateLimit-Reset'] = ratelimit_reset + + return response + + def get(self, request, *args, **kwargs): + response, status_code = self._make_request(request=request, method="GET") + return Response(response, status=status_code) + + def post(self, request, *args, **kwargs): + response, status_code = self._make_request(request=request, method="POST") + return Response(response, status=status_code) + + def partial_update(self, request, *args, **kwargs): + response, status_code = self._make_request(request=request, method="PATCH") + return Response(response, status=status_code) diff --git a/apiserver/plane/proxy/views/cycle.py b/apiserver/plane/proxy/views/cycle.py new file mode 100644 index 000000000..2407693af --- /dev/null +++ b/apiserver/plane/proxy/views/cycle.py @@ -0,0 +1,30 @@ +from .base import BaseAPIView + + +class CycleAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to cycle. + + """ + + pass + + +class CycleIssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to cycle issues. + + """ + + pass + + +class TransferCycleIssueAPIEndpoint(BaseAPIView): + """ + This viewset provides `create` actions for transfering the issues into a particular cycle. + + """ + + pass diff --git a/apiserver/plane/proxy/views/inbox.py b/apiserver/plane/proxy/views/inbox.py new file mode 100644 index 000000000..7e79f4c0b --- /dev/null +++ b/apiserver/plane/proxy/views/inbox.py @@ -0,0 +1,10 @@ +from .base import BaseAPIView + + +class InboxIssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to inbox issues. + + """ + pass \ No newline at end of file diff --git a/apiserver/plane/proxy/views/issue.py b/apiserver/plane/proxy/views/issue.py new file mode 100644 index 000000000..56dc71a3a --- /dev/null +++ b/apiserver/plane/proxy/views/issue.py @@ -0,0 +1,37 @@ +from .base import BaseAPIView + + +class IssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to issue. + + """ + pass + + +class LabelAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to the labels. + + """ + pass + + +class IssueLinkAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to the links of the particular issue. + + """ + pass + + +class IssueCommentAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to comments of the particular issue. 
+ + """ + pass \ No newline at end of file diff --git a/apiserver/plane/proxy/views/module.py b/apiserver/plane/proxy/views/module.py new file mode 100644 index 000000000..3726d4af5 --- /dev/null +++ b/apiserver/plane/proxy/views/module.py @@ -0,0 +1,20 @@ +from .base import BaseAPIView + + +class ModuleAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module. + + """ + + pass + + +class ModuleIssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module issues. + + """ + pass diff --git a/apiserver/plane/proxy/views/project.py b/apiserver/plane/proxy/views/project.py new file mode 100644 index 000000000..6eb43d941 --- /dev/null +++ b/apiserver/plane/proxy/views/project.py @@ -0,0 +1,5 @@ +from .base import BaseAPIView + + +class ProjectAPIEndpoint(BaseAPIView): + pass \ No newline at end of file diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 110550f75..b14e38515 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -1,22 +1,43 @@ +"""Global Settings""" +# Python imports import os -import datetime +import ssl +import certifi from datetime import timedelta +from urllib.parse import urlparse + +# Django imports from django.core.management.utils import get_random_secret_key +# Third party imports +import dj_database_url +import sentry_sdk +from sentry_sdk.integrations.django import DjangoIntegration +from sentry_sdk.integrations.redis import RedisIntegration +from sentry_sdk.integrations.celery import CeleryIntegration BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - +# Secret Key SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key()) # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = True +DEBUG = False -ALLOWED_HOSTS = [] +# Allowed Hosts +ALLOWED_HOSTS = ["*"] +# To access webhook +ENABLE_WEBHOOK = os.environ.get("ENABLE_WEBHOOK", "1") == "1" + +# To access plane api through api tokens +ENABLE_API = os.environ.get("ENABLE_API", "1") == "1" + +# Redirect if / is not present +APPEND_SLASH = True + # Application definition - INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", @@ -30,6 +51,7 @@ INSTALLED_APPS = [ "plane.web", "plane.middleware", "plane.license", + "plane.proxy", # Third-party things "rest_framework", "rest_framework.authtoken", @@ -37,12 +59,13 @@ INSTALLED_APPS = [ "corsheaders", "taggit", "django_celery_beat", + "storages", ] +# Middlewares MIDDLEWARE = [ "corsheaders.middleware.CorsMiddleware", "django.middleware.security.SecurityMiddleware", - # "whitenoise.middleware.WhiteNoiseMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", @@ -50,8 +73,10 @@ MIDDLEWARE = [ "django.middleware.clickjacking.XFrameOptionsMiddleware", "crum.CurrentRequestUserMiddleware", "django.middleware.gzip.GZipMiddleware", - ] + "plane.middleware.api_log_middleware.APITokenLogMiddleware", +] +# Rest Framework settings REST_FRAMEWORK = { "DEFAULT_AUTHENTICATION_CLASSES": ( "rest_framework_simplejwt.authentication.JWTAuthentication", @@ -59,15 +84,19 @@ REST_FRAMEWORK = { "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",), "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",), "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), + "DEFAULT_THROTTLE_CLASSES": ("plane.proxy.rate_limit.ApiKeyRateThrottle",), + "DEFAULT_THROTTLE_RATES": { + "api_key": "60/minute", + }, } -AUTHENTICATION_BACKENDS = ( - "django.contrib.auth.backends.ModelBackend", # default - # "guardian.backends.ObjectPermissionBackend", -) +# Django Auth Backend +AUTHENTICATION_BACKENDS = ("django.contrib.auth.backends.ModelBackend",) # default +# Root Urls ROOT_URLCONF = "plane.urls" +# Templates TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", @@ -86,52 +115,68 @@ TEMPLATES = [ }, ] +# Cookie Settings +SESSION_COOKIE_SECURE = True +CSRF_COOKIE_SECURE = True -JWT_AUTH = { - "JWT_ENCODE_HANDLER": "rest_framework_jwt.utils.jwt_encode_handler", - "JWT_DECODE_HANDLER": "rest_framework_jwt.utils.jwt_decode_handler", - "JWT_PAYLOAD_HANDLER": "rest_framework_jwt.utils.jwt_payload_handler", - "JWT_PAYLOAD_GET_USER_ID_HANDLER": "rest_framework_jwt.utils.jwt_get_user_id_from_payload_handler", - "JWT_RESPONSE_PAYLOAD_HANDLER": "rest_framework_jwt.utils.jwt_response_payload_handler", - "JWT_SECRET_KEY": SECRET_KEY, - "JWT_GET_USER_SECRET_KEY": None, - "JWT_PUBLIC_KEY": None, - "JWT_PRIVATE_KEY": None, - "JWT_ALGORITHM": "HS256", - "JWT_VERIFY": True, - "JWT_VERIFY_EXPIRATION": True, - "JWT_LEEWAY": 0, - "JWT_EXPIRATION_DELTA": datetime.timedelta(seconds=604800), - "JWT_AUDIENCE": None, - "JWT_ISSUER": None, - "JWT_ALLOW_REFRESH": False, - "JWT_REFRESH_EXPIRATION_DELTA": datetime.timedelta(days=7), - "JWT_AUTH_HEADER_PREFIX": "JWT", - "JWT_AUTH_COOKIE": None, -} +# CORS Settings +CORS_ALLOW_CREDENTIALS = True +CORS_ALLOWED_ORIGINS = os.environ.get("CORS_ALLOWED_ORIGINS", "").split(",") +# Application Settings WSGI_APPLICATION = "plane.wsgi.application" ASGI_APPLICATION = "plane.asgi.application" # Django Sites - SITE_ID = 1 # User Model AUTH_USER_MODEL = "db.User" # Database - -DATABASES = { - 
"default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": os.path.join(BASE_DIR, "db.sqlite3"), +if bool(os.environ.get("DATABASE_URL")): + # Parse database configuration from $DATABASE_URL + DATABASES = { + "default": dj_database_url.config(), + } +else: + DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRES_DB"), + "USER": os.environ.get("POSTGRES_USER"), + "PASSWORD": os.environ.get("POSTGRES_PASSWORD"), + "HOST": os.environ.get("POSTGRES_HOST"), + } } -} +# Redis Config +REDIS_URL = os.environ.get("REDIS_URL") +REDIS_SSL = "rediss" in REDIS_URL -# Password validation +if REDIS_SSL: + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, + }, + } + } +else: + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + } + } +# Password validations AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", @@ -148,7 +193,6 @@ AUTH_PASSWORD_VALIDATORS = [ ] # Static files (CSS, JavaScript, Images) - STATIC_URL = "/static/" STATIC_ROOT = os.path.join(BASE_DIR, "static-assets", "collected-static") STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) @@ -157,24 +201,44 @@ STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) MEDIA_ROOT = "mediafiles" MEDIA_URL = "/media/" - # Internationalization - LANGUAGE_CODE = "en-us" - -TIME_ZONE = "UTC" - USE_I18N = True - USE_L10N = True +# Timezones USE_TZ = True +TIME_ZONE = "UTC" +# Default Auto Field DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" +# Email settings EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend" +# Storage Settings +STORAGES = { + "staticfiles": { + "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", + }, +} +STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} +AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") +AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") +AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_DEFAULT_ACL = "public-read" +AWS_QUERYSTRING_AUTH = False +AWS_S3_FILE_OVERWRITE = False +AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ.get( + "MINIO_ENDPOINT_URL", None +) +if AWS_S3_ENDPOINT_URL: + parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) + AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" + AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" + +# JWT Auth Configuration SIMPLE_JWT = { "ACCESS_TOKEN_LIFETIME": timedelta(minutes=10080), "REFRESH_TOKEN_LIFETIME": timedelta(days=43200), @@ -202,7 +266,70 @@ SIMPLE_JWT = { "SLIDING_TOKEN_REFRESH_LIFETIME": timedelta(days=1), } + +# Celery Configuration CELERY_TIMEZONE = TIME_ZONE -CELERY_TASK_SERIALIZER = 'json' -CELERY_ACCEPT_CONTENT = ['application/json'] -CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task","plane.bgtasks.exporter_expired_task") +CELERY_TASK_SERIALIZER = "json" +CELERY_ACCEPT_CONTENT = ["application/json"] + +if REDIS_SSL: + redis_url = os.environ.get("REDIS_URL") + broker_url = ( + f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" + ) + CELERY_BROKER_URL = broker_url + CELERY_RESULT_BACKEND = broker_url +else: + 
CELERY_BROKER_URL = REDIS_URL + CELERY_RESULT_BACKEND = REDIS_URL + +CELERY_IMPORTS = ( + "plane.bgtasks.issue_automation_task", + "plane.bgtasks.exporter_expired_task", +) + +# Sentry Settings +# Enable Sentry Settings +if bool(os.environ.get("SENTRY_DSN", False)): + sentry_sdk.init( + dsn=os.environ.get("SENTRY_DSN", ""), + integrations=[ + DjangoIntegration(), + RedisIntegration(), + CeleryIntegration(monitor_beat_tasks=True), + ], + traces_sample_rate=1, + send_default_pii=True, + environment=os.environ.get("ENVIRONMENT", "development"), + profiles_sample_rate=1.0, + ) + + +# Application Envs +PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) # For External +SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) +FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) +ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" + +# Unsplash Access key +UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") +# Github Access Token +GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) + +# Analytics +ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) +ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) + +# Open AI Settings +OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") +OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) +GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") + +# Scout Settings +SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) +SCOUT_KEY = os.environ.get("SCOUT_KEY", "") +SCOUT_NAME = "Plane" + +# Set the variable true if running in docker environment +DOCKERIZED = int(os.environ.get("DOCKERIZED", 1)) == 1 +USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py index 76586b0fe..9fa5ed0aa 100644 --- a/apiserver/plane/settings/local.py +++ b/apiserver/plane/settings/local.py @@ -1,123 +1,39 @@ -"""Development settings and globals.""" - -from __future__ import absolute_import - -import dj_database_url -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration - - +"""Development settings""" from .common import * # noqa -DEBUG = int(os.environ.get("DEBUG", 1)) == 1 +DEBUG = True ALLOWED_HOSTS = [ "*", ] +# Debug Toolbar settings +INSTALLED_APPS += ("debug_toolbar",) +MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) + +DEBUG_TOOLBAR_PATCH_SETTINGS = False + +# Only show emails in console don't send it to smtp EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("PGUSER", "plane"), - "USER": "", - "PASSWORD": "", - "HOST": os.environ.get("PGHOST", "localhost"), - } -} - -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 - -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -if DOCKERIZED: - DATABASES["default"] = dj_database_url.config() - CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", } } -INSTALLED_APPS += ("debug_toolbar",) - -MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) - -DEBUG_TOOLBAR_PATCH_SETTINGS = False - INTERNAL_IPS = ("127.0.0.1",) CORS_ORIGIN_ALLOW_ALL = True -if os.environ.get("SENTRY_DSN", False): - sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN"), - integrations=[DjangoIntegration(), RedisIntegration()], - # If 
you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. - send_default_pii=True, - environment="local", - traces_sample_rate=0.7, - profiles_sample_rate=1.0, - ) -else: - LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "handlers": { - "console": { - "class": "logging.StreamHandler", - }, - }, - "root": { - "handlers": ["console"], - "level": "DEBUG", - }, - "loggers": { - "*": { - "handlers": ["console"], - "level": "DEBUG", - "propagate": True, - }, - }, - } - -REDIS_HOST = "localhost" -REDIS_PORT = 6379 -REDIS_URL = os.environ.get("REDIS_URL") - - MEDIA_URL = "/uploads/" MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") -if DOCKERIZED: - REDIS_URL = os.environ.get("REDIS_URL") - -WEB_URL = os.environ.get("WEB_URL", "http://localhost:3000") -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -CELERY_RESULT_BACKEND = os.environ.get("REDIS_URL") -CELERY_BROKER_URL = os.environ.get("REDIS_URL") - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") +# For local settings +CORS_ALLOW_ALL_ORIGINS = True +CORS_ALLOWED_ORIGINS = [ + "http://localhost:3000", + "http://127.0.0.1:3000", + "http://localhost:4000", + "http://127.0.0.1:4000", +] diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 541a0cfd4..b230bbc32 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -1,282 +1,13 @@ -"""Production settings and globals.""" -import ssl -import certifi - -import dj_database_url - -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration -from urllib.parse import urlparse - +"""Production settings""" from .common import * # noqa -# Database +# SECURITY WARNING: don't run with debug turned on in production! DEBUG = int(os.environ.get("DEBUG", 0)) == 1 -if bool(os.environ.get("DATABASE_URL")): - # Parse database configuration from $DATABASE_URL - DATABASES["default"] = dj_database_url.config() -else: - DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("POSTGRES_DB"), - "USER": os.environ.get("POSTGRES_USER"), - "PASSWORD": os.environ.get("POSTGRES_PASSWORD"), - "HOST": os.environ.get("POSTGRES_HOST"), - } - } - - -SITE_ID = 1 - -# Set the variable true if running in docker environment -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 - -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# TODO: Make it FALSE and LIST DOMAINS IN FULL PROD. 
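# NOTE (editor sketch): the production hunk here removes the hard-coded
# CORS_ALLOW_ALL_ORIGINS / CORS_ALLOW_METHODS block, and the compose files in
# this patch introduce a CORS_ALLOWED_ORIGINS environment variable. The exact
# parsing lives outside this hunk, so the snippet below is only an assumed,
# minimal way the shared settings could turn that comma-separated variable
# into django-cors-headers options; it is not the project's verbatim code.
import os

cors_allowed_origins = os.environ.get("CORS_ALLOWED_ORIGINS", "http://localhost")

if cors_allowed_origins:
    CORS_ALLOW_ALL_ORIGINS = False
    CORS_ALLOWED_ORIGINS = [
        origin.strip() for origin in cors_allowed_origins.split(",") if origin.strip()
    ]
else:
    # With no configured origins, fall back to allowing everything (development only).
    CORS_ALLOW_ALL_ORIGINS = True

CORS_ALLOW_CREDENTIALS = True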
-CORS_ALLOW_ALL_ORIGINS = True - - -CORS_ALLOW_METHODS = [ - "DELETE", - "GET", - "OPTIONS", - "PATCH", - "POST", - "PUT", -] - -CORS_ALLOW_HEADERS = [ - "accept", - "accept-encoding", - "authorization", - "content-type", - "dnt", - "origin", - "user-agent", - "x-csrftoken", - "x-requested-with", -] - -CORS_ALLOW_CREDENTIALS = True - INSTALLED_APPS += ("scout_apm.django",) -STORAGES = { - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - -if bool(os.environ.get("SENTRY_DSN", False)): - sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN", ""), - integrations=[DjangoIntegration(), RedisIntegration()], - # If you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. - traces_sample_rate=1, - send_default_pii=True, - environment="production", - profiles_sample_rate=1.0, - ) - -if DOCKERIZED and USE_MINIO: - INSTALLED_APPS += ("storages",) - STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") - # The name of the bucket to store files in. - AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" - ) - # Default permissions - AWS_DEFAULT_ACL = "public-read" - AWS_QUERYSTRING_AUTH = False - AWS_S3_FILE_OVERWRITE = False - - # Custom Domain settings - parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" - AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" -else: - # The AWS region to connect to. - AWS_REGION = os.environ.get("AWS_REGION", "") - - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") - - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") - - # The optional AWS session token to use. - # AWS_SESSION_TOKEN = "" - - # The name of the bucket to store files in. - AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - - # How to construct S3 URLs ("auto", "path", "virtual"). - AWS_S3_ADDRESSING_STYLE = "auto" - - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - - # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. - AWS_S3_KEY_PREFIX = "" - - # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication - # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, - # and their permissions will be set to "public-read". - AWS_S3_BUCKET_AUTH = False - - # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` - # is True. It also affects the "Cache-Control" header of the files. - # Important: Changing this setting will not affect existing files. - AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours. - - # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting - # cannot be used with `AWS_S3_BUCKET_AUTH`. 
- AWS_S3_PUBLIC_URL = "" - - # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you - # understand the consequences before enabling. - # Important: Changing this setting will not affect existing files. - AWS_S3_REDUCED_REDUNDANCY = False - - # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_DISPOSITION = "" - - # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_LANGUAGE = "" - - # A mapping of custom metadata for each file. Each value can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_METADATA = {} - - # If True, then files will be stored using AES256 server-side encryption. - # If this is a string value (e.g., "aws:kms"), that encryption type will be used. - # Otherwise, server-side encryption is not be enabled. - # Important: Changing this setting will not affect existing files. - AWS_S3_ENCRYPT_KEY = False - - # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. - # This is only relevant if AWS S3 KMS server-side encryption is enabled (above). - # AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - - # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their - # compressed size is smaller than their uncompressed size. - # Important: Changing this setting will not affect existing files. - AWS_S3_GZIP = True - - # The signature version to use for S3 requests. - AWS_S3_SIGNATURE_VERSION = None - - # If True, then files with the same name will overwrite each other. By default it's set to False to have - # extra characters appended. 
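# NOTE (editor sketch): a worked restatement of the MinIO custom-domain logic
# this patch consolidates into the shared settings (parsed_url /
# AWS_S3_CUSTOM_DOMAIN / AWS_S3_URL_PROTOCOL). With the self-hosted defaults,
# object URLs are rewritten to pass through the reverse proxy at WEB_URL
# instead of the internal MinIO endpoint. Values below are illustrative defaults.
from urllib.parse import urlparse

web_url = "http://localhost"   # WEB_URL default used across this patch
bucket_name = "uploads"        # AWS_S3_BUCKET_NAME / BUCKET_NAME default

parsed_url = urlparse(web_url)
aws_s3_custom_domain = f"{parsed_url.netloc}/{bucket_name}"  # "localhost/uploads"
aws_s3_url_protocol = f"{parsed_url.scheme}:"                # "http:"

# django-storages then builds file URLs of the form:
#   http://localhost/uploads/<object-key>
print(f"{aws_s3_url_protocol}//{aws_s3_custom_domain}/example.png")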
- AWS_S3_FILE_OVERWRITE = False - - STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", - } -# AWS Settings End - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - - -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - - -REDIS_URL = os.environ.get("REDIS_URL") - -if DOCKERIZED: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } - } -else: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } - } - - -WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so") - -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -redis_url = os.environ.get("REDIS_URL") -broker_url = ( - f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" -) - -if DOCKERIZED: - CELERY_BROKER_URL = REDIS_URL - CELERY_RESULT_BACKEND = REDIS_URL -else: - CELERY_BROKER_URL = broker_url - CELERY_RESULT_BACKEND = broker_url - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -# Enable or Disable signups -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - -# Scout Settings -SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) -SCOUT_KEY = os.environ.get("SCOUT_KEY", "") -SCOUT_NAME = "Plane" - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") diff --git a/apiserver/plane/settings/selfhosted.py b/apiserver/plane/settings/selfhosted.py deleted file mode 100644 index ee529a7c3..000000000 --- a/apiserver/plane/settings/selfhosted.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Self hosted settings and globals.""" -from urllib.parse import urlparse - -import dj_database_url -from urllib.parse import urlparse - - -from .common import * # noqa - -# Database -DEBUG = int(os.environ.get("DEBUG", 0)) == 1 - -# Docker configurations -DOCKERIZED = 1 -USE_MINIO = 1 - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": "plane", - "USER": os.environ.get("PGUSER", ""), - "PASSWORD": os.environ.get("PGPASSWORD", ""), - "HOST": os.environ.get("PGHOST", ""), - } -} - -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() -SITE_ID = 1 - -# File size limit -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -CORS_ALLOW_METHODS = [ - "DELETE", - "GET", - "OPTIONS", - "PATCH", - "POST", - "PUT", -] - -CORS_ALLOW_HEADERS = [ - "accept", - "accept-encoding", - "authorization", - "content-type", - "dnt", - "origin", - "user-agent", - "x-csrftoken", - "x-requested-with", -] - -CORS_ALLOW_CREDENTIALS = True -CORS_ALLOW_ALL_ORIGINS = True - -STORAGES = { - 
"staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - -INSTALLED_APPS += ("storages",) -STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} -# The AWS access key to use. -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") -# The AWS secret access key to use. -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") -# The name of the bucket to store files in. -AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") -# The full URL to the S3 endpoint. Leave blank to use the default region URL. -AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" -) -# Default permissions -AWS_DEFAULT_ACL = "public-read" -AWS_QUERYSTRING_AUTH = False -AWS_S3_FILE_OVERWRITE = False - -# Custom Domain settings -parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) -AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" -AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - -# Security settings -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - -# Redis URL -REDIS_URL = os.environ.get("REDIS_URL") - -# Caches -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } -} - -# URL used for email redirects -WEB_URL = os.environ.get("WEB_URL", "http://localhost") - -# Celery settings -CELERY_BROKER_URL = REDIS_URL -CELERY_RESULT_BACKEND = REDIS_URL - -# Enable or Disable signups -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - -# Analytics -ANALYTICS_BASE_API = False - -# OPEN AI Settings -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py deleted file mode 100644 index fe4732343..000000000 --- a/apiserver/plane/settings/staging.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Production settings and globals.""" -from urllib.parse import urlparse -import ssl -import certifi - -import dj_database_url - -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration - -from .common import * # noqa - -# Database -DEBUG = int(os.environ.get("DEBUG", 1)) == 1 -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("PGUSER", "plane"), - "USER": "", - "PASSWORD": "", - "HOST": os.environ.get("PGHOST", "localhost"), - } -} - -# CORS WHITELIST ON PROD -CORS_ORIGIN_WHITELIST = [ - # "https://example.com", - # "https://sub.example.com", - # "http://localhost:8080", - # "http://127.0.0.1:9000" -] -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() -SITE_ID = 1 - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = ["*"] - -# TODO: Make it FALSE and LIST DOMAINS IN FULL PROD. 
-CORS_ALLOW_ALL_ORIGINS = True - -STORAGES = { - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - - -# Make true if running in a docker environment -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN"), - integrations=[DjangoIntegration(), RedisIntegration()], - # If you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. - traces_sample_rate=1, - send_default_pii=True, - environment="staging", - profiles_sample_rate=1.0, -) - -# The AWS region to connect to. -AWS_REGION = os.environ.get("AWS_REGION") - -# The AWS access key to use. -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") - -# The AWS secret access key to use. -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") - -# The optional AWS session token to use. -# AWS_SESSION_TOKEN = "" - - -# The name of the bucket to store files in. -AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - -# How to construct S3 URLs ("auto", "path", "virtual"). -AWS_S3_ADDRESSING_STYLE = "auto" - -# The full URL to the S3 endpoint. Leave blank to use the default region URL. -AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - -# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. -AWS_S3_KEY_PREFIX = "" - -# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication -# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, -# and their permissions will be set to "public-read". -AWS_S3_BUCKET_AUTH = False - -# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` -# is True. It also affects the "Cache-Control" header of the files. -# Important: Changing this setting will not affect existing files. -AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours. - -# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting -# cannot be used with `AWS_S3_BUCKET_AUTH`. -AWS_S3_PUBLIC_URL = "" - -# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you -# understand the consequences before enabling. -# Important: Changing this setting will not affect existing files. -AWS_S3_REDUCED_REDUNDANCY = False - -# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a -# single `name` argument. -# Important: Changing this setting will not affect existing files. -AWS_S3_CONTENT_DISPOSITION = "" - -# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a -# single `name` argument. -# Important: Changing this setting will not affect existing files. -AWS_S3_CONTENT_LANGUAGE = "" - -# A mapping of custom metadata for each file. Each value can be a string, or a function taking a -# single `name` argument. -# Important: Changing this setting will not affect existing files. -AWS_S3_METADATA = {} - -# If True, then files will be stored using AES256 server-side encryption. -# If this is a string value (e.g., "aws:kms"), that encryption type will be used. -# Otherwise, server-side encryption is not be enabled. 
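# NOTE (editor sketch): every settings variant touched by this patch resolves
# its database from DATABASE_URL via dj-database-url when that variable is
# set. Illustration of what the parser produces (the URL and the exact ENGINE
# string are examples; recent dj-database-url releases emit
# "django.db.backends.postgresql"):
import dj_database_url

example_url = "postgres://plane:plane@plane-db:5432/plane"
db_config = dj_database_url.parse(example_url)

# Roughly:
#   {"ENGINE": "django.db.backends.postgresql", "NAME": "plane",
#    "USER": "plane", "PASSWORD": "plane", "HOST": "plane-db", "PORT": 5432}
print(db_config["ENGINE"], db_config["NAME"], db_config["HOST"], db_config["PORT"])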
-# Important: Changing this setting will not affect existing files. -AWS_S3_ENCRYPT_KEY = False - -# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. -# This is only relevant if AWS S3 KMS server-side encryption is enabled (above). -# AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - -# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their -# compressed size is smaller than their uncompressed size. -# Important: Changing this setting will not affect existing files. -AWS_S3_GZIP = True - -# The signature version to use for S3 requests. -AWS_S3_SIGNATURE_VERSION = None - -# If True, then files with the same name will overwrite each other. By default it's set to False to have -# extra characters appended. -AWS_S3_FILE_OVERWRITE = False - -# AWS Settings End -STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", -} - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - - -REDIS_URL = os.environ.get("REDIS_URL") - -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } -} - -RQ_QUEUES = { - "default": { - "USE_REDIS_CACHE": "default", - } -} - - -WEB_URL = os.environ.get("WEB_URL") - -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -redis_url = os.environ.get("REDIS_URL") -broker_url = ( - f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" -) - -CELERY_RESULT_BACKEND = broker_url -CELERY_BROKER_URL = broker_url - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") diff --git a/apiserver/plane/settings/test.py b/apiserver/plane/settings/test.py index 6c009997c..34ae16555 100644 --- a/apiserver/plane/settings/test.py +++ b/apiserver/plane/settings/test.py @@ -1,45 +1,9 @@ -from __future__ import absolute_import - +"""Test Settings""" from .common import * # noqa DEBUG = True -INSTALLED_APPS.append("plane.tests") +# Send it in a dummy outbox +EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" -if os.environ.get('GITHUB_WORKFLOW'): - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'github_actions', - 'USER': 'postgres', - 'PASSWORD': 'postgres', - 'HOST': '127.0.0.1', - 'PORT': '5432', - } - } -else: - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'plane_test', - 'USER': 'postgres', - 'PASSWORD': 'password123', - 'HOST': '127.0.0.1', - 'PORT': '5432', - } - } - -REDIS_HOST = 
"localhost" -REDIS_PORT = 6379 -REDIS_URL = False - -RQ_QUEUES = { - "default": { - "HOST": "localhost", - "PORT": 6379, - "DB": 0, - "DEFAULT_TIMEOUT": 360, - }, -} - -WEB_URL = "http://localhost:3000" +INSTALLED_APPS.append("plane.tests",) diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py index c4e7cec22..1d150d51d 100644 --- a/apiserver/plane/urls.py +++ b/apiserver/plane/urls.py @@ -15,6 +15,8 @@ urlpatterns = [ path("", include("plane.web.urls")), ] +if settings.ENABLE_API: + urlpatterns += path("api/v1/", include("plane.proxy.urls")), if settings.DEBUG: import debug_toolbar diff --git a/apiserver/templates/emails/invitations/project_invitation.html b/apiserver/templates/emails/invitations/project_invitation.html index ea2f1cdcf..630a5eab3 100644 --- a/apiserver/templates/emails/invitations/project_invitation.html +++ b/apiserver/templates/emails/invitations/project_invitation.html @@ -5,7 +5,7 @@ - {{ Inviter }} invited you to join {{ Workspace-Name }} on Plane + {{ first_name }} invited you to join {{ project_name }} on Plane diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml index 3bb65f7f5..c9ea84004 100644 --- a/deploy/selfhost/docker-compose.yml +++ b/deploy/selfhost/docker-compose.yml @@ -10,6 +10,11 @@ x-app-env : &app-env - SENTRY_DSN=${SENTRY_DSN:-""} - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} - DOCKERIZED=${DOCKERIZED:-1} + # BASE WEBHOOK + - ENABLE_WEBHOOK=${ENABLE_WEBHOOK:-1} + # BASE API + - ENABLE_API=${ENABLE_API:-1} + - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-http://localhost} # Gunicorn Workers - GUNICORN_WORKERS=${GUNICORN_WORKERS:-2} #DB SETTINGS @@ -53,6 +58,8 @@ x-app-env : &app-env - BUCKET_NAME=${BUCKET_NAME:-uploads} - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + + services: web: <<: *app-env @@ -141,14 +148,6 @@ services: volumes: - uploads:/export - createbuckets: - <<: *app-env - image: minio/mc - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - depends_on: - - plane-minio - # Comment this if you already have a reverse proxy running proxy: <<: *app-env diff --git a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env index 13bc0977f..8c8b446fe 100644 --- a/deploy/selfhost/variables.env +++ b/deploy/selfhost/variables.env @@ -12,6 +12,12 @@ NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces SENTRY_DSN="" GITHUB_CLIENT_SECRET="" DOCKERIZED=1 +CORS_ALLOWED_ORIGINS="http://localhost" + +# Webhook +ENABLE_WEBHOOK=1 +# API +ENABLE_API=1 #DB SETTINGS PGHOST=plane-db diff --git a/docker-compose-local.yml b/docker-compose-local.yml index 4f433e3ac..58cab3776 100644 --- a/docker-compose-local.yml +++ b/docker-compose-local.yml @@ -35,17 +35,6 @@ services: MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} - createbuckets: - image: minio/mc - networks: - - dev_env - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - env_file: - - .env - depends_on: - - plane-minio - plane-db: container_name: plane-db image: postgres:15.2-alpine diff --git a/docker-compose.yml b/docker-compose.yml index 0895aa1ae..e39f0d8d2 100644 --- a/docker-compose.yml +++ 
b/docker-compose.yml @@ -108,15 +108,6 @@ services: MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} - createbuckets: - image: minio/mc - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - env_file: - - .env - depends_on: - - plane-minio - # Comment this if you already have a reverse proxy running proxy: container_name: proxy diff --git a/packages/ui/src/dropdowns/custom-menu.tsx b/packages/ui/src/dropdowns/custom-menu.tsx index 0e8d50064..360c53ad6 100644 --- a/packages/ui/src/dropdowns/custom-menu.tsx +++ b/packages/ui/src/dropdowns/custom-menu.tsx @@ -138,7 +138,10 @@ const MenuItem: React.FC = (props) => { className={`w-full select-none truncate rounded px-1 py-1.5 text-left text-custom-text-200 hover:bg-custom-background-80 ${ active ? "bg-custom-background-80" : "" } ${className}`} - onClick={onClick} + onClick={(e) => { + close(); + onClick && onClick(e); + }} > {children} diff --git a/packages/ui/src/icons/priority-icon.tsx b/packages/ui/src/icons/priority-icon.tsx index 2c2e012e9..7d7f02694 100644 --- a/packages/ui/src/icons/priority-icon.tsx +++ b/packages/ui/src/icons/priority-icon.tsx @@ -15,24 +15,42 @@ import { IPriorityIcon } from "./type"; export const PriorityIcon: React.FC = ({ priority, className = "", + transparentBg = false }) => { - if (!className || className === "") className = "h-3.5 w-3.5"; + if (!className || className === "") className = "h-4 w-4"; // Convert to lowercase for string comparison const lowercasePriority = priority?.toLowerCase(); + //get priority icon + const getPriorityIcon = (): React.ReactNode => { + switch (lowercasePriority) { + case 'urgent': + return ; + case 'high': + return ; + case 'medium': + return ; + case 'low': + return ; + default: + return ; + } + }; + return ( <> - {lowercasePriority === "urgent" ? ( - - ) : lowercasePriority === "high" ? ( - - ) : lowercasePriority === "medium" ? ( - - ) : lowercasePriority === "low" ? ( - + { transparentBg ? ( + getPriorityIcon() ) : ( - +
+ { getPriorityIcon() } +
      )}
  );

diff --git a/packages/ui/src/icons/type.d.ts b/packages/ui/src/icons/type.d.ts
index 0261ab163..65b188e4c 100644
--- a/packages/ui/src/icons/type.d.ts
+++ b/packages/ui/src/icons/type.d.ts
@@ -7,4 +7,5 @@ export type TIssuePriorities = "urgent" | "high" | "medium" | "low" | "none";
 export interface IPriorityIcon {
   priority: TIssuePriorities | null;
   className?: string;
+  transparentBg?: boolean | false;
 }
diff --git a/web/components/api-token/ApiTokenForm/ApiTokenDescription.tsx b/web/components/api-token/ApiTokenForm/ApiTokenDescription.tsx
new file mode 100644
index 000000000..d17e4662e
--- /dev/null
+++ b/web/components/api-token/ApiTokenForm/ApiTokenDescription.tsx
@@ -0,0 +1,55 @@
+import { TextArea } from "@plane/ui";
+import { Control, Controller, FieldErrors } from "react-hook-form";
+import { IApiToken } from "types/api_token";
+import { IApiFormFields } from "./types";
+import { Dispatch, SetStateAction } from "react";
+
+interface IApiTokenDescription {
+  generatedToken: IApiToken | null | undefined;
+  control: Control;
+  focusDescription: boolean;
+  setFocusTitle: Dispatch>;
+  setFocusDescription: Dispatch>;
+}
+
+export const ApiTokenDescription = ({
+  generatedToken,
+  control,
+  focusDescription,
+  setFocusTitle,
+  setFocusDescription,
+}: IApiTokenDescription) => (
+
+  focusDescription ? (