diff --git a/apiserver/plane/authentication/__init__.py b/apiserver/plane/api/__init__.py similarity index 100% rename from apiserver/plane/authentication/__init__.py rename to apiserver/plane/api/__init__.py diff --git a/apiserver/plane/authentication/apps.py b/apiserver/plane/api/apps.py similarity index 65% rename from apiserver/plane/authentication/apps.py rename to apiserver/plane/api/apps.py index de6100e0f..292ad9344 100644 --- a/apiserver/plane/authentication/apps.py +++ b/apiserver/plane/api/apps.py @@ -2,4 +2,4 @@ from django.apps import AppConfig class ApiConfig(AppConfig): - name = "plane.authentication" + name = "plane.api" \ No newline at end of file diff --git a/apiserver/plane/proxy/__init__.py b/apiserver/plane/api/middleware/__init__.py similarity index 100% rename from apiserver/plane/proxy/__init__.py rename to apiserver/plane/api/middleware/__init__.py diff --git a/apiserver/plane/api/middleware/api_authentication.py b/apiserver/plane/api/middleware/api_authentication.py new file mode 100644 index 000000000..1b2c03318 --- /dev/null +++ b/apiserver/plane/api/middleware/api_authentication.py @@ -0,0 +1,47 @@ +# Django imports +from django.utils import timezone +from django.db.models import Q + +# Third party imports +from rest_framework import authentication +from rest_framework.exceptions import AuthenticationFailed + +# Module imports +from plane.db.models import APIToken + + +class APIKeyAuthentication(authentication.BaseAuthentication): + """ + Authentication with an API Key + """ + + www_authenticate_realm = "api" + media_type = "application/json" + auth_header_name = "X-Api-Key" + + def get_api_token(self, request): + return request.headers.get(self.auth_header_name) + + def validate_api_token(self, token): + try: + api_token = APIToken.objects.get( + Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), + token=token, + is_active=True, + ) + except APIToken.DoesNotExist: + raise AuthenticationFailed("Given API token is not 
valid") + + # save api token last used + api_token.last_used = timezone.now() + api_token.save(update_fields=["last_used"]) + return (api_token.user, api_token.token) + + def authenticate(self, request): + token = self.get_api_token(request=request) + if not token: + return None + + # Validate the API token + user, token = self.validate_api_token(token) + return user, token \ No newline at end of file diff --git a/apiserver/plane/api/rate_limit.py b/apiserver/plane/api/rate_limit.py new file mode 100644 index 000000000..617baa62c --- /dev/null +++ b/apiserver/plane/api/rate_limit.py @@ -0,0 +1,45 @@ +from django.utils import timezone +from rest_framework.throttling import SimpleRateThrottle + + +class ApiKeyRateThrottle(SimpleRateThrottle): + scope = 'api_key' + + def get_cache_key(self, request, view): + # Retrieve the API key from the request header + api_key = request.headers.get('X-Api-Key') + if not api_key: + return None # Allow the request if there's no API key + + # Use the API key as part of the cache key + return f'{self.scope}:{api_key}' + + def allow_request(self, request, view): + # Calculate the current time as a Unix timestamp + now = timezone.now().timestamp() + + # Use the parent class's method to check if the request is allowed + allowed = super().allow_request(request, view) + + if allowed: + # Calculate the remaining limit and reset time + history = self.cache.get(self.key, []) + + # Remove old histories + while history and history[-1] <= now - self.duration: + history.pop() + + # Calculate the requests + num_requests = len(history) + + # Check available requests + available = self.num_requests - num_requests + + # Unix timestamp for when the rate limit will reset + reset_time = int(now + self.duration) + + # Add headers + request.META['X-RateLimit-Remaining'] = max(0, available) + request.META['X-RateLimit-Reset'] = reset_time + + return allowed \ No newline at end of file diff --git a/apiserver/plane/api/serializers/__init__.py 
b/apiserver/plane/api/serializers/__init__.py new file mode 100644 index 000000000..1c08e6f86 --- /dev/null +++ b/apiserver/plane/api/serializers/__init__.py @@ -0,0 +1,16 @@ +from .user import UserLiteSerializer +from .workspace import WorkspaceLiteSerializer +from .project import ProjectSerializer, ProjectLiteSerializer +from .issue import ( + IssueSerializer, + LabelSerializer, + IssueLinkSerializer, + IssueAttachmentSerializer, + IssueCommentSerializer, + IssueAttachmentSerializer, + IssueActivitySerializer, +) +from .state import StateLiteSerializer, StateSerializer +from .cycle import CycleSerializer, CycleIssueSerializer +from .module import ModuleSerializer, ModuleIssueSerializer +from .inbox import InboxIssueSerializer \ No newline at end of file diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py new file mode 100644 index 000000000..b96422501 --- /dev/null +++ b/apiserver/plane/api/serializers/base.py @@ -0,0 +1,105 @@ +# Third party imports +from rest_framework import serializers + + +class BaseSerializer(serializers.ModelSerializer): + id = serializers.PrimaryKeyRelatedField(read_only=True) + + def __init__(self, *args, **kwargs): + # If 'fields' is provided in the arguments, remove it and store it separately. + # This is done so as not to pass this custom argument up to the superclass. + fields = kwargs.pop("fields", []) + self.expand = kwargs.pop("expand", []) or [] + + # Call the initialization of the superclass. + super().__init__(*args, **kwargs) + + # If 'fields' was provided, filter the fields of the serializer accordingly. + if fields: + self.fields = self._filter_fields(fields=fields) + + def _filter_fields(self, fields): + """ + Adjust the serializer's fields based on the provided 'fields' list. + + :param fields: List or dictionary specifying which fields to include in the serializer. + :return: The updated fields for the serializer. + """ + # Check each field_name in the provided fields. 
+ for field_name in fields: + # If the field is a dictionary (indicating nested fields), + # loop through its keys and values. + if isinstance(field_name, dict): + for key, value in field_name.items(): + # If the value of this nested field is a list, + # perform a recursive filter on it. + if isinstance(value, list): + self._filter_fields(self.fields[key], value) + + # Create a list to store allowed fields. + allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. + elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. + existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. + for field_name in existing - allowed: + self.fields.pop(field_name) + + return self.fields + + def to_representation(self, instance): + response = super().to_representation(instance) + + # Ensure 'expand' is iterable before processing + if self.expand: + for expand in self.expand: + if expand in self.fields: + # Import all the expandable serializers + from . 
import ( + WorkspaceLiteSerializer, + ProjectLiteSerializer, + UserLiteSerializer, + StateLiteSerializer, + IssueSerializer, + ) + + # Expansion mapper + expansion = { + "user": UserLiteSerializer, + "workspace": WorkspaceLiteSerializer, + "project": ProjectLiteSerializer, + "default_assignee": UserLiteSerializer, + "project_lead": UserLiteSerializer, + "state": StateLiteSerializer, + "created_by": UserLiteSerializer, + "issue": IssueSerializer, + "actor": UserLiteSerializer, + "owned_by": UserLiteSerializer, + "members": UserLiteSerializer, + } + # Check if field in expansion then expand the field + if expand in expansion: + if isinstance(response.get(expand), list): + exp_serializer = expansion[expand]( + getattr(instance, expand), many=True + ) + else: + exp_serializer = expansion[expand]( + getattr(instance, expand) + ) + response[expand] = exp_serializer.data + else: + # You might need to handle this case differently + response[expand] = getattr(instance, f"{expand}_id", None) + + return response \ No newline at end of file diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py new file mode 100644 index 000000000..b3e7708ef --- /dev/null +++ b/apiserver/plane/api/serializers/cycle.py @@ -0,0 +1,49 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from plane.db.models import Cycle, CycleIssue + + +class CycleSerializer(BaseSerializer): + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + total_estimates = serializers.IntegerField(read_only=True) + completed_estimates = serializers.IntegerField(read_only=True) + started_estimates = 
serializers.IntegerField(read_only=True) + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed end date") + return data + + class Meta: + model = Cycle + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "owned_by", + ] + + +class CycleIssueSerializer(BaseSerializer): + sub_issues_count = serializers.IntegerField(read_only=True) + + class Meta: + model = CycleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "cycle", + ] \ No newline at end of file diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py new file mode 100644 index 000000000..df3fb9eb5 --- /dev/null +++ b/apiserver/plane/api/serializers/inbox.py @@ -0,0 +1,13 @@ +# Module improts +from .base import BaseSerializer +from plane.db.models import InboxIssue + +class InboxIssueSerializer(BaseSerializer): + + class Meta: + model = InboxIssue + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + ] \ No newline at end of file diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py new file mode 100644 index 000000000..8fcab0a38 --- /dev/null +++ b/apiserver/plane/api/serializers/issue.py @@ -0,0 +1,319 @@ +# Django imports +from django.utils import timezone + +# Third party imports +from rest_framework import serializers + +# Module imports +from plane.db.models import ( + User, + Issue, + State, + IssueAssignee, + Label, + IssueLabel, + IssueLink, + IssueComment, + IssueAttachment, + IssueActivity, + ProjectMember, +) +from .base import BaseSerializer + + +class IssueSerializer(BaseSerializer): + assignees = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=User.objects.values_list("id", flat=True) + ), + write_only=True, + 
required=False, + ) + + labels = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=Label.objects.values_list("id", flat=True) + ), + write_only=True, + required=False, + ) + + class Meta: + model = Issue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("target_date", None) is not None + and data.get("start_date", None) > data.get("target_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed target date") + + # Validate assignees are from project + if data.get("assignees", []): + print(data.get("assignees")) + data["assignees"] = ProjectMember.objects.filter( + project_id=self.context.get("project_id"), + member_id__in=data["assignees"], + ).values_list("member_id", flat=True) + + # Validate labels are from project + if data.get("labels", []): + data["labels"] = Label.objects.filter( + project_id=self.context.get("project_id"), + id__in=data["labels"], + ).values_list("id", flat=True) + + # Check state is from the project only else raise validation error + if ( + data.get("state") + and not State.objects.filter( + project_id=self.context.get("project_id"), pk=data.get("state") + ).exists() + ): + raise serializers.ValidationError( + "State is not valid please pass a valid state_id" + ) + + # Check parent issue is from workspace as it can be cross workspace + if ( + data.get("parent") + and not Issue.objects.filter( + workspce_id=self.context.get("workspace_id"), pk=data.get("parent") + ).exists() + ): + raise serializers.ValidationError( + "Parent is not valid issue_id please pass a valid issue_id" + ) + + return data + + def create(self, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + project_id = self.context["project_id"] + workspace_id = 
self.context["workspace_id"] + default_assignee_id = self.context["default_assignee_id"] + + issue = Issue.objects.create(**validated_data, project_id=project_id) + + # Issue Audit Users + created_by_id = issue.created_by_id + updated_by_id = issue.updated_by_id + + if assignees is not None and len(assignees): + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee_id=assignee_id, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for assignee_id in assignees + ], + batch_size=10, + ) + else: + # Then assign it to default assignee + if default_assignee_id is not None: + IssueAssignee.objects.create( + assignee_id=default_assignee_id, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + + if labels is not None and len(labels): + IssueLabel.objects.bulk_create( + [ + IssueLabel( + label_id=label_id, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for label_id in labels + ], + batch_size=10, + ) + + return issue + + def update(self, instance, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + # Related models + project_id = instance.project_id + workspace_id = instance.workspace_id + created_by_id = instance.created_by_id + updated_by_id = instance.updated_by_id + + if assignees is not None: + IssueAssignee.objects.filter(issue=instance).delete() + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee_id=assignee_id, + issue=instance, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for assignee_id in assignees + ], + batch_size=10, + ) + + if labels is not None: + IssueLabel.objects.filter(issue=instance).delete() + IssueLabel.objects.bulk_create( + 
[ + IssueLabel( + label_id=label_id, + issue=instance, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for label_id in labels + ], + batch_size=10, + ) + + # Time updation occues even when other related models are updated + instance.updated_at = timezone.now() + return super().update(instance, validated_data) + + def to_representation(self, instance): + data = super().to_representation(instance) + if "assignees" in self.fields: + if "assignees" in self.expand: + from .user import UserLiteSerializer + + data["assignees"] = UserLiteSerializer( + instance.assignees.all(), many=True + ).data + else: + data["assignees"] = [ + str(assignee.id) for assignee in instance.assignees.all() + ] + if "labels" in self.fields: + if "labels" in self.expand: + data["labels"] = LabelSerializer(instance.labels.all(), many=True).data + else: + data["labels"] = [str(label.id) for label in instance.labels.all()] + + return data + + +class LabelSerializer(BaseSerializer): + class Meta: + model = Label + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + ] + + +class IssueLinkSerializer(BaseSerializer): + class Meta: + model = IssueLink + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "issue", + ] + + # Validation if url already exists + def create(self, validated_data): + if IssueLink.objects.filter( + url=validated_data.get("url"), issue_id=validated_data.get("issue_id") + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + return IssueLink.objects.create(**validated_data) + + +class IssueAttachmentSerializer(BaseSerializer): + class Meta: + model = IssueAttachment + fields = "__all__" + read_only_fields = [ + "created_by", + "updated_by", + "created_at", + "updated_at", + "workspace", + "project", + "issue", + ] + + +class 
IssueCommentSerializer(BaseSerializer): + is_member = serializers.BooleanField(read_only=True) + + class Meta: + model = IssueComment + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "issue", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueAttachmentSerializer(BaseSerializer): + class Meta: + model = IssueAttachment + fields = "__all__" + read_only_fields = [ + "id", + "created_by", + "updated_by", + "created_at", + "updated_at", + "workspace", + "project", + "issue", + ] + + +class IssueActivitySerializer(BaseSerializer): + class Meta: + model = IssueActivity + fields = "__all__" + exclude = [ + "created_by", + "udpated_by", + ] diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py new file mode 100644 index 000000000..fb2f2c870 --- /dev/null +++ b/apiserver/plane/api/serializers/module.py @@ -0,0 +1,155 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from plane.db.models import ( + User, + Module, + ModuleLink, + ModuleMember, + ModuleIssue, + ProjectMember, +) + + +class ModuleSerializer(BaseSerializer): + members = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=User.objects.values_list("id", flat=True) + ), + write_only=True, + required=False, + ) + is_favorite = serializers.BooleanField(read_only=True) + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def 
to_representation(self, instance): + data = super().to_representation(instance) + data["members"] = [str(member.id) for member in instance.members.all()] + return data + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("target_date", None) is not None + and data.get("start_date", None) > data.get("target_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed target date") + + if data.get("members", []): + print(data.get("members")) + data["members"] = ProjectMember.objects.filter( + project_id=self.context.get("project_id"), + member_id__in=data["members"], + ).values_list("member_id", flat=True) + + return data + + def create(self, validated_data): + members = validated_data.pop("members", None) + + project = self.context["project"] + + module = Module.objects.create(**validated_data, project=project) + + if members is not None: + ModuleMember.objects.bulk_create( + [ + ModuleMember( + module=module, + member=member, + project=project, + workspace=project.workspace, + created_by=module.created_by, + updated_by=module.updated_by, + ) + for member in members + ], + batch_size=10, + ignore_conflicts=True, + ) + + return module + + def update(self, instance, validated_data): + members = validated_data.pop("members", None) + + if members is not None: + ModuleMember.objects.filter(module=instance).delete() + ModuleMember.objects.bulk_create( + [ + ModuleMember( + module=instance, + member=member, + project=instance.project, + workspace=instance.project.workspace, + created_by=instance.created_by, + updated_by=instance.updated_by, + ) + for member in members + ], + batch_size=10, + ignore_conflicts=True, + ) + + return super().update(instance, validated_data) + + +class ModuleIssueSerializer(BaseSerializer): + sub_issues_count = serializers.IntegerField(read_only=True) + + class Meta: + model = ModuleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + 
"updated_by", + "created_at", + "updated_at", + "module", + ] + + +class ModuleLinkSerializer(BaseSerializer): + class Meta: + model = ModuleLink + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "module", + ] + + # Validation if url already exists + def create(self, validated_data): + if ModuleLink.objects.filter( + url=validated_data.get("url"), module_id=validated_data.get("module_id") + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + return ModuleLink.objects.create(**validated_data) \ No newline at end of file diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py new file mode 100644 index 000000000..61f4d6f60 --- /dev/null +++ b/apiserver/plane/api/serializers/project.py @@ -0,0 +1,87 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate +from .base import BaseSerializer + + +class ProjectSerializer(BaseSerializer): + + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) + + class Meta: + model = Project + fields = "__all__" + read_only_fields = [ + "workspace", + "id", + ] + + def validate(self, data): + # Check project lead should be a member of the workspace + if ( + data.get("project_lead", None) is not None + and not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("project_lead"), + ).exists() + ): + raise serializers.ValidationError( + "Project lead should be 
a user in the workspace" + ) + + # Check default assignee should be a member of the workspace + if ( + data.get("default_assignee", None) is not None + and not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("default_assignee"), + ).exists() + ): + raise serializers.ValidationError( + "Default assignee should be a user in the workspace" + ) + + return data + + def create(self, validated_data): + identifier = validated_data.get("identifier", "").strip().upper() + if identifier == "": + raise serializers.ValidationError(detail="Project Identifier is required") + + if ProjectIdentifier.objects.filter( + name=identifier, workspace_id=self.context["workspace_id"] + ).exists(): + raise serializers.ValidationError(detail="Project Identifier is taken") + + project = Project.objects.create( + **validated_data, workspace_id=self.context["workspace_id"] + ) + _ = ProjectIdentifier.objects.create( + name=project.identifier, + project=project, + workspace_id=self.context["workspace_id"], + ) + return project + + +class ProjectLiteSerializer(BaseSerializer): + class Meta: + model = Project + fields = [ + "id", + "identifier", + "name", + "cover_image", + "icon_prop", + "emoji", + "description", + ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py new file mode 100644 index 000000000..4c7f05ab8 --- /dev/null +++ b/apiserver/plane/api/serializers/state.py @@ -0,0 +1,33 @@ +# Module imports +from .base import BaseSerializer +from plane.db.models import State + + +class StateSerializer(BaseSerializer): + def validate(self, data): + # If the default is being provided then make all other states default False + if data.get("default", False): + State.objects.filter(project_id=self.context.get("project_id")).update( + default=False + ) + return data + + class Meta: + model = State + fields = "__all__" + read_only_fields = [ + 
"workspace", + "project", + ] + + +class StateLiteSerializer(BaseSerializer): + class Meta: + model = State + fields = [ + "id", + "name", + "color", + "group", + ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py new file mode 100644 index 000000000..e5a77da93 --- /dev/null +++ b/apiserver/plane/api/serializers/user.py @@ -0,0 +1,20 @@ +# Module imports +from plane.db.models import User +from .base import BaseSerializer + + +class UserLiteSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "first_name", + "last_name", + "avatar", + "is_bot", + "display_name", + ] + read_only_fields = [ + "id", + "is_bot", + ] \ No newline at end of file diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py new file mode 100644 index 000000000..c4c5caceb --- /dev/null +++ b/apiserver/plane/api/serializers/workspace.py @@ -0,0 +1,15 @@ +# Module imports +from plane.db.models import Workspace +from .base import BaseSerializer + + +class WorkspaceLiteSerializer(BaseSerializer): + """Lite serializer with only required fields""" + class Meta: + model = Workspace + fields = [ + "name", + "slug", + "id", + ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/proxy/urls/__init__.py b/apiserver/plane/api/urls/__init__.py similarity index 84% rename from apiserver/plane/proxy/urls/__init__.py rename to apiserver/plane/api/urls/__init__.py index 2ba6385d5..a5ef0f5f1 100644 --- a/apiserver/plane/proxy/urls/__init__.py +++ b/apiserver/plane/api/urls/__init__.py @@ -1,13 +1,15 @@ -from .cycle import urlpatterns as cycle_patterns -from .inbox import urlpatterns as inbox_patterns -from .issue import urlpatterns as issue_patterns -from .module import urlpatterns as module_patterns from .project import urlpatterns as project_patterns +from .state import urlpatterns as state_patterns +from 
.issue import urlpatterns as issue_patterns +from .cycle import urlpatterns as cycle_patterns +from .module import urlpatterns as module_patterns +from .inbox import urlpatterns as inbox_patterns urlpatterns = [ - *cycle_patterns, - *inbox_patterns, - *issue_patterns, - *module_patterns, *project_patterns, -] + *state_patterns, + *issue_patterns, + *cycle_patterns, + *module_patterns, + *inbox_patterns, +] \ No newline at end of file diff --git a/apiserver/plane/proxy/urls/cycle.py b/apiserver/plane/api/urls/cycle.py similarity index 96% rename from apiserver/plane/proxy/urls/cycle.py rename to apiserver/plane/api/urls/cycle.py index e4f7cfe78..b859c28b2 100644 --- a/apiserver/plane/proxy/urls/cycle.py +++ b/apiserver/plane/api/urls/cycle.py @@ -1,6 +1,6 @@ from django.urls import path -from plane.proxy.views.cycle import ( +from plane.api.views.cycle import ( CycleAPIEndpoint, CycleIssueAPIEndpoint, TransferCycleIssueAPIEndpoint, @@ -32,4 +32,4 @@ urlpatterns = [ TransferCycleIssueAPIEndpoint.as_view(), name="transfer-issues", ), -] +] \ No newline at end of file diff --git a/apiserver/plane/proxy/urls/inbox.py b/apiserver/plane/api/urls/inbox.py similarity index 88% rename from apiserver/plane/proxy/urls/inbox.py rename to apiserver/plane/api/urls/inbox.py index 39a630ee8..3284fd81b 100644 --- a/apiserver/plane/proxy/urls/inbox.py +++ b/apiserver/plane/api/urls/inbox.py @@ -1,6 +1,6 @@ from django.urls import path -from plane.proxy.views import InboxIssueAPIEndpoint +from plane.api.views import InboxIssueAPIEndpoint urlpatterns = [ @@ -14,4 +14,4 @@ urlpatterns = [ InboxIssueAPIEndpoint.as_view(), name="inbox-issue", ), -] +] \ No newline at end of file diff --git a/apiserver/plane/proxy/urls/issue.py b/apiserver/plane/api/urls/issue.py similarity index 65% rename from apiserver/plane/proxy/urls/issue.py rename to apiserver/plane/api/urls/issue.py index 0fb236521..910fda5e1 100644 --- a/apiserver/plane/proxy/urls/issue.py +++ b/apiserver/plane/api/urls/issue.py 
@@ -1,51 +1,62 @@ from django.urls import path -from plane.proxy.views import ( +from plane.api.views import ( IssueAPIEndpoint, LabelAPIEndpoint, IssueLinkAPIEndpoint, IssueCommentAPIEndpoint, + IssueActivityAPIEndpoint, ) urlpatterns = [ path( "workspaces//projects//issues/", IssueAPIEndpoint.as_view(), - name="issues", + name="issue", ), path( "workspaces//projects//issues//", IssueAPIEndpoint.as_view(), - name="issues", + name="issue", ), path( "workspaces//projects//issue-labels/", LabelAPIEndpoint.as_view(), - name="labels", + name="label", ), path( "workspaces//projects//issue-labels//", LabelAPIEndpoint.as_view(), - name="labels", + name="label", ), path( "workspaces//projects//issues//issue-links/", IssueLinkAPIEndpoint.as_view(), - name="issue-links", + name="link", ), path( "workspaces//projects//issues//issue-links//", IssueLinkAPIEndpoint.as_view(), - name="issue-links", + name="link", ), path( "workspaces//projects//issues//comments/", - IssueCommentAPIEndpoint.as_view(), - name="project-issue-comment", + IssueCommentAPIEndpoint.as_view(), + name="comment", ), path( "workspaces//projects//issues//comments//", IssueCommentAPIEndpoint.as_view(), - name="project-issue-comment", + name="comment", + ), + path( + "workspaces//projects//issues//activites/", + IssueActivityAPIEndpoint.as_view(), + name="activity", + ), + path( + "workspaces//projects//issues//activites//", + IssueActivityAPIEndpoint.as_view(), + name="activity", ), ] diff --git a/apiserver/plane/proxy/urls/module.py b/apiserver/plane/api/urls/module.py similarity index 90% rename from apiserver/plane/proxy/urls/module.py rename to apiserver/plane/api/urls/module.py index 289c8596b..7860a0fce 100644 --- a/apiserver/plane/proxy/urls/module.py +++ b/apiserver/plane/api/urls/module.py @@ -1,6 +1,6 @@ from django.urls import path -from plane.proxy.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint +from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint urlpatterns = [ path( @@ 
-23,4 +23,4 @@ urlpatterns = [ ModuleIssueAPIEndpoint.as_view(), name="module-issues", ), -] +] \ No newline at end of file diff --git a/apiserver/plane/proxy/urls/project.py b/apiserver/plane/api/urls/project.py similarity index 85% rename from apiserver/plane/proxy/urls/project.py rename to apiserver/plane/api/urls/project.py index c97625197..ffd2af843 100644 --- a/apiserver/plane/proxy/urls/project.py +++ b/apiserver/plane/api/urls/project.py @@ -1,6 +1,6 @@ from django.urls import path -from plane.proxy.views import ProjectAPIEndpoint +from plane.api.views import ProjectAPIEndpoint urlpatterns = [ path( @@ -13,4 +13,4 @@ urlpatterns = [ ProjectAPIEndpoint.as_view(), name="project", ), -] +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/state.py b/apiserver/plane/api/urls/state.py new file mode 100644 index 000000000..cf5eefd53 --- /dev/null +++ b/apiserver/plane/api/urls/state.py @@ -0,0 +1,11 @@ +from django.urls import path + +from plane.api.views import StateAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects//states/", + StateAPIEndpoint.as_view(), + name="states", + ), +] \ No newline at end of file diff --git a/apiserver/plane/proxy/views/__init__.py b/apiserver/plane/api/views/__init__.py similarity index 84% rename from apiserver/plane/proxy/views/__init__.py rename to apiserver/plane/api/views/__init__.py index fcbd5182b..84d8dcabb 100644 --- a/apiserver/plane/proxy/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -1,10 +1,13 @@ from .project import ProjectAPIEndpoint +from .state import StateAPIEndpoint + from .issue import ( IssueAPIEndpoint, LabelAPIEndpoint, IssueLinkAPIEndpoint, IssueCommentAPIEndpoint, + IssueActivityAPIEndpoint, ) from .cycle import ( diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py new file mode 100644 index 000000000..6cd8b2356 --- /dev/null +++ b/apiserver/plane/api/views/base.py @@ -0,0 +1,172 @@ +# Python imports +import zoneinfo +import json + 
+# Django imports +from django.conf import settings +from django.db import IntegrityError +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.utils import timezone +from django.core.serializers.json import DjangoJSONEncoder + +# Third party imports +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.permissions import IsAuthenticated +from rest_framework import status +from sentry_sdk import capture_exception + +# Module imports +from plane.api.middleware.api_authentication import APIKeyAuthentication +from plane.api.rate_limit import ApiKeyRateThrottle +from plane.utils.paginator import BasePaginator +from plane.bgtasks.webhook_task import send_webhook + + +class TimezoneMixin: + """ + This enables timezone conversion according + to the user set timezone + """ + + def initial(self, request, *args, **kwargs): + super().initial(request, *args, **kwargs) + if request.user.is_authenticated: + timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone)) + else: + timezone.deactivate() + +class WebhookMixin: + webhook_event = None + + def finalize_response(self, request, response, *args, **kwargs): + response = super().finalize_response(request, response, *args, **kwargs) + + if ( + self.webhook_event + and self.request.method in ["POST", "PATCH", "DELETE"] + and response.status_code in [200, 201, 204] + ): + send_webhook.delay( + event=self.webhook_event, + event_data=json.dumps(response.data, cls=DjangoJSONEncoder), + action=self.request.method, + slug=self.workspace_slug, + ) + + return response + + + +class BaseAPIView(TimezoneMixin, APIView, BasePaginator): + authentication_classes = [ + APIKeyAuthentication, + ] + + permission_classes = [ + IsAuthenticated, + ] + + throttle_classes = [ + ApiKeyRateThrottle, + ] + + def filter_queryset(self, queryset): + for backend in list(self.filter_backends): + queryset = backend().filter_queryset(self.request, queryset, self) + return 
queryset + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. + """ + try: + response = super().handle_exception(exc) + return response + except Exception as e: + if isinstance(e, IntegrityError): + return Response( + {"error": "The payload is not valid"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ValidationError): + return Response( + { + "error": "The provided payload is not valid please try with a valid payload" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ObjectDoesNotExist): + model_name = str(exc).split(" matching query does not exist.")[0] + return Response( + {"error": f"{model_name} does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + + if isinstance(e, KeyError): + return Response( + {"error": f"key {e} does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if settings.DEBUG: + print(e) + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + def dispatch(self, request, *args, **kwargs): + try: + response = super().dispatch(request, *args, **kwargs) + if settings.DEBUG: + from django.db import connection + + print( + f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + ) + return response + except Exception as exc: + response = self.handle_exception(exc) + return exc + + def finalize_response(self, request, response, *args, **kwargs): + # Call super to get the default response + response = super().finalize_response(request, response, *args, **kwargs) + + # Add custom headers if they exist in the request META + ratelimit_remaining = request.META.get('X-RateLimit-Remaining') + if ratelimit_remaining is not None: + response['X-RateLimit-Remaining'] = ratelimit_remaining + + ratelimit_reset = request.META.get('X-RateLimit-Reset') + if ratelimit_reset is not None: + 
response['X-RateLimit-Reset'] = ratelimit_reset + + return response + + @property + def workspace_slug(self): + return self.kwargs.get("slug", None) + + @property + def project_id(self): + return self.kwargs.get("project_id", None) + + @property + def fields(self): + fields = [ + field for field in self.request.GET.get("fields", "").split(",") if field + ] + return fields if fields else None + + @property + def expand(self): + expand = [ + expand for expand in self.request.GET.get("expand", "").split(",") if expand + ] + return expand if expand else None \ No newline at end of file diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py new file mode 100644 index 000000000..f9ed5a7a4 --- /dev/null +++ b/apiserver/plane/api/views/cycle.py @@ -0,0 +1,554 @@ +# Python imports +import json + +# Django imports +from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func +from django.utils import timezone +from django.core import serializers + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .base import BaseAPIView, WebhookMixin +from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment +from plane.app.permissions import ProjectEntityPermission +from plane.api.serializers import ( + CycleSerializer, + CycleIssueSerializer, + IssueSerializer, +) +from plane.bgtasks.issue_activites_task import issue_activity + + +class CycleAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to cycle. 
+ + """ + + serializer_class = CycleSerializer + model = Cycle + webhook_event = "cycle" + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + Cycle.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("owned_by") + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + 
issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + def get(self, request, slug, project_id, pk=None): + if pk: + queryset = self.get_queryset().get(pk=pk) + data = CycleSerializer( + queryset, + fields=self.fields, + expand=self.expand, + ).data + return Response( + data, + status=status.HTTP_200_OK, + ) + queryset = self.get_queryset() + cycle_view = request.GET.get("cycle_view", "all") + queryset = queryset.order_by("-is_favorite", "-created_at") + + # Current Cycle + if cycle_view == "current": + queryset = queryset.filter( + start_date__lte=timezone.now(), + end_date__gte=timezone.now(), + ) + data = CycleSerializer( + queryset, many=True, fields=self.fields, expand=self.expand + ).data + return Response(data, status=status.HTTP_200_OK) + + # Upcoming Cycles + if cycle_view == "upcoming": + queryset = queryset.filter(start_date__gt=timezone.now()) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Completed Cycles + if cycle_view == "completed": + queryset = queryset.filter(end_date__lt=timezone.now()) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Draft Cycles + if cycle_view == "draft": + queryset = queryset.filter( + end_date=None, + start_date=None, + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + 
cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Incomplete Cycles + if cycle_view == "incomplete": + queryset = queryset.filter( + Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id): + if ( + request.data.get("start_date", None) is None + and request.data.get("end_date", None) is None + ) or ( + request.data.get("start_date", None) is not None + and request.data.get("end_date", None) is not None + ): + serializer = CycleSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + owned_by=request.user, + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + { + "error": "Both start date and end date are either required or are to be null" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + def patch(self, request, slug, project_id, pk): + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + + request_data = request.data + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + if "sort_order" in request_data: + # Can only change sort order + request_data = { + "sort_order": request_data.get("sort_order", cycle.sort_order) + } + else: + return Response( + { + "error": "The Cycle has already been completed so it cannot be edited" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = CycleSerializer(cycle, data=request.data, partial=True) + if serializer.is_valid(): + 
serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, pk): + cycle_issues = list( + CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( + "issue", flat=True + ) + ) + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(pk), + "cycle_name": str(cycle.name), + "issues": [str(issue_id) for issue_id in cycle_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + # Delete the cycle + cycle.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to cycle issues. 
+ + """ + + serializer_class = CycleIssueSerializer + model = CycleIssue + webhook_event = "cycle" + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + CycleIssue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .filter(cycle_id=self.kwargs.get("cycle_id")) + .select_related("project") + .select_related("workspace") + .select_related("cycle") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + def get(self, request, slug, project_id, cycle_id): + order_by = request.GET.get("order_by", "created_at") + issues = ( + Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_cycle__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + return self.paginate( + request=request, + queryset=(issues), + on_results=lambda issues: CycleSerializer( 
+ issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, cycle_id): + issues = request.data.get("issues", []) + + if not issues: + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + return Response( + { + "error": "The Cycle has already been completed so no new issues can be added" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + issues = Issue.objects.filter( + pk__in=issues, workspace__slug=slug, project_id=project_id + ).values_list("id", flat=True) + + # Get all CycleIssues already created + cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) + update_cycle_issue_activity = [] + record_to_create = [] + records_to_update = [] + + for issue in issues: + cycle_issue = [ + cycle_issue + for cycle_issue in cycle_issues + if str(cycle_issue.issue_id) in issues + ] + # Update only when cycle changes + if len(cycle_issue): + if cycle_issue[0].cycle_id != cycle_id: + update_cycle_issue_activity.append( + { + "old_cycle_id": str(cycle_issue[0].cycle_id), + "new_cycle_id": str(cycle_id), + "issue_id": str(cycle_issue[0].issue_id), + } + ) + cycle_issue[0].cycle_id = cycle_id + records_to_update.append(cycle_issue[0]) + else: + record_to_create.append( + CycleIssue( + project_id=project_id, + workspace=cycle.workspace, + created_by=request.user, + updated_by=request.user, + cycle=cycle, + issue_id=issue, + ) + ) + + CycleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + CycleIssue.objects.bulk_update( + records_to_update, + ["cycle"], + batch_size=10, + ) + + # Capture Issue Activity + issue_activity.delay( + type="cycle.activity.created", + requested_data=json.dumps({"cycles_list": issues}), + actor_id=str(self.request.user.id), + 
issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_cycle_issues": update_cycle_issue_activity, + "created_cycle_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + epoch=int(timezone.now().timestamp()), + ) + + # Return all Cycle Issues + return Response( + CycleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug, project_id, cycle_id, pk): + cycle_issue = CycleIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id + ) + issue_id = cycle_issue.issue_id + cycle_issue.delete() + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(self.kwargs.get("cycle_id")), + "issues": [str(issue_id)], + } + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class TransferCycleIssueAPIEndpoint(BaseAPIView): + """ + This viewset provides `create` actions for transfering the issues into a particular cycle. 
+ + """ + + permission_classes = [ + ProjectEntityPermission, + ] + + def post(self, request, slug, project_id, cycle_id): + new_cycle_id = request.data.get("new_cycle_id", False) + + if not new_cycle_id: + return Response( + {"error": "New Cycle Id is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + new_cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=new_cycle_id + ) + + if ( + new_cycle.end_date is not None + and new_cycle.end_date < timezone.now().date() + ): + return Response( + { + "error": "The cycle where the issues are transferred is already completed" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + cycle_issues = CycleIssue.objects.filter( + cycle_id=cycle_id, + project_id=project_id, + workspace__slug=slug, + issue__state__group__in=["backlog", "unstarted", "started"], + ) + + updated_cycles = [] + for cycle_issue in cycle_issues: + cycle_issue.cycle_id = new_cycle_id + updated_cycles.append(cycle_issue) + + cycle_issues = CycleIssue.objects.bulk_update( + updated_cycles, ["cycle_id"], batch_size=100 + ) + + return Response({"message": "Success"}, status=status.HTTP_200_OK) \ No newline at end of file diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py new file mode 100644 index 000000000..e670578d1 --- /dev/null +++ b/apiserver/plane/api/views/inbox.py @@ -0,0 +1,275 @@ +# Python imports +import json + +# Django improts +from django.utils import timezone +from django.db.models import Q +from django.core.serializers.json import DjangoJSONEncoder + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from .base import BaseAPIView +from plane.app.permissions import ProjectLitePermission +from plane.api.serializers import InboxIssueSerializer, IssueSerializer +from plane.db.models import InboxIssue, Issue, State, ProjectMember +from plane.bgtasks.issue_activites_task import issue_activity + + +class 
InboxIssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to inbox issues. + + """ + + permission_classes = [ + ProjectLitePermission, + ] + + serializer_class = InboxIssueSerializer + model = InboxIssue + + filterset_fields = [ + "status", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + inbox_id=self.kwargs.get("inbox_id"), + ) + .select_related("issue", "workspace", "project") + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + def get(self, request, slug, project_id, inbox_id, pk=None): + if pk: + issue_queryset = self.get_queryset().get(pk=pk) + issues_data = InboxIssueSerializer( + issue_queryset, + fields=self.fields, + expand=self.expand, + ).data + return Response( + issues_data, + status=status.HTTP_200_OK, + ) + issue_queryset = self.get_queryset() + return self.paginate( + request=request, + queryset=(issue_queryset), + on_results=lambda inbox_issues: InboxIssueSerializer( + inbox_issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, inbox_id): + if not request.data.get("issue", {}).get("name", False): + return Response( + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Check for valid priority + if not request.data.get("issue", {}).get("priority", "none") in [ + "low", + "medium", + "high", + "urgent", + "none", + ]: + return Response( + {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue 
= Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "

" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + # create an inbox issue + InboxIssue.objects.create( + inbox_id=inbox_id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = IssueSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + + def patch(self, request, slug, project_id, inbox_id, pk): + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + # Only project members admins and created_by users can access this endpoint + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot edit inbox issues"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get issue data + issue_data = request.data.pop("issue", False) + + if bool(issue_data): + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + # Only allow guests and viewers to edit name and description + if project_member.role <= 10: + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get( + "description_html", issue.description_html + ), + "description": issue_data.get("description", issue.description), + } + + issue_serializer = IssueSerializer(issue, data=issue_data, partial=True) + + if 
issue_serializer.is_valid(): + current_instance = issue + # Log all the updates + requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) + if issue is not None: + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(current_instance).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + issue_serializer.save() + else: + return Response( + issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + # Only project admins and members can edit inbox issue attributes + if project_member.role > 10: + serializer = InboxIssueSerializer( + inbox_issue, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + # Update the issue state if the issue is rejected or marked as duplicate + if serializer.data["status"] in [-1, 2]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, + ) + state = State.objects.filter( + group="cancelled", workspace__slug=slug, project_id=project_id + ).first() + if state is not None: + issue.state = state + issue.save() + + # Update the issue state if it is accepted + if serializer.data["status"] in [1]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, + ) + + # Update the issue state only if it is in triage state + if issue.state.name == "Triage": + # Move to default state + state = State.objects.filter( + workspace__slug=slug, project_id=project_id, default=True + ).first() + if state is not None: + issue.state = state + issue.save() + + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK + ) + + def delete(self, request, 
slug, project_id, inbox_id, pk): + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot delete inbox issue"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check the issue status + if inbox_issue.status in [-2, -1, 0, 2]: + # Delete the issue also + Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id + ).delete() + + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py new file mode 100644 index 000000000..04efdd6ff --- /dev/null +++ b/apiserver/plane/api/views/issue.py @@ -0,0 +1,599 @@ +# Python imports +import json +from itertools import chain + +# Django imports +from django.db import IntegrityError +from django.db.models import ( + OuterRef, + Func, + Q, + F, + Case, + When, + Value, + CharField, + Max, + Exists, +) +from django.core.serializers.json import DjangoJSONEncoder +from django.utils import timezone + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from rest_framework.parsers import MultiPartParser, FormParser + +# Module imports +from .base import BaseAPIView, WebhookMixin +from plane.app.permissions import ( + ProjectEntityPermission, + ProjectMemberPermission, + ProjectLitePermission, +) +from plane.db.models import ( + Issue, + IssueAttachment, + IssueLink, + Project, + Label, + ProjectMember, + IssueComment, + IssueActivity, +) +from plane.utils.issue_filters import issue_filters +from plane.bgtasks.issue_activites_task import issue_activity +from 
class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to issue.

    """

    model = Issue
    webhook_event = "issue"
    permission_classes = [
        ProjectEntityPermission,
    ]
    serializer_class = IssueSerializer

    def get_queryset(self):
        """Non-draft, non-archived issues of the requested project, annotated
        with their sub-issue count and ordered by the ``order_by`` kwarg."""
        return (
            Issue.issue_objects.annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(workspace__slug=self.kwargs.get("slug"))
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .order_by(self.kwargs.get("order_by", "-created_at"))
        ).distinct()

    def get(self, request, slug, project_id, pk=None):
        """Retrieve one issue (``pk`` given) or a filtered, ordered, paginated list."""
        if pk:
            issue = Issue.issue_objects.annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            ).get(workspace__slug=slug, project_id=project_id, pk=pk)
            return Response(
                IssueSerializer(
                    issue,
                    fields=self.fields,
                    expand=self.expand,
                ).data,
                status=status.HTTP_200_OK,
            )

        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = (
            self.get_queryset()
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(module_id=F("issue_module__module_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order if order_by_param == "priority" else priority_order[::-1]
            )
            # NOTE(fix): no explicit ``output_field`` — the rank values are
            # integers, and the previous ``CharField()`` declaration made the
            # database compare them as strings (lexicographic ordering).
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                )
            ).order_by("priority_order")

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            # NOTE(fix): same as above — integer ranks, so let Django infer
            # an integer output field instead of forcing CharField.
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values" if order_by_param.startswith("-") else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        return self.paginate(
            request=request,
            queryset=(issue_queryset),
            on_results=lambda issues: IssueSerializer(
                issues,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data,
        )

    def post(self, request, slug, project_id):
        """Create an issue and record the creation on its activity stream."""
        project = Project.objects.get(pk=project_id)

        serializer = IssueSerializer(
            data=request.data,
            context={
                "project_id": project_id,
                "workspace_id": project.workspace_id,
                "default_assignee_id": project.default_assignee_id,
            },
        )

        if serializer.is_valid():
            serializer.save()

            # Track the issue
            issue_activity.delay(
                type="issue.activity.created",
                requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
                actor_id=str(request.user.id),
                issue_id=str(serializer.data.get("id", None)),
                project_id=str(project_id),
                current_instance=None,
                epoch=int(timezone.now().timestamp()),
            )
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def patch(self, request, slug, project_id, pk=None):
        """Partially update an issue.

        Both the requested payload and the pre-save snapshot are captured
        before ``save()`` so the activity log records the transition.
        """
        issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
        current_instance = json.dumps(
            IssueSerializer(issue).data, cls=DjangoJSONEncoder
        )
        requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
        serializer = IssueSerializer(issue, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            issue_activity.delay(
                type="issue.activity.updated",
                requested_data=requested_data,
                actor_id=str(request.user.id),
                issue_id=str(pk),
                project_id=str(project_id),
                current_instance=current_instance,
                epoch=int(timezone.now().timestamp()),
            )
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, slug, project_id, pk=None):
        """Delete an issue; the pre-delete snapshot is logged to the activity stream."""
        issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
        current_instance = json.dumps(
            IssueSerializer(issue).data, cls=DjangoJSONEncoder
        )
        issue.delete()
        issue_activity.delay(
            type="issue.activity.deleted",
            requested_data=json.dumps({"issue_id": str(pk)}),
            actor_id=str(request.user.id),
            issue_id=str(pk),
            project_id=str(project_id),
            current_instance=current_instance,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class LabelAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to the labels. + + """ + + serializer_class = LabelSerializer + model = Label + permission_classes = [ + ProjectMemberPermission, + ] + + def get_queryset(self): + return ( + Project.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("parent") + .distinct() + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + def post(self, request, slug, project_id): + try: + serializer = LabelSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except IntegrityError: + return Response( + {"error": "Label with the same name already exists in the project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug, project_id, pk=None): + if pk: + label = self.get_queryset().get(pk=pk) + serializer = LabelSerializer( + label, + fields=self.fields, + expand=self.expand, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda labels: LabelSerializer( + labels, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def patch(self, request, slug, project_id, pk=None): + label = self.get_queryset().get(pk=pk) + serializer = LabelSerializer(label, data=request.data, partial=True) + return Response(serializer.data, 
class IssueLinkAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to the links of the particular issue.

    """

    permission_classes = [
        ProjectEntityPermission,
    ]

    model = IssueLink
    serializer_class = IssueLinkSerializer

    def get_queryset(self):
        # Links attached to the requested issue, restricted to project members.
        return (
            IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(issue_id=self.kwargs.get("issue_id"))
            .filter(project__project_projectmember__member=self.request.user)
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
        )

    # NOTE(review): unlike post/patch/delete, this signature omits ``issue_id``
    # — verify it matches the URL conf before relying on it.
    def get(self, request, slug, project_id, pk=None):
        # Single-link retrieval when a primary key is supplied.
        if pk:
            link = self.get_queryset().get(pk=pk)
            payload = IssueLinkSerializer(
                link,
                fields=self.fields,
                expand=self.expand,
            ).data
            return Response(payload, status=status.HTTP_200_OK)
        # Otherwise return the paginated collection.
        return self.paginate(
            request=request,
            queryset=(self.get_queryset()),
            on_results=lambda issue_links: IssueLinkSerializer(
                issue_links,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data,
        )

    def post(self, request, slug, project_id, issue_id):
        # Validate first; bail out early on a bad payload.
        serializer = IssueLinkSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        serializer.save(
            project_id=project_id,
            issue_id=issue_id,
        )
        # Record the creation on the issue's activity stream.
        issue_activity.delay(
            type="link.activity.created",
            requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("issue_id")),
            project_id=str(self.kwargs.get("project_id")),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def patch(self, request, slug, project_id, issue_id, pk):
        link = IssueLink.objects.get(
            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
        )
        # Snapshot the request payload and the pre-save state for the log.
        requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
        current_instance = json.dumps(
            IssueLinkSerializer(link).data,
            cls=DjangoJSONEncoder,
        )

        serializer = IssueLinkSerializer(link, data=request.data, partial=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        serializer.save()
        issue_activity.delay(
            type="link.activity.updated",
            requested_data=requested_data,
            actor_id=str(request.user.id),
            issue_id=str(issue_id),
            project_id=str(project_id),
            current_instance=current_instance,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(serializer.data, status=status.HTTP_200_OK)

    def delete(self, request, slug, project_id, issue_id, pk):
        link = IssueLink.objects.get(
            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
        )
        # Capture the state before queuing the log entry and deleting the row.
        current_instance = json.dumps(
            IssueLinkSerializer(link).data,
            cls=DjangoJSONEncoder,
        )
        issue_activity.delay(
            type="link.activity.deleted",
            requested_data=json.dumps({"link_id": str(pk)}),
            actor_id=str(request.user.id),
            issue_id=str(issue_id),
            project_id=str(project_id),
            current_instance=current_instance,
            epoch=int(timezone.now().timestamp()),
        )
        link.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
+ + """ + + serializer_class = IssueCommentSerializer + model = IssueComment + webhook_event = "issue-comment" + permission_classes = [ + ProjectLitePermission, + ] + + def get_queryset(self): + return ( + IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .select_related("actor") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + def get(self, request, slug, project_id, issue_id, pk=None): + if pk: + issue_comment = self.get_queryset().get(pk=pk) + serializer = IssueCommentSerializer( + issue_comment, + fields=self.fields, + expand=self.expand, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda issue_comment: IssueCommentSerializer( + issue_comment, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, issue_id): + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return 
Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def patch(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueCommentSerializer( + issue_comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + issue_comment.delete() + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueActivityAPIEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id, issue_id, pk=None): + issue_activities = ( + IssueActivity.objects.filter( + issue_id=issue_id, workspace__slug=slug, project_id=project_id + ) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + 
project__project_projectmember__member=self.request.user, + ) + .select_related("actor", "workspace", "issue", "project") + ).order_by(request.GET.get("order_by", "created_at")) + + if pk: + issue_activities = issue_activities.get(pk=pk) + serializer = IssueActivitySerializer(issue_activities) + return Response(serializer.data, status=status.HTTP_200_OK) + + self.paginate( + request=request, + queryset=(issue_activities), + on_results=lambda issue_activity: IssueActivitySerializer( + issue_activity, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py new file mode 100644 index 000000000..78f721adc --- /dev/null +++ b/apiserver/plane/api/views/module.py @@ -0,0 +1,365 @@ +# Python imports +import json + +# Django imports +from django.db.models import Count, Prefetch, Q, F, Func, OuterRef +from django.utils import timezone +from django.core import serializers + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from .base import BaseAPIView, WebhookMixin +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Project, + Module, + ModuleLink, + Issue, + ModuleIssue, + IssueAttachment, + IssueLink, +) +from plane.api.serializers import ( + ModuleSerializer, + ModuleIssueSerializer, + IssueSerializer, +) +from plane.bgtasks.issue_activites_task import issue_activity + + +class ModuleAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module. 
+ + """ + + model = Module + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = ModuleSerializer + webhook_event = "module" + + def get_queryset(self): + return ( + Module.objects.filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related("module", "created_by"), + ) + ) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ), + ) + .annotate( + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + def post(self, request, 
slug, project_id): + project = Project.objects.get(workspace__slug=slug, pk=project_id) + serializer = ModuleSerializer(data=request.data, context={"project": project}) + if serializer.is_valid(): + serializer.save() + module = Module.objects.get(pk=serializer.data["id"]) + serializer = ModuleSerializer(module) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def get(self, request, slug, project_id, pk=None): + if pk: + queryset = self.get_queryset().get(pk=pk) + data = ModuleSerializer( + queryset, + fields=self.fields, + expand=self.expand, + ).data + return Response( + data, + status=status.HTTP_200_OK, + ) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda modules: ModuleSerializer( + modules, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def delete(self, request, slug, project_id, pk): + module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + module_issues = list( + ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) + ) + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id": str(pk), + "module_name": str(module.name), + "issues": [str(issue_id) for issue_id in module_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + module.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module issues. 
+ + """ + + serializer_class = ModuleIssueSerializer + model = ModuleIssue + webhook_event = "module" + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + ModuleIssue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(module_id=self.kwargs.get("module_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("module") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .prefetch_related("module__members") + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + def get(self, request, slug, project_id, module_id): + order_by = request.GET.get("order_by", "created_at") + issues = ( + Issue.issue_objects.filter(issue_module__module_id=module_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_module__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + return self.paginate( + request=request, + 
queryset=(issues), + on_results=lambda issues: IssueSerializer( + issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, module_id): + issues = request.data.get("issues", []) + if not len(issues): + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + module = Module.objects.get( + workspace__slug=slug, project_id=project_id, pk=module_id + ) + + issues = Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issues + ).values_list("id", flat=True) + + module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) + + update_module_issue_activity = [] + records_to_update = [] + record_to_create = [] + + for issue in issues: + module_issue = [ + module_issue + for module_issue in module_issues + if str(module_issue.issue_id) in issues + ] + + if len(module_issue): + if module_issue[0].module_id != module_id: + update_module_issue_activity.append( + { + "old_module_id": str(module_issue[0].module_id), + "new_module_id": str(module_id), + "issue_id": str(module_issue[0].issue_id), + } + ) + module_issue[0].module_id = module_id + records_to_update.append(module_issue[0]) + else: + record_to_create.append( + ModuleIssue( + module=module, + issue_id=issue, + project_id=project_id, + workspace=module.workspace, + created_by=request.user, + updated_by=request.user, + ) + ) + + ModuleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + + ModuleIssue.objects.bulk_update( + records_to_update, + ["module"], + batch_size=10, + ) + + # Capture Issue Activity + issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"modules_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_module_issues": update_module_issue_activity, + "created_module_issues": 
class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
    """Project Endpoints to create, update, list, retrieve and delete endpoint"""

    serializer_class = ProjectSerializer
    model = Project
    webhook_event = "project"

    permission_classes = [
        ProjectBasePermission,
    ]

    def get_queryset(self):
        """Workspace projects the user is a member of, plus public (network == 2)
        ones, annotated with membership, counts, role and deployment info."""
        return (
            Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
            .select_related(
                "workspace", "workspace__owner", "default_assignee", "project_lead"
            )
            .annotate(
                is_member=Exists(
                    ProjectMember.objects.filter(
                        member=self.request.user,
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                        is_active=True,
                    )
                )
            )
            .annotate(
                total_members=ProjectMember.objects.filter(
                    project_id=OuterRef("id"),
                    member__is_bot=False,
                    is_active=True,
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_modules=Module.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                member_role=ProjectMember.objects.filter(
                    project_id=OuterRef("pk"),
                    member_id=self.request.user.id,
                    is_active=True,
                ).values("role")
            )
            .annotate(
                is_deployed=Exists(
                    ProjectDeployBoard.objects.filter(
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
            )
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
        )

    def get(self, request, slug, pk=None):
        """List the workspace's projects (ordered by the member's sort order)
        or retrieve a single project."""
        if pk is None:
            sort_order_query = ProjectMember.objects.filter(
                member=request.user,
                project_id=OuterRef("pk"),
                workspace__slug=self.kwargs.get("slug"),
                is_active=True,
            ).values("sort_order")
            projects = (
                self.get_queryset()
                .annotate(sort_order=Subquery(sort_order_query))
                .prefetch_related(
                    Prefetch(
                        "project_projectmember",
                        queryset=ProjectMember.objects.filter(
                            workspace__slug=slug,
                            is_active=True,
                        ).select_related("member"),
                    )
                )
                .order_by("sort_order", "name")
            )
            return self.paginate(
                request=request,
                queryset=(projects),
                on_results=lambda projects: ProjectSerializer(
                    projects,
                    many=True,
                    fields=self.fields,
                    expand=self.expand,
                ).data,
            )
        project = self.get_queryset().get(workspace__slug=slug, pk=pk)
        serializer = ProjectSerializer(
            project,
            fields=self.fields,
            expand=self.expand,
        )
        return Response(serializer.data, status=status.HTTP_200_OK)

    def post(self, request, slug):
        """Create a project, make the creator (and optional lead) an admin and
        seed the default workflow states."""
        try:
            workspace = Workspace.objects.get(slug=slug)

            serializer = ProjectSerializer(
                data={**request.data}, context={"workspace_id": workspace.id}
            )
            if serializer.is_valid():
                serializer.save()

                # Add the creating user as Administrator to the project
                ProjectMember.objects.create(
                    project_id=serializer.data["id"], member=request.user, role=20
                )
                # Also create the issue property for the user
                _ = IssueProperty.objects.create(
                    project_id=serializer.data["id"],
                    user=request.user,
                )

                if serializer.data["project_lead"] is not None and str(
                    serializer.data["project_lead"]
                ) != str(request.user.id):
                    ProjectMember.objects.create(
                        project_id=serializer.data["id"],
                        member_id=serializer.data["project_lead"],
                        role=20,
                    )
                    # Also create the issue property for the project lead
                    IssueProperty.objects.create(
                        project_id=serializer.data["id"],
                        user_id=serializer.data["project_lead"],
                    )

                # Default states seeded into every new project
                states = [
                    {
                        "name": "Backlog",
                        "color": "#A3A3A3",
                        "sequence": 15000,
                        "group": "backlog",
                        "default": True,
                    },
                    {
                        "name": "Todo",
                        "color": "#3A3A3A",
                        "sequence": 25000,
                        "group": "unstarted",
                    },
                    {
                        "name": "In Progress",
                        "color": "#F59E0B",
                        "sequence": 35000,
                        "group": "started",
                    },
                    {
                        "name": "Done",
                        "color": "#16A34A",
                        "sequence": 45000,
                        "group": "completed",
                    },
                    {
                        "name": "Cancelled",
                        "color": "#EF4444",
                        "sequence": 55000,
                        "group": "cancelled",
                    },
                ]

                State.objects.bulk_create(
                    [
                        State(
                            name=state["name"],
                            color=state["color"],
                            project=serializer.instance,
                            sequence=state["sequence"],
                            workspace=serializer.instance.workspace,
                            group=state["group"],
                            default=state.get("default", False),
                            created_by=request.user,
                        )
                        for state in states
                    ]
                )

                project = self.get_queryset().filter(pk=serializer.data["id"]).first()
                serializer = ProjectSerializer(project)
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST,
            )
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The project name is already taken"},
                    status=status.HTTP_410_GONE,
                )
            # NOTE(fix): unexpected integrity errors previously fell through
            # and the view returned None (an empty 500); surface them instead.
            raise
        except Workspace.DoesNotExist:
            return Response(
                {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except ValidationError:
            return Response(
                {"identifier": "The project identifier is already taken"},
                status=status.HTTP_410_GONE,
            )

    def patch(self, request, slug, pk=None):
        """Partially update a project; enabling ``inbox_view`` provisions the
        default Inbox and the hidden Triage state."""
        try:
            workspace = Workspace.objects.get(slug=slug)
            project = Project.objects.get(pk=pk)

            serializer = ProjectSerializer(
                project,
                data={**request.data},
                context={"workspace_id": workspace.id},
                partial=True,
            )

            if serializer.is_valid():
                serializer.save()
                if serializer.data["inbox_view"]:
                    Inbox.objects.get_or_create(
                        name=f"{project.name} Inbox", project=project, is_default=True
                    )

                    # Create the triage state in Backlog group
                    State.objects.get_or_create(
                        name="Triage",
                        group="backlog",
                        description="Default state for managing all Inbox Issues",
                        project_id=pk,
                        color="#ff7700",
                    )

                project = self.get_queryset().filter(pk=serializer.data["id"]).first()
                serializer = ProjectSerializer(project)
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The project name is already taken"},
                    status=status.HTTP_410_GONE,
                )
            # NOTE(fix): same fall-through as in ``post`` — re-raise unexpected
            # integrity errors rather than silently returning None.
            raise
        except (Project.DoesNotExist, Workspace.DoesNotExist):
            return Response(
                {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except ValidationError:
            return Response(
                {"identifier": "The project identifier is already taken"},
                status=status.HTTP_410_GONE,
            )
status=status.HTTP_410_GONE, + ) + except (Project.DoesNotExist, Workspace.DoesNotExist): + return Response( + {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + except ValidationError as e: + return Response( + {"identifier": "The project identifier is already taken"}, + status=status.HTTP_410_GONE, + ) \ No newline at end of file diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py new file mode 100644 index 000000000..8e7a73d9b --- /dev/null +++ b/apiserver/plane/api/views/state.py @@ -0,0 +1,89 @@ +# Python imports +from itertools import groupby + +# Django imports +from django.db.models import Q + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .base import BaseAPIView +from plane.api.serializers import StateSerializer +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import State, Issue + + +class StateAPIEndpoint(BaseAPIView): + serializer_class = StateSerializer + model = State + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .filter(~Q(name="Triage")) + .select_related("project") + .select_related("workspace") + .distinct() + ) + + def post(self, request, slug, project_id): + serializer = StateSerializer(data=request.data, context={"project_id": project_id}) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def get(self, request, slug, project_id, pk=None): + if pk: + serializer = StateSerializer(self.get_queryset().get(pk=pk)) + return Response(serializer.data, 
status=status.HTTP_200_OK) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda states: StateSerializer( + states, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def delete(self, request, slug, project_id, pk): + state = State.objects.get( + ~Q(name="Triage"), + pk=pk, + project_id=project_id, + workspace__slug=slug, + ) + + if state.default: + return Response({"error": "Default state cannot be deleted"}, status=False) + + # Check for any issues in the state + issue_exist = Issue.issue_objects.filter(state=pk).exists() + + if issue_exist: + return Response( + {"error": "The state is not empty, only empty states can be deleted"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + state.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + def patch(self, request, slug, project_id, pk=None): + state = State.objects.filter(workspace__slug=slug, project_id=project_id, pk=pk) + serializer = StateSerializer(state, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) \ No newline at end of file diff --git a/apiserver/plane/app/apps.py b/apiserver/plane/app/apps.py index 6057d131a..e3277fc4d 100644 --- a/apiserver/plane/app/apps.py +++ b/apiserver/plane/app/apps.py @@ -1,5 +1,5 @@ from django.apps import AppConfig -class AppConfig(AppConfig): +class AppApiConfig(AppConfig): name = "plane.app" diff --git a/apiserver/plane/app/middleware/__init__.py b/apiserver/plane/app/middleware/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/authentication/api_authentication.py b/apiserver/plane/app/middleware/api_authentication.py similarity index 100% rename from apiserver/plane/authentication/api_authentication.py rename to apiserver/plane/app/middleware/api_authentication.py diff --git 
a/apiserver/plane/proxy/rate_limit.py b/apiserver/plane/app/rate_limit.py similarity index 100% rename from apiserver/plane/proxy/rate_limit.py rename to apiserver/plane/app/rate_limit.py diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py index e9bdf7be4..58a38f154 100644 --- a/apiserver/plane/app/serializers/project.py +++ b/apiserver/plane/app/serializers/project.py @@ -214,4 +214,4 @@ class ProjectPublicMemberSerializer(BaseSerializer): "workspace", "project", "member", - ] + ] \ No newline at end of file diff --git a/apiserver/plane/app/serializers/state.py b/apiserver/plane/app/serializers/state.py index 7cf645fae..323254f26 100644 --- a/apiserver/plane/app/serializers/state.py +++ b/apiserver/plane/app/serializers/state.py @@ -25,4 +25,4 @@ class StateLiteSerializer(BaseSerializer): "color", "group", ] - read_only_fields = fields + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py index 7d057ad9e..d8334ed57 100644 --- a/apiserver/plane/app/urls/__init__.py +++ b/apiserver/plane/app/urls/__init__.py @@ -22,10 +22,6 @@ from .api import urlpatterns as api_urls from .webhook import urlpatterns as webhook_urls -# Django imports -from django.conf import settings - - urlpatterns = [ *analytic_urls, *asset_urls, @@ -49,4 +45,4 @@ urlpatterns = [ *workspace_urls, *api_urls, *webhook_urls, -] +] \ No newline at end of file diff --git a/apiserver/plane/app/urls/project.py b/apiserver/plane/app/urls/project.py index 4f0771952..b2819176c 100644 --- a/apiserver/plane/app/urls/project.py +++ b/apiserver/plane/app/urls/project.py @@ -169,4 +169,4 @@ urlpatterns = [ ), name="project-deploy-board", ), -] +] \ No newline at end of file diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py index f945f00e3..e36d6a14b 100644 --- a/apiserver/plane/app/views/__init__.py +++ 
b/apiserver/plane/app/views/__init__.py @@ -168,4 +168,4 @@ from .exporter import ExportIssuesEndpoint from .config import ConfigurationEndpoint -from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint +from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint \ No newline at end of file diff --git a/apiserver/plane/app/views/config.py b/apiserver/plane/app/views/config.py index 4a6b05859..411f9c5dd 100644 --- a/apiserver/plane/app/views/config.py +++ b/apiserver/plane/app/views/config.py @@ -92,4 +92,13 @@ class ConfigurationEndpoint(BaseAPIView): ) ) + # Open AI settings + data["has_openai_configured"] = bool( + get_configuration_value( + instance_configuration, + "OPENAI_API_KEY", + os.environ.get("OPENAI_API_KEY", None), + ) + ) + return Response(data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle.py index a590dc214..7228aa088 100644 --- a/apiserver/plane/app/views/cycle.py +++ b/apiserver/plane/app/views/cycle.py @@ -868,4 +868,4 @@ class TransferCycleIssueEndpoint(BaseAPIView): updated_cycles, ["cycle_id"], batch_size=100 ) - return Response({"message": "Success"}, status=status.HTTP_200_OK) + return Response({"message": "Success"}, status=status.HTTP_200_OK) \ No newline at end of file diff --git a/apiserver/plane/app/views/inbox.py b/apiserver/plane/app/views/inbox.py index 38c0808b5..331ee2175 100644 --- a/apiserver/plane/app/views/inbox.py +++ b/apiserver/plane/app/views/inbox.py @@ -356,4 +356,3 @@ class InboxIssueViewSet(BaseViewSet): inbox_issue.delete() return Response(status=status.HTTP_204_NO_CONTENT) - diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py index 4f7883868..b03c0ea4f 100644 --- a/apiserver/plane/app/views/issue.py +++ b/apiserver/plane/app/views/issue.py @@ -1750,4 +1750,4 @@ class IssueDraftViewSet(BaseViewSet): current_instance=current_instance, 
epoch=int(timezone.now().timestamp()), ) - return Response(status=status.HTTP_204_NO_CONTENT) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/module.py b/apiserver/plane/app/views/module.py index cc6369fe2..28986ea0f 100644 --- a/apiserver/plane/app/views/module.py +++ b/apiserver/plane/app/views/module.py @@ -583,4 +583,4 @@ class ModuleFavoriteViewSet(BaseViewSet): module_id=module_id, ) module_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/oauth.py b/apiserver/plane/app/views/oauth.py index d7d9fe9e0..31b28415a 100644 --- a/apiserver/plane/app/views/oauth.py +++ b/apiserver/plane/app/views/oauth.py @@ -420,4 +420,4 @@ class OauthEndpoint(BaseAPIView): "access_token": access_token, "refresh_token": refresh_token, } - return Response(data, status=status.HTTP_201_CREATED) + return Response(data, status=status.HTTP_201_CREATED) \ No newline at end of file diff --git a/apiserver/plane/app/views/state.py b/apiserver/plane/app/views/state.py index 124bdf8fd..5867edb68 100644 --- a/apiserver/plane/app/views/state.py +++ b/apiserver/plane/app/views/state.py @@ -89,4 +89,4 @@ class StateViewSet(BaseViewSet): ) state.delete() - return Response(status=status.HTTP_204_NO_CONTENT) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/view.py b/apiserver/plane/app/views/view.py index 8e0e72f66..1fd1ccf76 100644 --- a/apiserver/plane/app/views/view.py +++ b/apiserver/plane/app/views/view.py @@ -257,4 +257,4 @@ class IssueViewFavoriteViewSet(BaseViewSet): view_id=view_id, ) view_favourite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/bgtasks/export_task.py 
b/apiserver/plane/bgtasks/export_task.py index 1329697e9..7941344ef 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -81,13 +81,13 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug): ) s3.upload_fileobj( zip_file, - settings.AWS_STORAGE_BUCKET_NAME, + settings.AWS_S3_BUCKET_NAME, file_name, ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, ) presigned_url = s3.generate_presigned_url( "get_object", - Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, + Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name}, ExpiresIn=expires_in, ) # Create the new url with updated domain and protocol diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index 45c53eaca..34b254d95 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -42,7 +42,7 @@ def delete_old_s3_link(): # Delete object from S3 if file_name: if settings.DOCKERIZED and settings.USE_MINIO: - s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) + s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) else: s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) diff --git a/apiserver/plane/db/management/commands/create_bucket.py b/apiserver/plane/db/management/commands/create_bucket.py index 054523bf9..fbda34f77 100644 --- a/apiserver/plane/db/management/commands/create_bucket.py +++ b/apiserver/plane/db/management/commands/create_bucket.py @@ -40,7 +40,7 @@ class Command(BaseCommand): ) # Create an S3 client using the session s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL) - bucket_name = settings.AWS_STORAGE_BUCKET_NAME + bucket_name = settings.AWS_S3_BUCKET_NAME self.stdout.write(self.style.NOTICE("Checking bucket...")) @@ -50,7 +50,7 @@ class Command(BaseCommand): self.set_bucket_public_policy(s3_client, 
bucket_name) except ClientError as e: error_code = int(e.response['Error']['Code']) - bucket_name = settings.AWS_STORAGE_BUCKET_NAME + bucket_name = settings.AWS_S3_BUCKET_NAME if error_code == 404: # Bucket does not exist, create it self.stdout.write(self.style.WARNING(f"Bucket '{bucket_name}' does not exist. Creating bucket...")) diff --git a/apiserver/plane/proxy/apps.py b/apiserver/plane/proxy/apps.py deleted file mode 100644 index e5a5a80ef..000000000 --- a/apiserver/plane/proxy/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ProxyConfig(AppConfig): - name = "plane.proxy" diff --git a/apiserver/plane/proxy/views/base.py b/apiserver/plane/proxy/views/base.py deleted file mode 100644 index d5dc9fc4c..000000000 --- a/apiserver/plane/proxy/views/base.py +++ /dev/null @@ -1,101 +0,0 @@ -# Python imports -import re -import json -import requests - -# Django imports -from django.conf import settings - -# Third party imports -from rest_framework.views import APIView -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticated -from rest_framework_simplejwt.tokens import RefreshToken - -# Module imports -from plane.authentication.api_authentication import APIKeyAuthentication -from plane.proxy.rate_limit import ApiKeyRateThrottle - - -class BaseAPIView(APIView): - authentication_classes = [ - APIKeyAuthentication, - ] - - permission_classes = [ - IsAuthenticated, - ] - - throttle_classes = [ - ApiKeyRateThrottle, - ] - - def _get_jwt_token(self, request): - refresh = RefreshToken.for_user(request.user) - return str(refresh.access_token) - - def _get_url_path(self, request): - match = re.search(r"/v1/(.*)", request.path) - return match.group(1) if match else "" - - def _get_headers(self, request): - return { - "Authorization": f"Bearer {self._get_jwt_token(request=request)}", - "Content-Type": request.headers.get("Content-Type", "application/json"), - } - - def _get_url(self, request): - path = 
self._get_url_path(request=request) - url = request.build_absolute_uri("/api/" + path) - return url - - def _get_query_params(self, request): - query_params = request.GET - return query_params - - def _get_payload(self, request): - content_type = request.headers.get("Content-Type", "application/json") - if content_type.startswith("multipart/form-data"): - files_dict = {k: v[0] for k, v in request.FILES.lists()} - return (None, files_dict) - else: - return (json.dumps(request.data), None) - - def _make_request(self, request, method="GET"): - data_payload, files_payload = self._get_payload(request=request) - response = requests.request( - method=method, - url=self._get_url(request=request), - headers=self._get_headers(request=request), - params=self._get_query_params(request=request), - data=data_payload, - files=files_payload, - ) - return response.json(), response.status_code - - def finalize_response(self, request, response, *args, **kwargs): - # Call super to get the default response - response = super().finalize_response(request, response, *args, **kwargs) - - # Add custom headers if they exist in the request META - ratelimit_remaining = request.META.get('X-RateLimit-Remaining') - if ratelimit_remaining is not None: - response['X-RateLimit-Remaining'] = ratelimit_remaining - - ratelimit_reset = request.META.get('X-RateLimit-Reset') - if ratelimit_reset is not None: - response['X-RateLimit-Reset'] = ratelimit_reset - - return response - - def get(self, request, *args, **kwargs): - response, status_code = self._make_request(request=request, method="GET") - return Response(response, status=status_code) - - def post(self, request, *args, **kwargs): - response, status_code = self._make_request(request=request, method="POST") - return Response(response, status=status_code) - - def partial_update(self, request, *args, **kwargs): - response, status_code = self._make_request(request=request, method="PATCH") - return Response(response, status=status_code) diff --git 
a/apiserver/plane/proxy/views/cycle.py b/apiserver/plane/proxy/views/cycle.py deleted file mode 100644 index 2407693af..000000000 --- a/apiserver/plane/proxy/views/cycle.py +++ /dev/null @@ -1,30 +0,0 @@ -from .base import BaseAPIView - - -class CycleAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to cycle. - - """ - - pass - - -class CycleIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to cycle issues. - - """ - - pass - - -class TransferCycleIssueAPIEndpoint(BaseAPIView): - """ - This viewset provides `create` actions for transfering the issues into a particular cycle. - - """ - - pass diff --git a/apiserver/plane/proxy/views/inbox.py b/apiserver/plane/proxy/views/inbox.py deleted file mode 100644 index 7e79f4c0b..000000000 --- a/apiserver/plane/proxy/views/inbox.py +++ /dev/null @@ -1,10 +0,0 @@ -from .base import BaseAPIView - - -class InboxIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to inbox issues. - - """ - pass \ No newline at end of file diff --git a/apiserver/plane/proxy/views/issue.py b/apiserver/plane/proxy/views/issue.py deleted file mode 100644 index 56dc71a3a..000000000 --- a/apiserver/plane/proxy/views/issue.py +++ /dev/null @@ -1,37 +0,0 @@ -from .base import BaseAPIView - - -class IssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to issue. - - """ - pass - - -class LabelAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to the labels. 
- - """ - pass - - -class IssueLinkAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to the links of the particular issue. - - """ - pass - - -class IssueCommentAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to comments of the particular issue. - - """ - pass \ No newline at end of file diff --git a/apiserver/plane/proxy/views/module.py b/apiserver/plane/proxy/views/module.py deleted file mode 100644 index 3726d4af5..000000000 --- a/apiserver/plane/proxy/views/module.py +++ /dev/null @@ -1,20 +0,0 @@ -from .base import BaseAPIView - - -class ModuleAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to module. - - """ - - pass - - -class ModuleIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to module issues. 
- - """ - pass diff --git a/apiserver/plane/proxy/views/project.py b/apiserver/plane/proxy/views/project.py deleted file mode 100644 index 6eb43d941..000000000 --- a/apiserver/plane/proxy/views/project.py +++ /dev/null @@ -1,5 +0,0 @@ -from .base import BaseAPIView - - -class ProjectAPIEndpoint(BaseAPIView): - pass \ No newline at end of file diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 8b81102fe..46ef090ab 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -42,7 +42,7 @@ INSTALLED_APPS = [ "plane.web", "plane.middleware", "plane.license", - "plane.proxy", + "plane.api", # Third-party things "rest_framework", "rest_framework.authtoken", @@ -75,7 +75,7 @@ REST_FRAMEWORK = { "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",), "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",), "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), - "DEFAULT_THROTTLE_CLASSES": ("plane.proxy.rate_limit.ApiKeyRateThrottle",), + "DEFAULT_THROTTLE_CLASSES": ("plane.api.rate_limit.ApiKeyRateThrottle",), "DEFAULT_THROTTLE_RATES": { "api_key": "60/minute", }, @@ -224,7 +224,8 @@ STORAGES["default"] = { } AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") -AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_REGION = os.environ.get("AWS_REGION", "") AWS_DEFAULT_ACL = "public-read" AWS_QUERYSTRING_AUTH = False AWS_S3_FILE_OVERWRITE = False @@ -233,7 +234,7 @@ AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ. 
) if AWS_S3_ENDPOINT_URL: parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" + AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_S3_BUCKET_NAME}" AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" @@ -321,4 +322,4 @@ ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) # Use Minio settings -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 +USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 \ No newline at end of file diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py index 1b6f95bba..75b4c2609 100644 --- a/apiserver/plane/urls.py +++ b/apiserver/plane/urls.py @@ -13,7 +13,7 @@ urlpatterns = [ path("api/", include("plane.app.urls")), path("api/public/", include("plane.space.urls")), path("api/licenses/", include("plane.license.urls")), - path("api/v1/", include("plane.proxy.urls")), + path("api/v1/", include("plane.api.urls")), path("", include("plane.web.urls")), ] diff --git a/apiserver/plane/utils/paginator.py b/apiserver/plane/utils/paginator.py index 544ed8fef..793614cc0 100644 --- a/apiserver/plane/utils/paginator.py +++ b/apiserver/plane/utils/paginator.py @@ -28,15 +28,15 @@ class Cursor: @classmethod def from_string(cls, value): - bits = value.split(":") - if len(bits) != 3: - raise ValueError try: + bits = value.split(":") + if len(bits) != 3: + raise ValueError("Cursor must be in the format 'value:offset:is_prev'") + value = float(bits[0]) if "." 
in bits[0] else int(bits[0]) - bits = value, int(bits[1]), int(bits[2]) - except (TypeError, ValueError): - raise ValueError - return cls(*bits) + return cls(value, int(bits[1]), bool(int(bits[2]))) + except (TypeError, ValueError) as e: + raise ValueError(f"Invalid cursor format: {e}") class CursorResult(Sequence): @@ -125,7 +125,8 @@ class OffsetPaginator: if self.on_results: results = self.on_results(results) - max_hits = math.ceil(queryset.count() / limit) + count = queryset.count() + max_hits = math.ceil(count / limit) return CursorResult( results=results, diff --git a/web/components/estimates/create-update-estimate-modal.tsx b/web/components/estimates/create-update-estimate-modal.tsx index b3116b767..e2845936e 100644 --- a/web/components/estimates/create-update-estimate-modal.tsx +++ b/web/components/estimates/create-update-estimate-modal.tsx @@ -42,7 +42,9 @@ export const CreateUpdateEstimateModal: React.FC = observer((props) => { const { workspaceSlug, projectId } = router.query; // store - const { projectEstimates: projectEstimatesStore } = useMobxStore(); + const { + projectEstimates: { createEstimate, updateEstimate }, + } = useMobxStore(); const { formState: { errors, isSubmitting }, @@ -60,11 +62,10 @@ export const CreateUpdateEstimateModal: React.FC = observer((props) => { const { setToastAlert } = useToast(); - const createEstimate = async (payload: IEstimateFormData) => { + const handleCreateEstimate = async (payload: IEstimateFormData) => { if (!workspaceSlug || !projectId) return; - await projectEstimatesStore - .createEstimate(workspaceSlug.toString(), projectId.toString(), payload) + await createEstimate(workspaceSlug.toString(), projectId.toString(), payload) .then(() => { onClose(); }) @@ -83,13 +84,12 @@ export const CreateUpdateEstimateModal: React.FC = observer((props) => { }); }; - const updateEstimate = async (payload: IEstimateFormData) => { + const handleUpdateEstimate = async (payload: IEstimateFormData) => { if (!workspaceSlug || 
!projectId || !data) return; - await projectEstimatesStore - .updateEstimate(workspaceSlug.toString(), projectId.toString(), data.id, payload) + await updateEstimate(workspaceSlug.toString(), projectId.toString(), data.id, payload) .then(() => { - handleClose(); + onClose(); }) .catch((err) => { const error = err?.error; @@ -101,8 +101,6 @@ export const CreateUpdateEstimateModal: React.FC = observer((props) => { message: errorString ?? "Estimate could not be updated. Please try again.", }); }); - - onClose(); }; const onSubmit = async (formData: FormValues) => { @@ -171,8 +169,8 @@ export const CreateUpdateEstimateModal: React.FC = observer((props) => { else payload.estimate_points.push({ ...point }); } - if (data) await updateEstimate(payload); - else await createEstimate(payload); + if (data) await handleUpdateEstimate(payload); + else await handleCreateEstimate(payload); }; useEffect(() => { diff --git a/web/components/estimates/estimate-list-item.tsx b/web/components/estimates/estimate-list-item.tsx index beaa942d3..21f4f41f6 100644 --- a/web/components/estimates/estimate-list-item.tsx +++ b/web/components/estimates/estimate-list-item.tsx @@ -28,28 +28,27 @@ export const EstimateListItem: React.FC = observer((props) => { const router = useRouter(); const { workspaceSlug, projectId } = router.query; // store - const { project: projectStore } = useMobxStore(); - const { currentProjectDetails } = projectStore; + const { + project: { currentProjectDetails, updateProject }, + } = useMobxStore(); // hooks const { setToastAlert } = useToast(); const handleUseEstimate = async () => { if (!workspaceSlug || !projectId) return; - await projectStore - .updateProject(workspaceSlug.toString(), projectId.toString(), { - estimate: estimate.id, - }) - .catch((err) => { - const error = err?.error; - const errorString = Array.isArray(error) ? 
error[0] : error; + await updateProject(workspaceSlug.toString(), projectId.toString(), { + estimate: estimate.id, + }).catch((err) => { + const error = err?.error; + const errorString = Array.isArray(error) ? error[0] : error; - setToastAlert({ - type: "error", - title: "Error!", - message: errorString ?? "Estimate points could not be used. Please try again.", - }); + setToastAlert({ + type: "error", + title: "Error!", + message: errorString ?? "Estimate points could not be used. Please try again.", }); + }); }; return ( @@ -69,7 +68,7 @@ export const EstimateListItem: React.FC = observer((props) => {
{currentProjectDetails?.estimate !== estimate?.id && estimate?.points?.length > 0 && ( - )} diff --git a/web/components/estimates/estimate-list.tsx b/web/components/estimates/estimates-list.tsx similarity index 90% rename from web/components/estimates/estimate-list.tsx rename to web/components/estimates/estimates-list.tsx index 07770b183..df1088d7d 100644 --- a/web/components/estimates/estimate-list.tsx +++ b/web/components/estimates/estimates-list.tsx @@ -23,8 +23,10 @@ export const EstimatesList: React.FC = observer(() => { const { workspaceSlug, projectId } = router.query; // store - const { project: projectStore } = useMobxStore(); - const { currentProjectDetails } = projectStore; + const { + project: { currentProjectDetails, updateProject }, + projectEstimates: { projectEstimates, getProjectEstimateById }, + } = useMobxStore(); // states const [estimateFormOpen, setEstimateFormOpen] = useState(false); const [estimateToDelete, setEstimateToDelete] = useState(null); @@ -32,7 +34,7 @@ export const EstimatesList: React.FC = observer(() => { // hooks const { setToastAlert } = useToast(); // derived values - const estimatesList = projectStore.projectEstimates; + const estimatesList = projectEstimates; const editEstimate = (estimate: IEstimate) => { setEstimateFormOpen(true); @@ -42,7 +44,7 @@ export const EstimatesList: React.FC = observer(() => { const disableEstimates = () => { if (!workspaceSlug || !projectId) return; - projectStore.updateProject(workspaceSlug.toString(), projectId.toString(), { estimate: null }).catch((err) => { + updateProject(workspaceSlug.toString(), projectId.toString(), { estimate: null }).catch((err) => { const error = err?.error; const errorString = Array.isArray(error) ? error[0] : error; @@ -68,7 +70,7 @@ export const EstimatesList: React.FC = observer(() => { setEstimateToDelete(null)} - data={projectStore.getProjectEstimateById(estimateToDelete!)} + data={getProjectEstimateById(estimateToDelete!)} />
@@ -81,11 +83,12 @@ export const EstimatesList: React.FC = observer(() => { setEstimateFormOpen(true); setEstimateToUpdate(undefined); }} + size="sm" > Add Estimate {currentProjectDetails?.estimate && ( - )} diff --git a/web/components/estimates/index.ts b/web/components/estimates/index.ts index e9a22a53d..a0dea2d25 100644 --- a/web/components/estimates/index.ts +++ b/web/components/estimates/index.ts @@ -1,4 +1,5 @@ export * from "./create-update-estimate-modal"; export * from "./delete-estimate-modal"; -export * from "./estimate-select"; export * from "./estimate-list-item"; +export * from "./estimate-select"; +export * from "./estimates-list"; diff --git a/web/components/issues/issue-layouts/list/roots/archived-issue-root.tsx b/web/components/issues/issue-layouts/list/roots/archived-issue-root.tsx index 0d1fce245..d991049ac 100644 --- a/web/components/issues/issue-layouts/list/roots/archived-issue-root.tsx +++ b/web/components/issues/issue-layouts/list/roots/archived-issue-root.tsx @@ -22,6 +22,7 @@ export const ArchivedIssueListLayout: FC = observer(() => { projectLabel: { projectLabels }, projectMember: { projectMembers }, projectState: projectStateStore, + projectEstimates: { projectEstimates }, archivedIssues: archivedIssueStore, archivedIssueFilters: archivedIssueFiltersStore, } = useMobxStore(); @@ -48,9 +49,7 @@ export const ArchivedIssueListLayout: FC = observer(() => { const stateGroups = ISSUE_STATE_GROUPS || null; const projects = workspaceSlug ? projectStore?.projects[workspaceSlug.toString()] || null : null; const estimates = - projectDetails?.estimate !== null - ? projectStore.projectEstimates?.find((e) => e.id === projectDetails?.estimate) || null - : null; + projectDetails?.estimate !== null ? projectEstimates?.find((e) => e.id === projectDetails?.estimate) || null : null; return (
diff --git a/web/components/issues/issue-layouts/list/roots/cycle-root.tsx b/web/components/issues/issue-layouts/list/roots/cycle-root.tsx index b92a57fa8..c47b3ceb8 100644 --- a/web/components/issues/issue-layouts/list/roots/cycle-root.tsx +++ b/web/components/issues/issue-layouts/list/roots/cycle-root.tsx @@ -24,6 +24,7 @@ export const CycleListLayout: React.FC = observer(() => { projectLabel: { projectLabels }, projectMember: { projectMembers }, projectState: projectStateStore, + projectEstimates: { projectEstimates }, issueFilter: issueFilterStore, cycleIssue: cycleIssueStore, issueDetail: issueDetailStore, @@ -64,7 +65,7 @@ export const CycleListLayout: React.FC = observer(() => { const projects = workspaceSlug ? projectStore?.projects[workspaceSlug.toString()] || null : null; const estimates = currentProjectDetails?.estimate !== null - ? projectStore.projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null + ? projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null : null; return ( diff --git a/web/components/issues/issue-layouts/list/roots/module-root.tsx b/web/components/issues/issue-layouts/list/roots/module-root.tsx index 7fa1f4718..e27379df2 100644 --- a/web/components/issues/issue-layouts/list/roots/module-root.tsx +++ b/web/components/issues/issue-layouts/list/roots/module-root.tsx @@ -24,6 +24,7 @@ export const ModuleListLayout: React.FC = observer(() => { projectLabel: { projectLabels }, projectMember: { projectMembers }, projectState: projectStateStore, + projectEstimates: { projectEstimates }, issueFilter: issueFilterStore, moduleIssue: moduleIssueStore, issueDetail: issueDetailStore, @@ -64,7 +65,7 @@ export const ModuleListLayout: React.FC = observer(() => { const projects = workspaceSlug ? projectStore?.projects[workspaceSlug.toString()] || null : null; const estimates = currentProjectDetails?.estimate !== null - ? 
projectStore.projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null + ? projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null : null; return ( diff --git a/web/components/issues/issue-layouts/list/roots/project-root.tsx b/web/components/issues/issue-layouts/list/roots/project-root.tsx index cc78145f0..91a04f57d 100644 --- a/web/components/issues/issue-layouts/list/roots/project-root.tsx +++ b/web/components/issues/issue-layouts/list/roots/project-root.tsx @@ -23,6 +23,7 @@ export const ListLayout: FC = observer(() => { projectLabel: { projectLabels }, projectMember: { projectMembers }, projectState: projectStateStore, + projectEstimates: { projectEstimates }, issue: issueStore, issueDetail: issueDetailStore, issueFilter: issueFilterStore, @@ -54,7 +55,7 @@ export const ListLayout: FC = observer(() => { const projects = workspaceSlug ? projectStore?.projects[workspaceSlug.toString()] || null : null; const estimates = currentProjectDetails?.estimate !== null - ? projectStore.projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null + ? projectEstimates?.find((e) => e.id === currentProjectDetails?.estimate) || null : null; return ( diff --git a/web/components/issues/issue-layouts/properties/estimates.tsx b/web/components/issues/issue-layouts/properties/estimates.tsx index 432a39f4d..3c7179f2e 100644 --- a/web/components/issues/issue-layouts/properties/estimates.tsx +++ b/web/components/issues/issue-layouts/properties/estimates.tsx @@ -52,11 +52,14 @@ export const IssuePropertyEstimates: React.FC = observe ], }); - const { project: projectStore } = useMobxStore(); + const { + project: { project_details }, + projectEstimates: { projectEstimates }, + } = useMobxStore(); - const projectDetails = projectId ? projectStore.project_details[projectId] : null; + const projectDetails = projectId ? 
project_details[projectId] : null; const isEstimateEnabled = projectDetails?.estimate !== null; - const estimates = projectId ? projectStore.estimates?.[projectId] : null; + const estimates = projectEstimates; const estimatePoints = projectDetails && isEstimateEnabled ? estimates?.find((e) => e.id === projectDetails.estimate)?.points : null; diff --git a/web/layouts/auth-layout/project-wrapper.tsx b/web/layouts/auth-layout/project-wrapper.tsx index 8e7d27f19..845f86900 100644 --- a/web/layouts/auth-layout/project-wrapper.tsx +++ b/web/layouts/auth-layout/project-wrapper.tsx @@ -20,10 +20,11 @@ export const ProjectAuthWrapper: FC = observer((props) => { // store const { user: { fetchUserProjectInfo, projectMemberInfo, hasPermissionToProject }, - project: { fetchProjectDetails, fetchProjectEstimates, workspaceProjects }, + project: { fetchProjectDetails, workspaceProjects }, projectLabel: { fetchProjectLabels }, projectMember: { fetchProjectMembers }, projectState: { fetchProjectStates }, + projectEstimates: { fetchProjectEstimates }, cycle: { fetchCycles }, module: { fetchModules }, projectViews: { fetchAllViews }, diff --git a/web/pages/[workspaceSlug]/projects/[projectId]/settings/estimates.tsx b/web/pages/[workspaceSlug]/projects/[projectId]/settings/estimates.tsx index 152bbc522..783243c4a 100644 --- a/web/pages/[workspaceSlug]/projects/[projectId]/settings/estimates.tsx +++ b/web/pages/[workspaceSlug]/projects/[projectId]/settings/estimates.tsx @@ -4,7 +4,7 @@ import { AppLayout } from "layouts/app-layout"; import { ProjectSettingLayout } from "layouts/settings-layout"; // components import { ProjectSettingHeader } from "components/headers"; -import { EstimatesList } from "components/estimates/estimate-list"; +import { EstimatesList } from "components/estimates"; // types import { NextPageWithLayout } from "types/app"; diff --git a/web/store/project/project-estimates.store.ts b/web/store/project/project-estimates.store.ts index f634d3fd9..b7e31af1f 100644 --- 
a/web/store/project/project-estimates.store.ts +++ b/web/store/project/project-estimates.store.ts @@ -1,4 +1,4 @@ -import { observable, action, makeObservable, runInAction } from "mobx"; +import { observable, action, makeObservable, runInAction, computed } from "mobx"; // types import { RootStore } from "../root"; import { IEstimate, IEstimateFormData } from "types"; @@ -9,7 +9,14 @@ export interface IProjectEstimateStore { loader: boolean; error: any | null; - // estimates + // observables + estimates: { + [projectId: string]: IEstimate[] | null; // project_id: members + } | null; + + // actions + getProjectEstimateById: (estimateId: string) => IEstimate | null; + fetchProjectEstimates: (workspaceSlug: string, projectId: string) => Promise; createEstimate: (workspaceSlug: string, projectId: string, data: IEstimateFormData) => Promise; updateEstimate: ( workspaceSlug: string, @@ -18,14 +25,23 @@ export interface IProjectEstimateStore { data: IEstimateFormData ) => Promise; deleteEstimate: (workspaceSlug: string, projectId: string, estimateId: string) => Promise; + + // computed + projectEstimates: IEstimate[] | undefined; } export class ProjectEstimatesStore implements IProjectEstimateStore { loader: boolean = false; error: any | null = null; + // observables + estimates: { + [projectId: string]: IEstimate[]; // projectId: estimates + } | null = {}; + // root store rootStore; + // service projectService; estimateService; @@ -36,10 +52,17 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { loader: observable, error: observable, - // estimates + estimates: observable.ref, + + // actions + getProjectEstimateById: action, + fetchProjectEstimates: action, createEstimate: action, updateEstimate: action, deleteEstimate: action, + + // computed + projectEstimates: computed, }); this.rootStore = _rootStore; @@ -47,6 +70,43 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { this.estimateService = new ProjectEstimateService(); } + 
get projectEstimates() { + const projectId = this.rootStore.project.projectId; + + if (!projectId) return undefined; + return this.estimates?.[projectId] || undefined; + } + + getProjectEstimateById = (estimateId: string) => { + const estimates = this.projectEstimates; + if (!estimates) return null; + const estimateInfo: IEstimate | null = estimates.find((estimate) => estimate.id === estimateId) || null; + return estimateInfo; + }; + + fetchProjectEstimates = async (workspaceSlug: string, projectId: string) => { + try { + this.loader = true; + this.error = null; + + const estimatesResponse = await this.estimateService.getEstimatesList(workspaceSlug, projectId); + const _estimates = { + ...this.estimates, + [projectId]: estimatesResponse, + }; + + runInAction(() => { + this.estimates = _estimates; + this.loader = false; + this.error = null; + }); + } catch (error) { + console.error(error); + this.loader = false; + this.error = error; + } + }; + createEstimate = async (workspaceSlug: string, projectId: string, data: IEstimateFormData) => { try { const response = await this.estimateService.createEstimate(workspaceSlug, projectId, data); @@ -57,9 +117,9 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { }; runInAction(() => { - this.rootStore.project.estimates = { - ...this.rootStore.project.estimates, - [projectId]: [responseEstimate, ...(this.rootStore.project.estimates?.[projectId] || [])], + this.estimates = { + ...this.estimates, + [projectId]: [responseEstimate, ...(this.estimates?.[projectId] || [])], }; }); @@ -71,12 +131,12 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { }; updateEstimate = async (workspaceSlug: string, projectId: string, estimateId: string, data: IEstimateFormData) => { - const originalEstimates = this.rootStore.project.getProjectEstimateById(estimateId); + const originalEstimates = this.getProjectEstimateById(estimateId); runInAction(() => { - this.rootStore.project.estimates = { - 
...this.rootStore.project.estimates, - [projectId]: (this.rootStore.project.estimates?.[projectId] || [])?.map((estimate) => + this.estimates = { + ...this.estimates, + [projectId]: (this.estimates?.[projectId] || [])?.map((estimate) => estimate.id === estimateId ? { ...estimate, ...data.estimate } : estimate ), }; @@ -84,15 +144,15 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { try { const response = await this.estimateService.patchEstimate(workspaceSlug, projectId, estimateId, data); - await this.rootStore.project.fetchProjectEstimates(workspaceSlug, projectId); + await this.fetchProjectEstimates(workspaceSlug, projectId); return response; } catch (error) { console.log("Failed to update estimate from project store"); runInAction(() => { - this.rootStore.project.estimates = { - ...this.rootStore.project.estimates, - [projectId]: (this.rootStore.project.estimates?.[projectId] || [])?.map((estimate) => + this.estimates = { + ...this.estimates, + [projectId]: (this.estimates?.[projectId] || [])?.map((estimate) => estimate.id === estimateId ? 
{ ...estimate, ...originalEstimates } : estimate ), }; @@ -102,14 +162,12 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { }; deleteEstimate = async (workspaceSlug: string, projectId: string, estimateId: string) => { - const originalEstimateList = this.rootStore.project.projectEstimates || []; + const originalEstimateList = this.projectEstimates || []; runInAction(() => { - this.rootStore.project.estimates = { - ...this.rootStore.project.estimates, - [projectId]: (this.rootStore.project.estimates?.[projectId] || [])?.filter( - (estimate) => estimate.id !== estimateId - ), + this.estimates = { + ...this.estimates, + [projectId]: (this.estimates?.[projectId] || [])?.filter((estimate) => estimate.id !== estimateId), }; }); @@ -120,8 +178,8 @@ export class ProjectEstimatesStore implements IProjectEstimateStore { console.log("Failed to delete estimate from project store"); // reverting back to original estimate list runInAction(() => { - this.rootStore.project.estimates = { - ...this.rootStore.project.estimates, + this.estimates = { + ...this.estimates, [projectId]: originalEstimateList, }; }); diff --git a/web/store/project/project.store.ts b/web/store/project/project.store.ts index 3216a5e74..1a4d0bcfe 100644 --- a/web/store/project/project.store.ts +++ b/web/store/project/project.store.ts @@ -1,9 +1,9 @@ import { observable, action, computed, makeObservable, runInAction } from "mobx"; // types import { RootStore } from "../root"; -import { IProject, IEstimate } from "types"; +import { IProject } from "types"; // services -import { ProjectService, ProjectStateService, ProjectEstimateService } from "services/project"; +import { ProjectService, ProjectStateService } from "services/project"; import { IssueService, IssueLabelService } from "services/issue"; export interface IProjectStore { @@ -16,14 +16,10 @@ export interface IProjectStore { project_details: { [projectId: string]: IProject; // projectId: project Info }; - estimates: { - [projectId: 
string]: IEstimate[] | null; // project_id: members - } | null; // computed searchedProjects: IProject[]; workspaceProjects: IProject[] | null; - projectEstimates: IEstimate[] | null; joinedProjects: IProject[]; favoriteProjects: IProject[]; currentProjectDetails: IProject | undefined; @@ -34,10 +30,8 @@ export interface IProjectStore { getProjectById: (workspaceSlug: string, projectId: string) => IProject | null; - getProjectEstimateById: (estimateId: string) => IEstimate | null; fetchProjects: (workspaceSlug: string) => Promise; fetchProjectDetails: (workspaceSlug: string, projectId: string) => Promise; - fetchProjectEstimates: (workspaceSlug: string, projectId: string) => Promise; addProjectToFavorites: (workspaceSlug: string, projectId: string) => Promise; removeProjectFromFavorites: (workspaceSlug: string, projectId: string) => Promise; @@ -62,9 +56,6 @@ export class ProjectStore implements IProjectStore { project_details: { [projectId: string]: IProject; // projectId: project } = {}; - estimates: { - [projectId: string]: IEstimate[]; // projectId: estimates - } | null = {}; // root store rootStore; @@ -73,7 +64,6 @@ export class ProjectStore implements IProjectStore { issueLabelService; issueService; stateService; - estimateService; constructor(_rootStore: RootStore) { makeObservable(this, { @@ -86,14 +76,10 @@ export class ProjectStore implements IProjectStore { projects: observable.ref, project_details: observable.ref, - estimates: observable.ref, - // computed searchedProjects: computed, workspaceProjects: computed, - projectEstimates: computed, - currentProjectDetails: computed, joinedProjects: computed, @@ -106,9 +92,6 @@ export class ProjectStore implements IProjectStore { fetchProjectDetails: action, getProjectById: action, - getProjectEstimateById: action, - - fetchProjectEstimates: action, addProjectToFavorites: action, removeProjectFromFavorites: action, @@ -125,7 +108,6 @@ export class ProjectStore implements IProjectStore { this.issueService = new 
IssueService(); this.issueLabelService = new IssueLabelService(); this.stateService = new ProjectStateService(); - this.estimateService = new ProjectEstimateService(); } get searchedProjects() { @@ -164,11 +146,6 @@ export class ProjectStore implements IProjectStore { return this.projects?.[this.rootStore.workspace.workspaceSlug]?.filter((p) => p.is_favorite); } - get projectEstimates() { - if (!this.projectId) return null; - return this.estimates?.[this.projectId] || null; - } - // actions setProjectId = (projectId: string | null) => { this.projectId = projectId; @@ -223,37 +200,6 @@ export class ProjectStore implements IProjectStore { return projectInfo; }; - getProjectEstimateById = (estimateId: string) => { - if (!this.projectId) return null; - const estimates = this.projectEstimates; - if (!estimates) return null; - const estimateInfo: IEstimate | null = estimates.find((estimate) => estimate.id === estimateId) || null; - return estimateInfo; - }; - - fetchProjectEstimates = async (workspaceSlug: string, projectId: string) => { - try { - this.loader = true; - this.error = null; - - const estimatesResponse = await this.estimateService.getEstimatesList(workspaceSlug, projectId); - const _estimates = { - ...this.estimates, - [projectId]: estimatesResponse, - }; - - runInAction(() => { - this.estimates = _estimates; - this.loader = false; - this.error = null; - }); - } catch (error) { - console.error(error); - this.loader = false; - this.error = error; - } - }; - addProjectToFavorites = async (workspaceSlug: string, projectId: string) => { try { runInAction(() => {