forked from github/plane
dev: external apis (#2806)
* dev: new proxy api setup
* dev: updated endpoints with serializers and structure
* dev: external apis for cycles, modules and inbox issue
* dev: order by for all the apis
* dev: enable webhooks for external apis
* dev: fields and expand for the apis
* dev: move authentication to proxy middleware
* dev: fix imports
* dev: api serializer updates and paginator
* dev: renamed api to app
* dev: renamed proxy to api
* dev: validation for project, issues, modules and cycles
* dev: remove favourites from project apis
* dev: states api
* dev: rewrite the url endpoints
* dev: exception handling for the apis
* dev: merge updated structure
* dev: remove attachment apis
* dev: issue activities endpoints
parent 668dfd2e38
commit 9a704458b3
@@ -2,4 +2,4 @@ from django.apps import AppConfig


 class ApiConfig(AppConfig):
-    name = "plane.authentication"
+    name = "plane.api"
47 apiserver/plane/api/middleware/api_authentication.py Normal file
@@ -0,0 +1,47 @@
# Django imports
from django.utils import timezone
from django.db.models import Q

# Third party imports
from rest_framework import authentication
from rest_framework.exceptions import AuthenticationFailed

# Module imports
from plane.db.models import APIToken


class APIKeyAuthentication(authentication.BaseAuthentication):
    """
    Authentication with an API Key
    """

    www_authenticate_realm = "api"
    media_type = "application/json"
    auth_header_name = "X-Api-Key"

    def get_api_token(self, request):
        return request.headers.get(self.auth_header_name)

    def validate_api_token(self, token):
        try:
            api_token = APIToken.objects.get(
                Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
                token=token,
                is_active=True,
            )
        except APIToken.DoesNotExist:
            raise AuthenticationFailed("Given API token is not valid")

        # save api token last used
        api_token.last_used = timezone.now()
        api_token.save(update_fields=["last_used"])
        return (api_token.user, api_token.token)

    def authenticate(self, request):
        token = self.get_api_token(request=request)
        if not token:
            return None

        # Validate the API token
        user, token = self.validate_api_token(token)
        return user, token
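Note: the middleware above authenticates purely on the X-Api-Key header. As a quick smoke test, a request can be issued like the sketch below; the base URL prefix, workspace slug, project id and token are placeholders (the mount point of these routes is not shown in this diff), so treat it as an assumption rather than the documented API surface.

import requests

BASE_URL = "https://plane.example.com/api/v1"  # assumed mount point for the external API; adjust per deployment
headers = {"X-Api-Key": "plane_api_xxxxxxxx"}  # placeholder token issued from an APIToken row

# List issues of a project (path shape taken from the URL patterns later in this diff)
resp = requests.get(
    f"{BASE_URL}/workspaces/my-workspace/projects/<project_id>/issues/",
    headers=headers,
)
print(resp.status_code, resp.json())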
45 apiserver/plane/api/rate_limit.py Normal file
@@ -0,0 +1,45 @@
from django.utils import timezone
from rest_framework.throttling import SimpleRateThrottle


class ApiKeyRateThrottle(SimpleRateThrottle):
    scope = 'api_key'

    def get_cache_key(self, request, view):
        # Retrieve the API key from the request header
        api_key = request.headers.get('X-Api-Key')
        if not api_key:
            return None  # Allow the request if there's no API key

        # Use the API key as part of the cache key
        return f'{self.scope}:{api_key}'

    def allow_request(self, request, view):
        # Calculate the current time as a Unix timestamp
        now = timezone.now().timestamp()

        # Use the parent class's method to check if the request is allowed
        allowed = super().allow_request(request, view)

        if allowed:
            # Calculate the remaining limit and reset time
            history = self.cache.get(self.key, [])

            # Remove old histories
            while history and history[-1] <= now - self.duration:
                history.pop()

            # Calculate the requests
            num_requests = len(history)

            # Check available requests
            available = self.num_requests - num_requests

            # Unix timestamp for when the rate limit will reset
            reset_time = int(now + self.duration)

            # Add headers
            request.META['X-RateLimit-Remaining'] = max(0, available)
            request.META['X-RateLimit-Reset'] = reset_time

        return allowed
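The throttle declares scope = 'api_key' but the rate itself is not set in this file; SimpleRateThrottle looks it up in DRF's DEFAULT_THROTTLE_RATES. A settings sketch, with the 60/minute figure as an illustrative assumption rather than a value from this commit:

# settings.py sketch (rate value is an assumption, not part of this commit)
REST_FRAMEWORK = {
    "DEFAULT_THROTTLE_RATES": {
        "api_key": "60/minute",  # looked up by ApiKeyRateThrottle via its 'api_key' scope
    },
}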
16 apiserver/plane/api/serializers/__init__.py Normal file
@@ -0,0 +1,16 @@
from .user import UserLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .issue import (
    IssueSerializer,
    LabelSerializer,
    IssueLinkSerializer,
    IssueAttachmentSerializer,
    IssueCommentSerializer,
    IssueActivitySerializer,
)
from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer
from .module import ModuleSerializer, ModuleIssueSerializer
from .inbox import InboxIssueSerializer
105 apiserver/plane/api/serializers/base.py Normal file
@@ -0,0 +1,105 @@
# Third party imports
from rest_framework import serializers


class BaseSerializer(serializers.ModelSerializer):
    id = serializers.PrimaryKeyRelatedField(read_only=True)

    def __init__(self, *args, **kwargs):
        # If 'fields' is provided in the arguments, remove it and store it separately.
        # This is done so as not to pass this custom argument up to the superclass.
        fields = kwargs.pop("fields", [])
        self.expand = kwargs.pop("expand", []) or []

        # Call the initialization of the superclass.
        super().__init__(*args, **kwargs)

        # If 'fields' was provided, filter the fields of the serializer accordingly.
        if fields:
            self.fields = self._filter_fields(fields=fields)

    def _filter_fields(self, fields):
        """
        Adjust the serializer's fields based on the provided 'fields' list.

        :param fields: List or dictionary specifying which fields to include in the serializer.
        :return: The updated fields for the serializer.
        """
        # Check each field_name in the provided fields.
        for field_name in fields:
            # If the field is a dictionary (indicating nested fields),
            # loop through its keys and values.
            if isinstance(field_name, dict):
                for key, value in field_name.items():
                    # If the value of this nested field is a list,
                    # perform a recursive filter on it.
                    if isinstance(value, list):
                        self._filter_fields(self.fields[key], value)

        # Create a list to store allowed fields.
        allowed = []
        for item in fields:
            # If the item is a string, it directly represents a field's name.
            if isinstance(item, str):
                allowed.append(item)
            # If the item is a dictionary, it represents a nested field.
            # Add the key of this dictionary to the allowed list.
            elif isinstance(item, dict):
                allowed.append(list(item.keys())[0])

        # Convert the current serializer's fields and the allowed fields to sets.
        existing = set(self.fields)
        allowed = set(allowed)

        # Remove fields from the serializer that aren't in the 'allowed' list.
        for field_name in existing - allowed:
            self.fields.pop(field_name)

        return self.fields

    def to_representation(self, instance):
        response = super().to_representation(instance)

        # Ensure 'expand' is iterable before processing
        if self.expand:
            for expand in self.expand:
                if expand in self.fields:
                    # Import all the expandable serializers
                    from . import (
                        WorkspaceLiteSerializer,
                        ProjectLiteSerializer,
                        UserLiteSerializer,
                        StateLiteSerializer,
                        IssueSerializer,
                    )

                    # Expansion mapper
                    expansion = {
                        "user": UserLiteSerializer,
                        "workspace": WorkspaceLiteSerializer,
                        "project": ProjectLiteSerializer,
                        "default_assignee": UserLiteSerializer,
                        "project_lead": UserLiteSerializer,
                        "state": StateLiteSerializer,
                        "created_by": UserLiteSerializer,
                        "issue": IssueSerializer,
                        "actor": UserLiteSerializer,
                        "owned_by": UserLiteSerializer,
                        "members": UserLiteSerializer,
                    }
                    # Check if field in expansion then expand the field
                    if expand in expansion:
                        if isinstance(response.get(expand), list):
                            exp_serializer = expansion[expand](
                                getattr(instance, expand), many=True
                            )
                        else:
                            exp_serializer = expansion[expand](
                                getattr(instance, expand)
                            )
                        response[expand] = exp_serializer.data
                    else:
                        # You might need to handle this case differently
                        response[expand] = getattr(instance, f"{expand}_id", None)

        return response
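The fields/expand keyword arguments above are what the views later in this diff pass down from the ?fields= and ?expand= query parameters. A minimal sketch of the effect, assuming an existing Issue row; the chosen field names are illustrative:

from plane.db.models import Issue
from plane.api.serializers import IssueSerializer

issue = Issue.objects.first()  # assumes at least one issue exists
data = IssueSerializer(
    issue,
    fields=["id", "name", "state"],  # every other key is dropped from the payload
    expand=["state"],                # "state" becomes a nested StateLiteSerializer payload instead of an id
).data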
49 apiserver/plane/api/serializers/cycle.py Normal file
@@ -0,0 +1,49 @@
# Third party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from plane.db.models import Cycle, CycleIssue


class CycleSerializer(BaseSerializer):
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    total_estimates = serializers.IntegerField(read_only=True)
    completed_estimates = serializers.IntegerField(read_only=True)
    started_estimates = serializers.IntegerField(read_only=True)

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("end_date", None) is not None
            and data.get("start_date", None) > data.get("end_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data

    class Meta:
        model = Cycle
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "owned_by",
        ]


class CycleIssueSerializer(BaseSerializer):
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = CycleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "cycle",
        ]
13 apiserver/plane/api/serializers/inbox.py Normal file
@@ -0,0 +1,13 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import InboxIssue


class InboxIssueSerializer(BaseSerializer):

    class Meta:
        model = InboxIssue
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
        ]
319 apiserver/plane/api/serializers/issue.py Normal file
@@ -0,0 +1,319 @@
|
||||
# Django imports
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import (
|
||||
User,
|
||||
Issue,
|
||||
State,
|
||||
IssueAssignee,
|
||||
Label,
|
||||
IssueLabel,
|
||||
IssueLink,
|
||||
IssueComment,
|
||||
IssueAttachment,
|
||||
IssueActivity,
|
||||
ProjectMember,
|
||||
)
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class IssueSerializer(BaseSerializer):
|
||||
assignees = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=User.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
labels = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=Label.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("target_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("target_date", None)
|
||||
):
|
||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||
|
||||
# Validate assignees are from project
|
||||
if data.get("assignees", []):
|
||||
|
||||
data["assignees"] = ProjectMember.objects.filter(
|
||||
project_id=self.context.get("project_id"),
|
||||
member_id__in=data["assignees"],
|
||||
).values_list("member_id", flat=True)
|
||||
|
||||
# Validate labels are from project
|
||||
if data.get("labels", []):
|
||||
data["labels"] = Label.objects.filter(
|
||||
project_id=self.context.get("project_id"),
|
||||
id__in=data["labels"],
|
||||
).values_list("id", flat=True)
|
||||
|
||||
# Check state is from the project only else raise validation error
|
||||
if (
|
||||
data.get("state")
|
||||
and not State.objects.filter(
|
||||
project_id=self.context.get("project_id"), pk=data.get("state")
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"State is not valid please pass a valid state_id"
|
||||
)
|
||||
|
||||
# Check parent issue is from workspace as it can be cross workspace
|
||||
if (
|
||||
data.get("parent")
|
||||
and not Issue.objects.filter(
|
||||
workspace_id=self.context.get("workspace_id"), pk=data.get("parent")
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Parent is not valid issue_id please pass a valid issue_id"
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
assignees = validated_data.pop("assignees", None)
|
||||
labels = validated_data.pop("labels", None)
|
||||
|
||||
project_id = self.context["project_id"]
|
||||
workspace_id = self.context["workspace_id"]
|
||||
default_assignee_id = self.context["default_assignee_id"]
|
||||
|
||||
issue = Issue.objects.create(**validated_data, project_id=project_id)
|
||||
|
||||
# Issue Audit Users
|
||||
created_by_id = issue.created_by_id
|
||||
updated_by_id = issue.updated_by_id
|
||||
|
||||
if assignees is not None and len(assignees):
|
||||
IssueAssignee.objects.bulk_create(
|
||||
[
|
||||
IssueAssignee(
|
||||
assignee_id=assignee_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for assignee_id in assignees
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
else:
|
||||
# Then assign it to default assignee
|
||||
if default_assignee_id is not None:
|
||||
IssueAssignee.objects.create(
|
||||
assignee_id=default_assignee_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
|
||||
if labels is not None and len(labels):
|
||||
IssueLabel.objects.bulk_create(
|
||||
[
|
||||
IssueLabel(
|
||||
label_id=label_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label_id in labels
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
return issue
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
assignees = validated_data.pop("assignees", None)
|
||||
labels = validated_data.pop("labels", None)
|
||||
|
||||
# Related models
|
||||
project_id = instance.project_id
|
||||
workspace_id = instance.workspace_id
|
||||
created_by_id = instance.created_by_id
|
||||
updated_by_id = instance.updated_by_id
|
||||
|
||||
if assignees is not None:
|
||||
IssueAssignee.objects.filter(issue=instance).delete()
|
||||
IssueAssignee.objects.bulk_create(
|
||||
[
|
||||
IssueAssignee(
|
||||
assignee_id=assignee_id,
|
||||
issue=instance,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for assignee_id in assignees
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
if labels is not None:
|
||||
IssueLabel.objects.filter(issue=instance).delete()
|
||||
IssueLabel.objects.bulk_create(
|
||||
[
|
||||
IssueLabel(
|
||||
label_id=label_id,
|
||||
issue=instance,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label_id in labels
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Update the timestamp even when only related models are updated
|
||||
instance.updated_at = timezone.now()
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
if "assignees" in self.fields:
|
||||
if "assignees" in self.expand:
|
||||
from .user import UserLiteSerializer
|
||||
|
||||
data["assignees"] = UserLiteSerializer(
|
||||
instance.assignees.all(), many=True
|
||||
).data
|
||||
else:
|
||||
data["assignees"] = [
|
||||
str(assignee.id) for assignee in instance.assignees.all()
|
||||
]
|
||||
if "labels" in self.fields:
|
||||
if "labels" in self.expand:
|
||||
data["labels"] = LabelSerializer(instance.labels.all(), many=True).data
|
||||
else:
|
||||
data["labels"] = [str(label.id) for label in instance.labels.all()]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class LabelSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Label
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
]
|
||||
|
||||
|
||||
class IssueLinkSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueLink
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"issue",
|
||||
]
|
||||
|
||||
# Validation if url already exists
|
||||
def create(self, validated_data):
|
||||
if IssueLink.objects.filter(
|
||||
url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
|
||||
).exists():
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
return IssueLink.objects.create(**validated_data)
|
||||
|
||||
|
||||
class IssueAttachmentSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueAttachment
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
]
|
||||
|
||||
|
||||
class IssueCommentSerializer(BaseSerializer):
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = IssueComment
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueAttachmentSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueAttachment
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
]
|
||||
|
||||
|
||||
class IssueActivitySerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueActivity
|
||||
fields = "__all__"
|
||||
exclude = [
|
||||
"created_by",
|
||||
"udpated_by",
|
||||
]
|
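For reference, a hedged sketch of the request body these issue serializers expect when creating an issue over HTTP; the URL prefix, ids and token are placeholders, and validate() narrows assignees/labels/state to rows that belong to the target project:

import requests

resp = requests.post(
    "https://plane.example.com/api/v1/workspaces/my-workspace/projects/<project_id>/issues/",  # assumed prefix
    headers={"X-Api-Key": "plane_api_xxxxxxxx"},  # placeholder token
    json={
        "name": "Set up external API access",
        "state": "<state_id>",         # must be a state of the project, see validate()
        "assignees": ["<member_id>"],  # filtered down to ProjectMember ids
        "labels": ["<label_id>"],      # filtered down to the project's labels
    },
)
print(resp.status_code, resp.json())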
155 apiserver/plane/api/serializers/module.py Normal file
@@ -0,0 +1,155 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import (
|
||||
User,
|
||||
Module,
|
||||
ModuleLink,
|
||||
ModuleMember,
|
||||
ModuleIssue,
|
||||
ProjectMember,
|
||||
)
|
||||
|
||||
|
||||
class ModuleSerializer(BaseSerializer):
|
||||
members = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=User.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
completed_issues = serializers.IntegerField(read_only=True)
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
data["members"] = [str(member.id) for member in instance.members.all()]
|
||||
return data
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("target_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("target_date", None)
|
||||
):
|
||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||
|
||||
if data.get("members", []):
|
||||
|
||||
data["members"] = ProjectMember.objects.filter(
|
||||
project_id=self.context.get("project_id"),
|
||||
member_id__in=data["members"],
|
||||
).values_list("member_id", flat=True)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
|
||||
project = self.context["project"]
|
||||
|
||||
module = Module.objects.create(**validated_data, project=project)
|
||||
|
||||
if members is not None:
|
||||
ModuleMember.objects.bulk_create(
|
||||
[
|
||||
ModuleMember(
|
||||
module=module,
|
||||
member=member,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=module.created_by,
|
||||
updated_by=module.updated_by,
|
||||
)
|
||||
for member in members
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
return module
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
|
||||
if members is not None:
|
||||
ModuleMember.objects.filter(module=instance).delete()
|
||||
ModuleMember.objects.bulk_create(
|
||||
[
|
||||
ModuleMember(
|
||||
module=instance,
|
||||
member=member,
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
)
|
||||
for member in members
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class ModuleIssueSerializer(BaseSerializer):
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ModuleIssue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"module",
|
||||
]
|
||||
|
||||
|
||||
class ModuleLinkSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = ModuleLink
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"module",
|
||||
]
|
||||
|
||||
# Validation if url already exists
|
||||
def create(self, validated_data):
|
||||
if ModuleLink.objects.filter(
|
||||
url=validated_data.get("url"), module_id=validated_data.get("module_id")
|
||||
).exists():
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
return ModuleLink.objects.create(**validated_data)
|
87 apiserver/plane/api/serializers/project.py Normal file
@@ -0,0 +1,87 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class ProjectSerializer(BaseSerializer):
|
||||
|
||||
total_members = serializers.IntegerField(read_only=True)
|
||||
total_cycles = serializers.IntegerField(read_only=True)
|
||||
total_modules = serializers.IntegerField(read_only=True)
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
sort_order = serializers.FloatField(read_only=True)
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
is_deployed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"id",
|
||||
]
|
||||
|
||||
def validate(self, data):
|
||||
# Check project lead should be a member of the workspace
|
||||
if (
|
||||
data.get("project_lead", None) is not None
|
||||
and not WorkspaceMember.objects.filter(
|
||||
workspace_id=self.context["workspace_id"],
|
||||
member_id=data.get("project_lead"),
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Project lead should be a user in the workspace"
|
||||
)
|
||||
|
||||
# Check default assignee should be a member of the workspace
|
||||
if (
|
||||
data.get("default_assignee", None) is not None
|
||||
and not WorkspaceMember.objects.filter(
|
||||
workspace_id=self.context["workspace_id"],
|
||||
member_id=data.get("default_assignee"),
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Default assignee should be a user in the workspace"
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
identifier = validated_data.get("identifier", "").strip().upper()
|
||||
if identifier == "":
|
||||
raise serializers.ValidationError(detail="Project Identifier is required")
|
||||
|
||||
if ProjectIdentifier.objects.filter(
|
||||
name=identifier, workspace_id=self.context["workspace_id"]
|
||||
).exists():
|
||||
raise serializers.ValidationError(detail="Project Identifier is taken")
|
||||
|
||||
project = Project.objects.create(
|
||||
**validated_data, workspace_id=self.context["workspace_id"]
|
||||
)
|
||||
_ = ProjectIdentifier.objects.create(
|
||||
name=project.identifier,
|
||||
project=project,
|
||||
workspace_id=self.context["workspace_id"],
|
||||
)
|
||||
return project
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = [
|
||||
"id",
|
||||
"identifier",
|
||||
"name",
|
||||
"cover_image",
|
||||
"icon_prop",
|
||||
"emoji",
|
||||
"description",
|
||||
]
|
||||
read_only_fields = fields
|
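A short sketch of driving ProjectSerializer directly, assuming a workspace_id is supplied in the serializer context the same way the project view would; the ids are placeholders:

from plane.api.serializers import ProjectSerializer

serializer = ProjectSerializer(
    data={"name": "Docs", "identifier": "docs"},  # identifier is stripped/uppercased for the uniqueness check
    context={"workspace_id": "<workspace_id>"},   # placeholder id
)
serializer.is_valid(raise_exception=True)
project = serializer.save()  # create() also writes the matching ProjectIdentifier row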
33 apiserver/plane/api/serializers/state.py Normal file
@@ -0,0 +1,33 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import State


class StateSerializer(BaseSerializer):
    def validate(self, data):
        # If the default is being provided then make all other states default False
        if data.get("default", False):
            State.objects.filter(project_id=self.context.get("project_id")).update(
                default=False
            )
        return data

    class Meta:
        model = State
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
        ]


class StateLiteSerializer(BaseSerializer):
    class Meta:
        model = State
        fields = [
            "id",
            "name",
            "color",
            "group",
        ]
        read_only_fields = fields
20 apiserver/plane/api/serializers/user.py Normal file
@@ -0,0 +1,20 @@
# Module imports
from plane.db.models import User
from .base import BaseSerializer


class UserLiteSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = [
            "id",
            "first_name",
            "last_name",
            "avatar",
            "is_bot",
            "display_name",
        ]
        read_only_fields = [
            "id",
            "is_bot",
        ]
15 apiserver/plane/api/serializers/workspace.py Normal file
@@ -0,0 +1,15 @@
# Module imports
from plane.db.models import Workspace
from .base import BaseSerializer


class WorkspaceLiteSerializer(BaseSerializer):
    """Lite serializer with only required fields"""

    class Meta:
        model = Workspace
        fields = [
            "name",
            "slug",
            "id",
        ]
        read_only_fields = fields
@@ -1,13 +1,15 @@
-from .cycle import urlpatterns as cycle_patterns
-from .inbox import urlpatterns as inbox_patterns
-from .issue import urlpatterns as issue_patterns
-from .module import urlpatterns as module_patterns
 from .project import urlpatterns as project_patterns
+from .state import urlpatterns as state_patterns
+from .issue import urlpatterns as issue_patterns
+from .cycle import urlpatterns as cycle_patterns
+from .module import urlpatterns as module_patterns
+from .inbox import urlpatterns as inbox_patterns

 urlpatterns = [
-    *cycle_patterns,
-    *inbox_patterns,
-    *issue_patterns,
-    *module_patterns,
     *project_patterns,
-]
+    *state_patterns,
+    *issue_patterns,
+    *cycle_patterns,
+    *module_patterns,
+    *inbox_patterns,
+]
@@ -1,6 +1,6 @@
 from django.urls import path

-from plane.proxy.views.cycle import (
+from plane.api.views.cycle import (
     CycleAPIEndpoint,
     CycleIssueAPIEndpoint,
     TransferCycleIssueAPIEndpoint,
@@ -32,4 +32,4 @@ urlpatterns = [
         TransferCycleIssueAPIEndpoint.as_view(),
         name="transfer-issues",
     ),
 ]
@@ -1,6 +1,6 @@
 from django.urls import path

-from plane.proxy.views import InboxIssueAPIEndpoint
+from plane.api.views import InboxIssueAPIEndpoint


 urlpatterns = [
@@ -14,4 +14,4 @@ urlpatterns = [
         InboxIssueAPIEndpoint.as_view(),
         name="inbox-issue",
     ),
 ]
@@ -1,51 +1,62 @@
 from django.urls import path

-from plane.proxy.views import (
+from plane.api.views import (
     IssueAPIEndpoint,
     LabelAPIEndpoint,
     IssueLinkAPIEndpoint,
     IssueCommentAPIEndpoint,
+    IssueActivityAPIEndpoint,
 )

 urlpatterns = [
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
         IssueAPIEndpoint.as_view(),
-        name="issues",
+        name="issue",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/",
         IssueAPIEndpoint.as_view(),
-        name="issues",
+        name="issue",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
         LabelAPIEndpoint.as_view(),
-        name="labels",
+        name="label",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
         LabelAPIEndpoint.as_view(),
-        name="labels",
+        name="label",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
         IssueLinkAPIEndpoint.as_view(),
-        name="issue-links",
+        name="link",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
         IssueLinkAPIEndpoint.as_view(),
-        name="issue-links",
+        name="link",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
-        IssueCommentAPIEndpoint.as_view(),
-        name="project-issue-comment",
+        IssueCommentAPIEndpoint.as_view(),
+        name="comment",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
-        IssueCommentAPIEndpoint.as_view(),
-        name="project-issue-comment",
+        IssueCommentAPIEndpoint.as_view(),
+        name="comment",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activites/",
+        IssueActivityAPIEndpoint.as_view(),
+        name="activity",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activites/<uuid:pk>/",
+        IssueActivityAPIEndpoint.as_view(),
+        name="activity",
+    ),
 ]
@@ -1,6 +1,6 @@
 from django.urls import path

-from plane.proxy.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint

 urlpatterns = [
     path(
@@ -23,4 +23,4 @@ urlpatterns = [
         ModuleIssueAPIEndpoint.as_view(),
         name="module-issues",
     ),
 ]
@@ -1,6 +1,6 @@
 from django.urls import path

-from plane.proxy.views import ProjectAPIEndpoint
+from plane.api.views import ProjectAPIEndpoint

 urlpatterns = [
     path(
@@ -13,4 +13,4 @@ urlpatterns = [
         ProjectAPIEndpoint.as_view(),
         name="project",
     ),
 ]
11 apiserver/plane/api/urls/state.py Normal file
@@ -0,0 +1,11 @@
from django.urls import path

from plane.api.views import StateAPIEndpoint

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/",
        StateAPIEndpoint.as_view(),
        name="states",
    ),
]
@@ -1,10 +1,13 @@
 from .project import ProjectAPIEndpoint

+from .state import StateAPIEndpoint
+
 from .issue import (
     IssueAPIEndpoint,
     LabelAPIEndpoint,
     IssueLinkAPIEndpoint,
     IssueCommentAPIEndpoint,
+    IssueActivityAPIEndpoint,
 )

 from .cycle import (
172 apiserver/plane/api/views/base.py Normal file
@@ -0,0 +1,172 @@
|
||||
# Python imports
|
||||
import zoneinfo
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.conf import settings
|
||||
from django.db import IntegrityError
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.utils import timezone
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework import status
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
from plane.api.middleware.api_authentication import APIKeyAuthentication
|
||||
from plane.api.rate_limit import ApiKeyRateThrottle
|
||||
from plane.utils.paginator import BasePaginator
|
||||
from plane.bgtasks.webhook_task import send_webhook
|
||||
|
||||
|
||||
class TimezoneMixin:
|
||||
"""
|
||||
This enables timezone conversion according
|
||||
to the user set timezone
|
||||
"""
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
super().initial(request, *args, **kwargs)
|
||||
if request.user.is_authenticated:
|
||||
timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone))
|
||||
else:
|
||||
timezone.deactivate()
|
||||
|
||||
class WebhookMixin:
|
||||
webhook_event = None
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
if (
|
||||
self.webhook_event
|
||||
and self.request.method in ["POST", "PATCH", "DELETE"]
|
||||
and response.status_code in [200, 201, 204]
|
||||
):
|
||||
send_webhook.delay(
|
||||
event=self.webhook_event,
|
||||
event_data=json.dumps(response.data, cls=DjangoJSONEncoder),
|
||||
action=self.request.method,
|
||||
slug=self.workspace_slug,
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
|
||||
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
||||
authentication_classes = [
|
||||
APIKeyAuthentication,
|
||||
]
|
||||
|
||||
permission_classes = [
|
||||
IsAuthenticated,
|
||||
]
|
||||
|
||||
throttle_classes = [
|
||||
ApiKeyRateThrottle,
|
||||
]
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
for backend in list(self.filter_backends):
|
||||
queryset = backend().filter_queryset(self.request, queryset, self)
|
||||
return queryset
|
||||
|
||||
def handle_exception(self, exc):
|
||||
"""
|
||||
Handle any exception that occurs, by returning an appropriate response,
|
||||
or re-raising the error.
|
||||
"""
|
||||
try:
|
||||
response = super().handle_exception(exc)
|
||||
return response
|
||||
except Exception as e:
|
||||
if isinstance(e, IntegrityError):
|
||||
return Response(
|
||||
{"error": "The payload is not valid"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if isinstance(e, ValidationError):
|
||||
return Response(
|
||||
{
|
||||
"error": "The provided payload is not valid please try with a valid payload"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if isinstance(e, ObjectDoesNotExist):
|
||||
model_name = str(exc).split(" matching query does not exist.")[0]
|
||||
return Response(
|
||||
{"error": f"{model_name} does not exist."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
if isinstance(e, KeyError):
|
||||
return Response(
|
||||
{"error": f"key {e} does not exist"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if settings.DEBUG:
|
||||
print(e)
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
try:
|
||||
response = super().dispatch(request, *args, **kwargs)
|
||||
if settings.DEBUG:
|
||||
from django.db import connection
|
||||
|
||||
print(
|
||||
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
||||
)
|
||||
return response
|
||||
except Exception as exc:
|
||||
response = self.handle_exception(exc)
|
||||
return response
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
# Call super to get the default response
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
# Add custom headers if they exist in the request META
|
||||
ratelimit_remaining = request.META.get('X-RateLimit-Remaining')
|
||||
if ratelimit_remaining is not None:
|
||||
response['X-RateLimit-Remaining'] = ratelimit_remaining
|
||||
|
||||
ratelimit_reset = request.META.get('X-RateLimit-Reset')
|
||||
if ratelimit_reset is not None:
|
||||
response['X-RateLimit-Reset'] = ratelimit_reset
|
||||
|
||||
return response
|
||||
|
||||
@property
|
||||
def workspace_slug(self):
|
||||
return self.kwargs.get("slug", None)
|
||||
|
||||
@property
|
||||
def project_id(self):
|
||||
return self.kwargs.get("project_id", None)
|
||||
|
||||
@property
|
||||
def fields(self):
|
||||
fields = [
|
||||
field for field in self.request.GET.get("fields", "").split(",") if field
|
||||
]
|
||||
return fields if fields else None
|
||||
|
||||
@property
|
||||
def expand(self):
|
||||
expand = [
|
||||
expand for expand in self.request.GET.get("expand", "").split(",") if expand
|
||||
]
|
||||
return expand if expand else None
|
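Because finalize_response copies the throttle values from request.META onto the response, a client can track its remaining budget alongside the fields/expand parameters handled by BaseAPIView. A sketch with a placeholder URL prefix and key:

import requests

resp = requests.get(
    "https://plane.example.com/api/v1/workspaces/my-workspace/projects/<project_id>/states/",  # assumed prefix
    headers={"X-Api-Key": "plane_api_xxxxxxxx"},                # placeholder token
    params={"fields": "id,name,group", "expand": "workspace"},  # parsed by BaseAPIView.fields / .expand
)
print(resp.headers.get("X-RateLimit-Remaining"), resp.headers.get("X-RateLimit-Reset"))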
554 apiserver/plane/api/views/cycle.py Normal file
@@ -0,0 +1,554 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
|
||||
from django.utils import timezone
|
||||
from django.core import serializers
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# Module imports
|
||||
from .base import BaseAPIView, WebhookMixin
|
||||
from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.api.serializers import (
|
||||
CycleSerializer,
|
||||
CycleIssueSerializer,
|
||||
IssueSerializer,
|
||||
)
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
|
||||
|
||||
class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to cycle.
|
||||
|
||||
"""
|
||||
|
||||
serializer_class = CycleSerializer
|
||||
model = Cycle
|
||||
webhook_event = "cycle"
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("owned_by")
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
filter=Q(
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="cancelled",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="unstarted",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="backlog",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
queryset = self.get_queryset().get(pk=pk)
|
||||
data = CycleSerializer(
|
||||
queryset,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data
|
||||
return Response(
|
||||
data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
queryset = self.get_queryset()
|
||||
cycle_view = request.GET.get("cycle_view", "all")
|
||||
queryset = queryset.order_by("-is_favorite", "-created_at")
|
||||
|
||||
# Current Cycle
|
||||
if cycle_view == "current":
|
||||
queryset = queryset.filter(
|
||||
start_date__lte=timezone.now(),
|
||||
end_date__gte=timezone.now(),
|
||||
)
|
||||
data = CycleSerializer(
|
||||
queryset, many=True, fields=self.fields, expand=self.expand
|
||||
).data
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
# Upcoming Cycles
|
||||
if cycle_view == "upcoming":
|
||||
queryset = queryset.filter(start_date__gt=timezone.now())
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
# Completed Cycles
|
||||
if cycle_view == "completed":
|
||||
queryset = queryset.filter(end_date__lt=timezone.now())
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
# Draft Cycles
|
||||
if cycle_view == "draft":
|
||||
queryset = queryset.filter(
|
||||
end_date=None,
|
||||
start_date=None,
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
# Incomplete Cycles
|
||||
if cycle_view == "incomplete":
|
||||
queryset = queryset.filter(
|
||||
Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
if (
|
||||
request.data.get("start_date", None) is None
|
||||
and request.data.get("end_date", None) is None
|
||||
) or (
|
||||
request.data.get("start_date", None) is not None
|
||||
and request.data.get("end_date", None) is not None
|
||||
):
|
||||
serializer = CycleSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
owned_by=request.user,
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response(
|
||||
{
|
||||
"error": "Both start date and end date are either required or are to be null"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def patch(self, request, slug, project_id, pk):
|
||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
|
||||
request_data = request.data
|
||||
|
||||
if cycle.end_date is not None and cycle.end_date < timezone.now().date():
|
||||
if "sort_order" in request_data:
|
||||
# Can only change sort order
|
||||
request_data = {
|
||||
"sort_order": request_data.get("sort_order", cycle.sort_order)
|
||||
}
|
||||
else:
|
||||
return Response(
|
||||
{
|
||||
"error": "The Cycle has already been completed so it cannot be edited"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = CycleSerializer(cycle, data=request.data, partial=True)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, pk):
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
|
||||
"issue", flat=True
|
||||
)
|
||||
)
|
||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.deleted",
|
||||
requested_data=json.dumps(
|
||||
{
|
||||
"cycle_id": str(pk),
|
||||
"cycle_name": str(cycle.name),
|
||||
"issues": [str(issue_id) for issue_id in cycle_issues],
|
||||
}
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
# Delete the cycle
|
||||
cycle.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to cycle issues.
|
||||
|
||||
"""
|
||||
|
||||
serializer_class = CycleIssueSerializer
|
||||
model = CycleIssue
|
||||
webhook_event = "cycle"
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
CycleIssue.objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.filter(cycle_id=self.kwargs.get("cycle_id"))
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("cycle")
|
||||
.select_related("issue", "issue__state", "issue__project")
|
||||
.prefetch_related("issue__assignees", "issue__labels")
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, cycle_id):
|
||||
order_by = request.GET.get("order_by", "created_at")
|
||||
issues = (
|
||||
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(bridge_id=F("issue_cycle__id"))
|
||||
.filter(project_id=project_id)
|
||||
.filter(workspace__slug=slug)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("state")
|
||||
.select_related("parent")
|
||||
.prefetch_related("assignees")
|
||||
.prefetch_related("labels")
|
||||
.order_by(order_by)
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issues),
|
||||
on_results=lambda issues: IssueSerializer(
|
||||
issues,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id, cycle_id):
|
||||
issues = request.data.get("issues", [])
|
||||
|
||||
if not issues:
|
||||
return Response(
|
||||
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
cycle = Cycle.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||
)
|
||||
|
||||
if cycle.end_date is not None and cycle.end_date < timezone.now().date():
|
||||
return Response(
|
||||
{
|
||||
"error": "The Cycle has already been completed so no new issues can be added"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
issues = Issue.objects.filter(
|
||||
pk__in=issues, workspace__slug=slug, project_id=project_id
|
||||
).values_list("id", flat=True)
|
||||
|
||||
# Get all CycleIssues already created
|
||||
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
|
||||
update_cycle_issue_activity = []
|
||||
record_to_create = []
|
||||
records_to_update = []
|
||||
|
||||
for issue in issues:
|
||||
cycle_issue = [
|
||||
cycle_issue
|
||||
for cycle_issue in cycle_issues
|
||||
if str(cycle_issue.issue_id) in issues
|
||||
]
|
||||
# Update only when cycle changes
|
||||
if len(cycle_issue):
|
||||
if cycle_issue[0].cycle_id != cycle_id:
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_issue[0].cycle_id),
|
||||
"new_cycle_id": str(cycle_id),
|
||||
"issue_id": str(cycle_issue[0].issue_id),
|
||||
}
|
||||
)
|
||||
cycle_issue[0].cycle_id = cycle_id
|
||||
records_to_update.append(cycle_issue[0])
|
||||
else:
|
||||
record_to_create.append(
|
||||
CycleIssue(
|
||||
project_id=project_id,
|
||||
workspace=cycle.workspace,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
cycle=cycle,
|
||||
issue_id=issue,
|
||||
)
|
||||
)
|
||||
|
||||
CycleIssue.objects.bulk_create(
|
||||
record_to_create,
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
CycleIssue.objects.bulk_update(
|
||||
records_to_update,
|
||||
["cycle"],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
requested_data=json.dumps({"cycles_list": issues}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": serializers.serialize(
|
||||
"json", record_to_create
|
||||
),
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
|
||||
# Return all Cycle Issues
|
||||
return Response(
|
||||
CycleIssueSerializer(self.get_queryset(), many=True).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
def delete(self, request, slug, project_id, cycle_id, pk):
|
||||
cycle_issue = CycleIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
|
||||
)
|
||||
issue_id = cycle_issue.issue_id
|
||||
cycle_issue.delete()
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.deleted",
|
||||
requested_data=json.dumps(
|
||||
{
|
||||
"cycle_id": str(self.kwargs.get("cycle_id")),
|
||||
"issues": [str(issue_id)],
|
||||
}
|
||||
),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("pk", None)),
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
"""
|
||||
This viewset provides `create` actions for transferring the issues into a particular cycle.
|
||||
|
||||
"""
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def post(self, request, slug, project_id, cycle_id):
|
||||
new_cycle_id = request.data.get("new_cycle_id", False)
|
||||
|
||||
if not new_cycle_id:
|
||||
return Response(
|
||||
{"error": "New Cycle Id is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
new_cycle = Cycle.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
||||
)
|
||||
|
||||
if (
|
||||
new_cycle.end_date is not None
|
||||
and new_cycle.end_date < timezone.now().date()
|
||||
):
|
||||
return Response(
|
||||
{
|
||||
"error": "The cycle where the issues are transferred is already completed"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
cycle_id=cycle_id,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
issue__state__group__in=["backlog", "unstarted", "started"],
|
||||
)
|
||||
|
||||
updated_cycles = []
|
||||
for cycle_issue in cycle_issues:
|
||||
cycle_issue.cycle_id = new_cycle_id
|
||||
updated_cycles.append(cycle_issue)
|
||||
|
||||
cycle_issues = CycleIssue.objects.bulk_update(
|
||||
updated_cycles, ["cycle_id"], batch_size=100
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
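For orientation, a minimal client sketch for the two cycle-issue endpoints above: adding issues to a cycle, then transferring the pending issues of a completed cycle into a new one. The base URL, the /api/v1 prefix, the workspace slug and all IDs are illustrative placeholders; the actual URL patterns are registered elsewhere in this changeset and are an assumption here.

import requests

BASE = "https://plane.example.com/api/v1"  # assumed prefix, not confirmed in this file
HEADERS = {"X-Api-Key": "plane_api_xxxxxxxx"}  # API key auth handled by the middleware
SLUG, PROJECT_ID = "my-workspace", "PROJECT_ID"
CYCLE_ID, NEW_CYCLE_ID, ISSUE_ID = "CYCLE_ID", "NEW_CYCLE_ID", "ISSUE_ID"

# Add issues to a cycle; the view expects {"issues": [<issue ids>]}
requests.post(
    f"{BASE}/workspaces/{SLUG}/projects/{PROJECT_ID}/cycles/{CYCLE_ID}/cycle-issues/",
    headers=HEADERS,
    json={"issues": [ISSUE_ID]},
)

# Transfer backlog/unstarted/started issues into another, not-yet-completed cycle
requests.post(
    f"{BASE}/workspaces/{SLUG}/projects/{PROJECT_ID}/cycles/{CYCLE_ID}/transfer-issues/",
    headers=HEADERS,
    json={"new_cycle_id": NEW_CYCLE_ID},
)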
275
apiserver/plane/api/views/inbox.py
Normal file
@ -0,0 +1,275 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.utils import timezone
|
||||
from django.db.models import Q
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from .base import BaseAPIView
|
||||
from plane.app.permissions import ProjectLitePermission
|
||||
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
|
||||
from plane.db.models import InboxIssue, Issue, State, ProjectMember
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
|
||||
|
||||
class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to inbox issues.
|
||||
|
||||
"""
|
||||
|
||||
permission_classes = [
|
||||
ProjectLitePermission,
|
||||
]
|
||||
|
||||
serializer_class = InboxIssueSerializer
|
||||
model = InboxIssue
|
||||
|
||||
filterset_fields = [
|
||||
"status",
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
inbox_id=self.kwargs.get("inbox_id"),
|
||||
)
|
||||
.select_related("issue", "workspace", "project")
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, inbox_id, pk=None):
|
||||
if pk:
|
||||
issue_queryset = self.get_queryset().get(pk=pk)
|
||||
issues_data = InboxIssueSerializer(
|
||||
issue_queryset,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data
|
||||
return Response(
|
||||
issues_data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
issue_queryset = self.get_queryset()
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issue_queryset),
|
||||
on_results=lambda inbox_issues: InboxIssueSerializer(
|
||||
inbox_issues,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id, inbox_id):
|
||||
if not request.data.get("issue", {}).get("name", False):
|
||||
return Response(
|
||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Check for valid priority
|
||||
if not request.data.get("issue", {}).get("priority", "none") in [
|
||||
"low",
|
||||
"medium",
|
||||
"high",
|
||||
"urgent",
|
||||
"none",
|
||||
]:
|
||||
return Response(
|
||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Create or get state
|
||||
state, _ = State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="backlog",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
project_id=project_id,
|
||||
color="#ff7700",
|
||||
)
|
||||
|
||||
# create an issue
|
||||
issue = Issue.objects.create(
|
||||
name=request.data.get("issue", {}).get("name"),
|
||||
description=request.data.get("issue", {}).get("description", {}),
|
||||
description_html=request.data.get("issue", {}).get(
|
||||
"description_html", "<p></p>"
|
||||
),
|
||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
||||
project_id=project_id,
|
||||
state=state,
|
||||
)
|
||||
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
type="issue.activity.created",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
# create an inbox issue
|
||||
InboxIssue.objects.create(
|
||||
inbox_id=inbox_id,
|
||||
project_id=project_id,
|
||||
issue=issue,
|
||||
source=request.data.get("source", "in-app"),
|
||||
)
|
||||
|
||||
serializer = IssueSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def patch(self, request, slug, project_id, inbox_id, pk):
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
||||
)
|
||||
# Get the project member
|
||||
project_member = ProjectMember.objects.get(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
is_active=True,
|
||||
)
|
||||
# Only project admins/members, or the user who created the inbox issue, can edit it
|
||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||
request.user.id
|
||||
):
|
||||
return Response(
|
||||
{"error": "You cannot edit inbox issues"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get issue data
|
||||
issue_data = request.data.pop("issue", False)
|
||||
|
||||
if bool(issue_data):
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
# Only allow guests and viewers to edit name and description
|
||||
if project_member.role <= 10:
|
||||
# restrict viewers and guests to the name and description fields only
|
||||
issue_data = {
|
||||
"name": issue_data.get("name", issue.name),
|
||||
"description_html": issue_data.get(
|
||||
"description_html", issue.description_html
|
||||
),
|
||||
"description": issue_data.get("description", issue.description),
|
||||
}
|
||||
|
||||
issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
|
||||
|
||||
if issue_serializer.is_valid():
|
||||
current_instance = issue
|
||||
# Log all the updates
|
||||
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
|
||||
if issue is not None:
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps(
|
||||
IssueSerializer(current_instance).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
issue_serializer.save()
|
||||
else:
|
||||
return Response(
|
||||
issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Only project admins and members can edit inbox issue attributes
|
||||
if project_member.role > 10:
|
||||
serializer = InboxIssueSerializer(
|
||||
inbox_issue, data=request.data, partial=True
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
# Update the issue state if the issue is rejected or marked as duplicate
|
||||
if serializer.data["status"] in [-1, 2]:
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
state = State.objects.filter(
|
||||
group="cancelled", workspace__slug=slug, project_id=project_id
|
||||
).first()
|
||||
if state is not None:
|
||||
issue.state = state
|
||||
issue.save()
|
||||
|
||||
# Update the issue state if it is accepted
|
||||
if serializer.data["status"] in [1]:
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
|
||||
# Update the issue state only if it is in triage state
|
||||
if issue.state.name == "Triage":
|
||||
# Move to default state
|
||||
state = State.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, default=True
|
||||
).first()
|
||||
if state is not None:
|
||||
issue.state = state
|
||||
issue.save()
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response(
|
||||
InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
def delete(self, request, slug, project_id, inbox_id, pk):
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
||||
)
|
||||
# Get the project member
|
||||
project_member = ProjectMember.objects.get(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||
request.user.id
|
||||
):
|
||||
return Response(
|
||||
{"error": "You cannot delete inbox issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check the issue status
|
||||
if inbox_issue.status in [-2, -1, 0, 2]:
|
||||
# Delete the issue also
|
||||
Issue.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id
|
||||
).delete()
|
||||
|
||||
inbox_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
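A sketch of the payload shape the inbox endpoint above accepts: the issue fields are nested under "issue", "name" is mandatory, and "priority" must be one of low, medium, high, urgent or none. The URL, prefix and identifiers are assumed placeholders for illustration.

import requests

BASE = "https://plane.example.com/api/v1"  # assumed prefix
HEADERS = {"X-Api-Key": "plane_api_xxxxxxxx"}

payload = {
    "issue": {
        "name": "Reported crash on login",               # required
        "priority": "high",                              # validated against the allowed set
        "description_html": "<p>Steps to reproduce...</p>",
    },
    "source": "api",  # optional; the view defaults it to "in-app"
}
resp = requests.post(
    f"{BASE}/workspaces/my-workspace/projects/PROJECT_ID/inboxes/INBOX_ID/inbox-issues/",
    headers=HEADERS,
    json=payload,
)
print(resp.status_code, resp.json().get("id"))  # the created issue is returned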
599
apiserver/plane/api/views/issue.py
Normal file
@ -0,0 +1,599 @@
|
||||
# Python imports
|
||||
import json
|
||||
from itertools import chain
|
||||
|
||||
# Django imports
|
||||
from django.db import IntegrityError
|
||||
from django.db.models import (
|
||||
OuterRef,
|
||||
Func,
|
||||
Q,
|
||||
F,
|
||||
Case,
|
||||
When,
|
||||
Value,
|
||||
CharField,
|
||||
Max,
|
||||
Exists,
|
||||
)
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.parsers import MultiPartParser, FormParser
|
||||
|
||||
# Module imports
|
||||
from .base import BaseAPIView, WebhookMixin
|
||||
from plane.app.permissions import (
|
||||
ProjectEntityPermission,
|
||||
ProjectMemberPermission,
|
||||
ProjectLitePermission,
|
||||
)
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueAttachment,
|
||||
IssueLink,
|
||||
Project,
|
||||
Label,
|
||||
ProjectMember,
|
||||
IssueComment,
|
||||
IssueActivity,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.api.serializers import (
|
||||
IssueSerializer,
|
||||
LabelSerializer,
|
||||
IssueLinkSerializer,
|
||||
IssueCommentSerializer,
|
||||
IssueAttachmentSerializer,
|
||||
IssueActivitySerializer,
|
||||
)
|
||||
|
||||
|
||||
class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to issues.
|
||||
|
||||
"""
|
||||
|
||||
model = Issue
|
||||
webhook_event = "issue"
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
serializer_class = IssueSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Issue.issue_objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("state")
|
||||
.select_related("parent")
|
||||
.prefetch_related("assignees")
|
||||
.prefetch_related("labels")
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
).distinct()
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
issue = Issue.issue_objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
).get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
return Response(
|
||||
IssueSerializer(
|
||||
issue,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
# Custom ordering for priority and state
|
||||
priority_order = ["urgent", "high", "medium", "low", "none"]
|
||||
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
|
||||
|
||||
order_by_param = request.GET.get("order_by", "-created_at")
|
||||
|
||||
issue_queryset = (
|
||||
self.get_queryset()
|
||||
.filter(**filters)
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(module_id=F("issue_module__module_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
|
||||
# Priority Ordering
|
||||
if order_by_param == "priority" or order_by_param == "-priority":
|
||||
priority_order = (
|
||||
priority_order if order_by_param == "priority" else priority_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
priority_order=Case(
|
||||
*[
|
||||
When(priority=p, then=Value(i))
|
||||
for i, p in enumerate(priority_order)
|
||||
],
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("priority_order")
|
||||
|
||||
# State Ordering
|
||||
elif order_by_param in [
|
||||
"state__name",
|
||||
"state__group",
|
||||
"-state__name",
|
||||
"-state__group",
|
||||
]:
|
||||
state_order = (
|
||||
state_order
|
||||
if order_by_param in ["state__name", "state__group"]
|
||||
else state_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
state_order=Case(
|
||||
*[
|
||||
When(state__group=state_group, then=Value(i))
|
||||
for i, state_group in enumerate(state_order)
|
||||
],
|
||||
default=Value(len(state_order)),
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("state_order")
|
||||
# assignee and label ordering
|
||||
elif order_by_param in [
|
||||
"labels__name",
|
||||
"-labels__name",
|
||||
"assignees__first_name",
|
||||
"-assignees__first_name",
|
||||
]:
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
max_values=Max(
|
||||
order_by_param[1::]
|
||||
if order_by_param.startswith("-")
|
||||
else order_by_param
|
||||
)
|
||||
).order_by(
|
||||
"-max_values" if order_by_param.startswith("-") else "max_values"
|
||||
)
|
||||
else:
|
||||
issue_queryset = issue_queryset.order_by(order_by_param)
|
||||
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issue_queryset),
|
||||
on_results=lambda issues: IssueSerializer(
|
||||
issues,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
project = Project.objects.get(pk=project_id)
|
||||
|
||||
serializer = IssueSerializer(
|
||||
data=request.data,
|
||||
context={
|
||||
"project_id": project_id,
|
||||
"workspace_id": project.workspace_id,
|
||||
"default_assignee_id": project.default_assignee_id,
|
||||
},
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
|
||||
# Track the issue
|
||||
issue_activity.delay(
|
||||
type="issue.activity.created",
|
||||
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(serializer.data.get("id", None)),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def patch(self, request, slug, project_id, pk=None):
|
||||
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
current_instance = json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
|
||||
serializer = IssueSerializer(issue, data=request.data, partial=True)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, pk=None):
|
||||
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
current_instance = json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
issue.delete()
|
||||
issue_activity.delay(
|
||||
type="issue.activity.deleted",
|
||||
requested_data=json.dumps({"issue_id": str(pk)}),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class LabelAPIEndpoint(BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to the labels.
|
||||
|
||||
"""
|
||||
|
||||
serializer_class = LabelSerializer
|
||||
model = Label
|
||||
permission_classes = [
|
||||
ProjectMemberPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("parent")
|
||||
.distinct()
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
try:
|
||||
serializer = LabelSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(project_id=project_id)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except IntegrityError:
|
||||
return Response(
|
||||
{"error": "Label with the same name already exists in the project"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
label = self.get_queryset().get(pk=pk)
|
||||
serializer = LabelSerializer(
|
||||
label,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda labels: LabelSerializer(
|
||||
labels,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def patch(self, request, slug, project_id, pk=None):
|
||||
label = self.get_queryset().get(pk=pk)
|
||||
serializer = LabelSerializer(label, data=request.data, partial=True)
if serializer.is_valid():
    serializer.save()
    return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, pk=None):
|
||||
label = self.get_queryset().get(pk=pk)
|
||||
label.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class IssueLinkAPIEndpoint(BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to the links of the particular issue.
|
||||
|
||||
"""
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
model = IssueLink
|
||||
serializer_class = IssueLinkSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(issue_id=self.kwargs.get("issue_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
issue_link = self.get_queryset().get(pk=pk)
|
||||
serializer = IssueLinkSerializer(
|
||||
issue_link,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda issue_links: IssueLinkSerializer(
|
||||
issue_links,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id, issue_id):
|
||||
serializer = IssueLinkSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
issue_id=issue_id,
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="link.activity.created",
|
||||
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("issue_id")),
|
||||
project_id=str(self.kwargs.get("project_id")),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def patch(self, request, slug, project_id, issue_id, pk):
|
||||
issue_link = IssueLink.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
|
||||
current_instance = json.dumps(
|
||||
IssueLinkSerializer(issue_link).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
type="link.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, issue_id, pk):
|
||||
issue_link = IssueLink.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
current_instance = json.dumps(
|
||||
IssueLinkSerializer(issue_link).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="link.activity.deleted",
|
||||
requested_data=json.dumps({"link_id": str(pk)}),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
issue_link.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to comments of the particular issue.
|
||||
|
||||
"""
|
||||
|
||||
serializer_class = IssueCommentSerializer
|
||||
model = IssueComment
|
||||
webhook_event = "issue-comment"
|
||||
permission_classes = [
|
||||
ProjectLitePermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(issue_id=self.kwargs.get("issue_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("issue")
|
||||
.select_related("actor")
|
||||
.annotate(
|
||||
is_member=Exists(
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
member_id=self.request.user.id,
|
||||
is_active=True,
|
||||
)
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, issue_id, pk=None):
|
||||
if pk:
|
||||
issue_comment = self.get_queryset().get(pk=pk)
|
||||
serializer = IssueCommentSerializer(
|
||||
issue_comment,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda issue_comment: IssueCommentSerializer(
|
||||
issue_comment,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id, issue_id):
|
||||
serializer = IssueCommentSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
issue_id=issue_id,
|
||||
actor=request.user,
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="comment.activity.created",
|
||||
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("issue_id")),
|
||||
project_id=str(self.kwargs.get("project_id")),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def patch(self, request, slug, project_id, issue_id, pk):
|
||||
issue_comment = IssueComment.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
|
||||
current_instance = json.dumps(
|
||||
IssueCommentSerializer(issue_comment).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
serializer = IssueCommentSerializer(
|
||||
issue_comment, data=request.data, partial=True
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
type="comment.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, issue_id, pk):
|
||||
issue_comment = IssueComment.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
current_instance = json.dumps(
|
||||
IssueCommentSerializer(issue_comment).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
issue_comment.delete()
|
||||
issue_activity.delay(
|
||||
type="comment.activity.deleted",
|
||||
requested_data=json.dumps({"comment_id": str(pk)}),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class IssueActivityAPIEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, project_id, issue_id, pk=None):
|
||||
issue_activities = (
|
||||
IssueActivity.objects.filter(
|
||||
issue_id=issue_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
.filter(
|
||||
~Q(field__in=["comment", "vote", "reaction", "draft"]),
|
||||
project__project_projectmember__member=self.request.user,
|
||||
)
|
||||
.select_related("actor", "workspace", "issue", "project")
|
||||
).order_by(request.GET.get("order_by", "created_at"))
|
||||
|
||||
if pk:
|
||||
issue_activities = issue_activities.get(pk=pk)
|
||||
serializer = IssueActivitySerializer(issue_activities)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issue_activities),
|
||||
on_results=lambda issue_activity: IssueActivitySerializer(
|
||||
issue_activity,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
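The issue list endpoint above supports the custom priority/state ordering plus the dynamic fields/expand selection used throughout these serializers. A hedged example of a list call, assuming `fields` and `expand` are read from query parameters by the base view (which is not shown in this file) and using placeholder URLs:

import requests

BASE = "https://plane.example.com/api/v1"  # assumed prefix
HEADERS = {"X-Api-Key": "plane_api_xxxxxxxx"}

resp = requests.get(
    f"{BASE}/workspaces/my-workspace/projects/PROJECT_ID/issues/",
    headers=HEADERS,
    params={
        "order_by": "-priority",           # urgent -> none, per the Case() annotation above
        "fields": "id,name,priority,state",
        "expand": "state",
    },
)
for issue in resp.json().get("results", []):  # paginated response shape is an assumption
    print(issue["id"], issue.get("name"))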
365
apiserver/plane/api/views/module.py
Normal file
@ -0,0 +1,365 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
|
||||
from django.utils import timezone
|
||||
from django.core import serializers
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from .base import BaseAPIView, WebhookMixin
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
Module,
|
||||
ModuleLink,
|
||||
Issue,
|
||||
ModuleIssue,
|
||||
IssueAttachment,
|
||||
IssueLink,
|
||||
)
|
||||
from plane.api.serializers import (
|
||||
ModuleSerializer,
|
||||
ModuleIssueSerializer,
|
||||
IssueSerializer,
|
||||
)
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
|
||||
|
||||
class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to modules.
|
||||
|
||||
"""
|
||||
|
||||
model = Module
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
serializer_class = ModuleSerializer
|
||||
webhook_event = "module"
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Module.objects.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("lead")
|
||||
.prefetch_related("members")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"link_module",
|
||||
queryset=ModuleLink.objects.select_related("module", "created_by"),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_module",
|
||||
filter=Q(
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="completed",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="cancelled",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="started",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="unstarted",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="backlog",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
project = Project.objects.get(workspace__slug=slug, pk=project_id)
|
||||
serializer = ModuleSerializer(data=request.data, context={"project": project})
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
module = Module.objects.get(pk=serializer.data["id"])
|
||||
serializer = ModuleSerializer(module)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
queryset = self.get_queryset().get(pk=pk)
|
||||
data = ModuleSerializer(
|
||||
queryset,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data
|
||||
return Response(
|
||||
data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda modules: ModuleSerializer(
|
||||
modules,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def delete(self, request, slug, project_id, pk):
|
||||
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
module_issues = list(
|
||||
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="module.activity.deleted",
|
||||
requested_data=json.dumps(
|
||||
{
|
||||
"module_id": str(pk),
|
||||
"module_name": str(module.name),
|
||||
"issues": [str(issue_id) for issue_id in module_issues],
|
||||
}
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
module.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""
|
||||
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||
`update` and `destroy` actions related to module issues.
|
||||
|
||||
"""
|
||||
|
||||
serializer_class = ModuleIssueSerializer
|
||||
model = ModuleIssue
|
||||
webhook_event = "module"
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
ModuleIssue.objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(module_id=self.kwargs.get("module_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("module")
|
||||
.select_related("issue", "issue__state", "issue__project")
|
||||
.prefetch_related("issue__assignees", "issue__labels")
|
||||
.prefetch_related("module__members")
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, module_id):
|
||||
order_by = request.GET.get("order_by", "created_at")
|
||||
issues = (
|
||||
Issue.issue_objects.filter(issue_module__module_id=module_id)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(bridge_id=F("issue_module__id"))
|
||||
.filter(project_id=project_id)
|
||||
.filter(workspace__slug=slug)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("state")
|
||||
.select_related("parent")
|
||||
.prefetch_related("assignees")
|
||||
.prefetch_related("labels")
|
||||
.order_by(order_by)
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issues),
|
||||
on_results=lambda issues: IssueSerializer(
|
||||
issues,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id, module_id):
|
||||
issues = request.data.get("issues", [])
|
||||
if not len(issues):
|
||||
return Response(
|
||||
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
module = Module.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=module_id
|
||||
)
|
||||
|
||||
issues = Issue.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk__in=issues
|
||||
).values_list("id", flat=True)
|
||||
|
||||
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
|
||||
|
||||
update_module_issue_activity = []
|
||||
records_to_update = []
|
||||
record_to_create = []
|
||||
|
||||
for issue in issues:
|
||||
module_issue = [
|
||||
module_issue
|
||||
for module_issue in module_issues
|
||||
if str(module_issue.issue_id) in issues
|
||||
]
|
||||
|
||||
if len(module_issue):
|
||||
if module_issue[0].module_id != module_id:
|
||||
update_module_issue_activity.append(
|
||||
{
|
||||
"old_module_id": str(module_issue[0].module_id),
|
||||
"new_module_id": str(module_id),
|
||||
"issue_id": str(module_issue[0].issue_id),
|
||||
}
|
||||
)
|
||||
module_issue[0].module_id = module_id
|
||||
records_to_update.append(module_issue[0])
|
||||
else:
|
||||
record_to_create.append(
|
||||
ModuleIssue(
|
||||
module=module,
|
||||
issue_id=issue,
|
||||
project_id=project_id,
|
||||
workspace=module.workspace,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
)
|
||||
)
|
||||
|
||||
ModuleIssue.objects.bulk_create(
|
||||
record_to_create,
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
ModuleIssue.objects.bulk_update(
|
||||
records_to_update,
|
||||
["module"],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="module.activity.created",
|
||||
requested_data=json.dumps({"modules_list": issues}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_module_issues": update_module_issue_activity,
|
||||
"created_module_issues": serializers.serialize(
|
||||
"json", record_to_create
|
||||
),
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
|
||||
return Response(
|
||||
ModuleIssueSerializer(self.get_queryset(), many=True).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
def delete(self, request, slug, project_id, module_id, pk):
|
||||
module_issue = ModuleIssue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
|
||||
)
|
||||
module_issue.delete()
|
||||
issue_activity.delay(
|
||||
type="module.activity.deleted",
|
||||
requested_data=json.dumps(
|
||||
{
|
||||
"module_id": str(module_id),
|
||||
"issues": [str(module_issue.issue_id)],
|
||||
}
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
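The module-issue endpoint mirrors the cycle-issue flow above (bulk create for new links, bulk update when an issue moves between modules). A one-call sketch with placeholder URL and IDs:

import requests

requests.post(
    "https://plane.example.com/api/v1/workspaces/my-workspace/projects/PROJECT_ID"
    "/modules/MODULE_ID/module-issues/",  # assumed path
    headers={"X-Api-Key": "plane_api_xxxxxxxx"},
    json={"issues": ["ISSUE_ID_1", "ISSUE_ID_2"]},
)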
285
apiserver/plane/api/views/project.py
Normal file
@ -0,0 +1,285 @@
|
||||
# Django imports
|
||||
from django.db import IntegrityError
|
||||
from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import (
|
||||
Workspace,
|
||||
Project,
|
||||
ProjectFavorite,
|
||||
ProjectMember,
|
||||
ProjectDeployBoard,
|
||||
State,
|
||||
Cycle,
|
||||
Module,
|
||||
IssueProperty,
|
||||
Inbox,
|
||||
)
|
||||
from plane.app.permissions import ProjectBasePermission
|
||||
from plane.api.serializers import ProjectSerializer
|
||||
from .base import BaseAPIView, WebhookMixin
|
||||
|
||||
|
||||
class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||
"""Project Endpoints to create, update, list, retrieve and delete endpoint"""
|
||||
|
||||
serializer_class = ProjectSerializer
|
||||
model = Project
|
||||
webhook_event = "project"
|
||||
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
|
||||
.select_related(
|
||||
"workspace", "workspace__owner", "default_assignee", "project_lead"
|
||||
)
|
||||
.annotate(
|
||||
is_member=Exists(
|
||||
ProjectMember.objects.filter(
|
||||
member=self.request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
)
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_members=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("id"),
|
||||
member__is_bot=False,
|
||||
is_active=True,
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_modules=Module.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
member_role=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
member_id=self.request.user.id,
|
||||
is_active=True,
|
||||
).values("role")
|
||||
)
|
||||
.annotate(
|
||||
is_deployed=Exists(
|
||||
ProjectDeployBoard.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
)
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def get(self, request, slug, pk=None):
|
||||
if pk is None:
|
||||
sort_order_query = ProjectMember.objects.filter(
|
||||
member=request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
).values("sort_order")
|
||||
projects = (
|
||||
self.get_queryset()
|
||||
.annotate(sort_order=Subquery(sort_order_query))
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"project_projectmember",
|
||||
queryset=ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
is_active=True,
|
||||
).select_related("member"),
|
||||
)
|
||||
)
|
||||
.order_by("sort_order", "name")
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(projects),
|
||||
on_results=lambda projects: ProjectSerializer(
|
||||
projects, many=True, fields=self.fields, expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
else:
|
||||
project = self.get_queryset().get(workspace__slug=slug, pk=pk)
|
||||
serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def post(self, request, slug):
|
||||
try:
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
|
||||
serializer = ProjectSerializer(
|
||||
data={**request.data}, context={"workspace_id": workspace.id}
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
|
||||
# Add the user as Administrator to the project
|
||||
project_member = ProjectMember.objects.create(
|
||||
project_id=serializer.data["id"], member=request.user, role=20
|
||||
)
|
||||
# Also create the issue property for the user
|
||||
_ = IssueProperty.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
user=request.user,
|
||||
)
|
||||
|
||||
if serializer.data["project_lead"] is not None and str(
|
||||
serializer.data["project_lead"]
|
||||
) != str(request.user.id):
|
||||
ProjectMember.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
member_id=serializer.data["project_lead"],
|
||||
role=20,
|
||||
)
|
||||
# Also create the issue property for the user
|
||||
IssueProperty.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
user_id=serializer.data["project_lead"],
|
||||
)
|
||||
|
||||
# Default states
|
||||
states = [
|
||||
{
|
||||
"name": "Backlog",
|
||||
"color": "#A3A3A3",
|
||||
"sequence": 15000,
|
||||
"group": "backlog",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
"name": "Todo",
|
||||
"color": "#3A3A3A",
|
||||
"sequence": 25000,
|
||||
"group": "unstarted",
|
||||
},
|
||||
{
|
||||
"name": "In Progress",
|
||||
"color": "#F59E0B",
|
||||
"sequence": 35000,
|
||||
"group": "started",
|
||||
},
|
||||
{
|
||||
"name": "Done",
|
||||
"color": "#16A34A",
|
||||
"sequence": 45000,
|
||||
"group": "completed",
|
||||
},
|
||||
{
|
||||
"name": "Cancelled",
|
||||
"color": "#EF4444",
|
||||
"sequence": 55000,
|
||||
"group": "cancelled",
|
||||
},
|
||||
]
|
||||
|
||||
State.objects.bulk_create(
|
||||
[
|
||||
State(
|
||||
name=state["name"],
|
||||
color=state["color"],
|
||||
project=serializer.instance,
|
||||
sequence=state["sequence"],
|
||||
workspace=serializer.instance.workspace,
|
||||
group=state["group"],
|
||||
default=state.get("default", False),
|
||||
created_by=request.user,
|
||||
)
|
||||
for state in states
|
||||
]
|
||||
)
|
||||
|
||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
||||
serializer = ProjectSerializer(project)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(
|
||||
serializer.errors,
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except IntegrityError as e:
|
||||
if "already exists" in str(e):
|
||||
return Response(
|
||||
{"name": "The project name is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
||||
except Workspace.DoesNotExist as e:
|
||||
return Response(
|
||||
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
except ValidationError as e:
|
||||
return Response(
|
||||
{"identifier": "The project identifier is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
||||
|
||||
def patch(self, request, slug, pk=None):
|
||||
try:
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
project = Project.objects.get(pk=pk)
|
||||
|
||||
serializer = ProjectSerializer(
|
||||
project,
|
||||
data={**request.data},
|
||||
context={"workspace_id": workspace.id},
|
||||
partial=True,
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
if serializer.data["inbox_view"]:
|
||||
Inbox.objects.get_or_create(
|
||||
name=f"{project.name} Inbox", project=project, is_default=True
|
||||
)
|
||||
|
||||
# Create the triage state in Backlog group
|
||||
State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="backlog",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
project_id=pk,
|
||||
color="#ff7700",
|
||||
)
|
||||
|
||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
||||
serializer = ProjectSerializer(project)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except IntegrityError as e:
|
||||
if "already exists" in str(e):
|
||||
return Response(
|
||||
{"name": "The project name is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
||||
except (Project.DoesNotExist, Workspace.DoesNotExist):
|
||||
return Response(
|
||||
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
except ValidationError as e:
|
||||
return Response(
|
||||
{"identifier": "The project identifier is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
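A sketch of creating a project through the endpoint above. On success the view adds the caller as Administrator (role 20), creates the issue property record, and seeds the five default states. The URL and the identifier value are illustrative placeholders:

import requests

resp = requests.post(
    "https://plane.example.com/api/v1/workspaces/my-workspace/projects/",  # assumed path
    headers={"X-Api-Key": "plane_api_xxxxxxxx"},
    json={"name": "Documentation", "identifier": "DOCS"},
)
if resp.status_code == 201:
    print("created project", resp.json()["id"])
elif resp.status_code == 410:
    print("name or identifier already taken:", resp.json())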
89
apiserver/plane/api/views/state.py
Normal file
@ -0,0 +1,89 @@
|
||||
# Python imports
|
||||
from itertools import groupby
|
||||
|
||||
# Django imports
|
||||
from django.db.models import Q
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# Module imports
|
||||
from .base import BaseAPIView
|
||||
from plane.api.serializers import StateSerializer
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.db.models import State, Issue
|
||||
|
||||
|
||||
class StateAPIEndpoint(BaseAPIView):
|
||||
serializer_class = StateSerializer
|
||||
model = State
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.filter(~Q(name="Triage"))
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
serializer = StateSerializer(data=request.data, context={"project_id": project_id})
|
||||
if serializer.is_valid():
|
||||
serializer.save(project_id=project_id)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
if pk:
|
||||
serializer = StateSerializer(self.get_queryset().get(pk=pk))
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda states: StateSerializer(
|
||||
states,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
def delete(self, request, slug, project_id, pk):
|
||||
state = State.objects.get(
|
||||
~Q(name="Triage"),
|
||||
pk=pk,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
if state.default:
|
||||
return Response({"error": "Default state cannot be deleted"}, status=False)
|
||||
|
||||
# Check for any issues in the state
|
||||
issue_exist = Issue.issue_objects.filter(state=pk).exists()
|
||||
|
||||
if issue_exist:
|
||||
return Response(
|
||||
{"error": "The state is not empty, only empty states can be deleted"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
state.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def patch(self, request, slug, project_id, pk=None):
|
||||
state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
serializer = StateSerializer(state, data=request.data, partial=True)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
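Finally, a short sketch for the state endpoint above: "Triage" is reserved (filtered out of the queryset), default states cannot be deleted, and only empty states can be removed. URL and IDs are placeholders:

import requests

BASE = "https://plane.example.com/api/v1"  # assumed prefix
HEADERS = {"X-Api-Key": "plane_api_xxxxxxxx"}

# Create a state
requests.post(
    f"{BASE}/workspaces/my-workspace/projects/PROJECT_ID/states/",
    headers=HEADERS,
    json={"name": "In Review", "color": "#6366F1", "group": "started"},
)

# Delete a state (rejected if it is the default state or still has issues)
requests.delete(
    f"{BASE}/workspaces/my-workspace/projects/PROJECT_ID/states/STATE_ID/",
    headers=HEADERS,
)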
@ -1,5 +1,5 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AppConfig(AppConfig):
|
||||
class AppApiConfig(AppConfig):
|
||||
name = "plane.app"
|
||||
|
0
apiserver/plane/app/middleware/__init__.py
Normal file
@ -214,4 +214,4 @@ class ProjectPublicMemberSerializer(BaseSerializer):
|
||||
"workspace",
|
||||
"project",
|
||||
"member",
|
||||
]
|
||||
]
|
@ -25,4 +25,4 @@ class StateLiteSerializer(BaseSerializer):
|
||||
"color",
|
||||
"group",
|
||||
]
|
||||
read_only_fields = fields
|
||||
read_only_fields = fields
|
@ -22,10 +22,6 @@ from .api import urlpatterns as api_urls
|
||||
from .webhook import urlpatterns as webhook_urls
|
||||
|
||||
|
||||
# Django imports
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
*analytic_urls,
|
||||
*asset_urls,
|
||||
@ -49,4 +45,4 @@ urlpatterns = [
|
||||
*workspace_urls,
|
||||
*api_urls,
|
||||
*webhook_urls,
|
||||
]
|
||||
]
|
@ -169,4 +169,4 @@ urlpatterns = [
|
||||
),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
]
|
||||
]
|
@ -168,4 +168,4 @@ from .exporter import ExportIssuesEndpoint
|
||||
|
||||
from .config import ConfigurationEndpoint
|
||||
|
||||
from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint
|
||||
from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint
|
@ -868,4 +868,4 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
updated_cycles, ["cycle_id"], batch_size=100
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
@ -356,4 +356,3 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
inbox_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
|
@ -1750,4 +1750,4 @@ class IssueDraftViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
@ -583,4 +583,4 @@ class ModuleFavoriteViewSet(BaseViewSet):
|
||||
module_id=module_id,
|
||||
)
|
||||
module_favorite.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
@ -420,4 +420,4 @@ class OauthEndpoint(BaseAPIView):
|
||||
"access_token": access_token,
|
||||
"refresh_token": refresh_token,
|
||||
}
|
||||
return Response(data, status=status.HTTP_201_CREATED)
|
||||
return Response(data, status=status.HTTP_201_CREATED)
|
@ -89,4 +89,4 @@ class StateViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
state.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
@ -257,4 +257,4 @@ class IssueViewFavoriteViewSet(BaseViewSet):
            view_id=view_id,
        )
        view_favourite.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
        return Response(status=status.HTTP_204_NO_CONTENT)
@ -1,5 +0,0 @@
from django.apps import AppConfig


class ProxyConfig(AppConfig):
    name = "plane.proxy"
|
||||
# Python imports
|
||||
import re
|
||||
import json
|
||||
import requests
|
||||
|
||||
# Django imports
|
||||
from django.conf import settings
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
# Module imports
|
||||
from plane.authentication.api_authentication import APIKeyAuthentication
|
||||
from plane.proxy.rate_limit import ApiKeyRateThrottle
|
||||
|
||||
|
||||
class BaseAPIView(APIView):
|
||||
authentication_classes = [
|
||||
APIKeyAuthentication,
|
||||
]
|
||||
|
||||
permission_classes = [
|
||||
IsAuthenticated,
|
||||
]
|
||||
|
||||
throttle_classes = [
|
||||
ApiKeyRateThrottle,
|
||||
]
|
||||
|
||||
def _get_jwt_token(self, request):
|
||||
refresh = RefreshToken.for_user(request.user)
|
||||
return str(refresh.access_token)
|
||||
|
||||
def _get_url_path(self, request):
|
||||
match = re.search(r"/v1/(.*)", request.path)
|
||||
return match.group(1) if match else ""
|
||||
|
||||
def _get_headers(self, request):
|
||||
return {
|
||||
"Authorization": f"Bearer {self._get_jwt_token(request=request)}",
|
||||
"Content-Type": request.headers.get("Content-Type", "application/json"),
|
||||
}
|
||||
|
||||
def _get_url(self, request):
|
||||
path = self._get_url_path(request=request)
|
||||
url = request.build_absolute_uri("/api/" + path)
|
||||
return url
|
||||
|
||||
def _get_query_params(self, request):
|
||||
query_params = request.GET
|
||||
return query_params
|
||||
|
||||
def _get_payload(self, request):
|
||||
content_type = request.headers.get("Content-Type", "application/json")
|
||||
if content_type.startswith("multipart/form-data"):
|
||||
files_dict = {k: v[0] for k, v in request.FILES.lists()}
|
||||
return (None, files_dict)
|
||||
else:
|
||||
return (json.dumps(request.data), None)
|
||||
|
||||
def _make_request(self, request, method="GET"):
|
||||
data_payload, files_payload = self._get_payload(request=request)
|
||||
response = requests.request(
|
||||
method=method,
|
||||
url=self._get_url(request=request),
|
||||
headers=self._get_headers(request=request),
|
||||
params=self._get_query_params(request=request),
|
||||
data=data_payload,
|
||||
files=files_payload,
|
||||
)
|
||||
return response.json(), response.status_code
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
# Call super to get the default response
|
||||
response = super().finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
# Add custom headers if they exist in the request META
|
||||
ratelimit_remaining = request.META.get('X-RateLimit-Remaining')
|
||||
if ratelimit_remaining is not None:
|
||||
response['X-RateLimit-Remaining'] = ratelimit_remaining
|
||||
|
||||
ratelimit_reset = request.META.get('X-RateLimit-Reset')
|
||||
if ratelimit_reset is not None:
|
||||
response['X-RateLimit-Reset'] = ratelimit_reset
|
||||
|
||||
return response
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
response, status_code = self._make_request(request=request, method="GET")
|
||||
return Response(response, status=status_code)
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
response, status_code = self._make_request(request=request, method="POST")
|
||||
return Response(response, status=status_code)
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
response, status_code = self._make_request(request=request, method="PATCH")
|
||||
return Response(response, status=status_code)
|
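The finalize_response hook above only copies X-RateLimit-Remaining and X-RateLimit-Reset out of request.META; something upstream has to put them there first. Below is a minimal sketch of one way a SimpleRateThrottle subclass could do that. It is not the throttle shipped in this changeset; the class name and the window arithmetic are illustrative.

from django.utils import timezone
from rest_framework.throttling import SimpleRateThrottle


class HeaderReportingThrottle(SimpleRateThrottle):
    scope = "api_key"  # matches the "api_key" rate configured in settings

    def get_cache_key(self, request, view):
        key = request.headers.get("X-Api-Key")
        return self.cache_format % {"scope": self.scope, "ident": key} if key else None

    def allow_request(self, request, view):
        allowed = super().allow_request(request, view)
        history = getattr(self, "history", None)
        if history is not None:
            # Stash the remaining quota and window reset time in META so a
            # finalize_response hook can surface them as response headers.
            request.META["X-RateLimit-Remaining"] = max(0, self.num_requests - len(history))
            request.META["X-RateLimit-Reset"] = int(timezone.now().timestamp() + self.duration)
        return allowed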
@ -1,30 +0,0 @@
from .base import BaseAPIView


class CycleAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to cycle.

    """

    pass


class CycleIssueAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to cycle issues.

    """

    pass


class TransferCycleIssueAPIEndpoint(BaseAPIView):
    """
    This viewset provides `create` actions for transferring the issues into a particular cycle.

    """

    pass
@ -1,10 +0,0 @@
from .base import BaseAPIView


class InboxIssueAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to inbox issues.

    """
    pass
@ -1,37 +0,0 @@
from .base import BaseAPIView


class IssueAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to issue.

    """
    pass


class LabelAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to the labels.

    """
    pass


class IssueLinkAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to the links of the particular issue.

    """
    pass


class IssueCommentAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to comments of the particular issue.

    """
    pass
@ -1,20 +0,0 @@
from .base import BaseAPIView


class ModuleAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to module.

    """

    pass


class ModuleIssueAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to module issues.

    """
    pass
@ -1,5 +0,0 @@
from .base import BaseAPIView


class ProjectAPIEndpoint(BaseAPIView):
    pass
@ -42,7 +42,7 @@ INSTALLED_APPS = [
    "plane.web",
    "plane.middleware",
    "plane.license",
    "plane.proxy",
    "plane.api",
    # Third-party things
    "rest_framework",
    "rest_framework.authtoken",
@ -75,7 +75,7 @@ REST_FRAMEWORK = {
    "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
    "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
    "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",),
    "DEFAULT_THROTTLE_CLASSES": ("plane.proxy.rate_limit.ApiKeyRateThrottle",),
    "DEFAULT_THROTTLE_CLASSES": ("plane.api.rate_limit.ApiKeyRateThrottle",),
    "DEFAULT_THROTTLE_RATES": {
        "api_key": "60/minute",
    },
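For reference, DRF converts the "60/minute" rate string above into a (requests, seconds) pair via SimpleRateThrottle.parse_rate. A minimal sketch follows; the DemoThrottle class and the bare settings.configure() call are illustrative scaffolding, not part of this changeset.

from django.conf import settings

if not settings.configured:
    settings.configure()  # bare-bones settings, just enough to import DRF outside the project

from rest_framework.throttling import SimpleRateThrottle


class DemoThrottle(SimpleRateThrottle):
    scope = "api_key"
    rate = "60/minute"  # same format as DEFAULT_THROTTLE_RATES["api_key"]

    def get_cache_key(self, request, view):
        return None  # unused in this demo


print(DemoThrottle().parse_rate("60/minute"))  # (60, 60): 60 requests per 60-second window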
@ -321,4 +321,4 @@ ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)

# Use Minio settings
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
@ -13,7 +13,7 @@ urlpatterns = [
    path("api/", include("plane.app.urls")),
    path("api/public/", include("plane.space.urls")),
    path("api/licenses/", include("plane.license.urls")),
    path("api/v1/", include("plane.proxy.urls")),
    path("api/v1/", include("plane.api.urls")),
    path("", include("plane.web.urls")),
]

@ -28,15 +28,15 @@ class Cursor:

    @classmethod
    def from_string(cls, value):
        bits = value.split(":")
        if len(bits) != 3:
            raise ValueError
        try:
            bits = value.split(":")
            if len(bits) != 3:
                raise ValueError("Cursor must be in the format 'value:offset:is_prev'")

            value = float(bits[0]) if "." in bits[0] else int(bits[0])
            bits = value, int(bits[1]), int(bits[2])
        except (TypeError, ValueError):
            raise ValueError
        return cls(*bits)
            return cls(value, int(bits[1]), bool(int(bits[2])))
        except (TypeError, ValueError) as e:
            raise ValueError(f"Invalid cursor format: {e}")


class CursorResult(Sequence):
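A short sketch of what the tightened parser now accepts and rejects. It assumes Cursor takes (value, offset, is_prev), as the new return statement implies, and that it is importable from plane.utils.paginator, which is an assumed module path.

from plane.utils.paginator import Cursor  # assumed module path

cursor = Cursor.from_string("20:1:0")  # value=20, offset=1, is_prev=False
Cursor.from_string("20:1")             # raises ValueError("Invalid cursor format: ...")
Cursor.from_string("abc:1:0")          # raises ValueError: the value component is not numeric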
@ -125,7 +125,8 @@ class OffsetPaginator:
        if self.on_results:
            results = self.on_results(results)

        max_hits = math.ceil(queryset.count() / limit)
        count = queryset.count()
        max_hits = math.ceil(count / limit)

        return CursorResult(
            results=results,