Merge branch 'develop' into fix/table-colors-row-col-add

This commit is contained in:
Palanikannan1437 2024-01-18 16:34:44 +05:30
commit 7c86fbc554
677 changed files with 29529 additions and 13339 deletions

View File

@ -1,14 +1,12 @@
# Database Settings # Database Settings
PGUSER="plane" POSTGRES_USER="plane"
PGPASSWORD="plane" POSTGRES_PASSWORD="plane"
PGHOST="plane-db" POSTGRES_DB="plane"
PGDATABASE="plane" PGDATA="/var/lib/postgresql/data"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings # Redis Settings
REDIS_HOST="plane-redis" REDIS_HOST="plane-redis"
REDIS_PORT="6379" REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# AWS Settings # AWS Settings
AWS_REGION="" AWS_REGION=""

View File

@ -1,7 +1,8 @@
name: Bug report name: Bug report
description: Create a bug report to help us improve Plane description: Create a bug report to help us improve Plane
title: "[bug]: " title: "[bug]: "
labels: [bug, need testing] labels: [🐛bug]
assignees: [srinivaspendem, pushya-plane]
body: body:
- type: markdown - type: markdown
attributes: attributes:

View File

@ -1,7 +1,8 @@
name: Feature request name: Feature request
description: Suggest a feature to improve Plane description: Suggest a feature to improve Plane
title: "[feature]: " title: "[feature]: "
labels: [feature] labels: [✨feature]
assignees: [srinivaspendem, pushya-plane]
body: body:
- type: markdown - type: markdown
attributes: attributes:

View File

@ -25,7 +25,7 @@ jobs:
- name: Get changed files - name: Get changed files
id: changed-files id: changed-files
uses: tj-actions/changed-files@v38 uses: tj-actions/changed-files@v41
with: with:
files_yaml: | files_yaml: |
apiserver: apiserver:

View File

@ -2,10 +2,10 @@ name: "CodeQL"
on: on:
push: push:
branches: [ 'develop', 'hot-fix', 'stage-release' ] branches: [ 'develop', 'preview', 'master' ]
pull_request: pull_request:
# The branches below must be a subset of the branches above # The branches below must be a subset of the branches above
branches: [ 'develop' ] branches: [ 'develop', 'preview', 'master' ]
schedule: schedule:
- cron: '53 19 * * 5' - cron: '53 19 * * 5'

View File

@ -3,14 +3,14 @@ name: Create Sync Action
on: on:
pull_request: pull_request:
branches: branches:
- develop # Change this to preview - preview
types: types:
- closed - closed
env: env:
SOURCE_BRANCH_NAME: ${{github.event.pull_request.base.ref}} SOURCE_BRANCH_NAME: ${{github.event.pull_request.base.ref}}
jobs: jobs:
create_pr: sync_changes:
# Only run the job when a PR is merged # Only run the job when a PR is merged
if: github.event.pull_request.merged == true if: github.event.pull_request.merged == true
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -33,23 +33,14 @@ jobs:
sudo apt update sudo apt update
sudo apt install gh -y sudo apt install gh -y
- name: Create Pull Request - name: Push Changes to Target Repo
env: env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: | run: |
TARGET_REPO="${{ secrets.SYNC_TARGET_REPO_NAME }}" TARGET_REPO="${{ secrets.SYNC_TARGET_REPO_NAME }}"
TARGET_BRANCH="${{ secrets.SYNC_TARGET_BRANCH_NAME }}" TARGET_BRANCH="${{ secrets.SYNC_TARGET_BRANCH_NAME }}"
TARGET_BASE_BRANCH="${{ secrets.SYNC_TARGET_BASE_BRANCH_NAME }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}" SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
git checkout $SOURCE_BRANCH git checkout $SOURCE_BRANCH
git remote add target-origin "https://$GH_TOKEN@github.com/$TARGET_REPO.git" git remote add target-origin "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target-origin $SOURCE_BRANCH:$TARGET_BRANCH git push target-origin $SOURCE_BRANCH:$TARGET_BRANCH
PR_TITLE=${{secrets.SYNC_PR_TITLE}}
gh pr create \
--base $TARGET_BASE_BRANCH \
--head $TARGET_BRANCH \
--title "$PR_TITLE" \
--repo $TARGET_REPO

View File

@ -63,7 +63,7 @@ Thats it!
## 🍙 Self Hosting ## 🍙 Self Hosting
For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting/docker-compose) documentation page For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/docker-compose) documentation page
## 🚀 Features ## 🚀 Features

View File

@ -26,7 +26,9 @@ def update_description():
updated_issues.append(issue) updated_issues.append(issue)
Issue.objects.bulk_update( Issue.objects.bulk_update(
updated_issues, ["description_html", "description_stripped"], batch_size=100 updated_issues,
["description_html", "description_stripped"],
batch_size=100,
) )
print("Success") print("Success")
except Exception as e: except Exception as e:
@ -40,7 +42,9 @@ def update_comments():
updated_issue_comments = [] updated_issue_comments = []
for issue_comment in issue_comments: for issue_comment in issue_comments:
issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>" issue_comment.comment_html = (
f"<p>{issue_comment.comment_stripped}</p>"
)
updated_issue_comments.append(issue_comment) updated_issue_comments.append(issue_comment)
IssueComment.objects.bulk_update( IssueComment.objects.bulk_update(
@ -99,7 +103,9 @@ def updated_issue_sort_order():
issue.sort_order = issue.sequence_id * random.randint(100, 500) issue.sort_order = issue.sequence_id * random.randint(100, 500)
updated_issues.append(issue) updated_issues.append(issue)
Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100) Issue.objects.bulk_update(
updated_issues, ["sort_order"], batch_size=100
)
print("Success") print("Success")
except Exception as e: except Exception as e:
print(e) print(e)
@ -137,7 +143,9 @@ def update_project_cover_images():
project.cover_image = project_cover_images[random.randint(0, 19)] project.cover_image = project_cover_images[random.randint(0, 19)]
updated_projects.append(project) updated_projects.append(project)
Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100) Project.objects.bulk_update(
updated_projects, ["cover_image"], batch_size=100
)
print("Success") print("Success")
except Exception as e: except Exception as e:
print(e) print(e)
@ -186,7 +194,9 @@ def update_label_color():
def create_slack_integration(): def create_slack_integration():
try: try:
_ = Integration.objects.create(provider="slack", network=2, title="Slack") _ = Integration.objects.create(
provider="slack", network=2, title="Slack"
)
print("Success") print("Success")
except Exception as e: except Exception as e:
print(e) print(e)
@ -212,12 +222,16 @@ def update_integration_verified():
def update_start_date(): def update_start_date():
try: try:
issues = Issue.objects.filter(state__group__in=["started", "completed"]) issues = Issue.objects.filter(
state__group__in=["started", "completed"]
)
updated_issues = [] updated_issues = []
for issue in issues: for issue in issues:
issue.start_date = issue.created_at.date() issue.start_date = issue.created_at.date()
updated_issues.append(issue) updated_issues.append(issue)
Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500) Issue.objects.bulk_update(
updated_issues, ["start_date"], batch_size=500
)
print("Success") print("Success")
except Exception as e: except Exception as e:
print(e) print(e)

View File

@ -2,10 +2,10 @@
import os import os
import sys import sys
if __name__ == '__main__': if __name__ == "__main__":
os.environ.setdefault( os.environ.setdefault(
'DJANGO_SETTINGS_MODULE', "DJANGO_SETTINGS_MODULE", "plane.settings.production"
'plane.settings.production') )
try: try:
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line
except ImportError as exc: except ImportError as exc:

View File

@ -1,3 +1,3 @@
from .celery import app as celery_app from .celery import app as celery_app
__all__ = ('celery_app',) __all__ = ("celery_app",)

View File

@ -2,4 +2,4 @@ from django.apps import AppConfig
class AnalyticsConfig(AppConfig): class AnalyticsConfig(AppConfig):
name = 'plane.analytics' name = "plane.analytics"

View File

@ -25,7 +25,10 @@ class APIKeyAuthentication(authentication.BaseAuthentication):
def validate_api_token(self, token): def validate_api_token(self, token):
try: try:
api_token = APIToken.objects.get( api_token = APIToken.objects.get(
Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), Q(
Q(expired_at__gt=timezone.now())
| Q(expired_at__isnull=True)
),
token=token, token=token,
is_active=True, is_active=True,
) )

View File

@ -1,17 +1,18 @@
from rest_framework.throttling import SimpleRateThrottle from rest_framework.throttling import SimpleRateThrottle
class ApiKeyRateThrottle(SimpleRateThrottle): class ApiKeyRateThrottle(SimpleRateThrottle):
scope = 'api_key' scope = "api_key"
rate = '60/minute' rate = "60/minute"
def get_cache_key(self, request, view): def get_cache_key(self, request, view):
# Retrieve the API key from the request header # Retrieve the API key from the request header
api_key = request.headers.get('X-Api-Key') api_key = request.headers.get("X-Api-Key")
if not api_key: if not api_key:
return None # Allow the request if there's no API key return None # Allow the request if there's no API key
# Use the API key as part of the cache key # Use the API key as part of the cache key
return f'{self.scope}:{api_key}' return f"{self.scope}:{api_key}"
def allow_request(self, request, view): def allow_request(self, request, view):
allowed = super().allow_request(request, view) allowed = super().allow_request(request, view)
@ -35,7 +36,7 @@ class ApiKeyRateThrottle(SimpleRateThrottle):
reset_time = int(now + self.duration) reset_time = int(now + self.duration)
# Add headers # Add headers
request.META['X-RateLimit-Remaining'] = max(0, available) request.META["X-RateLimit-Remaining"] = max(0, available)
request.META['X-RateLimit-Reset'] = reset_time request.META["X-RateLimit-Reset"] = reset_time
return allowed return allowed

View File

@ -13,5 +13,9 @@ from .issue import (
) )
from .state import StateLiteSerializer, StateSerializer from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer from .module import (
ModuleSerializer,
ModuleIssueSerializer,
ModuleLiteSerializer,
)
from .inbox import InboxIssueSerializer from .inbox import InboxIssueSerializer

View File

@ -100,6 +100,8 @@ class BaseSerializer(serializers.ModelSerializer):
response[expand] = exp_serializer.data response[expand] = exp_serializer.data
else: else:
# You might need to handle this case differently # You might need to handle this case differently
response[expand] = getattr(instance, f"{expand}_id", None) response[expand] = getattr(
instance, f"{expand}_id", None
)
return response return response

View File

@ -23,7 +23,9 @@ class CycleSerializer(BaseSerializer):
and data.get("end_date", None) is not None and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None) and data.get("start_date", None) > data.get("end_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed end date") raise serializers.ValidationError(
"Start date cannot exceed end date"
)
return data return data
class Meta: class Meta:
@ -55,7 +57,6 @@ class CycleIssueSerializer(BaseSerializer):
class CycleLiteSerializer(BaseSerializer): class CycleLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = Cycle model = Cycle
fields = "__all__" fields = "__all__"

View File

@ -2,8 +2,8 @@
from .base import BaseSerializer from .base import BaseSerializer
from plane.db.models import InboxIssue from plane.db.models import InboxIssue
class InboxIssueSerializer(BaseSerializer):
class InboxIssueSerializer(BaseSerializer):
class Meta: class Meta:
model = InboxIssue model = InboxIssue
fields = "__all__" fields = "__all__"

View File

@ -27,6 +27,7 @@ from .module import ModuleSerializer, ModuleLiteSerializer
from .user import UserLiteSerializer from .user import UserLiteSerializer
from .state import StateLiteSerializer from .state import StateLiteSerializer
class IssueSerializer(BaseSerializer): class IssueSerializer(BaseSerializer):
assignees = serializers.ListField( assignees = serializers.ListField(
child=serializers.PrimaryKeyRelatedField( child=serializers.PrimaryKeyRelatedField(
@ -66,12 +67,14 @@ class IssueSerializer(BaseSerializer):
and data.get("target_date", None) is not None and data.get("target_date", None) is not None
and data.get("start_date", None) > data.get("target_date", None) and data.get("start_date", None) > data.get("target_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed target date") raise serializers.ValidationError(
"Start date cannot exceed target date"
)
try: try:
if(data.get("description_html", None) is not None): if data.get("description_html", None) is not None:
parsed = html.fromstring(data["description_html"]) parsed = html.fromstring(data["description_html"])
parsed_str = html.tostring(parsed, encoding='unicode') parsed_str = html.tostring(parsed, encoding="unicode")
data["description_html"] = parsed_str data["description_html"] = parsed_str
except Exception as e: except Exception as e:
@ -96,7 +99,8 @@ class IssueSerializer(BaseSerializer):
if ( if (
data.get("state") data.get("state")
and not State.objects.filter( and not State.objects.filter(
project_id=self.context.get("project_id"), pk=data.get("state").id project_id=self.context.get("project_id"),
pk=data.get("state").id,
).exists() ).exists()
): ):
raise serializers.ValidationError( raise serializers.ValidationError(
@ -107,7 +111,8 @@ class IssueSerializer(BaseSerializer):
if ( if (
data.get("parent") data.get("parent")
and not Issue.objects.filter( and not Issue.objects.filter(
workspace_id=self.context.get("workspace_id"), pk=data.get("parent").id workspace_id=self.context.get("workspace_id"),
pk=data.get("parent").id,
).exists() ).exists()
): ):
raise serializers.ValidationError( raise serializers.ValidationError(
@ -238,9 +243,13 @@ class IssueSerializer(BaseSerializer):
] ]
if "labels" in self.fields: if "labels" in self.fields:
if "labels" in self.expand: if "labels" in self.expand:
data["labels"] = LabelSerializer(instance.labels.all(), many=True).data data["labels"] = LabelSerializer(
instance.labels.all(), many=True
).data
else: else:
data["labels"] = [str(label.id) for label in instance.labels.all()] data["labels"] = [
str(label.id) for label in instance.labels.all()
]
return data return data
@ -278,7 +287,8 @@ class IssueLinkSerializer(BaseSerializer):
# Validation if url already exists # Validation if url already exists
def create(self, validated_data): def create(self, validated_data):
if IssueLink.objects.filter( if IssueLink.objects.filter(
url=validated_data.get("url"), issue_id=validated_data.get("issue_id") url=validated_data.get("url"),
issue_id=validated_data.get("issue_id"),
).exists(): ).exists():
raise serializers.ValidationError( raise serializers.ValidationError(
{"error": "URL already exists for this Issue"} {"error": "URL already exists for this Issue"}
@ -324,9 +334,9 @@ class IssueCommentSerializer(BaseSerializer):
def validate(self, data): def validate(self, data):
try: try:
if(data.get("comment_html", None) is not None): if data.get("comment_html", None) is not None:
parsed = html.fromstring(data["comment_html"]) parsed = html.fromstring(data["comment_html"])
parsed_str = html.tostring(parsed, encoding='unicode') parsed_str = html.tostring(parsed, encoding="unicode")
data["comment_html"] = parsed_str data["comment_html"] = parsed_str
except Exception as e: except Exception as e:
@ -362,7 +372,6 @@ class ModuleIssueSerializer(BaseSerializer):
class LabelLiteSerializer(BaseSerializer): class LabelLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = Label model = Label
fields = [ fields = [

View File

@ -52,7 +52,9 @@ class ModuleSerializer(BaseSerializer):
and data.get("target_date", None) is not None and data.get("target_date", None) is not None
and data.get("start_date", None) > data.get("target_date", None) and data.get("start_date", None) > data.get("target_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed target date") raise serializers.ValidationError(
"Start date cannot exceed target date"
)
if data.get("members", []): if data.get("members", []):
data["members"] = ProjectMember.objects.filter( data["members"] = ProjectMember.objects.filter(
@ -146,7 +148,8 @@ class ModuleLinkSerializer(BaseSerializer):
# Validation if url already exists # Validation if url already exists
def create(self, validated_data): def create(self, validated_data):
if ModuleLink.objects.filter( if ModuleLink.objects.filter(
url=validated_data.get("url"), module_id=validated_data.get("module_id") url=validated_data.get("url"),
module_id=validated_data.get("module_id"),
).exists(): ).exists():
raise serializers.ValidationError( raise serializers.ValidationError(
{"error": "URL already exists for this Issue"} {"error": "URL already exists for this Issue"}
@ -155,7 +158,6 @@ class ModuleLinkSerializer(BaseSerializer):
class ModuleLiteSerializer(BaseSerializer): class ModuleLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = Module model = Module
fields = "__all__" fields = "__all__"

View File

@ -2,12 +2,17 @@
from rest_framework import serializers from rest_framework import serializers
# Module imports # Module imports
from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate from plane.db.models import (
Project,
ProjectIdentifier,
WorkspaceMember,
State,
Estimate,
)
from .base import BaseSerializer from .base import BaseSerializer
class ProjectSerializer(BaseSerializer): class ProjectSerializer(BaseSerializer):
total_members = serializers.IntegerField(read_only=True) total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True) total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True) total_modules = serializers.IntegerField(read_only=True)
@ -21,7 +26,7 @@ class ProjectSerializer(BaseSerializer):
fields = "__all__" fields = "__all__"
read_only_fields = [ read_only_fields = [
"id", "id",
'emoji', "emoji",
"workspace", "workspace",
"created_at", "created_at",
"updated_at", "updated_at",
@ -59,12 +64,16 @@ class ProjectSerializer(BaseSerializer):
def create(self, validated_data): def create(self, validated_data):
identifier = validated_data.get("identifier", "").strip().upper() identifier = validated_data.get("identifier", "").strip().upper()
if identifier == "": if identifier == "":
raise serializers.ValidationError(detail="Project Identifier is required") raise serializers.ValidationError(
detail="Project Identifier is required"
)
if ProjectIdentifier.objects.filter( if ProjectIdentifier.objects.filter(
name=identifier, workspace_id=self.context["workspace_id"] name=identifier, workspace_id=self.context["workspace_id"]
).exists(): ).exists():
raise serializers.ValidationError(detail="Project Identifier is taken") raise serializers.ValidationError(
detail="Project Identifier is taken"
)
project = Project.objects.create( project = Project.objects.create(
**validated_data, workspace_id=self.context["workspace_id"] **validated_data, workspace_id=self.context["workspace_id"]

View File

@ -7,9 +7,9 @@ class StateSerializer(BaseSerializer):
def validate(self, data): def validate(self, data):
# If the default is being provided then make all other states default False # If the default is being provided then make all other states default False
if data.get("default", False): if data.get("default", False):
State.objects.filter(project_id=self.context.get("project_id")).update( State.objects.filter(
default=False project_id=self.context.get("project_id")
) ).update(default=False)
return data return data
class Meta: class Meta:

View File

@ -5,6 +5,7 @@ from .base import BaseSerializer
class WorkspaceLiteSerializer(BaseSerializer): class WorkspaceLiteSerializer(BaseSerializer):
"""Lite serializer with only required fields""" """Lite serializer with only required fields"""
class Meta: class Meta:
model = Workspace model = Workspace
fields = [ fields = [

View File

@ -41,7 +41,9 @@ class WebhookMixin:
bulk = False bulk = False
def finalize_response(self, request, response, *args, **kwargs): def finalize_response(self, request, response, *args, **kwargs):
response = super().finalize_response(request, response, *args, **kwargs) response = super().finalize_response(
request, response, *args, **kwargs
)
# Check for the case should webhook be sent # Check for the case should webhook be sent
if ( if (
@ -104,15 +106,14 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
) )
if isinstance(e, ObjectDoesNotExist): if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response( return Response(
{"error": f"{model_name} does not exist."}, {"error": f"The required object does not exist."},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
if isinstance(e, KeyError): if isinstance(e, KeyError):
return Response( return Response(
{"error": f"key {e} does not exist"}, {"error": f" The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -140,7 +141,9 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
def finalize_response(self, request, response, *args, **kwargs): def finalize_response(self, request, response, *args, **kwargs):
# Call super to get the default response # Call super to get the default response
response = super().finalize_response(request, response, *args, **kwargs) response = super().finalize_response(
request, response, *args, **kwargs
)
# Add custom headers if they exist in the request META # Add custom headers if they exist in the request META
ratelimit_remaining = request.META.get("X-RateLimit-Remaining") ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
@ -164,13 +167,17 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
@property @property
def fields(self): def fields(self):
fields = [ fields = [
field for field in self.request.GET.get("fields", "").split(",") if field field
for field in self.request.GET.get("fields", "").split(",")
if field
] ]
return fields if fields else None return fields if fields else None
@property @property
def expand(self): def expand(self):
expand = [ expand = [
expand for expand in self.request.GET.get("expand", "").split(",") if expand expand
for expand in self.request.GET.get("expand", "").split(",")
if expand
] ]
return expand if expand else None return expand if expand else None

View File

@ -12,7 +12,13 @@ from rest_framework import status
# Module imports # Module imports
from .base import BaseAPIView, WebhookMixin from .base import BaseAPIView, WebhookMixin
from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment from plane.db.models import (
Cycle,
Issue,
CycleIssue,
IssueLink,
IssueAttachment,
)
from plane.app.permissions import ProjectEntityPermission from plane.app.permissions import ProjectEntityPermission
from plane.api.serializers import ( from plane.api.serializers import (
CycleSerializer, CycleSerializer,
@ -102,7 +108,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
), ),
) )
) )
.annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) .annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate( .annotate(
completed_estimates=Sum( completed_estimates=Sum(
"issue_cycle__issue__estimate_point", "issue_cycle__issue__estimate_point",
@ -201,7 +209,8 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
# Incomplete Cycles # Incomplete Cycles
if cycle_view == "incomplete": if cycle_view == "incomplete":
queryset = queryset.filter( queryset = queryset.filter(
Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), Q(end_date__gte=timezone.now().date())
| Q(end_date__isnull=True),
) )
return self.paginate( return self.paginate(
request=request, request=request,
@ -238,8 +247,12 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
project_id=project_id, project_id=project_id,
owned_by=request.user, owned_by=request.user,
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) serializer.data, status=status.HTTP_201_CREATED
)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else: else:
return Response( return Response(
{ {
@ -249,15 +262,22 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
) )
def patch(self, request, slug, project_id, pk): def patch(self, request, slug, project_id, pk):
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
request_data = request.data request_data = request.data
if cycle.end_date is not None and cycle.end_date < timezone.now().date(): if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
if "sort_order" in request_data: if "sort_order" in request_data:
# Can only change sort order # Can only change sort order
request_data = { request_data = {
"sort_order": request_data.get("sort_order", cycle.sort_order) "sort_order": request_data.get(
"sort_order", cycle.sort_order
)
} }
else: else:
return Response( return Response(
@ -275,11 +295,13 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
def delete(self, request, slug, project_id, pk): def delete(self, request, slug, project_id, pk):
cycle_issues = list( cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( CycleIssue.objects.filter(
"issue", flat=True cycle_id=self.kwargs.get("pk")
).values_list("issue", flat=True)
) )
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
) )
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
issue_activity.delay( issue_activity.delay(
type="cycle.activity.deleted", type="cycle.activity.deleted",
@ -319,7 +341,9 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
def get_queryset(self): def get_queryset(self):
return ( return (
CycleIssue.objects.annotate( CycleIssue.objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("issue_id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -342,7 +366,9 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
issues = ( issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -364,7 +390,9 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -387,14 +415,18 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
if not issues: if not issues:
return Response( return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
cycle = Cycle.objects.get( cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=cycle_id workspace__slug=slug, project_id=project_id, pk=cycle_id
) )
if cycle.end_date is not None and cycle.end_date < timezone.now().date(): if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
return Response( return Response(
{ {
"error": "The Cycle has already been completed so no new issues can be added" "error": "The Cycle has already been completed so no new issues can be added"
@ -479,7 +511,10 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
def delete(self, request, slug, project_id, cycle_id, issue_id): def delete(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get( cycle_issue = CycleIssue.objects.get(
issue_id=issue_id, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id issue_id=issue_id,
workspace__slug=slug,
project_id=project_id,
cycle_id=cycle_id,
) )
issue_id = cycle_issue.issue_id issue_id = cycle_issue.issue_id
cycle_issue.delete() cycle_issue.delete()

View File

@ -14,7 +14,14 @@ from rest_framework.response import Response
from .base import BaseAPIView from .base import BaseAPIView
from plane.app.permissions import ProjectLitePermission from plane.app.permissions import ProjectLitePermission
from plane.api.serializers import InboxIssueSerializer, IssueSerializer from plane.api.serializers import InboxIssueSerializer, IssueSerializer
from plane.db.models import InboxIssue, Issue, State, ProjectMember, Project, Inbox from plane.db.models import (
InboxIssue,
Issue,
State,
ProjectMember,
Project,
Inbox,
)
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
@ -43,7 +50,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
).first() ).first()
project = Project.objects.get( project = Project.objects.get(
workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id") workspace__slug=self.kwargs.get("slug"),
pk=self.kwargs.get("project_id"),
) )
if inbox is None and not project.inbox_view: if inbox is None and not project.inbox_view:
@ -51,7 +59,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
return ( return (
InboxIssue.objects.filter( InboxIssue.objects.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), Q(snoozed_till__gte=timezone.now())
| Q(snoozed_till__isnull=True),
workspace__slug=self.kwargs.get("slug"), workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"), project_id=self.kwargs.get("project_id"),
inbox_id=inbox.id, inbox_id=inbox.id,
@ -87,7 +96,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
if not request.data.get("issue", {}).get("name", False): if not request.data.get("issue", {}).get("name", False):
return Response( return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Name is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
inbox = Inbox.objects.filter( inbox = Inbox.objects.filter(
@ -117,7 +127,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
"none", "none",
]: ]:
return Response( return Response(
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST {"error": "Invalid priority"},
status=status.HTTP_400_BAD_REQUEST,
) )
# Create or get state # Create or get state
@ -222,10 +233,14 @@ class InboxIssueAPIEndpoint(BaseAPIView):
"description_html": issue_data.get( "description_html": issue_data.get(
"description_html", issue.description_html "description_html", issue.description_html
), ),
"description": issue_data.get("description", issue.description), "description": issue_data.get(
"description", issue.description
),
} }
issue_serializer = IssueSerializer(issue, data=issue_data, partial=True) issue_serializer = IssueSerializer(
issue, data=issue_data, partial=True
)
if issue_serializer.is_valid(): if issue_serializer.is_valid():
current_instance = issue current_instance = issue
@ -266,7 +281,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
project_id=project_id, project_id=project_id,
) )
state = State.objects.filter( state = State.objects.filter(
group="cancelled", workspace__slug=slug, project_id=project_id group="cancelled",
workspace__slug=slug,
project_id=project_id,
).first() ).first()
if state is not None: if state is not None:
issue.state = state issue.state = state
@ -284,17 +301,22 @@ class InboxIssueAPIEndpoint(BaseAPIView):
if issue.state.name == "Triage": if issue.state.name == "Triage":
# Move to default state # Move to default state
state = State.objects.filter( state = State.objects.filter(
workspace__slug=slug, project_id=project_id, default=True workspace__slug=slug,
project_id=project_id,
default=True,
).first() ).first()
if state is not None: if state is not None:
issue.state = state issue.state = state
issue.save() issue.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else: else:
return Response( return Response(
InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK InboxIssueSerializer(inbox_issue).data,
status=status.HTTP_200_OK,
) )
def delete(self, request, slug, project_id, issue_id): def delete(self, request, slug, project_id, issue_id):

View File

@ -67,7 +67,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.issue_objects.annotate( Issue.issue_objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -86,7 +88,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None): def get(self, request, slug, project_id, pk=None):
if pk: if pk:
issue = Issue.issue_objects.annotate( issue = Issue.issue_objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -102,7 +106,13 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -117,7 +127,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -127,7 +139,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -175,7 +189,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
@ -209,7 +225,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
# Track the issue # Track the issue
issue_activity.delay( issue_activity.delay(
type="issue.activity.created", type="issue.activity.created",
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
self.request.data, cls=DjangoJSONEncoder
),
actor_id=str(request.user.id), actor_id=str(request.user.id),
issue_id=str(serializer.data.get("id", None)), issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id), project_id=str(project_id),
@ -220,7 +238,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def patch(self, request, slug, project_id, pk=None): def patch(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
project = Project.objects.get(pk=project_id) project = Project.objects.get(pk=project_id)
current_instance = json.dumps( current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder IssueSerializer(issue).data, cls=DjangoJSONEncoder
@ -250,7 +270,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, slug, project_id, pk=None): def delete(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps( current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder IssueSerializer(issue).data, cls=DjangoJSONEncoder
) )
@ -297,11 +319,17 @@ class LabelAPIEndpoint(BaseAPIView):
serializer = LabelSerializer(data=request.data) serializer = LabelSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(project_id=project_id) serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) serializer.data, status=status.HTTP_201_CREATED
)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError: except IntegrityError:
return Response( return Response(
{"error": "Label with the same name already exists in the project"}, {
"error": "Label with the same name already exists in the project"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -318,7 +346,11 @@ class LabelAPIEndpoint(BaseAPIView):
).data, ).data,
) )
label = self.get_queryset().get(pk=pk) label = self.get_queryset().get(pk=pk)
serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,) serializer = LabelSerializer(
label,
fields=self.fields,
expand=self.expand,
)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
def patch(self, request, slug, project_id, pk=None): def patch(self, request, slug, project_id, pk=None):
@ -329,7 +361,6 @@ class LabelAPIEndpoint(BaseAPIView):
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, slug, project_id, pk=None): def delete(self, request, slug, project_id, pk=None):
label = self.get_queryset().get(pk=pk) label = self.get_queryset().get(pk=pk)
label.delete() label.delete()
@ -395,7 +426,9 @@ class IssueLinkAPIEndpoint(BaseAPIView):
) )
issue_activity.delay( issue_activity.delay(
type="link.activity.created", type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
serializer.data, cls=DjangoJSONEncoder
),
actor_id=str(self.request.user.id), actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")), issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")), project_id=str(self.kwargs.get("project_id")),
@ -407,14 +440,19 @@ class IssueLinkAPIEndpoint(BaseAPIView):
def patch(self, request, slug, project_id, issue_id, pk): def patch(self, request, slug, project_id, issue_id, pk):
issue_link = IssueLink.objects.get( issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps( current_instance = json.dumps(
IssueLinkSerializer(issue_link).data, IssueLinkSerializer(issue_link).data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
) )
serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True) serializer = IssueLinkSerializer(
issue_link, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
issue_activity.delay( issue_activity.delay(
@ -431,7 +469,10 @@ class IssueLinkAPIEndpoint(BaseAPIView):
def delete(self, request, slug, project_id, issue_id, pk): def delete(self, request, slug, project_id, issue_id, pk):
issue_link = IssueLink.objects.get( issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
current_instance = json.dumps( current_instance = json.dumps(
IssueLinkSerializer(issue_link).data, IssueLinkSerializer(issue_link).data,
@ -466,7 +507,9 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
def get_queryset(self): def get_queryset(self):
return ( return (
IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug")) IssueComment.objects.filter(
workspace__slug=self.kwargs.get("slug")
)
.filter(project_id=self.kwargs.get("project_id")) .filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id")) .filter(issue_id=self.kwargs.get("issue_id"))
.filter(project__project_projectmember__member=self.request.user) .filter(project__project_projectmember__member=self.request.user)
@ -518,7 +561,9 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
) )
issue_activity.delay( issue_activity.delay(
type="comment.activity.created", type="comment.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
serializer.data, cls=DjangoJSONEncoder
),
actor_id=str(self.request.user.id), actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")), issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")), project_id=str(self.kwargs.get("project_id")),
@ -530,7 +575,10 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
def patch(self, request, slug, project_id, issue_id, pk): def patch(self, request, slug, project_id, issue_id, pk):
issue_comment = IssueComment.objects.get( issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps( current_instance = json.dumps(
@ -556,7 +604,10 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
def delete(self, request, slug, project_id, issue_id, pk): def delete(self, request, slug, project_id, issue_id, pk):
issue_comment = IssueComment.objects.get( issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
current_instance = json.dumps( current_instance = json.dumps(
IssueCommentSerializer(issue_comment).data, IssueCommentSerializer(issue_comment).data,

View File

@ -55,7 +55,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"link_module", "link_module",
queryset=ModuleLink.objects.select_related("module", "created_by"), queryset=ModuleLink.objects.select_related(
"module", "created_by"
),
) )
) )
.annotate( .annotate(
@ -122,7 +124,13 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
project = Project.objects.get(pk=project_id, workspace__slug=slug) project = Project.objects.get(pk=project_id, workspace__slug=slug)
serializer = ModuleSerializer(data=request.data, context={"project_id": project_id, "workspace_id": project.workspace_id}) serializer = ModuleSerializer(
data=request.data,
context={
"project_id": project_id,
"workspace_id": project.workspace_id,
},
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
module = Module.objects.get(pk=serializer.data["id"]) module = Module.objects.get(pk=serializer.data["id"])
@ -131,8 +139,15 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def patch(self, request, slug, project_id, pk): def patch(self, request, slug, project_id, pk):
module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug) module = Module.objects.get(
serializer = ModuleSerializer(module, data=request.data, context={"project_id": project_id}, partial=True) pk=pk, project_id=project_id, workspace__slug=slug
)
serializer = ModuleSerializer(
module,
data=request.data,
context={"project_id": project_id},
partial=True,
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
@ -162,9 +177,13 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
) )
def delete(self, request, slug, project_id, pk): def delete(self, request, slug, project_id, pk):
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
module_issues = list( module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) ModuleIssue.objects.filter(module_id=pk).values_list(
"issue", flat=True
)
) )
issue_activity.delay( issue_activity.delay(
type="module.activity.deleted", type="module.activity.deleted",
@ -204,7 +223,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
def get_queryset(self): def get_queryset(self):
return ( return (
ModuleIssue.objects.annotate( ModuleIssue.objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("issue")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -228,7 +249,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
issues = ( issues = (
Issue.issue_objects.filter(issue_module__module_id=module_id) Issue.issue_objects.filter(issue_module__module_id=module_id)
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -250,7 +273,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -271,7 +296,8 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
if not len(issues): if not len(issues):
return Response( return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
module = Module.objects.get( module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=module_id workspace__slug=slug, project_id=project_id, pk=module_id
@ -354,7 +380,10 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
def delete(self, request, slug, project_id, module_id, issue_id): def delete(self, request, slug, project_id, module_id, issue_id):
module_issue = ModuleIssue.objects.get( module_issue = ModuleIssue.objects.get(
workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id workspace__slug=slug,
project_id=project_id,
module_id=module_id,
issue_id=issue_id,
) )
module_issue.delete() module_issue.delete()
issue_activity.delay( issue_activity.delay(

View File

@ -39,9 +39,15 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
def get_queryset(self): def get_queryset(self):
return ( return (
Project.objects.filter(workspace__slug=self.kwargs.get("slug")) Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2)) .filter(
Q(project_projectmember__member=self.request.user)
| Q(network=2)
)
.select_related( .select_related(
"workspace", "workspace__owner", "default_assignee", "project_lead" "workspace",
"workspace__owner",
"default_assignee",
"project_lead",
) )
.annotate( .annotate(
is_member=Exists( is_member=Exists(
@ -120,11 +126,18 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
request=request, request=request,
queryset=(projects), queryset=(projects),
on_results=lambda projects: ProjectSerializer( on_results=lambda projects: ProjectSerializer(
projects, many=True, fields=self.fields, expand=self.expand, projects,
many=True,
fields=self.fields,
expand=self.expand,
).data, ).data,
) )
project = self.get_queryset().get(workspace__slug=slug, pk=project_id) project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,) serializer = ProjectSerializer(
project,
fields=self.fields,
expand=self.expand,
)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
def post(self, request, slug): def post(self, request, slug):
@ -138,7 +151,9 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
# Add the user as Administrator to the project # Add the user as Administrator to the project
project_member = ProjectMember.objects.create( project_member = ProjectMember.objects.create(
project_id=serializer.data["id"], member=request.user, role=20 project_id=serializer.data["id"],
member=request.user,
role=20,
) )
# Also create the issue property for the user # Also create the issue property for the user
_ = IssueProperty.objects.create( _ = IssueProperty.objects.create(
@ -211,9 +226,15 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
] ]
) )
project = self.get_queryset().filter(pk=serializer.data["id"]).first() project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
.first()
)
serializer = ProjectSerializer(project) serializer = ProjectSerializer(project)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
serializer.data, status=status.HTTP_201_CREATED
)
return Response( return Response(
serializer.errors, serializer.errors,
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
@ -226,7 +247,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
) )
except Workspace.DoesNotExist as e: except Workspace.DoesNotExist as e:
return Response( return Response(
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Workspace does not exist"},
status=status.HTTP_404_NOT_FOUND,
) )
except ValidationError as e: except ValidationError as e:
return Response( return Response(
@ -250,7 +272,9 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
serializer.save() serializer.save()
if serializer.data["inbox_view"]: if serializer.data["inbox_view"]:
Inbox.objects.get_or_create( Inbox.objects.get_or_create(
name=f"{project.name} Inbox", project=project, is_default=True name=f"{project.name} Inbox",
project=project,
is_default=True,
) )
# Create the triage state in Backlog group # Create the triage state in Backlog group
@ -262,10 +286,16 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
color="#ff7700", color="#ff7700",
) )
project = self.get_queryset().filter(pk=serializer.data["id"]).first() project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
.first()
)
serializer = ProjectSerializer(project) serializer = ProjectSerializer(project)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError as e: except IntegrityError as e:
if "already exists" in str(e): if "already exists" in str(e):
return Response( return Response(
@ -274,7 +304,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
) )
except (Project.DoesNotExist, Workspace.DoesNotExist): except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response( return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
) )
except ValidationError as e: except ValidationError as e:
return Response( return Response(

View File

@ -34,7 +34,9 @@ class StateAPIEndpoint(BaseAPIView):
) )
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
serializer = StateSerializer(data=request.data, context={"project_id": project_id}) serializer = StateSerializer(
data=request.data, context={"project_id": project_id}
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(project_id=project_id) serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -64,14 +66,19 @@ class StateAPIEndpoint(BaseAPIView):
) )
if state.default: if state.default:
return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST) return Response(
{"error": "Default state cannot be deleted"},
status=status.HTTP_400_BAD_REQUEST,
)
# Check for any issues in the state # Check for any issues in the state
issue_exist = Issue.issue_objects.filter(state=state_id).exists() issue_exist = Issue.issue_objects.filter(state=state_id).exists()
if issue_exist: if issue_exist:
return Response( return Response(
{"error": "The state is not empty, only empty states can be deleted"}, {
"error": "The state is not empty, only empty states can be deleted"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -79,7 +86,9 @@ class StateAPIEndpoint(BaseAPIView):
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, project_id, state_id=None): def patch(self, request, slug, project_id, state_id=None):
state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id) state = State.objects.get(
workspace__slug=slug, project_id=project_id, pk=state_id
)
serializer = StateSerializer(state, data=request.data, partial=True) serializer = StateSerializer(state, data=request.data, partial=True)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()

View File

@ -25,7 +25,10 @@ class APIKeyAuthentication(authentication.BaseAuthentication):
def validate_api_token(self, token): def validate_api_token(self, token):
try: try:
api_token = APIToken.objects.get( api_token = APIToken.objects.get(
Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), Q(
Q(expired_at__gt=timezone.now())
| Q(expired_at__isnull=True)
),
token=token, token=token,
is_active=True, is_active=True,
) )

View File

@ -1,4 +1,3 @@
from .workspace import ( from .workspace import (
WorkSpaceBasePermission, WorkSpaceBasePermission,
WorkspaceOwnerPermission, WorkspaceOwnerPermission,
@ -13,5 +12,3 @@ from .project import (
ProjectMemberPermission, ProjectMemberPermission,
ProjectLitePermission, ProjectLitePermission,
) )

View File

@ -35,7 +35,11 @@ from .project import (
ProjectMemberRoleSerializer, ProjectMemberRoleSerializer,
) )
from .state import StateSerializer, StateLiteSerializer from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer from .view import (
GlobalViewSerializer,
IssueViewSerializer,
IssueViewFavoriteSerializer,
)
from .cycle import ( from .cycle import (
CycleSerializer, CycleSerializer,
CycleIssueSerializer, CycleIssueSerializer,
@ -64,8 +68,6 @@ from .issue import (
IssueRelationSerializer, IssueRelationSerializer,
RelatedIssueSerializer, RelatedIssueSerializer,
IssuePublicSerializer, IssuePublicSerializer,
IssueRelationLiteSerializer,
) )
from .module import ( from .module import (
@ -91,7 +93,12 @@ from .integration import (
from .importer import ImporterSerializer from .importer import ImporterSerializer
from .page import PageSerializer, PageLogSerializer, SubPageSerializer, PageFavoriteSerializer from .page import (
PageSerializer,
PageLogSerializer,
SubPageSerializer,
PageFavoriteSerializer,
)
from .estimate import ( from .estimate import (
EstimateSerializer, EstimateSerializer,
@ -99,7 +106,11 @@ from .estimate import (
EstimateReadSerializer, EstimateReadSerializer,
) )
from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer from .inbox import (
InboxSerializer,
InboxIssueSerializer,
IssueStateInboxSerializer,
)
from .analytic import AnalyticViewSerializer from .analytic import AnalyticViewSerializer
@ -108,3 +119,5 @@ from .notification import NotificationSerializer
from .exporter import ExporterHistorySerializer from .exporter import ExporterHistorySerializer
from .webhook import WebhookSerializer, WebhookLogSerializer from .webhook import WebhookSerializer, WebhookLogSerializer
from .dashboard import DashboardSerializer, WidgetSerializer

View File

@ -3,7 +3,6 @@ from plane.db.models import APIToken, APIActivityLog
class APITokenSerializer(BaseSerializer): class APITokenSerializer(BaseSerializer):
class Meta: class Meta:
model = APIToken model = APIToken
fields = "__all__" fields = "__all__"
@ -18,14 +17,12 @@ class APITokenSerializer(BaseSerializer):
class APITokenReadSerializer(BaseSerializer): class APITokenReadSerializer(BaseSerializer):
class Meta: class Meta:
model = APIToken model = APIToken
exclude = ('token',) exclude = ("token",)
class APIActivityLogSerializer(BaseSerializer): class APIActivityLogSerializer(BaseSerializer):
class Meta: class Meta:
model = APIActivityLog model = APIActivityLog
fields = "__all__" fields = "__all__"

View File

@ -4,8 +4,8 @@ from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer): class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True) id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
class DynamicBaseSerializer(BaseSerializer):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
# If 'fields' is provided in the arguments, remove it and store it separately. # If 'fields' is provided in the arguments, remove it and store it separately.
# This is done so as not to pass this custom argument up to the superclass. # This is done so as not to pass this custom argument up to the superclass.
@ -59,6 +59,7 @@ class DynamicBaseSerializer(BaseSerializer):
LabelSerializer, LabelSerializer,
CycleIssueSerializer, CycleIssueSerializer,
IssueFlatSerializer, IssueFlatSerializer,
IssueRelationSerializer,
) )
# Expansion mapper # Expansion mapper
@ -77,14 +78,14 @@ class DynamicBaseSerializer(BaseSerializer):
"assignees": UserLiteSerializer, "assignees": UserLiteSerializer,
"labels": LabelSerializer, "labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer, "issue_cycle": CycleIssueSerializer,
"parent": IssueFlatSerializer, "parent": IssueSerializer,
"issue_relation": IssueRelationSerializer,
} }
self.fields[field] = expansion[field](many=True if field in ["members", "assignees", "labels", "issue_cycle"] else False) self.fields[field] = expansion[field](many=True if field in ["members", "assignees", "labels", "issue_cycle", "issue_relation"] else False)
return self.fields return self.fields
def to_representation(self, instance): def to_representation(self, instance):
response = super().to_representation(instance) response = super().to_representation(instance)
@ -101,6 +102,7 @@ class DynamicBaseSerializer(BaseSerializer):
IssueSerializer, IssueSerializer,
LabelSerializer, LabelSerializer,
CycleIssueSerializer, CycleIssueSerializer,
IssueRelationSerializer,
) )
# Expansion mapper # Expansion mapper
@ -119,6 +121,8 @@ class DynamicBaseSerializer(BaseSerializer):
"assignees": UserLiteSerializer, "assignees": UserLiteSerializer,
"labels": LabelSerializer, "labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer, "issue_cycle": CycleIssueSerializer,
"parent": IssueSerializer,
"issue_relation": IssueRelationSerializer
} }
# Check if field in expansion then expand the field # Check if field in expansion then expand the field
if expand in expansion: if expand in expansion:
@ -133,6 +137,8 @@ class DynamicBaseSerializer(BaseSerializer):
response[expand] = exp_serializer.data response[expand] = exp_serializer.data
else: else:
# You might need to handle this case differently # You might need to handle this case differently
response[expand] = getattr(instance, f"{expand}_id", None) response[expand] = getattr(
instance, f"{expand}_id", None
)
return response return response

View File

@ -7,7 +7,12 @@ from .user import UserLiteSerializer
from .issue import IssueStateSerializer from .issue import IssueStateSerializer
from .workspace import WorkspaceLiteSerializer from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite, CycleUserProperties from plane.db.models import (
Cycle,
CycleIssue,
CycleFavorite,
CycleUserProperties,
)
class CycleWriteSerializer(BaseSerializer): class CycleWriteSerializer(BaseSerializer):
@ -17,7 +22,9 @@ class CycleWriteSerializer(BaseSerializer):
and data.get("end_date", None) is not None and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None) and data.get("start_date", None) > data.get("end_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed end date") raise serializers.ValidationError(
"Start date cannot exceed end date"
)
return data return data
class Meta: class Meta:
@ -38,7 +45,9 @@ class CycleSerializer(BaseSerializer):
total_estimates = serializers.IntegerField(read_only=True) total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True) completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True) started_estimates = serializers.IntegerField(read_only=True)
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
status = serializers.CharField(read_only=True) status = serializers.CharField(read_only=True)
@ -48,7 +57,9 @@ class CycleSerializer(BaseSerializer):
and data.get("end_date", None) is not None and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None) and data.get("start_date", None) > data.get("end_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed end date") raise serializers.ValidationError(
"Start date cannot exceed end date"
)
return data return data
def get_assignees(self, obj): def get_assignees(self, obj):
@ -115,6 +126,5 @@ class CycleUserPropertiesSerializer(BaseSerializer):
read_only_fields = [ read_only_fields = [
"workspace", "workspace",
"project", "project",
"cycle" "cycle" "user",
"user",
] ]

View File

@ -0,0 +1,26 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import Dashboard, Widget
# Third party frameworks
from rest_framework import serializers
class DashboardSerializer(BaseSerializer):
class Meta:
model = Dashboard
fields = "__all__"
class WidgetSerializer(BaseSerializer):
is_visible = serializers.BooleanField(read_only=True)
widget_filters = serializers.JSONField(read_only=True)
class Meta:
model = Widget
fields = [
"id",
"key",
"is_visible",
"widget_filters"
]

View File

@ -2,12 +2,18 @@
from .base import BaseSerializer from .base import BaseSerializer
from plane.db.models import Estimate, EstimatePoint from plane.db.models import Estimate, EstimatePoint
from plane.app.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer from plane.app.serializers import (
WorkspaceLiteSerializer,
ProjectLiteSerializer,
)
from rest_framework import serializers from rest_framework import serializers
class EstimateSerializer(BaseSerializer): class EstimateSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta: class Meta:
@ -20,13 +26,14 @@ class EstimateSerializer(BaseSerializer):
class EstimatePointSerializer(BaseSerializer): class EstimatePointSerializer(BaseSerializer):
def validate(self, data): def validate(self, data):
if not data: if not data:
raise serializers.ValidationError("Estimate points are required") raise serializers.ValidationError("Estimate points are required")
value = data.get("value") value = data.get("value")
if value and len(value) > 20: if value and len(value) > 20:
raise serializers.ValidationError("Value can't be more than 20 characters") raise serializers.ValidationError(
"Value can't be more than 20 characters"
)
return data return data
class Meta: class Meta:
@ -41,7 +48,9 @@ class EstimatePointSerializer(BaseSerializer):
class EstimateReadSerializer(BaseSerializer): class EstimateReadSerializer(BaseSerializer):
points = EstimatePointSerializer(read_only=True, many=True) points = EstimatePointSerializer(read_only=True, many=True)
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta: class Meta:

View File

@ -5,7 +5,9 @@ from .user import UserLiteSerializer
class ExporterHistorySerializer(BaseSerializer): class ExporterHistorySerializer(BaseSerializer):
initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True) initiated_by_detail = UserLiteSerializer(
source="initiated_by", read_only=True
)
class Meta: class Meta:
model = ExporterHistory model = ExporterHistory

View File

@ -7,9 +7,13 @@ from plane.db.models import Importer
class ImporterSerializer(BaseSerializer): class ImporterSerializer(BaseSerializer):
initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True) initiated_by_detail = UserLiteSerializer(
source="initiated_by", read_only=True
)
project_detail = ProjectLiteSerializer(source="project", read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = Importer model = Importer

View File

@ -46,8 +46,12 @@ class InboxIssueLiteSerializer(BaseSerializer):
class IssueStateInboxSerializer(BaseSerializer): class IssueStateInboxSerializer(BaseSerializer):
state_detail = StateLiteSerializer(read_only=True, source="state") state_detail = StateLiteSerializer(read_only=True, source="state")
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) label_details = LabelLiteSerializer(
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) read_only=True, source="labels", many=True
)
assignee_details = UserLiteSerializer(
read_only=True, source="assignees", many=True
)
sub_issues_count = serializers.IntegerField(read_only=True) sub_issues_count = serializers.IntegerField(read_only=True)
issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True) issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)

View File

@ -13,7 +13,9 @@ class IntegrationSerializer(BaseSerializer):
class WorkspaceIntegrationSerializer(BaseSerializer): class WorkspaceIntegrationSerializer(BaseSerializer):
integration_detail = IntegrationSerializer(read_only=True, source="integration") integration_detail = IntegrationSerializer(
read_only=True, source="integration"
)
class Meta: class Meta:
model = WorkspaceIntegration model = WorkspaceIntegration

View File

@ -30,6 +30,8 @@ from plane.db.models import (
CommentReaction, CommentReaction,
IssueVote, IssueVote,
IssueRelation, IssueRelation,
State,
Project,
) )
@ -69,19 +71,26 @@ class IssueProjectLiteSerializer(BaseSerializer):
##TODO: Find a better way to write this serializer ##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany? ## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer): class IssueCreateSerializer(BaseSerializer):
state_detail = StateSerializer(read_only=True, source="state") # ids
created_by_detail = UserLiteSerializer(read_only=True, source="created_by") state_id = serializers.PrimaryKeyRelatedField(
project_detail = ProjectLiteSerializer(read_only=True, source="project") source="state",
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") queryset=State.objects.all(),
required=False,
assignees = serializers.ListField( allow_null=True,
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), )
parent_id = serializers.PrimaryKeyRelatedField(
source="parent",
queryset=Issue.objects.all(),
required=False,
allow_null=True,
)
label_ids = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True, write_only=True,
required=False, required=False,
) )
assignee_ids = serializers.ListField(
labels = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True, write_only=True,
required=False, required=False,
) )
@ -100,8 +109,10 @@ class IssueCreateSerializer(BaseSerializer):
def to_representation(self, instance): def to_representation(self, instance):
data = super().to_representation(instance) data = super().to_representation(instance)
data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()] assignee_ids = self.initial_data.get("assignee_ids")
data['labels'] = [str(label.id) for label in instance.labels.all()] data["assignee_ids"] = assignee_ids if assignee_ids else []
label_ids = self.initial_data.get("label_ids")
data["label_ids"] = label_ids if label_ids else []
return data return data
def validate(self, data): def validate(self, data):
@ -110,12 +121,14 @@ class IssueCreateSerializer(BaseSerializer):
and data.get("target_date", None) is not None and data.get("target_date", None) is not None
and data.get("start_date", None) > data.get("target_date", None) and data.get("start_date", None) > data.get("target_date", None)
): ):
raise serializers.ValidationError("Start date cannot exceed target date") raise serializers.ValidationError(
"Start date cannot exceed target date"
)
return data return data
def create(self, validated_data): def create(self, validated_data):
assignees = validated_data.pop("assignees", None) assignees = validated_data.pop("assignee_ids", None)
labels = validated_data.pop("labels", None) labels = validated_data.pop("label_ids", None)
project_id = self.context["project_id"] project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"] workspace_id = self.context["workspace_id"]
@ -173,8 +186,8 @@ class IssueCreateSerializer(BaseSerializer):
return issue return issue
def update(self, instance, validated_data): def update(self, instance, validated_data):
assignees = validated_data.pop("assignees", None) assignees = validated_data.pop("assignee_ids", None)
labels = validated_data.pop("labels", None) labels = validated_data.pop("label_ids", None)
# Related models # Related models
project_id = instance.project_id project_id = instance.project_id
@ -225,14 +238,15 @@ class IssueActivitySerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor") actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue") issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
class Meta: class Meta:
model = IssueActivity model = IssueActivity
fields = "__all__" fields = "__all__"
class IssuePropertySerializer(BaseSerializer): class IssuePropertySerializer(BaseSerializer):
class Meta: class Meta:
model = IssueProperty model = IssueProperty
@ -245,7 +259,9 @@ class IssuePropertySerializer(BaseSerializer):
class LabelSerializer(BaseSerializer): class LabelSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
project_detail = ProjectLiteSerializer(source="project", read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta: class Meta:
@ -268,7 +284,6 @@ class LabelLiteSerializer(BaseSerializer):
class IssueLabelSerializer(BaseSerializer): class IssueLabelSerializer(BaseSerializer):
class Meta: class Meta:
model = IssueLabel model = IssueLabel
fields = "__all__" fields = "__all__"
@ -278,14 +293,19 @@ class IssueLabelSerializer(BaseSerializer):
] ]
class IssueRelationLiteSerializer(DynamicBaseSerializer): class IssueRelationSerializer(BaseSerializer):
project_id = serializers.PrimaryKeyRelatedField(read_only=True) id = serializers.UUIDField(source="related_issue.id", read_only=True)
project_id = serializers.PrimaryKeyRelatedField(source="related_issue.project_id", read_only=True)
sequence_id = serializers.IntegerField(source="related_issue.sequence_id", read_only=True)
relation_type = serializers.CharField(read_only=True)
class Meta: class Meta:
model = Issue model = IssueRelation
fields = [ fields = [
"id", "id",
"project_id", "project_id",
"sequence_id", "sequence_id",
"relation_type",
] ]
read_only_fields = [ read_only_fields = [
"workspace", "workspace",
@ -293,26 +313,19 @@ class IssueRelationLiteSerializer(DynamicBaseSerializer):
] ]
class IssueRelationSerializer(BaseSerializer):
issue_detail = IssueRelationLiteSerializer(read_only=True, source="related_issue")
class Meta:
model = IssueRelation
fields = [
"issue_detail",
]
read_only_fields = [
"workspace",
"project",
]
class RelatedIssueSerializer(BaseSerializer): class RelatedIssueSerializer(BaseSerializer):
issue_detail = IssueRelationLiteSerializer(read_only=True, source="issue") id = serializers.UUIDField(source="issue.id", read_only=True)
project_id = serializers.PrimaryKeyRelatedField(source="issue.project_id", read_only=True)
sequence_id = serializers.IntegerField(source="issue.sequence_id", read_only=True)
relation_type = serializers.CharField(read_only=True)
class Meta: class Meta:
model = IssueRelation model = IssueRelation
fields = [ fields = [
"issue_detail", "id",
"project_id",
"sequence_id",
"relation_type",
] ]
read_only_fields = [ read_only_fields = [
"workspace", "workspace",
@ -407,7 +420,8 @@ class IssueLinkSerializer(BaseSerializer):
# Validation if url already exists # Validation if url already exists
def create(self, validated_data): def create(self, validated_data):
if IssueLink.objects.filter( if IssueLink.objects.filter(
url=validated_data.get("url"), issue_id=validated_data.get("issue_id") url=validated_data.get("url"),
issue_id=validated_data.get("issue_id"),
).exists(): ).exists():
raise serializers.ValidationError( raise serializers.ValidationError(
{"error": "URL already exists for this Issue"} {"error": "URL already exists for this Issue"}
@ -431,7 +445,6 @@ class IssueAttachmentSerializer(BaseSerializer):
class IssueReactionSerializer(BaseSerializer): class IssueReactionSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor") actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta: class Meta:
@ -466,12 +479,18 @@ class CommentReactionSerializer(BaseSerializer):
class IssueVoteSerializer(BaseSerializer): class IssueVoteSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor") actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta: class Meta:
model = IssueVote model = IssueVote
fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"] fields = [
"issue",
"vote",
"workspace",
"project",
"actor",
"actor_detail",
]
read_only_fields = fields read_only_fields = fields
@ -479,8 +498,12 @@ class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor") actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue") issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True) read_only=True, source="workspace"
)
comment_reactions = CommentReactionLiteSerializer(
read_only=True, many=True
)
is_member = serializers.BooleanField(read_only=True) is_member = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@ -514,10 +537,14 @@ class IssueStateFlatSerializer(BaseSerializer):
# Issue Serializer with state details # Issue Serializer with state details
class IssueStateSerializer(DynamicBaseSerializer): class IssueStateSerializer(DynamicBaseSerializer):
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) label_details = LabelLiteSerializer(
read_only=True, source="labels", many=True
)
state_detail = StateLiteSerializer(read_only=True, source="state") state_detail = StateLiteSerializer(read_only=True, source="state")
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) assignee_details = UserLiteSerializer(
read_only=True, source="assignees", many=True
)
sub_issues_count = serializers.IntegerField(read_only=True) sub_issues_count = serializers.IntegerField(read_only=True)
attachment_count = serializers.IntegerField(read_only=True) attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True) link_count = serializers.IntegerField(read_only=True)
@ -536,15 +563,19 @@ class IssueSerializer(DynamicBaseSerializer):
module_id = serializers.PrimaryKeyRelatedField(read_only=True) module_id = serializers.PrimaryKeyRelatedField(read_only=True)
# Many to many # Many to many
label_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source="labels") label_ids = serializers.PrimaryKeyRelatedField(
assignee_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source="assignees") read_only=True, many=True, source="labels"
)
assignee_ids = serializers.PrimaryKeyRelatedField(
read_only=True, many=True, source="assignees"
)
# Count items # Count items
sub_issues_count = serializers.IntegerField(read_only=True) sub_issues_count = serializers.IntegerField(read_only=True)
attachment_count = serializers.IntegerField(read_only=True) attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True) link_count = serializers.IntegerField(read_only=True)
# is # is_subscribed
is_subscribed = serializers.BooleanField(read_only=True) is_subscribed = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@ -582,11 +613,17 @@ class IssueSerializer(DynamicBaseSerializer):
class IssueLiteSerializer(DynamicBaseSerializer): class IssueLiteSerializer(DynamicBaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state") state_detail = StateLiteSerializer(read_only=True, source="state")
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) label_details = LabelLiteSerializer(
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) read_only=True, source="labels", many=True
)
assignee_details = UserLiteSerializer(
read_only=True, source="assignees", many=True
)
sub_issues_count = serializers.IntegerField(read_only=True) sub_issues_count = serializers.IntegerField(read_only=True)
cycle_id = serializers.UUIDField(read_only=True) cycle_id = serializers.UUIDField(read_only=True)
module_id = serializers.UUIDField(read_only=True) module_id = serializers.UUIDField(read_only=True)
@ -613,7 +650,9 @@ class IssueLiteSerializer(DynamicBaseSerializer):
class IssuePublicSerializer(BaseSerializer): class IssuePublicSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state") state_detail = StateLiteSerializer(read_only=True, source="state")
reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions") reactions = IssueReactionSerializer(
read_only=True, many=True, source="issue_reactions"
)
votes = IssueVoteSerializer(read_only=True, many=True) votes = IssueVoteSerializer(read_only=True, many=True)
class Meta: class Meta:
@ -636,7 +675,6 @@ class IssuePublicSerializer(BaseSerializer):
read_only_fields = fields read_only_fields = fields
class IssueSubscriberSerializer(BaseSerializer): class IssueSubscriberSerializer(BaseSerializer):
class Meta: class Meta:
model = IssueSubscriber model = IssueSubscriber

View File

@ -26,7 +26,9 @@ class ModuleWriteSerializer(BaseSerializer):
) )
project_detail = ProjectLiteSerializer(source="project", read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = Module model = Module
@ -42,12 +44,18 @@ class ModuleWriteSerializer(BaseSerializer):
def to_representation(self, instance): def to_representation(self, instance):
data = super().to_representation(instance) data = super().to_representation(instance)
data['members'] = [str(member.id) for member in instance.members.all()] data["members"] = [str(member.id) for member in instance.members.all()]
return data return data
def validate(self, data): def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None): if (
raise serializers.ValidationError("Start date cannot exceed target date") data.get("start_date", None) is not None
and data.get("target_date", None) is not None
and data.get("start_date", None) > data.get("target_date", None)
):
raise serializers.ValidationError(
"Start date cannot exceed target date"
)
return data return data
def create(self, validated_data): def create(self, validated_data):
@ -152,7 +160,8 @@ class ModuleLinkSerializer(BaseSerializer):
# Validation if url already exists # Validation if url already exists
def create(self, validated_data): def create(self, validated_data):
if ModuleLink.objects.filter( if ModuleLink.objects.filter(
url=validated_data.get("url"), module_id=validated_data.get("module_id") url=validated_data.get("url"),
module_id=validated_data.get("module_id"),
).exists(): ).exists():
raise serializers.ValidationError( raise serializers.ValidationError(
{"error": "URL already exists for this Issue"} {"error": "URL already exists for this Issue"}
@ -163,7 +172,9 @@ class ModuleLinkSerializer(BaseSerializer):
class ModuleSerializer(DynamicBaseSerializer): class ModuleSerializer(DynamicBaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
lead_detail = UserLiteSerializer(read_only=True, source="lead") lead_detail = UserLiteSerializer(read_only=True, source="lead")
members_detail = UserLiteSerializer(read_only=True, many=True, source="members") members_detail = UserLiteSerializer(
read_only=True, many=True, source="members"
)
link_module = ModuleLinkSerializer(read_only=True, many=True) link_module = ModuleLinkSerializer(read_only=True, many=True)
is_favorite = serializers.BooleanField(read_only=True) is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True) total_issues = serializers.IntegerField(read_only=True)
@ -198,13 +209,9 @@ class ModuleFavoriteSerializer(BaseSerializer):
"user", "user",
] ]
class ModuleUserPropertiesSerializer(BaseSerializer): class ModuleUserPropertiesSerializer(BaseSerializer):
class Meta: class Meta:
model = ModuleUserProperties model = ModuleUserProperties
fields = "__all__" fields = "__all__"
read_only_fields = [ read_only_fields = ["workspace", "project", "module", "user"]
"workspace",
"project",
"module",
"user"
]

View File

@ -3,10 +3,12 @@ from .base import BaseSerializer
from .user import UserLiteSerializer from .user import UserLiteSerializer
from plane.db.models import Notification from plane.db.models import Notification
class NotificationSerializer(BaseSerializer): class NotificationSerializer(BaseSerializer):
triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by") triggered_by_details = UserLiteSerializer(
read_only=True, source="triggered_by"
)
class Meta: class Meta:
model = Notification model = Notification
fields = "__all__" fields = "__all__"

View File

@ -6,19 +6,31 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer from .issue import IssueFlatSerializer, LabelLiteSerializer
from .workspace import WorkspaceLiteSerializer from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer from .project import ProjectLiteSerializer
from plane.db.models import Page, PageLog, PageFavorite, PageLabel, Label, Issue, Module from plane.db.models import (
Page,
PageLog,
PageFavorite,
PageLabel,
Label,
Issue,
Module,
)
class PageSerializer(BaseSerializer): class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True) is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) label_details = LabelLiteSerializer(
read_only=True, source="labels", many=True
)
labels = serializers.ListField( labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True, write_only=True,
required=False, required=False,
) )
project_detail = ProjectLiteSerializer(source="project", read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = Page model = Page
@ -28,9 +40,10 @@ class PageSerializer(BaseSerializer):
"project", "project",
"owned_by", "owned_by",
] ]
def to_representation(self, instance): def to_representation(self, instance):
data = super().to_representation(instance) data = super().to_representation(instance)
data['labels'] = [str(label.id) for label in instance.labels.all()] data["labels"] = [str(label.id) for label in instance.labels.all()]
return data return data
def create(self, validated_data): def create(self, validated_data):
@ -94,7 +107,7 @@ class SubPageSerializer(BaseSerializer):
def get_entity_details(self, obj): def get_entity_details(self, obj):
entity_name = obj.entity_name entity_name = obj.entity_name
if entity_name == 'forward_link' or entity_name == 'back_link': if entity_name == "forward_link" or entity_name == "back_link":
try: try:
page = Page.objects.get(pk=obj.entity_identifier) page = Page.objects.get(pk=obj.entity_identifier)
return PageSerializer(page).data return PageSerializer(page).data
@ -104,7 +117,6 @@ class SubPageSerializer(BaseSerializer):
class PageLogSerializer(BaseSerializer): class PageLogSerializer(BaseSerializer):
class Meta: class Meta:
model = PageLog model = PageLog
fields = "__all__" fields = "__all__"

View File

@ -4,7 +4,10 @@ from rest_framework import serializers
# Module imports # Module imports
from .base import BaseSerializer, DynamicBaseSerializer from .base import BaseSerializer, DynamicBaseSerializer
from plane.app.serializers.workspace import WorkspaceLiteSerializer from plane.app.serializers.workspace import WorkspaceLiteSerializer
from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer from plane.app.serializers.user import (
UserLiteSerializer,
UserAdminLiteSerializer,
)
from plane.db.models import ( from plane.db.models import (
Project, Project,
ProjectMember, ProjectMember,
@ -17,7 +20,9 @@ from plane.db.models import (
class ProjectSerializer(BaseSerializer): class ProjectSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = Project model = Project
@ -29,12 +34,16 @@ class ProjectSerializer(BaseSerializer):
def create(self, validated_data): def create(self, validated_data):
identifier = validated_data.get("identifier", "").strip().upper() identifier = validated_data.get("identifier", "").strip().upper()
if identifier == "": if identifier == "":
raise serializers.ValidationError(detail="Project Identifier is required") raise serializers.ValidationError(
detail="Project Identifier is required"
)
if ProjectIdentifier.objects.filter( if ProjectIdentifier.objects.filter(
name=identifier, workspace_id=self.context["workspace_id"] name=identifier, workspace_id=self.context["workspace_id"]
).exists(): ).exists():
raise serializers.ValidationError(detail="Project Identifier is taken") raise serializers.ValidationError(
detail="Project Identifier is taken"
)
project = Project.objects.create( project = Project.objects.create(
**validated_data, workspace_id=self.context["workspace_id"] **validated_data, workspace_id=self.context["workspace_id"]
) )
@ -73,7 +82,9 @@ class ProjectSerializer(BaseSerializer):
return project return project
# If not same fail update # If not same fail update
raise serializers.ValidationError(detail="Project Identifier is already taken") raise serializers.ValidationError(
detail="Project Identifier is already taken"
)
class ProjectLiteSerializer(BaseSerializer): class ProjectLiteSerializer(BaseSerializer):
@ -159,12 +170,13 @@ class ProjectMemberAdminSerializer(BaseSerializer):
model = ProjectMember model = ProjectMember
fields = "__all__" fields = "__all__"
class ProjectMemberRoleSerializer(DynamicBaseSerializer):
class ProjectMemberRoleSerializer(DynamicBaseSerializer):
class Meta: class Meta:
model = ProjectMember model = ProjectMember
fields = ("id", "role", "member", "project") fields = ("id", "role", "member", "project")
class ProjectMemberInviteSerializer(BaseSerializer): class ProjectMemberInviteSerializer(BaseSerializer):
project = ProjectLiteSerializer(read_only=True) project = ProjectLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True)
@ -202,7 +214,9 @@ class ProjectMemberLiteSerializer(BaseSerializer):
class ProjectDeployBoardSerializer(BaseSerializer): class ProjectDeployBoardSerializer(BaseSerializer):
project_details = ProjectLiteSerializer(read_only=True, source="project") project_details = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
class Meta: class Meta:
model = ProjectDeployBoard model = ProjectDeployBoard

View File

@ -6,7 +6,6 @@ from plane.db.models import State
class StateSerializer(BaseSerializer): class StateSerializer(BaseSerializer):
class Meta: class Meta:
model = State model = State
fields = "__all__" fields = "__all__"

View File

@ -99,7 +99,9 @@ class UserMeSettingsSerializer(BaseSerializer):
).first() ).first()
return { return {
"last_workspace_id": obj.last_workspace_id, "last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug if workspace is not None else "", "last_workspace_slug": workspace.slug
if workspace is not None
else "",
"fallback_workspace_id": obj.last_workspace_id, "fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug "fallback_workspace_slug": workspace.slug
if workspace is not None if workspace is not None
@ -109,7 +111,8 @@ class UserMeSettingsSerializer(BaseSerializer):
else: else:
fallback_workspace = ( fallback_workspace = (
Workspace.objects.filter( Workspace.objects.filter(
workspace_member__member_id=obj.id, workspace_member__is_active=True workspace_member__member_id=obj.id,
workspace_member__is_active=True,
) )
.order_by("created_at") .order_by("created_at")
.first() .first()
@ -180,7 +183,9 @@ class ChangePasswordSerializer(serializers.Serializer):
if data.get("new_password") != data.get("confirm_password"): if data.get("new_password") != data.get("confirm_password"):
raise serializers.ValidationError( raise serializers.ValidationError(
{"error": "Confirm password should be same as the new password."} {
"error": "Confirm password should be same as the new password."
}
) )
return data return data
@ -190,4 +195,5 @@ class ResetPasswordSerializer(serializers.Serializer):
""" """
Serializer for password change endpoint. Serializer for password change endpoint.
""" """
new_password = serializers.CharField(required=True, min_length=8) new_password = serializers.CharField(required=True, min_length=8)

View File

@ -10,7 +10,9 @@ from plane.utils.issue_filters import issue_filters
class GlobalViewSerializer(BaseSerializer): class GlobalViewSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = GlobalView model = GlobalView
@ -41,7 +43,9 @@ class GlobalViewSerializer(BaseSerializer):
class IssueViewSerializer(DynamicBaseSerializer): class IssueViewSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True) is_favorite = serializers.BooleanField(read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) workspace_detail = WorkspaceLiteSerializer(
source="workspace", read_only=True
)
class Meta: class Meta:
model = IssueView model = IssueView

View File

@ -12,6 +12,7 @@ from .base import DynamicBaseSerializer
from plane.db.models import Webhook, WebhookLog from plane.db.models import Webhook, WebhookLog
from plane.db.models.webhook import validate_domain, validate_schema from plane.db.models.webhook import validate_domain, validate_schema
class WebhookSerializer(DynamicBaseSerializer): class WebhookSerializer(DynamicBaseSerializer):
url = serializers.URLField(validators=[validate_schema, validate_domain]) url = serializers.URLField(validators=[validate_schema, validate_domain])
@ -21,32 +22,49 @@ class WebhookSerializer(DynamicBaseSerializer):
# Extract the hostname from the URL # Extract the hostname from the URL
hostname = urlparse(url).hostname hostname = urlparse(url).hostname
if not hostname: if not hostname:
raise serializers.ValidationError({"url": "Invalid URL: No hostname found."}) raise serializers.ValidationError(
{"url": "Invalid URL: No hostname found."}
)
# Resolve the hostname to IP addresses # Resolve the hostname to IP addresses
try: try:
ip_addresses = socket.getaddrinfo(hostname, None) ip_addresses = socket.getaddrinfo(hostname, None)
except socket.gaierror: except socket.gaierror:
raise serializers.ValidationError({"url": "Hostname could not be resolved."}) raise serializers.ValidationError(
{"url": "Hostname could not be resolved."}
)
if not ip_addresses: if not ip_addresses:
raise serializers.ValidationError({"url": "No IP addresses found for the hostname."}) raise serializers.ValidationError(
{"url": "No IP addresses found for the hostname."}
)
for addr in ip_addresses: for addr in ip_addresses:
ip = ipaddress.ip_address(addr[4][0]) ip = ipaddress.ip_address(addr[4][0])
if ip.is_private or ip.is_loopback: if ip.is_private or ip.is_loopback:
raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."}) raise serializers.ValidationError(
{"url": "URL resolves to a blocked IP address."}
)
# Additional validation for multiple request domains and their subdomains # Additional validation for multiple request domains and their subdomains
request = self.context.get('request') request = self.context.get("request")
disallowed_domains = ['plane.so',] # Add your disallowed domains here disallowed_domains = [
"plane.so",
] # Add your disallowed domains here
if request: if request:
request_host = request.get_host().split(':')[0] # Remove port if present request_host = request.get_host().split(":")[
0
] # Remove port if present
disallowed_domains.append(request_host) disallowed_domains.append(request_host)
# Check if hostname is a subdomain or exact match of any disallowed domain # Check if hostname is a subdomain or exact match of any disallowed domain
if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains): if any(
raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."}) hostname == domain or hostname.endswith("." + domain)
for domain in disallowed_domains
):
raise serializers.ValidationError(
{"url": "URL domain or its subdomain is not allowed."}
)
return Webhook.objects.create(**validated_data) return Webhook.objects.create(**validated_data)
@ -56,32 +74,49 @@ class WebhookSerializer(DynamicBaseSerializer):
# Extract the hostname from the URL # Extract the hostname from the URL
hostname = urlparse(url).hostname hostname = urlparse(url).hostname
if not hostname: if not hostname:
raise serializers.ValidationError({"url": "Invalid URL: No hostname found."}) raise serializers.ValidationError(
{"url": "Invalid URL: No hostname found."}
)
# Resolve the hostname to IP addresses # Resolve the hostname to IP addresses
try: try:
ip_addresses = socket.getaddrinfo(hostname, None) ip_addresses = socket.getaddrinfo(hostname, None)
except socket.gaierror: except socket.gaierror:
raise serializers.ValidationError({"url": "Hostname could not be resolved."}) raise serializers.ValidationError(
{"url": "Hostname could not be resolved."}
)
if not ip_addresses: if not ip_addresses:
raise serializers.ValidationError({"url": "No IP addresses found for the hostname."}) raise serializers.ValidationError(
{"url": "No IP addresses found for the hostname."}
)
for addr in ip_addresses: for addr in ip_addresses:
ip = ipaddress.ip_address(addr[4][0]) ip = ipaddress.ip_address(addr[4][0])
if ip.is_private or ip.is_loopback: if ip.is_private or ip.is_loopback:
raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."}) raise serializers.ValidationError(
{"url": "URL resolves to a blocked IP address."}
)
# Additional validation for multiple request domains and their subdomains # Additional validation for multiple request domains and their subdomains
request = self.context.get('request') request = self.context.get("request")
disallowed_domains = ['plane.so',] # Add your disallowed domains here disallowed_domains = [
"plane.so",
] # Add your disallowed domains here
if request: if request:
request_host = request.get_host().split(':')[0] # Remove port if present request_host = request.get_host().split(":")[
0
] # Remove port if present
disallowed_domains.append(request_host) disallowed_domains.append(request_host)
# Check if hostname is a subdomain or exact match of any disallowed domain # Check if hostname is a subdomain or exact match of any disallowed domain
if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains): if any(
raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."}) hostname == domain or hostname.endswith("." + domain)
for domain in disallowed_domains
):
raise serializers.ValidationError(
{"url": "URL domain or its subdomain is not allowed."}
)
return super().update(instance, validated_data) return super().update(instance, validated_data)
@ -95,12 +130,7 @@ class WebhookSerializer(DynamicBaseSerializer):
class WebhookLogSerializer(DynamicBaseSerializer): class WebhookLogSerializer(DynamicBaseSerializer):
class Meta: class Meta:
model = WebhookLog model = WebhookLog
fields = "__all__" fields = "__all__"
read_only_fields = [ read_only_fields = ["workspace", "webhook"]
"workspace",
"webhook"
]

View File

@ -51,6 +51,7 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
"owner", "owner",
] ]
class WorkspaceLiteSerializer(BaseSerializer): class WorkspaceLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = Workspace model = Workspace
@ -62,7 +63,6 @@ class WorkspaceLiteSerializer(BaseSerializer):
read_only_fields = fields read_only_fields = fields
class WorkSpaceMemberSerializer(DynamicBaseSerializer): class WorkSpaceMemberSerializer(DynamicBaseSerializer):
member = UserLiteSerializer(read_only=True) member = UserLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True)
@ -73,7 +73,6 @@ class WorkSpaceMemberSerializer(DynamicBaseSerializer):
class WorkspaceMemberMeSerializer(BaseSerializer): class WorkspaceMemberMeSerializer(BaseSerializer):
class Meta: class Meta:
model = WorkspaceMember model = WorkspaceMember
fields = "__all__" fields = "__all__"
@ -109,7 +108,9 @@ class WorkSpaceMemberInviteSerializer(BaseSerializer):
class TeamSerializer(BaseSerializer): class TeamSerializer(BaseSerializer):
members_detail = UserLiteSerializer(read_only=True, source="members", many=True) members_detail = UserLiteSerializer(
read_only=True, source="members", many=True
)
members = serializers.ListField( members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True, write_only=True,
@ -146,7 +147,9 @@ class TeamSerializer(BaseSerializer):
members = validated_data.pop("members") members = validated_data.pop("members")
TeamMember.objects.filter(team=instance).delete() TeamMember.objects.filter(team=instance).delete()
team_members = [ team_members = [
TeamMember(member=member, team=instance, workspace=instance.workspace) TeamMember(
member=member, team=instance, workspace=instance.workspace
)
for member in members for member in members
] ]
TeamMember.objects.bulk_create(team_members, batch_size=10) TeamMember.objects.bulk_create(team_members, batch_size=10)

View File

@ -3,6 +3,7 @@ from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls from .authentication import urlpatterns as authentication_urls
from .config import urlpatterns as configuration_urls from .config import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls from .cycle import urlpatterns as cycle_urls
from .dashboard import urlpatterns as dashboard_urls
from .estimate import urlpatterns as estimate_urls from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls from .external import urlpatterns as external_urls
from .importer import urlpatterns as importer_urls from .importer import urlpatterns as importer_urls
@ -28,6 +29,7 @@ urlpatterns = [
*authentication_urls, *authentication_urls,
*configuration_urls, *configuration_urls,
*cycle_urls, *cycle_urls,
*dashboard_urls,
*estimate_urls, *estimate_urls,
*external_urls, *external_urls,
*importer_urls, *importer_urls,

View File

@ -31,8 +31,14 @@ urlpatterns = [
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"), path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"), path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# magic sign in # magic sign in
path("magic-generate/", MagicGenerateEndpoint.as_view(), name="magic-generate"), path(
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), "magic-generate/",
MagicGenerateEndpoint.as_view(),
name="magic-generate",
),
path(
"magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"
),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Password Manipulation # Password Manipulation
path( path(
@ -52,6 +58,8 @@ urlpatterns = [
), ),
# API Tokens # API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"), path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"), path(
"api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"
),
## End API Tokens ## End API Tokens
] ]

View File

@ -1,7 +1,7 @@
from django.urls import path from django.urls import path
from plane.app.views import ConfigurationEndpoint from plane.app.views import ConfigurationEndpoint, MobileConfigurationEndpoint
urlpatterns = [ urlpatterns = [
path( path(
@ -9,4 +9,9 @@ urlpatterns = [
ConfigurationEndpoint.as_view(), ConfigurationEndpoint.as_view(),
name="configuration", name="configuration",
), ),
path(
"mobile-configs/",
MobileConfigurationEndpoint.as_view(),
name="configuration",
),
] ]

View File

@ -8,10 +8,16 @@ from plane.app.views import (
CycleFavoriteViewSet, CycleFavoriteViewSet,
TransferCycleIssueEndpoint, TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint, CycleUserPropertiesEndpoint,
ActiveCycleEndpoint
) )
urlpatterns = [ urlpatterns = [
path(
"workspaces/<str:slug>/active-cycles/",
ActiveCycleEndpoint.as_view(),
name="workspace-active-cycle",
),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/", "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
CycleViewSet.as_view( CycleViewSet.as_view(
@ -89,5 +95,5 @@ urlpatterns = [
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/user-properties/", "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/user-properties/",
CycleUserPropertiesEndpoint.as_view(), CycleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters", name="cycle-user-filters",
) ),
] ]

View File

@ -0,0 +1,23 @@
from django.urls import path
from plane.app.views import DashboardEndpoint, WidgetsEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/dashboard/",
DashboardEndpoint.as_view(),
name="dashboard",
),
path(
"workspaces/<str:slug>/dashboard/<uuid:dashboard_id>/",
DashboardEndpoint.as_view(),
name="dashboard",
),
path(
"dashboard/<uuid:dashboard_id>/widgets/<uuid:widget_id>/",
WidgetsEndpoint.as_view(),
name="widgets",
),
]

View File

@ -7,7 +7,7 @@ from plane.app.views import (
ModuleLinkViewSet, ModuleLinkViewSet,
ModuleFavoriteViewSet, ModuleFavoriteViewSet,
BulkImportModulesEndpoint, BulkImportModulesEndpoint,
ModuleUserPropertiesEndpoint ModuleUserPropertiesEndpoint,
) )
@ -106,5 +106,5 @@ urlpatterns = [
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/user-properties/", "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/user-properties/",
ModuleUserPropertiesEndpoint.as_view(), ModuleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters", name="cycle-user-filters",
) ),
] ]

View File

@ -206,5 +206,5 @@ urlpatterns = [
"workspaces/<str:slug>/user-properties/", "workspaces/<str:slug>/user-properties/",
WorkspaceUserPropertiesEndpoint.as_view(), WorkspaceUserPropertiesEndpoint.as_view(),
name="workspace-user-filters", name="workspace-user-filters",
) ),
] ]

View File

@ -204,10 +204,14 @@ urlpatterns = [
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"), path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up # Magic Sign In/Up
path( path(
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" "magic-generate/",
MagicSignInGenerateEndpoint.as_view(),
name="magic-generate",
), ),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), path(
path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'), "magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"
),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Email verification # Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path( path(
@ -272,7 +276,9 @@ urlpatterns = [
# user workspace invitations # user workspace invitations
path( path(
"users/me/invitations/workspaces/", "users/me/invitations/workspaces/",
UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}), UserWorkspaceInvitationsEndpoint.as_view(
{"get": "list", "post": "create"}
),
name="user-workspace-invitations", name="user-workspace-invitations",
), ),
# user workspace invitation # user workspace invitation
@ -311,7 +317,9 @@ urlpatterns = [
# user project invitations # user project invitations
path( path(
"users/me/invitations/projects/", "users/me/invitations/projects/",
UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}), UserProjectInvitationsViewset.as_view(
{"get": "list", "post": "create"}
),
name="user-project-invitaions", name="user-project-invitaions",
), ),
## Workspaces ## ## Workspaces ##
@ -1238,7 +1246,7 @@ urlpatterns = [
"post": "unarchive", "post": "unarchive",
} }
), ),
name="project-page-unarchive" name="project-page-unarchive",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/", "workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/",
@ -1264,19 +1272,22 @@ urlpatterns = [
{ {
"post": "unlock", "post": "unlock",
} }
) ),
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/",
PageLogEndpoint.as_view(), name="page-transactions" PageLogEndpoint.as_view(),
name="page-transactions",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/<uuid:transaction>/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/<uuid:transaction>/",
PageLogEndpoint.as_view(), name="page-transactions" PageLogEndpoint.as_view(),
name="page-transactions",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/sub-pages/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/sub-pages/",
SubPagesEndpoint.as_view(), name="sub-page" SubPagesEndpoint.as_view(),
name="sub-page",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/", "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
@ -1326,7 +1337,9 @@ urlpatterns = [
## End Pages ## End Pages
# API Tokens # API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"), path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"), path(
"api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"
),
## End API Tokens ## End API Tokens
# Integrations # Integrations
path( path(

View File

@ -62,6 +62,7 @@ from .cycle import (
CycleFavoriteViewSet, CycleFavoriteViewSet,
TransferCycleIssueEndpoint, TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint, CycleUserPropertiesEndpoint,
ActiveCycleEndpoint,
) )
from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
from .issue import ( from .issue import (
@ -140,7 +141,11 @@ from .page import (
from .search import GlobalSearchEndpoint, IssueSearchEndpoint from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint from .external import (
GPTIntegrationEndpoint,
ReleaseNotesEndpoint,
UnsplashEndpoint,
)
from .estimate import ( from .estimate import (
ProjectEstimatePointEndpoint, ProjectEstimatePointEndpoint,
@ -165,10 +170,15 @@ from .notification import (
from .exporter import ExportIssuesEndpoint from .exporter import ExportIssuesEndpoint
from .config import ConfigurationEndpoint from .config import ConfigurationEndpoint, MobileConfigurationEndpoint
from .webhook import ( from .webhook import (
WebhookEndpoint, WebhookEndpoint,
WebhookLogsEndpoint, WebhookLogsEndpoint,
WebhookSecretRegenerateEndpoint, WebhookSecretRegenerateEndpoint,
) )
from .dashboard import (
DashboardEndpoint,
WidgetsEndpoint
)

View File

@ -61,7 +61,9 @@ class AnalyticsEndpoint(BaseAPIView):
) )
# If segment is present it cannot be same as x-axis # If segment is present it cannot be same as x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment): if segment and (
segment not in valid_xaxis_segment or x_axis == segment
):
return Response( return Response(
{ {
"error": "Both segment and x axis cannot be same and segment should be valid" "error": "Both segment and x axis cannot be same and segment should be valid"
@ -110,7 +112,9 @@ class AnalyticsEndpoint(BaseAPIView):
if x_axis in ["assignees__id"] or segment in ["assignees__id"]: if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = ( assignee_details = (
Issue.issue_objects.filter( Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False workspace__slug=slug,
**filters,
assignees__avatar__isnull=False,
) )
.order_by("assignees__id") .order_by("assignees__id")
.distinct("assignees__id") .distinct("assignees__id")
@ -124,7 +128,9 @@ class AnalyticsEndpoint(BaseAPIView):
) )
cycle_details = {} cycle_details = {}
if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]: if x_axis in ["issue_cycle__cycle_id"] or segment in [
"issue_cycle__cycle_id"
]:
cycle_details = ( cycle_details = (
Issue.issue_objects.filter( Issue.issue_objects.filter(
workspace__slug=slug, workspace__slug=slug,
@ -186,7 +192,9 @@ class AnalyticViewViewset(BaseViewSet):
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
) )
@ -196,7 +204,9 @@ class SavedAnalyticEndpoint(BaseAPIView):
] ]
def get(self, request, slug, analytic_id): def get(self, request, slug, analytic_id):
analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug) analytic_view = AnalyticView.objects.get(
pk=analytic_id, workspace__slug=slug
)
filter = analytic_view.query filter = analytic_view.query
queryset = Issue.issue_objects.filter(**filter) queryset = Issue.issue_objects.filter(**filter)
@ -266,7 +276,9 @@ class ExportAnalyticsEndpoint(BaseAPIView):
) )
# If segment is present it cannot be same as x-axis # If segment is present it cannot be same as x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment): if segment and (
segment not in valid_xaxis_segment or x_axis == segment
):
return Response( return Response(
{ {
"error": "Both segment and x axis cannot be same and segment should be valid" "error": "Both segment and x axis cannot be same and segment should be valid"
@ -293,7 +305,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
filters = issue_filters(request.GET, "GET") filters = issue_filters(request.GET, "GET")
base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters) base_issues = Issue.issue_objects.filter(
workspace__slug=slug, **filters
)
total_issues = base_issues.count() total_issues = base_issues.count()
@ -306,7 +320,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
) )
open_issues_groups = ["backlog", "unstarted", "started"] open_issues_groups = ["backlog", "unstarted", "started"]
open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups) open_issues_queryset = state_groups.filter(
state__group__in=open_issues_groups
)
open_issues = open_issues_queryset.count() open_issues = open_issues_queryset.count()
open_issues_classified = ( open_issues_classified = (
@ -361,10 +377,12 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
.order_by("-count") .order_by("-count")
) )
open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[ open_estimate_sum = open_issues_queryset.aggregate(
sum=Sum("estimate_point")
)["sum"]
total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))[
"sum" "sum"
] ]
total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
return Response( return Response(
{ {

View File

@ -71,7 +71,9 @@ class ApiTokenEndpoint(BaseAPIView):
user=request.user, user=request.user,
pk=pk, pk=pk,
) )
serializer = APITokenSerializer(api_token, data=request.data, partial=True) serializer = APITokenSerializer(
api_token, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)

View File

@ -10,7 +10,11 @@ from plane.app.serializers import FileAssetSerializer
class FileAssetEndpoint(BaseAPIView): class FileAssetEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser, JSONParser,) parser_classes = (
MultiPartParser,
FormParser,
JSONParser,
)
""" """
A viewset for viewing and editing task instances. A viewset for viewing and editing task instances.
@ -20,10 +24,18 @@ class FileAssetEndpoint(BaseAPIView):
asset_key = str(workspace_id) + "/" + asset_key asset_key = str(workspace_id) + "/" + asset_key
files = FileAsset.objects.filter(asset=asset_key) files = FileAsset.objects.filter(asset=asset_key)
if files.exists(): if files.exists():
serializer = FileAssetSerializer(files, context={"request": request}, many=True) serializer = FileAssetSerializer(
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) files, context={"request": request}, many=True
)
return Response(
{"data": serializer.data, "status": True},
status=status.HTTP_200_OK,
)
else: else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) return Response(
{"error": "Asset key does not exist", "status": False},
status=status.HTTP_200_OK,
)
def post(self, request, slug): def post(self, request, slug):
serializer = FileAssetSerializer(data=request.data) serializer = FileAssetSerializer(data=request.data)
@ -43,7 +55,6 @@ class FileAssetEndpoint(BaseAPIView):
class FileAssetViewSet(BaseViewSet): class FileAssetViewSet(BaseViewSet):
def restore(self, request, workspace_id, asset_key): def restore(self, request, workspace_id, asset_key):
asset_key = str(workspace_id) + "/" + asset_key asset_key = str(workspace_id) + "/" + asset_key
file_asset = FileAsset.objects.get(asset=asset_key) file_asset = FileAsset.objects.get(asset=asset_key)
@ -56,12 +67,22 @@ class UserAssetsEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser) parser_classes = (MultiPartParser, FormParser)
def get(self, request, asset_key): def get(self, request, asset_key):
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user) files = FileAsset.objects.filter(
asset=asset_key, created_by=request.user
)
if files.exists(): if files.exists():
serializer = FileAssetSerializer(files, context={"request": request}) serializer = FileAssetSerializer(
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) files, context={"request": request}
)
return Response(
{"data": serializer.data, "status": True},
status=status.HTTP_200_OK,
)
else: else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) return Response(
{"error": "Asset key does not exist", "status": False},
status=status.HTTP_200_OK,
)
def post(self, request): def post(self, request):
serializer = FileAssetSerializer(data=request.data) serializer = FileAssetSerializer(data=request.data)
@ -70,9 +91,10 @@ class UserAssetsEndpoint(BaseAPIView):
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, asset_key): def delete(self, request, asset_key):
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user) file_asset = FileAsset.objects.get(
asset=asset_key, created_by=request.user
)
file_asset.is_deleted = True file_asset.is_deleted = True
file_asset.save() file_asset.save()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -128,7 +128,8 @@ class ForgotPasswordEndpoint(BaseAPIView):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
return Response( return Response(
{"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST {"error": "Please check the email"},
status=status.HTTP_400_BAD_REQUEST,
) )
@ -167,7 +168,9 @@ class ResetPasswordEndpoint(BaseAPIView):
} }
return Response(data, status=status.HTTP_200_OK) return Response(data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except DjangoUnicodeDecodeError as indentifier: except DjangoUnicodeDecodeError as indentifier:
return Response( return Response(
@ -191,7 +194,8 @@ class ChangePasswordEndpoint(BaseAPIView):
user.is_password_autoset = False user.is_password_autoset = False
user.save() user.save()
return Response( return Response(
{"message": "Password updated successfully"}, status=status.HTTP_200_OK {"message": "Password updated successfully"},
status=status.HTTP_200_OK,
) )
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -213,7 +217,8 @@ class SetUserPasswordEndpoint(BaseAPIView):
# Check password validation # Check password validation
if not password and len(str(password)) < 8: if not password and len(str(password)) < 8:
return Response( return Response(
{"error": "Password is not valid"}, status=status.HTTP_400_BAD_REQUEST {"error": "Password is not valid"},
status=status.HTTP_400_BAD_REQUEST,
) )
# Set the user password # Set the user password
@ -281,7 +286,9 @@ class MagicGenerateEndpoint(BaseAPIView):
if data["current_attempt"] > 2: if data["current_attempt"] > 2:
return Response( return Response(
{"error": "Max attempts exhausted. Please try again later."}, {
"error": "Max attempts exhausted. Please try again later."
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -339,7 +346,8 @@ class EmailCheckEndpoint(BaseAPIView):
if not email: if not email:
return Response( return Response(
{"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Email is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
# validate the email # validate the email
@ -347,7 +355,8 @@ class EmailCheckEndpoint(BaseAPIView):
validate_email(email) validate_email(email)
except ValidationError: except ValidationError:
return Response( return Response(
{"error": "Email is not valid"}, status=status.HTTP_400_BAD_REQUEST {"error": "Email is not valid"},
status=status.HTTP_400_BAD_REQUEST,
) )
# Check if the user exists # Check if the user exists
@ -399,13 +408,18 @@ class EmailCheckEndpoint(BaseAPIView):
key, token, current_attempt = generate_magic_token(email=email) key, token, current_attempt = generate_magic_token(email=email)
if not current_attempt: if not current_attempt:
return Response( return Response(
{"error": "Max attempts exhausted. Please try again later."}, {
"error": "Max attempts exhausted. Please try again later."
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
# Trigger the email # Trigger the email
magic_link.delay(email, "magic_" + str(email), token, current_site) magic_link.delay(email, "magic_" + str(email), token, current_site)
return Response( return Response(
{"is_password_autoset": user.is_password_autoset, "is_existing": False}, {
"is_password_autoset": user.is_password_autoset,
"is_existing": False,
},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
@ -433,7 +447,9 @@ class EmailCheckEndpoint(BaseAPIView):
key, token, current_attempt = generate_magic_token(email=email) key, token, current_attempt = generate_magic_token(email=email)
if not current_attempt: if not current_attempt:
return Response( return Response(
{"error": "Max attempts exhausted. Please try again later."}, {
"error": "Max attempts exhausted. Please try again later."
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )

View File

@ -73,7 +73,7 @@ class SignUpEndpoint(BaseAPIView):
# get configuration values # get configuration values
# Get configuration values # Get configuration values
ENABLE_SIGNUP, = get_configuration_value( (ENABLE_SIGNUP,) = get_configuration_value(
[ [
{ {
"key": "ENABLE_SIGNUP", "key": "ENABLE_SIGNUP",
@ -173,7 +173,7 @@ class SignInEndpoint(BaseAPIView):
# Create the user # Create the user
else: else:
ENABLE_SIGNUP, = get_configuration_value( (ENABLE_SIGNUP,) = get_configuration_value(
[ [
{ {
"key": "ENABLE_SIGNUP", "key": "ENABLE_SIGNUP",
@ -325,7 +325,7 @@ class MagicSignInEndpoint(BaseAPIView):
) )
user_token = request.data.get("token", "").strip() user_token = request.data.get("token", "").strip()
key = request.data.get("key", False).strip().lower() key = request.data.get("key", "").strip().lower()
if not key or user_token == "": if not key or user_token == "":
return Response( return Response(
@ -364,9 +364,11 @@ class MagicSignInEndpoint(BaseAPIView):
user.save() user.save()
# Check if user has any accepted invites for workspace and add them to workspace # Check if user has any accepted invites for workspace and add them to workspace
workspace_member_invites = WorkspaceMemberInvite.objects.filter( workspace_member_invites = (
WorkspaceMemberInvite.objects.filter(
email=user.email, accepted=True email=user.email, accepted=True
) )
)
WorkspaceMember.objects.bulk_create( WorkspaceMember.objects.bulk_create(
[ [
@ -431,7 +433,9 @@ class MagicSignInEndpoint(BaseAPIView):
else: else:
return Response( return Response(
{"error": "Your login code was incorrect. Please try again."}, {
"error": "Your login code was incorrect. Please try again."
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )

View File

@ -46,7 +46,9 @@ class WebhookMixin:
bulk = False bulk = False
def finalize_response(self, request, response, *args, **kwargs): def finalize_response(self, request, response, *args, **kwargs):
response = super().finalize_response(request, response, *args, **kwargs) response = super().finalize_response(
request, response, *args, **kwargs
)
# Check for the case should webhook be sent # Check for the case should webhook be sent
if ( if (
@ -88,7 +90,9 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
return self.model.objects.all() return self.model.objects.all()
except Exception as e: except Exception as e:
capture_exception(e) capture_exception(e)
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) raise APIException(
"Please check the view", status.HTTP_400_BAD_REQUEST
)
def handle_exception(self, exc): def handle_exception(self, exc):
""" """
@ -99,6 +103,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
response = super().handle_exception(exc) response = super().handle_exception(exc)
return response return response
except Exception as e: except Exception as e:
print(e) if settings.DEBUG else print("Server Error")
if isinstance(e, IntegrityError): if isinstance(e, IntegrityError):
return Response( return Response(
{"error": "The payload is not valid"}, {"error": "The payload is not valid"},
@ -112,23 +117,23 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
) )
if isinstance(e, ObjectDoesNotExist): if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response( return Response(
{"error": f"{model_name} does not exist."}, {"error": f"The required object does not exist."},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
if isinstance(e, KeyError): if isinstance(e, KeyError):
capture_exception(e) capture_exception(e)
return Response( return Response(
{"error": f"key {e} does not exist"}, {"error": f"The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
print(e) if settings.DEBUG else print("Server Error")
capture_exception(e) capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
try: try:
@ -162,19 +167,22 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
@property @property
def fields(self): def fields(self):
fields = [ fields = [
field for field in self.request.GET.get("fields", "").split(",") if field field
for field in self.request.GET.get("fields", "").split(",")
if field
] ]
return fields if fields else None return fields if fields else None
@property @property
def expand(self): def expand(self):
expand = [ expand = [
expand for expand in self.request.GET.get("expand", "").split(",") if expand expand
for expand in self.request.GET.get("expand", "").split(",")
if expand
] ]
return expand if expand else None return expand if expand else None
class BaseAPIView(TimezoneMixin, APIView, BasePaginator): class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
permission_classes = [ permission_classes = [
IsAuthenticated, IsAuthenticated,
@ -216,20 +224,24 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
) )
if isinstance(e, ObjectDoesNotExist): if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response( return Response(
{"error": f"{model_name} does not exist."}, {"error": f"The required object does not exist."},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
if isinstance(e, KeyError): if isinstance(e, KeyError):
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) return Response(
{"error": f"The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
if settings.DEBUG: if settings.DEBUG:
print(e) print(e)
capture_exception(e) capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
try: try:
@ -258,13 +270,17 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
@property @property
def fields(self): def fields(self):
fields = [ fields = [
field for field in self.request.GET.get("fields", "").split(",") if field field
for field in self.request.GET.get("fields", "").split(",")
if field
] ]
return fields if fields else None return fields if fields else None
@property @property
def expand(self): def expand(self):
expand = [ expand = [
expand for expand in self.request.GET.get("expand", "").split(",") if expand expand
for expand in self.request.GET.get("expand", "").split(",")
if expand
] ]
return expand if expand else None return expand if expand else None

View File

@ -90,10 +90,14 @@ class ConfigurationEndpoint(BaseAPIView):
data = {} data = {}
# Authentication # Authentication
data["google_client_id"] = ( data["google_client_id"] = (
GOOGLE_CLIENT_ID if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_ID != '""' else None GOOGLE_CLIENT_ID
if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_ID != '""'
else None
) )
data["github_client_id"] = ( data["github_client_id"] = (
GITHUB_CLIENT_ID if GITHUB_CLIENT_ID and GITHUB_CLIENT_ID != '""' else None GITHUB_CLIENT_ID
if GITHUB_CLIENT_ID and GITHUB_CLIENT_ID != '""'
else None
) )
data["github_app_name"] = GITHUB_APP_NAME data["github_app_name"] = GITHUB_APP_NAME
data["magic_login"] = ( data["magic_login"] = (
@ -115,7 +119,125 @@ class ConfigurationEndpoint(BaseAPIView):
data["has_openai_configured"] = bool(OPENAI_API_KEY) data["has_openai_configured"] = bool(OPENAI_API_KEY)
# File size settings # File size settings
data["file_size_limit"] = float(os.environ.get("FILE_SIZE_LIMIT", 5242880)) data["file_size_limit"] = float(
os.environ.get("FILE_SIZE_LIMIT", 5242880)
)
# is smtp configured
data["is_smtp_configured"] = bool(EMAIL_HOST_USER) and bool(
EMAIL_HOST_PASSWORD
)
return Response(data, status=status.HTTP_200_OK)
class MobileConfigurationEndpoint(BaseAPIView):
permission_classes = [
AllowAny,
]
def get(self, request):
(
GOOGLE_CLIENT_ID,
GOOGLE_SERVER_CLIENT_ID,
GOOGLE_IOS_CLIENT_ID,
EMAIL_HOST_USER,
EMAIL_HOST_PASSWORD,
ENABLE_MAGIC_LINK_LOGIN,
ENABLE_EMAIL_PASSWORD,
POSTHOG_API_KEY,
POSTHOG_HOST,
UNSPLASH_ACCESS_KEY,
OPENAI_API_KEY,
) = get_configuration_value(
[
{
"key": "GOOGLE_CLIENT_ID",
"default": os.environ.get("GOOGLE_CLIENT_ID", None),
},
{
"key": "GOOGLE_SERVER_CLIENT_ID",
"default": os.environ.get("GOOGLE_SERVER_CLIENT_ID", None),
},
{
"key": "GOOGLE_IOS_CLIENT_ID",
"default": os.environ.get("GOOGLE_IOS_CLIENT_ID", None),
},
{
"key": "EMAIL_HOST_USER",
"default": os.environ.get("EMAIL_HOST_USER", None),
},
{
"key": "EMAIL_HOST_PASSWORD",
"default": os.environ.get("EMAIL_HOST_PASSWORD", None),
},
{
"key": "ENABLE_MAGIC_LINK_LOGIN",
"default": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "1"),
},
{
"key": "ENABLE_EMAIL_PASSWORD",
"default": os.environ.get("ENABLE_EMAIL_PASSWORD", "1"),
},
{
"key": "POSTHOG_API_KEY",
"default": os.environ.get("POSTHOG_API_KEY", "1"),
},
{
"key": "POSTHOG_HOST",
"default": os.environ.get("POSTHOG_HOST", "1"),
},
{
"key": "UNSPLASH_ACCESS_KEY",
"default": os.environ.get("UNSPLASH_ACCESS_KEY", "1"),
},
{
"key": "OPENAI_API_KEY",
"default": os.environ.get("OPENAI_API_KEY", "1"),
},
]
)
data = {}
# Authentication
data["google_client_id"] = (
GOOGLE_CLIENT_ID
if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_ID != '""'
else None
)
data["google_server_client_id"] = (
GOOGLE_SERVER_CLIENT_ID
if GOOGLE_SERVER_CLIENT_ID and GOOGLE_SERVER_CLIENT_ID != '""'
else None
)
data["google_ios_client_id"] = (
(GOOGLE_IOS_CLIENT_ID)[::-1]
if GOOGLE_IOS_CLIENT_ID is not None
else None
)
# Posthog
data["posthog_api_key"] = POSTHOG_API_KEY
data["posthog_host"] = POSTHOG_HOST
data["magic_login"] = (
bool(EMAIL_HOST_USER) and bool(EMAIL_HOST_PASSWORD)
) and ENABLE_MAGIC_LINK_LOGIN == "1"
data["email_password_login"] = ENABLE_EMAIL_PASSWORD == "1"
# Posthog
data["posthog_api_key"] = POSTHOG_API_KEY
data["posthog_host"] = POSTHOG_HOST
# Unsplash
data["has_unsplash_configured"] = bool(UNSPLASH_ACCESS_KEY)
# Open AI settings
data["has_openai_configured"] = bool(OPENAI_API_KEY)
# File size settings
data["file_size_limit"] = float(
os.environ.get("FILE_SIZE_LIMIT", 5242880)
)
# is smtp configured # is smtp configured
data["is_smtp_configured"] = not ( data["is_smtp_configured"] = not (

View File

@ -31,11 +31,16 @@ from plane.app.serializers import (
CycleSerializer, CycleSerializer,
CycleIssueSerializer, CycleIssueSerializer,
CycleFavoriteSerializer, CycleFavoriteSerializer,
IssueSerializer,
IssueStateSerializer, IssueStateSerializer,
CycleWriteSerializer, CycleWriteSerializer,
CycleUserPropertiesSerializer, CycleUserPropertiesSerializer,
) )
from plane.app.permissions import ProjectEntityPermission, ProjectLitePermission from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,
WorkspaceUserPermission
)
from plane.db.models import ( from plane.db.models import (
User, User,
Cycle, Cycle,
@ -46,9 +51,9 @@ from plane.db.models import (
IssueAttachment, IssueAttachment,
Label, Label,
CycleUserProperties, CycleUserProperties,
IssueSubscriber,
) )
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot from plane.utils.analytics_plot import burndown_plot
@ -63,7 +68,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def perform_create(self, serializer): def perform_create(self, serializer):
serializer.save( serializer.save(
project_id=self.kwargs.get("project_id"), owned_by=self.request.user project_id=self.kwargs.get("project_id"),
owned_by=self.request.user,
) )
def get_queryset(self): def get_queryset(self):
@ -142,7 +148,9 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
), ),
) )
) )
.annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) .annotate(
total_estimates=Sum("issue_cycle__issue__estimate_point")
)
.annotate( .annotate(
completed_estimates=Sum( completed_estimates=Sum(
"issue_cycle__issue__estimate_point", "issue_cycle__issue__estimate_point",
@ -170,7 +178,9 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
& Q(end_date__gte=timezone.now()), & Q(end_date__gte=timezone.now()),
then=Value("CURRENT"), then=Value("CURRENT"),
), ),
When(start_date__gt=timezone.now(), then=Value("UPCOMING")), When(
start_date__gt=timezone.now(), then=Value("UPCOMING")
),
When(end_date__lt=timezone.now(), then=Value("COMPLETED")), When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
When( When(
Q(start_date__isnull=True) & Q(end_date__isnull=True), Q(start_date__isnull=True) & Q(end_date__isnull=True),
@ -183,13 +193,17 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"issue_cycle__issue__assignees", "issue_cycle__issue__assignees",
queryset=User.objects.only("avatar", "first_name", "id").distinct(), queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
) )
) )
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"issue_cycle__issue__labels", "issue_cycle__issue__labels",
queryset=Label.objects.only("name", "color", "id").distinct(), queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
) )
) )
.order_by("-is_favorite", "name") .order_by("-is_favorite", "name")
@ -199,7 +213,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
queryset = self.get_queryset() queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all") cycle_view = request.GET.get("cycle_view", "all")
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
queryset = queryset.order_by("-is_favorite", "-created_at") queryset = queryset.order_by("-is_favorite", "-created_at")
@ -296,7 +314,9 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
"completion_chart": {}, "completion_chart": {},
} }
if data[0]["start_date"] and data[0]["end_date"]: if data[0]["start_date"] and data[0]["end_date"]:
data[0]["distribution"]["completion_chart"] = burndown_plot( data[0]["distribution"][
"completion_chart"
] = burndown_plot(
queryset=queryset.first(), queryset=queryset.first(),
slug=slug, slug=slug,
project_id=project_id, project_id=project_id,
@ -322,8 +342,18 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
project_id=project_id, project_id=project_id,
owned_by=request.user, owned_by=request.user,
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) cycle = (
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) self.get_queryset()
.filter(pk=serializer.data["id"])
.first()
)
serializer = CycleSerializer(cycle)
return Response(
serializer.data, status=status.HTTP_201_CREATED
)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else: else:
return Response( return Response(
{ {
@ -333,15 +363,22 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
) )
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
request_data = request.data request_data = request.data
if cycle.end_date is not None and cycle.end_date < timezone.now().date(): if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
if "sort_order" in request_data: if "sort_order" in request_data:
# Can only change sort order # Can only change sort order
request_data = { request_data = {
"sort_order": request_data.get("sort_order", cycle.sort_order) "sort_order": request_data.get(
"sort_order", cycle.sort_order
)
} }
else: else:
return Response( return Response(
@ -351,7 +388,9 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) serializer = CycleWriteSerializer(
cycle, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -372,7 +411,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
.annotate(assignee_id=F("assignees__id")) .annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar")) .annotate(avatar=F("assignees__avatar"))
.annotate(display_name=F("assignees__display_name")) .annotate(display_name=F("assignees__display_name"))
.values("first_name", "last_name", "assignee_id", "avatar", "display_name") .values(
"first_name",
"last_name",
"assignee_id",
"avatar",
"display_name",
)
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"assignee_id", "assignee_id",
@ -451,7 +496,10 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
if queryset.start_date and queryset.end_date: if queryset.start_date and queryset.end_date:
data["distribution"]["completion_chart"] = burndown_plot( data["distribution"]["completion_chart"] = burndown_plot(
queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk queryset=queryset,
slug=slug,
project_id=project_id,
cycle_id=pk,
) )
return Response( return Response(
@ -461,11 +509,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
cycle_issues = list( cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( CycleIssue.objects.filter(
"issue", flat=True cycle_id=self.kwargs.get("pk")
).values_list("issue", flat=True)
) )
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
) )
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
issue_activity.delay( issue_activity.delay(
type="cycle.activity.deleted", type="cycle.activity.deleted",
@ -508,7 +558,9 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
super() super()
.get_queryset() .get_queryset()
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("issue_id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -527,13 +579,19 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id, cycle_id): def list(self, request, slug, project_id, cycle_id):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
order_by = request.GET.get("order_by", "created_at") order_by = request.GET.get("order_by", "created_at")
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
issues = ( issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -548,6 +606,8 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.prefetch_related("labels") .prefetch_related("labels")
.order_by(order_by) .order_by(order_by)
.filter(**filters) .filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate( .annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id")) link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by() .order_by()
@ -555,13 +615,22 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
subscriber=self.request.user, issue_id=OuterRef("id")
) )
serializer = IssueStateSerializer( )
)
)
serializer = IssueSerializer(
issues, many=True, fields=fields if fields else None issues, many=True, fields=fields if fields else None
) )
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -571,14 +640,18 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
if not len(issues): if not len(issues):
return Response( return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
cycle = Cycle.objects.get( cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=cycle_id workspace__slug=slug, project_id=project_id, pk=cycle_id
) )
if cycle.end_date is not None and cycle.end_date < timezone.now().date(): if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
return Response( return Response(
{ {
"error": "The Cycle has already been completed so no new issues can be added" "error": "The Cycle has already been completed so no new issues can be added"
@ -652,8 +725,12 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
) )
# Return all Cycle Issues # Return all Cycle Issues
issues = self.get_queryset().values_list("issue_id", flat=True)
return Response( return Response(
CycleIssueSerializer(self.get_queryset(), many=True).data, IssueSerializer(
Issue.objects.filter(pk__in=issues), many=True
).data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
@ -810,7 +887,9 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
workspace__slug=slug, workspace__slug=slug,
) )
cycle_properties.filters = request.data.get("filters", cycle_properties.filters) cycle_properties.filters = request.data.get(
"filters", cycle_properties.filters
)
cycle_properties.display_filters = request.data.get( cycle_properties.display_filters = request.data.get(
"display_filters", cycle_properties.display_filters "display_filters", cycle_properties.display_filters
) )
@ -831,3 +910,235 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
) )
serializer = CycleUserPropertiesSerializer(cycle_properties) serializer = CycleUserPropertiesSerializer(cycle_properties)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
class ActiveCycleEndpoint(BaseAPIView):
    """Workspace-level endpoint returning every currently-active cycle
    (start_date <= now <= end_date) across the projects the requesting
    user is a member of, enriched with per-state issue counts, estimate
    sums, assignee/label distributions and a burndown chart.
    """

    permission_classes = [
        WorkspaceUserPermission,
    ]

    def get(self, request, slug):
        """Return serialized active cycles for the workspace ``slug``.

        Response: 200 with a list of cycle dicts; each dict carries the
        annotated counts plus a ``distribution`` key built below.
        """
        # Correlated EXISTS used to flag cycles the user has favorited.
        # NOTE(review): this view's URL exposes only ``slug`` (see the
        # ``get(self, request, slug)`` signature), so
        # self.kwargs.get("project_id") is presumably always None here —
        # confirm whether the favorite filter ever matches as intended.
        subquery = CycleFavorite.objects.filter(
            user=self.request.user,
            cycle_id=OuterRef("pk"),
            project_id=self.kwargs.get("project_id"),
            workspace__slug=self.kwargs.get("slug"),
        )
        active_cycles = (
            # Only cycles whose date window contains "now".
            Cycle.objects.filter(
                workspace__slug=slug,
                project__project_projectmember__member=self.request.user,
                start_date__lte=timezone.now(),
                end_date__gte=timezone.now(),
            )
            .select_related("project")
            .select_related("workspace")
            .select_related("owned_by")
            .annotate(is_favorite=Exists(subquery))
            # Issue counts exclude archived and draft issues throughout.
            .annotate(
                total_issues=Count(
                    "issue_cycle",
                    filter=Q(
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            # One Count per state group: completed / cancelled / started /
            # unstarted / backlog.
            .annotate(
                completed_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="cancelled",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="unstarted",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="backlog",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            # Estimate-point totals (unfiltered total, plus completed and
            # started subsets).
            .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
            .annotate(
                completed_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            # Derived lifecycle status; given the date filter above these
            # rows should evaluate to "CURRENT", but the full Case is kept.
            .annotate(
                status=Case(
                    When(
                        Q(start_date__lte=timezone.now())
                        & Q(end_date__gte=timezone.now()),
                        then=Value("CURRENT"),
                    ),
                    When(start_date__gt=timezone.now(), then=Value("UPCOMING")),
                    When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
                    When(
                        Q(start_date__isnull=True) & Q(end_date__isnull=True),
                        then=Value("DRAFT"),
                    ),
                    default=Value("DRAFT"),
                    output_field=CharField(),
                )
            )
            # Prefetch only the assignee/label fields the serializer needs.
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__assignees",
                    queryset=User.objects.only("avatar", "first_name", "id").distinct(),
                )
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__labels",
                    queryset=Label.objects.only("name", "color", "id").distinct(),
                )
            )
            .order_by("-created_at")
        )
        cycles = CycleSerializer(active_cycles, many=True).data
        # Post-process each serialized cycle with distribution data.
        # NOTE(review): this loop issues several queries per cycle
        # (assignee distribution, label distribution, burndown) — an N+1
        # pattern if many cycles are active; acceptable only for small N.
        for cycle in cycles:
            # Per-assignee totals within this cycle; grouping happens via
            # .values() before the aggregate annotations.
            assignee_distribution = (
                Issue.objects.filter(
                    issue_cycle__cycle_id=cycle["id"],
                    project_id=cycle["project"],
                    workspace__slug=slug,
                )
                .annotate(display_name=F("assignees__display_name"))
                .annotate(assignee_id=F("assignees__id"))
                .annotate(avatar=F("assignees__avatar"))
                .values("display_name", "assignee_id", "avatar")
                .annotate(
                    total_issues=Count(
                        "assignee_id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                    ),
                )
                .annotate(
                    completed_issues=Count(
                        "assignee_id",
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "assignee_id",
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .order_by("display_name")
            )
            # Per-label totals within this cycle, same grouping technique.
            label_distribution = (
                Issue.objects.filter(
                    issue_cycle__cycle_id=cycle["id"],
                    project_id=cycle["project"],
                    workspace__slug=slug,
                )
                .annotate(label_name=F("labels__name"))
                .annotate(color=F("labels__color"))
                .annotate(label_id=F("labels__id"))
                .values("label_name", "color", "label_id")
                .annotate(
                    total_issues=Count(
                        "label_id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                    )
                )
                .annotate(
                    completed_issues=Count(
                        "label_id",
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "label_id",
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .order_by("label_name")
            )
            cycle["distribution"] = {
                "assignees": assignee_distribution,
                "labels": label_distribution,
                "completion_chart": {},
            }
            # Burndown chart only makes sense for a dated cycle.
            if cycle["start_date"] and cycle["end_date"]:
                cycle["distribution"][
                    "completion_chart"
                ] = burndown_plot(
                    queryset=active_cycles.get(pk=cycle["id"]),
                    slug=slug,
                    project_id=cycle["project"],
                    cycle_id=cycle["id"],
                )
        return Response(cycles, status=status.HTTP_200_OK)

View File

@ -0,0 +1,658 @@
# Django imports
from django.db.models import (
Q,
Case,
When,
Value,
CharField,
Count,
F,
Exists,
OuterRef,
Max,
Subquery,
JSONField,
Func,
Prefetch,
)
from django.utils import timezone
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from . import BaseAPIView
from plane.db.models import (
Issue,
IssueActivity,
ProjectMember,
Widget,
DashboardWidget,
Dashboard,
Project,
IssueLink,
IssueAttachment,
IssueRelation,
)
from plane.app.serializers import (
IssueActivitySerializer,
IssueSerializer,
DashboardSerializer,
WidgetSerializer,
)
from plane.utils.issue_filters import issue_filters
def dashboard_overview_stats(self, request, slug):
    """Return headline issue counts for the requesting user's dashboard.

    Computes, within the workspace ``slug`` and scoped to projects where
    the user is an active member: issues assigned to the user, assigned
    issues not yet completed/cancelled, issues the user created, and
    assigned issues in the "completed" state group.

    Responds 200 with a dict of the four counts.
    """
    # Filters shared by every count: workspace + active membership.
    member_scope = {
        "workspace__slug": slug,
        "project__project_projectmember__is_active": True,
        "project__project_projectmember__member": request.user,
    }

    assigned_total = Issue.issue_objects.filter(
        assignees__in=[request.user], **member_scope
    ).count()

    # Pending = assigned and not in a terminal state group.
    pending_total = Issue.issue_objects.filter(
        ~Q(state__group__in=["completed", "cancelled"]),
        assignees__in=[request.user],
        **member_scope,
    ).count()

    created_total = Issue.issue_objects.filter(
        created_by_id=request.user.id, **member_scope
    ).count()

    completed_total = Issue.issue_objects.filter(
        assignees__in=[request.user],
        state__group="completed",
        **member_scope,
    ).count()

    return Response(
        {
            "assigned_issues_count": assigned_total,
            "pending_issues_count": pending_total,
            "completed_issues_count": completed_total,
            "created_issues_count": created_total,
        },
        status=status.HTTP_200_OK,
    )
def dashboard_assigned_issues(self, request, slug):
    """Return issues assigned to the requesting user for a dashboard widget.

    The ``issue_type`` query parameter selects the bucket:
    ``completed``, ``overdue`` (open issues past their target date), or
    ``upcoming`` (open issues with a future target date). The response
    carries the bucket's total ``count`` and its first five ``issues``.

    Returns HTTP 400 when ``issue_type`` is missing or unrecognised.
    """
    filters = issue_filters(request.query_params, "GET")
    issue_type = request.GET.get("issue_type", None)

    # Base queryset: all issues assigned to the user in projects where the
    # user is an active member, with counts and relations pre-annotated.
    assigned_issues = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            assignees__in=[request.user],
        )
        .filter(**filters)
        .select_related("workspace", "project", "state", "parent")
        .prefetch_related("assignees", "labels")
        .prefetch_related(
            Prefetch(
                "issue_relation",
                queryset=IssueRelation.objects.select_related(
                    "related_issue"
                ).select_related("issue"),
            )
        )
        .annotate(cycle_id=F("issue_cycle__cycle_id"))
        .annotate(module_id=F("issue_module__module_id"))
        .annotate(
            link_count=IssueLink.objects.filter(issue=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            attachment_count=IssueAttachment.objects.filter(
                issue=OuterRef("id")
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            sub_issues_count=Issue.issue_objects.filter(
                parent=OuterRef("id")
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .order_by("created_at")
    )

    # Priority ordering: urgent first, none last.
    priority_order = ["urgent", "high", "medium", "low", "none"]
    assigned_issues = assigned_issues.annotate(
        priority_order=Case(
            *[
                When(priority=p, then=Value(i))
                for i, p in enumerate(priority_order)
            ],
            # NOTE(review): the Case emits integers cast to CharField;
            # lexicographic ordering happens to work because all values are
            # single digits — an IntegerField output would be more robust.
            output_field=CharField(),
        )
    ).order_by("priority_order")

    open_groups = ["backlog", "unstarted", "started"]
    # Per-bucket filters; every bucket shares the same response shape, so
    # the three near-identical branches collapse into one.
    bucket_filters = {
        "completed": Q(state__group__in=["completed"]),
        "overdue": Q(
            state__group__in=open_groups,
            target_date__lt=timezone.now(),
        ),
        "upcoming": Q(
            state__group__in=open_groups,
            target_date__gte=timezone.now(),
        ),
    }
    bucket = bucket_filters.get(issue_type)
    if bucket is None:
        return Response(
            {"error": "Please specify a valid issue type"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    bucket_issues = assigned_issues.filter(bucket)
    return Response(
        {
            "issues": IssueSerializer(
                bucket_issues[:5], many=True, expand=self.expand
            ).data,
            "count": bucket_issues.count(),
        },
        status=status.HTTP_200_OK,
    )
def dashboard_created_issues(self, request, slug):
    """Return issues created by the requesting user for a dashboard widget.

    The ``issue_type`` query parameter selects the bucket:
    ``completed``, ``overdue`` (open issues past their target date), or
    ``upcoming`` (open issues with a future target date). The response
    carries the bucket's total ``count`` and its first five ``issues``.

    Returns HTTP 400 when ``issue_type`` is missing or unrecognised.
    """
    filters = issue_filters(request.query_params, "GET")
    issue_type = request.GET.get("issue_type", None)

    # Base queryset: all issues created by the user in projects where the
    # user is an active member, with counts and relations pre-annotated.
    created_issues = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            created_by=request.user,
        )
        .filter(**filters)
        .select_related("workspace", "project", "state", "parent")
        .prefetch_related("assignees", "labels")
        .annotate(cycle_id=F("issue_cycle__cycle_id"))
        .annotate(module_id=F("issue_module__module_id"))
        .annotate(
            link_count=IssueLink.objects.filter(issue=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            attachment_count=IssueAttachment.objects.filter(
                issue=OuterRef("id")
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            sub_issues_count=Issue.issue_objects.filter(
                parent=OuterRef("id")
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .order_by("created_at")
    )

    # Priority ordering: urgent first, none last.
    priority_order = ["urgent", "high", "medium", "low", "none"]
    created_issues = created_issues.annotate(
        priority_order=Case(
            *[
                When(priority=p, then=Value(i))
                for i, p in enumerate(priority_order)
            ],
            # NOTE(review): integers cast to CharField; ordering works only
            # because the values are single digits — see assigned issues.
            output_field=CharField(),
        )
    ).order_by("priority_order")

    open_groups = ["backlog", "unstarted", "started"]
    # Per-bucket filters; the three branches shared one response shape.
    bucket_filters = {
        "completed": Q(state__group__in=["completed"]),
        "overdue": Q(
            state__group__in=open_groups,
            target_date__lt=timezone.now(),
        ),
        "upcoming": Q(
            state__group__in=open_groups,
            target_date__gte=timezone.now(),
        ),
    }
    bucket = bucket_filters.get(issue_type)
    if bucket is None:
        return Response(
            {"error": "Please specify a valid issue type"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    bucket_issues = created_issues.filter(bucket)
    return Response(
        {
            "issues": IssueSerializer(bucket_issues[:5], many=True).data,
            "count": bucket_issues.count(),
        },
        status=status.HTTP_200_OK,
    )
def dashboard_issues_by_state_groups(self, request, slug):
    """Return the user's assigned-issue counts grouped by state group.

    Every known state group appears in the output, with a count of zero
    when the user has no assigned issues in that group.
    """
    filters = issue_filters(request.query_params, "GET")
    state_order = [
        "backlog",
        "unstarted",
        "started",
        "completed",
        "cancelled",
    ]
    grouped_counts = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            assignees__in=[request.user],
        )
        .filter(**filters)
        .values("state__group")
        .annotate(count=Count("id"))
    )
    # Seed every known group with zero so missing groups still appear.
    tally = dict.fromkeys(state_order, 0)
    for row in grouped_counts:
        tally[row["state__group"]] = row["count"]
    return Response(
        [{"state": group, "count": count} for group, count in tally.items()],
        status=status.HTTP_200_OK,
    )
def dashboard_issues_by_priority(self, request, slug):
    """Return the user's assigned-issue counts grouped by priority.

    Every known priority appears in the output, with a count of zero when
    the user has no assigned issues at that priority.
    """
    filters = issue_filters(request.query_params, "GET")
    priority_order = ["urgent", "high", "medium", "low", "none"]
    grouped_counts = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            assignees__in=[request.user],
        )
        .filter(**filters)
        .values("priority")
        .annotate(count=Count("id"))
    )
    # Seed every known priority with zero so missing ones still appear.
    tally = dict.fromkeys(priority_order, 0)
    for row in grouped_counts:
        tally[row["priority"]] = row["count"]
    return Response(
        [
            {"priority": priority, "count": count}
            for priority, count in tally.items()
        ],
        status=status.HTTP_200_OK,
    )
def dashboard_recent_activity(self, request, slug):
    """Return the user's eight most recent issue activities in the workspace.

    Comment, vote, reaction, and draft activities are excluded.
    """
    recent_activities = IssueActivity.objects.filter(
        ~Q(field__in=["comment", "vote", "reaction", "draft"]),
        workspace__slug=slug,
        project__project_projectmember__member=request.user,
        project__project_projectmember__is_active=True,
        actor=request.user,
    ).select_related("actor", "workspace", "issue", "project")[:8]
    serialized = IssueActivitySerializer(recent_activities, many=True)
    return Response(serialized.data, status=status.HTTP_200_OK)
def dashboard_recent_projects(self, request, slug):
    """Return up to four project IDs the user has been active in.

    Backfills with other projects the user is an active member of when
    recent activity covers fewer than four projects.

    NOTE(review): both ``distinct()`` and the set conversion discard the
    activity ordering, so the four IDs returned are not guaranteed to be
    the *most recently* active projects — confirm against the caller.
    """
    project_ids = (
        IssueActivity.objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            actor=request.user,
        )
        .values_list("project_id", flat=True)
        .distinct()
    )
    # distinct() already deduplicates; the set enables cheap membership
    # checks and updates below (the original used a redundant genexpr).
    unique_project_ids = set(project_ids)
    # Fetch additional projects only when fewer than four were found.
    if len(unique_project_ids) < 4:
        additional_projects = Project.objects.filter(
            project_projectmember__member=request.user,
            project_projectmember__is_active=True,
            workspace__slug=slug,
        ).exclude(id__in=unique_project_ids)
        unique_project_ids.update(
            additional_projects.values_list("id", flat=True)
        )
    return Response(
        list(unique_project_ids)[:4],
        status=status.HTTP_200_OK,
    )
def dashboard_recent_collaborators(self, request, slug):
    """Return up to eight collaborators with their active-issue counts.

    Picks the seven most active collaborators (by issue-activity count)
    across the user's projects, backfills from project members when fewer
    than seven have activity, and prepends the requesting user.
    """
    # All project IDs where the user is an active member.
    user_projects = Project.objects.filter(
        project_projectmember__member=request.user,
        project_projectmember__is_active=True,
        workspace__slug=slug,
    ).values_list("id", flat=True)

    # Users who performed activity in those projects, most active first.
    users_with_activities = (
        IssueActivity.objects.filter(
            workspace__slug=slug,
            project_id__in=user_projects,
        )
        .values("actor")
        .exclude(actor=request.user)
        .annotate(num_activities=Count("actor"))
        .order_by("-num_activities")
    )[:7]

    def _active_issue_count(user_id):
        # Count of unstarted/started issues assigned to the given user.
        # NOTE(review): not scoped to this workspace or the user's
        # projects — confirm that a global count is intended.
        return Issue.objects.filter(
            assignees__in=[user_id],
            state__group__in=["unstarted", "started"],
        ).count()

    users_with_active_issues = [
        {
            "user_id": activity["actor"],
            "active_issue_count": _active_issue_count(activity["actor"]),
        }
        for activity in users_with_activities
    ]

    if users_with_activities.count() < 7:
        # Backfill the remaining slots from plain project membership.
        additional_collaborators_needed = 7 - users_with_activities.count()
        additional_collaborators = list(
            set(
                ProjectMember.objects.filter(
                    ~Q(member=request.user),
                    project_id__in=user_projects,
                    workspace__slug=slug,
                )
                .exclude(
                    member__in=[
                        user["actor"] for user in users_with_activities
                    ]
                )
                .values_list("member", flat=True)
            )
        )[:additional_collaborators_needed]
        for collaborator_id in additional_collaborators:
            users_with_active_issues.append(
                {
                    "user_id": str(collaborator_id),
                    "active_issue_count": _active_issue_count(
                        collaborator_id
                    ),
                }
            )

    # BUG FIX: the original computed the requesting user's count before the
    # backfill loop, whose loop body reassigned the same variable, so the
    # inserted entry could carry the last collaborator's count instead.
    # Compute the user's own count at insertion time.
    users_with_active_issues.insert(
        0,
        {
            "user_id": request.user.id,
            "active_issue_count": _active_issue_count(request.user.id),
        },
    )
    return Response(users_with_active_issues, status=status.HTTP_200_OK)
class DashboardEndpoint(BaseAPIView):
    """CRUD for dashboards plus widget-data dispatch for the home dashboard."""

    def create(self, request, slug):
        """Create a dashboard from the request payload."""
        serializer = DashboardSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def patch(self, request, slug, pk):
        """Partially update dashboard ``pk``.

        BUG FIX: the original built the serializer without the instance,
        so a PATCH silently created a brand-new dashboard instead of
        updating the one addressed by ``pk``.
        """
        dashboard = Dashboard.objects.get(pk=pk)
        serializer = DashboardSerializer(
            dashboard, data=request.data, partial=True
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, slug, pk):
        """Delete dashboard ``pk``.

        BUG FIX: the original was a copy-paste of the update path — it
        validated and saved the payload and never deleted anything.
        """
        dashboard = Dashboard.objects.get(pk=pk)
        dashboard.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def get(self, request, slug, dashboard_id=None):
        """Return the home dashboard with widgets, or dispatch widget data.

        Without ``dashboard_id``: creates (if needed) and returns the
        user's default home dashboard together with its widgets.
        With ``dashboard_id``: dispatches to the handler named by the
        ``widget_key`` query parameter (default ``overview_stats``).
        """
        if not dashboard_id:
            dashboard_type = request.GET.get("dashboard_type", None)
            if dashboard_type == "home":
                dashboard, created = Dashboard.objects.get_or_create(
                    type_identifier=dashboard_type,
                    owned_by=request.user,
                    is_default=True,
                )

                if created:
                    # Seed the new dashboard with the default widget set.
                    widgets_to_fetch = [
                        "overview_stats",
                        "assigned_issues",
                        "created_issues",
                        "issues_by_state_groups",
                        "issues_by_priority",
                        "recent_activity",
                        "recent_projects",
                        "recent_collaborators",
                    ]

                    updated_dashboard_widgets = []
                    for widget_key in widgets_to_fetch:
                        # BUG FIX: values_list() returns a queryset; the
                        # original passed the queryset itself as widget_id.
                        # Take the first matching id instead.
                        widget = (
                            Widget.objects.filter(key=widget_key)
                            .values_list("id", flat=True)
                            .first()
                        )
                        if widget:
                            updated_dashboard_widgets.append(
                                DashboardWidget(
                                    widget_id=widget,
                                    dashboard_id=dashboard.id,
                                )
                            )

                    DashboardWidget.objects.bulk_create(
                        updated_dashboard_widgets, batch_size=100
                    )

                widgets = (
                    Widget.objects.annotate(
                        is_visible=Exists(
                            DashboardWidget.objects.filter(
                                widget_id=OuterRef("pk"),
                                dashboard_id=dashboard.id,
                                is_visible=True,
                            )
                        )
                    )
                    .annotate(
                        dashboard_filters=Subquery(
                            DashboardWidget.objects.filter(
                                widget_id=OuterRef("pk"),
                                dashboard_id=dashboard.id,
                                filters__isnull=False,
                            )
                            .exclude(filters={})
                            .values("filters")[:1]
                        )
                    )
                    .annotate(
                        # Per-dashboard filters override the widget default.
                        widget_filters=Case(
                            When(
                                dashboard_filters__isnull=False,
                                then=F("dashboard_filters"),
                            ),
                            default=F("filters"),
                            output_field=JSONField(),
                        )
                    )
                )
                return Response(
                    {
                        "dashboard": DashboardSerializer(dashboard).data,
                        "widgets": WidgetSerializer(widgets, many=True).data,
                    },
                    status=status.HTTP_200_OK,
                )
            return Response(
                {"error": "Please specify a valid dashboard type"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Dispatch a single widget's data via the module-level handlers.
        widget_key = request.GET.get("widget_key", "overview_stats")

        WIDGETS_MAPPER = {
            "overview_stats": dashboard_overview_stats,
            "assigned_issues": dashboard_assigned_issues,
            "created_issues": dashboard_created_issues,
            "issues_by_state_groups": dashboard_issues_by_state_groups,
            "issues_by_priority": dashboard_issues_by_priority,
            "recent_activity": dashboard_recent_activity,
            "recent_projects": dashboard_recent_projects,
            "recent_collaborators": dashboard_recent_collaborators,
        }

        func = WIDGETS_MAPPER.get(widget_key)
        if func is not None:
            response = func(
                self,
                request=request,
                slug=slug,
            )
            if isinstance(response, Response):
                return response

        return Response(
            {"error": "Please specify a valid widget key"},
            status=status.HTTP_400_BAD_REQUEST,
        )
class WidgetsEndpoint(BaseAPIView):
    """Update visibility, ordering, and filters of a widget on a dashboard."""

    def patch(self, request, dashboard_id, widget_id):
        """Partially update the dashboard-widget link.

        Accepts any subset of ``is_visible``, ``sort_order``, and
        ``filters`` in the request body; unspecified fields keep their
        current values.
        """
        dashboard_widget = DashboardWidget.objects.filter(
            widget_id=widget_id,
            dashboard_id=dashboard_id,
        ).first()
        # BUG FIX: .first() can return None, in which case the original
        # raised AttributeError (HTTP 500). Return a clean 404 instead.
        if dashboard_widget is None:
            return Response(
                {"error": "Widget not found"},
                status=status.HTTP_404_NOT_FOUND,
            )
        dashboard_widget.is_visible = request.data.get(
            "is_visible", dashboard_widget.is_visible
        )
        dashboard_widget.sort_order = request.data.get(
            "sort_order", dashboard_widget.sort_order
        )
        dashboard_widget.filters = request.data.get(
            "filters", dashboard_widget.filters
        )
        dashboard_widget.save()
        return Response(
            {"message": "successfully updated"}, status=status.HTTP_200_OK
        )

View File

@ -39,9 +39,13 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer_class = EstimateSerializer serializer_class = EstimateSerializer
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
estimates = Estimate.objects.filter( estimates = (
Estimate.objects.filter(
workspace__slug=slug, project_id=project_id workspace__slug=slug, project_id=project_id
).prefetch_related("points").select_related("workspace", "project") )
.prefetch_related("points")
.select_related("workspace", "project")
)
serializer = EstimateReadSerializer(estimates, many=True) serializer = EstimateReadSerializer(estimates, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -54,13 +58,17 @@ class BulkEstimatePointEndpoint(BaseViewSet):
estimate_points = request.data.get("estimate_points", []) estimate_points = request.data.get("estimate_points", [])
serializer = EstimatePointSerializer(data=request.data.get("estimate_points"), many=True) serializer = EstimatePointSerializer(
data=request.data.get("estimate_points"), many=True
)
if not serializer.is_valid(): if not serializer.is_valid():
return Response( return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST serializer.errors, status=status.HTTP_400_BAD_REQUEST
) )
estimate_serializer = EstimateSerializer(data=request.data.get("estimate")) estimate_serializer = EstimateSerializer(
data=request.data.get("estimate")
)
if not estimate_serializer.is_valid(): if not estimate_serializer.is_valid():
return Response( return Response(
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
@ -135,7 +143,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
estimate_points = EstimatePoint.objects.filter( estimate_points = EstimatePoint.objects.filter(
pk__in=[ pk__in=[
estimate_point.get("id") for estimate_point in estimate_points_data estimate_point.get("id")
for estimate_point in estimate_points_data
], ],
workspace__slug=slug, workspace__slug=slug,
project_id=project_id, project_id=project_id,
@ -157,10 +166,14 @@ class BulkEstimatePointEndpoint(BaseViewSet):
updated_estimate_points.append(estimate_point) updated_estimate_points.append(estimate_point)
EstimatePoint.objects.bulk_update( EstimatePoint.objects.bulk_update(
updated_estimate_points, ["value"], batch_size=10, updated_estimate_points,
["value"],
batch_size=10,
) )
estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) estimate_point_serializer = EstimatePointSerializer(
estimate_points, many=True
)
return Response( return Response(
{ {
"estimate": estimate_serializer.data, "estimate": estimate_serializer.data,

View File

@ -65,7 +65,9 @@ class ExportIssuesEndpoint(BaseAPIView):
workspace__slug=slug workspace__slug=slug
).select_related("workspace", "initiated_by") ).select_related("workspace", "initiated_by")
if request.GET.get("per_page", False) and request.GET.get("cursor", False): if request.GET.get("per_page", False) and request.GET.get(
"cursor", False
):
return self.paginate( return self.paginate(
request=request, request=request,
queryset=exporter_history, queryset=exporter_history,

View File

@ -14,7 +14,10 @@ from django.conf import settings
from .base import BaseAPIView from .base import BaseAPIView
from plane.app.permissions import ProjectEntityPermission from plane.app.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project from plane.db.models import Workspace, Project
from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer from plane.app.serializers import (
ProjectLiteSerializer,
WorkspaceLiteSerializer,
)
from plane.utils.integrations.github import get_release_notes from plane.utils.integrations.github import get_release_notes
from plane.license.utils.instance_value import get_configuration_value from plane.license.utils.instance_value import get_configuration_value
@ -51,7 +54,8 @@ class GPTIntegrationEndpoint(BaseAPIView):
if not task: if not task:
return Response( return Response(
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Task is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
final_text = task + "\n" + prompt final_text = task + "\n" + prompt
@ -89,7 +93,7 @@ class ReleaseNotesEndpoint(BaseAPIView):
class UnsplashEndpoint(BaseAPIView): class UnsplashEndpoint(BaseAPIView):
def get(self, request): def get(self, request):
UNSPLASH_ACCESS_KEY, = get_configuration_value( (UNSPLASH_ACCESS_KEY,) = get_configuration_value(
[ [
{ {
"key": "UNSPLASH_ACCESS_KEY", "key": "UNSPLASH_ACCESS_KEY",

View File

@ -35,14 +35,16 @@ from plane.app.serializers import (
ModuleSerializer, ModuleSerializer,
) )
from plane.utils.integrations.github import get_github_repo_details from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary from plane.utils.importers.jira import (
jira_project_issue_summary,
is_allowed_hostname,
)
from plane.bgtasks.importer_task import service_importer from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission from plane.app.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView): class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service): def get(self, request, slug, service):
if service == "github": if service == "github":
owner = request.GET.get("owner", False) owner = request.GET.get("owner", False)
@ -94,7 +96,8 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
for key, error_message in params.items(): for key, error_message in params.items():
if not request.GET.get(key, False): if not request.GET.get(key, False):
return Response( return Response(
{"error": error_message}, status=status.HTTP_400_BAD_REQUEST {"error": error_message},
status=status.HTTP_400_BAD_REQUEST,
) )
project_key = request.GET.get("project_key", "") project_key = request.GET.get("project_key", "")
@ -122,6 +125,7 @@ class ImportServiceEndpoint(BaseAPIView):
permission_classes = [ permission_classes = [
WorkSpaceAdminPermission, WorkSpaceAdminPermission,
] ]
def post(self, request, slug, service): def post(self, request, slug, service):
project_id = request.data.get("project_id", False) project_id = request.data.get("project_id", False)
@ -174,6 +178,21 @@ class ImportServiceEndpoint(BaseAPIView):
data = request.data.get("data", False) data = request.data.get("data", False)
metadata = request.data.get("metadata", False) metadata = request.data.get("metadata", False)
config = request.data.get("config", False) config = request.data.get("config", False)
cloud_hostname = metadata.get("cloud_hostname", False)
if not cloud_hostname:
return Response(
{"error": "Cloud hostname is required"},
status=status.HTTP_400_BAD_REQUEST,
)
if not is_allowed_hostname(cloud_hostname):
return Response(
{"error": "Hostname is not a valid hostname."},
status=status.HTTP_400_BAD_REQUEST,
)
if not data or not metadata: if not data or not metadata:
return Response( return Response(
{"error": "Data, config and metadata are required"}, {"error": "Data, config and metadata are required"},
@ -244,7 +263,9 @@ class ImportServiceEndpoint(BaseAPIView):
importer = Importer.objects.get( importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug pk=pk, service=service, workspace__slug=slug
) )
serializer = ImporterSerializer(importer, data=request.data, partial=True) serializer = ImporterSerializer(
importer, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -280,9 +301,9 @@ class BulkImportIssuesEndpoint(BaseAPIView):
).first() ).first()
# Get the maximum sequence_id # Get the maximum sequence_id
last_id = IssueSequence.objects.filter(project_id=project_id).aggregate( last_id = IssueSequence.objects.filter(
largest=Max("sequence") project_id=project_id
)["largest"] ).aggregate(largest=Max("sequence"))["largest"]
last_id = 1 if last_id is None else last_id + 1 last_id = 1 if last_id is None else last_id + 1
@ -315,7 +336,9 @@ class BulkImportIssuesEndpoint(BaseAPIView):
if issue_data.get("state", False) if issue_data.get("state", False)
else default_state.id, else default_state.id,
name=issue_data.get("name", "Issue Created through Bulk"), name=issue_data.get("name", "Issue Created through Bulk"),
description_html=issue_data.get("description_html", "<p></p>"), description_html=issue_data.get(
"description_html", "<p></p>"
),
description_stripped=( description_stripped=(
None None
if ( if (
@ -427,15 +450,21 @@ class BulkImportIssuesEndpoint(BaseAPIView):
for comment in comments_list for comment in comments_list
] ]
_ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100) _ = IssueComment.objects.bulk_create(
bulk_issue_comments, batch_size=100
)
# Attach Links # Attach Links
_ = IssueLink.objects.bulk_create( _ = IssueLink.objects.bulk_create(
[ [
IssueLink( IssueLink(
issue=issue, issue=issue,
url=issue_data.get("link", {}).get("url", "https://github.com"), url=issue_data.get("link", {}).get(
title=issue_data.get("link", {}).get("title", "Original Issue"), "url", "https://github.com"
),
title=issue_data.get("link", {}).get(
"title", "Original Issue"
),
project_id=project_id, project_id=project_id,
workspace_id=project.workspace_id, workspace_id=project.workspace_id,
created_by=request.user, created_by=request.user,
@ -472,7 +501,9 @@ class BulkImportModulesEndpoint(BaseAPIView):
ignore_conflicts=True, ignore_conflicts=True,
) )
modules = Module.objects.filter(id__in=[module.id for module in modules]) modules = Module.objects.filter(
id__in=[module.id for module in modules]
)
if len(modules) == len(modules_data): if len(modules) == len(modules_data):
_ = ModuleLink.objects.bulk_create( _ = ModuleLink.objects.bulk_create(
@ -520,6 +551,8 @@ class BulkImportModulesEndpoint(BaseAPIView):
else: else:
return Response( return Response(
{"message": "Modules created but issues could not be imported"}, {
"message": "Modules created but issues could not be imported"
},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )

View File

@ -62,7 +62,9 @@ class InboxViewSet(BaseViewSet):
serializer.save(project_id=self.kwargs.get("project_id")) serializer.save(project_id=self.kwargs.get("project_id"))
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
inbox = Inbox.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) inbox = Inbox.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
# Handle default inbox delete # Handle default inbox delete
if inbox.is_default: if inbox.is_default:
return Response( return Response(
@ -90,7 +92,8 @@ class InboxIssueViewSet(BaseViewSet):
super() super()
.get_queryset() .get_queryset()
.filter( .filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), Q(snoozed_till__gte=timezone.now())
| Q(snoozed_till__isnull=True),
workspace__slug=self.kwargs.get("slug"), workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"), project_id=self.kwargs.get("project_id"),
inbox_id=self.kwargs.get("inbox_id"), inbox_id=self.kwargs.get("inbox_id"),
@ -111,7 +114,9 @@ class InboxIssueViewSet(BaseViewSet):
.prefetch_related("assignees", "labels") .prefetch_related("assignees", "labels")
.order_by("issue_inbox__snoozed_till", "issue_inbox__status") .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -123,7 +128,9 @@ class InboxIssueViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -146,7 +153,8 @@ class InboxIssueViewSet(BaseViewSet):
def create(self, request, slug, project_id, inbox_id): def create(self, request, slug, project_id, inbox_id):
if not request.data.get("issue", {}).get("name", False): if not request.data.get("issue", {}).get("name", False):
return Response( return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Name is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
# Check for valid priority # Check for valid priority
@ -158,7 +166,8 @@ class InboxIssueViewSet(BaseViewSet):
"none", "none",
]: ]:
return Response( return Response(
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST {"error": "Invalid priority"},
status=status.HTTP_400_BAD_REQUEST,
) )
# Create or get state # Create or get state
@ -205,7 +214,10 @@ class InboxIssueViewSet(BaseViewSet):
def partial_update(self, request, slug, project_id, inbox_id, issue_id): def partial_update(self, request, slug, project_id, inbox_id, issue_id):
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
issue_id=issue_id, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id issue_id=issue_id,
workspace__slug=slug,
project_id=project_id,
inbox_id=inbox_id,
) )
# Get the project member # Get the project member
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
@ -228,7 +240,9 @@ class InboxIssueViewSet(BaseViewSet):
if bool(issue_data): if bool(issue_data):
issue = Issue.objects.get( issue = Issue.objects.get(
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id pk=inbox_issue.issue_id,
workspace__slug=slug,
project_id=project_id,
) )
# Only allow guests and viewers to edit name and description # Only allow guests and viewers to edit name and description
if project_member.role <= 10: if project_member.role <= 10:
@ -238,7 +252,9 @@ class InboxIssueViewSet(BaseViewSet):
"description_html": issue_data.get( "description_html": issue_data.get(
"description_html", issue.description_html "description_html", issue.description_html
), ),
"description": issue_data.get("description", issue.description), "description": issue_data.get(
"description", issue.description
),
} }
issue_serializer = IssueCreateSerializer( issue_serializer = IssueCreateSerializer(
@ -284,7 +300,9 @@ class InboxIssueViewSet(BaseViewSet):
project_id=project_id, project_id=project_id,
) )
state = State.objects.filter( state = State.objects.filter(
group="cancelled", workspace__slug=slug, project_id=project_id group="cancelled",
workspace__slug=slug,
project_id=project_id,
).first() ).first()
if state is not None: if state is not None:
issue.state = state issue.state = state
@ -302,17 +320,22 @@ class InboxIssueViewSet(BaseViewSet):
if issue.state.name == "Triage": if issue.state.name == "Triage":
# Move to default state # Move to default state
state = State.objects.filter( state = State.objects.filter(
workspace__slug=slug, project_id=project_id, default=True workspace__slug=slug,
project_id=project_id,
default=True,
).first() ).first()
if state is not None: if state is not None:
issue.state = state issue.state = state
issue.save() issue.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else: else:
return Response( return Response(
InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK InboxIssueSerializer(inbox_issue).data,
status=status.HTTP_200_OK,
) )
def retrieve(self, request, slug, project_id, inbox_id, issue_id): def retrieve(self, request, slug, project_id, inbox_id, issue_id):
@ -324,7 +347,10 @@ class InboxIssueViewSet(BaseViewSet):
def destroy(self, request, slug, project_id, inbox_id, issue_id): def destroy(self, request, slug, project_id, inbox_id, issue_id):
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
issue_id=issue_id, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id issue_id=issue_id,
workspace__slug=slug,
project_id=project_id,
inbox_id=inbox_id,
) )
# Get the project member # Get the project member
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
@ -351,4 +377,3 @@ class InboxIssueViewSet(BaseViewSet):
inbox_issue.delete() inbox_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -1,6 +1,7 @@
# Python improts # Python improts
import uuid import uuid
import requests import requests
# Django imports # Django imports
from django.contrib.auth.hashers import make_password from django.contrib.auth.hashers import make_password
@ -19,7 +20,10 @@ from plane.db.models import (
WorkspaceMember, WorkspaceMember,
APIToken, APIToken,
) )
from plane.app.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer from plane.app.serializers import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
)
from plane.utils.integrations.github import ( from plane.utils.integrations.github import (
get_github_metadata, get_github_metadata,
delete_github_installation, delete_github_installation,
@ -27,6 +31,7 @@ from plane.utils.integrations.github import (
from plane.app.permissions import WorkSpaceAdminPermission from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet): class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer serializer_class = IntegrationSerializer
model = Integration model = Integration
@ -101,7 +106,10 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
code = request.data.get("code", False) code = request.data.get("code", False)
if not code: if not code:
return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST) return Response(
{"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
)
slack_response = slack_oauth(code=code) slack_response = slack_oauth(code=code)
@ -110,7 +118,9 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
team_id = metadata.get("team", {}).get("id", False) team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id: if not metadata or not access_token or not team_id:
return Response( return Response(
{"error": "Slack could not be installed. Please try again later"}, {
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
config = {"team_id": team_id, "access_token": access_token} config = {"team_id": team_id, "access_token": access_token}

View File

@ -21,7 +21,10 @@ from plane.app.serializers import (
GithubCommentSyncSerializer, GithubCommentSyncSerializer,
) )
from plane.utils.integrations.github import get_github_repos from plane.utils.integrations.github import get_github_repos
from plane.app.permissions import ProjectBasePermission, ProjectEntityPermission from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
class GithubRepositoriesEndpoint(BaseAPIView): class GithubRepositoriesEndpoint(BaseAPIView):
@ -185,7 +188,6 @@ class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
class GithubCommentSyncViewSet(BaseViewSet): class GithubCommentSyncViewSet(BaseViewSet):
permission_classes = [ permission_classes = [
ProjectEntityPermission, ProjectEntityPermission,
] ]

View File

@ -8,9 +8,16 @@ from sentry_sdk import capture_exception
# Module imports # Module imports
from plane.app.views import BaseViewSet, BaseAPIView from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember from plane.db.models import (
SlackProjectSync,
WorkspaceIntegration,
ProjectMember,
)
from plane.app.serializers import SlackProjectSyncSerializer from plane.app.serializers import SlackProjectSyncSerializer
from plane.app.permissions import ProjectBasePermission, ProjectEntityPermission from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
from plane.utils.integrations.slack import slack_oauth from plane.utils.integrations.slack import slack_oauth
@ -38,7 +45,8 @@ class SlackProjectSyncViewSet(BaseViewSet):
if not code: if not code:
return Response( return Response(
{"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
slack_response = slack_oauth(code=code) slack_response = slack_oauth(code=code)
@ -54,7 +62,9 @@ class SlackProjectSyncViewSet(BaseViewSet):
access_token=slack_response.get("access_token"), access_token=slack_response.get("access_token"),
scopes=slack_response.get("scope"), scopes=slack_response.get("scope"),
bot_user_id=slack_response.get("bot_user_id"), bot_user_id=slack_response.get("bot_user_id"),
webhook_url=slack_response.get("incoming_webhook", {}).get("url"), webhook_url=slack_response.get("incoming_webhook", {}).get(
"url"
),
data=slack_response, data=slack_response,
team_id=slack_response.get("team", {}).get("id"), team_id=slack_response.get("team", {}).get("id"),
team_name=slack_response.get("team", {}).get("name"), team_name=slack_response.get("team", {}).get("name"),
@ -62,7 +72,9 @@ class SlackProjectSyncViewSet(BaseViewSet):
project_id=project_id, project_id=project_id,
) )
_ = ProjectMember.objects.get_or_create( _ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id member=workspace_integration.actor,
role=20,
project_id=project_id,
) )
serializer = SlackProjectSyncSerializer(slack_project_sync) serializer = SlackProjectSyncSerializer(slack_project_sync)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -74,6 +86,8 @@ class SlackProjectSyncViewSet(BaseViewSet):
) )
capture_exception(e) capture_exception(e)
return Response( return Response(
{"error": "Slack could not be installed. Please try again later"}, {
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )

View File

@ -34,11 +34,11 @@ from rest_framework.parsers import MultiPartParser, FormParser
# Module imports # Module imports
from . import BaseViewSet, BaseAPIView, WebhookMixin from . import BaseViewSet, BaseAPIView, WebhookMixin
from plane.app.serializers import ( from plane.app.serializers import (
IssueCreateSerializer,
IssueActivitySerializer, IssueActivitySerializer,
IssueCommentSerializer, IssueCommentSerializer,
IssuePropertySerializer, IssuePropertySerializer,
IssueSerializer, IssueSerializer,
IssueCreateSerializer,
LabelSerializer, LabelSerializer,
IssueFlatSerializer, IssueFlatSerializer,
IssueLinkSerializer, IssueLinkSerializer,
@ -52,7 +52,6 @@ from plane.app.serializers import (
IssueRelationSerializer, IssueRelationSerializer,
RelatedIssueSerializer, RelatedIssueSerializer,
IssuePublicSerializer, IssuePublicSerializer,
IssueRelationLiteSerializer,
) )
from plane.app.permissions import ( from plane.app.permissions import (
ProjectEntityPermission, ProjectEntityPermission,
@ -82,7 +81,7 @@ from plane.db.models import (
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
from collections import defaultdict
class IssueViewSet(WebhookMixin, BaseViewSet): class IssueViewSet(WebhookMixin, BaseViewSet):
def get_serializer_class(self): def get_serializer_class(self):
@ -110,13 +109,9 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.issue_objects.annotate( Issue.issue_objects.filter(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) project_id=self.kwargs.get("project_id")
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
) )
.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug")) .filter(workspace__slug=self.kwargs.get("slug"))
.select_related("project") .select_related("project")
.select_related("workspace") .select_related("workspace")
@ -139,17 +134,20 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate( .annotate(
is_subscribed=Exists( sub_issues_count=Issue.issue_objects.filter(
IssueSubscriber.objects.filter( parent=OuterRef("id")
subscriber=self.request.user, issue_id=OuterRef("id")
)
) )
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
) )
).distinct() ).distinct()
@ -159,7 +157,13 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -168,7 +172,9 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -216,7 +222,9 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
@ -244,35 +252,42 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
# Track the issue # Track the issue
issue_activity.delay( issue_activity.delay(
type="issue.activity.created", type="issue.activity.created",
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
self.request.data, cls=DjangoJSONEncoder
),
actor_id=str(request.user.id), actor_id=str(request.user.id),
issue_id=str(serializer.data.get("id", None)), issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id), project_id=str(project_id),
current_instance=None, current_instance=None,
epoch=int(timezone.now().timestamp()), epoch=int(timezone.now().timestamp()),
) )
issue = (
self.get_queryset().filter(pk=serializer.data["id"]).first()
)
serializer = IssueSerializer(issue)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk=None): def retrieve(self, request, slug, project_id, pk=None):
issue = Issue.issue_objects.annotate( issue = self.get_queryset().filter(pk=pk).first()
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
).get(workspace__slug=slug, project_id=project_id, pk=pk)
return Response( return Response(
IssueSerializer(issue, fields=self.fields, expand=self.expand).data, IssueSerializer(
issue, fields=self.fields, expand=self.expand
).data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
def partial_update(self, request, slug, project_id, pk=None): def partial_update(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps( current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder IssueSerializer(issue).data, cls=DjangoJSONEncoder
) )
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
serializer = IssueCreateSerializer(issue, data=request.data, partial=True) serializer = IssueCreateSerializer(
issue, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
issue_activity.delay( issue_activity.delay(
@ -284,11 +299,16 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
current_instance=current_instance, current_instance=current_instance,
epoch=int(timezone.now().timestamp()), epoch=int(timezone.now().timestamp()),
) )
return Response(serializer.data, status=status.HTTP_200_OK) issue = self.get_queryset().filter(pk=pk).first()
return Response(
IssueSerializer(issue).data, status=status.HTTP_200_OK
)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, pk=None): def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps( current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder IssueSerializer(issue).data, cls=DjangoJSONEncoder
) )
@ -311,7 +331,13 @@ class UserWorkSpaceIssues(BaseAPIView):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -325,7 +351,9 @@ class UserWorkSpaceIssues(BaseAPIView):
workspace__slug=slug, workspace__slug=slug,
) )
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -344,7 +372,9 @@ class UserWorkSpaceIssues(BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -361,7 +391,9 @@ class UserWorkSpaceIssues(BaseAPIView):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -409,7 +441,9 @@ class UserWorkSpaceIssues(BaseAPIView):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
@ -478,7 +512,9 @@ class IssueActivityEndpoint(BaseAPIView):
) )
) )
) )
issue_activities = IssueActivitySerializer(issue_activities, many=True).data issue_activities = IssueActivitySerializer(
issue_activities, many=True
).data
issue_comments = IssueCommentSerializer(issue_comments, many=True).data issue_comments = IssueCommentSerializer(issue_comments, many=True).data
result_list = sorted( result_list = sorted(
@ -536,7 +572,9 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
) )
issue_activity.delay( issue_activity.delay(
type="comment.activity.created", type="comment.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
serializer.data, cls=DjangoJSONEncoder
),
actor_id=str(self.request.user.id), actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")), issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")), project_id=str(self.kwargs.get("project_id")),
@ -548,7 +586,10 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
def partial_update(self, request, slug, project_id, issue_id, pk): def partial_update(self, request, slug, project_id, issue_id, pk):
issue_comment = IssueComment.objects.get( issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps( current_instance = json.dumps(
@ -574,7 +615,10 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
def destroy(self, request, slug, project_id, issue_id, pk): def destroy(self, request, slug, project_id, issue_id, pk):
issue_comment = IssueComment.objects.get( issue_comment = IssueComment.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
current_instance = json.dumps( current_instance = json.dumps(
IssueCommentSerializer(issue_comment).data, IssueCommentSerializer(issue_comment).data,
@ -604,7 +648,9 @@ class IssueUserDisplayPropertyEndpoint(BaseAPIView):
project_id=project_id, project_id=project_id,
) )
issue_property.filters = request.data.get("filters", issue_property.filters) issue_property.filters = request.data.get(
"filters", issue_property.filters
)
issue_property.display_filters = request.data.get( issue_property.display_filters = request.data.get(
"display_filters", issue_property.display_filters "display_filters", issue_property.display_filters
) )
@ -635,11 +681,17 @@ class LabelViewSet(BaseViewSet):
serializer = LabelSerializer(data=request.data) serializer = LabelSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(project_id=project_id) serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) serializer.data, status=status.HTTP_201_CREATED
)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError: except IntegrityError:
return Response( return Response(
{"error": "Label with the same name already exists in the project"}, {
"error": "Label with the same name already exists in the project"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -694,7 +746,9 @@ class SubIssuesEndpoint(BaseAPIView):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def get(self, request, slug, project_id, issue_id): def get(self, request, slug, project_id, issue_id):
sub_issues = ( sub_issues = (
Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug) Issue.issue_objects.filter(
parent_id=issue_id, workspace__slug=slug
)
.select_related("project") .select_related("project")
.select_related("workspace") .select_related("workspace")
.select_related("state") .select_related("state")
@ -702,7 +756,9 @@ class SubIssuesEndpoint(BaseAPIView):
.prefetch_related("assignees") .prefetch_related("assignees")
.prefetch_related("labels") .prefetch_related("labels")
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -714,37 +770,26 @@ class SubIssuesEndpoint(BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
subscriber=self.request.user, issue_id=OuterRef("id")
)
)
)
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"issue_reactions", "issue_reactions",
queryset=IssueReaction.objects.select_related("actor"), queryset=IssueReaction.objects.select_related("actor"),
) )
) )
.annotate(state_group=F("state__group"))
) )
state_distribution = ( # create's a dict with state group name with their respective issue id's
State.objects.filter(workspace__slug=slug, state_issue__parent_id=issue_id) result = defaultdict(list)
.annotate(state_group=F("group")) for sub_issue in sub_issues:
.values("state_group") result[sub_issue.state_group].append(str(sub_issue.id))
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
result = {
item["state_group"]: item["state_count"] for item in state_distribution
}
serializer = IssueSerializer( serializer = IssueSerializer(
sub_issues, sub_issues,
@ -776,7 +821,7 @@ class SubIssuesEndpoint(BaseAPIView):
_ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)
updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids).annotate(state_group=F("state__group"))
# Track the issue # Track the issue
_ = [ _ = [
@ -792,12 +837,25 @@ class SubIssuesEndpoint(BaseAPIView):
for sub_issue_id in sub_issue_ids for sub_issue_id in sub_issue_ids
] ]
# create's a dict with state group name with their respective issue id's
result = defaultdict(list)
for sub_issue in updated_sub_issues:
result[sub_issue.state_group].append(str(sub_issue.id))
serializer = IssueSerializer(
updated_sub_issues,
many=True,
)
return Response( return Response(
IssueSerializer(updated_sub_issues, many=True).data, {
"sub_issues": serializer.data,
"state_distribution": result,
},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
class IssueLinkViewSet(BaseViewSet): class IssueLinkViewSet(BaseViewSet):
permission_classes = [ permission_classes = [
ProjectEntityPermission, ProjectEntityPermission,
@ -827,7 +885,9 @@ class IssueLinkViewSet(BaseViewSet):
) )
issue_activity.delay( issue_activity.delay(
type="link.activity.created", type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
serializer.data, cls=DjangoJSONEncoder
),
actor_id=str(self.request.user.id), actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")), issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")), project_id=str(self.kwargs.get("project_id")),
@ -839,14 +899,19 @@ class IssueLinkViewSet(BaseViewSet):
def partial_update(self, request, slug, project_id, issue_id, pk): def partial_update(self, request, slug, project_id, issue_id, pk):
issue_link = IssueLink.objects.get( issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
current_instance = json.dumps( current_instance = json.dumps(
IssueLinkSerializer(issue_link).data, IssueLinkSerializer(issue_link).data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
) )
serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True) serializer = IssueLinkSerializer(
issue_link, data=request.data, partial=True
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
issue_activity.delay( issue_activity.delay(
@ -863,7 +928,10 @@ class IssueLinkViewSet(BaseViewSet):
def destroy(self, request, slug, project_id, issue_id, pk): def destroy(self, request, slug, project_id, issue_id, pk):
issue_link = IssueLink.objects.get( issue_link = IssueLink.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
pk=pk,
) )
current_instance = json.dumps( current_instance = json.dumps(
IssueLinkSerializer(issue_link).data, IssueLinkSerializer(issue_link).data,
@ -989,13 +1057,23 @@ class IssueArchiveViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
show_sub_issues = request.GET.get("show_sub_issues", "true") show_sub_issues = request.GET.get("show_sub_issues", "true")
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -1011,7 +1089,9 @@ class IssueArchiveViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1021,7 +1101,9 @@ class IssueArchiveViewSet(BaseViewSet):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -1069,7 +1151,9 @@ class IssueArchiveViewSet(BaseViewSet):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
@ -1080,7 +1164,7 @@ class IssueArchiveViewSet(BaseViewSet):
else issue_queryset.filter(parent__isnull=True) else issue_queryset.filter(parent__isnull=True)
) )
issues = IssueLiteSerializer( issues = IssueSerializer(
issue_queryset, many=True, fields=fields if fields else None issue_queryset, many=True, fields=fields if fields else None
).data ).data
return Response(issues, status=status.HTTP_200_OK) return Response(issues, status=status.HTTP_200_OK)
@ -1157,24 +1241,11 @@ class IssueSubscriberViewSet(BaseViewSet):
) )
def list(self, request, slug, project_id, issue_id): def list(self, request, slug, project_id, issue_id):
members = ( members = ProjectMember.objects.filter(
ProjectMember.objects.filter(
workspace__slug=slug, workspace__slug=slug,
project_id=project_id, project_id=project_id,
is_active=True, is_active=True,
) ).select_related("member")
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
subscriber=OuterRef("member"),
)
)
)
.select_related("member")
)
serializer = ProjectMemberLiteSerializer(members, many=True) serializer = ProjectMemberLiteSerializer(members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -1229,7 +1300,9 @@ class IssueSubscriberViewSet(BaseViewSet):
workspace__slug=slug, workspace__slug=slug,
project=project_id, project=project_id,
).exists() ).exists()
return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK) return Response(
{"subscribed": issue_subscriber}, status=status.HTTP_200_OK
)
class IssueReactionViewSet(BaseViewSet): class IssueReactionViewSet(BaseViewSet):
@ -1386,7 +1459,9 @@ class IssueRelationViewSet(BaseViewSet):
def list(self, request, slug, project_id, issue_id): def list(self, request, slug, project_id, issue_id):
issue_relations = ( issue_relations = (
IssueRelation.objects.filter(Q(issue_id=issue_id) | Q(related_issue=issue_id)) IssueRelation.objects.filter(
Q(issue_id=issue_id) | Q(related_issue=issue_id)
)
.filter(workspace__slug=self.kwargs.get("slug")) .filter(workspace__slug=self.kwargs.get("slug"))
.select_related("project") .select_related("project")
.select_related("workspace") .select_related("workspace")
@ -1395,34 +1470,59 @@ class IssueRelationViewSet(BaseViewSet):
.distinct() .distinct()
) )
blocking_issues = issue_relations.filter(relation_type="blocked_by", related_issue_id=issue_id) blocking_issues = issue_relations.filter(
blocked_by_issues = issue_relations.filter(relation_type="blocked_by", issue_id=issue_id) relation_type="blocked_by", related_issue_id=issue_id
duplicate_issues = issue_relations.filter(issue_id=issue_id, relation_type="duplicate") )
duplicate_issues_related = issue_relations.filter(related_issue_id=issue_id, relation_type="duplicate") blocked_by_issues = issue_relations.filter(
relates_to_issues = issue_relations.filter(issue_id=issue_id, relation_type="relates_to") relation_type="blocked_by", issue_id=issue_id
relates_to_issues_related = issue_relations.filter(related_issue_id=issue_id, relation_type="relates_to") )
duplicate_issues = issue_relations.filter(
issue_id=issue_id, relation_type="duplicate"
)
duplicate_issues_related = issue_relations.filter(
related_issue_id=issue_id, relation_type="duplicate"
)
relates_to_issues = issue_relations.filter(
issue_id=issue_id, relation_type="relates_to"
)
relates_to_issues_related = issue_relations.filter(
related_issue_id=issue_id, relation_type="relates_to"
)
blocked_by_issues_serialized = IssueRelationSerializer(blocked_by_issues, many=True).data blocked_by_issues_serialized = IssueRelationSerializer(
duplicate_issues_serialized = IssueRelationSerializer(duplicate_issues, many=True).data blocked_by_issues, many=True
relates_to_issues_serialized = IssueRelationSerializer(relates_to_issues, many=True).data ).data
duplicate_issues_serialized = IssueRelationSerializer(
duplicate_issues, many=True
).data
relates_to_issues_serialized = IssueRelationSerializer(
relates_to_issues, many=True
).data
# revere relation for blocked by issues # revere relation for blocked by issues
blocking_issues_serialized = RelatedIssueSerializer(blocking_issues, many=True).data blocking_issues_serialized = RelatedIssueSerializer(
blocking_issues, many=True
).data
# reverse relation for duplicate issues # reverse relation for duplicate issues
duplicate_issues_related_serialized = RelatedIssueSerializer(duplicate_issues_related, many=True).data duplicate_issues_related_serialized = RelatedIssueSerializer(
duplicate_issues_related, many=True
).data
# reverse relation for related issues # reverse relation for related issues
relates_to_issues_related_serialized = RelatedIssueSerializer(relates_to_issues_related, many=True).data relates_to_issues_related_serialized = RelatedIssueSerializer(
relates_to_issues_related, many=True
).data
response_data = { response_data = {
'blocking': blocking_issues_serialized, "blocking": blocking_issues_serialized,
'blocked_by': blocked_by_issues_serialized, "blocked_by": blocked_by_issues_serialized,
'duplicate': duplicate_issues_serialized + duplicate_issues_related_serialized, "duplicate": duplicate_issues_serialized
'relates_to': relates_to_issues_serialized + relates_to_issues_related_serialized, + duplicate_issues_related_serialized,
"relates_to": relates_to_issues_serialized
+ relates_to_issues_related_serialized,
} }
return Response(response_data, status=status.HTTP_200_OK) return Response(response_data, status=status.HTTP_200_OK)
def create(self, request, slug, project_id, issue_id): def create(self, request, slug, project_id, issue_id):
relation_type = request.data.get("relation_type", None) relation_type = request.data.get("relation_type", None)
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
@ -1431,9 +1531,15 @@ class IssueRelationViewSet(BaseViewSet):
issue_relation = IssueRelation.objects.bulk_create( issue_relation = IssueRelation.objects.bulk_create(
[ [
IssueRelation( IssueRelation(
issue_id=issue if relation_type == "blocking" else issue_id, issue_id=issue
related_issue_id=issue_id if relation_type == "blocking" else issue, if relation_type == "blocking"
relation_type="blocked_by" if relation_type == "blocking" else relation_type, else issue_id,
related_issue_id=issue_id
if relation_type == "blocking"
else issue,
relation_type="blocked_by"
if relation_type == "blocking"
else relation_type,
project_id=project_id, project_id=project_id,
workspace_id=project.workspace_id, workspace_id=project.workspace_id,
created_by=request.user, created_by=request.user,
@ -1472,11 +1578,17 @@ class IssueRelationViewSet(BaseViewSet):
if relation_type == "blocking": if relation_type == "blocking":
issue_relation = IssueRelation.objects.get( issue_relation = IssueRelation.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=related_issue, related_issue_id=issue_id workspace__slug=slug,
project_id=project_id,
issue_id=related_issue,
related_issue_id=issue_id,
) )
else: else:
issue_relation = IssueRelation.objects.get( issue_relation = IssueRelation.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, related_issue_id=related_issue workspace__slug=slug,
project_id=project_id,
issue_id=issue_id,
related_issue_id=related_issue,
) )
current_instance = json.dumps( current_instance = json.dumps(
IssueRelationSerializer(issue_relation).data, IssueRelationSerializer(issue_relation).data,
@ -1505,7 +1617,9 @@ class IssueDraftViewSet(BaseViewSet):
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.objects.annotate( Issue.objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1530,11 +1644,21 @@ class IssueDraftViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -1550,7 +1674,9 @@ class IssueDraftViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1560,7 +1686,9 @@ class IssueDraftViewSet(BaseViewSet):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -1608,12 +1736,14 @@ class IssueDraftViewSet(BaseViewSet):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
issues = IssueLiteSerializer( issues = IssueSerializer(
issue_queryset, many=True, fields=fields if fields else None issue_queryset, many=True, fields=fields if fields else None
).data ).data
return Response(issues, status=status.HTTP_200_OK) return Response(issues, status=status.HTTP_200_OK)
@ -1636,7 +1766,9 @@ class IssueDraftViewSet(BaseViewSet):
# Track the issue # Track the issue
issue_activity.delay( issue_activity.delay(
type="issue_draft.activity.created", type="issue_draft.activity.created",
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), requested_data=json.dumps(
self.request.data, cls=DjangoJSONEncoder
),
actor_id=str(request.user.id), actor_id=str(request.user.id),
issue_id=str(serializer.data.get("id", None)), issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id), project_id=str(project_id),
@ -1647,14 +1779,18 @@ class IssueDraftViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
serializer = IssueSerializer(issue, data=request.data, partial=True) serializer = IssueSerializer(issue, data=request.data, partial=True)
if serializer.is_valid(): if serializer.is_valid():
if request.data.get("is_draft") is not None and not request.data.get( if request.data.get(
"is_draft" "is_draft"
): ) is not None and not request.data.get("is_draft"):
serializer.save(created_at=timezone.now(), updated_at=timezone.now()) serializer.save(
created_at=timezone.now(), updated_at=timezone.now()
)
else: else:
serializer.save() serializer.save()
issue_activity.delay( issue_activity.delay(
@ -1679,7 +1815,9 @@ class IssueDraftViewSet(BaseViewSet):
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, pk=None): def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
current_instance = json.dumps( current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder IssueSerializer(issue).data, cls=DjangoJSONEncoder
) )

View File

@ -20,10 +20,13 @@ from plane.app.serializers import (
ModuleIssueSerializer, ModuleIssueSerializer,
ModuleLinkSerializer, ModuleLinkSerializer,
ModuleFavoriteSerializer, ModuleFavoriteSerializer,
IssueStateSerializer, IssueSerializer,
ModuleUserPropertiesSerializer, ModuleUserPropertiesSerializer,
) )
from plane.app.permissions import ProjectEntityPermission, ProjectLitePermission from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,
)
from plane.db.models import ( from plane.db.models import (
Module, Module,
ModuleIssue, ModuleIssue,
@ -33,6 +36,7 @@ from plane.db.models import (
ModuleFavorite, ModuleFavorite,
IssueLink, IssueLink,
IssueAttachment, IssueAttachment,
IssueSubscriber,
ModuleUserProperties, ModuleUserProperties,
) )
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
@ -75,7 +79,9 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"link_module", "link_module",
queryset=ModuleLink.objects.select_related("module", "created_by"), queryset=ModuleLink.objects.select_related(
"module", "created_by"
),
) )
) )
.annotate( .annotate(
@ -156,7 +162,11 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
queryset = self.get_queryset() queryset = self.get_queryset()
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
modules = ModuleSerializer( modules = ModuleSerializer(
queryset, many=True, fields=fields if fields else None queryset, many=True, fields=fields if fields else None
).data ).data
@ -176,7 +186,13 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
.annotate(assignee_id=F("assignees__id")) .annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name")) .annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar")) .annotate(avatar=F("assignees__avatar"))
.values("first_name", "last_name", "assignee_id", "avatar", "display_name") .values(
"first_name",
"last_name",
"assignee_id",
"avatar",
"display_name",
)
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"assignee_id", "assignee_id",
@ -260,7 +276,10 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
if queryset.start_date and queryset.target_date: if queryset.start_date and queryset.target_date:
data["distribution"]["completion_chart"] = burndown_plot( data["distribution"]["completion_chart"] = burndown_plot(
queryset=queryset, slug=slug, project_id=project_id, module_id=pk queryset=queryset,
slug=slug,
project_id=project_id,
module_id=pk,
) )
return Response( return Response(
@ -269,9 +288,13 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
) )
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
module_issues = list( module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) ModuleIssue.objects.filter(module_id=pk).values_list(
"issue", flat=True
)
) )
issue_activity.delay( issue_activity.delay(
type="module.activity.deleted", type="module.activity.deleted",
@ -312,7 +335,9 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
super() super()
.get_queryset() .get_queryset()
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("issue")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -332,13 +357,19 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id): def list(self, request, slug, project_id, module_id):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
order_by = request.GET.get("order_by", "created_at") order_by = request.GET.get("order_by", "created_at")
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
issues = ( issues = (
Issue.issue_objects.filter(issue_module__module_id=module_id) Issue.issue_objects.filter(issue_module__module_id=module_id)
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -353,6 +384,8 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
.prefetch_related("labels") .prefetch_related("labels")
.order_by(order_by) .order_by(order_by)
.filter(**filters) .filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate( .annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id")) link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by() .order_by()
@ -360,13 +393,22 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
subscriber=self.request.user, issue_id=OuterRef("id")
) )
serializer = IssueStateSerializer( )
)
)
serializer = IssueSerializer(
issues, many=True, fields=fields if fields else None issues, many=True, fields=fields if fields else None
) )
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -375,7 +417,8 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
if not len(issues): if not len(issues):
return Response( return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
module = Module.objects.get( module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=module_id workspace__slug=slug, project_id=project_id, pk=module_id
@ -447,8 +490,12 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
epoch=int(timezone.now().timestamp()), epoch=int(timezone.now().timestamp()),
) )
issues = self.get_queryset().values_list("issue_id", flat=True)
return Response( return Response(
ModuleIssueSerializer(self.get_queryset(), many=True).data, IssueSerializer(
Issue.objects.filter(pk__in=issues), many=True
).data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )

View File

@ -51,8 +51,10 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
# Filters based on query parameters # Filters based on query parameters
snoozed_filters = { snoozed_filters = {
"true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False), "true": Q(snoozed_till__lt=timezone.now())
"false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), | Q(snoozed_till__isnull=False),
"false": Q(snoozed_till__gte=timezone.now())
| Q(snoozed_till__isnull=True),
} }
notifications = notifications.filter(snoozed_filters[snoozed]) notifications = notifications.filter(snoozed_filters[snoozed])
@ -72,14 +74,18 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
issue_ids = IssueSubscriber.objects.filter( issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Assigned Issues # Assigned Issues
if type == "assigned": if type == "assigned":
issue_ids = IssueAssignee.objects.filter( issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Created issues # Created issues
if type == "created": if type == "created":
@ -94,10 +100,14 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
issue_ids = Issue.objects.filter( issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True) ).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Pagination # Pagination
if request.GET.get("per_page", False) and request.GET.get("cursor", False): if request.GET.get("per_page", False) and request.GET.get(
"cursor", False
):
return self.paginate( return self.paginate(
request=request, request=request,
queryset=(notifications), queryset=(notifications),
@ -227,11 +237,13 @@ class MarkAllReadNotificationViewSet(BaseViewSet):
# Filter for snoozed notifications # Filter for snoozed notifications
if snoozed: if snoozed:
notifications = notifications.filter( notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) Q(snoozed_till__lt=timezone.now())
| Q(snoozed_till__isnull=False)
) )
else: else:
notifications = notifications.filter( notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), Q(snoozed_till__gte=timezone.now())
| Q(snoozed_till__isnull=True),
) )
# Filter for archived or unarchive # Filter for archived or unarchive
@ -245,14 +257,18 @@ class MarkAllReadNotificationViewSet(BaseViewSet):
issue_ids = IssueSubscriber.objects.filter( issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Assigned Issues # Assigned Issues
if type == "assigned": if type == "assigned":
issue_ids = IssueAssignee.objects.filter( issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True) ).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Created issues # Created issues
if type == "created": if type == "created":
@ -267,7 +283,9 @@ class MarkAllReadNotificationViewSet(BaseViewSet):
issue_ids = Issue.objects.filter( issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True) ).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids) notifications = notifications.filter(
entity_identifier__in=issue_ids
)
updated_notifications = [] updated_notifications = []
for notification in notifications: for notification in notifications:

View File

@ -97,7 +97,9 @@ class PageViewSet(BaseViewSet):
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
try: try:
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id) page = Page.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id
)
if page.is_locked: if page.is_locked:
return Response( return Response(
@ -127,7 +129,9 @@ class PageViewSet(BaseViewSet):
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except Page.DoesNotExist: except Page.DoesNotExist:
return Response( return Response(
{ {
@ -161,12 +165,17 @@ class PageViewSet(BaseViewSet):
return Response(pages, status=status.HTTP_200_OK) return Response(pages, status=status.HTTP_200_OK)
def archive(self, request, slug, project_id, page_id): def archive(self, request, slug, project_id, page_id):
page = Page.objects.get(pk=page_id, workspace__slug=slug, project_id=project_id) page = Page.objects.get(
pk=page_id, workspace__slug=slug, project_id=project_id
)
# only the owner and admin can archive the page # only the owner and admin can archive the page
if ( if (
ProjectMember.objects.filter( ProjectMember.objects.filter(
project_id=project_id, member=request.user, is_active=True, role__gt=20 project_id=project_id,
member=request.user,
is_active=True,
role__gt=20,
).exists() ).exists()
or request.user.id != page.owned_by_id or request.user.id != page.owned_by_id
): ):
@ -180,12 +189,17 @@ class PageViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def unarchive(self, request, slug, project_id, page_id): def unarchive(self, request, slug, project_id, page_id):
page = Page.objects.get(pk=page_id, workspace__slug=slug, project_id=project_id) page = Page.objects.get(
pk=page_id, workspace__slug=slug, project_id=project_id
)
# only the owner and admin can un archive the page # only the owner and admin can un archive the page
if ( if (
ProjectMember.objects.filter( ProjectMember.objects.filter(
project_id=project_id, member=request.user, is_active=True, role__gt=20 project_id=project_id,
member=request.user,
is_active=True,
role__gt=20,
).exists() ).exists()
or request.user.id != page.owned_by_id or request.user.id != page.owned_by_id
): ):
@ -212,14 +226,18 @@ class PageViewSet(BaseViewSet):
pages = PageSerializer(pages, many=True).data pages = PageSerializer(pages, many=True).data
return Response(pages, status=status.HTTP_200_OK) return Response(pages, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id) page = Page.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id
)
# only the owner and admin can delete the page # only the owner and admin can delete the page
if ( if (
ProjectMember.objects.filter( ProjectMember.objects.filter(
project_id=project_id, member=request.user, is_active=True, role__gt=20 project_id=project_id,
member=request.user,
is_active=True,
role__gt=20,
).exists() ).exists()
or request.user.id != page.owned_by_id or request.user.id != page.owned_by_id
): ):

View File

@ -86,9 +86,15 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
super() super()
.get_queryset() .get_queryset()
.filter(workspace__slug=self.kwargs.get("slug")) .filter(workspace__slug=self.kwargs.get("slug"))
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2)) .filter(
Q(project_projectmember__member=self.request.user)
| Q(network=2)
)
.select_related( .select_related(
"workspace", "workspace__owner", "default_assignee", "project_lead" "workspace",
"workspace__owner",
"default_assignee",
"project_lead",
) )
.annotate( .annotate(
is_favorite=Exists( is_favorite=Exists(
@ -160,7 +166,11 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
) )
def list(self, request, slug): def list(self, request, slug):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
sort_order_query = ProjectMember.objects.filter( sort_order_query = ProjectMember.objects.filter(
member=request.user, member=request.user,
@ -173,7 +183,9 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
.annotate(sort_order=Subquery(sort_order_query)) .annotate(sort_order=Subquery(sort_order_query))
.order_by("sort_order", "name") .order_by("sort_order", "name")
) )
if request.GET.get("per_page", False) and request.GET.get("cursor", False): if request.GET.get("per_page", False) and request.GET.get(
"cursor", False
):
return self.paginate( return self.paginate(
request=request, request=request,
queryset=(projects), queryset=(projects),
@ -181,10 +193,11 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
projects, many=True projects, many=True
).data, ).data,
) )
projects = ProjectListSerializer(projects, many=True, fields=fields if fields else None).data projects = ProjectListSerializer(
projects, many=True, fields=fields if fields else None
).data
return Response(projects, status=status.HTTP_200_OK) return Response(projects, status=status.HTTP_200_OK)
def create(self, request, slug): def create(self, request, slug):
try: try:
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
@ -197,7 +210,9 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
# Add the user as Administrator to the project # Add the user as Administrator to the project
project_member = ProjectMember.objects.create( project_member = ProjectMember.objects.create(
project_id=serializer.data["id"], member=request.user, role=20 project_id=serializer.data["id"],
member=request.user,
role=20,
) )
# Also create the issue property for the user # Also create the issue property for the user
_ = IssueProperty.objects.create( _ = IssueProperty.objects.create(
@ -270,9 +285,15 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
] ]
) )
project = self.get_queryset().filter(pk=serializer.data["id"]).first() project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
.first()
)
serializer = ProjectListSerializer(project) serializer = ProjectListSerializer(project)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
serializer.data, status=status.HTTP_201_CREATED
)
return Response( return Response(
serializer.errors, serializer.errors,
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
@ -285,7 +306,8 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
) )
except Workspace.DoesNotExist as e: except Workspace.DoesNotExist as e:
return Response( return Response(
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Workspace does not exist"},
status=status.HTTP_404_NOT_FOUND,
) )
except serializers.ValidationError as e: except serializers.ValidationError as e:
return Response( return Response(
@ -310,7 +332,9 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
serializer.save() serializer.save()
if serializer.data["inbox_view"]: if serializer.data["inbox_view"]:
Inbox.objects.get_or_create( Inbox.objects.get_or_create(
name=f"{project.name} Inbox", project=project, is_default=True name=f"{project.name} Inbox",
project=project,
is_default=True,
) )
# Create the triage state in Backlog group # Create the triage state in Backlog group
@ -322,10 +346,16 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
color="#ff7700", color="#ff7700",
) )
project = self.get_queryset().filter(pk=serializer.data["id"]).first() project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
.first()
)
serializer = ProjectListSerializer(project) serializer = ProjectListSerializer(project)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError as e: except IntegrityError as e:
if "already exists" in str(e): if "already exists" in str(e):
@ -335,7 +365,8 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
) )
except (Project.DoesNotExist, Workspace.DoesNotExist): except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response( return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
) )
except serializers.ValidationError as e: except serializers.ValidationError as e:
return Response( return Response(
@ -370,11 +401,14 @@ class ProjectInvitationsViewset(BaseViewSet):
# Check if email is provided # Check if email is provided
if not emails: if not emails:
return Response( return Response(
{"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Emails are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
requesting_user = ProjectMember.objects.get( requesting_user = ProjectMember.objects.get(
workspace__slug=slug, project_id=project_id, member_id=request.user.id workspace__slug=slug,
project_id=project_id,
member_id=request.user.id,
) )
# Check if any invited user has an higher role # Check if any invited user has an higher role
@ -548,7 +582,9 @@ class ProjectJoinEndpoint(BaseAPIView):
_ = WorkspaceMember.objects.create( _ = WorkspaceMember.objects.create(
workspace_id=project_invite.workspace_id, workspace_id=project_invite.workspace_id,
member=user, member=user,
role=15 if project_invite.role >= 15 else project_invite.role, role=15
if project_invite.role >= 15
else project_invite.role,
) )
else: else:
# Else make him active # Else make him active
@ -658,7 +694,8 @@ class ProjectMemberViewSet(BaseViewSet):
sort_order = [ sort_order = [
project_member.get("sort_order") project_member.get("sort_order")
for project_member in project_members for project_member in project_members
if str(project_member.get("member_id")) == str(member.get("member_id")) if str(project_member.get("member_id"))
== str(member.get("member_id"))
] ]
bulk_project_members.append( bulk_project_members.append(
ProjectMember( ProjectMember(
@ -666,7 +703,9 @@ class ProjectMemberViewSet(BaseViewSet):
role=member.get("role", 10), role=member.get("role", 10),
project_id=project_id, project_id=project_id,
workspace_id=project.workspace_id, workspace_id=project.workspace_id,
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535, sort_order=sort_order[0] - 10000
if len(sort_order)
else 65535,
) )
) )
bulk_issue_props.append( bulk_issue_props.append(
@ -719,7 +758,9 @@ class ProjectMemberViewSet(BaseViewSet):
is_active=True, is_active=True,
).select_related("project", "member", "workspace") ).select_related("project", "member", "workspace")
serializer = ProjectMemberRoleSerializer(project_members, fields=("id", "member", "role"), many=True) serializer = ProjectMemberRoleSerializer(
project_members, fields=("id", "member", "role"), many=True
)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
@ -747,7 +788,9 @@ class ProjectMemberViewSet(BaseViewSet):
> requested_project_member.role > requested_project_member.role
): ):
return Response( return Response(
{"error": "You cannot update a role that is higher than your own role"}, {
"error": "You cannot update a role that is higher than your own role"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -786,7 +829,9 @@ class ProjectMemberViewSet(BaseViewSet):
# User cannot deactivate higher role # User cannot deactivate higher role
if requesting_project_member.role < project_member.role: if requesting_project_member.role < project_member.role:
return Response( return Response(
{"error": "You cannot remove a user having role higher than you"}, {
"error": "You cannot remove a user having role higher than you"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -837,7 +882,8 @@ class AddTeamToProjectEndpoint(BaseAPIView):
if len(team_members) == 0: if len(team_members) == 0:
return Response( return Response(
{"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST {"error": "No such team exists"},
status=status.HTTP_400_BAD_REQUEST,
) )
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
@ -884,7 +930,8 @@ class ProjectIdentifierEndpoint(BaseAPIView):
if name == "": if name == "":
return Response( return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Name is required"},
status=status.HTTP_400_BAD_REQUEST,
) )
exists = ProjectIdentifier.objects.filter( exists = ProjectIdentifier.objects.filter(
@ -901,16 +948,23 @@ class ProjectIdentifierEndpoint(BaseAPIView):
if name == "": if name == "":
return Response( return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Name is required"},
)
if Project.objects.filter(identifier=name, workspace__slug=slug).exists():
return Response(
{"error": "Cannot delete an identifier of an existing project"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete() if Project.objects.filter(
identifier=name, workspace__slug=slug
).exists():
return Response(
{
"error": "Cannot delete an identifier of an existing project"
},
status=status.HTTP_400_BAD_REQUEST,
)
ProjectIdentifier.objects.filter(
name=name, workspace__slug=slug
).delete()
return Response( return Response(
status=status.HTTP_204_NO_CONTENT, status=status.HTTP_204_NO_CONTENT,
@ -928,7 +982,9 @@ class ProjectUserViewsEndpoint(BaseAPIView):
).first() ).first()
if project_member is None: if project_member is None:
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) return Response(
{"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN
)
view_props = project_member.view_props view_props = project_member.view_props
default_props = project_member.default_props default_props = project_member.default_props
@ -936,8 +992,12 @@ class ProjectUserViewsEndpoint(BaseAPIView):
sort_order = project_member.sort_order sort_order = project_member.sort_order
project_member.view_props = request.data.get("view_props", view_props) project_member.view_props = request.data.get("view_props", view_props)
project_member.default_props = request.data.get("default_props", default_props) project_member.default_props = request.data.get(
project_member.preferences = request.data.get("preferences", preferences) "default_props", default_props
)
project_member.preferences = request.data.get(
"preferences", preferences
)
project_member.sort_order = request.data.get("sort_order", sort_order) project_member.sort_order = request.data.get("sort_order", sort_order)
project_member.save() project_member.save()
@ -1085,6 +1145,7 @@ class UserProjectRolesEndpoint(BaseAPIView):
).values("project_id", "role") ).values("project_id", "role")
project_members = { project_members = {
str(member["project_id"]): member["role"] for member in project_members str(member["project_id"]): member["role"]
for member in project_members
} }
return Response(project_members, status=status.HTTP_200_OK) return Response(project_members, status=status.HTTP_200_OK)

View File

@ -10,7 +10,15 @@ from rest_framework.response import Response
# Module imports # Module imports
from .base import BaseAPIView from .base import BaseAPIView
from plane.db.models import Workspace, Project, Issue, Cycle, Module, Page, IssueView from plane.db.models import (
Workspace,
Project,
Issue,
Cycle,
Module,
Page,
IssueView,
)
from plane.utils.issue_search import search_issues from plane.utils.issue_search import search_issues
@ -25,7 +33,9 @@ class GlobalSearchEndpoint(BaseAPIView):
for field in fields: for field in fields:
q |= Q(**{f"{field}__icontains": query}) q |= Q(**{f"{field}__icontains": query})
return ( return (
Workspace.objects.filter(q, workspace_member__member=self.request.user) Workspace.objects.filter(
q, workspace_member__member=self.request.user
)
.distinct() .distinct()
.values("name", "id", "slug") .values("name", "id", "slug")
) )
@ -38,7 +48,8 @@ class GlobalSearchEndpoint(BaseAPIView):
return ( return (
Project.objects.filter( Project.objects.filter(
q, q,
Q(project_projectmember__member=self.request.user) | Q(network=2), Q(project_projectmember__member=self.request.user)
| Q(network=2),
workspace__slug=slug, workspace__slug=slug,
) )
.distinct() .distinct()
@ -169,7 +180,9 @@ class GlobalSearchEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
query = request.query_params.get("search", False) query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false") workspace_search = request.query_params.get(
"workspace_search", "false"
)
project_id = request.query_params.get("project_id", False) project_id = request.query_params.get("project_id", False)
if not query: if not query:
@ -209,7 +222,9 @@ class GlobalSearchEndpoint(BaseAPIView):
class IssueSearchEndpoint(BaseAPIView): class IssueSearchEndpoint(BaseAPIView):
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
query = request.query_params.get("search", False) query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false") workspace_search = request.query_params.get(
"workspace_search", "false"
)
parent = request.query_params.get("parent", "false") parent = request.query_params.get("parent", "false")
issue_relation = request.query_params.get("issue_relation", "false") issue_relation = request.query_params.get("issue_relation", "false")
cycle = request.query_params.get("cycle", "false") cycle = request.query_params.get("cycle", "false")
@ -234,9 +249,9 @@ class IssueSearchEndpoint(BaseAPIView):
issues = issues.filter( issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
).exclude( ).exclude(
pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list( pk__in=Issue.issue_objects.filter(
"parent_id", flat=True parent__isnull=False
) ).values_list("parent_id", flat=True)
) )
if issue_relation == "true" and issue_id: if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id) issue = Issue.issue_objects.get(pk=issue_id)

View File

@ -77,14 +77,19 @@ class StateViewSet(BaseViewSet):
) )
if state.default: if state.default:
return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST) return Response(
{"error": "Default state cannot be deleted"},
status=status.HTTP_400_BAD_REQUEST,
)
# Check for any issues in the state # Check for any issues in the state
issue_exist = Issue.issue_objects.filter(state=pk).exists() issue_exist = Issue.issue_objects.filter(state=pk).exists()
if issue_exist: if issue_exist:
return Response( return Response(
{"error": "The state is not empty, only empty states can be deleted"}, {
"error": "The state is not empty, only empty states can be deleted"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )

View File

@ -43,7 +43,9 @@ class UserEndpoint(BaseViewSet):
is_admin = InstanceAdmin.objects.filter( is_admin = InstanceAdmin.objects.filter(
instance=instance, user=request.user instance=instance, user=request.user
).exists() ).exists()
return Response({"is_instance_admin": is_admin}, status=status.HTTP_200_OK) return Response(
{"is_instance_admin": is_admin}, status=status.HTTP_200_OK
)
def deactivate(self, request): def deactivate(self, request):
# Check all workspace user is active # Check all workspace user is active
@ -51,7 +53,12 @@ class UserEndpoint(BaseViewSet):
# Instance admin check # Instance admin check
if InstanceAdmin.objects.filter(user=user).exists(): if InstanceAdmin.objects.filter(user=user).exists():
return Response({"error": "You cannot deactivate your account since you are an instance admin"}, status=status.HTTP_400_BAD_REQUEST) return Response(
{
"error": "You cannot deactivate your account since you are an instance admin"
},
status=status.HTTP_400_BAD_REQUEST,
)
projects_to_deactivate = [] projects_to_deactivate = []
workspaces_to_deactivate = [] workspaces_to_deactivate = []
@ -61,7 +68,10 @@ class UserEndpoint(BaseViewSet):
).annotate( ).annotate(
other_admin_exists=Count( other_admin_exists=Count(
Case( Case(
When(Q(role=20, is_active=True) & ~Q(member=request.user), then=1), When(
Q(role=20, is_active=True) & ~Q(member=request.user),
then=1,
),
default=0, default=0,
output_field=IntegerField(), output_field=IntegerField(),
) )
@ -86,7 +96,10 @@ class UserEndpoint(BaseViewSet):
).annotate( ).annotate(
other_admin_exists=Count( other_admin_exists=Count(
Case( Case(
When(Q(role=20, is_active=True) & ~Q(member=request.user), then=1), When(
Q(role=20, is_active=True) & ~Q(member=request.user),
then=1,
),
default=0, default=0,
output_field=IntegerField(), output_field=IntegerField(),
) )
@ -95,7 +108,9 @@ class UserEndpoint(BaseViewSet):
) )
for workspace in workspaces: for workspace in workspaces:
if workspace.other_admin_exists > 0 or (workspace.total_members == 1): if workspace.other_admin_exists > 0 or (
workspace.total_members == 1
):
workspace.is_active = False workspace.is_active = False
workspaces_to_deactivate.append(workspace) workspaces_to_deactivate.append(workspace)
else: else:
@ -134,7 +149,9 @@ class UpdateUserOnBoardedEndpoint(BaseAPIView):
user = User.objects.get(pk=request.user.id, is_active=True) user = User.objects.get(pk=request.user.id, is_active=True)
user.is_onboarded = request.data.get("is_onboarded", False) user.is_onboarded = request.data.get("is_onboarded", False)
user.save() user.save()
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) return Response(
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
class UpdateUserTourCompletedEndpoint(BaseAPIView): class UpdateUserTourCompletedEndpoint(BaseAPIView):
@ -142,14 +159,16 @@ class UpdateUserTourCompletedEndpoint(BaseAPIView):
user = User.objects.get(pk=request.user.id, is_active=True) user = User.objects.get(pk=request.user.id, is_active=True)
user.is_tour_completed = request.data.get("is_tour_completed", False) user.is_tour_completed = request.data.get("is_tour_completed", False)
user.save() user.save()
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) return Response(
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
class UserActivityEndpoint(BaseAPIView, BasePaginator): class UserActivityEndpoint(BaseAPIView, BasePaginator):
def get(self, request): def get(self, request):
queryset = IssueActivity.objects.filter(actor=request.user).select_related( queryset = IssueActivity.objects.filter(
"actor", "workspace", "issue", "project" actor=request.user
) ).select_related("actor", "workspace", "issue", "project")
return self.paginate( return self.paginate(
request=request, request=request,
@ -158,4 +177,3 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
issue_activities, many=True issue_activities, many=True
).data, ).data,
) )

View File

@ -24,7 +24,7 @@ from . import BaseViewSet, BaseAPIView
from plane.app.serializers import ( from plane.app.serializers import (
GlobalViewSerializer, GlobalViewSerializer,
IssueViewSerializer, IssueViewSerializer,
IssueLiteSerializer, IssueSerializer,
IssueViewFavoriteSerializer, IssueViewFavoriteSerializer,
) )
from plane.app.permissions import ( from plane.app.permissions import (
@ -42,6 +42,7 @@ from plane.db.models import (
IssueReaction, IssueReaction,
IssueLink, IssueLink,
IssueAttachment, IssueAttachment,
IssueSubscriber,
) )
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
from plane.utils.grouper import group_results from plane.utils.grouper import group_results
@ -78,7 +79,9 @@ class GlobalViewIssuesViewSet(BaseViewSet):
def get_queryset(self): def get_queryset(self):
return ( return (
Issue.issue_objects.annotate( Issue.issue_objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -101,11 +104,21 @@ class GlobalViewIssuesViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug): def list(self, request, slug):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
@ -122,17 +135,36 @@ class GlobalViewIssuesViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
is_subscribed=Exists(
IssueSubscriber.objects.filter(
subscriber=self.request.user, issue_id=OuterRef("id")
)
)
)
) )
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -180,12 +212,14 @@ class GlobalViewIssuesViewSet(BaseViewSet):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
serializer = IssueLiteSerializer( serializer = IssueSerializer(
issue_queryset, many=True, fields=fields if fields else None issue_queryset, many=True, fields=fields if fields else None
) )
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@ -223,7 +257,11 @@ class IssueViewViewSet(BaseViewSet):
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
queryset = self.get_queryset() queryset = self.get_queryset()
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
views = IssueViewSerializer( views = IssueViewSerializer(
queryset, many=True, fields=fields if fields else None queryset, many=True, fields=fields if fields else None
).data ).data

View File

@ -26,8 +26,12 @@ class WebhookEndpoint(BaseAPIView):
) )
if serializer.is_valid(): if serializer.is_valid():
serializer.save(workspace_id=workspace.id) serializer.save(workspace_id=workspace.id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) serializer.data, status=status.HTTP_201_CREATED
)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError as e: except IntegrityError as e:
if "already exists" in str(e): if "already exists" in str(e):
return Response( return Response(

View File

@ -66,7 +66,7 @@ from plane.db.models import (
WorkspaceMember, WorkspaceMember,
CycleIssue, CycleIssue,
IssueReaction, IssueReaction,
WorkspaceUserProperties WorkspaceUserProperties,
) )
from plane.app.permissions import ( from plane.app.permissions import (
WorkSpaceBasePermission, WorkSpaceBasePermission,
@ -116,7 +116,9 @@ class WorkSpaceViewSet(BaseViewSet):
.values("count") .values("count")
) )
return ( return (
self.filter_queryset(super().get_queryset().select_related("owner")) self.filter_queryset(
super().get_queryset().select_related("owner")
)
.order_by("name") .order_by("name")
.filter( .filter(
workspace_member__member=self.request.user, workspace_member__member=self.request.user,
@ -142,7 +144,9 @@ class WorkSpaceViewSet(BaseViewSet):
if len(name) > 80 or len(slug) > 48: if len(name) > 80 or len(slug) > 48:
return Response( return Response(
{"error": "The maximum length for name is 80 and for slug is 48"}, {
"error": "The maximum length for name is 80 and for slug is 48"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -155,7 +159,9 @@ class WorkSpaceViewSet(BaseViewSet):
role=20, role=20,
company_role=request.data.get("company_role", ""), company_role=request.data.get("company_role", ""),
) )
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(
serializer.data, status=status.HTTP_201_CREATED
)
return Response( return Response(
[serializer.errors[error][0] for error in serializer.errors], [serializer.errors[error][0] for error in serializer.errors],
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
@ -178,7 +184,11 @@ class UserWorkSpacesEndpoint(BaseAPIView):
] ]
def get(self, request): def get(self, request):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
member_count = ( member_count = (
WorkspaceMember.objects.filter( WorkspaceMember.objects.filter(
workspace=OuterRef("id"), workspace=OuterRef("id"),
@ -210,7 +220,8 @@ class UserWorkSpacesEndpoint(BaseAPIView):
.annotate(total_members=member_count) .annotate(total_members=member_count)
.annotate(total_issues=issue_count) .annotate(total_issues=issue_count)
.filter( .filter(
workspace_member__member=request.user, workspace_member__is_active=True workspace_member__member=request.user,
workspace_member__is_active=True,
) )
.distinct() .distinct()
) )
@ -259,7 +270,8 @@ class WorkspaceInvitationsViewset(BaseViewSet):
# Check if email is provided # Check if email is provided
if not emails: if not emails:
return Response( return Response(
{"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Emails are required"},
status=status.HTTP_400_BAD_REQUEST,
) )
# check for role level of the requesting user # check for role level of the requesting user
@ -586,7 +598,9 @@ class WorkSpaceMemberViewSet(BaseViewSet):
> requested_workspace_member.role > requested_workspace_member.role
): ):
return Response( return Response(
{"error": "You cannot update a role that is higher than your own role"}, {
"error": "You cannot update a role that is higher than your own role"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -625,7 +639,9 @@ class WorkSpaceMemberViewSet(BaseViewSet):
if requesting_workspace_member.role < workspace_member.role: if requesting_workspace_member.role < workspace_member.role:
return Response( return Response(
{"error": "You cannot remove a user having role higher than you"}, {
"error": "You cannot remove a user having role higher than you"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -729,11 +745,15 @@ class WorkspaceProjectMemberEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
# Fetch all project IDs where the user is involved # Fetch all project IDs where the user is involved
project_ids = ProjectMember.objects.filter( project_ids = (
ProjectMember.objects.filter(
member=request.user, member=request.user,
member__is_bot=False, member__is_bot=False,
is_active=True, is_active=True,
).values_list('project_id', flat=True).distinct() )
.values_list("project_id", flat=True)
.distinct()
)
# Get all the project members in which the user is involved # Get all the project members in which the user is involved
project_members = ProjectMember.objects.filter( project_members = ProjectMember.objects.filter(
@ -742,7 +762,9 @@ class WorkspaceProjectMemberEndpoint(BaseAPIView):
project_id__in=project_ids, project_id__in=project_ids,
is_active=True, is_active=True,
).select_related("project", "member", "workspace") ).select_related("project", "member", "workspace")
project_members = ProjectMemberRoleSerializer(project_members, many=True).data project_members = ProjectMemberRoleSerializer(
project_members, many=True
).data
project_members_dict = dict() project_members_dict = dict()
@ -790,7 +812,9 @@ class TeamMemberViewSet(BaseViewSet):
) )
if len(members) != len(request.data.get("members", [])): if len(members) != len(request.data.get("members", [])):
users = list(set(request.data.get("members", [])).difference(members)) users = list(
set(request.data.get("members", [])).difference(members)
)
users = User.objects.filter(pk__in=users) users = User.objects.filter(pk__in=users)
serializer = UserLiteSerializer(users, many=True) serializer = UserLiteSerializer(users, many=True)
@ -804,7 +828,9 @@ class TeamMemberViewSet(BaseViewSet):
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
serializer = TeamSerializer(data=request.data, context={"workspace": workspace}) serializer = TeamSerializer(
data=request.data, context={"workspace": workspace}
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
@ -833,7 +859,9 @@ class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
workspace_id=last_workspace_id, member=request.user workspace_id=last_workspace_id, member=request.user
).select_related("workspace", "project", "member", "workspace__owner") ).select_related("workspace", "project", "member", "workspace__owner")
project_member_serializer = ProjectMemberSerializer(project_member, many=True) project_member_serializer = ProjectMemberSerializer(
project_member, many=True
)
return Response( return Response(
{ {
@ -1017,7 +1045,11 @@ class WorkspaceThemeViewSet(BaseViewSet):
serializer_class = WorkspaceThemeSerializer serializer_class = WorkspaceThemeSerializer
def get_queryset(self): def get_queryset(self):
return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) return (
super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
)
def create(self, request, slug): def create(self, request, slug):
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
@ -1280,12 +1312,22 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
] ]
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
fields = [field for field in request.GET.get("fields", "").split(",") if field] fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = ( issue_queryset = (
@ -1298,7 +1340,9 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
) )
.filter(**filters) .filter(**filters)
.annotate( .annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1319,7 +1363,9 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1329,7 +1375,9 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1] priority_order
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -1377,7 +1425,9 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
else order_by_param else order_by_param
) )
).order_by( ).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values" "-max_values"
if order_by_param.startswith("-")
else "max_values"
) )
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) issue_queryset = issue_queryset.order_by(order_by_param)
@ -1397,7 +1447,9 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
labels = Label.objects.filter( labels = Label.objects.filter(
workspace__slug=slug, workspace__slug=slug,
project__project_projectmember__member=request.user, project__project_projectmember__member=request.user,
).values("parent", "name", "color", "id", "project_id", "workspace__slug") ).values(
"parent", "name", "color", "id", "project_id", "workspace__slug"
)
return Response(labels, status=status.HTTP_200_OK) return Response(labels, status=status.HTTP_200_OK)
@ -1412,16 +1464,25 @@ class WorkspaceUserPropertiesEndpoint(BaseAPIView):
workspace__slug=slug, workspace__slug=slug,
) )
workspace_properties.filters = request.data.get("filters", workspace_properties.filters) workspace_properties.filters = request.data.get(
workspace_properties.display_filters = request.data.get("display_filters", workspace_properties.display_filters) "filters", workspace_properties.filters
workspace_properties.display_properties = request.data.get("display_properties", workspace_properties.display_properties) )
workspace_properties.display_filters = request.data.get(
"display_filters", workspace_properties.display_filters
)
workspace_properties.display_properties = request.data.get(
"display_properties", workspace_properties.display_properties
)
workspace_properties.save() workspace_properties.save()
serializer = WorkspaceUserPropertiesSerializer(workspace_properties) serializer = WorkspaceUserPropertiesSerializer(workspace_properties)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
def get(self, request, slug): def get(self, request, slug):
workspace_properties, _ = WorkspaceUserProperties.objects.get_or_create( (
workspace_properties,
_,
) = WorkspaceUserProperties.objects.get_or_create(
user=request.user, workspace__slug=slug user=request.user, workspace__slug=slug
) )
serializer = WorkspaceUserPropertiesSerializer(workspace_properties) serializer = WorkspaceUserPropertiesSerializer(workspace_properties)

View File

@ -101,7 +101,9 @@ def get_assignee_details(slug, filters):
def get_label_details(slug, filters): def get_label_details(slug, filters):
"""Fetch label details if required""" """Fetch label details if required"""
return ( return (
Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False) Issue.objects.filter(
workspace__slug=slug, **filters, labels__id__isnull=False
)
.distinct("labels__id") .distinct("labels__id")
.order_by("labels__id") .order_by("labels__id")
.values("labels__id", "labels__color", "labels__name") .values("labels__id", "labels__color", "labels__name")
@ -174,7 +176,9 @@ def generate_segmented_rows(
): ):
segment_zero = list( segment_zero = list(
set( set(
item.get("segment") for sublist in distribution.values() for item in sublist item.get("segment")
for sublist in distribution.values()
for item in sublist
) )
) )
@ -193,7 +197,9 @@ def generate_segmented_rows(
] ]
for segment in segment_zero: for segment in segment_zero:
value = next((x.get(key) for x in data if x.get("segment") == segment), "0") value = next(
(x.get(key) for x in data if x.get("segment") == segment), "0"
)
generated_row.append(value) generated_row.append(value)
if x_axis == ASSIGNEE_ID: if x_axis == ASSIGNEE_ID:
@ -212,7 +218,11 @@ def generate_segmented_rows(
if x_axis == LABEL_ID: if x_axis == LABEL_ID:
label = next( label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), (
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(item)
),
None, None,
) )
@ -221,7 +231,11 @@ def generate_segmented_rows(
if x_axis == STATE_ID: if x_axis == STATE_ID:
state = next( state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)), (
sta
for sta in state_details
if str(sta[STATE_ID]) == str(item)
),
None, None,
) )
@ -230,7 +244,11 @@ def generate_segmented_rows(
if x_axis == CYCLE_ID: if x_axis == CYCLE_ID:
cycle = next( cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), (
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(item)
),
None, None,
) )
@ -239,7 +257,11 @@ def generate_segmented_rows(
if x_axis == MODULE_ID: if x_axis == MODULE_ID:
module = next( module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), (
mod
for mod in module_details
if str(mod[MODULE_ID]) == str(item)
),
None, None,
) )
@ -266,7 +288,11 @@ def generate_segmented_rows(
if segmented == LABEL_ID: if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]): for index, segm in enumerate(row_zero[2:]):
label = next( label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)), (
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(segm)
),
None, None,
) )
if label: if label:
@ -275,7 +301,11 @@ def generate_segmented_rows(
if segmented == STATE_ID: if segmented == STATE_ID:
for index, segm in enumerate(row_zero[2:]): for index, segm in enumerate(row_zero[2:]):
state = next( state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(segm)), (
sta
for sta in state_details
if str(sta[STATE_ID]) == str(segm)
),
None, None,
) )
if state: if state:
@ -284,7 +314,11 @@ def generate_segmented_rows(
if segmented == MODULE_ID: if segmented == MODULE_ID:
for index, segm in enumerate(row_zero[2:]): for index, segm in enumerate(row_zero[2:]):
module = next( module = next(
(mod for mod in label_details if str(mod[MODULE_ID]) == str(segm)), (
mod
for mod in label_details
if str(mod[MODULE_ID]) == str(segm)
),
None, None,
) )
if module: if module:
@ -293,7 +327,11 @@ def generate_segmented_rows(
if segmented == CYCLE_ID: if segmented == CYCLE_ID:
for index, segm in enumerate(row_zero[2:]): for index, segm in enumerate(row_zero[2:]):
cycle = next( cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)), (
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(segm)
),
None, None,
) )
if cycle: if cycle:
@ -315,7 +353,10 @@ def generate_non_segmented_rows(
): ):
rows = [] rows = []
for item, data in distribution.items(): for item, data in distribution.items():
row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")] row = [
item,
data[0].get("count" if y_axis == "issue_count" else "estimate"),
]
if x_axis == ASSIGNEE_ID: if x_axis == ASSIGNEE_ID:
assignee = next( assignee = next(
@ -333,7 +374,11 @@ def generate_non_segmented_rows(
if x_axis == LABEL_ID: if x_axis == LABEL_ID:
label = next( label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), (
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(item)
),
None, None,
) )
@ -342,7 +387,11 @@ def generate_non_segmented_rows(
if x_axis == STATE_ID: if x_axis == STATE_ID:
state = next( state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)), (
sta
for sta in state_details
if str(sta[STATE_ID]) == str(item)
),
None, None,
) )
@ -351,7 +400,11 @@ def generate_non_segmented_rows(
if x_axis == CYCLE_ID: if x_axis == CYCLE_ID:
cycle = next( cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), (
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(item)
),
None, None,
) )
@ -360,7 +413,11 @@ def generate_non_segmented_rows(
if x_axis == MODULE_ID: if x_axis == MODULE_ID:
module = next( module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), (
mod
for mod in module_details
if str(mod[MODULE_ID]) == str(item)
),
None, None,
) )
@ -369,7 +426,10 @@ def generate_non_segmented_rows(
rows.append(tuple(row)) rows.append(tuple(row))
row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")] row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
]
return [tuple(row_zero)] + rows return [tuple(row_zero)] + rows

View File

@ -2,4 +2,4 @@ from django.apps import AppConfig
class BgtasksConfig(AppConfig): class BgtasksConfig(AppConfig):
name = 'plane.bgtasks' name = "plane.bgtasks"

View File

@ -47,15 +47,17 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
"user_agent": user_agent, "user_agent": user_agent,
}, },
"medium": medium, "medium": medium,
"first_time": first_time "first_time": first_time,
} },
) )
except Exception as e: except Exception as e:
capture_exception(e) capture_exception(e)
@shared_task @shared_task
def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_from): def workspace_invite_event(
user, email, user_agent, ip, event_name, accepted_from
):
try: try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration() POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@ -71,8 +73,8 @@ def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_fro
"ip": ip, "ip": ip,
"user_agent": user_agent, "user_agent": user_agent,
}, },
"accepted_from": accepted_from "accepted_from": accepted_from,
} },
) )
except Exception as e: except Exception as e:
capture_exception(e) capture_exception(e)

View File

@ -68,7 +68,9 @@ def create_zip_file(files):
def upload_to_s3(zip_file, workspace_id, token_id, slug): def upload_to_s3(zip_file, workspace_id, token_id, slug):
file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip" file_name = (
f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
)
expires_in = 7 * 24 * 60 * 60 expires_in = 7 * 24 * 60 * 60
if settings.USE_MINIO: if settings.USE_MINIO:
@ -87,7 +89,10 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
) )
presigned_url = s3.generate_presigned_url( presigned_url = s3.generate_presigned_url(
"get_object", "get_object",
Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, Params={
"Bucket": settings.AWS_STORAGE_BUCKET_NAME,
"Key": file_name,
},
ExpiresIn=expires_in, ExpiresIn=expires_in,
) )
# Create the new url with updated domain and protocol # Create the new url with updated domain and protocol
@ -112,7 +117,10 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
presigned_url = s3.generate_presigned_url( presigned_url = s3.generate_presigned_url(
"get_object", "get_object",
Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, Params={
"Bucket": settings.AWS_STORAGE_BUCKET_NAME,
"Key": file_name,
},
ExpiresIn=expires_in, ExpiresIn=expires_in,
) )
@ -172,11 +180,17 @@ def generate_json_row(issue):
else "", else "",
"Labels": issue["labels__name"], "Labels": issue["labels__name"],
"Cycle Name": issue["issue_cycle__cycle__name"], "Cycle Name": issue["issue_cycle__cycle__name"],
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]), "Cycle Start Date": dateConverter(
issue["issue_cycle__cycle__start_date"]
),
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]), "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
"Module Name": issue["issue_module__module__name"], "Module Name": issue["issue_module__module__name"],
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]), "Module Start Date": dateConverter(
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]), issue["issue_module__module__start_date"]
),
"Module Target Date": dateConverter(
issue["issue_module__module__target_date"]
),
"Created At": dateTimeConverter(issue["created_at"]), "Created At": dateTimeConverter(issue["created_at"]),
"Updated At": dateTimeConverter(issue["updated_at"]), "Updated At": dateTimeConverter(issue["updated_at"]),
"Completed At": dateTimeConverter(issue["completed_at"]), "Completed At": dateTimeConverter(issue["completed_at"]),
@ -211,7 +225,11 @@ def update_json_row(rows, row):
def update_table_row(rows, row): def update_table_row(rows, row):
matched_index = next( matched_index = next(
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]), (
index
for index, existing_row in enumerate(rows)
if existing_row[0] == row[0]
),
None, None,
) )
@ -260,7 +278,9 @@ def generate_xlsx(header, project_id, issues, files):
@shared_task @shared_task
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug): def issue_export_task(
provider, workspace_id, project_ids, token_id, multiple, slug
):
try: try:
exporter_instance = ExporterHistory.objects.get(token=token_id) exporter_instance = ExporterHistory.objects.get(token=token_id)
exporter_instance.status = "processing" exporter_instance.status = "processing"
@ -273,9 +293,14 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
project_id__in=project_ids, project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id, project__project_projectmember__member=exporter_instance.initiated_by_id,
) )
.select_related("project", "workspace", "state", "parent", "created_by") .select_related(
"project", "workspace", "state", "parent", "created_by"
)
.prefetch_related( .prefetch_related(
"assignees", "labels", "issue_cycle__cycle", "issue_module__module" "assignees",
"labels",
"issue_cycle__cycle",
"issue_module__module",
) )
.values( .values(
"id", "id",

View File

@ -19,7 +19,8 @@ from plane.db.models import ExporterHistory
def delete_old_s3_link(): def delete_old_s3_link():
# Get a list of keys and IDs to process # Get a list of keys and IDs to process
expired_exporter_history = ExporterHistory.objects.filter( expired_exporter_history = ExporterHistory.objects.filter(
Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8)) Q(url__isnull=False)
& Q(created_at__lte=timezone.now() - timedelta(days=8))
).values_list("key", "id") ).values_list("key", "id")
if settings.USE_MINIO: if settings.USE_MINIO:
s3 = boto3.client( s3 = boto3.client(
@ -42,8 +43,12 @@ def delete_old_s3_link():
# Delete object from S3 # Delete object from S3
if file_name: if file_name:
if settings.USE_MINIO: if settings.USE_MINIO:
s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) s3.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name
)
else: else:
s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) s3.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name
)
ExporterHistory.objects.filter(id=exporter_id).update(url=None) ExporterHistory.objects.filter(id=exporter_id).update(url=None)

View File

@ -14,10 +14,10 @@ from plane.db.models import FileAsset
@shared_task @shared_task
def delete_file_asset(): def delete_file_asset():
# file assets to delete # file assets to delete
file_assets_to_delete = FileAsset.objects.filter( file_assets_to_delete = FileAsset.objects.filter(
Q(is_deleted=True) & Q(updated_at__lte=timezone.now() - timedelta(days=7)) Q(is_deleted=True)
& Q(updated_at__lte=timezone.now() - timedelta(days=7))
) )
# Delete the file from storage and the file object from the database # Delete the file from storage and the file object from the database
@ -26,4 +26,3 @@ def delete_file_asset():
file_asset.asset.delete(save=False) file_asset.asset.delete(save=False)
# Delete the file object # Delete the file object
file_asset.delete() file_asset.delete()

View File

@ -42,7 +42,9 @@ def forgot_password(first_name, email, uidb64, token, current_site):
"email": email, "email": email,
} }
html_content = render_to_string("emails/auth/forgot_password.html", context) html_content = render_to_string(
"emails/auth/forgot_password.html", context
)
text_content = strip_tags(html_content) text_content = strip_tags(html_content)

View File

@ -25,7 +25,6 @@ from plane.db.models import (
User, User,
IssueProperty, IssueProperty,
) )
from plane.bgtasks.user_welcome_task import send_welcome_slack
@shared_task @shared_task
@ -55,15 +54,6 @@ def service_importer(service, importer_id):
ignore_conflicts=True, ignore_conflicts=True,
) )
_ = [
send_welcome_slack.delay(
str(user.id),
True,
f"{user.email} was imported to Plane from {service}",
)
for user in new_users
]
workspace_users = User.objects.filter( workspace_users = User.objects.filter(
email__in=[ email__in=[
user.get("email").strip().lower() user.get("email").strip().lower()
@ -130,12 +120,17 @@ def service_importer(service, importer_id):
repository_id = importer.metadata.get("repository_id", False) repository_id = importer.metadata.get("repository_id", False)
workspace_integration = WorkspaceIntegration.objects.get( workspace_integration = WorkspaceIntegration.objects.get(
workspace_id=importer.workspace_id, integration__provider="github" workspace_id=importer.workspace_id,
integration__provider="github",
) )
# Delete the old repository object # Delete the old repository object
GithubRepositorySync.objects.filter(project_id=importer.project_id).delete() GithubRepositorySync.objects.filter(
GithubRepository.objects.filter(project_id=importer.project_id).delete() project_id=importer.project_id
).delete()
GithubRepository.objects.filter(
project_id=importer.project_id
).delete()
# Create a Label for github # Create a Label for github
label = Label.objects.filter( label = Label.objects.filter(

View File

@ -138,8 +138,12 @@ def track_parent(
project_id=project_id, project_id=project_id,
workspace_id=workspace_id, workspace_id=workspace_id,
comment=f"updated the parent issue to", comment=f"updated the parent issue to",
old_identifier=old_parent.id if old_parent is not None else None, old_identifier=old_parent.id
new_identifier=new_parent.id if new_parent is not None else None, if old_parent is not None
else None,
new_identifier=new_parent.id
if new_parent is not None
else None,
epoch=epoch, epoch=epoch,
) )
) )
@ -217,7 +221,9 @@ def track_target_date(
issue_activities, issue_activities,
epoch, epoch,
): ):
if current_instance.get("target_date") != requested_data.get("target_date"): if current_instance.get("target_date") != requested_data.get(
"target_date"
):
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=issue_id, issue_id=issue_id,
@ -281,8 +287,12 @@ def track_labels(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_labels = set([str(lab) for lab in requested_data.get("labels", [])]) requested_labels = set(
current_labels = set([str(lab) for lab in current_instance.get("labels", [])]) [str(lab) for lab in requested_data.get("labels", [])]
)
current_labels = set(
[str(lab) for lab in current_instance.get("labels", [])]
)
added_labels = requested_labels - current_labels added_labels = requested_labels - current_labels
dropped_labels = current_labels - requested_labels dropped_labels = current_labels - requested_labels
@ -339,8 +349,12 @@ def track_assignees(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])]) requested_assignees = set(
current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])]) [str(asg) for asg in requested_data.get("assignees", [])]
)
current_assignees = set(
[str(asg) for asg in current_instance.get("assignees", [])]
)
added_assignees = requested_assignees - current_assignees added_assignees = requested_assignees - current_assignees
dropped_assginees = current_assignees - requested_assignees dropped_assginees = current_assignees - requested_assignees
@ -392,7 +406,9 @@ def track_estimate_points(
issue_activities, issue_activities,
epoch, epoch,
): ):
if current_instance.get("estimate_point") != requested_data.get("estimate_point"): if current_instance.get("estimate_point") != requested_data.get(
"estimate_point"
):
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=issue_id, issue_id=issue_id,
@ -423,7 +439,9 @@ def track_archive_at(
issue_activities, issue_activities,
epoch, epoch,
): ):
if current_instance.get("archived_at") != requested_data.get("archived_at"): if current_instance.get("archived_at") != requested_data.get(
"archived_at"
):
if requested_data.get("archived_at") is None: if requested_data.get("archived_at") is None:
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
@ -536,7 +554,9 @@ def update_issue_activity(
"closed_to": track_closed_to, "closed_to": track_closed_to,
} }
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -589,7 +609,9 @@ def create_comment_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -621,12 +643,16 @@ def update_comment_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
if current_instance.get("comment_html") != requested_data.get("comment_html"): if current_instance.get("comment_html") != requested_data.get(
"comment_html"
):
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=issue_id, issue_id=issue_id,
@ -680,14 +706,18 @@ def create_cycle_issue_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
# Updated Records: # Updated Records:
updated_records = current_instance.get("updated_cycle_issues", []) updated_records = current_instance.get("updated_cycle_issues", [])
created_records = json.loads(current_instance.get("created_cycle_issues", [])) created_records = json.loads(
current_instance.get("created_cycle_issues", [])
)
for updated_record in updated_records: for updated_record in updated_records:
old_cycle = Cycle.objects.filter( old_cycle = Cycle.objects.filter(
@ -756,7 +786,9 @@ def delete_cycle_issue_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -798,14 +830,18 @@ def create_module_issue_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
# Updated Records: # Updated Records:
updated_records = current_instance.get("updated_module_issues", []) updated_records = current_instance.get("updated_module_issues", [])
created_records = json.loads(current_instance.get("created_module_issues", [])) created_records = json.loads(
current_instance.get("created_module_issues", [])
)
for updated_record in updated_records: for updated_record in updated_records:
old_module = Module.objects.filter( old_module = Module.objects.filter(
@ -873,7 +909,9 @@ def delete_module_issue_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -915,7 +953,9 @@ def create_link_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -946,7 +986,9 @@ def update_link_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -1010,7 +1052,9 @@ def create_attachment_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -1065,7 +1109,9 @@ def create_issue_reaction_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("reaction") is not None: if requested_data and requested_data.get("reaction") is not None:
issue_reaction = ( issue_reaction = (
IssueReaction.objects.filter( IssueReaction.objects.filter(
@ -1137,7 +1183,9 @@ def create_comment_reaction_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("reaction") is not None: if requested_data and requested_data.get("reaction") is not None:
comment_reaction_id, comment_id = ( comment_reaction_id, comment_id = (
CommentReaction.objects.filter( CommentReaction.objects.filter(
@ -1148,7 +1196,9 @@ def create_comment_reaction_activity(
.values_list("id", "comment__id") .values_list("id", "comment__id")
.first() .first()
) )
comment = IssueComment.objects.get(pk=comment_id, project_id=project_id) comment = IssueComment.objects.get(
pk=comment_id, project_id=project_id
)
if ( if (
comment is not None comment is not None
and comment_reaction_id is not None and comment_reaction_id is not None
@ -1222,7 +1272,9 @@ def create_issue_vote_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("vote") is not None: if requested_data and requested_data.get("vote") is not None:
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
@ -1284,7 +1336,9 @@ def create_issue_relation_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -1339,7 +1393,9 @@ def delete_issue_relation_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -1382,6 +1438,7 @@ def delete_issue_relation_activity(
) )
) )
def create_draft_issue_activity( def create_draft_issue_activity(
requested_data, requested_data,
current_instance, current_instance,
@ -1416,7 +1473,9 @@ def update_draft_issue_activity(
issue_activities, issue_activities,
epoch, epoch,
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -1543,7 +1602,9 @@ def issue_activity(
) )
# Save all the values to database # Save all the values to database
issue_activities_created = IssueActivity.objects.bulk_create(issue_activities) issue_activities_created = IssueActivity.objects.bulk_create(
issue_activities
)
# Post the updates to segway for integrations and webhooks # Post the updates to segway for integrations and webhooks
if len(issue_activities_created): if len(issue_activities_created):
# Don't send activities if the actor is a bot # Don't send activities if the actor is a bot
@ -1570,7 +1631,9 @@ def issue_activity(
project_id=project_id, project_id=project_id,
subscriber=subscriber, subscriber=subscriber,
issue_activities_created=json.dumps( issue_activities_created=json.dumps(
IssueActivitySerializer(issue_activities_created, many=True).data, IssueActivitySerializer(
issue_activities_created, many=True
).data,
cls=DjangoJSONEncoder, cls=DjangoJSONEncoder,
), ),
requested_data=requested_data, requested_data=requested_data,

View File

@ -36,7 +36,9 @@ def archive_old_issues():
Q( Q(
project=project_id, project=project_id,
archived_at__isnull=True, archived_at__isnull=True,
updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)), updated_at__lte=(
timezone.now() - timedelta(days=archive_in * 30)
),
state__group__in=["completed", "cancelled"], state__group__in=["completed", "cancelled"],
), ),
Q(issue_cycle__isnull=True) Q(issue_cycle__isnull=True)
@ -46,7 +48,9 @@ def archive_old_issues():
), ),
Q(issue_module__isnull=True) Q(issue_module__isnull=True)
| ( | (
Q(issue_module__module__target_date__lt=timezone.now().date()) Q(
issue_module__module__target_date__lt=timezone.now().date()
)
& Q(issue_module__isnull=False) & Q(issue_module__isnull=False)
), ),
).filter( ).filter(
@ -74,7 +78,9 @@ def archive_old_issues():
_ = [ _ = [
issue_activity.delay( issue_activity.delay(
type="issue.activity.updated", type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(archive_at)}), requested_data=json.dumps(
{"archived_at": str(archive_at)}
),
actor_id=str(project.created_by_id), actor_id=str(project.created_by_id),
issue_id=issue.id, issue_id=issue.id,
project_id=project_id, project_id=project_id,
@ -108,7 +114,9 @@ def close_old_issues():
Q( Q(
project=project_id, project=project_id,
archived_at__isnull=True, archived_at__isnull=True,
updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)), updated_at__lte=(
timezone.now() - timedelta(days=close_in * 30)
),
state__group__in=["backlog", "unstarted", "started"], state__group__in=["backlog", "unstarted", "started"],
), ),
Q(issue_cycle__isnull=True) Q(issue_cycle__isnull=True)
@ -118,7 +126,9 @@ def close_old_issues():
), ),
Q(issue_module__isnull=True) Q(issue_module__isnull=True)
| ( | (
Q(issue_module__module__target_date__lt=timezone.now().date()) Q(
issue_module__module__target_date__lt=timezone.now().date()
)
& Q(issue_module__isnull=False) & Q(issue_module__isnull=False)
), ),
).filter( ).filter(
@ -131,7 +141,9 @@ def close_old_issues():
# Check if Issues # Check if Issues
if issues: if issues:
if project.default_state is None: if project.default_state is None:
close_state = State.objects.filter(group="cancelled").first() close_state = State.objects.filter(
group="cancelled"
).first()
else: else:
close_state = project.default_state close_state = project.default_state

View File

@ -33,7 +33,9 @@ def magic_link(email, key, token, current_site):
subject = f"Your unique Plane login code is {token}" subject = f"Your unique Plane login code is {token}"
context = {"code": token, "email": email} context = {"code": token, "email": email}
html_content = render_to_string("emails/auth/magic_signin.html", context) html_content = render_to_string(
"emails/auth/magic_signin.html", context
)
text_content = strip_tags(html_content) text_content = strip_tags(html_content)
connection = get_connection( connection = get_connection(

View File

@ -12,7 +12,7 @@ from plane.db.models import (
Issue, Issue,
Notification, Notification,
IssueComment, IssueComment,
IssueActivity IssueActivity,
) )
# Third Party imports # Third Party imports
@ -20,9 +20,9 @@ from celery import shared_task
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
# =========== Issue Description Html Parsing and Notification Functions ====================== # =========== Issue Description Html Parsing and Notification Functions ======================
def update_mentions_for_issue(issue, project, new_mentions, removed_mention): def update_mentions_for_issue(issue, project, new_mentions, removed_mention):
aggregated_issue_mentions = [] aggregated_issue_mentions = []
@ -32,14 +32,14 @@ def update_mentions_for_issue(issue, project, new_mentions, removed_mention):
mention_id=mention_id, mention_id=mention_id,
issue=issue, issue=issue,
project=project, project=project,
workspace_id=project.workspace_id workspace_id=project.workspace_id,
) )
) )
IssueMention.objects.bulk_create( IssueMention.objects.bulk_create(aggregated_issue_mentions, batch_size=100)
aggregated_issue_mentions, batch_size=100)
IssueMention.objects.filter( IssueMention.objects.filter(
issue=issue, mention__in=removed_mention).delete() issue=issue, mention__in=removed_mention
).delete()
def get_new_mentions(requested_instance, current_instance): def get_new_mentions(requested_instance, current_instance):
@ -53,10 +53,12 @@ def get_new_mentions(requested_instance, current_instance):
# Getting Set Difference from mentions_newer # Getting Set Difference from mentions_newer
new_mentions = [ new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older] mention for mention in mentions_newer if mention not in mentions_older
]
return new_mentions return new_mentions
# Get Removed Mention # Get Removed Mention
@ -70,10 +72,12 @@ def get_removed_mentions(requested_instance, current_instance):
# Getting Set Difference from mentions_newer # Getting Set Difference from mentions_newer
removed_mentions = [ removed_mentions = [
mention for mention in mentions_older if mention not in mentions_newer] mention for mention in mentions_older if mention not in mentions_newer
]
return removed_mentions return removed_mentions
# Adds mentions as subscribers # Adds mentions as subscribers
@ -84,27 +88,34 @@ def extract_mentions_as_subscribers(project_id, issue_id, mentions):
for mention_id in mentions: for mention_id in mentions:
# If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification # If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification
if not IssueSubscriber.objects.filter( if (
not IssueSubscriber.objects.filter(
issue_id=issue_id, issue_id=issue_id,
subscriber_id=mention_id, subscriber_id=mention_id,
project_id=project_id, project_id=project_id,
).exists() and not IssueAssignee.objects.filter( ).exists()
project_id=project_id, issue_id=issue_id, and not IssueAssignee.objects.filter(
assignee_id=mention_id project_id=project_id,
).exists() and not Issue.objects.filter( issue_id=issue_id,
assignee_id=mention_id,
).exists()
and not Issue.objects.filter(
project_id=project_id, pk=issue_id, created_by_id=mention_id project_id=project_id, pk=issue_id, created_by_id=mention_id
).exists(): ).exists()
):
project = Project.objects.get(pk=project_id) project = Project.objects.get(pk=project_id)
bulk_mention_subscribers.append(IssueSubscriber( bulk_mention_subscribers.append(
IssueSubscriber(
workspace_id=project.workspace_id, workspace_id=project.workspace_id,
project_id=project_id, project_id=project_id,
issue_id=issue_id, issue_id=issue_id,
subscriber_id=mention_id, subscriber_id=mention_id,
)) )
)
return bulk_mention_subscribers return bulk_mention_subscribers
# Parse Issue Description & extracts mentions # Parse Issue Description & extracts mentions
def extract_mentions(issue_instance): def extract_mentions(issue_instance):
try: try:
@ -113,11 +124,12 @@ def extract_mentions(issue_instance):
# Convert string to dictionary # Convert string to dictionary
data = json.loads(issue_instance) data = json.loads(issue_instance)
html = data.get("description_html") html = data.get("description_html")
soup = BeautifulSoup(html, 'html.parser') soup = BeautifulSoup(html, "html.parser")
mention_tags = soup.find_all( mention_tags = soup.find_all(
'mention-component', attrs={'target': 'users'}) "mention-component", attrs={"target": "users"}
)
mentions = [mention_tag['id'] for mention_tag in mention_tags] mentions = [mention_tag["id"] for mention_tag in mention_tags]
return list(set(mentions)) return list(set(mentions))
except Exception as e: except Exception as e:
@ -128,18 +140,18 @@ def extract_mentions(issue_instance):
def extract_comment_mentions(comment_value):
    """Return the unique user ids mentioned in a comment's HTML body.

    Parses ``comment_value`` (an HTML string) and collects the ``id``
    attribute of every ``<mention-component target="users">`` tag.

    Returns:
        list: unique mention ids; an empty list on any parsing error
        (best-effort — a malformed comment must not break the
        notification pipeline).
    """
    try:
        soup = BeautifulSoup(comment_value, "html.parser")
        mention_tags = soup.find_all(
            "mention-component", attrs={"target": "users"}
        )
        # Deduplicate while extracting in a single pass instead of the
        # append-loop + list(set(...)) double pass.
        return list({mention_tag["id"] for mention_tag in mention_tags})
    except Exception:
        # Swallow parse errors deliberately: no mentions is the safe
        # fallback for the caller.
        return []
def get_new_comment_mentions(new_value, old_value):
    """Return mention ids present in ``new_value`` but not in ``old_value``.

    Both arguments are comment HTML strings. When ``old_value`` is
    ``None`` (a brand-new comment) every mention in ``new_value`` is
    considered new.

    Returns:
        list: mention ids that were added by the edit, in the order
        they appear in ``new_value``'s extracted mentions.
    """
    mentions_newer = extract_comment_mentions(new_value)
    if old_value is None:
        return mentions_newer

    # Set membership is O(1); the original scanned the older list once
    # per newer mention (O(new * old)).
    mentions_older = set(extract_comment_mentions(old_value))
    return [
        mention
        for mention in mentions_newer
        if mention not in mentions_older
    ]
def createMentionNotification(project, notification_comment, issue, actor_id, mention_id, issue_id, activity): def createMentionNotification(
project,
notification_comment,
issue,
actor_id,
mention_id,
issue_id,
activity,
):
return Notification( return Notification(
workspace=project.workspace, workspace=project.workspace,
sender="in_app:issue_activities:mentioned", sender="in_app:issue_activities:mentioned",
@ -178,16 +199,26 @@ def createMentionNotification(project, notification_comment, issue, actor_id, me
"actor": str(activity.get("actor_id")), "actor": str(activity.get("actor_id")),
"new_value": str(activity.get("new_value")), "new_value": str(activity.get("new_value")),
"old_value": str(activity.get("old_value")), "old_value": str(activity.get("old_value")),
} },
}, },
) )
@shared_task @shared_task
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance): def notifications(
type,
issue_id,
project_id,
actor_id,
subscriber,
issue_activities_created,
requested_data,
current_instance,
):
issue_activities_created = ( issue_activities_created = (
json.loads( json.loads(issue_activities_created)
issue_activities_created) if issue_activities_created is not None else None if issue_activities_created is not None
else None
) )
if type not in [ if type not in [
"issue.activity.deleted", "issue.activity.deleted",
@ -216,18 +247,24 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
# Get new mentions from the newer instance # Get new mentions from the newer instance
new_mentions = get_new_mentions( new_mentions = get_new_mentions(
requested_instance=requested_data, current_instance=current_instance) requested_instance=requested_data,
current_instance=current_instance,
)
removed_mention = get_removed_mentions( removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance) requested_instance=requested_data,
current_instance=current_instance,
)
comment_mentions = [] comment_mentions = []
all_comment_mentions = [] all_comment_mentions = []
# Get New Subscribers from the mentions of the newer instance # Get New Subscribers from the mentions of the newer instance
requested_mentions = extract_mentions( requested_mentions = extract_mentions(issue_instance=requested_data)
issue_instance=requested_data)
mention_subscribers = extract_mentions_as_subscribers( mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id, issue_id=issue_id, mentions=requested_mentions) project_id=project_id,
issue_id=issue_id,
mentions=requested_mentions,
)
for issue_activity in issue_activities_created: for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment") issue_comment = issue_activity.get("issue_comment")
@ -236,12 +273,22 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
if issue_comment is not None: if issue_comment is not None:
# TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well. # TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well.
all_comment_mentions = all_comment_mentions + extract_comment_mentions(issue_comment_new_value) all_comment_mentions = (
all_comment_mentions
+ extract_comment_mentions(issue_comment_new_value)
)
new_comment_mentions = get_new_comment_mentions(old_value=issue_comment_old_value, new_value=issue_comment_new_value) new_comment_mentions = get_new_comment_mentions(
old_value=issue_comment_old_value,
new_value=issue_comment_new_value,
)
comment_mentions = comment_mentions + new_comment_mentions comment_mentions = comment_mentions + new_comment_mentions
comment_mention_subscribers = extract_mentions_as_subscribers( project_id=project_id, issue_id=issue_id, mentions=all_comment_mentions) comment_mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id,
issue_id=issue_id,
mentions=all_comment_mentions,
)
""" """
We will not send subscription activity notification to the below mentioned user sets We will not send subscription activity notification to the below mentioned user sets
- Those who have been newly mentioned in the issue description, we will send mention notification to them. - Those who have been newly mentioned in the issue description, we will send mention notification to them.
@ -251,41 +298,59 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
issue_assignees = list( issue_assignees = list(
IssueAssignee.objects.filter( IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id) project_id=project_id, issue_id=issue_id
)
.exclude(assignee_id__in=list(new_mentions + comment_mentions)) .exclude(assignee_id__in=list(new_mentions + comment_mentions))
.values_list("assignee", flat=True) .values_list("assignee", flat=True)
) )
issue_subscribers = list( issue_subscribers = list(
IssueSubscriber.objects.filter( IssueSubscriber.objects.filter(
project_id=project_id, issue_id=issue_id) project_id=project_id, issue_id=issue_id
.exclude(subscriber_id__in=list(new_mentions + comment_mentions + [actor_id])) )
.exclude(
subscriber_id__in=list(
new_mentions + comment_mentions + [actor_id]
)
)
.values_list("subscriber", flat=True) .values_list("subscriber", flat=True)
) )
issue = Issue.objects.filter(pk=issue_id).first() issue = Issue.objects.filter(pk=issue_id).first()
if (issue.created_by_id is not None and str(issue.created_by_id) != str(actor_id)): if issue.created_by_id is not None and str(issue.created_by_id) != str(
actor_id
):
issue_subscribers = issue_subscribers + [issue.created_by_id] issue_subscribers = issue_subscribers + [issue.created_by_id]
if subscriber: if subscriber:
# add the user to issue subscriber # add the user to issue subscriber
try: try:
if str(issue.created_by_id) != str(actor_id) and uuid.UUID(actor_id) not in issue_assignees: if (
str(issue.created_by_id) != str(actor_id)
and uuid.UUID(actor_id) not in issue_assignees
):
_ = IssueSubscriber.objects.get_or_create( _ = IssueSubscriber.objects.get_or_create(
project_id=project_id, issue_id=issue_id, subscriber_id=actor_id project_id=project_id,
issue_id=issue_id,
subscriber_id=actor_id,
) )
except Exception as e: except Exception as e:
pass pass
project = Project.objects.get(pk=project_id) project = Project.objects.get(pk=project_id)
issue_subscribers = list(set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)}) issue_subscribers = list(
set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)}
)
for subscriber in issue_subscribers: for subscriber in issue_subscribers:
if subscriber in issue_subscribers: if subscriber in issue_subscribers:
sender = "in_app:issue_activities:subscribed" sender = "in_app:issue_activities:subscribed"
if issue.created_by_id is not None and subscriber == issue.created_by_id: if (
issue.created_by_id is not None
and subscriber == issue.created_by_id
):
sender = "in_app:issue_activities:created" sender = "in_app:issue_activities:created"
if subscriber in issue_assignees: if subscriber in issue_assignees:
sender = "in_app:issue_activities:assigned" sender = "in_app:issue_activities:assigned"
@ -293,11 +358,15 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
for issue_activity in issue_activities_created: for issue_activity in issue_activities_created:
# Do not send notification for description update # Do not send notification for description update
if issue_activity.get("field") == "description": if issue_activity.get("field") == "description":
continue; continue
issue_comment = issue_activity.get("issue_comment") issue_comment = issue_activity.get("issue_comment")
if issue_comment is not None: if issue_comment is not None:
issue_comment = IssueComment.objects.get( issue_comment = IssueComment.objects.get(
id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id) id=issue_comment,
issue_id=issue_id,
project_id=project_id,
workspace_id=project.workspace_id,
)
bulk_notifications.append( bulk_notifications.append(
Notification( Notification(
@ -323,11 +392,16 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"verb": str(issue_activity.get("verb")), "verb": str(issue_activity.get("verb")),
"field": str(issue_activity.get("field")), "field": str(issue_activity.get("field")),
"actor": str(issue_activity.get("actor_id")), "actor": str(issue_activity.get("actor_id")),
"new_value": str(issue_activity.get("new_value")), "new_value": str(
"old_value": str(issue_activity.get("old_value")), issue_activity.get("new_value")
),
"old_value": str(
issue_activity.get("old_value")
),
"issue_comment": str( "issue_comment": str(
issue_comment.comment_stripped issue_comment.comment_stripped
if issue_activity.get("issue_comment") is not None if issue_activity.get("issue_comment")
is not None
else "" else ""
), ),
}, },
@ -337,7 +411,8 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
# Add Mentioned as Issue Subscribers # Add Mentioned as Issue Subscribers
IssueSubscriber.objects.bulk_create( IssueSubscriber.objects.bulk_create(
mention_subscribers + comment_mention_subscribers, batch_size=100) mention_subscribers + comment_mention_subscribers, batch_size=100
)
last_activity = ( last_activity = (
IssueActivity.objects.filter(issue_id=issue_id) IssueActivity.objects.filter(issue_id=issue_id)
@ -348,7 +423,7 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
actor = User.objects.get(pk=actor_id) actor = User.objects.get(pk=actor_id)
for mention_id in comment_mentions: for mention_id in comment_mentions:
if (mention_id != actor_id): if mention_id != actor_id:
for issue_activity in issue_activities_created: for issue_activity in issue_activities_created:
notification = createMentionNotification( notification = createMentionNotification(
project=project, project=project,
@ -357,13 +432,12 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
actor_id=actor_id, actor_id=actor_id,
mention_id=mention_id, mention_id=mention_id,
issue_id=issue_id, issue_id=issue_id,
activity=issue_activity activity=issue_activity,
) )
bulk_notifications.append(notification) bulk_notifications.append(notification)
for mention_id in new_mentions: for mention_id in new_mentions:
if (mention_id != actor_id): if mention_id != actor_id:
if ( if (
last_activity is not None last_activity is not None
and last_activity.field == "description" and last_activity.field == "description"
@ -383,7 +457,9 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"issue": { "issue": {
"id": str(issue_id), "id": str(issue_id),
"name": str(issue.name), "name": str(issue.name),
"identifier": str(issue.project.identifier), "identifier": str(
issue.project.identifier
),
"sequence_id": issue.sequence_id, "sequence_id": issue.sequence_id,
"state_name": issue.state.name, "state_name": issue.state.name,
"state_group": issue.state.group, "state_group": issue.state.group,
@ -408,15 +484,17 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
actor_id=actor_id, actor_id=actor_id,
mention_id=mention_id, mention_id=mention_id,
issue_id=issue_id, issue_id=issue_id,
activity=issue_activity activity=issue_activity,
) )
bulk_notifications.append(notification) bulk_notifications.append(notification)
# save new mentions for the particular issue and remove the mentions that has been deleted from the description # save new mentions for the particular issue and remove the mentions that has been deleted from the description
update_mentions_for_issue(issue=issue, project=project, new_mentions=new_mentions, update_mentions_for_issue(
removed_mention=removed_mention) issue=issue,
project=project,
new_mentions=new_mentions,
removed_mention=removed_mention,
)
# Bulk create notifications # Bulk create notifications
Notification.objects.bulk_create(bulk_notifications, batch_size=100) Notification.objects.bulk_create(bulk_notifications, batch_size=100)

Some files were not shown because too many files have changed in this diff Show More