Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments

This commit is contained in:
NarayanBavisetti 2023-10-27 12:19:33 +05:30
commit 2628890068
1159 changed files with 62414 additions and 41851 deletions

View File

@ -36,15 +36,13 @@ jobs:
- name: Build Plane's Main App - name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true' if: steps.changed-files.outputs.web_any_changed == 'true'
run: | run: |
cd web
yarn yarn
yarn build yarn build --filter=web
- name: Build Plane's Deploy App - name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true' if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: | run: |
cd space
yarn yarn
yarn build yarn build --filter=space

79
.github/workflows/create-sync-pr.yml vendored Normal file
View File

@ -0,0 +1,79 @@
name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
branches:
- master
types:
- closed
jobs:
create_pr:
# Only run the job when a PR is merged
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: read
steps:
- name: Check SOURCE_REPO
id: check_repo
env:
SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
run: |
echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
- name: Checkout Code
if: steps.check_repo.outputs.is_correct_repo == 'true'
uses: actions/checkout@v2
with:
persist-credentials: false
fetch-depth: 0
- name: Set up Branch Name
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
- name: Setup GH CLI
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt update
sudo apt install gh -y
- name: Create Pull Request
if: steps.check_repo.outputs.is_correct_repo == 'true'
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
git checkout $SOURCE_BRANCH
git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target $SOURCE_BRANCH:$SOURCE_BRANCH
PR_TITLE="${{ github.event.pull_request.title }}"
PR_BODY="${{ github.event.pull_request.body }}"
# Remove double quotes
PR_TITLE_CLEANED="${PR_TITLE//\"/}"
PR_BODY_CLEANED="${PR_BODY//\"/}"
# Construct PR_BODY_CONTENT using a here-document
PR_BODY_CONTENT=$(cat <<EOF
$PR_BODY_CLEANED
EOF
)
gh pr create \
--base $TARGET_BRANCH \
--head $SOURCE_BRANCH \
--title "[SYNC] $PR_TITLE_CLEANED" \
--body "$PR_BODY_CONTENT" \
--repo $TARGET_REPO

View File

@ -39,10 +39,10 @@ jobs:
type=ref,event=tag type=ref,event=tag
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaDeploy id: metaSpace
uses: docker/metadata-action@v4.3.0 uses: docker/metadata-action@v4.3.0
with: with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-deploy images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
tags: | tags: |
type=ref,event=tag type=ref,event=tag
@ -87,7 +87,7 @@ jobs:
file: ./space/Dockerfile.space file: ./space/Dockerfile.space
platforms: linux/amd64 platforms: linux/amd64
push: true push: true
tags: ${{ steps.metaDeploy.outputs.tags }} tags: ${{ steps.metaSpace.outputs.tags }}
env: env:
DOCKER_BUILDKIT: 1 DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}

5
.gitignore vendored
View File

@ -16,6 +16,7 @@ node_modules
# Production # Production
/build /build
dist
# Misc # Misc
.DS_Store .DS_Store
@ -73,3 +74,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml pnpm-workspace.yaml
.npmrc .npmrc
tmp/
## packages
dist

View File

@ -30,6 +30,48 @@ The project is a monorepo, with backend api and frontend in a single repo.
The backend is a django project which is kept inside apiserver The backend is a django project which is kept inside apiserver
1. Clone the repo
```bash
git clone https://github.com/makeplane/plane
cd plane
chmod +x setup.sh
```
2. Run setup.sh
```bash
./setup.sh
```
3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
```bash
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
```
```bash
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
```
4. Run Docker compose up
```bash
docker compose up -d
```
5. Install dependencies
```bash
yarn install
```
6. Run the web app in development mode
```bash
yarn dev
```
## Missing a Feature? ## Missing a Feature?
If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository. If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.

134
ENV_SETUP.md Normal file
View File

@ -0,0 +1,134 @@
# Environment Variables
Environment variables are distributed in various files. Please refer them carefully.
## {PROJECT_FOLDER}/.env
File is available in the project root folder
```
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
```
## {PROJECT_FOLDER}/web/.env.example
```
# Enable/Disable OAUTH - default 0 for selfhosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
# Public boards deploy URL
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
```
## {PROJECT_FOLDER}/spaces/.env.example
```
# Flag to toggle OAuth
NEXT_PUBLIC_ENABLE_OAUTH=0
```
## {PROJECT_FOLDER}/apiserver/.env
```
# Backend
# Debug value for api server use it as 0 for production use
DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
# Error logs
SENTRY_DSN=""
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS="1"
EMAIL_USE_SSL="0"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Default Creds
DEFAULT_EMAIL="captain@plane.so"
DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
# Email Redirection URL
WEB_URL="http://localhost"
```
## Updates
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
- The naming convention for containers and images has been updated.
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
- The image name for Plane deployment has been changed to plane-space.

View File

@ -39,33 +39,35 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting). The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
## ⚡️ Quick start with Docker Compose ## ⚡️ Contributors Quick Start
### Docker Compose Setup ### Prerequisite
- Clone the repository Development system must have docker engine installed and running.
### Steps
Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
1. Switch to the code folder `cd plane`
1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
1. Open terminal and run `./setup.sh`
1. Open the code on VSCode or similar equivalent IDE
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
```bash ```bash
git clone https://github.com/makeplane/plane ./setup.sh
cd plane
chmod +x setup.sh
``` ```
- Run setup.sh You are ready to make changes to the code. Do not forget to refresh the browser (in case id does not auto-reload)
```bash Thats it!
./setup.sh http://localhost
```
> If running in a cloud env replace localhost with public facing IP address of the VM ## 🍙 Self Hosting
- Run Docker compose up For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
```bash
docker compose up -d
```
<strong>You can use the default email and password for your first login `captain@plane.so` and `password123`.</strong>
## 🚀 Features ## 🚀 Features

View File

@ -1,7 +1,7 @@
# Backend # Backend
# Debug value for api server use it as 0 for production use # Debug value for api server use it as 0 for production use
DEBUG=0 DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted" DJANGO_SETTINGS_MODULE="plane.settings.production"
# Error logs # Error logs
SENTRY_DSN="" SENTRY_DSN=""
@ -59,3 +59,14 @@ DEFAULT_PASSWORD="password123"
# SignUps # SignUps
ENABLE_SIGNUP="1" ENABLE_SIGNUP="1"
# Enable Email/Password Signup
ENABLE_EMAIL_PASSWORD="1"
# Enable Magic link Login
ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"

52
apiserver/Dockerfile.dev Normal file
View File

@ -0,0 +1,52 @@
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
RUN apk --no-cache add \
"bash~=5.2" \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"libffi-dev" \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers"
WORKDIR /code
COPY requirements.txt ./requirements.txt
ADD requirements ./requirements
RUN pip install -r requirements.txt --compile --no-cache-dir
RUN addgroup -S plane && \
adduser -S captain -G plane
RUN chown captain.plane /code
USER captain
# Add in Django deps and generate Django's static files
USER root
# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
# CMD [ "./bin/takeoff" ]

View File

@ -58,8 +58,17 @@ class WorkspaceEntityPermission(BasePermission):
if request.user.is_anonymous: if request.user.is_anonymous:
return False return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return WorkspaceMember.objects.filter( return WorkspaceMember.objects.filter(
member=request.user, workspace__slug=view.workspace_slug workspace__slug=view.workspace_slug,
member=request.user,
).exists()
return WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
).exists() ).exists()

View File

@ -1,5 +1,13 @@
from .base import BaseSerializer from .base import BaseSerializer
from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer from .user import (
UserSerializer,
UserLiteSerializer,
ChangePasswordSerializer,
ResetPasswordSerializer,
UserAdminLiteSerializer,
UserMeSerializer,
UserMeSettingsSerializer,
)
from .workspace import ( from .workspace import (
WorkSpaceSerializer, WorkSpaceSerializer,
WorkSpaceMemberSerializer, WorkSpaceMemberSerializer,
@ -8,9 +16,11 @@ from .workspace import (
WorkspaceLiteSerializer, WorkspaceLiteSerializer,
WorkspaceThemeSerializer, WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer, WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
) )
from .project import ( from .project import (
ProjectSerializer, ProjectSerializer,
ProjectListSerializer,
ProjectDetailSerializer, ProjectDetailSerializer,
ProjectMemberSerializer, ProjectMemberSerializer,
ProjectMemberInviteSerializer, ProjectMemberInviteSerializer,
@ -20,11 +30,16 @@ from .project import (
ProjectMemberLiteSerializer, ProjectMemberLiteSerializer,
ProjectDeployBoardSerializer, ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer, ProjectMemberAdminSerializer,
ProjectPublicMemberSerializer ProjectPublicMemberSerializer,
) )
from .state import StateSerializer, StateLiteSerializer from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleFavoriteSerializer,
CycleWriteSerializer,
)
from .asset import FileAssetSerializer from .asset import FileAssetSerializer
from .issue import ( from .issue import (
IssueCreateSerializer, IssueCreateSerializer,

View File

@ -3,3 +3,56 @@ from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer): class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True) id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
def __init__(self, *args, **kwargs):
# If 'fields' is provided in the arguments, remove it and store it separately.
# This is done so as not to pass this custom argument up to the superclass.
fields = kwargs.pop("fields", None)
# Call the initialization of the superclass.
super().__init__(*args, **kwargs)
# If 'fields' was provided, filter the fields of the serializer accordingly.
if fields is not None:
self.fields = self._filter_fields(fields)
def _filter_fields(self, fields):
"""
Adjust the serializer's fields based on the provided 'fields' list.
:param fields: List or dictionary specifying which fields to include in the serializer.
:return: The updated fields for the serializer.
"""
# Check each field_name in the provided fields.
for field_name in fields:
# If the field is a dictionary (indicating nested fields),
# loop through its keys and values.
if isinstance(field_name, dict):
for key, value in field_name.items():
# If the value of this nested field is a list,
# perform a recursive filter on it.
if isinstance(value, list):
self._filter_fields(self.fields[key], value)
# Create a list to store allowed fields.
allowed = []
for item in fields:
# If the item is a string, it directly represents a field's name.
if isinstance(item, str):
allowed.append(item)
# If the item is a dictionary, it represents a nested field.
# Add the key of this dictionary to the allowed list.
elif isinstance(item, dict):
allowed.append(list(item.keys())[0])
# Convert the current serializer's fields and the allowed fields to sets.
existing = set(self.fields)
allowed = set(allowed)
# Remove fields from the serializer that aren't in the 'allowed' list.
for field_name in (existing - allowed):
self.fields.pop(field_name)
return self.fields

View File

@ -12,10 +12,14 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite from plane.db.models import Cycle, CycleIssue, CycleFavorite
class CycleWriteSerializer(BaseSerializer):
class CycleWriteSerializer(BaseSerializer):
def validate(self, data): def validate(self, data):
if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date") raise serializers.ValidationError("Start date cannot exceed end date")
return data return data
@ -34,7 +38,6 @@ class CycleSerializer(BaseSerializer):
unstarted_issues = serializers.IntegerField(read_only=True) unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True) backlog_issues = serializers.IntegerField(read_only=True)
assignees = serializers.SerializerMethodField(read_only=True) assignees = serializers.SerializerMethodField(read_only=True)
labels = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True) total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True) completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True) started_estimates = serializers.IntegerField(read_only=True)
@ -42,7 +45,11 @@ class CycleSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project") project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data): def validate(self, data):
if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date") raise serializers.ValidationError("Start date cannot exceed end date")
return data return data
@ -50,11 +57,12 @@ class CycleSerializer(BaseSerializer):
members = [ members = [
{ {
"avatar": assignee.avatar, "avatar": assignee.avatar,
"first_name": assignee.first_name,
"display_name": assignee.display_name, "display_name": assignee.display_name,
"id": assignee.id, "id": assignee.id,
} }
for issue_cycle in obj.issue_cycle.all() for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees"
).all()
for assignee in issue_cycle.issue.assignees.all() for assignee in issue_cycle.issue.assignees.all()
] ]
# Use a set comprehension to return only the unique objects # Use a set comprehension to return only the unique objects
@ -65,24 +73,6 @@ class CycleSerializer(BaseSerializer):
return unique_list return unique_list
def get_labels(self, obj):
labels = [
{
"name": label.name,
"color": label.color,
"id": label.id,
}
for issue_cycle in obj.issue_cycle.all()
for label in issue_cycle.issue.labels.all()
]
# Use a set comprehension to return only the unique objects
unique_objects = {frozenset(item.items()) for item in labels}
# Convert the set back to a list of dictionaries
unique_list = [dict(item) for item in unique_objects]
return unique_list
class Meta: class Meta:
model = Cycle model = Cycle
fields = "__all__" fields = "__all__"

View File

@ -1,11 +1,8 @@
# Django imports
from django.db import IntegrityError
# Third party imports # Third party imports
from rest_framework import serializers from rest_framework import serializers
# Module imports # Module imports
from .base import BaseSerializer from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import ( from plane.db.models import (
@ -94,8 +91,33 @@ class ProjectLiteSerializer(BaseSerializer):
read_only_fields = fields read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True)
is_member = serializers.BooleanField(read_only=True)
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
members = serializers.SerializerMethodField()
def get_members(self, obj):
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
"id",
"member_id",
"member__display_name",
"member__avatar",
)
return project_members
class Meta:
model = Project
fields = "__all__"
class ProjectDetailSerializer(BaseSerializer): class ProjectDetailSerializer(BaseSerializer):
workspace = WorkSpaceSerializer(read_only=True) # workspace = WorkSpaceSerializer(read_only=True)
default_assignee = UserLiteSerializer(read_only=True) default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True) project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True) is_favorite = serializers.BooleanField(read_only=True)
@ -148,8 +170,6 @@ class ProjectIdentifierSerializer(BaseSerializer):
class ProjectFavoriteSerializer(BaseSerializer): class ProjectFavoriteSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta: class Meta:
model = ProjectFavorite model = ProjectFavorite
fields = "__all__" fields = "__all__"
@ -178,12 +198,12 @@ class ProjectDeployBoardSerializer(BaseSerializer):
fields = "__all__" fields = "__all__"
read_only_fields = [ read_only_fields = [
"workspace", "workspace",
"project", "anchor", "project",
"anchor",
] ]
class ProjectPublicMemberSerializer(BaseSerializer): class ProjectPublicMemberSerializer(BaseSerializer):
class Meta: class Meta:
model = ProjectPublicMember model = ProjectPublicMember
fields = "__all__" fields = "__all__"

View File

@ -3,7 +3,7 @@ from rest_framework import serializers
# Module import # Module import
from .base import BaseSerializer from .base import BaseSerializer
from plane.db.models import User from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer): class UserSerializer(BaseSerializer):
@ -33,6 +33,81 @@ class UserSerializer(BaseSerializer):
return bool(obj.first_name) or bool(obj.last_name) return bool(obj.first_name) or bool(obj.last_name)
class UserMeSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"avatar",
"cover_image",
"date_joined",
"display_name",
"email",
"first_name",
"last_name",
"is_active",
"is_bot",
"is_email_verified",
"is_managed",
"is_onboarded",
"is_tour_completed",
"mobile_number",
"role",
"onboarding_step",
"user_timezone",
"username",
"theme",
"last_workspace_id",
]
read_only_fields = fields
class UserMeSettingsSerializer(BaseSerializer):
workspace = serializers.SerializerMethodField()
class Meta:
model = User
fields = [
"id",
"email",
"workspace",
]
read_only_fields = fields
def get_workspace(self, obj):
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.get(
pk=obj.last_workspace_id, workspace_member__member=obj.id
)
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug,
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"invites": workspace_invites,
}
else:
fallback_workspace = (
Workspace.objects.filter(workspace_member__member_id=obj.id)
.order_by("created_at")
.first()
)
return {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
class UserLiteSerializer(BaseSerializer): class UserLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = User model = User
@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer):
class UserAdminLiteSerializer(BaseSerializer): class UserAdminLiteSerializer(BaseSerializer):
class Meta: class Meta:
model = User model = User
fields = [ fields = [

View File

@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer):
fields = "__all__" fields = "__all__"
class WorkspaceMemberMeSerializer(BaseSerializer):
class Meta:
model = WorkspaceMember
fields = "__all__"
class WorkspaceMemberAdminSerializer(BaseSerializer): class WorkspaceMemberAdminSerializer(BaseSerializer):
member = UserAdminLiteSerializer(read_only=True) member = UserAdminLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True)

View File

@ -0,0 +1,50 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .release_note import urlpatterns as release_note_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .unsplash import urlpatterns as unsplash_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
*authentication_urls,
*configuration_urls,
*cycle_urls,
*estimate_urls,
*gpt_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
*issue_urls,
*module_urls,
*notification_urls,
*page_urls,
*project_urls,
*public_board_urls,
*release_note_urls,
*search_urls,
*state_urls,
*unsplash_urls,
*user_urls,
*view_urls,
*workspace_urls,
]

View File

@ -0,0 +1,46 @@
from django.urls import path
from plane.api.views import (
AnalyticsEndpoint,
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
DefaultAnalyticsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/analytics/",
AnalyticsEndpoint.as_view(),
name="plane-analytics",
),
path(
"workspaces/<str:slug>/analytic-view/",
AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
name="analytic-view",
),
path(
"workspaces/<str:slug>/analytic-view/<uuid:pk>/",
AnalyticViewViewset.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="analytic-view",
),
path(
"workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
SavedAnalyticEndpoint.as_view(),
name="saved-analytic-view",
),
path(
"workspaces/<str:slug>/export-analytics/",
ExportAnalyticsEndpoint.as_view(),
name="export-analytics",
),
path(
"workspaces/<str:slug>/default-analytics/",
DefaultAnalyticsEndpoint.as_view(),
name="default-analytics",
),
]

View File

@ -0,0 +1,31 @@
from django.urls import path
from plane.api.views import (
FileAssetEndpoint,
UserAssetsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/file-assets/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"users/file-assets/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
path(
"users/file-assets/<str:asset_key>/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
]

View File

@ -0,0 +1,68 @@
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
from plane.api.views import (
# Authentication
SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
MagicSignInGenerateEndpoint,
OauthEndpoint,
## End Authentication
# Auth Extended
ForgotPasswordEndpoint,
VerifyEmailEndpoint,
ResetPasswordEndpoint,
RequestEmailVerificationEndpoint,
ChangePasswordEndpoint,
## End Auth Extender
# API Tokens
ApiTokenEndpoint,
## End API Tokens
)
urlpatterns = [
# Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path(
"request-email-verify/",
RequestEmailVerificationEndpoint.as_view(),
name="request-reset-email",
),
# Password Manipulation
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"reset-password/<uidb64>/<token>/",
ResetPasswordEndpoint.as_view(),
name="password-reset",
),
path(
"forgot-password/",
ForgotPasswordEndpoint.as_view(),
name="forgot-password",
),
# API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
## End API Tokens
]

View File

@ -0,0 +1,12 @@
"""URL route exposing instance configuration."""
from django.urls import path

from plane.api.views import ConfigurationEndpoint

urlpatterns = [
    path("configs/", ConfigurationEndpoint.as_view(), name="configuration"),
]

View File

@ -0,0 +1,87 @@
# URL routes for cycles: CRUD, cycle issues, date checks, user favorites,
# and transferring issues between cycles.
from django.urls import path
from plane.api.views import (
    CycleViewSet,
    CycleIssueViewSet,
    CycleDateCheckEndpoint,
    CycleFavoriteViewSet,
    TransferCycleIssueEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
        CycleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
        CycleDateCheckEndpoint.as_view(),
        name="project-cycle-date",
    ),
    # Cycle favorites
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
        CycleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
        CycleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueEndpoint.as_view(),
        name="transfer-issues",
    ),
]

View File

@ -0,0 +1,37 @@
# URL routes for project estimates and bulk estimate-point management.
from django.urls import path
from plane.api.views import (
    ProjectEstimatePointEndpoint,
    BulkEstimatePointEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
        ProjectEstimatePointEndpoint.as_view(),
        name="project-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
        BulkEstimatePointEndpoint.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="bulk-create-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
        BulkEstimatePointEndpoint.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        # NOTE(review): same route name as the list/create route above — Django
        # allows it, but reverse() resolves only the last registered pattern.
        name="bulk-create-estimate-points",
    ),
]

View File

@ -0,0 +1,13 @@
# URL route for the project AI assistant (GPT integration) endpoint.
from django.urls import path
from plane.api.views import GPTIntegrationEndpoint

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
        GPTIntegrationEndpoint.as_view(),
        # NOTE(review): route name "importer" looks copy-pasted from the importer
        # urls and collides with them — confirm reverse() usage before renaming.
        name="importer",
    ),
]

View File

@ -0,0 +1,37 @@
# URL routes for external-service importers: import summaries, starting and
# listing imports, and updating import status.
from django.urls import path
from plane.api.views import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/importers/<str:service>/",
        ServiceIssueImportSummaryEndpoint.as_view(),
        name="importer-summary",
    ),
    path(
        "workspaces/<str:slug>/projects/importers/<str:service>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
        UpdateServiceImportStatusEndpoint.as_view(),
        name="importer-status",
    ),
]

View File

@ -0,0 +1,53 @@
# URL routes for project inboxes and the issues filed into them.
from django.urls import path
from plane.api.views import (
    InboxViewSet,
    InboxIssueViewSet,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        InboxViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        InboxViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
]

View File

@ -0,0 +1,150 @@
# URL routes for third-party integrations: generic integration CRUD,
# workspace-level integration installs, and GitHub / Slack sync endpoints.
from django.urls import path
from plane.api.views import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
    SlackProjectSyncViewSet,
)

urlpatterns = [
    path(
        "integrations/",
        IntegrationViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="integrations",
    ),
    path(
        "integrations/<uuid:pk>/",
        IntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="workspace-integrations",
    ),
    # Github Integrations
    # NOTE(review): the GitHub and Slack routes below have no name= kwarg, so
    # they cannot be reverse()d — consider naming them for consistency.
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
        GithubRepositoriesEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
        GithubIssueSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
        BulkCreateGithubIssueSyncEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
        GithubIssueSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
        GithubCommentSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
        GithubCommentSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    ## End Github Integrations
    # Slack Integration
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
        SlackProjectSyncViewSet.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
    ),
    ## End Slack Integration
]

View File

@ -0,0 +1,332 @@
# URL routes for issues and everything attached to them: labels, links,
# attachments, activity, comments, subscribers, reactions, properties,
# archives, relations, and drafts.
from django.urls import path
from plane.api.views import (
    IssueViewSet,
    LabelViewSet,
    BulkCreateIssueLabelsEndpoint,
    BulkDeleteIssuesEndpoint,
    BulkImportIssuesEndpoint,
    UserWorkSpaceIssues,
    SubIssuesEndpoint,
    IssueLinkViewSet,
    IssueAttachmentEndpoint,
    ExportIssuesEndpoint,
    IssueActivityEndpoint,
    IssueCommentViewSet,
    IssueSubscriberViewSet,
    IssueReactionViewSet,
    CommentReactionViewSet,
    IssuePropertyViewSet,
    IssueArchiveViewSet,
    IssueRelationViewSet,
    IssueDraftViewSet,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
        LabelViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
        LabelViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
        BulkCreateIssueLabelsEndpoint.as_view(),
        name="project-bulk-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/my-issues/",
        UserWorkSpaceIssues.as_view(),
        name="workspace-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
        SubIssuesEndpoint.as_view(),
        name="sub-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
        IssueLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
        IssueLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),
        name="export-issues",
    ),
    ## End Issues
    ## Issue Activity
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
        IssueActivityEndpoint.as_view(),
        name="project-issue-history",
    ),
    ## Issue Activity
    ## IssueComments
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-comment",
    ),
    ## End IssueComments
    # Issue Subscribers
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
        IssueSubscriberViewSet.as_view({"delete": "destroy"}),
        name="project-issue-subscribers",
    ),
    # Current user's own subscription to an issue.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "subscription_status",
                "post": "subscribe",
                "delete": "unsubscribe",
            }
        ),
        name="project-issue-subscribers",
    ),
    ## End Issue Subscribers
    # Issue Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-reactions",
    ),
    ## End Issue Reactions
    # Comment Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
        IssuePropertyViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-roadmap",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/<uuid:pk>/",
        IssuePropertyViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-roadmap",
    ),
    ## IssueProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
        IssueArchiveViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "post": "unarchive",
            }
        ),
        name="project-issue-archive",
    ),
    ## End Issue Archives
    ## Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        IssueRelationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
        IssueRelationViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-relation",
    ),
    ## End Issue Relation
    ## Issue Drafts
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
        IssueDraftViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-draft",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
        IssueDraftViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-draft",
    ),
]

View File

@ -0,0 +1,104 @@
# URL routes for modules: CRUD, module issues, module links, favorites,
# and bulk import.
from django.urls import path
from plane.api.views import (
    ModuleViewSet,
    ModuleIssueViewSet,
    ModuleLinkViewSet,
    ModuleFavoriteViewSet,
    BulkImportModulesEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
        ModuleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
        ModuleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
        ModuleLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-module-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
        ModuleLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-module-links",
    ),
    # Module favorites
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
        ModuleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
        ModuleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
        BulkImportModulesEndpoint.as_view(),
        name="bulk-modules-create",
    ),
]

View File

@ -0,0 +1,66 @@
# URL routes for user notifications: list/detail, read/unread, archive,
# unread count, and mark-all-read.
from django.urls import path
from plane.api.views import (
    NotificationViewSet,
    UnreadNotificationEndpoint,
    MarkAllReadNotificationViewSet,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/users/notifications/",
        NotificationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/",
        NotificationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="notifications",
    ),
    # POST marks read, DELETE marks unread.
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
        NotificationViewSet.as_view(
            {
                "post": "mark_read",
                "delete": "mark_unread",
            }
        ),
        name="notifications",
    ),
    # POST archives, DELETE unarchives.
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
        NotificationViewSet.as_view(
            {
                "post": "archive",
                "delete": "unarchive",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/unread/",
        UnreadNotificationEndpoint.as_view(),
        name="unread-notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/mark-all-read/",
        MarkAllReadNotificationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="mark-all-read-notifications",
    ),
]

View File

@ -0,0 +1,79 @@
# URL routes for pages: CRUD, page blocks, favorites, and creating an issue
# from a page block.
from django.urls import path
from plane.api.views import (
    PageViewSet,
    PageBlockViewSet,
    PageFavoriteViewSet,
    CreateIssueFromPageBlockEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
        PageViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
        PageViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
        PageBlockViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-page-blocks",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
        PageBlockViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-page-blocks",
    ),
    # Page favorites
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
        PageFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
        PageFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
        CreateIssueFromPageBlockEndpoint.as_view(),
        name="page-block-issues",
    ),
]

View File

@ -0,0 +1,144 @@
# URL routes for projects: CRUD, identifiers, invitations, membership,
# teams, per-user views, favorites, leaving, and public cover images.
from django.urls import path
from plane.api.views import (
    ProjectViewSet,
    InviteProjectEndpoint,
    ProjectMemberViewSet,
    ProjectMemberEndpoint,
    ProjectMemberInvitationsViewset,
    ProjectMemberUserEndpoint,
    AddMemberToProjectEndpoint,
    ProjectJoinEndpoint,
    AddTeamToProjectEndpoint,
    ProjectUserViewsEndpoint,
    ProjectIdentifierEndpoint,
    ProjectFavoritesViewSet,
    LeaveProjectEndpoint,
    ProjectPublicCoverImagesEndpoint
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/",
        ProjectViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:pk>/",
        ProjectViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/project-identifiers/",
        ProjectIdentifierEndpoint.as_view(),
        name="project-identifiers",
    ),
    # Membership and invitations
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
        InviteProjectEndpoint.as_view(),
        name="invite-project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/",
        ProjectMemberViewSet.as_view({"get": "list"}),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
        ProjectMemberViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
        ProjectMemberEndpoint.as_view(),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
        AddMemberToProjectEndpoint.as_view(),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/join/",
        ProjectJoinEndpoint.as_view(),
        name="project-join",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
        AddTeamToProjectEndpoint.as_view(),
        name="projects",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
        ProjectMemberInvitationsViewset.as_view({"get": "list"}),
        name="project-member-invite",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
        ProjectMemberInvitationsViewset.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="project-member-invite",
    ),
    # Per-user project view preferences
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
        ProjectUserViewsEndpoint.as_view(),
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
        ProjectMemberUserEndpoint.as_view(),
        name="project-member-view",
    ),
    # Project favorites
    path(
        "workspaces/<str:slug>/user-favorite-projects/",
        ProjectFavoritesViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-favorite",
    ),
    path(
        "workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
        ProjectFavoritesViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-favorite",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
        LeaveProjectEndpoint.as_view(),
        name="leave-project",
    ),
    path(
        "project-covers/",
        ProjectPublicCoverImagesEndpoint.as_view(),
        name="project-covers",
    ),
]

View File

@ -0,0 +1,151 @@
# URL routes for project deploy boards: authenticated board management plus
# the unauthenticated "public/" routes used by the deploy (space) app.
from django.urls import path
from plane.api.views import (
    ProjectDeployBoardViewSet,
    ProjectDeployBoardPublicSettingsEndpoint,
    ProjectIssuesPublicEndpoint,
    IssueRetrievePublicEndpoint,
    IssueCommentPublicViewSet,
    IssueReactionPublicViewSet,
    CommentReactionPublicViewSet,
    InboxIssuePublicViewSet,
    IssueVotePublicViewSet,
    WorkspaceProjectDeployBoardEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-deploy-board",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-deploy-board",
    ),
    # Public (unauthenticated) board routes
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
        ProjectDeployBoardPublicSettingsEndpoint.as_view(),
        name="project-deploy-board-settings",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
        ProjectIssuesPublicEndpoint.as_view(),
        name="project-deploy-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
        IssueRetrievePublicEndpoint.as_view(),
        name="workspace-project-boards",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
        IssueVotePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
                "delete": "destroy",
            }
        ),
        name="issue-vote-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/",
        WorkspaceProjectDeployBoardEndpoint.as_view(),
        name="workspace-project-boards",
    ),
]

View File

@ -0,0 +1,13 @@
"""URL route exposing release notes."""
from django.urls import path

from plane.api.views import ReleaseNotesEndpoint

urlpatterns = [
    path("release-notes/", ReleaseNotesEndpoint.as_view(), name="release-notes"),
]

View File

@ -0,0 +1,21 @@
"""URL routes for workspace-wide search and project-scoped issue search."""
from django.urls import path

from plane.api.views import GlobalSearchEndpoint, IssueSearchEndpoint

urlpatterns = [
    path(
        "workspaces/<str:slug>/search/",
        GlobalSearchEndpoint.as_view(),
        name="global-search",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/search-issues/",
        IssueSearchEndpoint.as_view(),
        name="project-issue-search",
    ),
]

View File

@ -0,0 +1,30 @@
"""URL routes for project states (list/create and detail CRUD)."""
from django.urls import path

from plane.api.views import StateViewSet

# Action maps for the two state routes.
_STATE_LIST_ACTIONS = {"get": "list", "post": "create"}
_STATE_DETAIL_ACTIONS = {
    "get": "retrieve",
    "put": "update",
    "patch": "partial_update",
    "delete": "destroy",
}

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/",
        StateViewSet.as_view(_STATE_LIST_ACTIONS),
        name="project-states",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
        StateViewSet.as_view(_STATE_DETAIL_ACTIONS),
        name="project-state",
    ),
]

View File

@ -0,0 +1,13 @@
"""URL route for the Unsplash endpoint."""
from django.urls import path

from plane.api.views import UnsplashEndpoint

urlpatterns = [
    path("unsplash/", UnsplashEndpoint.as_view(), name="unsplash"),
]

View File

@ -0,0 +1,113 @@
# URL routes for the current user: profile, settings, onboarding/tour flags,
# activity, workspaces, workspace/project invitations, and dashboard graphs.
#
# Fix: `UserWorkspaceInvitationsEndpoint` was imported twice in the original
# import list; the duplicate has been removed (no behavior change — Python
# simply rebound the same name).
from django.urls import path
from plane.api.views import (
    ## User
    UserEndpoint,
    UpdateUserOnBoardedEndpoint,
    UpdateUserTourCompletedEndpoint,
    UserActivityEndpoint,
    ChangePasswordEndpoint,
    ## End User
    ## Workspaces
    UserWorkspaceInvitationsEndpoint,
    UserWorkSpacesEndpoint,
    JoinWorkspaceEndpoint,
    UserWorkspaceInvitationEndpoint,
    UserActivityGraphEndpoint,
    UserIssueCompletedGraphEndpoint,
    UserWorkspaceDashboardEndpoint,
    UserProjectInvitationsViewset,
    ## End Workspaces
)

urlpatterns = [
    # User Profile
    path(
        "users/me/",
        UserEndpoint.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="users",
    ),
    path(
        "users/me/settings/",
        UserEndpoint.as_view(
            {
                "get": "retrieve_user_settings",
            }
        ),
        name="users",
    ),
    path(
        "users/me/change-password/",
        ChangePasswordEndpoint.as_view(),
        name="change-password",
    ),
    path(
        "users/me/onboard/",
        UpdateUserOnBoardedEndpoint.as_view(),
        name="user-onboard",
    ),
    path(
        "users/me/tour-completed/",
        UpdateUserTourCompletedEndpoint.as_view(),
        name="user-tour",
    ),
    path(
        "users/workspaces/<str:slug>/activities/",
        UserActivityEndpoint.as_view(),
        name="user-activities",
    ),
    # user workspaces
    path(
        "users/me/workspaces/",
        UserWorkSpacesEndpoint.as_view(),
        name="user-workspace",
    ),
    # user workspace invitations
    path(
        "users/me/invitations/workspaces/",
        UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
        name="user-workspace-invitations",
    ),
    # user workspace invitation
    path(
        "users/me/invitations/<uuid:pk>/",
        UserWorkspaceInvitationEndpoint.as_view(
            {
                "get": "retrieve",
            }
        ),
        name="user-workspace-invitation",
    ),
    # user join workspace
    # User Graphs
    path(
        "users/me/workspaces/<str:slug>/activity-graph/",
        UserActivityGraphEndpoint.as_view(),
        name="user-activity-graph",
    ),
    path(
        "users/me/workspaces/<str:slug>/issues-completed-graph/",
        UserIssueCompletedGraphEndpoint.as_view(),
        name="completed-graph",
    ),
    path(
        "users/me/workspaces/<str:slug>/dashboard/",
        UserWorkspaceDashboardEndpoint.as_view(),
        name="user-workspace-dashboard",
    ),
    ## End User Graph
    path(
        "users/me/invitations/workspaces/<str:slug>/<uuid:pk>/join/",
        JoinWorkspaceEndpoint.as_view(),
        name="user-join-workspace",
    ),
    # user project invitations
    path(
        "users/me/invitations/projects/",
        UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
        name="user-project-invitations",
    ),
]

View File

@ -0,0 +1,85 @@
# URL routes for issue views: project-scoped views, workspace-level global
# views, global-view issues, and view favorites.
from django.urls import path
from plane.api.views import (
    IssueViewViewSet,
    GlobalViewViewSet,
    GlobalViewIssuesViewSet,
    IssueViewFavoriteViewSet,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/views/",
        IssueViewViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:pk>/",
        IssueViewViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-view",
    ),
    # Workspace-level (global) views
    path(
        "workspaces/<str:slug>/views/",
        GlobalViewViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/views/<uuid:pk>/",
        GlobalViewViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/issues/",
        GlobalViewIssuesViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="global-view-issues",
    ),
    # View favorites
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
        IssueViewFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/<uuid:view_id>/",
        IssueViewFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-view",
    ),
]

View File

@ -0,0 +1,182 @@
from django.urls import path
from plane.api.views import (
WorkSpaceViewSet,
InviteWorkspaceEndpoint,
WorkSpaceMemberViewSet,
WorkspaceMembersEndpoint,
WorkspaceInvitationsViewset,
WorkspaceMemberUserEndpoint,
WorkspaceMemberUserViewsEndpoint,
WorkSpaceAvailabilityCheckEndpoint,
TeamMemberViewSet,
UserLastProjectWithWorkspaceEndpoint,
WorkspaceThemeViewSet,
WorkspaceUserProfileStatsEndpoint,
WorkspaceUserActivityEndpoint,
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
LeaveWorkspaceEndpoint,
)
urlpatterns = [
path(
"workspace-slug-check/",
WorkSpaceAvailabilityCheckEndpoint.as_view(),
name="workspace-availability",
),
path(
"workspaces/",
WorkSpaceViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/",
WorkSpaceViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/invite/",
InviteWorkspaceEndpoint.as_view(),
name="invite-workspace",
),
path(
"workspaces/<str:slug>/invitations/",
WorkspaceInvitationsViewset.as_view({"get": "list"}),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/invitations/<uuid:pk>/",
WorkspaceInvitationsViewset.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/members/",
WorkSpaceMemberViewSet.as_view({"get": "list"}),
name="workspace-member",
),
path(
"workspaces/<str:slug>/members/<uuid:pk>/",
WorkSpaceMemberViewSet.as_view(
{
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-member",
),
path(
"workspaces/<str:slug>/workspace-members/",
WorkspaceMembersEndpoint.as_view(),
name="workspace-members",
),
path(
"workspaces/<str:slug>/teams/",
TeamMemberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-team-members",
),
path(
"workspaces/<str:slug>/teams/<uuid:pk>/",
TeamMemberViewSet.as_view(
{
"put": "update",
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-team-members",
),
path(
"users/last-visited-workspace/",
UserLastProjectWithWorkspaceEndpoint.as_view(),
name="workspace-project-details",
),
path(
"workspaces/<str:slug>/workspace-members/me/",
WorkspaceMemberUserEndpoint.as_view(),
name="workspace-member-details",
),
path(
"workspaces/<str:slug>/workspace-views/",
WorkspaceMemberUserViewsEndpoint.as_view(),
name="workspace-member-views-details",
),
path(
"workspaces/<str:slug>/workspace-themes/",
WorkspaceThemeViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/workspace-themes/<uuid:pk>/",
WorkspaceThemeViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/user-stats/<uuid:user_id>/",
WorkspaceUserProfileStatsEndpoint.as_view(),
name="workspace-user-stats",
),
path(
"workspaces/<str:slug>/user-activity/<uuid:user_id>/",
WorkspaceUserActivityEndpoint.as_view(),
name="workspace-user-activity",
),
path(
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
WorkspaceUserProfileEndpoint.as_view(),
name="workspace-user-profile-page",
),
path(
"workspaces/<str:slug>/user-issues/<uuid:user_id>/",
WorkspaceUserProfileIssuesEndpoint.as_view(),
name="workspace-user-profile-issues",
),
path(
"workspaces/<str:slug>/labels/",
WorkspaceLabelsEndpoint.as_view(),
name="workspace-labels",
),
path(
"workspaces/<str:slug>/members/leave/",
LeaveWorkspaceEndpoint.as_view(),
name="leave-workspace-members",
),
]

View File

@ -1,5 +1,6 @@
from django.urls import path from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
# Create your urls here. # Create your urls here.
@ -70,6 +71,7 @@ from plane.api.views import (
ProjectIdentifierEndpoint, ProjectIdentifierEndpoint,
ProjectFavoritesViewSet, ProjectFavoritesViewSet,
LeaveProjectEndpoint, LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
## End Projects ## End Projects
# Issues # Issues
IssueViewSet, IssueViewSet,
@ -105,7 +107,6 @@ from plane.api.views import (
GlobalViewViewSet, GlobalViewViewSet,
GlobalViewIssuesViewSet, GlobalViewIssuesViewSet,
IssueViewViewSet, IssueViewViewSet,
ViewIssuesEndpoint,
IssueViewFavoriteViewSet, IssueViewFavoriteViewSet,
## End Views ## End Views
# Cycles # Cycles
@ -150,12 +151,11 @@ from plane.api.views import (
GlobalSearchEndpoint, GlobalSearchEndpoint,
IssueSearchEndpoint, IssueSearchEndpoint,
## End Search ## End Search
# Gpt # External
GPTIntegrationEndpoint, GPTIntegrationEndpoint,
## End Gpt
# Release Notes
ReleaseNotesEndpoint, ReleaseNotesEndpoint,
## End Release Notes UnsplashEndpoint,
## End External
# Inbox # Inbox
InboxViewSet, InboxViewSet,
InboxIssueViewSet, InboxIssueViewSet,
@ -186,9 +186,15 @@ from plane.api.views import (
## Exporter ## Exporter
ExportIssuesEndpoint, ExportIssuesEndpoint,
## End Exporter ## End Exporter
# Configuration
ConfigurationEndpoint,
## End Configuration
) )
#TODO: Delete this file
# This url file has been deprecated use apiserver/plane/urls folder to create new urls
urlpatterns = [ urlpatterns = [
# Social Auth # Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"), path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
@ -201,6 +207,7 @@ urlpatterns = [
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
), ),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
# Email verification # Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path( path(
@ -227,6 +234,15 @@ urlpatterns = [
), ),
name="users", name="users",
), ),
path(
"users/me/settings/",
UserEndpoint.as_view(
{
"get": "retrieve_user_settings",
}
),
name="users",
),
path( path(
"users/me/change-password/", "users/me/change-password/",
ChangePasswordEndpoint.as_view(), ChangePasswordEndpoint.as_view(),
@ -554,6 +570,7 @@ urlpatterns = [
"workspaces/<str:slug>/user-favorite-projects/", "workspaces/<str:slug>/user-favorite-projects/",
ProjectFavoritesViewSet.as_view( ProjectFavoritesViewSet.as_view(
{ {
"get": "list",
"post": "create", "post": "create",
} }
), ),
@ -573,6 +590,11 @@ urlpatterns = [
LeaveProjectEndpoint.as_view(), LeaveProjectEndpoint.as_view(),
name="project", name="project",
), ),
path(
"project-covers/",
ProjectPublicCoverImagesEndpoint.as_view(),
name="project-covers",
),
# End Projects # End Projects
# States # States
path( path(
@ -649,11 +671,6 @@ urlpatterns = [
), ),
name="project-view", name="project-view",
), ),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:view_id>/issues/",
ViewIssuesEndpoint.as_view(),
name="project-view-issues",
),
path( path(
"workspaces/<str:slug>/views/", "workspaces/<str:slug>/views/",
GlobalViewViewSet.as_view( GlobalViewViewSet.as_view(
@ -1446,20 +1463,23 @@ urlpatterns = [
name="project-issue-search", name="project-issue-search",
), ),
## End Search ## End Search
# Gpt # External
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/", "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(), GPTIntegrationEndpoint.as_view(),
name="importer", name="importer",
), ),
## End Gpt
# Release Notes
path( path(
"release-notes/", "release-notes/",
ReleaseNotesEndpoint.as_view(), ReleaseNotesEndpoint.as_view(),
name="release-notes", name="release-notes",
), ),
## End Release Notes path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="release-notes",
),
## End External
# Inbox # Inbox
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/", "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
@ -1728,4 +1748,11 @@ urlpatterns = [
name="workspace-project-boards", name="workspace-project-boards",
), ),
## End Public Boards ## End Public Boards
# Configuration
path(
"configs/",
ConfigurationEndpoint.as_view(),
name="configuration",
),
## End Configuration
] ]

View File

@ -17,6 +17,7 @@ from .project import (
ProjectMemberEndpoint, ProjectMemberEndpoint,
WorkspaceProjectDeployBoardEndpoint, WorkspaceProjectDeployBoardEndpoint,
LeaveProjectEndpoint, LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
) )
from .user import ( from .user import (
UserEndpoint, UserEndpoint,
@ -56,7 +57,7 @@ from .workspace import (
LeaveWorkspaceEndpoint, LeaveWorkspaceEndpoint,
) )
from .state import StateViewSet from .state import StateViewSet
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet
from .cycle import ( from .cycle import (
CycleViewSet, CycleViewSet,
CycleIssueViewSet, CycleIssueViewSet,
@ -147,16 +148,13 @@ from .page import (
from .search import GlobalSearchEndpoint, IssueSearchEndpoint from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .gpt import GPTIntegrationEndpoint from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
from .estimate import ( from .estimate import (
ProjectEstimatePointEndpoint, ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint, BulkEstimatePointEndpoint,
) )
from .release import ReleaseNotesEndpoint
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
from .analytic import ( from .analytic import (
@ -170,3 +168,5 @@ from .analytic import (
from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
from .exporter import ExportIssuesEndpoint from .exporter import ExportIssuesEndpoint
from .config import ConfigurationEndpoint

View File

@ -1,10 +1,5 @@
# Django imports # Django imports
from django.db.models import ( from django.db.models import Count, Sum, F, Q
Count,
Sum,
F,
Q
)
from django.db.models.functions import ExtractMonth from django.db.models.functions import ExtractMonth
# Third party imports # Third party imports
@ -28,82 +23,156 @@ class AnalyticsEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
x_axis = request.GET.get("x_axis", False) x_axis = request.GET.get("x_axis", False)
y_axis = request.GET.get("y_axis", False) y_axis = request.GET.get("y_axis", False)
segment = request.GET.get("segment", False)
if not x_axis or not y_axis: valid_xaxis_segment = [
"state_id",
"state__group",
"labels__id",
"assignees__id",
"estimate_point",
"issue_cycle__cycle_id",
"issue_module__module_id",
"priority",
"start_date",
"target_date",
"created_at",
"completed_at",
]
valid_yaxis = [
"issue_count",
"estimate",
]
# Check for x-axis and y-axis as thery are required parameters
if (
not x_axis
or not y_axis
or not x_axis in valid_xaxis_segment
or not y_axis in valid_yaxis
):
return Response( return Response(
{"error": "x-axis and y-axis dimensions are required"}, {
"error": "x-axis and y-axis dimensions are required and the values should be valid"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
segment = request.GET.get("segment", False) # If segment is present it cannot be same as x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
return Response(
{
"error": "Both segment and x axis cannot be same and segment should be valid"
},
status=status.HTTP_400_BAD_REQUEST,
)
# Additional filters that need to be applied
filters = issue_filters(request.GET, "GET") filters = issue_filters(request.GET, "GET")
# Get the issues for the workspace with the additional filters applied
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
# Get the total issue count
total_issues = queryset.count() total_issues = queryset.count()
# Build the graph payload
distribution = build_graph_plot( distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
) )
colors = dict() state_details = {}
if x_axis in ["state__name", "state__group"] or segment in [ if x_axis in ["state_id"] or segment in ["state_id"]:
"state__name", state_details = (
"state__group", Issue.issue_objects.filter(
]: workspace__slug=slug,
if x_axis in ["state__name", "state__group"]: **filters,
key = "name" if x_axis == "state__name" else "group" )
else: .distinct("state_id")
key = "name" if segment == "state__name" else "group" .order_by("state_id")
.values("state_id", "state__name", "state__color")
colors = (
State.objects.filter(
~Q(name="Triage"),
workspace__slug=slug, project_id__in=filters.get("project__in")
).values(key, "color")
if filters.get("project__in", False)
else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
) )
if x_axis in ["labels__name"] or segment in ["labels__name"]: label_details = {}
colors = ( if x_axis in ["labels__id"] or segment in ["labels__id"]:
Label.objects.filter( label_details = (
workspace__slug=slug, project_id__in=filters.get("project__in") Issue.objects.filter(
).values("name", "color") workspace__slug=slug, **filters, labels__id__isnull=False
if filters.get("project__in", False)
else Label.objects.filter(workspace__slug=slug).values(
"name", "color"
) )
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
) )
assignee_details = {} assignee_details = {}
if x_axis in ["assignees__id"] or segment in ["assignees__id"]: if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = ( assignee_details = (
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False
)
.order_by("assignees__id") .order_by("assignees__id")
.distinct("assignees__id") .distinct("assignees__id")
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") .values(
"assignees__avatar",
"assignees__display_name",
"assignees__first_name",
"assignees__last_name",
"assignees__id",
)
) )
cycle_details = {}
if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
cycle_details = (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_cycle__cycle_id__isnull=False,
)
.distinct("issue_cycle__cycle_id")
.order_by("issue_cycle__cycle_id")
.values(
"issue_cycle__cycle_id",
"issue_cycle__cycle__name",
)
)
module_details = {}
if x_axis in ["issue_module__module_id"] or segment in [
"issue_module__module_id"
]:
module_details = (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_module__module_id__isnull=False,
)
.distinct("issue_module__module_id")
.order_by("issue_module__module_id")
.values(
"issue_module__module_id",
"issue_module__module__name",
)
)
return Response( return Response(
{ {
"total": total_issues, "total": total_issues,
"distribution": distribution, "distribution": distribution,
"extras": {"colors": colors, "assignee_details": assignee_details}, "extras": {
"state_details": state_details,
"assignee_details": assignee_details,
"label_details": label_details,
"cycle_details": cycle_details,
"module_details": module_details,
},
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class AnalyticViewViewset(BaseViewSet): class AnalyticViewViewset(BaseViewSet):
permission_classes = [ permission_classes = [
@ -128,10 +197,7 @@ class SavedAnalyticEndpoint(BaseAPIView):
] ]
def get(self, request, slug, analytic_id): def get(self, request, slug, analytic_id):
try: analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)
analytic_view = AnalyticView.objects.get(
pk=analytic_id, workspace__slug=slug
)
filter = analytic_view.query filter = analytic_view.query
queryset = Issue.issue_objects.filter(**filter) queryset = Issue.issue_objects.filter(**filter)
@ -155,18 +221,6 @@ class SavedAnalyticEndpoint(BaseAPIView):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except AnalyticView.DoesNotExist:
return Response(
{"error": "Analytic View Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ExportAnalyticsEndpoint(BaseAPIView): class ExportAnalyticsEndpoint(BaseAPIView):
permission_classes = [ permission_classes = [
@ -174,13 +228,50 @@ class ExportAnalyticsEndpoint(BaseAPIView):
] ]
def post(self, request, slug): def post(self, request, slug):
try:
x_axis = request.data.get("x_axis", False) x_axis = request.data.get("x_axis", False)
y_axis = request.data.get("y_axis", False) y_axis = request.data.get("y_axis", False)
segment = request.data.get("segment", False)
if not x_axis or not y_axis: valid_xaxis_segment = [
"state_id",
"state__group",
"labels__id",
"assignees__id",
"estimate_point",
"issue_cycle__cycle_id",
"issue_module__module_id",
"priority",
"start_date",
"target_date",
"created_at",
"completed_at",
]
valid_yaxis = [
"issue_count",
"estimate",
]
# Check for x-axis and y-axis as thery are required parameters
if (
not x_axis
or not y_axis
or not x_axis in valid_xaxis_segment
or not y_axis in valid_yaxis
):
return Response( return Response(
{"error": "x-axis and y-axis dimensions are required"}, {
"error": "x-axis and y-axis dimensions are required and the values should be valid"
},
status=status.HTTP_400_BAD_REQUEST,
)
# If segment is present it cannot be same as x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
return Response(
{
"error": "Both segment and x axis cannot be same and segment should be valid"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -194,12 +285,6 @@ class ExportAnalyticsEndpoint(BaseAPIView):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class DefaultAnalyticsEndpoint(BaseAPIView): class DefaultAnalyticsEndpoint(BaseAPIView):
@ -208,70 +293,79 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
filters = issue_filters(request.GET, "GET") filters = issue_filters(request.GET, "GET")
base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) total_issues = base_issues.count()
total_issues = queryset.count() state_groups = base_issues.annotate(state_group=F("state__group"))
total_issues_classified = ( total_issues_classified = (
queryset.annotate(state_group=F("state__group")) state_groups.values("state_group")
.values("state_group")
.annotate(state_count=Count("state_group")) .annotate(state_count=Count("state_group"))
.order_by("state_group") .order_by("state_group")
) )
open_issues = queryset.filter( open_issues_groups = ["backlog", "unstarted", "started"]
state__group__in=["backlog", "unstarted", "started"] open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)
).count()
open_issues = open_issues_queryset.count()
open_issues_classified = ( open_issues_classified = (
queryset.filter(state__group__in=["backlog", "unstarted", "started"]) open_issues_queryset.values("state_group")
.annotate(state_group=F("state__group"))
.values("state_group")
.annotate(state_count=Count("state_group")) .annotate(state_count=Count("state_group"))
.order_by("state_group") .order_by("state_group")
) )
issue_completed_month_wise = ( issue_completed_month_wise = (
queryset.filter(completed_at__isnull=False) base_issues.filter(completed_at__isnull=False)
.annotate(month=ExtractMonth("completed_at")) .annotate(month=ExtractMonth("completed_at"))
.values("month") .values("month")
.annotate(count=Count("*")) .annotate(count=Count("*"))
.order_by("month") .order_by("month")
) )
user_details = [
"created_by__first_name",
"created_by__last_name",
"created_by__avatar",
"created_by__display_name",
"created_by__id",
]
most_issue_created_user = ( most_issue_created_user = (
queryset.exclude(created_by=None) base_issues.exclude(created_by=None)
.values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id") .values(*user_details)
.annotate(count=Count("id")) .annotate(count=Count("id"))
.order_by("-count") .order_by("-count")[:5]
)[:5] )
user_assignee_details = [
"assignees__first_name",
"assignees__last_name",
"assignees__avatar",
"assignees__display_name",
"assignees__id",
]
most_issue_closed_user = ( most_issue_closed_user = (
queryset.filter(completed_at__isnull=False, assignees__isnull=False) base_issues.filter(completed_at__isnull=False)
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") .exclude(assignees=None)
.values(*user_assignee_details)
.annotate(count=Count("id")) .annotate(count=Count("id"))
.order_by("-count") .order_by("-count")[:5]
)[:5] )
pending_issue_user = ( pending_issue_user = (
queryset.filter(completed_at__isnull=True) base_issues.filter(completed_at__isnull=True)
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") .values(*user_assignee_details)
.annotate(count=Count("id")) .annotate(count=Count("id"))
.order_by("-count") .order_by("-count")
) )
open_estimate_sum = ( open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
queryset.filter( "sum"
state__group__in=["backlog", "unstarted", "started"] ]
).aggregate(open_estimate_sum=Sum("estimate_point")) total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
)["open_estimate_sum"]
print(open_estimate_sum)
total_estimate_sum = queryset.aggregate(
total_estimate_sum=Sum("estimate_point")
)["total_estimate_sum"]
return Response( return Response(
{ {
@ -288,10 +382,3 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -14,7 +14,6 @@ from plane.api.serializers import APITokenSerializer
class ApiTokenEndpoint(BaseAPIView): class ApiTokenEndpoint(BaseAPIView):
def post(self, request): def post(self, request):
try:
label = request.data.get("label", str(uuid4().hex)) label = request.data.get("label", str(uuid4().hex))
workspace = request.data.get("workspace", False) workspace = request.data.get("workspace", False)
@ -34,37 +33,15 @@ class ApiTokenEndpoint(BaseAPIView):
status=status.HTTP_201_CREATED, status=status.HTTP_201_CREATED,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request): def get(self, request):
try:
api_tokens = APIToken.objects.filter(user=request.user) api_tokens = APIToken.objects.filter(user=request.user)
serializer = APITokenSerializer(api_tokens, many=True) serializer = APITokenSerializer(api_tokens, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, pk): def delete(self, request, pk):
try:
api_token = APIToken.objects.get(pk=pk) api_token = APIToken.objects.get(pk=pk)
api_token.delete() api_token.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except APIToken.DoesNotExist:
return Response(
{"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -18,7 +18,6 @@ class FileAssetEndpoint(BaseAPIView):
""" """
def get(self, request, workspace_id, asset_key): def get(self, request, workspace_id, asset_key):
try:
asset_key = str(workspace_id) + "/" + asset_key asset_key = str(workspace_id) + "/" + asset_key
files = FileAsset.objects.filter(asset=asset_key) files = FileAsset.objects.filter(asset=asset_key)
if files.exists(): if files.exists():
@ -26,16 +25,9 @@ class FileAssetEndpoint(BaseAPIView):
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else: else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def post(self, request, slug): def post(self, request, slug):
try:
serializer = FileAssetSerializer(data=request.data) serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
# Get the workspace # Get the workspace
@ -43,17 +35,9 @@ class FileAssetEndpoint(BaseAPIView):
serializer.save(workspace_id=workspace.id) serializer.save(workspace_id=workspace.id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Workspace.DoesNotExist:
return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, workspace_id, asset_key): def delete(self, request, workspace_id, asset_key):
try:
asset_key = str(workspace_id) + "/" + asset_key asset_key = str(workspace_id) + "/" + asset_key
file_asset = FileAsset.objects.get(asset=asset_key) file_asset = FileAsset.objects.get(asset=asset_key)
# Delete the file from storage # Delete the file from storage
@ -61,65 +45,31 @@ class FileAssetEndpoint(BaseAPIView):
# Delete the file object # Delete the file object
file_asset.delete() file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except FileAsset.DoesNotExist:
return Response(
{"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserAssetsEndpoint(BaseAPIView): class UserAssetsEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser) parser_classes = (MultiPartParser, FormParser)
def get(self, request, asset_key): def get(self, request, asset_key):
try:
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user) files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
if files.exists(): if files.exists():
serializer = FileAssetSerializer(files, context={"request": request}) serializer = FileAssetSerializer(files, context={"request": request})
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else: else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def post(self, request): def post(self, request):
try:
serializer = FileAssetSerializer(data=request.data) serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, asset_key): def delete(self, request, asset_key):
try:
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user) file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
# Delete the file from storage # Delete the file from storage
file_asset.asset.delete(save=False) file_asset.asset.delete(save=False)
# Delete the file object # Delete the file object
file_asset.delete() file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except FileAsset.DoesNotExist:
return Response(
{"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -9,7 +9,6 @@ from django.utils.encoding import (
DjangoUnicodeDecodeError, DjangoUnicodeDecodeError,
) )
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.sites.shortcuts import get_current_site
from django.conf import settings from django.conf import settings
## Third Party Imports ## Third Party Imports
@ -128,7 +127,6 @@ class ResetPasswordEndpoint(BaseAPIView):
class ChangePasswordEndpoint(BaseAPIView): class ChangePasswordEndpoint(BaseAPIView):
def post(self, request): def post(self, request):
try:
serializer = ChangePasswordSerializer(data=request.data) serializer = ChangePasswordSerializer(data=request.data)
user = User.objects.get(pk=request.user.id) user = User.objects.get(pk=request.user.id)
@ -151,9 +149,3 @@ class ChangePasswordEndpoint(BaseAPIView):
return Response(response) return Response(response)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -40,7 +40,6 @@ class SignUpEndpoint(BaseAPIView):
permission_classes = (AllowAny,) permission_classes = (AllowAny,)
def post(self, request): def post(self, request):
try:
if not settings.ENABLE_SIGNUP: if not settings.ENABLE_SIGNUP:
return Response( return Response(
{ {
@ -87,14 +86,11 @@ class SignUpEndpoint(BaseAPIView):
user.token_updated_at = timezone.now() user.token_updated_at = timezone.now()
user.save() user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user) access_token, refresh_token = get_tokens_for_user(user)
data = { data = {
"access_token": access_token, "access_token": access_token,
"refresh_token": refresh_token, "refresh_token": refresh_token,
"user": serialized_user,
} }
# Send Analytics # Send Analytics
@ -121,19 +117,11 @@ class SignUpEndpoint(BaseAPIView):
return Response(data, status=status.HTTP_200_OK) return Response(data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class SignInEndpoint(BaseAPIView): class SignInEndpoint(BaseAPIView):
permission_classes = (AllowAny,) permission_classes = (AllowAny,)
def post(self, request): def post(self, request):
try:
email = request.data.get("email", False) email = request.data.get("email", False)
password = request.data.get("password", False) password = request.data.get("password", False)
@ -180,8 +168,6 @@ class SignInEndpoint(BaseAPIView):
status=status.HTTP_403_FORBIDDEN, status=status.HTTP_403_FORBIDDEN,
) )
serialized_user = UserSerializer(user).data
# settings last active for the user # settings last active for the user
user.last_active = timezone.now() user.last_active = timezone.now()
user.last_login_time = timezone.now() user.last_login_time = timezone.now()
@ -215,32 +201,19 @@ class SignInEndpoint(BaseAPIView):
data = { data = {
"access_token": access_token, "access_token": access_token,
"refresh_token": refresh_token, "refresh_token": refresh_token,
"user": serialized_user,
} }
return Response(data, status=status.HTTP_200_OK) return Response(data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
class SignOutEndpoint(BaseAPIView): class SignOutEndpoint(BaseAPIView):
def post(self, request): def post(self, request):
try:
refresh_token = request.data.get("refresh_token", False) refresh_token = request.data.get("refresh_token", False)
if not refresh_token: if not refresh_token:
capture_message("No refresh token provided") capture_message("No refresh token provided")
return Response( return Response(
{ {"error": "No refresh token provided"},
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -254,14 +227,6 @@ class SignOutEndpoint(BaseAPIView):
token = RefreshToken(refresh_token) token = RefreshToken(refresh_token)
token.blacklist() token.blacklist()
return Response({"message": "success"}, status=status.HTTP_200_OK) return Response({"message": "success"}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
class MagicSignInGenerateEndpoint(BaseAPIView): class MagicSignInGenerateEndpoint(BaseAPIView):
@ -270,7 +235,6 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
] ]
def post(self, request): def post(self, request):
try:
email = request.data.get("email", False) email = request.data.get("email", False)
if not email: if not email:
@ -327,17 +291,6 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
magic_link.delay(email, key, token, current_site) magic_link.delay(email, key, token, current_site)
return Response({"key": key}, status=status.HTTP_200_OK) return Response({"key": key}, status=status.HTTP_200_OK)
except ValidationError:
return Response(
{"error": "Please provide a valid email address."},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class MagicSignInEndpoint(BaseAPIView): class MagicSignInEndpoint(BaseAPIView):
@ -346,7 +299,6 @@ class MagicSignInEndpoint(BaseAPIView):
] ]
def post(self, request): def post(self, request):
try:
user_token = request.data.get("token", "").strip() user_token = request.data.get("token", "").strip()
key = request.data.get("key", False).strip().lower() key = request.data.get("key", False).strip().lower()
@ -383,9 +335,7 @@ class MagicSignInEndpoint(BaseAPIView):
"user": {"email": email, "id": str(user.id)}, "user": {"email": email, "id": str(user.id)},
"device_ctx": { "device_ctx": {
"ip": request.META.get("REMOTE_ADDR"), "ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get( "user_agent": request.META.get("HTTP_USER_AGENT"),
"HTTP_USER_AGENT"
),
}, },
"event_type": "SIGN_IN", "event_type": "SIGN_IN",
}, },
@ -413,9 +363,7 @@ class MagicSignInEndpoint(BaseAPIView):
"user": {"email": email, "id": str(user.id)}, "user": {"email": email, "id": str(user.id)},
"device_ctx": { "device_ctx": {
"ip": request.META.get("REMOTE_ADDR"), "ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get( "user_agent": request.META.get("HTTP_USER_AGENT"),
"HTTP_USER_AGENT"
),
}, },
"event_type": "SIGN_UP", "event_type": "SIGN_UP",
}, },
@ -427,13 +375,11 @@ class MagicSignInEndpoint(BaseAPIView):
user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now() user.token_updated_at = timezone.now()
user.save() user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user) access_token, refresh_token = get_tokens_for_user(user)
data = { data = {
"access_token": access_token, "access_token": access_token,
"refresh_token": refresh_token, "refresh_token": refresh_token,
"user": serialized_user,
} }
return Response(data, status=status.HTTP_200_OK) return Response(data, status=status.HTTP_200_OK)
@ -449,10 +395,3 @@ class MagicSignInEndpoint(BaseAPIView):
{"error": "The magic code/link has expired please try again"}, {"error": "The magic code/link has expired please try again"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -5,10 +5,14 @@ import zoneinfo
from django.urls import resolve from django.urls import resolve
from django.conf import settings from django.conf import settings
from django.utils import timezone from django.utils import timezone
# Third part imports from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
# Third part imports
from rest_framework import status
from rest_framework import status from rest_framework import status
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from rest_framework.response import Response
from rest_framework.exceptions import APIException from rest_framework.exceptions import APIException
from rest_framework.views import APIView from rest_framework.views import APIView
from rest_framework.filters import SearchFilter from rest_framework.filters import SearchFilter
@ -33,8 +37,6 @@ class TimezoneMixin:
timezone.deactivate() timezone.deactivate()
class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
model = None model = None
@ -59,7 +61,35 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
capture_exception(e) capture_exception(e)
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
def handle_exception(self, exc):
"""
Handle any exception that occurs, by returning an appropriate response,
or re-raising the error.
"""
try:
response = super().handle_exception(exc)
return response
except Exception as e:
if isinstance(e, IntegrityError):
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ValidationError):
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
if isinstance(e, KeyError):
capture_exception(e)
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
try:
response = super().dispatch(request, *args, **kwargs) response = super().dispatch(request, *args, **kwargs)
if settings.DEBUG: if settings.DEBUG:
@ -70,6 +100,10 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
) )
return response return response
except Exception as exc:
response = self.handle_exception(exc)
return exc
@property @property
def workspace_slug(self): def workspace_slug(self):
return self.kwargs.get("slug", None) return self.kwargs.get("slug", None)
@ -104,7 +138,35 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
queryset = backend().filter_queryset(self.request, queryset, self) queryset = backend().filter_queryset(self.request, queryset, self)
return queryset return queryset
def handle_exception(self, exc):
"""
Handle any exception that occurs, by returning an appropriate response,
or re-raising the error.
"""
try:
response = super().handle_exception(exc)
return response
except Exception as e:
if isinstance(e, IntegrityError):
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ValidationError):
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
if isinstance(e, KeyError):
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
try:
response = super().dispatch(request, *args, **kwargs) response = super().dispatch(request, *args, **kwargs)
if settings.DEBUG: if settings.DEBUG:
@ -115,6 +177,10 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
) )
return response return response
except Exception as exc:
response = self.handle_exception(exc)
return exc
@property @property
def workspace_slug(self): def workspace_slug(self):
return self.kwargs.get("slug", None) return self.kwargs.get("slug", None)

View File

@ -0,0 +1,33 @@
# Python imports
import os
# Django imports
from django.conf import settings
# Third party imports
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from .base import BaseAPIView
class ConfigurationEndpoint(BaseAPIView):
permission_classes = [
AllowAny,
]
def get(self, request):
data = {}
data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
data["magic_login"] = (
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
data["email_password_login"] = (
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
)
return Response(data, status=status.HTTP_200_OK)

View File

@ -2,7 +2,6 @@
import json import json
# Django imports # Django imports
from django.db import IntegrityError
from django.db.models import ( from django.db.models import (
OuterRef, OuterRef,
Func, Func,
@ -62,29 +61,6 @@ class CycleViewSet(BaseViewSet):
project_id=self.kwargs.get("project_id"), owned_by=self.request.user project_id=self.kwargs.get("project_id"), owned_by=self.request.user
) )
def perform_destroy(self, instance):
cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
)
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("pk")),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
subquery = CycleFavorite.objects.filter( subquery = CycleFavorite.objects.filter(
user=self.request.user, user=self.request.user,
@ -199,19 +175,12 @@ class CycleViewSet(BaseViewSet):
) )
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try:
queryset = self.get_queryset() queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all") cycle_view = request.GET.get("cycle_view", "all")
order_by = request.GET.get("order_by", "sort_order") order_by = request.GET.get("order_by", "sort_order")
queryset = queryset.order_by(order_by) queryset = queryset.order_by(order_by)
# All Cycles
if cycle_view == "all":
return Response(
CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
)
# Current Cycle # Current Cycle
if cycle_view == "current": if cycle_view == "current":
queryset = queryset.filter( queryset = queryset.filter(
@ -348,19 +317,12 @@ class CycleViewSet(BaseViewSet):
CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
) )
# If no matching view is found return all cycles
return Response( return Response(
{"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
if ( if (
request.data.get("start_date", None) is None request.data.get("start_date", None) is None
and request.data.get("end_date", None) is None and request.data.get("end_date", None) is None
@ -383,18 +345,9 @@ class CycleViewSet(BaseViewSet):
}, },
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
try: cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
request_data = request.data request_data = request.data
@ -417,19 +370,8 @@ class CycleViewSet(BaseViewSet):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Cycle.DoesNotExist:
return Response(
{"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, pk): def retrieve(self, request, slug, project_id, pk):
try:
queryset = self.get_queryset().get(pk=pk) queryset = self.get_queryset().get(pk=pk)
# Assignee Distribution # Assignee Distribution
@ -444,9 +386,7 @@ class CycleViewSet(BaseViewSet):
.annotate(assignee_id=F("assignees__id")) .annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar")) .annotate(avatar=F("assignees__avatar"))
.annotate(display_name=F("assignees__display_name")) .annotate(display_name=F("assignees__display_name"))
.values( .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
"first_name", "last_name", "assignee_id", "avatar", "display_name"
)
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"assignee_id", "assignee_id",
@ -532,16 +472,31 @@ class CycleViewSet(BaseViewSet):
data, data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Cycle.DoesNotExist:
return Response( def destroy(self, request, slug, project_id, pk):
{"error": "Cycle Does not exists"}, status=status.HTTP_400_BAD_REQUEST cycle_issues = list(
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
) )
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
# Delete the cycle
cycle.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(pk),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleIssueViewSet(BaseViewSet): class CycleIssueViewSet(BaseViewSet):
@ -563,23 +518,6 @@ class CycleIssueViewSet(BaseViewSet):
cycle_id=self.kwargs.get("cycle_id"), cycle_id=self.kwargs.get("cycle_id"),
) )
def perform_destroy(self, instance):
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(instance.issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
super() super()
@ -604,7 +542,6 @@ class CycleIssueViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id, cycle_id): def list(self, request, slug, project_id, cycle_id):
try:
order_by = request.GET.get("order_by", "created_at") order_by = request.GET.get("order_by", "created_at")
group_by = request.GET.get("group_by", False) group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False) sub_group_by = request.GET.get("sub_group_by", False)
@ -635,9 +572,7 @@ class CycleIssueViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter( attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -662,15 +597,8 @@ class CycleIssueViewSet(BaseViewSet):
issues_data, issues_data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id, cycle_id): def create(self, request, slug, project_id, cycle_id):
try:
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
if not len(issues): if not len(issues):
@ -752,7 +680,7 @@ class CycleIssueViewSet(BaseViewSet):
), ),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
# Return all Cycle Issues # Return all Cycle Issues
@ -761,16 +689,27 @@ class CycleIssueViewSet(BaseViewSet):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Cycle.DoesNotExist: def destroy(self, request, slug, project_id, cycle_id, pk):
return Response( cycle_issue = CycleIssue.objects.get(
{"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
) )
except Exception as e: issue_id = cycle_issue.issue_id
capture_exception(e) cycle_issue.delete()
return Response( issue_activity.delay(
{"error": "Something went wrong please try again later"}, type="cycle.activity.deleted",
status=status.HTTP_400_BAD_REQUEST, requested_data=json.dumps(
{
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
) )
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleDateCheckEndpoint(BaseAPIView): class CycleDateCheckEndpoint(BaseAPIView):
@ -779,7 +718,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
try:
start_date = request.data.get("start_date", False) start_date = request.data.get("start_date", False)
end_date = request.data.get("end_date", False) end_date = request.data.get("end_date", False)
cycle_id = request.data.get("cycle_id") cycle_id = request.data.get("cycle_id")
@ -802,18 +740,12 @@ class CycleDateCheckEndpoint(BaseAPIView):
if cycles.exists(): if cycles.exists():
return Response( return Response(
{ {
"error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates", "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates",
"status": False, "status": False,
} }
) )
else: else:
return Response({"status": True}, status=status.HTTP_200_OK) return Response({"status": True}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class CycleFavoriteViewSet(BaseViewSet): class CycleFavoriteViewSet(BaseViewSet):
@ -830,33 +762,13 @@ class CycleFavoriteViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = CycleFavoriteSerializer(data=request.data) serializer = CycleFavoriteSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id) serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The cycle is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, cycle_id): def destroy(self, request, slug, project_id, cycle_id):
try:
cycle_favorite = CycleFavorite.objects.get( cycle_favorite = CycleFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
@ -865,17 +777,6 @@ class CycleFavoriteViewSet(BaseViewSet):
) )
cycle_favorite.delete() cycle_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except CycleFavorite.DoesNotExist:
return Response(
{"error": "Cycle is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class TransferCycleIssueEndpoint(BaseAPIView): class TransferCycleIssueEndpoint(BaseAPIView):
@ -884,7 +785,6 @@ class TransferCycleIssueEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id, cycle_id): def post(self, request, slug, project_id, cycle_id):
try:
new_cycle_id = request.data.get("new_cycle_id", False) new_cycle_id = request.data.get("new_cycle_id", False)
if not new_cycle_id: if not new_cycle_id:
@ -925,14 +825,3 @@ class TransferCycleIssueEndpoint(BaseAPIView):
) )
return Response({"message": "Success"}, status=status.HTTP_200_OK) return Response({"message": "Success"}, status=status.HTTP_200_OK)
except Cycle.DoesNotExist:
return Response(
{"error": "New Cycle Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -1,6 +1,3 @@
# Django imports
from django.db import IntegrityError
# Third party imports # Third party imports
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework import status from rest_framework import status
@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
] ]
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
try:
project = Project.objects.get(workspace__slug=slug, pk=project_id) project = Project.objects.get(workspace__slug=slug, pk=project_id)
if project.estimate_id is not None: if project.estimate_id is not None:
estimate_points = EstimatePoint.objects.filter( estimate_points = EstimatePoint.objects.filter(
@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
serializer = EstimatePointSerializer(estimate_points, many=True) serializer = EstimatePointSerializer(estimate_points, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response([], status=status.HTTP_200_OK) return Response([], status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class BulkEstimatePointEndpoint(BaseViewSet): class BulkEstimatePointEndpoint(BaseViewSet):
@ -50,21 +40,13 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer_class = EstimateSerializer serializer_class = EstimateSerializer
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try:
estimates = Estimate.objects.filter( estimates = Estimate.objects.filter(
workspace__slug=slug, project_id=project_id workspace__slug=slug, project_id=project_id
).prefetch_related("points").select_related("workspace", "project") ).prefetch_related("points").select_related("workspace", "project")
serializer = EstimateReadSerializer(estimates, many=True) serializer = EstimateReadSerializer(estimates, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
if not request.data.get("estimate", False): if not request.data.get("estimate", False):
return Response( return Response(
{"error": "Estimate is required"}, {"error": "Estimate is required"},
@ -84,13 +66,7 @@ class BulkEstimatePointEndpoint(BaseViewSet):
return Response( return Response(
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
) )
try:
estimate = estimate_serializer.save(project_id=project_id) estimate = estimate_serializer.save(project_id=project_id)
except IntegrityError:
return Response(
{"errror": "Estimate with the name already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_points = EstimatePoint.objects.bulk_create( estimate_points = EstimatePoint.objects.bulk_create(
[ [
EstimatePoint( EstimatePoint(
@ -120,20 +96,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Estimate.DoesNotExist:
return Response(
{"error": "Estimate does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, estimate_id): def retrieve(self, request, slug, project_id, estimate_id):
try:
estimate = Estimate.objects.get( estimate = Estimate.objects.get(
pk=estimate_id, workspace__slug=slug, project_id=project_id pk=estimate_id, workspace__slug=slug, project_id=project_id
) )
@ -142,19 +106,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer.data, serializer.data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Estimate.DoesNotExist:
return Response(
{"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, estimate_id): def partial_update(self, request, slug, project_id, estimate_id):
try:
if not request.data.get("estimate", False): if not request.data.get("estimate", False):
return Response( return Response(
{"error": "Estimate is required"}, {"error": "Estimate is required"},
@ -176,13 +129,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
return Response( return Response(
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
) )
try:
estimate = estimate_serializer.save() estimate = estimate_serializer.save()
except IntegrityError:
return Response(
{"errror": "Estimate with the name already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_points_data = request.data.get("estimate_points", []) estimate_points_data = request.data.get("estimate_points", [])
@ -209,15 +157,9 @@ class BulkEstimatePointEndpoint(BaseViewSet):
) )
updated_estimate_points.append(estimate_point) updated_estimate_points.append(estimate_point)
try:
EstimatePoint.objects.bulk_update( EstimatePoint.objects.bulk_update(
updated_estimate_points, ["value"], batch_size=10, updated_estimate_points, ["value"], batch_size=10,
) )
except IntegrityError as e:
return Response(
{"error": "Values need to be unique for each key"},
status=status.HTTP_400_BAD_REQUEST,
)
estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
return Response( return Response(
@ -227,27 +169,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Estimate.DoesNotExist:
return Response(
{"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, estimate_id): def destroy(self, request, slug, project_id, estimate_id):
try:
estimate = Estimate.objects.get( estimate = Estimate.objects.get(
pk=estimate_id, workspace__slug=slug, project_id=project_id pk=estimate_id, workspace__slug=slug, project_id=project_id
) )
estimate.delete() estimate.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -20,7 +20,6 @@ class ExportIssuesEndpoint(BaseAPIView):
serializer_class = ExporterHistorySerializer serializer_class = ExporterHistorySerializer
def post(self, request, slug): def post(self, request, slug):
try:
# Get the workspace # Get the workspace
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
@ -61,20 +60,8 @@ class ExportIssuesEndpoint(BaseAPIView):
{"error": f"Provider '{provider}' not found."}, {"error": f"Provider '{provider}' not found."},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except Workspace.DoesNotExist:
return Response(
{"error": "Workspace does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request, slug): def get(self, request, slug):
try:
exporter_history = ExporterHistory.objects.filter( exporter_history = ExporterHistory.objects.filter(
workspace__slug=slug workspace__slug=slug
).select_related("workspace","initiated_by") ).select_related("workspace","initiated_by")
@ -92,9 +79,3 @@ class ExportIssuesEndpoint(BaseAPIView):
{"error": "per_page and cursor are required"}, {"error": "per_page and cursor are required"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -0,0 +1,92 @@
# Python imports
import requests
# Third party imports
import openai
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny
from sentry_sdk import capture_exception
# Django imports
from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.utils.integrations.github import get_release_notes
class GPTIntegrationEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
return Response(
{"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
prompt = request.data.get("prompt", False)
task = request.data.get("task", False)
if not task:
return Response(
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
final_text = task + "\n" + prompt
openai.api_key = settings.OPENAI_API_KEY
response = openai.ChatCompletion.create(
model=settings.GPT_ENGINE,
messages=[{"role": "user", "content": final_text}],
temperature=0.7,
max_tokens=1024,
)
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
text = response.choices[0].message.content.strip()
text_html = text.replace("\n", "<br/>")
return Response(
{
"response": text,
"response_html": text_html,
"project_detail": ProjectLiteSerializer(project).data,
"workspace_detail": WorkspaceLiteSerializer(workspace).data,
},
status=status.HTTP_200_OK,
)
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
release_notes = get_release_notes()
return Response(release_notes, status=status.HTTP_200_OK)
class UnsplashEndpoint(BaseAPIView):
def get(self, request):
query = request.GET.get("query", False)
page = request.GET.get("page", 1)
per_page = request.GET.get("per_page", 20)
url = (
f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
if query
else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
)
headers = {
"Content-Type": "application/json",
}
resp = requests.get(url=url, headers=headers)
return Response(resp.json(), status=status.HTTP_200_OK)

View File

@ -1,75 +0,0 @@
# Python imports
import requests
# Third party imports
from rest_framework.response import Response
from rest_framework import status
import openai
from sentry_sdk import capture_exception
# Django imports
from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
class GPTIntegrationEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
try:
if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
return Response(
{"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
prompt = request.data.get("prompt", False)
task = request.data.get("task", False)
if not task:
return Response(
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
final_text = task + "\n" + prompt
openai.api_key = settings.OPENAI_API_KEY
response = openai.ChatCompletion.create(
model=settings.GPT_ENGINE,
messages=[{"role": "user", "content": final_text}],
temperature=0.7,
max_tokens=1024,
)
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
text = response.choices[0].message.content.strip()
text_html = text.replace("\n", "<br/>")
return Response(
{
"response": text,
"response_html": text_html,
"project_detail": ProjectLiteSerializer(project).data,
"workspace_detail": WorkspaceLiteSerializer(workspace).data,
},
status=status.HTTP_200_OK,
)
except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
return Response(
{"error": "Workspace or Project Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -44,7 +44,6 @@ from plane.utils.html_processor import strip_tags
class ServiceIssueImportSummaryEndpoint(BaseAPIView): class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service): def get(self, request, slug, service):
try:
if service == "github": if service == "github":
owner = request.GET.get("owner", False) owner = request.GET.get("owner", False)
repo = request.GET.get("repo", False) repo = request.GET.get("repo", False)
@ -117,22 +116,10 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
{"error": "Service not supported yet"}, {"error": "Service not supported yet"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except WorkspaceIntegration.DoesNotExist:
return Response(
{"error": "Requested integration was not installed in the workspace"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ImportServiceEndpoint(BaseAPIView): class ImportServiceEndpoint(BaseAPIView):
def post(self, request, slug, service): def post(self, request, slug, service):
try:
project_id = request.data.get("project_id", False) project_id = request.data.get("project_id", False)
if not project_id: if not project_id:
@ -220,24 +207,8 @@ class ImportServiceEndpoint(BaseAPIView):
{"error": "Servivce not supported yet"}, {"error": "Servivce not supported yet"},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except (
Workspace.DoesNotExist,
WorkspaceIntegration.DoesNotExist,
Project.DoesNotExist,
) as e:
return Response(
{"error": "Workspace Integration or Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request, slug): def get(self, request, slug):
try:
imports = ( imports = (
Importer.objects.filter(workspace__slug=slug) Importer.objects.filter(workspace__slug=slug)
.order_by("-created_at") .order_by("-created_at")
@ -245,15 +216,8 @@ class ImportServiceEndpoint(BaseAPIView):
) )
serializer = ImporterSerializer(imports, many=True) serializer = ImporterSerializer(imports, many=True)
return Response(serializer.data) return Response(serializer.data)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, slug, service, pk): def delete(self, request, slug, service, pk):
try:
importer = Importer.objects.get( importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug pk=pk, service=service, workspace__slug=slug
) )
@ -272,15 +236,8 @@ class ImportServiceEndpoint(BaseAPIView):
Module.objects.filter(id__in=imported_modules).delete() Module.objects.filter(id__in=imported_modules).delete()
importer.delete() importer.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def patch(self, request, slug, service, pk): def patch(self, request, slug, service, pk):
try:
importer = Importer.objects.get( importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug pk=pk, service=service, workspace__slug=slug
) )
@ -289,21 +246,10 @@ class ImportServiceEndpoint(BaseAPIView):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Importer.DoesNotExist:
return Response(
{"error": "Importer Does not exists"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UpdateServiceImportStatusEndpoint(BaseAPIView): class UpdateServiceImportStatusEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service, importer_id): def post(self, request, slug, project_id, service, importer_id):
try:
importer = Importer.objects.get( importer = Importer.objects.get(
pk=importer_id, pk=importer_id,
workspace__slug=slug, workspace__slug=slug,
@ -313,15 +259,10 @@ class UpdateServiceImportStatusEndpoint(BaseAPIView):
importer.status = request.data.get("status", "processing") importer.status = request.data.get("status", "processing")
importer.save() importer.save()
return Response(status.HTTP_200_OK) return Response(status.HTTP_200_OK)
except Importer.DoesNotExist:
return Response(
{"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
)
class BulkImportIssuesEndpoint(BaseAPIView): class BulkImportIssuesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service): def post(self, request, slug, project_id, service):
try:
# Get the project # Get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug) project = Project.objects.get(pk=project_id, workspace__slug=slug)
@ -504,21 +445,10 @@ class BulkImportIssuesEndpoint(BaseAPIView):
{"issues": IssueFlatSerializer(issues, many=True).data}, {"issues": IssueFlatSerializer(issues, many=True).data},
status=status.HTTP_201_CREATED, status=status.HTTP_201_CREATED,
) )
except Project.DoesNotExist:
return Response(
{"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class BulkImportModulesEndpoint(BaseAPIView): class BulkImportModulesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service): def post(self, request, slug, project_id, service):
try:
modules_data = request.data.get("modules_data", []) modules_data = request.data.get("modules_data", [])
project = Project.objects.get(pk=project_id, workspace__slug=slug) project = Project.objects.get(pk=project_id, workspace__slug=slug)
@ -590,13 +520,3 @@ class BulkImportModulesEndpoint(BaseAPIView):
{"message": "Modules created but issues could not be imported"}, {"message": "Modules created but issues could not be imported"},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Project.DoesNotExist:
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -64,7 +64,6 @@ class InboxViewSet(BaseViewSet):
serializer.save(project_id=self.kwargs.get("project_id")) serializer.save(project_id=self.kwargs.get("project_id"))
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
try:
inbox = Inbox.objects.get( inbox = Inbox.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk workspace__slug=slug, project_id=project_id, pk=pk
) )
@ -76,12 +75,6 @@ class InboxViewSet(BaseViewSet):
) )
inbox.delete() inbox.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wronf please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class InboxIssueViewSet(BaseViewSet): class InboxIssueViewSet(BaseViewSet):
@ -110,7 +103,6 @@ class InboxIssueViewSet(BaseViewSet):
) )
def list(self, request, slug, project_id, inbox_id): def list(self, request, slug, project_id, inbox_id):
try:
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
issues = ( issues = (
Issue.objects.filter( Issue.objects.filter(
@ -158,15 +150,8 @@ class InboxIssueViewSet(BaseViewSet):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id, inbox_id): def create(self, request, slug, project_id, inbox_id):
try:
if not request.data.get("issue", {}).get("name", False): if not request.data.get("issue", {}).get("name", False):
return Response( return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
@ -225,15 +210,8 @@ class InboxIssueViewSet(BaseViewSet):
serializer = IssueStateInboxSerializer(issue) serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, inbox_id, pk): def partial_update(self, request, slug, project_id, inbox_id, pk):
try:
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
) )
@ -330,20 +308,8 @@ class InboxIssueViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
else: else:
return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK) return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
except InboxIssue.DoesNotExist:
return Response(
{"error": "Inbox Issue does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, inbox_id, pk): def retrieve(self, request, slug, project_id, inbox_id, pk):
try:
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
) )
@ -352,15 +318,8 @@ class InboxIssueViewSet(BaseViewSet):
) )
serializer = IssueStateInboxSerializer(issue) serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, inbox_id, pk): def destroy(self, request, slug, project_id, inbox_id, pk):
try:
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
) )
@ -370,16 +329,13 @@ class InboxIssueViewSet(BaseViewSet):
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
# Check the issue status
if inbox_issue.status in [-2, -1, 0, 2]:
# Delete the issue also
Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
inbox_issue.delete() inbox_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except InboxIssue.DoesNotExist:
return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class InboxIssuePublicViewSet(BaseViewSet): class InboxIssuePublicViewSet(BaseViewSet):
@ -408,7 +364,6 @@ class InboxIssuePublicViewSet(BaseViewSet):
return InboxIssue.objects.none() return InboxIssue.objects.none()
def list(self, request, slug, project_id, inbox_id): def list(self, request, slug, project_id, inbox_id):
try:
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None: if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
@ -459,17 +414,8 @@ class InboxIssuePublicViewSet(BaseViewSet):
issues_data, issues_data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except ProjectDeployBoard.DoesNotExist:
return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id, inbox_id): def create(self, request, slug, project_id, inbox_id):
try:
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None: if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
@ -532,15 +478,8 @@ class InboxIssuePublicViewSet(BaseViewSet):
serializer = IssueStateInboxSerializer(issue) serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, inbox_id, pk): def partial_update(self, request, slug, project_id, inbox_id, pk):
try:
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None: if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
@ -590,20 +529,8 @@ class InboxIssuePublicViewSet(BaseViewSet):
issue_serializer.save() issue_serializer.save()
return Response(issue_serializer.data, status=status.HTTP_200_OK) return Response(issue_serializer.data, status=status.HTTP_200_OK)
return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except InboxIssue.DoesNotExist:
return Response(
{"error": "Inbox Issue does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, inbox_id, pk): def retrieve(self, request, slug, project_id, inbox_id, pk):
try:
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None: if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
@ -616,15 +543,8 @@ class InboxIssuePublicViewSet(BaseViewSet):
) )
serializer = IssueStateInboxSerializer(issue) serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, inbox_id, pk): def destroy(self, request, slug, project_id, inbox_id, pk):
try:
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None: if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
@ -638,12 +558,3 @@ class InboxIssuePublicViewSet(BaseViewSet):
inbox_issue.delete() inbox_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except InboxIssue.DoesNotExist:
return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -2,7 +2,6 @@
import uuid import uuid
# Django imports # Django imports
from django.db import IntegrityError
from django.contrib.auth.hashers import make_password from django.contrib.auth.hashers import make_password
# Third party imports # Third party imports
@ -33,21 +32,13 @@ class IntegrationViewSet(BaseViewSet):
model = Integration model = Integration
def create(self, request): def create(self, request):
try:
serializer = IntegrationSerializer(data=request.data) serializer = IntegrationSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, pk): def partial_update(self, request, pk):
try:
integration = Integration.objects.get(pk=pk) integration = Integration.objects.get(pk=pk)
if integration.verified: if integration.verified:
return Response( return Response(
@ -64,20 +55,7 @@ class IntegrationViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Integration.DoesNotExist:
return Response(
{"error": "Integration Does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, pk): def destroy(self, request, pk):
try:
integration = Integration.objects.get(pk=pk) integration = Integration.objects.get(pk=pk)
if integration.verified: if integration.verified:
return Response( return Response(
@ -87,11 +65,6 @@ class IntegrationViewSet(BaseViewSet):
integration.delete() integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Integration.DoesNotExist:
return Response(
{"error": "Integration Does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
class WorkspaceIntegrationViewSet(BaseViewSet): class WorkspaceIntegrationViewSet(BaseViewSet):
@ -111,7 +84,6 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
) )
def create(self, request, slug, provider): def create(self, request, slug, provider):
try:
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
integration = Integration.objects.get(provider=provider) integration = Integration.objects.get(provider=provider)
config = {} config = {}
@ -175,33 +147,8 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
WorkspaceIntegrationSerializer(workspace_integration).data, WorkspaceIntegrationSerializer(workspace_integration).data,
status=status.HTTP_201_CREATED, status=status.HTTP_201_CREATED,
) )
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "Integration is already active in the workspace"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except (Workspace.DoesNotExist, Integration.DoesNotExist) as e:
capture_exception(e)
return Response(
{"error": "Workspace or Integration not found"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, pk): def destroy(self, request, slug, pk):
try:
workspace_integration = WorkspaceIntegration.objects.get( workspace_integration = WorkspaceIntegration.objects.get(
pk=pk, workspace__slug=slug pk=pk, workspace__slug=slug
) )
@ -215,15 +162,3 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
workspace_integration.delete() workspace_integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceIntegration.DoesNotExist:
return Response(
{"error": "Workspace Integration Does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -30,7 +30,6 @@ class GithubRepositoriesEndpoint(BaseAPIView):
] ]
def get(self, request, slug, workspace_integration_id): def get(self, request, slug, workspace_integration_id):
try:
page = request.GET.get("page", 1) page = request.GET.get("page", 1)
workspace_integration = WorkspaceIntegration.objects.get( workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id workspace__slug=slug, pk=workspace_integration_id
@ -49,11 +48,6 @@ class GithubRepositoriesEndpoint(BaseAPIView):
) )
repositories = get_github_repos(access_tokens_url, repositories_url) repositories = get_github_repos(access_tokens_url, repositories_url)
return Response(repositories, status=status.HTTP_200_OK) return Response(repositories, status=status.HTTP_200_OK)
except WorkspaceIntegration.DoesNotExist:
return Response(
{"error": "Workspace Integration Does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
class GithubRepositorySyncViewSet(BaseViewSet): class GithubRepositorySyncViewSet(BaseViewSet):
@ -76,7 +70,6 @@ class GithubRepositorySyncViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id, workspace_integration_id): def create(self, request, slug, project_id, workspace_integration_id):
try:
name = request.data.get("name", False) name = request.data.get("name", False)
url = request.data.get("url", False) url = request.data.get("url", False)
config = request.data.get("config", {}) config = request.data.get("config", {})
@ -147,18 +140,6 @@ class GithubRepositorySyncViewSet(BaseViewSet):
status=status.HTTP_201_CREATED, status=status.HTTP_201_CREATED,
) )
except WorkspaceIntegration.DoesNotExist:
return Response(
{"error": "Workspace Integration does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class GithubIssueSyncViewSet(BaseViewSet): class GithubIssueSyncViewSet(BaseViewSet):
permission_classes = [ permission_classes = [
@ -177,7 +158,6 @@ class GithubIssueSyncViewSet(BaseViewSet):
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
def post(self, request, slug, project_id, repo_sync_id): def post(self, request, slug, project_id, repo_sync_id):
try:
project = Project.objects.get(pk=project_id, workspace__slug=slug) project = Project.objects.get(pk=project_id, workspace__slug=slug)
github_issue_syncs = request.data.get("github_issue_syncs", []) github_issue_syncs = request.data.get("github_issue_syncs", [])
@ -202,17 +182,6 @@ class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
except Project.DoesNotExist:
return Response(
{"error": "Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class GithubCommentSyncViewSet(BaseViewSet): class GithubCommentSyncViewSet(BaseViewSet):

View File

@ -32,7 +32,6 @@ class SlackProjectSyncViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id, workspace_integration_id): def create(self, request, slug, project_id, workspace_integration_id):
try:
serializer = SlackProjectSyncSerializer(data=request.data) serializer = SlackProjectSyncSerializer(data=request.data)
workspace_integration = WorkspaceIntegration.objects.get( workspace_integration = WorkspaceIntegration.objects.get(
@ -55,19 +54,3 @@ class SlackProjectSyncViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError:
return Response(
{"error": "Slack is already enabled for the project"},
status=status.HTTP_400_BAD_REQUEST,
)
except WorkspaceIntegration.DoesNotExist:
return Response(
{"error": "Workspace Integration does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
print(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

File diff suppressed because it is too large Load Diff

View File

@ -141,31 +141,7 @@ class ModuleViewSet(BaseViewSet):
.order_by(order_by, "name") .order_by(order_by, "name")
) )
def perform_destroy(self, instance):
module_issues = list(
ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
)
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("pk")),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
project = Project.objects.get(workspace__slug=slug, pk=project_id) project = Project.objects.get(workspace__slug=slug, pk=project_id)
serializer = ModuleWriteSerializer( serializer = ModuleWriteSerializer(
data=request.data, context={"project": project} data=request.data, context={"project": project}
@ -176,25 +152,7 @@ class ModuleViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Project.DoesNotExist:
return Response(
{"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"name": "The module name is already taken"},
status=status.HTTP_410_GONE,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, pk): def retrieve(self, request, slug, project_id, pk):
try:
queryset = self.get_queryset().get(pk=pk) queryset = self.get_queryset().get(pk=pk)
assignee_distribution = ( assignee_distribution = (
@ -208,9 +166,7 @@ class ModuleViewSet(BaseViewSet):
.annotate(assignee_id=F("assignees__id")) .annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name")) .annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar")) .annotate(avatar=F("assignees__avatar"))
.values( .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
"first_name", "last_name", "assignee_id", "avatar", "display_name"
)
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"assignee_id", "assignee_id",
@ -302,12 +258,27 @@ class ModuleViewSet(BaseViewSet):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e: def destroy(self, request, slug, project_id, pk):
capture_exception(e) module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
return Response( module_issues = list(
{"error": "Something went wrong please try again later"}, ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
status=status.HTTP_400_BAD_REQUEST,
) )
module.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(pk),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleIssueViewSet(BaseViewSet): class ModuleIssueViewSet(BaseViewSet):
@ -329,23 +300,6 @@ class ModuleIssueViewSet(BaseViewSet):
module_id=self.kwargs.get("module_id"), module_id=self.kwargs.get("module_id"),
) )
def perform_destroy(self, instance):
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("module_id")),
"issues": [str(instance.issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self): def get_queryset(self):
return self.filter_queryset( return self.filter_queryset(
super() super()
@ -371,7 +325,6 @@ class ModuleIssueViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id): def list(self, request, slug, project_id, module_id):
try:
order_by = request.GET.get("order_by", "created_at") order_by = request.GET.get("order_by", "created_at")
group_by = request.GET.get("group_by", False) group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False) sub_group_by = request.GET.get("sub_group_by", False)
@ -402,9 +355,7 @@ class ModuleIssueViewSet(BaseViewSet):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter( attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -429,15 +380,8 @@ class ModuleIssueViewSet(BaseViewSet):
issues_data, issues_data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id, module_id): def create(self, request, slug, project_id, module_id):
try:
issues = request.data.get("issues", []) issues = request.data.get("issues", [])
if not len(issues): if not len(issues):
return Response( return Response(
@ -510,23 +454,34 @@ class ModuleIssueViewSet(BaseViewSet):
), ),
} }
), ),
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp()),
) )
return Response( return Response(
ModuleIssueSerializer(self.get_queryset(), many=True).data, ModuleIssueSerializer(self.get_queryset(), many=True).data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Module.DoesNotExist:
return Response( def destroy(self, request, slug, project_id, module_id, pk):
{"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST module_issue = ModuleIssue.objects.get(
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
) )
except Exception as e: module_issue.delete()
capture_exception(e) issue_activity.delay(
return Response( type="module.activity.deleted",
{"error": "Something went wrong please try again later"}, requested_data=json.dumps(
status=status.HTTP_400_BAD_REQUEST, {
"module_id": str(module_id),
"issues": [str(module_issue.issue_id)],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
) )
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleLinkViewSet(BaseViewSet): class ModuleLinkViewSet(BaseViewSet):
@ -570,33 +525,13 @@ class ModuleFavoriteViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = ModuleFavoriteSerializer(data=request.data) serializer = ModuleFavoriteSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id) serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The module is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, module_id): def destroy(self, request, slug, project_id, module_id):
try:
module_favorite = ModuleFavorite.objects.get( module_favorite = ModuleFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
@ -605,14 +540,3 @@ class ModuleFavoriteViewSet(BaseViewSet):
) )
module_favorite.delete() module_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except ModuleFavorite.DoesNotExist:
return Response(
{"error": "Module is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -36,12 +36,10 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
) )
def list(self, request, slug): def list(self, request, slug):
try: # Get query parameters
snoozed = request.GET.get("snoozed", "false") snoozed = request.GET.get("snoozed", "false")
archived = request.GET.get("archived", "false") archived = request.GET.get("archived", "false")
read = request.GET.get("read", "true") read = request.GET.get("read", "true")
# Filter type
type = request.GET.get("type", "all") type = request.GET.get("type", "all")
notifications = ( notifications = (
@ -52,27 +50,24 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
.order_by("snoozed_till", "-created_at") .order_by("snoozed_till", "-created_at")
) )
# Filter for snoozed notifications # Filters based on query parameters
if snoozed == "false": snoozed_filters = {
notifications = notifications.filter( "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
) }
if snoozed == "true": notifications = notifications.filter(snoozed_filters[snoozed])
notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) archived_filters = {
) "true": Q(archived_at__isnull=False),
"false": Q(archived_at__isnull=True),
}
notifications = notifications.filter(archived_filters[archived])
if read == "false": if read == "false":
notifications = notifications.filter(read_at__isnull=True) notifications = notifications.filter(read_at__isnull=True)
# Filter for archived or unarchive
if archived == "false":
notifications = notifications.filter(archived_at__isnull=True)
if archived == "true":
notifications = notifications.filter(archived_at__isnull=False)
# Subscribed issues # Subscribed issues
if type == "watching": if type == "watching":
issue_ids = IssueSubscriber.objects.filter( issue_ids = IssueSubscriber.objects.filter(
@ -97,9 +92,7 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
issue_ids = Issue.objects.filter( issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True) ).values_list("pk", flat=True)
notifications = notifications.filter( notifications = notifications.filter(entity_identifier__in=issue_ids)
entity_identifier__in=issue_ids
)
# Pagination # Pagination
if request.GET.get("per_page", False) and request.GET.get("cursor", False): if request.GET.get("per_page", False) and request.GET.get("cursor", False):
@ -113,15 +106,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
serializer = NotificationSerializer(notifications, many=True) serializer = NotificationSerializer(notifications, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, pk): def partial_update(self, request, slug, pk):
try:
notification = Notification.objects.get( notification = Notification.objects.get(
workspace__slug=slug, pk=pk, receiver=request.user workspace__slug=slug, pk=pk, receiver=request.user
) )
@ -137,20 +123,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def mark_read(self, request, slug, pk): def mark_read(self, request, slug, pk):
try:
notification = Notification.objects.get( notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk receiver=request.user, workspace__slug=slug, pk=pk
) )
@ -158,20 +132,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
notification.save() notification.save()
serializer = NotificationSerializer(notification) serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def mark_unread(self, request, slug, pk): def mark_unread(self, request, slug, pk):
try:
notification = Notification.objects.get( notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk receiver=request.user, workspace__slug=slug, pk=pk
) )
@ -179,20 +141,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
notification.save() notification.save()
serializer = NotificationSerializer(notification) serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def archive(self, request, slug, pk): def archive(self, request, slug, pk):
try:
notification = Notification.objects.get( notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk receiver=request.user, workspace__slug=slug, pk=pk
) )
@ -200,20 +150,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
notification.save() notification.save()
serializer = NotificationSerializer(notification) serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def unarchive(self, request, slug, pk): def unarchive(self, request, slug, pk):
try:
notification = Notification.objects.get( notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk receiver=request.user, workspace__slug=slug, pk=pk
) )
@ -221,22 +159,10 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
notification.save() notification.save()
serializer = NotificationSerializer(notification) serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UnreadNotificationEndpoint(BaseAPIView): class UnreadNotificationEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
try:
# Watching Issues Count # Watching Issues Count
watching_issues_count = Notification.objects.filter( watching_issues_count = Notification.objects.filter(
workspace__slug=slug, workspace__slug=slug,
@ -278,17 +204,10 @@ class UnreadNotificationEndpoint(BaseAPIView):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class MarkAllReadNotificationViewSet(BaseViewSet): class MarkAllReadNotificationViewSet(BaseViewSet):
def create(self, request, slug): def create(self, request, slug):
try:
snoozed = request.data.get("snoozed", False) snoozed = request.data.get("snoozed", False)
archived = request.data.get("archived", False) archived = request.data.get("archived", False)
type = request.data.get("type", "all") type = request.data.get("type", "all")
@ -343,9 +262,7 @@ class MarkAllReadNotificationViewSet(BaseViewSet):
issue_ids = Issue.objects.filter( issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True) ).values_list("pk", flat=True)
notifications = notifications.filter( notifications = notifications.filter(entity_identifier__in=issue_ids)
entity_identifier__in=issue_ids
)
updated_notifications = [] updated_notifications = []
for notification in notifications: for notification in notifications:
@ -355,9 +272,3 @@ class MarkAllReadNotificationViewSet(BaseViewSet):
updated_notifications, ["read_at"], batch_size=100 updated_notifications, ["read_at"], batch_size=100
) )
return Response({"message": "Successful"}, status=status.HTTP_200_OK) return Response({"message": "Successful"}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -15,6 +15,7 @@ from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status from rest_framework import status
from sentry_sdk import capture_exception from sentry_sdk import capture_exception
# sso authentication # sso authentication
from google.oauth2 import id_token from google.oauth2 import id_token
from google.auth.transport import requests as google_auth_request from google.auth.transport import requests as google_auth_request
@ -186,14 +187,11 @@ class OauthEndpoint(BaseAPIView):
user.is_email_verified = email_verified user.is_email_verified = email_verified
user.save() user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user) access_token, refresh_token = get_tokens_for_user(user)
data = { data = {
"access_token": access_token, "access_token": access_token,
"refresh_token": refresh_token, "refresh_token": refresh_token,
"user": serialized_user,
} }
SocialLoginConnection.objects.update_or_create( SocialLoginConnection.objects.update_or_create(
@ -264,14 +262,11 @@ class OauthEndpoint(BaseAPIView):
user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now() user.token_updated_at = timezone.now()
user.save() user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user) access_token, refresh_token = get_tokens_for_user(user)
data = { data = {
"access_token": access_token, "access_token": access_token,
"refresh_token": refresh_token, "refresh_token": refresh_token,
"user": serialized_user,
"permissions": [],
} }
if settings.ANALYTICS_BASE_API: if settings.ANALYTICS_BASE_API:
_ = requests.post( _ = requests.post(
@ -304,11 +299,3 @@ class OauthEndpoint(BaseAPIView):
}, },
) )
return Response(data, status=status.HTTP_201_CREATED) return Response(data, status=status.HTTP_201_CREATED)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -2,7 +2,6 @@
from datetime import timedelta, datetime, date from datetime import timedelta, datetime, date
# Django imports # Django imports
from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, Prefetch from django.db.models import Exists, OuterRef, Q, Prefetch
from django.utils import timezone from django.utils import timezone
@ -78,7 +77,6 @@ class PageViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = PageSerializer( serializer = PageSerializer(
data=request.data, data=request.data,
context={"project_id": project_id, "owned_by_id": request.user.id}, context={"project_id": project_id, "owned_by_id": request.user.id},
@ -89,15 +87,7 @@ class PageViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
try:
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id) page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
# Only update access if the page owner is the requesting user # Only update access if the page owner is the requesting user
if ( if (
@ -115,19 +105,8 @@ class PageViewSet(BaseViewSet):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Page.DoesNotExist:
return Response(
{"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try:
queryset = self.get_queryset() queryset = self.get_queryset()
page_view = request.GET.get("page_view", False) page_view = request.GET.get("page_view", False)
@ -173,9 +152,7 @@ class PageViewSet(BaseViewSet):
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK) return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
class PageBlockViewSet(BaseViewSet): class PageBlockViewSet(BaseViewSet):
serializer_class = PageBlockSerializer serializer_class = PageBlockSerializer
@ -225,33 +202,13 @@ class PageFavoriteViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = PageFavoriteSerializer(data=request.data) serializer = PageFavoriteSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id) serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The page is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, page_id): def destroy(self, request, slug, project_id, page_id):
try:
page_favorite = PageFavorite.objects.get( page_favorite = PageFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
@ -260,18 +217,6 @@ class PageFavoriteViewSet(BaseViewSet):
) )
page_favorite.delete() page_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except PageFavorite.DoesNotExist:
return Response(
{"error": "Page is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class CreateIssueFromPageBlockEndpoint(BaseAPIView): class CreateIssueFromPageBlockEndpoint(BaseAPIView):
permission_classes = [ permission_classes = [
@ -279,7 +224,6 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id, page_id, page_block_id): def post(self, request, slug, project_id, page_id, page_block_id):
try:
page_block = PageBlock.objects.get( page_block = PageBlock.objects.get(
pk=page_block_id, pk=page_block_id,
workspace__slug=slug, workspace__slug=slug,
@ -309,13 +253,3 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
page_block.save() page_block.save()
return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK) return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
except PageBlock.DoesNotExist:
return Response(
{"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -1,11 +1,13 @@
# Python imports # Python imports
import jwt import jwt
import boto3
from datetime import datetime from datetime import datetime
# Django imports # Django imports
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.db import IntegrityError from django.db import IntegrityError
from django.db.models import ( from django.db.models import (
Prefetch,
Q, Q,
Exists, Exists,
OuterRef, OuterRef,
@ -28,6 +30,7 @@ from sentry_sdk import capture_exception
from .base import BaseViewSet, BaseAPIView from .base import BaseViewSet, BaseAPIView
from plane.api.serializers import ( from plane.api.serializers import (
ProjectSerializer, ProjectSerializer,
ProjectListSerializer,
ProjectMemberSerializer, ProjectMemberSerializer,
ProjectDetailSerializer, ProjectDetailSerializer,
ProjectMemberInviteSerializer, ProjectMemberInviteSerializer,
@ -85,12 +88,6 @@ class ProjectViewSet(BaseViewSet):
return ProjectDetailSerializer return ProjectDetailSerializer
def get_queryset(self): def get_queryset(self):
subquery = ProjectFavorite.objects.filter(
user=self.request.user,
project_id=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
)
return self.filter_queryset( return self.filter_queryset(
super() super()
.get_queryset() .get_queryset()
@ -99,7 +96,15 @@ class ProjectViewSet(BaseViewSet):
.select_related( .select_related(
"workspace", "workspace__owner", "default_assignee", "project_lead" "workspace", "workspace__owner", "default_assignee", "project_lead"
) )
.annotate(is_favorite=Exists(subquery)) .annotate(
is_favorite=Exists(
ProjectFavorite.objects.filter(
user=self.request.user,
project_id=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
)
)
)
.annotate( .annotate(
is_member=Exists( is_member=Exists(
ProjectMember.objects.filter( ProjectMember.objects.filter(
@ -147,13 +152,8 @@ class ProjectViewSet(BaseViewSet):
) )
def list(self, request, slug): def list(self, request, slug):
try: fields = [field for field in request.GET.get("fields", "").split(",") if field]
is_favorite = request.GET.get("is_favorite", "all")
subquery = ProjectFavorite.objects.filter(
user=self.request.user,
project_id=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
)
sort_order_query = ProjectMember.objects.filter( sort_order_query = ProjectMember.objects.filter(
member=request.user, member=request.user,
project_id=OuterRef("pk"), project_id=OuterRef("pk"),
@ -161,42 +161,30 @@ class ProjectViewSet(BaseViewSet):
).values("sort_order") ).values("sort_order")
projects = ( projects = (
self.get_queryset() self.get_queryset()
.annotate(is_favorite=Exists(subquery))
.annotate(sort_order=Subquery(sort_order_query)) .annotate(sort_order=Subquery(sort_order_query))
.prefetch_related(
Prefetch(
"project_projectmember",
queryset=ProjectMember.objects.filter(
workspace__slug=slug,
).select_related("member"),
)
)
.order_by("sort_order", "name") .order_by("sort_order", "name")
.annotate(
total_members=ProjectMember.objects.filter(
project_id=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
total_modules=Module.objects.filter(project_id=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
) )
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
return self.paginate(
request=request,
queryset=(projects),
on_results=lambda projects: ProjectListSerializer(
projects, many=True
).data,
) )
if is_favorite == "true":
projects = projects.filter(is_favorite=True)
if is_favorite == "false":
projects = projects.filter(is_favorite=False)
return Response(ProjectDetailSerializer(projects, many=True).data)
except Exception as e:
capture_exception(e)
return Response( return Response(
{"error": "Something went wrong please try again later"}, ProjectListSerializer(
status=status.HTTP_400_BAD_REQUEST, projects, many=True, fields=fields if fields else None
).data
) )
def create(self, request, slug): def create(self, request, slug):
@ -290,12 +278,6 @@ class ProjectViewSet(BaseViewSet):
{"name": "The project name is already taken"}, {"name": "The project name is already taken"},
status=status.HTTP_410_GONE, status=status.HTTP_410_GONE,
) )
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_410_GONE,
)
except Workspace.DoesNotExist as e: except Workspace.DoesNotExist as e:
return Response( return Response(
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
@ -305,12 +287,6 @@ class ProjectViewSet(BaseViewSet):
{"identifier": "The project identifier is already taken"}, {"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE, status=status.HTTP_410_GONE,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def partial_update(self, request, slug, pk=None): def partial_update(self, request, slug, pk=None):
try: try:
@ -359,12 +335,6 @@ class ProjectViewSet(BaseViewSet):
{"identifier": "The project identifier is already taken"}, {"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE, status=status.HTTP_410_GONE,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class InviteProjectEndpoint(BaseAPIView): class InviteProjectEndpoint(BaseAPIView):
@ -373,7 +343,6 @@ class InviteProjectEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
try:
email = request.data.get("email", False) email = request.data.get("email", False)
role = request.data.get("role", False) role = request.data.get("role", False)
@ -428,25 +397,6 @@ class InviteProjectEndpoint(BaseAPIView):
ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK
) )
except ValidationError:
return Response(
{
"error": "Invalid email address provided a valid email address is required to send the invite"
},
status=status.HTTP_400_BAD_REQUEST,
)
except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
return Response(
{"error": "Workspace or Project does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserProjectInvitationsViewset(BaseViewSet): class UserProjectInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer serializer_class = ProjectMemberInviteSerializer
@ -461,7 +411,6 @@ class UserProjectInvitationsViewset(BaseViewSet):
) )
def create(self, request): def create(self, request):
try:
invitations = request.data.get("invitations") invitations = request.data.get("invitations")
project_invitations = ProjectMemberInvite.objects.filter( project_invitations = ProjectMemberInvite.objects.filter(
pk__in=invitations, accepted=True pk__in=invitations, accepted=True
@ -483,19 +432,13 @@ class UserProjectInvitationsViewset(BaseViewSet):
project_invitations.delete() project_invitations.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectMemberViewSet(BaseViewSet): class ProjectMemberViewSet(BaseViewSet):
serializer_class = ProjectMemberAdminSerializer serializer_class = ProjectMemberAdminSerializer
model = ProjectMember model = ProjectMember
permission_classes = [ permission_classes = [
ProjectBasePermission, ProjectMemberPermission,
] ]
search_fields = [ search_fields = [
@ -516,7 +459,6 @@ class ProjectMemberViewSet(BaseViewSet):
) )
def partial_update(self, request, slug, project_id, pk): def partial_update(self, request, slug, project_id, pk):
try:
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
) )
@ -535,9 +477,7 @@ class ProjectMemberViewSet(BaseViewSet):
> requested_project_member.role > requested_project_member.role
): ):
return Response( return Response(
{ {"error": "You cannot update a role that is higher than your own role"},
"error": "You cannot update a role that is higher than your own role"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -549,20 +489,8 @@ class ProjectMemberViewSet(BaseViewSet):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except ProjectMember.DoesNotExist:
return Response(
{"error": "Project Member does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
try:
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk workspace__slug=slug, project_id=project_id, pk=pk
) )
@ -572,9 +500,7 @@ class ProjectMemberViewSet(BaseViewSet):
) )
if requesting_project_member.role < project_member.role: if requesting_project_member.role < project_member.role:
return Response( return Response(
{ {"error": "You cannot remove a user having role higher than yourself"},
"error": "You cannot remove a user having role higher than yourself"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -615,13 +541,6 @@ class ProjectMemberViewSet(BaseViewSet):
).delete() ).delete()
project_member.delete() project_member.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except ProjectMember.DoesNotExist:
return Response(
{"error": "Project Member does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response({"error": "Something went wrong please try again later"})
class AddMemberToProjectEndpoint(BaseAPIView): class AddMemberToProjectEndpoint(BaseAPIView):
@ -630,7 +549,6 @@ class AddMemberToProjectEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
try:
members = request.data.get("members", []) members = request.data.get("members", [])
# get the project # get the project
@ -656,8 +574,7 @@ class AddMemberToProjectEndpoint(BaseAPIView):
sort_order = [ sort_order = [
project_member.get("sort_order") project_member.get("sort_order")
for project_member in project_members for project_member in project_members
if str(project_member.get("member_id")) if str(project_member.get("member_id")) == str(member.get("member_id"))
== str(member.get("member_id"))
] ]
bulk_project_members.append( bulk_project_members.append(
ProjectMember( ProjectMember(
@ -676,27 +593,7 @@ class AddMemberToProjectEndpoint(BaseAPIView):
) )
serializer = ProjectMemberSerializer(project_members, many=True) serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
except KeyError:
return Response(
{"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST
)
except Project.DoesNotExist:
return Response(
{"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError:
return Response(
{"error": "User not member of the workspace"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class AddTeamToProjectEndpoint(BaseAPIView): class AddTeamToProjectEndpoint(BaseAPIView):
@ -705,7 +602,6 @@ class AddTeamToProjectEndpoint(BaseAPIView):
] ]
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
try:
team_members = TeamMember.objects.filter( team_members = TeamMember.objects.filter(
workspace__slug=slug, team__in=request.data.get("teams", []) workspace__slug=slug, team__in=request.data.get("teams", [])
).values_list("member", flat=True) ).values_list("member", flat=True)
@ -734,23 +630,6 @@ class AddTeamToProjectEndpoint(BaseAPIView):
serializer = ProjectMemberSerializer(project_members, many=True) serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The team with the name already exists"},
status=status.HTTP_410_GONE,
)
except Workspace.DoesNotExist:
return Response(
{"error": "The requested workspace could not be found"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectMemberInvitationsViewset(BaseViewSet): class ProjectMemberInvitationsViewset(BaseViewSet):
@ -799,7 +678,6 @@ class ProjectIdentifierEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
name = request.GET.get("name", "").strip().upper() name = request.GET.get("name", "").strip().upper()
if name == "": if name == "":
@ -815,15 +693,8 @@ class ProjectIdentifierEndpoint(BaseAPIView):
{"exists": len(exists), "identifiers": exists}, {"exists": len(exists), "identifiers": exists},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, slug): def delete(self, request, slug):
try:
name = request.data.get("name", "").strip().upper() name = request.data.get("name", "").strip().upper()
if name == "": if name == "":
@ -842,17 +713,10 @@ class ProjectIdentifierEndpoint(BaseAPIView):
return Response( return Response(
status=status.HTTP_204_NO_CONTENT, status=status.HTTP_204_NO_CONTENT,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectJoinEndpoint(BaseAPIView): class ProjectJoinEndpoint(BaseAPIView):
def post(self, request, slug): def post(self, request, slug):
try:
project_ids = request.data.get("project_ids", []) project_ids = request.data.get("project_ids", [])
# Get the workspace user role # Get the workspace user role
@ -883,22 +747,10 @@ class ProjectJoinEndpoint(BaseAPIView):
{"message": "Projects joined successfully"}, {"message": "Projects joined successfully"},
status=status.HTTP_201_CREATED, status=status.HTTP_201_CREATED,
) )
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "User is not a member of workspace"},
status=status.HTTP_403_FORBIDDEN,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectUserViewsEndpoint(BaseAPIView): class ProjectUserViewsEndpoint(BaseAPIView):
def post(self, request, slug, project_id): def post(self, request, slug, project_id):
try:
project = Project.objects.get(pk=project_id, workspace__slug=slug) project = Project.objects.get(pk=project_id, workspace__slug=slug)
project_member = ProjectMember.objects.filter( project_member = ProjectMember.objects.filter(
@ -906,9 +758,7 @@ class ProjectUserViewsEndpoint(BaseAPIView):
).first() ).first()
if project_member is None: if project_member is None:
return Response( return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
{"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN
)
view_props = project_member.view_props view_props = project_member.view_props
default_props = project_member.default_props default_props = project_member.default_props
@ -916,30 +766,17 @@ class ProjectUserViewsEndpoint(BaseAPIView):
sort_order = project_member.sort_order sort_order = project_member.sort_order
project_member.view_props = request.data.get("view_props", view_props) project_member.view_props = request.data.get("view_props", view_props)
project_member.default_props = request.data.get( project_member.default_props = request.data.get("default_props", default_props)
"default_props", default_props
)
project_member.preferences = request.data.get("preferences", preferences) project_member.preferences = request.data.get("preferences", preferences)
project_member.sort_order = request.data.get("sort_order", sort_order) project_member.sort_order = request.data.get("sort_order", sort_order)
project_member.save() project_member.save()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Project.DoesNotExist:
return Response(
{"error": "The requested resource does not exists"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectMemberUserEndpoint(BaseAPIView): class ProjectMemberUserEndpoint(BaseAPIView):
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
try:
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
project_id=project_id, workspace__slug=slug, member=request.user project_id=project_id, workspace__slug=slug, member=request.user
) )
@ -947,18 +784,6 @@ class ProjectMemberUserEndpoint(BaseAPIView):
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except ProjectMember.DoesNotExist:
return Response(
{"error": "User not a member of the project"},
status=status.HTTP_403_FORBIDDEN,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectFavoritesViewSet(BaseViewSet): class ProjectFavoritesViewSet(BaseViewSet):
serializer_class = ProjectFavoriteSerializer serializer_class = ProjectFavoriteSerializer
@ -980,50 +805,18 @@ class ProjectFavoritesViewSet(BaseViewSet):
serializer.save(user=self.request.user) serializer.save(user=self.request.user)
def create(self, request, slug): def create(self, request, slug):
try:
serializer = ProjectFavoriteSerializer(data=request.data) serializer = ProjectFavoriteSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(user=request.user) serializer.save(user=request.user)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
print(str(e))
if "already exists" in str(e):
return Response(
{"error": "The project is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_410_GONE,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id): def destroy(self, request, slug, project_id):
try:
project_favorite = ProjectFavorite.objects.get( project_favorite = ProjectFavorite.objects.get(
project=project_id, user=request.user, workspace__slug=slug project=project_id, user=request.user, workspace__slug=slug
) )
project_favorite.delete() project_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except ProjectFavorite.DoesNotExist:
return Response(
{"error": "Project is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectDeployBoardViewSet(BaseViewSet): class ProjectDeployBoardViewSet(BaseViewSet):
@ -1045,7 +838,6 @@ class ProjectDeployBoardViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
comments = request.data.get("comments", False) comments = request.data.get("comments", False)
reactions = request.data.get("reactions", False) reactions = request.data.get("reactions", False)
inbox = request.data.get("inbox", None) inbox = request.data.get("inbox", None)
@ -1075,12 +867,6 @@ class ProjectDeployBoardViewSet(BaseViewSet):
serializer = ProjectDeployBoardSerializer(project_deploy_board) serializer = ProjectDeployBoardSerializer(project_deploy_board)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectMemberEndpoint(BaseAPIView): class ProjectMemberEndpoint(BaseAPIView):
@ -1089,20 +875,13 @@ class ProjectMemberEndpoint(BaseAPIView):
] ]
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
try:
project_members = ProjectMember.objects.filter( project_members = ProjectMember.objects.filter(
project_id=project_id, project_id=project_id,
workspace__slug=slug, workspace__slug=slug,
member__is_bot=False, member__is_bot=False,
).select_related("project", "member") ).select_related("project", "member", "workspace")
serializer = ProjectMemberSerializer(project_members, many=True) serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
@ -1111,23 +890,11 @@ class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
] ]
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
try:
project_deploy_board = ProjectDeployBoard.objects.get( project_deploy_board = ProjectDeployBoard.objects.get(
workspace__slug=slug, project_id=project_id workspace__slug=slug, project_id=project_id
) )
serializer = ProjectDeployBoardSerializer(project_deploy_board) serializer = ProjectDeployBoardSerializer(project_deploy_board)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except ProjectDeployBoard.DoesNotExist:
return Response(
{"error": "Project Deploy Board does not exists"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
@ -1136,7 +903,6 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
projects = ( projects = (
Project.objects.filter(workspace__slug=slug) Project.objects.filter(workspace__slug=slug)
.annotate( .annotate(
@ -1158,12 +924,6 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
) )
return Response(projects, status=status.HTTP_200_OK) return Response(projects, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class LeaveProjectEndpoint(BaseAPIView): class LeaveProjectEndpoint(BaseAPIView):
@ -1172,7 +932,6 @@ class LeaveProjectEndpoint(BaseAPIView):
] ]
def delete(self, request, slug, project_id): def delete(self, request, slug, project_id):
try:
project_member = ProjectMember.objects.get( project_member = ProjectMember.objects.get(
workspace__slug=slug, workspace__slug=slug,
member=request.user, member=request.user,
@ -1198,14 +957,34 @@ class LeaveProjectEndpoint(BaseAPIView):
# Delete the member from workspace # Delete the member from workspace
project_member.delete() project_member.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except ProjectMember.DoesNotExist:
return Response(
{"error": "Workspace member does not exists"}, class ProjectPublicCoverImagesEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST, permission_classes = [
AllowAny,
]
def get(self, request):
files = []
s3 = boto3.client(
"s3",
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
) )
except Exception as e: params = {
capture_exception(e) "Bucket": settings.AWS_S3_BUCKET_NAME,
return Response( "Prefix": "static/project-cover/",
{"error": "Something went wrong please try again later"}, }
status=status.HTTP_400_BAD_REQUEST,
response = s3.list_objects_v2(**params)
# Extracting file keys from the response
if "Contents" in response:
for content in response["Contents"]:
if not content["Key"].endswith(
"/"
): # This line ensures we're only getting files, not "sub-folders"
files.append(
f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
) )
return Response(files, status=status.HTTP_200_OK)

View File

@ -1,21 +0,0 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception
# Module imports
from .base import BaseAPIView
from plane.utils.integrations.github import get_release_notes
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
try:
release_notes = get_release_notes()
return Response(release_notes, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -168,7 +168,6 @@ class GlobalSearchEndpoint(BaseAPIView):
) )
def get(self, request, slug): def get(self, request, slug):
try:
query = request.query_params.get("search", False) query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false") workspace_search = request.query_params.get("workspace_search", "false")
project_id = request.query_params.get("project_id", False) project_id = request.query_params.get("project_id", False)
@ -206,17 +205,9 @@ class GlobalSearchEndpoint(BaseAPIView):
results[model] = func(query, slug, project_id, workspace_search) results[model] = func(query, slug, project_id, workspace_search)
return Response({"results": results}, status=status.HTTP_200_OK) return Response({"results": results}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class IssueSearchEndpoint(BaseAPIView): class IssueSearchEndpoint(BaseAPIView):
def get(self, request, slug, project_id): def get(self, request, slug, project_id):
try:
query = request.query_params.get("search", False) query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false") workspace_search = request.query_params.get("workspace_search", "false")
parent = request.query_params.get("parent", "false") parent = request.query_params.get("parent", "false")
@ -281,13 +272,3 @@ class IssueSearchEndpoint(BaseAPIView):
), ),
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Issue.DoesNotExist:
return Response(
{"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
print(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -2,7 +2,6 @@
from itertools import groupby from itertools import groupby
# Django imports # Django imports
from django.db import IntegrityError
from django.db.models import Q from django.db.models import Q
# Third party imports # Third party imports
@ -41,26 +40,13 @@ class StateViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = StateSerializer(data=request.data) serializer = StateSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(project_id=project_id) serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError:
return Response(
{"error": "State with the name already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
try:
state_dict = dict() state_dict = dict()
states = StateSerializer(self.get_queryset(), many=True).data states = StateSerializer(self.get_queryset(), many=True).data
@ -71,15 +57,8 @@ class StateViewSet(BaseViewSet):
state_dict[str(key)] = list(value) state_dict[str(key)] = list(value)
return Response(state_dict, status=status.HTTP_200_OK) return Response(state_dict, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
try:
state = State.objects.get( state = State.objects.get(
~Q(name="Triage"), ~Q(name="Triage"),
pk=pk, project_id=project_id, workspace__slug=slug, pk=pk, project_id=project_id, workspace__slug=slug,
@ -103,5 +82,3 @@ class StateViewSet(BaseViewSet):
state.delete() state.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except State.DoesNotExist:
return Response({"error": "State does not exists"}, status=status.HTTP_404)

View File

@ -8,6 +8,8 @@ from sentry_sdk import capture_exception
from plane.api.serializers import ( from plane.api.serializers import (
UserSerializer, UserSerializer,
IssueActivitySerializer, IssueActivitySerializer,
UserMeSerializer,
UserMeSettingsSerializer,
) )
from plane.api.views.base import BaseViewSet, BaseAPIView from plane.api.views.base import BaseViewSet, BaseAPIView
@ -30,115 +32,35 @@ class UserEndpoint(BaseViewSet):
return self.request.user return self.request.user
def retrieve(self, request): def retrieve(self, request):
try: serialized_data = UserMeSerializer(request.user).data
workspace = Workspace.objects.get(
pk=request.user.last_workspace_id, workspace_member__member=request.user
)
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=request.user.email
).count()
assigned_issues = Issue.issue_objects.filter(
assignees__in=[request.user]
).count()
serialized_data = UserSerializer(request.user).data
serialized_data["workspace"] = {
"last_workspace_id": request.user.last_workspace_id,
"last_workspace_slug": workspace.slug,
"fallback_workspace_id": request.user.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"invites": workspace_invites,
}
serialized_data.setdefault("issues", {})[
"assigned_issues"
] = assigned_issues
return Response( return Response(
serialized_data, serialized_data,
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Workspace.DoesNotExist:
# This exception will be hit even when the `last_workspace_id` is None
workspace_invites = WorkspaceMemberInvite.objects.filter( def retrieve_user_settings(self, request):
email=request.user.email serialized_data = UserMeSettingsSerializer(request.user).data
).count() return Response(serialized_data, status=status.HTTP_200_OK)
assigned_issues = Issue.issue_objects.filter(
assignees__in=[request.user]
).count()
fallback_workspace = (
Workspace.objects.filter(workspace_member__member=request.user)
.order_by("created_at")
.first()
)
serialized_data = UserSerializer(request.user).data
serialized_data["workspace"] = {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
serialized_data.setdefault("issues", {})[
"assigned_issues"
] = assigned_issues
return Response(
serialized_data,
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UpdateUserOnBoardedEndpoint(BaseAPIView): class UpdateUserOnBoardedEndpoint(BaseAPIView):
def patch(self, request): def patch(self, request):
try:
user = User.objects.get(pk=request.user.id) user = User.objects.get(pk=request.user.id)
user.is_onboarded = request.data.get("is_onboarded", False) user.is_onboarded = request.data.get("is_onboarded", False)
user.save() user.save()
return Response( return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UpdateUserTourCompletedEndpoint(BaseAPIView): class UpdateUserTourCompletedEndpoint(BaseAPIView):
def patch(self, request): def patch(self, request):
try:
user = User.objects.get(pk=request.user.id) user = User.objects.get(pk=request.user.id)
user.is_tour_completed = request.data.get("is_tour_completed", False) user.is_tour_completed = request.data.get("is_tour_completed", False)
user.save() user.save()
return Response( return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserActivityEndpoint(BaseAPIView, BasePaginator): class UserActivityEndpoint(BaseAPIView, BasePaginator):
def get(self, request, slug): def get(self, request, slug):
try:
queryset = IssueActivity.objects.filter( queryset = IssueActivity.objects.filter(
actor=request.user, workspace__slug=slug actor=request.user, workspace__slug=slug
).select_related("actor", "workspace", "issue", "project") ).select_related("actor", "workspace", "issue", "project")
@ -150,9 +72,3 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
issue_activities, many=True issue_activities, many=True
).data, ).data,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -13,7 +13,6 @@ from django.db.models import (
) )
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
from django.db.models import Prefetch, OuterRef, Exists from django.db.models import Prefetch, OuterRef, Exists
# Third party imports # Third party imports
@ -61,7 +60,7 @@ class GlobalViewViewSet(BaseViewSet):
.get_queryset() .get_queryset()
.filter(workspace__slug=self.kwargs.get("slug")) .filter(workspace__slug=self.kwargs.get("slug"))
.select_related("workspace") .select_related("workspace")
.order_by("-created_at") .order_by(self.request.GET.get("order_by", "-created_at"))
.distinct() .distinct()
) )
@ -97,7 +96,6 @@ class GlobalViewIssuesViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug): def list(self, request, slug):
try:
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state # Custom ordering for priority and state
@ -204,13 +202,6 @@ class GlobalViewIssuesViewSet(BaseViewSet):
return Response(issues, status=status.HTTP_200_OK) return Response(issues, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class IssueViewViewSet(BaseViewSet): class IssueViewViewSet(BaseViewSet):
serializer_class = IssueViewSerializer serializer_class = IssueViewSerializer
@ -243,51 +234,6 @@ class IssueViewViewSet(BaseViewSet):
) )
class ViewIssuesEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id, view_id):
try:
view = IssueView.objects.get(pk=view_id)
queries = view.query
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(
**queries, project_id=project_id, workspace__slug=slug
)
.filter(**filters)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.prefetch_related(
Prefetch(
"issue_reactions",
queryset=IssueReaction.objects.select_related("actor"),
)
)
)
serializer = IssueLiteSerializer(issues, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
except IssueView.DoesNotExist:
return Response(
{"error": "Issue View does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class IssueViewFavoriteViewSet(BaseViewSet): class IssueViewFavoriteViewSet(BaseViewSet):
serializer_class = IssueViewFavoriteSerializer serializer_class = IssueViewFavoriteSerializer
model = IssueViewFavorite model = IssueViewFavorite
@ -302,33 +248,13 @@ class IssueViewFavoriteViewSet(BaseViewSet):
) )
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
try:
serializer = IssueViewFavoriteSerializer(data=request.data) serializer = IssueViewFavoriteSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id) serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The view is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, view_id): def destroy(self, request, slug, project_id, view_id):
try:
view_favourite = IssueViewFavorite.objects.get( view_favourite = IssueViewFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
@ -337,14 +263,3 @@ class IssueViewFavoriteViewSet(BaseViewSet):
) )
view_favourite.delete() view_favourite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except IssueViewFavorite.DoesNotExist:
return Response(
{"error": "View is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -48,6 +48,7 @@ from plane.api.serializers import (
IssueActivitySerializer, IssueActivitySerializer,
IssueLiteSerializer, IssueLiteSerializer,
WorkspaceMemberAdminSerializer, WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
) )
from plane.api.views.base import BaseAPIView from plane.api.views.base import BaseAPIView
from . import BaseViewSet from . import BaseViewSet
@ -164,23 +165,12 @@ class WorkSpaceViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
## Handling unique integrity error for now
## TODO: Extend this to handle other common errors which are not automatically handled by APIException
except IntegrityError as e: except IntegrityError as e:
if "already exists" in str(e): if "already exists" in str(e):
return Response( return Response(
{"slug": "The workspace with the slug already exists"}, {"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE, status=status.HTTP_410_GONE,
) )
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong please try again later",
"identifier": None,
},
status=status.HTTP_400_BAD_REQUEST,
)
class UserWorkSpacesEndpoint(BaseAPIView): class UserWorkSpacesEndpoint(BaseAPIView):
@ -192,7 +182,6 @@ class UserWorkSpacesEndpoint(BaseAPIView):
] ]
def get(self, request): def get(self, request):
try:
member_count = ( member_count = (
WorkspaceMember.objects.filter( WorkspaceMember.objects.filter(
workspace=OuterRef("id"), member__is_bot=False workspace=OuterRef("id"), member__is_bot=False
@ -212,9 +201,7 @@ class UserWorkSpacesEndpoint(BaseAPIView):
workspace = ( workspace = (
( (
Workspace.objects.prefetch_related( Workspace.objects.prefetch_related(
Prefetch( Prefetch("workspace_member", queryset=WorkspaceMember.objects.all())
"workspace_member", queryset=WorkspaceMember.objects.all()
)
) )
.filter( .filter(
workspace_member__member=request.user, workspace_member__member=request.user,
@ -228,17 +215,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True) serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView):
def get(self, request): def get(self, request):
try:
slug = request.GET.get("slug", False) slug = request.GET.get("slug", False)
if not slug or slug == "": if not slug or slug == "":
@ -249,12 +228,6 @@ class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView):
workspace = Workspace.objects.filter(slug=slug).exists() workspace = Workspace.objects.filter(slug=slug).exists()
return Response({"status": not workspace}, status=status.HTTP_200_OK) return Response({"status": not workspace}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class InviteWorkspaceEndpoint(BaseAPIView): class InviteWorkspaceEndpoint(BaseAPIView):
@ -263,7 +236,6 @@ class InviteWorkspaceEndpoint(BaseAPIView):
] ]
def post(self, request, slug): def post(self, request, slug):
try:
emails = request.data.get("emails", False) emails = request.data.get("emails", False)
# Check if email is provided # Check if email is provided
if not emails or not len(emails): if not emails or not len(emails):
@ -372,18 +344,6 @@ class InviteWorkspaceEndpoint(BaseAPIView):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Workspace.DoesNotExist:
return Response(
{"error": "Workspace does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class JoinWorkspaceEndpoint(BaseAPIView): class JoinWorkspaceEndpoint(BaseAPIView):
permission_classes = [ permission_classes = [
@ -391,7 +351,6 @@ class JoinWorkspaceEndpoint(BaseAPIView):
] ]
def post(self, request, slug, pk): def post(self, request, slug, pk):
try:
workspace_invite = WorkspaceMemberInvite.objects.get( workspace_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug pk=pk, workspace__slug=slug
) )
@ -442,18 +401,6 @@ class JoinWorkspaceEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
except WorkspaceMemberInvite.DoesNotExist:
return Response(
{"error": "The invitation either got expired or could not be found"},
status=status.HTTP_404_NOT_FOUND,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceInvitationsViewset(BaseViewSet): class WorkspaceInvitationsViewset(BaseViewSet):
serializer_class = WorkSpaceMemberInviteSerializer serializer_class = WorkSpaceMemberInviteSerializer
@ -472,7 +419,6 @@ class WorkspaceInvitationsViewset(BaseViewSet):
) )
def destroy(self, request, slug, pk): def destroy(self, request, slug, pk):
try:
workspace_member_invite = WorkspaceMemberInvite.objects.get( workspace_member_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug pk=pk, workspace__slug=slug
) )
@ -483,17 +429,6 @@ class WorkspaceInvitationsViewset(BaseViewSet):
user.delete() user.delete()
workspace_member_invite.delete() workspace_member_invite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceMemberInvite.DoesNotExist:
return Response(
{"error": "Workspace member invite does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserWorkspaceInvitationsEndpoint(BaseViewSet): class UserWorkspaceInvitationsEndpoint(BaseViewSet):
@ -510,11 +445,8 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
) )
def create(self, request): def create(self, request):
try:
invitations = request.data.get("invitations") invitations = request.data.get("invitations")
workspace_invitations = WorkspaceMemberInvite.objects.filter( workspace_invitations = WorkspaceMemberInvite.objects.filter(pk__in=invitations)
pk__in=invitations
)
WorkspaceMember.objects.bulk_create( WorkspaceMember.objects.bulk_create(
[ [
@ -533,12 +465,6 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
workspace_invitations.delete() workspace_invitations.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkSpaceMemberViewSet(BaseViewSet): class WorkSpaceMemberViewSet(BaseViewSet):
@ -564,7 +490,6 @@ class WorkSpaceMemberViewSet(BaseViewSet):
) )
def partial_update(self, request, slug, pk): def partial_update(self, request, slug, pk):
try:
workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug) workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
if request.user.id == workspace_member.member_id: if request.user.id == workspace_member.member_id:
return Response( return Response(
@ -584,9 +509,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
> requested_workspace_member.role > requested_workspace_member.role
): ):
return Response( return Response(
{ {"error": "You cannot update a role that is higher than your own role"},
"error": "You cannot update a role that is higher than your own role"
},
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
@ -598,20 +521,8 @@ class WorkSpaceMemberViewSet(BaseViewSet):
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "Workspace Member does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, pk): def destroy(self, request, slug, pk):
try:
# Check the user role who is deleting the user # Check the user role who is deleting the user
workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk) workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk)
@ -676,17 +587,6 @@ class WorkSpaceMemberViewSet(BaseViewSet):
workspace_member.delete() workspace_member.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "Workspace Member does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class TeamMemberViewSet(BaseViewSet): class TeamMemberViewSet(BaseViewSet):
@ -711,7 +611,6 @@ class TeamMemberViewSet(BaseViewSet):
) )
def create(self, request, slug): def create(self, request, slug):
try:
members = list( members = list(
WorkspaceMember.objects.filter( WorkspaceMember.objects.filter(
workspace__slug=slug, member__id__in=request.data.get("members", []) workspace__slug=slug, member__id__in=request.data.get("members", [])
@ -736,25 +635,11 @@ class TeamMemberViewSet(BaseViewSet):
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
serializer = TeamSerializer( serializer = TeamSerializer(data=request.data, context={"workspace": workspace})
data=request.data, context={"workspace": workspace}
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The team with the name already exists"},
status=status.HTTP_410_GONE,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserWorkspaceInvitationEndpoint(BaseViewSet): class UserWorkspaceInvitationEndpoint(BaseViewSet):
@ -776,7 +661,6 @@ class UserWorkspaceInvitationEndpoint(BaseViewSet):
class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
def get(self, request): def get(self, request):
try:
user = User.objects.get(pk=request.user.id) user = User.objects.get(pk=request.user.id)
last_workspace_id = user.last_workspace_id last_workspace_id = user.last_workspace_id
@ -797,9 +681,7 @@ class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
workspace_id=last_workspace_id, member=request.user workspace_id=last_workspace_id, member=request.user
).select_related("workspace", "project", "member", "workspace__owner") ).select_related("workspace", "project", "member", "workspace__owner")
project_member_serializer = ProjectMemberSerializer( project_member_serializer = ProjectMemberSerializer(project_member, many=True)
project_member, many=True
)
return Response( return Response(
{ {
@ -809,37 +691,18 @@ class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except User.DoesNotExist:
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceMemberUserEndpoint(BaseAPIView): class WorkspaceMemberUserEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
try:
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
member=request.user, workspace__slug=slug member=request.user, workspace__slug=slug
) )
serializer = WorkSpaceMemberSerializer(workspace_member) serializer = WorkspaceMemberMeSerializer(workspace_member)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist):
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceMemberUserViewsEndpoint(BaseAPIView): class WorkspaceMemberUserViewsEndpoint(BaseAPIView):
def post(self, request, slug): def post(self, request, slug):
try:
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
workspace__slug=slug, member=request.user workspace__slug=slug, member=request.user
) )
@ -847,22 +710,10 @@ class WorkspaceMemberUserViewsEndpoint(BaseAPIView):
workspace_member.save() workspace_member.save()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "User not a member of workspace"},
status=status.HTTP_403_FORBIDDEN,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserActivityGraphEndpoint(BaseAPIView): class UserActivityGraphEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
try:
issue_activities = ( issue_activities = (
IssueActivity.objects.filter( IssueActivity.objects.filter(
actor=request.user, actor=request.user,
@ -876,17 +727,10 @@ class UserActivityGraphEndpoint(BaseAPIView):
) )
return Response(issue_activities, status=status.HTTP_200_OK) return Response(issue_activities, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class UserIssueCompletedGraphEndpoint(BaseAPIView): class UserIssueCompletedGraphEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
try:
month = request.GET.get("month", 1) month = request.GET.get("month", 1)
issues = ( issues = (
@ -904,12 +748,6 @@ class UserIssueCompletedGraphEndpoint(BaseAPIView):
) )
return Response(issues, status=status.HTTP_200_OK) return Response(issues, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WeekInMonth(Func): class WeekInMonth(Func):
@ -919,7 +757,6 @@ class WeekInMonth(Func):
class UserWorkspaceDashboardEndpoint(BaseAPIView): class UserWorkspaceDashboardEndpoint(BaseAPIView):
def get(self, request, slug): def get(self, request, slug):
try:
issue_activities = ( issue_activities = (
IssueActivity.objects.filter( IssueActivity.objects.filter(
actor=request.user, actor=request.user,
@ -1015,13 +852,6 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceThemeViewSet(BaseViewSet): class WorkspaceThemeViewSet(BaseViewSet):
permission_classes = [ permission_classes = [
@ -1034,29 +864,16 @@ class WorkspaceThemeViewSet(BaseViewSet):
return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
def create(self, request, slug): def create(self, request, slug):
try:
workspace = Workspace.objects.get(slug=slug) workspace = Workspace.objects.get(slug=slug)
serializer = WorkspaceThemeSerializer(data=request.data) serializer = WorkspaceThemeSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
serializer.save(workspace=workspace, actor=request.user) serializer.save(workspace=workspace, actor=request.user)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Workspace.DoesNotExist:
return Response(
{"error": "Workspace does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceUserProfileStatsEndpoint(BaseAPIView): class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
try:
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
state_distribution = ( state_distribution = (
@ -1179,12 +996,6 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
"upcoming_cycles": upcoming_cycles, "upcoming_cycles": upcoming_cycles,
} }
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceUserActivityEndpoint(BaseAPIView): class WorkspaceUserActivityEndpoint(BaseAPIView):
@ -1193,11 +1004,10 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
] ]
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
try:
projects = request.query_params.getlist("project", []) projects = request.query_params.getlist("project", [])
queryset = IssueActivity.objects.filter( queryset = IssueActivity.objects.filter(
~Q(field__in=["comment", "vote", "reaction"]), ~Q(field__in=["comment", "vote", "reaction", "draft"]),
workspace__slug=slug, workspace__slug=slug,
project__project_projectmember__member=request.user, project__project_projectmember__member=request.user,
actor=user_id, actor=user_id,
@ -1213,17 +1023,10 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
issue_activities, many=True issue_activities, many=True
).data, ).data,
) )
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceUserProfileEndpoint(BaseAPIView): class WorkspaceUserProfileEndpoint(BaseAPIView):
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
try:
user_data = User.objects.get(pk=user_id) user_data = User.objects.get(pk=user_id)
requesting_workspace_member = WorkspaceMember.objects.get( requesting_workspace_member = WorkspaceMember.objects.get(
@ -1311,14 +1114,6 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
}, },
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
except WorkspaceMember.DoesNotExist:
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
@ -1327,7 +1122,6 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
] ]
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
try:
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state # Custom ordering for priority and state
@ -1366,9 +1160,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
.values("count") .values("count")
) )
.annotate( .annotate(
attachment_count=IssueAttachment.objects.filter( attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
issue=OuterRef("id")
)
.order_by() .order_by()
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
@ -1378,9 +1170,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
# Priority Ordering # Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority": if order_by_param == "priority" or order_by_param == "-priority":
priority_order = ( priority_order = (
priority_order priority_order if order_by_param == "priority" else priority_order[::-1]
if order_by_param == "priority"
else priority_order[::-1]
) )
issue_queryset = issue_queryset.annotate( issue_queryset = issue_queryset.annotate(
priority_order=Case( priority_order=Case(
@ -1438,17 +1228,9 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
## Grouping the results ## Grouping the results
group_by = request.GET.get("group_by", False) group_by = request.GET.get("group_by", False)
if group_by: if group_by:
return Response( return Response(group_results(issues, group_by), status=status.HTTP_200_OK)
group_results(issues, group_by), status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK) return Response(issues, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceLabelsEndpoint(BaseAPIView): class WorkspaceLabelsEndpoint(BaseAPIView):
@ -1457,18 +1239,11 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
labels = Label.objects.filter( labels = Label.objects.filter(
workspace__slug=slug, workspace__slug=slug,
project__project_projectmember__member=request.user, project__project_projectmember__member=request.user,
).values("parent", "name", "color", "id", "project_id", "workspace__slug") ).values("parent", "name", "color", "id", "project_id", "workspace__slug")
return Response(labels, status=status.HTTP_200_OK) return Response(labels, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class WorkspaceMembersEndpoint(BaseAPIView): class WorkspaceMembersEndpoint(BaseAPIView):
@ -1477,19 +1252,12 @@ class WorkspaceMembersEndpoint(BaseAPIView):
] ]
def get(self, request, slug): def get(self, request, slug):
try:
workspace_members = WorkspaceMember.objects.filter( workspace_members = WorkspaceMember.objects.filter(
workspace__slug=slug, workspace__slug=slug,
member__is_bot=False, member__is_bot=False,
).select_related("workspace", "member") ).select_related("workspace", "member")
serialzier = WorkSpaceMemberSerializer(workspace_members, many=True) serialzier = WorkSpaceMemberSerializer(workspace_members, many=True)
return Response(serialzier.data, status=status.HTTP_200_OK) return Response(serialzier.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class LeaveWorkspaceEndpoint(BaseAPIView): class LeaveWorkspaceEndpoint(BaseAPIView):
@ -1498,7 +1266,6 @@ class LeaveWorkspaceEndpoint(BaseAPIView):
] ]
def delete(self, request, slug): def delete(self, request, slug):
try:
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
workspace__slug=slug, member=request.user workspace__slug=slug, member=request.user
) )
@ -1506,9 +1273,7 @@ class LeaveWorkspaceEndpoint(BaseAPIView):
# Only Admin case # Only Admin case
if ( if (
workspace_member.role == 20 workspace_member.role == 20
and WorkspaceMember.objects.filter( and WorkspaceMember.objects.filter(workspace__slug=slug, role=20).count()
workspace__slug=slug, role=20
).count()
== 1 == 1
): ):
return Response( return Response(
@ -1520,14 +1285,3 @@ class LeaveWorkspaceEndpoint(BaseAPIView):
# Delete the member from workspace # Delete the member from workspace
workspace_member.delete() workspace_member.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "Workspace member does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@ -20,8 +20,8 @@ from plane.utils.issue_filters import issue_filters
row_mapping = { row_mapping = {
"state__name": "State", "state__name": "State",
"state__group": "State Group", "state__group": "State Group",
"labels__name": "Label", "labels__id": "Label",
"assignees__display_name": "Assignee Name", "assignees__id": "Assignee Name",
"start_date": "Start Date", "start_date": "Start Date",
"target_date": "Due Date", "target_date": "Due Date",
"completed_at": "Completed At", "completed_at": "Completed At",
@ -29,8 +29,321 @@ row_mapping = {
"issue_count": "Issue Count", "issue_count": "Issue Count",
"priority": "Priority", "priority": "Priority",
"estimate": "Estimate", "estimate": "Estimate",
"issue_cycle__cycle_id": "Cycle",
"issue_module__module_id": "Module"
} }
ASSIGNEE_ID = "assignees__id"
LABEL_ID = "labels__id"
STATE_ID = "state_id"
CYCLE_ID = "issue_cycle__cycle_id"
MODULE_ID = "issue_module__module_id"
def send_export_email(email, slug, csv_buffer):
"""Helper function to send export email."""
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, [email])
msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
msg.send(fail_silently=False)
def get_assignee_details(slug, filters):
"""Fetch assignee details if required."""
return (
Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False
)
.distinct("assignees__id")
.order_by("assignees__id")
.values(
"assignees__avatar",
"assignees__display_name",
"assignees__first_name",
"assignees__last_name",
"assignees__id",
)
)
def get_label_details(slug, filters):
"""Fetch label details if required"""
return (
Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False)
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
)
def get_state_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
)
.distinct("state_id")
.order_by("state_id")
.values("state_id", "state__name", "state__color")
)
def get_module_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_module__module_id__isnull=False,
)
.distinct("issue_module__module_id")
.order_by("issue_module__module_id")
.values(
"issue_module__module_id",
"issue_module__module__name",
)
)
def get_cycle_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_cycle__cycle_id__isnull=False,
)
.distinct("issue_cycle__cycle_id")
.order_by("issue_cycle__cycle_id")
.values(
"issue_cycle__cycle_id",
"issue_cycle__cycle__name",
)
)
def generate_csv_from_rows(rows):
"""Generate CSV buffer from rows."""
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
[writer.writerow(row) for row in rows]
return csv_buffer
def generate_segmented_rows(
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
segment_zero = list(
set(
item.get("segment") for sublist in distribution.values() for item in sublist
)
)
segmented = segment
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
] + segment_zero
rows = []
for item, data in distribution.items():
generated_row = [
item,
sum(obj.get(key) for obj in data if obj.get(key) is not None),
]
for segment in segment_zero:
value = next((x.get(key) for x in data if x.get("segment") == segment), "0")
generated_row.append(value)
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
generated_row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
generated_row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
generated_row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
generated_row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
generated_row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(generated_row))
if segmented == ASSIGNEE_ID:
for index, segm in enumerate(row_zero[2:]):
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(segm)
),
None,
)
if assignee:
row_zero[
index + 2
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)),
None,
)
if label:
row_zero[index + 2] = label["labels__name"]
if segmented == STATE_ID:
for index, segm in enumerate(row_zero[2:]):
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(segm)),
None,
)
if state:
row_zero[index + 2] = state["state__name"]
if segmented == MODULE_ID:
for index, segm in enumerate(row_zero[2:]):
module = next(
(mod for mod in label_details if str(mod[MODULE_ID]) == str(segm)),
None,
)
if module:
row_zero[index + 2] = module["issue_module__module__name"]
if segmented == CYCLE_ID:
for index, segm in enumerate(row_zero[2:]):
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)),
None,
)
if cycle:
row_zero[index + 2] = cycle["issue_cycle__cycle__name"]
return [tuple(row_zero)] + rows
def generate_non_segmented_rows(
distribution,
x_axis,
y_axis,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
rows = []
for item, data in distribution.items():
row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")]
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(row))
row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")]
return [tuple(row_zero)] + rows
@shared_task @shared_task
def analytic_export_task(email, data, slug): def analytic_export_task(email, data, slug):
@ -43,134 +356,70 @@ def analytic_export_task(email, data, slug):
segment = data.get("segment", False) segment = data.get("segment", False)
distribution = build_graph_plot( distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
) )
key = "count" if y_axis == "issue_count" else "estimate" key = "count" if y_axis == "issue_count" else "estimate"
segmented = segment
assignee_details = {}
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = ( assignee_details = (
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) get_assignee_details(slug, filters)
.order_by("assignees__id") if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID
.distinct("assignees__id") else {}
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") )
label_details = (
get_label_details(slug, filters)
if x_axis == LABEL_ID or segment == LABEL_ID
else {}
)
state_details = (
get_state_details(slug, filters)
if x_axis == STATE_ID or segment == STATE_ID
else {}
)
cycle_details = (
get_cycle_details(slug, filters)
if x_axis == CYCLE_ID or segment == CYCLE_ID
else {}
)
module_details = (
get_module_details(slug, filters)
if x_axis == MODULE_ID or segment == MODULE_ID
else {}
) )
if segment: if segment:
segment_zero = [] rows = generate_segmented_rows(
for item in distribution: distribution,
current_dict = distribution.get(item) x_axis,
for current in current_dict: y_axis,
segment_zero.append(current.get("segment")) segment,
key,
segment_zero = list(set(segment_zero)) assignee_details,
row_zero = ( label_details,
[ state_details,
row_mapping.get(x_axis, "X-Axis"), cycle_details,
] module_details,
+ [
row_mapping.get(y_axis, "Y-Axis"),
]
+ segment_zero
) )
rows = []
for item in distribution:
generated_row = [
item,
]
data = distribution.get(item)
# Add y axis values
generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))
for segment in segment_zero:
value = [x for x in data if x.get("segment") == segment]
if len(value):
generated_row.append(value[0].get(key))
else: else:
generated_row.append("0") rows = generate_non_segmented_rows(
# x-axis replacement for names distribution,
if x_axis in ["assignees__id"]: x_axis,
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] y_axis,
if len(assignee): segment,
generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) key,
rows.append(tuple(generated_row)) assignee_details,
label_details,
# If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names state_details,
if segmented in ["assignees__id"]: cycle_details,
for index, segm in enumerate(row_zero[2:]): module_details,
# find the name of the user
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
if len(assignee):
row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
) )
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
else:
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
]
rows = []
for item in distribution:
row = [
item,
distribution.get(item)[0].get("count")
if y_axis == "issue_count"
else distribution.get(item)[0].get("estimate "),
]
# x-axis replacement to names
if x_axis in ["assignees__id"]:
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
if len(assignee):
row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
rows.append(tuple(row))
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer)
except Exception as e: except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG: if settings.DEBUG:
print(e) print(e)
capture_exception(e) capture_exception(e)
return

View File

@ -16,7 +16,7 @@ from plane.db.models import User
def forgot_password(first_name, email, uidb64, token, current_site): def forgot_password(first_name, email, uidb64, token, current_site):
try: try:
realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}" realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}"
abs_url = current_site + realtivelink abs_url = current_site + realtivelink
from_email_string = settings.EMAIL_FROM from_email_string = settings.EMAIL_FROM

View File

@ -33,13 +33,7 @@ from plane.api.serializers import IssueActivitySerializer
# Track Chnages in name # Track Chnages in name
def track_name( def track_name(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("name") != requested_data.get("name"): if current_instance.get("name") != requested_data.get("name"):
issue_activities.append( issue_activities.append(
@ -60,13 +54,7 @@ def track_name(
# Track changes in parent issue # Track changes in parent issue
def track_parent( def track_parent(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("parent") != requested_data.get("parent"): if current_instance.get("parent") != requested_data.get("parent"):
if requested_data.get("parent") == None: if requested_data.get("parent") == None:
@ -112,13 +100,7 @@ def track_parent(
# Track changes in priority # Track changes in priority
def track_priority( def track_priority(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("priority") != requested_data.get("priority"): if current_instance.get("priority") != requested_data.get("priority"):
issue_activities.append( issue_activities.append(
@ -139,13 +121,7 @@ def track_priority(
# Track chnages in state of the issue # Track chnages in state of the issue
def track_state( def track_state(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("state") != requested_data.get("state"): if current_instance.get("state") != requested_data.get("state"):
new_state = State.objects.get(pk=requested_data.get("state", None)) new_state = State.objects.get(pk=requested_data.get("state", None))
@ -171,19 +147,21 @@ def track_state(
# Track issue description # Track issue description
def track_description( def track_description(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("description_html") != requested_data.get( if current_instance.get("description_html") != requested_data.get(
"description_html" "description_html"
): ):
last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first() last_activity = (
if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id): IssueActivity.objects.filter(issue_id=issue_id)
.order_by("-created_at")
.first()
)
if (
last_activity is not None
and last_activity.field == "description"
and actor.id == last_activity.actor_id
):
last_activity.created_at = timezone.now() last_activity.created_at = timezone.now()
last_activity.save(update_fields=["created_at"]) last_activity.save(update_fields=["created_at"])
else: else:
@ -205,13 +183,7 @@ def track_description(
# Track changes in issue target date # Track changes in issue target date
def track_target_date( def track_target_date(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("target_date") != requested_data.get("target_date"): if current_instance.get("target_date") != requested_data.get("target_date"):
if requested_data.get("target_date") == None: if requested_data.get("target_date") == None:
@ -248,13 +220,7 @@ def track_target_date(
# Track changes in issue start date # Track changes in issue start date
def track_start_date( def track_start_date(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
if current_instance.get("start_date") != requested_data.get("start_date"): if current_instance.get("start_date") != requested_data.get("start_date"):
if requested_data.get("start_date") == None: if requested_data.get("start_date") == None:
@ -291,13 +257,7 @@ def track_start_date(
# Track changes in issue labels # Track changes in issue labels
def track_labels( def track_labels(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
# Label Addition # Label Addition
if len(requested_data.get("labels_list")) > len(current_instance.get("labels")): if len(requested_data.get("labels_list")) > len(current_instance.get("labels")):
@ -346,13 +306,7 @@ def track_labels(
# Track changes in issue assignees # Track changes in issue assignees
def track_assignees( def track_assignees(
requested_data, requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
current_instance,
issue_id,
project,
actor,
issue_activities,
epoch
): ):
# Assignee Addition # Assignee Addition
if len(requested_data.get("assignees_list")) > len( if len(requested_data.get("assignees_list")) > len(
@ -547,7 +501,7 @@ def update_issue_activity(
project, project,
actor, actor,
issue_activities, issue_activities,
epoch epoch,
) )
@ -868,7 +822,6 @@ def update_link_activity(
def delete_link_activity( def delete_link_activity(
requested_data, current_instance, issue_id, project, actor, issue_activities, epoch requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
): ):
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
@ -929,12 +882,19 @@ def delete_attachment_activity(
) )
) )
def create_issue_reaction_activity( def create_issue_reaction_activity(
requested_data, current_instance, issue_id, project, actor, issue_activities, epoch requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = json.loads(requested_data) if requested_data is not None else None
if requested_data and requested_data.get("reaction") is not None: if requested_data and requested_data.get("reaction") is not None:
issue_reaction = IssueReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', flat=True).first() issue_reaction = (
IssueReaction.objects.filter(
reaction=requested_data.get("reaction"), project=project, actor=actor
)
.values_list("id", flat=True)
.first()
)
if issue_reaction is not None: if issue_reaction is not None:
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
@ -984,9 +944,19 @@ def create_comment_reaction_activity(
): ):
requested_data = json.loads(requested_data) if requested_data is not None else None requested_data = json.loads(requested_data) if requested_data is not None else None
if requested_data and requested_data.get("reaction") is not None: if requested_data and requested_data.get("reaction") is not None:
comment_reaction_id, comment_id = CommentReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', 'comment__id').first() comment_reaction_id, comment_id = (
comment = IssueComment.objects.get(pk=comment_id,project=project) CommentReaction.objects.filter(
if comment is not None and comment_reaction_id is not None and comment_id is not None: reaction=requested_data.get("reaction"), project=project, actor=actor
)
.values_list("id", "comment__id")
.first()
)
comment = IssueComment.objects.get(pk=comment_id, project=project)
if (
comment is not None
and comment_reaction_id is not None
and comment_id is not None
):
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=comment.issue_id, issue_id=comment.issue_id,
@ -1012,7 +982,13 @@ def delete_comment_reaction_activity(
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
if current_instance and current_instance.get("reaction") is not None: if current_instance and current_instance.get("reaction") is not None:
issue_id = IssueComment.objects.filter(pk=current_instance.get("comment_id"), project=project).values_list('issue_id', flat=True).first() issue_id = (
IssueComment.objects.filter(
pk=current_instance.get("comment_id"), project=project
)
.values_list("issue_id", flat=True)
.first()
)
if issue_id is not None: if issue_id is not None:
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
@ -1104,7 +1080,7 @@ def create_issue_relation_activity(
field=relation_type, field=relation_type,
project=project, project=project,
workspace=project.workspace, workspace=project.workspace,
comment=f'added {relation_type} relation', comment=f"added {relation_type} relation",
old_identifier=issue_relation.get("issue"), old_identifier=issue_relation.get("issue"),
) )
) )
@ -1149,7 +1125,7 @@ def delete_issue_relation_activity(
field=relation_type, field=relation_type,
project=project, project=project,
workspace=project.workspace, workspace=project.workspace,
comment=f'deleted {relation_type} relation', comment=f"deleted {relation_type} relation",
old_identifier=current_instance.get("issue"), old_identifier=current_instance.get("issue"),
epoch=epoch, epoch=epoch,
) )
@ -1196,7 +1172,10 @@ def update_draft_issue_activity(
current_instance = ( current_instance = (
json.loads(current_instance) if current_instance is not None else None json.loads(current_instance) if current_instance is not None else None
) )
if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False: if (
requested_data.get("is_draft") is not None
and requested_data.get("is_draft") == False
):
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=issue_id, issue_id=issue_id,
@ -1223,7 +1202,6 @@ def update_draft_issue_activity(
) )
def delete_draft_issue_activity( def delete_draft_issue_activity(
requested_data, current_instance, issue_id, project, actor, issue_activities, epoch requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
): ):
@ -1239,6 +1217,7 @@ def delete_draft_issue_activity(
) )
) )
# Receive message from room group # Receive message from room group
@shared_task @shared_task
def issue_activity( def issue_activity(
@ -1252,6 +1231,7 @@ def issue_activity(
subscriber=True, subscriber=True,
): ):
try: try:
issue_activities = [] issue_activities = []
actor = User.objects.get(pk=actor_id) actor = User.objects.get(pk=actor_id)
@ -1268,6 +1248,9 @@ def issue_activity(
"comment_reaction.activity.deleted", "comment_reaction.activity.deleted",
"issue_vote.activity.created", "issue_vote.activity.created",
"issue_vote.activity.deleted", "issue_vote.activity.deleted",
"issue_draft.activity.created",
"issue_draft.activity.updated",
"issue_draft.activity.deleted",
]: ]:
issue = Issue.objects.filter(pk=issue_id).first() issue = Issue.objects.filter(pk=issue_id).first()
@ -1360,6 +1343,9 @@ def issue_activity(
"comment_reaction.activity.deleted", "comment_reaction.activity.deleted",
"issue_vote.activity.created", "issue_vote.activity.created",
"issue_vote.activity.deleted", "issue_vote.activity.deleted",
"issue_draft.activity.created",
"issue_draft.activity.updated",
"issue_draft.activity.deleted",
]: ]:
# Create Notifications # Create Notifications
bulk_notifications = [] bulk_notifications = []
@ -1389,7 +1375,7 @@ def issue_activity(
): ):
issue_subscribers = issue_subscribers + [issue.created_by_id] issue_subscribers = issue_subscribers + [issue.created_by_id]
for subscriber in issue_subscribers: for subscriber in list(set(issue_subscribers)):
for issue_activity in issue_activities_created: for issue_activity in issue_activities_created:
bulk_notifications.append( bulk_notifications.append(
Notification( Notification(

View File

@ -58,20 +58,23 @@ def archive_old_issues():
# Check if Issues # Check if Issues
if issues: if issues:
# Set the archive time to current time
archive_at = timezone.now()
issues_to_update = [] issues_to_update = []
for issue in issues: for issue in issues:
issue.archived_at = timezone.now() issue.archived_at = archive_at
issues_to_update.append(issue) issues_to_update.append(issue)
# Bulk Update the issues and log the activity # Bulk Update the issues and log the activity
if issues_to_update: if issues_to_update:
updated_issues = Issue.objects.bulk_update( Issue.objects.bulk_update(
issues_to_update, ["archived_at"], batch_size=100 issues_to_update, ["archived_at"], batch_size=100
) )
[ [
issue_activity.delay( issue_activity.delay(
type="issue.activity.updated", type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(issue.archived_at)}), requested_data=json.dumps({"archived_at": str(archive_at)}),
actor_id=str(project.created_by_id), actor_id=str(project.created_by_id),
issue_id=issue.id, issue_id=issue.id,
project_id=project_id, project_id=project_id,
@ -79,7 +82,7 @@ def archive_old_issues():
subscriber=False, subscriber=False,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp())
) )
for issue in updated_issues for issue in issues_to_update
] ]
return return
except Exception as e: except Exception as e:
@ -139,7 +142,7 @@ def close_old_issues():
# Bulk Update the issues and log the activity # Bulk Update the issues and log the activity
if issues_to_update: if issues_to_update:
updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
[ [
issue_activity.delay( issue_activity.delay(
type="issue.activity.updated", type="issue.activity.updated",
@ -151,7 +154,7 @@ def close_old_issues():
subscriber=False, subscriber=False,
epoch=int(timezone.now().timestamp()) epoch=int(timezone.now().timestamp())
) )
for issue in updated_issues for issue in issues_to_update
] ]
return return
except Exception as e: except Exception as e:

View File

@ -33,8 +33,7 @@ def create_issue_relation(apps, schema_editor):
def update_issue_priority_choice(apps, schema_editor): def update_issue_priority_choice(apps, schema_editor):
IssueModel = apps.get_model("db", "Issue") IssueModel = apps.get_model("db", "Issue")
updated_issues = [] updated_issues = []
for obj in IssueModel.objects.all(): for obj in IssueModel.objects.filter(priority=None):
if obj.priority is None:
obj.priority = "none" obj.priority = "none"
updated_issues.append(obj) updated_issues.append(obj)
IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100) IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100)

View File

@ -26,19 +26,19 @@ def workspace_member_props(old_props):
"calendar_date_range": old_props.get("calendarDateRange", ""), "calendar_date_range": old_props.get("calendarDateRange", ""),
}, },
"display_properties": { "display_properties": {
"assignee": old_props.get("properties", {}).get("assignee",None), "assignee": old_props.get("properties", {}).get("assignee", True),
"attachment_count": old_props.get("properties", {}).get("attachment_count", None), "attachment_count": old_props.get("properties", {}).get("attachment_count", True),
"created_on": old_props.get("properties", {}).get("created_on", None), "created_on": old_props.get("properties", {}).get("created_on", True),
"due_date": old_props.get("properties", {}).get("due_date", None), "due_date": old_props.get("properties", {}).get("due_date", True),
"estimate": old_props.get("properties", {}).get("estimate", None), "estimate": old_props.get("properties", {}).get("estimate", True),
"key": old_props.get("properties", {}).get("key", None), "key": old_props.get("properties", {}).get("key", True),
"labels": old_props.get("properties", {}).get("labels", None), "labels": old_props.get("properties", {}).get("labels", True),
"link": old_props.get("properties", {}).get("link", None), "link": old_props.get("properties", {}).get("link", True),
"priority": old_props.get("properties", {}).get("priority", None), "priority": old_props.get("properties", {}).get("priority", True),
"start_date": old_props.get("properties", {}).get("start_date", None), "start_date": old_props.get("properties", {}).get("start_date", True),
"state": old_props.get("properties", {}).get("state", None), "state": old_props.get("properties", {}).get("state", True),
"sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", None), "sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", True),
"updated_on": old_props.get("properties", {}).get("updated_on", None), "updated_on": old_props.get("properties", {}).get("updated_on", True),
}, },
} }
return new_props return new_props

View File

@ -1,24 +0,0 @@
# Generated by Django 4.2.3 on 2023-09-15 06:55
from django.db import migrations
def update_issue_activity(apps, schema_editor):
IssueActivityModel = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivityModel.objects.all():
if obj.field == "blocks":
obj.field = "blocked_by"
updated_issue_activity.append(obj)
IssueActivityModel.objects.bulk_update(updated_issue_activity, ["field"], batch_size=100)
class Migration(migrations.Migration):
dependencies = [
('db', '0044_auto_20230913_0709'),
]
operations = [
migrations.RunPython(update_issue_activity),
]

View File

@ -1,24 +1,43 @@
# Generated by Django 4.2.3 on 2023-09-19 14:21 # Generated by Django 4.2.5 on 2023-09-29 10:14
from django.conf import settings from django.conf import settings
from django.db import migrations, models from django.db import migrations, models
import django.db.models.deletion import django.db.models.deletion
import plane.db.models.workspace
import uuid import uuid
def update_epoch(apps, schema_editor): def update_issue_activity_priority(apps, schema_editor):
IssueActivity = apps.get_model('db', 'IssueActivity') IssueActivity = apps.get_model("db", "IssueActivity")
updated_issue_activity = [] updated_issue_activity = []
for obj in IssueActivity.objects.all(): for obj in IssueActivity.objects.filter(field="priority"):
obj.epoch = int(obj.created_at.timestamp()) # Set the old and new value to none if it is empty for Priority
obj.new_value = obj.new_value or "none"
obj.old_value = obj.old_value or "none"
updated_issue_activity.append(obj) updated_issue_activity.append(obj)
IssueActivity.objects.bulk_update(updated_issue_activity, ["epoch"], batch_size=100) IssueActivity.objects.bulk_update(
updated_issue_activity,
["new_value", "old_value"],
batch_size=2000,
)
def update_issue_activity_blocked(apps, schema_editor):
IssueActivity = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivity.objects.filter(field="blocks"):
# Set the field to blocked_by
obj.field = "blocked_by"
updated_issue_activity.append(obj)
IssueActivity.objects.bulk_update(
updated_issue_activity,
["field"],
batch_size=1000,
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('db', '0045_auto_20230915_0655'), ('db', '0044_auto_20230913_0709'),
] ]
operations = [ operations = [
@ -33,6 +52,7 @@ class Migration(migrations.Migration):
('query', models.JSONField(verbose_name='View Query')), ('query', models.JSONField(verbose_name='View Query')),
('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)), ('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)),
('query_data', models.JSONField(default=dict)), ('query_data', models.JSONField(default=dict)),
('sort_order', models.FloatField(default=65535)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')), ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')),
@ -44,10 +64,16 @@ class Migration(migrations.Migration):
'ordering': ('-created_at',), 'ordering': ('-created_at',),
}, },
), ),
migrations.AddField(
model_name='workspacemember',
name='issue_props',
field=models.JSONField(default=plane.db.models.workspace.get_issue_props),
),
migrations.AddField( migrations.AddField(
model_name='issueactivity', model_name='issueactivity',
name='epoch', name='epoch',
field=models.FloatField(null=True), field=models.FloatField(null=True),
), ),
migrations.RunPython(update_epoch), migrations.RunPython(update_issue_activity_priority),
migrations.RunPython(update_issue_activity_blocked),
] ]

View File

@ -1,27 +0,0 @@
# Generated by Django 4.2.3 on 2023-09-21 07:58
from django.db import migrations
def update_priority_history(apps, schema_editor):
IssueActivity = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivity.objects.all():
if obj.field == "priority":
obj.new_value = obj.new_value or "none"
obj.old_value = obj.old_value or "none"
updated_issue_activity.append(obj)
IssueActivity.objects.bulk_update(
updated_issue_activity, ["new_value", "old_value"], batch_size=100
)
class Migration(migrations.Migration):
dependencies = [
("db", "0046_auto_20230919_1421"),
]
operations = [
migrations.RunPython(update_priority_history),
]

View File

@ -17,6 +17,7 @@ class GlobalView(BaseModel):
default=1, choices=((0, "Private"), (1, "Public")) default=1, choices=((0, "Private"), (1, "Public"))
) )
query_data = models.JSONField(default=dict) query_data = models.JSONField(default=dict)
sort_order = models.FloatField(default=65535)
class Meta: class Meta:
verbose_name = "Global View" verbose_name = "Global View"
@ -24,6 +25,16 @@ class GlobalView(BaseModel):
db_table = "global_views" db_table = "global_views"
ordering = ("-created_at",) ordering = ("-created_at",)
def save(self, *args, **kwargs):
if self._state.adding:
largest_sort_order = GlobalView.objects.filter(
workspace=self.workspace
).aggregate(largest=models.Max("sort_order"))["largest"]
if largest_sort_order is not None:
self.sort_order = largest_sort_order + 10000
super(GlobalView, self).save(*args, **kwargs)
def __str__(self): def __str__(self):
"""Return name of the View""" """Return name of the View"""
return f"{self.name} <{self.workspace.name}>" return f"{self.name} <{self.workspace.name}>"

View File

@ -29,7 +29,7 @@ def get_default_props():
}, },
"display_filters": { "display_filters": {
"group_by": None, "group_by": None,
"order_by": '-created_at', "order_by": "-created_at",
"type": None, "type": None,
"sub_issue": True, "sub_issue": True,
"show_empty_groups": True, "show_empty_groups": True,
@ -54,6 +54,15 @@ def get_default_props():
} }
def get_issue_props():
return {
"subscribed": True,
"assigned": True,
"created": True,
"all_issues": True,
}
class Workspace(BaseModel): class Workspace(BaseModel):
name = models.CharField(max_length=80, verbose_name="Workspace Name") name = models.CharField(max_length=80, verbose_name="Workspace Name")
logo = models.URLField(verbose_name="Logo", blank=True, null=True) logo = models.URLField(verbose_name="Logo", blank=True, null=True)
@ -89,6 +98,7 @@ class WorkspaceMember(BaseModel):
company_role = models.TextField(null=True, blank=True) company_role = models.TextField(null=True, blank=True)
view_props = models.JSONField(default=get_default_props) view_props = models.JSONField(default=get_default_props)
default_props = models.JSONField(default=get_default_props) default_props = models.JSONField(default=get_default_props)
issue_props = models.JSONField(default=get_issue_props)
class Meta: class Meta:
unique_together = ["workspace", "member"] unique_together = ["workspace", "member"]

View File

@ -12,6 +12,10 @@ from .common import * # noqa
DEBUG = int(os.environ.get("DEBUG", 1)) == 1 DEBUG = int(os.environ.get("DEBUG", 1)) == 1
ALLOWED_HOSTS = [
"*",
]
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
@ -139,3 +143,5 @@ AWS_S3_PRIVATE_FILE_OVERWRITE = False
AWS_PRIVATE_DEFAULT_ACL = "private" AWS_PRIVATE_DEFAULT_ACL = "private"
PRIVATE_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" PRIVATE_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
# End Storage Settings # End Storage Settings
# Unsplash Access key
UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY")

View File

@ -7,6 +7,7 @@ import dj_database_url
import sentry_sdk import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration from sentry_sdk.integrations.redis import RedisIntegration
from urllib.parse import urlparse
from .common import * # noqa from .common import * # noqa
@ -135,7 +136,18 @@ CSRF_COOKIE_SECURE = True
REDIS_URL = os.environ.get("REDIS_URL") REDIS_URL = os.environ.get("REDIS_URL")
CACHES = { if DOCKERIZED:
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": REDIS_URL,
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
},
}
}
else:
CACHES = {
"default": { "default": {
"BACKEND": "django_redis.cache.RedisCache", "BACKEND": "django_redis.cache.RedisCache",
"LOCATION": REDIS_URL, "LOCATION": REDIS_URL,
@ -144,7 +156,7 @@ CACHES = {
"CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
}, },
} }
} }
WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so") WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so")
@ -167,8 +179,12 @@ broker_url = (
f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
) )
CELERY_RESULT_BACKEND = broker_url if DOCKERIZED:
CELERY_BROKER_URL = broker_url CELERY_BROKER_URL = REDIS_URL
CELERY_RESULT_BACKEND = REDIS_URL
else:
CELERY_BROKER_URL = broker_url
CELERY_RESULT_BACKEND = broker_url
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
@ -179,3 +195,7 @@ ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
SCOUT_KEY = os.environ.get("SCOUT_KEY", "") SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
SCOUT_NAME = "Plane" SCOUT_NAME = "Plane"
# Unsplash Access key
UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY")

View File

@ -123,3 +123,4 @@ ANALYTICS_BASE_API = False
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")

View File

@ -159,3 +159,7 @@ CELERY_BROKER_URL = broker_url
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
# Unsplash Access key
UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY")

View File

@ -12,34 +12,47 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc
from plane.db.models import Issue from plane.db.models import Issue
def build_graph_plot(queryset, x_axis, y_axis, segment=None): def annotate_with_monthly_dimension(queryset, field_name):
# Get the year and the months
temp_axis = x_axis year = ExtractYear(field_name)
month = ExtractMonth(field_name)
if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: # Concat the year and month
year = ExtractYear(x_axis)
month = ExtractMonth(x_axis)
dimension = Concat(year, Value("-"), month, output_field=CharField()) dimension = Concat(year, Value("-"), month, output_field=CharField())
queryset = queryset.annotate(dimension=dimension) # Annotate the dimension
x_axis = "dimension" return queryset.annotate(dimension=dimension)
def extract_axis(queryset, x_axis):
# Format the dimension when the axis is in date
if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
queryset = annotate_with_monthly_dimension(queryset, x_axis)
return queryset, "dimension"
else: else:
queryset = queryset.annotate(dimension=F(x_axis)) return queryset.annotate(dimension=F(x_axis)), "dimension"
x_axis = "dimension"
if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: def sort_data(data, temp_axis):
queryset = queryset.exclude(x_axis__is_null=True) # When the axis is in priority order by
if temp_axis == "priority":
order = ["low", "medium", "high", "urgent", "none"]
return {key: data[key] for key in order if key in data}
else:
return dict(sorted(data.items(), key=lambda x: (x[0] == "none", x[0])))
def build_graph_plot(queryset, x_axis, y_axis, segment=None):
# temp x_axis
temp_axis = x_axis
# Extract the x_axis and queryset
queryset, x_axis = extract_axis(queryset, x_axis)
if x_axis == "dimension":
queryset = queryset.exclude(dimension__isnull=True)
#
if segment in ["created_at", "start_date", "target_date", "completed_at"]: if segment in ["created_at", "start_date", "target_date", "completed_at"]:
year = ExtractYear(segment) queryset = annotate_with_monthly_dimension(queryset, segment)
month = ExtractMonth(segment)
dimension = Concat(year, Value("-"), month, output_field=CharField())
queryset = queryset.annotate(segmented=dimension)
segment = "segmented" segment = "segmented"
queryset = queryset.values(x_axis) queryset = queryset.values(x_axis)
# Group queryset by x_axis field # Issue count
if y_axis == "issue_count": if y_axis == "issue_count":
queryset = queryset.annotate( queryset = queryset.annotate(
is_null=Case( is_null=Case(
@ -49,37 +62,20 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
), ),
dimension_ex=Coalesce("dimension", Value("null")), dimension_ex=Coalesce("dimension", Value("null")),
).values("dimension") ).values("dimension")
if segment: queryset = queryset.annotate(segment=F(segment)) if segment else queryset
queryset = queryset.annotate(segment=F(segment)).values( queryset = queryset.values("dimension", "segment") if segment else queryset.values("dimension")
"dimension", "segment"
)
else:
queryset = queryset.values("dimension")
queryset = queryset.annotate(count=Count("*")).order_by("dimension") queryset = queryset.annotate(count=Count("*")).order_by("dimension")
if y_axis == "estimate": # Estimate
queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
if segment:
queryset = queryset.annotate(segment=F(segment)).values(
"dimension", "segment", "estimate"
)
else: else:
queryset = queryset.values("dimension", "estimate") queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
queryset = queryset.annotate(segment=F(segment)) if segment else queryset
queryset = queryset.values("dimension", "segment", "estimate") if segment else queryset.values("dimension", "estimate")
result_values = list(queryset) result_values = list(queryset)
grouped_data = {} grouped_data = {str(key): list(items) for key, items in groupby(result_values, key=lambda x: x[str("dimension")])}
for key, items in groupby(result_values, key=lambda x: x[str("dimension")]):
grouped_data[str(key)] = list(items)
sorted_data = grouped_data
if temp_axis == "priority":
order = ["low", "medium", "high", "urgent", "none"]
sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
else:
sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0])))
return sorted_data
return sort_data(grouped_data, temp_axis)
def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
# Total Issues in Cycle or Module # Total Issues in Cycle or Module

View File

@ -1,5 +1,62 @@
from django.utils.timezone import make_aware import re
from django.utils.dateparse import parse_datetime from datetime import timedelta
from django.utils import timezone
# The date from pattern
pattern = re.compile(r"\d+_(weeks|months)$")
# Get the 2_weeks, 3_months
def string_date_filter(filter, duration, subsequent, term, date_filter, offset):
now = timezone.now().date()
if term == "months":
if subsequent == "after":
if offset == "fromnow":
filter[f"{date_filter}__gte"] = now + timedelta(days=duration * 30)
else:
filter[f"{date_filter}__gte"] = now - timedelta(days=duration * 30)
else:
if offset == "fromnow":
filter[f"{date_filter}__lte"] = now + timedelta(days=duration * 30)
else:
filter[f"{date_filter}__lte"] = now - timedelta(days=duration * 30)
if term == "weeks":
if subsequent == "after":
if offset == "fromnow":
filter[f"{date_filter}__gte"] = now + timedelta(weeks=duration)
else:
filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration)
else:
if offset == "fromnow":
filter[f"{date_filter}__lte"] = now + timedelta(days=duration)
else:
filter[f"{date_filter}__lte"] = now - timedelta(days=duration)
def date_filter(filter, date_term, queries):
"""
Handle all date filters
"""
for query in queries:
date_query = query.split(";")
if len(date_query) >= 2:
match = pattern.match(date_query[0])
if match:
if len(date_query) == 3:
digit, term = date_query[0].split("_")
string_date_filter(
filter=filter,
duration=int(digit),
subsequent=date_query[1],
term=term,
date_filter="created_at__date",
offset=date_query[2],
)
else:
if "after" in date_query:
filter[f"{date_term}__gte"] = date_query[0]
else:
filter[f"{date_term}__lte"] = date_query[0]
def filter_state(params, filter, method): def filter_state(params, filter, method):
@ -97,20 +154,10 @@ def filter_created_at(params, filter, method):
if method == "GET": if method == "GET":
created_ats = params.get("created_at").split(",") created_ats = params.get("created_at").split(",")
if len(created_ats) and "" not in created_ats: if len(created_ats) and "" not in created_ats:
for query in created_ats: date_filter(filter=filter, date_term="created_at__date", queries=created_ats)
created_at_query = query.split(";")
if len(created_at_query) == 2 and "after" in created_at_query:
filter["created_at__date__gte"] = created_at_query[0]
else:
filter["created_at__date__lte"] = created_at_query[0]
else: else:
if params.get("created_at", None) and len(params.get("created_at")): if params.get("created_at", None) and len(params.get("created_at")):
for query in params.get("created_at"): date_filter(filter=filter, date_term="created_at__date", queries=params.get("created_at", []))
created_at_query = query.split(";")
if len(created_at_query) == 2 and "after" in created_at_query:
filter["created_at__date__gte"] = created_at_query[0]
else:
filter["created_at__date__lte"] = created_at_query[0]
return filter return filter
@ -118,20 +165,10 @@ def filter_updated_at(params, filter, method):
if method == "GET": if method == "GET":
updated_ats = params.get("updated_at").split(",") updated_ats = params.get("updated_at").split(",")
if len(updated_ats) and "" not in updated_ats: if len(updated_ats) and "" not in updated_ats:
for query in updated_ats: date_filter(filter=filter, date_term="created_at__date", queries=updated_ats)
updated_at_query = query.split(";")
if len(updated_at_query) == 2 and "after" in updated_at_query:
filter["updated_at__date__gte"] = updated_at_query[0]
else:
filter["updated_at__date__lte"] = updated_at_query[0]
else: else:
if params.get("updated_at", None) and len(params.get("updated_at")): if params.get("updated_at", None) and len(params.get("updated_at")):
for query in params.get("updated_at"): date_filter(filter=filter, date_term="created_at__date", queries=params.get("updated_at", []))
updated_at_query = query.split(";")
if len(updated_at_query) == 2 and "after" in updated_at_query:
filter["updated_at__date__gte"] = updated_at_query[0]
else:
filter["updated_at__date__lte"] = updated_at_query[0]
return filter return filter
@ -139,20 +176,10 @@ def filter_start_date(params, filter, method):
if method == "GET": if method == "GET":
start_dates = params.get("start_date").split(",") start_dates = params.get("start_date").split(",")
if len(start_dates) and "" not in start_dates: if len(start_dates) and "" not in start_dates:
for query in start_dates: date_filter(filter=filter, date_term="start_date", queries=start_dates)
start_date_query = query.split(";")
if len(start_date_query) == 2 and "after" in start_date_query:
filter["start_date__gte"] = start_date_query[0]
else:
filter["start_date__lte"] = start_date_query[0]
else: else:
if params.get("start_date", None) and len(params.get("start_date")): if params.get("start_date", None) and len(params.get("start_date")):
for query in params.get("start_date"): date_filter(filter=filter, date_term="start_date", queries=params.get("start_date", []))
start_date_query = query.split(";")
if len(start_date_query) == 2 and "after" in start_date_query:
filter["start_date__gte"] = start_date_query[0]
else:
filter["start_date__lte"] = start_date_query[0]
return filter return filter
@ -160,21 +187,10 @@ def filter_target_date(params, filter, method):
if method == "GET": if method == "GET":
target_dates = params.get("target_date").split(",") target_dates = params.get("target_date").split(",")
if len(target_dates) and "" not in target_dates: if len(target_dates) and "" not in target_dates:
for query in target_dates: date_filter(filter=filter, date_term="target_date", queries=target_dates)
target_date_query = query.split(";")
if len(target_date_query) == 2 and "after" in target_date_query:
filter["target_date__gt"] = target_date_query[0]
else:
filter["target_date__lt"] = target_date_query[0]
else: else:
if params.get("target_date", None) and len(params.get("target_date")): if params.get("target_date", None) and len(params.get("target_date")):
for query in params.get("target_date"): date_filter(filter=filter, date_term="target_date", queries=params.get("target_date", []))
target_date_query = query.split(";")
if len(target_date_query) == 2 and "after" in target_date_query:
filter["target_date__gt"] = target_date_query[0]
else:
filter["target_date__lt"] = target_date_query[0]
return filter return filter
@ -182,20 +198,10 @@ def filter_completed_at(params, filter, method):
if method == "GET": if method == "GET":
completed_ats = params.get("completed_at").split(",") completed_ats = params.get("completed_at").split(",")
if len(completed_ats) and "" not in completed_ats: if len(completed_ats) and "" not in completed_ats:
for query in completed_ats: date_filter(filter=filter, date_term="completed_at__date", queries=completed_ats)
completed_at_query = query.split(";")
if len(completed_at_query) == 2 and "after" in completed_at_query:
filter["completed_at__date__gte"] = completed_at_query[0]
else:
filter["completed_at__lte"] = completed_at_query[0]
else: else:
if params.get("completed_at", None) and len(params.get("completed_at")): if params.get("completed_at", None) and len(params.get("completed_at")):
for query in params.get("completed_at"): date_filter(filter=filter, date_term="completed_at__date", queries=params.get("completed_at", []))
completed_at_query = query.split(";")
if len(completed_at_query) == 2 and "after" in completed_at_query:
filter["completed_at__date__gte"] = completed_at_query[0]
else:
filter["completed_at__lte"] = completed_at_query[0]
return filter return filter

View File

@ -1 +1 @@
python-3.11.5 python-3.11.6

View File

@ -1,4 +0,0 @@
# Deploy the Plane image
FROM makeplane/plane
LABEL maintainer="engineering@plane.so"

View File

@ -0,0 +1,168 @@
version: "3.8"
x-app-env : &app-env
environment:
- NGINX_PORT=${NGINX_PORT:-84}
- DEBUG=${DEBUG:-0}
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted}
- NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0}
- NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces}
- SENTRY_DSN=${SENTRY_DSN:-""}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- DOCKERIZED=${DOCKERIZED:-1}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}
- POSTGRES_USER=${POSTGRES_USER:-plane}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
- POSTGRES_DB=${POSTGRES_DB:-plane}
- PGDATA=${PGDATA:-/var/lib/postgresql/data}
- DATABASE_URL=${DATABASE_URL:-postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE}}
# REDIS SETTINGS
- REDIS_HOST=${REDIS_HOST:-plane-redis}
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/}
# EMAIL SETTINGS
- EMAIL_HOST=${EMAIL_HOST:-""}
- EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
- EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
- EMAIL_PORT=${EMAIL_PORT:-587}
- EMAIL_FROM=${EMAIL_FROM:-"Team Plane &lt;team@mailer.plane.so&gt;"}
- EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
- EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
- DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
- DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
# OPENAI SETTINGS
- OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
- OPENAI_API_KEY=${OPENAI_API_KEY:-"sk-"}
- GPT_ENGINE=${GPT_ENGINE:-"gpt-3.5-turbo"}
# LOGIN/SIGNUP SETTINGS
- ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
- ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1}
- ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0}
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}
- AWS_REGION=${AWS_REGION:-""}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
- AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"}
- MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
services:
web:
<<: *app-env
platform: linux/amd64
image: makeplane/plane-frontend:${APP_RELEASE:-latest}
restart: unless-stopped
command: /usr/local/bin/start.sh web/server.js web
deploy:
replicas: ${WEB_REPLICAS:-1}
depends_on:
- api
- worker
space:
<<: *app-env
platform: linux/amd64
image: makeplane/plane-space:${APP_RELEASE:-latest}
restart: unless-stopped
command: /usr/local/bin/start.sh space/server.js space
deploy:
replicas: ${SPACE_REPLICAS:-1}
depends_on:
- api
- worker
- web
api:
<<: *app-env
platform: linux/amd64
image: makeplane/plane-backend:${APP_RELEASE:-latest}
restart: unless-stopped
command: ./bin/takeoff
deploy:
replicas: ${API_REPLICAS:-1}
depends_on:
- plane-db
- plane-redis
worker:
<<: *app-env
container_name: bgworker
platform: linux/amd64
image: makeplane/plane-backend:${APP_RELEASE:-latest}
restart: unless-stopped
command: ./bin/worker
depends_on:
- api
- plane-db
- plane-redis
beat-worker:
<<: *app-env
container_name: beatworker
platform: linux/amd64
image: makeplane/plane-backend:${APP_RELEASE:-latest}
restart: unless-stopped
command: ./bin/beat
depends_on:
- api
- plane-db
- plane-redis
plane-db:
<<: *app-env
container_name: plane-db
image: postgres:15.2-alpine
restart: unless-stopped
command: postgres -c 'max_connections=1000'
volumes:
- pgdata:/var/lib/postgresql/data
plane-redis:
<<: *app-env
container_name: plane-redis
image: redis:6.2.7-alpine
restart: unless-stopped
volumes:
- redisdata:/data
plane-minio:
<<: *app-env
container_name: plane-minio
image: minio/minio
restart: unless-stopped
command: server /export --console-address ":9090"
volumes:
- uploads:/export
createbuckets:
<<: *app-env
image: minio/mc
entrypoint: >
/bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; "
depends_on:
- plane-minio
# Comment this if you already have a reverse proxy running
proxy:
<<: *app-env
container_name: proxy
platform: linux/amd64
image: makeplane/plane-proxy:${APP_RELEASE:-latest}
ports:
- ${NGINX_PORT}:80
depends_on:
- web
- api
- space
volumes:
pgdata:
redisdata:
uploads:

111
deploy/selfhost/install.sh Executable file
View File

@ -0,0 +1,111 @@
#!/bin/bash
BRANCH=${BRANCH:-master}
SCRIPT_DIR=$PWD
PLANE_INSTALL_DIR=$PWD/plane-app
mkdir -p $PLANE_INSTALL_DIR/archive
function install(){
echo
echo "Installing on $PLANE_INSTALL_DIR"
download
}
function download(){
cd $SCRIPT_DIR
TS=$(date +%s)
if [ -f "$PLANE_INSTALL_DIR/docker-compose.yaml" ]
then
mv $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml
fi
curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s)
curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s)
if [ -f "$PLANE_INSTALL_DIR/.env" ];
then
cp $PLANE_INSTALL_DIR/.env $PLANE_INSTALL_DIR/archive/$TS.env
else
mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env
fi
echo ""
echo "Latest version is now available for you to use"
echo ""
echo "In case of Upgrade, your new setting file is availabe as 'variables-upgrade.env'. Please compare and set the required values in '.env 'file."
echo ""
}
function startServices(){
cd $PLANE_INSTALL_DIR
docker compose up -d
cd $SCRIPT_DIR
}
function stopServices(){
cd $PLANE_INSTALL_DIR
docker compose down
cd $SCRIPT_DIR
}
function restartServices(){
cd $PLANE_INSTALL_DIR
docker compose restart
cd $SCRIPT_DIR
}
function upgrade(){
echo "***** STOPPING SERVICES ****"
stopServices
echo
echo "***** DOWNLOADING LATEST VERSION ****"
download
echo "***** PLEASE VALIDATE AND START SERVICES ****"
}
function askForAction(){
echo
echo "Select a Action you want to perform:"
echo " 1) Install"
echo " 2) Start"
echo " 3) Stop"
echo " 4) Restart"
echo " 5) Upgrade"
echo " 6) Exit"
echo
read -p "Action [2]: " ACTION
until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do
echo "$ACTION: invalid selection."
read -p "Action [2]: " ACTION
done
echo
if [ "$ACTION" == "1" ]
then
install
askForAction
elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ]
then
startServices
askForAction
elif [ "$ACTION" == "3" ]
then
stopServices
askForAction
elif [ "$ACTION" == "4" ]
then
restartServices
askForAction
elif [ "$ACTION" == "5" ]
then
upgrade
askForAction
elif [ "$ACTION" == "6" ]
then
exit 0
else
echo "INVALID ACTION SUPPLIED"
fi
}
askForAction

View File

@ -0,0 +1,63 @@
APP_RELEASE=latest
WEB_REPLICAS=1
SPACE_REPLICAS=1
API_REPLICAS=1
NGINX_PORT=80
DEBUG=0
DJANGO_SETTINGS_MODULE=plane.settings.selfhosted
NEXT_PUBLIC_ENABLE_OAUTH=0
NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces
SENTRY_DSN=""
GITHUB_CLIENT_SECRET=""
DOCKERIZED=1
#DB SETTINGS
PGHOST=plane-db
PGDATABASE=plane
POSTGRES_USER=plane
POSTGRES_PASSWORD=plane
POSTGRES_DB=plane
PGDATA=/var/lib/postgresql/data
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE}
# REDIS SETTINGS
REDIS_HOST=plane-redis
REDIS_PORT=6379
REDIS_URL=redis://${REDIS_HOST}:6379/
# EMAIL SETTINGS
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane &lt;team@mailer.plane.so&gt;"
EMAIL_USE_TLS=1
EMAIL_USE_SSL=0
DEFAULT_EMAIL=captain@plane.so
DEFAULT_PASSWORD=password123
# OPENAI SETTINGS
OPENAI_API_BASE=https://api.openai.com/v1
OPENAI_API_KEY="sk-"
GPT_ENGINE="gpt-3.5-turbo"
# LOGIN/SIGNUP SETTINGS
ENABLE_SIGNUP=1
ENABLE_EMAIL_PASSWORD=1
ENABLE_MAGIC_LINK_LOGIN=0
SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
# DATA STORE SETTINGS
USE_MINIO=1
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL=http://plane-minio:9000
AWS_S3_BUCKET_NAME=uploads
MINIO_ROOT_USER="access-key"
MINIO_ROOT_PASSWORD="secret-key"
BUCKET_NAME=uploads
FILE_SIZE_LIMIT=5242880

View File

@ -1,133 +0,0 @@
version: "3.8"

services:
  # Next.js main app, served behind the proxy at /.
  web:
    container_name: web
    image: makeplane/plane-frontend:latest
    restart: always
    command: /usr/local/bin/start.sh web/server.js web
    env_file:
      - ./web/.env
    depends_on:
      - api
      - worker

  # Public "spaces" deploy app, served behind the proxy at /spaces/.
  space:
    container_name: space
    image: makeplane/plane-space:latest
    restart: always
    command: /usr/local/bin/start.sh space/server.js space
    env_file:
      - ./space/.env
    depends_on:
      - api
      - worker
      - web

  # Django API server.
  api:
    container_name: api
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/takeoff
    env_file:
      - ./apiserver/.env
    depends_on:
      - plane-db
      - plane-redis

  # Celery background worker (same image as the API).
  worker:
    container_name: bgworker
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/worker
    env_file:
      - ./apiserver/.env
    depends_on:
      - api
      - plane-db
      - plane-redis

  # Celery beat scheduler for periodic tasks.
  beat-worker:
    container_name: beatworker
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/beat
    env_file:
      - ./apiserver/.env
    depends_on:
      - api
      - plane-db
      - plane-redis

  plane-db:
    container_name: plane-db
    image: postgres:15.2-alpine
    restart: always
    command: postgres -c 'max_connections=1000'
    volumes:
      - pgdata:/var/lib/postgresql/data
    env_file:
      - .env
    environment:
      # Quoted so compose interpolation results are always treated as strings
      # (protects against empty expansion and YAML implicit typing).
      POSTGRES_USER: "${PGUSER}"
      POSTGRES_DB: "${PGDATABASE}"
      POSTGRES_PASSWORD: "${PGPASSWORD}"
      PGDATA: /var/lib/postgresql/data

  plane-redis:
    container_name: plane-redis
    image: redis:6.2.7-alpine
    restart: always
    volumes:
      - redisdata:/data

  # MinIO object store (S3-compatible) for file uploads.
  plane-minio:
    container_name: plane-minio
    image: minio/minio
    restart: always
    command: server /export --console-address ":9090"
    volumes:
      - uploads:/export
    env_file:
      - .env
    environment:
      MINIO_ROOT_USER: "${AWS_ACCESS_KEY_ID}"
      MINIO_ROOT_PASSWORD: "${AWS_SECRET_ACCESS_KEY}"

  # One-shot job: create the public (anonymous-download) and private
  # (no anonymous access) buckets, then exit successfully.
  # NOTE: \$ defers variable expansion to the container shell, not compose.
  createbuckets:
    image: minio/mc
    entrypoint: >
      /bin/sh -c "
      /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
      /usr/bin/mc mb plane-minio/\$AWS_PUBLIC_STORAGE_BUCKET_NAME;
      /usr/bin/mc anonymous set download plane-minio/\$AWS_PUBLIC_STORAGE_BUCKET_NAME;
      /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY;
      /usr/bin/mc mb plane-minio/\$AWS_PRIVATE_STORAGE_BUCKET_NAME;
      /usr/bin/mc anonymous set none plane-minio/\$AWS_PRIVATE_STORAGE_BUCKET_NAME; exit 0;
      "
    env_file:
      - .env
    depends_on:
      - plane-minio

  # Comment this if you already have a reverse proxy running
  proxy:
    container_name: proxy
    image: makeplane/plane-proxy:latest
    ports:
      # Quoted: an unquoted HOST:CONTAINER mapping can be parsed by YAML 1.1
      # as a base-60 integer instead of a string.
      - "${NGINX_PORT}:80"
    env_file:
      - .env
    environment:
      FILE_SIZE_LIMIT: "${FILE_SIZE_LIMIT:-5242880}"
      BUCKET_NAME: "${AWS_S3_BUCKET_NAME:-uploads}"
    depends_on:
      - web
      - api
      - space

volumes:
  pgdata:
  redisdata:
  uploads:

178
docker-compose-local.yml Normal file
View File

@ -0,0 +1,178 @@
version: "3.8"

# Shared bridge network so every dev container can reach the others by
# service name.
networks:
  dev_env:
    driver: bridge

# Named volumes persist data across container recreation.
volumes:
  redisdata:
  uploads:
  pgdata:

services:
  plane-redis:
    container_name: plane-redis
    image: redis:6.2.7-alpine
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - redisdata:/data

  # MinIO object store (S3-compatible) for file uploads.
  plane-minio:
    container_name: plane-minio
    image: minio/minio
    restart: unless-stopped
    networks:
      - dev_env
    command: server /export --console-address ":9090"
    volumes:
      - uploads:/export
    env_file:
      - .env
    environment:
      # Quoted so compose interpolation results are always treated as strings.
      MINIO_ROOT_USER: "${AWS_ACCESS_KEY_ID}"
      MINIO_ROOT_PASSWORD: "${AWS_SECRET_ACCESS_KEY}"

  # One-shot job: create the upload bucket, allow anonymous downloads, exit.
  # NOTE: \$ defers variable expansion to the container shell, not compose.
  createbuckets:
    image: minio/mc
    networks:
      - dev_env
    entrypoint: >
      /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; "
    env_file:
      - .env
    depends_on:
      - plane-minio

  plane-db:
    container_name: plane-db
    image: postgres:15.2-alpine
    restart: unless-stopped
    networks:
      - dev_env
    command: postgres -c 'max_connections=1000'
    volumes:
      - pgdata:/var/lib/postgresql/data
    env_file:
      - .env
    environment:
      POSTGRES_USER: "${PGUSER}"
      POSTGRES_DB: "${PGDATABASE}"
      POSTGRES_PASSWORD: "${PGPASSWORD}"
      PGDATA: /var/lib/postgresql/data

  # Next.js main app in dev mode; source is bind-mounted for hot reload.
  web:
    container_name: web
    build:
      context: .
      dockerfile: ./web/Dockerfile.dev
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - .:/app
    command: yarn dev --filter=web
    env_file:
      - ./web/.env
    depends_on:
      - api
      - worker

  # Public "spaces" deploy app in dev mode.
  space:
    build:
      context: .
      dockerfile: ./space/Dockerfile.dev
    container_name: space
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - .:/app
    command: yarn dev --filter=space
    env_file:
      - ./space/.env
    depends_on:
      - api
      - worker
      - web

  # Django API server with auto-migration and the dev settings module.
  api:
    container_name: api
    build:
      context: ./apiserver
      dockerfile: Dockerfile.dev
      args:
        # Build args must be strings; quoted to avoid YAML integer typing.
        DOCKER_BUILDKIT: "1"
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - ./apiserver:/code
    command: /bin/sh -c "python manage.py migrate && python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local"
    env_file:
      - ./apiserver/.env
    depends_on:
      - plane-db
      - plane-redis

  # Celery background worker (same codebase as the API).
  worker:
    container_name: bgworker
    build:
      context: ./apiserver
      dockerfile: Dockerfile.dev
      args:
        DOCKER_BUILDKIT: "1"
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - ./apiserver:/code
    command: /bin/sh -c "celery -A plane worker -l info"
    env_file:
      - ./apiserver/.env
    depends_on:
      - api
      - plane-db
      - plane-redis

  # Celery beat scheduler for periodic tasks.
  beat-worker:
    container_name: beatworker
    build:
      context: ./apiserver
      dockerfile: Dockerfile.dev
      args:
        DOCKER_BUILDKIT: "1"
    restart: unless-stopped
    networks:
      - dev_env
    volumes:
      - ./apiserver:/code
    command: /bin/sh -c "celery -A plane beat -l info"
    env_file:
      - ./apiserver/.env
    depends_on:
      - api
      - plane-db
      - plane-redis

  # Nginx reverse proxy fronting web, api, and space.
  proxy:
    container_name: proxy
    build:
      context: ./nginx
      dockerfile: Dockerfile
    restart: unless-stopped
    networks:
      - dev_env
    ports:
      # Quoted: an unquoted HOST:CONTAINER mapping can be parsed by YAML 1.1
      # as a base-60 integer instead of a string.
      - "${NGINX_PORT}:80"
    env_file:
      - .env
    environment:
      FILE_SIZE_LIMIT: "${FILE_SIZE_LIMIT:-5242880}"
      BUCKET_NAME: "${AWS_S3_BUCKET_NAME:-uploads}"
    depends_on:
      - web
      - api
      - space

View File

@ -11,6 +11,11 @@ http {
client_max_body_size ${FILE_SIZE_LIMIT}; client_max_body_size ${FILE_SIZE_LIMIT};
add_header X-Content-Type-Options "nosniff" always;
add_header Referrer-Policy "no-referrer-when-downgrade" always;
add_header Permissions-Policy "interest-cohort=()" always;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
location / { location / {
proxy_pass http://web:3000/; proxy_pass http://web:3000/;
} }
@ -20,6 +25,7 @@ http {
} }
location /spaces/ { location /spaces/ {
rewrite ^/spaces/?$ /spaces/login break;
proxy_pass http://space:3000/spaces/; proxy_pass http://space:3000/spaces/;
} }

View File

@ -1,11 +1,16 @@
{ {
"repository": "https://github.com/makeplane/plane.git", "repository": "https://github.com/makeplane/plane.git",
"version": "0.13.2",
"license": "AGPL-3.0", "license": "AGPL-3.0",
"private": true, "private": true,
"workspaces": [ "workspaces": [
"web", "web",
"space", "space",
"packages/*" "packages/editor/*",
"packages/eslint-config-custom",
"packages/tailwind-config-custom",
"packages/tsconfig",
"packages/ui"
], ],
"scripts": { "scripts": {
"build": "turbo run build", "build": "turbo run build",
@ -22,7 +27,10 @@
"prettier": "latest", "prettier": "latest",
"prettier-plugin-tailwindcss": "^0.5.4", "prettier-plugin-tailwindcss": "^0.5.4",
"tailwindcss": "^3.3.3", "tailwindcss": "^3.3.3",
"turbo": "latest" "turbo": "^1.10.14"
},
"resolutions": {
"@types/react": "18.2.0"
}, },
"packageManager": "yarn@1.22.19" "packageManager": "yarn@1.22.19"
} }

View File

@ -0,0 +1,112 @@
# @plane/editor-core
## Description
The `@plane/editor-core` package serves as the foundation for our editor system. It provides the base functionality for our other editor packages; it is not used directly in any of the projects, but only as a base for extending the other editors.
## Utilities
We provide a wide range of utilities for extending the core itself.
1. Merging classes and custom styling
2. Adding new extensions
3. Adding custom props
4. Base menu items, and their commands
This allows for extensive customization and flexibility in the Editors created using our `editor-core` package.
### Here's a detailed overview of what's exported
1. useEditor - A hook that you can use to extend the Plane editor.
| Prop | Type | Description |
| --- | --- | --- |
| `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features |
| `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object |
| `uploadFile` | `(file: File) => Promise<string>` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. |
| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise<any>` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. |
| `value` | `html string` | The initial content of the editor. |
| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. |
| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. |
| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. |
| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert in case of content not being "saved". |
| `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component |
2. useReadOnlyEditor - A hook that can be used to extend a Read Only instance of the core editor.
| Prop | Type | Description |
| --- | --- | --- |
| `value` | `string` | The initial content of the editor. |
| `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component |
| `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features |
| `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object |
3. Items and Commands - H1, H2, H3, task list, quote, code block, etc's methods.
4. UI Wrappers
- `EditorContainer` - Wrap your Editor Container with this to apply base classes and styles.
- `EditorContentWrapper` - Use this to get Editor's Content and base menus.
5. Extending with Custom Styles
```ts
const customEditorClassNames = getEditorClassNames({ noBorder, borderOnFocus, customClassName });
```
## Core features
- **Content Trimming**: The Editor's content is now automatically trimmed of empty line breaks from the start and end before submitting it to the backend. This ensures cleaner, more consistent data.
- **Value Cleaning**: The Editor's value is cleaned at the editor core level, eliminating the need for additional validation before sending from our app. This results in cleaner code and less potential for errors.
- **Turbo Pipeline**: Added a turbo pipeline for both dev and build tasks for projects depending on the editor package.
```json
"web#develop": {
"cache": false,
"persistent": true,
"dependsOn": [
"@plane/lite-text-editor#build",
"@plane/rich-text-editor#build"
]
},
"space#develop": {
"cache": false,
"persistent": true,
"dependsOn": [
"@plane/lite-text-editor#build",
"@plane/rich-text-editor#build"
]
},
"web#build": {
"cache": true,
"dependsOn": [
"@plane/lite-text-editor#build",
"@plane/rich-text-editor#build"
]
},
"space#build": {
"cache": true,
"dependsOn": [
"@plane/lite-text-editor#build",
"@plane/rich-text-editor#build"
]
},
```
## Base extensions included
- BulletList
- OrderedList
- Blockquote
- Code
- Gapcursor
- Link
- Image
- Basic Marks
- Underline
- TextStyle
- Color
- TaskList
- Markdown
- Table

View File

@ -0,0 +1,77 @@
{
"name": "@plane/editor-core",
"version": "0.0.1",
"description": "Core Editor that powers Plane",
"main": "./dist/index.mjs",
"module": "./dist/index.mjs",
"types": "./dist/index.d.mts",
"files": [
"dist/**/*"
],
"exports": {
".": {
"types": "./dist/index.d.mts",
"import": "./dist/index.mjs",
"module": "./dist/index.mjs"
}
},
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"check-types": "tsc --noEmit"
},
"peerDependencies": {
"react": "^18.2.0",
"react-dom": "18.2.0",
"next": "12.3.2",
"next-themes": "^0.2.1"
},
"dependencies": {
"react-moveable" : "^0.54.2",
"@blueprintjs/popover2": "^2.0.10",
"@tiptap/core": "^2.1.7",
"@tiptap/extension-color": "^2.1.11",
"@tiptap/extension-image": "^2.1.7",
"@tiptap/extension-link": "^2.1.7",
"@tiptap/extension-table": "^2.1.6",
"@tiptap/extension-table-cell": "^2.1.6",
"@tiptap/extension-table-header": "^2.1.6",
"@tiptap/extension-table-row": "^2.1.6",
"@tiptap/extension-task-item": "^2.1.7",
"@tiptap/extension-task-list": "^2.1.7",
"@tiptap/extension-text-style": "^2.1.11",
"@tiptap/extension-underline": "^2.1.7",
"@tiptap/pm": "^2.1.7",
"@tiptap/react": "^2.1.7",
"@tiptap/starter-kit": "^2.1.10",
"@types/react": "^18.2.5",
"@types/react-dom": "18.0.11",
"@types/node": "18.15.3",
"class-variance-authority": "^0.7.0",
"clsx": "^1.2.1",
"eslint": "8.36.0",
"eslint-config-next": "13.2.4",
"eventsource-parser": "^0.1.0",
"lucide-react": "^0.244.0",
"react-markdown": "^8.0.7",
"tailwind-merge": "^1.14.0",
"tippy.js": "^6.3.7",
"tiptap-markdown": "^0.8.2",
"use-debounce": "^9.0.4"
},
"devDependencies": {
"eslint": "^7.32.0",
"postcss": "^8.4.29",
"tailwind-config-custom": "*",
"tsconfig": "*",
"tsup": "^7.2.0",
"typescript": "4.9.5"
},
"keywords": [
"editor",
"rich-text",
"markdown",
"nextjs",
"react"
]
}

View File

@ -0,0 +1,9 @@
// PostCSS configuration: run Tailwind first, then vendor-prefix the output.
// To add other PostCSS plugins, see:
// https://tailwindcss.com/docs/using-with-preprocessors
const config = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};

module.exports = config;

Some files were not shown because too many files have changed in this diff Show More