Merge branch 'develop' of github.com:makeplane/plane into feat/bulk_issue_operations

This commit is contained in:
NarayanBavisetti 2023-11-03 19:15:15 +05:30
commit e5eeb11899
1277 changed files with 71259 additions and 51583 deletions

17
.deepsource.toml Normal file
View File

@@ -0,0 +1,17 @@
version = 1
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
environment = ["nodejs"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"

View File

@@ -36,15 +36,13 @@ jobs:
- name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true'
run: |
cd web
yarn
yarn build
yarn build --filter=web
- name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: |
cd space
yarn
yarn build
yarn build --filter=space

79
.github/workflows/create-sync-pr.yml vendored Normal file
View File

@@ -0,0 +1,79 @@
name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
branches:
- master
types:
- closed
jobs:
create_pr:
# Only run the job when a PR is merged
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: read
steps:
- name: Check SOURCE_REPO
id: check_repo
env:
SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
run: |
echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
- name: Checkout Code
if: steps.check_repo.outputs.is_correct_repo == 'true'
uses: actions/checkout@v2
with:
persist-credentials: false
fetch-depth: 0
- name: Set up Branch Name
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
- name: Setup GH CLI
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt update
sudo apt install gh -y
- name: Create Pull Request
if: steps.check_repo.outputs.is_correct_repo == 'true'
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
git checkout $SOURCE_BRANCH
git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target $SOURCE_BRANCH:$SOURCE_BRANCH
PR_TITLE="${{ github.event.pull_request.title }}"
PR_BODY="${{ github.event.pull_request.body }}"
# Remove double quotes
PR_TITLE_CLEANED="${PR_TITLE//\"/}"
PR_BODY_CLEANED="${PR_BODY//\"/}"
# Construct PR_BODY_CONTENT using a here-document
PR_BODY_CONTENT=$(cat <<EOF
$PR_BODY_CLEANED
EOF
)
gh pr create \
--base $TARGET_BRANCH \
--head $SOURCE_BRANCH \
--title "[SYNC] $PR_TITLE_CLEANED" \
--body "$PR_BODY_CONTENT" \
--repo $TARGET_REPO

View File

@@ -39,10 +39,10 @@ jobs:
type=ref,event=tag
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaDeploy
id: metaSpace
uses: docker/metadata-action@v4.3.0
with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-deploy
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
tags: |
type=ref,event=tag
@@ -87,7 +87,7 @@ jobs:
file: ./space/Dockerfile.space
platforms: linux/amd64
push: true
tags: ${{ steps.metaDeploy.outputs.tags }}
tags: ${{ steps.metaSpace.outputs.tags }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}

6
.gitignore vendored
View File

@@ -16,6 +16,8 @@ node_modules
# Production
/build
dist/
out/
# Misc
.DS_Store
@@ -73,3 +75,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
tmp/
## packages
dist

View File

@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@plane.so.
squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the

View File

@@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak
While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us this information without back-and-forth questions like:
- 3rd-party libraries being used and their versions
- a use-case that fails
- 3rd-party libraries being used and their versions
- a use-case that fails
Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
### Requirements
- Node.js version v16.18.0
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
- Node.js version v16.18.0
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
### Setup the project
@@ -30,6 +30,48 @@ The project is a monorepo, with backend api and frontend in a single repo.
The backend is a Django project kept inside the apiserver directory.
1. Clone the repo
```bash
git clone https://github.com/makeplane/plane
cd plane
chmod +x setup.sh
```
2. Run setup.sh
```bash
./setup.sh
```
3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in the **web/.env** and **space/.env** files
```bash
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
```
```bash
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
```
4. Run Docker compose up
```bash
docker compose up -d
```
5. Install dependencies
```bash
yarn install
```
6. Run the web app in development mode
```bash
yarn dev
```
## Missing a Feature?
If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You can also do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
@@ -39,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
- All features or bug fixes must be tested by one or more specs (unit-tests).
- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
- All features or bug fixes must be tested by one or more specs (unit-tests).
- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Need help? Questions and suggestions
@@ -48,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in
## Ways to contribute
- Try Plane Cloud and the self hosting platform and give feedback
- Add new integrations
- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
- Share your thoughts and suggestions with us
- Help create tutorials and blog posts
- Request a feature by submitting a proposal
- Report a bug
- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
- Try Plane Cloud and the self hosting platform and give feedback
- Add new integrations
- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
- Share your thoughts and suggestions with us
- Help create tutorials and blog posts
- Request a feature by submitting a proposal
- Report a bug
- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.

134
ENV_SETUP.md Normal file
View File

@@ -0,0 +1,134 @@
# Environment Variables
Environment variables are distributed across various files. Please refer to them carefully.
## {PROJECT_FOLDER}/.env
This file is available in the project root folder.
```
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires a change in nginx.conf for uploads if using the Minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Settings related to Docker
DOCKERIZED=1
# Set to 1 if using the pre-configured Minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
```
## {PROJECT_FOLDER}/web/.env.example
```
# Enable/Disable OAUTH - default 0 for a self-hosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
# Public boards deploy URL
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
```
## {PROJECT_FOLDER}/spaces/.env.example
```
# Flag to toggle OAuth
NEXT_PUBLIC_ENABLE_OAUTH=0
```
## {PROJECT_FOLDER}/apiserver/.env
```
# Backend
# Debug value for the API server; set it to 0 for production use
DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
# Error logs
SENTRY_DSN=""
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS="1"
EMAIL_USE_SSL="0"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires a change in nginx.conf for uploads if using the Minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
DOCKERIZED=1
# Set to 1 if using the pre-configured Minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Default Creds
DEFAULT_EMAIL="captain@plane.so"
DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
# Email Redirection URL
WEB_URL="http://localhost"
```
## Updates
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
- The naming convention for containers and images has been updated.
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
- The image name for Plane deployment has been changed to plane-space.

View File

@@ -39,33 +39,35 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
## ⚡️ Quick start with Docker Compose
## ⚡️ Contributors Quick Start
### Docker Compose Setup
### Prerequisite
- Clone the repository
The development system must have Docker Engine installed and running.
### Steps
Setting up the local environment is straightforward. Follow the steps below and you will be ready to contribute:
1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
1. Switch to the code folder `cd plane`
1. Create the feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
1. Open a terminal and run `./setup.sh`
1. Open the code in VS Code or a similar IDE
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to learn about the environment variables used in the system
1. Run the Docker command to start the services: `docker compose -f docker-compose-local.yml up -d`
```bash
git clone https://github.com/makeplane/plane
cd plane
chmod +x setup.sh
./setup.sh
```
- Run setup.sh
You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
```bash
./setup.sh http://localhost
```
That's it!
> If running in a cloud environment, replace localhost with the public-facing IP address of the VM
## 🍙 Self Hosting
- Run Docker compose up
```bash
docker compose up -d
```
<strong>You can use the default email and password for your first login: `captain@plane.so` and `password123`.</strong>
For self-hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page.
## 🚀 Features

View File

@@ -1,7 +1,7 @@
# Backend
# Debug value for the API server; set it to 0 for production use
DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
DJANGO_SETTINGS_MODULE="plane.settings.production"
# Error logs
SENTRY_DSN=""
@@ -59,3 +59,14 @@ DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
# Enable Email/Password Signup
ENABLE_EMAIL_PASSWORD="1"
# Enable Magic link Login
ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"

52
apiserver/Dockerfile.dev Normal file
View File

@@ -0,0 +1,52 @@
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
RUN apk --no-cache add \
"bash~=5.2" \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"libffi-dev" \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers"
WORKDIR /code
COPY requirements.txt ./requirements.txt
ADD requirements ./requirements
RUN pip install -r requirements.txt --compile --no-cache-dir
RUN addgroup -S plane && \
adduser -S captain -G plane
RUN chown captain.plane /code
USER captain
# Add in Django deps and generate Django's static files
USER root
# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
# CMD [ "./bin/takeoff" ]

View File

@@ -1,4 +1,4 @@
import os, sys, random, string
import os, sys
import uuid
sys.path.append("/code")

View File

@@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg
def post_fork(server, worker):
patch_psycopg()
worker.log.info("Made Psycopg2 Green")
worker.log.info("Made Psycopg2 Green")

View File

@@ -101,4 +101,4 @@ class ProjectLitePermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
).exists()
).exists()

View File

@@ -58,8 +58,17 @@ class WorkspaceEntityPermission(BasePermission):
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
).exists()
return WorkspaceMember.objects.filter(
member=request.user, workspace__slug=view.workspace_slug
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
).exists()
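The updated `WorkspaceEntityPermission` above distinguishes reads from writes: safe methods only require workspace membership (row-level filtering is left to the queryset), while mutating methods also require an elevated role. A minimal, self-contained sketch of that decision logic; the role constants are hypothetical stand-ins, not Plane's actual values:
```python
# Sketch of the permission decision above; OWNER/ADMIN codes are illustrative.
SAFE_METHODS = ("GET", "HEAD", "OPTIONS")  # mirrors rest_framework.permissions
OWNER, ADMIN, MEMBER = 20, 15, 10  # hypothetical role codes

def has_workspace_permission(method: str, is_member: bool, role: int) -> bool:
    # Safe methods: any workspace member may read; filtering happens in the queryset.
    if method in SAFE_METHODS:
        return is_member
    # Write methods: membership alone is not enough.
    return is_member and role in (OWNER, ADMIN)

assert has_workspace_permission("GET", True, MEMBER)       # members can read
assert not has_workspace_permission("POST", True, MEMBER)  # but not write
assert has_workspace_permission("PATCH", True, ADMIN)      # admins can write
```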

View File

@@ -1,5 +1,13 @@
from .base import BaseSerializer
from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
from .user import (
UserSerializer,
UserLiteSerializer,
ChangePasswordSerializer,
ResetPasswordSerializer,
UserAdminLiteSerializer,
UserMeSerializer,
UserMeSettingsSerializer,
)
from .workspace import (
WorkSpaceSerializer,
WorkSpaceMemberSerializer,
@@ -8,9 +16,11 @@ from .workspace import (
WorkspaceLiteSerializer,
WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
)
from .project import (
ProjectSerializer,
ProjectListSerializer,
ProjectDetailSerializer,
ProjectMemberSerializer,
ProjectMemberInviteSerializer,
@@ -20,11 +30,16 @@ from .project import (
ProjectMemberLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
ProjectPublicMemberSerializer
ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleFavoriteSerializer,
CycleWriteSerializer,
)
from .asset import FileAssetSerializer
from .issue import (
IssueCreateSerializer,

View File

@@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)

View File

@@ -3,3 +3,56 @@ from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
def __init__(self, *args, **kwargs):
# If 'fields' is provided in the arguments, remove it and store it separately.
# This is done so as not to pass this custom argument up to the superclass.
fields = kwargs.pop("fields", None)
# Call the initialization of the superclass.
super().__init__(*args, **kwargs)
# If 'fields' was provided, filter the fields of the serializer accordingly.
if fields is not None:
self.fields = self._filter_fields(fields)
def _filter_fields(self, fields):
"""
Adjust the serializer's fields based on the provided 'fields' list.
:param fields: List or dictionary specifying which fields to include in the serializer.
:return: The updated fields for the serializer.
"""
# Check each field_name in the provided fields.
for field_name in fields:
# If the field is a dictionary (indicating nested fields),
# loop through its keys and values.
if isinstance(field_name, dict):
for key, value in field_name.items():
# If the value of this nested field is a list,
# perform a recursive filter on it.
if isinstance(value, list):
self._filter_fields(self.fields[key], value)
# Create a list to store allowed fields.
allowed = []
for item in fields:
# If the item is a string, it directly represents a field's name.
if isinstance(item, str):
allowed.append(item)
# If the item is a dictionary, it represents a nested field.
# Add the key of this dictionary to the allowed list.
elif isinstance(item, dict):
allowed.append(list(item.keys())[0])
# Convert the current serializer's fields and the allowed fields to sets.
existing = set(self.fields)
allowed = set(allowed)
# Remove fields from the serializer that aren't in the 'allowed' list.
for field_name in (existing - allowed):
self.fields.pop(field_name)
return self.fields
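As a usage sketch, the new `fields` kwarg lets a caller trim serializer output per request. The snippet below is a hypothetical, self-contained illustration (it configures bare Django settings so DRF runs standalone; the serializer and data are made up, not Plane's):
```python
import django
from django.conf import settings

settings.configure()  # minimal settings so DRF works outside a full project
django.setup()

from rest_framework import serializers

class ProjectSketchSerializer(serializers.Serializer):
    """Hypothetical serializer mimicking DynamicBaseSerializer's 'fields' kwarg."""
    id = serializers.IntegerField()
    name = serializers.CharField()
    description = serializers.CharField()

    def __init__(self, *args, **kwargs):
        allowed = kwargs.pop("fields", None)
        super().__init__(*args, **kwargs)
        if allowed is not None:
            # Drop every declared field that was not requested.
            for field_name in set(self.fields) - set(allowed):
                self.fields.pop(field_name)

data = {"id": 1, "name": "Plane", "description": "Project management tool"}
print(ProjectSketchSerializer(data, fields=["id", "name"]).data)
# -> {'id': 1, 'name': 'Plane'}
```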

View File

@@ -1,6 +1,3 @@
# Django imports
from django.db.models.functions import TruncDate
# Third party imports
from rest_framework import serializers
@@ -12,10 +9,14 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite
class CycleWriteSerializer(BaseSerializer):
class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
@@ -34,7 +35,6 @@ class CycleSerializer(BaseSerializer):
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
assignees = serializers.SerializerMethodField(read_only=True)
labels = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
@@ -42,19 +42,24 @@
project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
def get_assignees(self, obj):
members = [
{
"avatar": assignee.avatar,
"first_name": assignee.first_name,
"display_name": assignee.display_name,
"id": assignee.id,
}
for issue_cycle in obj.issue_cycle.all()
for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees"
).all()
for assignee in issue_cycle.issue.assignees.all()
]
# Use a set comprehension to return only the unique objects
@@ -64,24 +69,6 @@
unique_list = [dict(item) for item in unique_objects]
return unique_list
def get_labels(self, obj):
labels = [
{
"name": label.name,
"color": label.color,
"id": label.id,
}
for issue_cycle in obj.issue_cycle.all()
for label in issue_cycle.issue.labels.all()
]
# Use a set comprehension to return only the unique objects
unique_objects = {frozenset(item.items()) for item in labels}
# Convert the set back to a list of dictionaries
unique_list = [dict(item) for item in unique_objects]
return unique_list
class Meta:
model = Cycle

View File

@@ -6,7 +6,6 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue

View File

@@ -5,4 +5,4 @@ from .github import (
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
from .slack import SlackProjectSyncSerializer

View File

@@ -8,8 +8,7 @@ from rest_framework import serializers
from .base import BaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@@ -75,13 +74,13 @@ class IssueCreateSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
assignees_list = serializers.ListField(
assignees = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
labels_list = serializers.ListField(
labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@@ -99,6 +98,12 @@
"updated_at",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
data['labels'] = [str(label.id) for label in instance.labels.all()]
return data
def validate(self, data):
if (
data.get("start_date", None) is not None
@@ -109,8 +114,8 @@
return data
def create(self, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
assignees = validated_data.pop("assignees", None)
labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
@@ -168,8 +173,8 @@
return issue
def update(self, instance, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
assignees = validated_data.pop("assignees", None)
labels = validated_data.pop("labels", None)
# Related models
project_id = instance.project_id
@@ -226,25 +231,6 @@
fields = "__all__"
class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssuePropertySerializer(BaseSerializer):
class Meta:
@@ -281,7 +267,6 @@
class IssueLabelSerializer(BaseSerializer):
# label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel
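The `IssueCreateSerializer` changes above rename the write-only `assignees_list`/`labels_list` fields to `assignees`/`labels` and add a `to_representation` that returns those relations as lists of ID strings, so requests and responses now share key names (the same pattern is applied to `ModuleWriteSerializer.members` and `PageSerializer.labels` below). A hedged sketch of the resulting payload shape, with made-up IDs:
```python
import json

# Write: clients send plain ID lists under the final key names.
request_payload = {
    "name": "Fix login redirect",
    "assignees": ["5b0e3f2a-0000-0000-0000-000000000001"],  # illustrative UUIDs
    "labels": ["2f6a1b90-0000-0000-0000-000000000002"],
}

# Read: to_representation() stringifies the related IDs under the same keys,
# so a round trip preserves the payload shape.
response_payload = {"id": "7d3c9a1e-0000-0000-0000-000000000003", **request_payload}
print(json.dumps(response_payload, indent=2))
```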

View File

@@ -4,9 +4,8 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
User,
@@ -19,7 +18,7 @@ from plane.db.models import (
class ModuleWriteSerializer(BaseSerializer):
members_list = serializers.ListField(
members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
@@ -39,6 +38,11 @@
"created_at",
"updated_at",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['members'] = [str(member.id) for member in instance.members.all()]
return data
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
@@ -46,7 +50,7 @@
return data
def create(self, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
project = self.context["project"]
@@ -72,7 +76,7 @@
return module
def update(self, instance, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()

View File

@@ -33,7 +33,7 @@ class PageBlockLiteSerializer(BaseSerializer):
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
labels_list = serializers.ListField(
labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@@ -50,9 +50,13 @@
"project",
"owned_by",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['labels'] = [str(label.id) for label in instance.labels.all()]
return data
def create(self, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@@ -77,7 +81,7 @@
return page
def update(self, instance, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(

View File

@@ -1,11 +1,8 @@
# Django imports
from django.db import IntegrityError
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
@@ -94,8 +91,33 @@
read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True)
is_member = serializers.BooleanField(read_only=True)
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
members = serializers.SerializerMethodField()
def get_members(self, obj):
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
"id",
"member_id",
"member__display_name",
"member__avatar",
)
return project_members
class Meta:
model = Project
fields = "__all__"
class ProjectDetailSerializer(BaseSerializer):
workspace = WorkSpaceSerializer(read_only=True)
# workspace = WorkSpaceSerializer(read_only=True)
default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
@@ -148,8 +170,6 @@ class ProjectIdentifierSerializer(BaseSerializer):
class ProjectFavoriteSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = ProjectFavorite
fields = "__all__"
@@ -178,12 +198,12 @@
fields = "__all__"
read_only_fields = [
"workspace",
"project", "anchor",
"project",
"anchor",
]
class ProjectPublicMemberSerializer(BaseSerializer):
class Meta:
model = ProjectPublicMember
fields = "__all__"

View File

@@ -3,7 +3,7 @@ from rest_framework import serializers
# Module import
from .base import BaseSerializer
from plane.db.models import User
from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer):
@@ -33,6 +33,81 @@
return bool(obj.first_name) or bool(obj.last_name)
class UserMeSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"avatar",
"cover_image",
"date_joined",
"display_name",
"email",
"first_name",
"last_name",
"is_active",
"is_bot",
"is_email_verified",
"is_managed",
"is_onboarded",
"is_tour_completed",
"mobile_number",
"role",
"onboarding_step",
"user_timezone",
"username",
"theme",
"last_workspace_id",
]
read_only_fields = fields
class UserMeSettingsSerializer(BaseSerializer):
workspace = serializers.SerializerMethodField()
class Meta:
model = User
fields = [
"id",
"email",
"workspace",
]
read_only_fields = fields
def get_workspace(self, obj):
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.filter(
pk=obj.last_workspace_id, workspace_member__member=obj.id
).first()
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug if workspace is not None else "",
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug if workspace is not None else "",
"invites": workspace_invites,
}
else:
fallback_workspace = (
Workspace.objects.filter(workspace_member__member_id=obj.id)
.order_by("created_at")
.first()
)
return {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
class UserLiteSerializer(BaseSerializer):
class Meta:
model = User
@@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer):
class UserAdminLiteSerializer(BaseSerializer):
class Meta:
model = User
fields = [
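Based on the two branches of `get_workspace` above, the `workspace` key of the `UserMeSettingsSerializer` output takes one of two shapes. A sketch with illustrative values:
```python
# Branch 1: the user has a last_workspace_id they are still a member of
# (slugs fall back to "" if the workspace row cannot be found).
with_last_workspace = {
    "last_workspace_id": "3f2b0000-0000-0000-0000-000000000001",  # illustrative
    "last_workspace_slug": "acme",
    "fallback_workspace_id": "3f2b0000-0000-0000-0000-000000000001",
    "fallback_workspace_slug": "acme",
    "invites": 2,
}

# Branch 2: no last workspace, so the oldest workspace the user belongs to
# is offered as a fallback (None if they belong to none).
without_last_workspace = {
    "last_workspace_id": None,
    "last_workspace_slug": None,
    "fallback_workspace_id": "9d410000-0000-0000-0000-000000000002",
    "fallback_workspace_slug": "starter",
    "invites": 0,
}
print(with_last_workspace, without_last_workspace, sep="\n")
```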

View File

@@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -65,7 +65,7 @@
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)

View File

@@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer):
fields = "__all__"
class WorkspaceMemberMeSerializer(BaseSerializer):
class Meta:
model = WorkspaceMember
fields = "__all__"
class WorkspaceMemberAdminSerializer(BaseSerializer):
member = UserAdminLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
@@ -103,9 +110,8 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
else:
team = Team.objects.create(**validated_data)
return team
team = Team.objects.create(**validated_data)
return team
def update(self, instance, validated_data):
if "members" in validated_data:
@@ -117,8 +123,7 @@
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
else:
return super().update(instance, validated_data)
return super().update(instance, validated_data)
class WorkspaceThemeSerializer(BaseSerializer):

View File

@@ -0,0 +1,50 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .release_note import urlpatterns as release_note_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .unsplash import urlpatterns as unsplash_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
*authentication_urls,
*configuration_urls,
*cycle_urls,
*estimate_urls,
*gpt_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
*issue_urls,
*module_urls,
*notification_urls,
*page_urls,
*project_urls,
*public_board_urls,
*release_note_urls,
*search_urls,
*state_urls,
*unsplash_urls,
*user_urls,
*view_urls,
*workspace_urls,
]

View File

@@ -0,0 +1,46 @@
from django.urls import path
from plane.api.views import (
AnalyticsEndpoint,
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
DefaultAnalyticsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/analytics/",
AnalyticsEndpoint.as_view(),
name="plane-analytics",
),
path(
"workspaces/<str:slug>/analytic-view/",
AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
name="analytic-view",
),
path(
"workspaces/<str:slug>/analytic-view/<uuid:pk>/",
AnalyticViewViewset.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="analytic-view",
),
path(
"workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
SavedAnalyticEndpoint.as_view(),
name="saved-analytic-view",
),
path(
"workspaces/<str:slug>/export-analytics/",
ExportAnalyticsEndpoint.as_view(),
name="export-analytics",
),
path(
"workspaces/<str:slug>/default-analytics/",
DefaultAnalyticsEndpoint.as_view(),
name="default-analytics",
),
]

View File

@@ -0,0 +1,31 @@
from django.urls import path
from plane.api.views import (
FileAssetEndpoint,
UserAssetsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/file-assets/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"users/file-assets/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
path(
"users/file-assets/<str:asset_key>/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
]

View File

@@ -0,0 +1,68 @@
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
from plane.api.views import (
# Authentication
SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
MagicSignInGenerateEndpoint,
OauthEndpoint,
## End Authentication
# Auth Extended
ForgotPasswordEndpoint,
VerifyEmailEndpoint,
ResetPasswordEndpoint,
RequestEmailVerificationEndpoint,
ChangePasswordEndpoint,
## End Auth Extended
# API Tokens
ApiTokenEndpoint,
## End API Tokens
)
urlpatterns = [
# Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path(
"request-email-verify/",
RequestEmailVerificationEndpoint.as_view(),
name="request-reset-email",
),
# Password Manipulation
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"reset-password/<uidb64>/<token>/",
ResetPasswordEndpoint.as_view(),
name="password-reset",
),
path(
"forgot-password/",
ForgotPasswordEndpoint.as_view(),
name="forgot-password",
),
# API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
## End API Tokens
]

View File

@@ -0,0 +1,12 @@
from django.urls import path
from plane.api.views import ConfigurationEndpoint
urlpatterns = [
path(
"configs/",
ConfigurationEndpoint.as_view(),
name="configuration",
),
]

View File

@@ -0,0 +1,87 @@
from django.urls import path
from plane.api.views import (
CycleViewSet,
CycleIssueViewSet,
CycleDateCheckEndpoint,
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
CycleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
CycleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
CycleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
CycleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
CycleDateCheckEndpoint.as_view(),
name="project-cycle-date",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
CycleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
CycleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
TransferCycleIssueEndpoint.as_view(),
name="transfer-issues",
),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
ProjectEstimatePointEndpoint.as_view(),
name="project-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
BulkEstimatePointEndpoint.as_view(
{
"get": "list",
"post": "create",
}
),
name="bulk-create-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
BulkEstimatePointEndpoint.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="bulk-create-estimate-points",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import GPTIntegrationEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/importers/<str:service>/",
ServiceIssueImportSummaryEndpoint.as_view(),
name="importer-summary",
),
path(
"workspaces/<str:slug>/projects/importers/<str:service>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
UpdateServiceImportStatusEndpoint.as_view(),
name="importer-status",
),
]

View File

@@ -0,0 +1,53 @@
from django.urls import path
from plane.api.views import (
InboxViewSet,
InboxIssueViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
InboxViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
InboxViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssueViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
]

View File

@@ -0,0 +1,150 @@
from django.urls import path
from plane.api.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
urlpatterns = [
path(
"integrations/",
IntegrationViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="integrations",
),
path(
"integrations/<uuid:pk>/",
IntegrationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "list",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
WorkspaceIntegrationViewSet.as_view(
{
"post": "create",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="workspace-integrations",
),
# Github Integrations
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
GithubRepositoriesEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
GithubRepositorySyncViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
GithubRepositorySyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
GithubIssueSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
BulkCreateGithubIssueSyncEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
GithubIssueSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
GithubCommentSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
GithubCommentSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
## End Github Integrations
# Slack Integration
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
SlackProjectSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
SlackProjectSyncViewSet.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
),
## End Slack Integration
]

View File

@@ -0,0 +1,315 @@
from django.urls import path
from plane.api.views import (
IssueViewSet,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
BulkImportIssuesEndpoint,
UserWorkSpaceIssues,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
ExportIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
IssueSubscriberViewSet,
IssueReactionViewSet,
CommentReactionViewSet,
IssueUserDisplayPropertyEndpoint,
IssueArchiveViewSet,
IssueRelationViewSet,
IssueDraftViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
IssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
IssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
LabelViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
LabelViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
BulkCreateIssueLabelsEndpoint.as_view(),
name="project-bulk-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
BulkImportIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/my-issues/",
UserWorkSpaceIssues.as_view(),
name="workspace-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
SubIssuesEndpoint.as_view(),
name="sub-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
IssueLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
IssueLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/export-issues/",
ExportIssuesEndpoint.as_view(),
name="export-issues",
),
## End Issues
## Issue Activity
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
IssueActivityEndpoint.as_view(),
name="project-issue-history",
),
## Issue Activity
## IssueComments
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-comment",
),
## End IssueComments
# Issue Subscribers
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
IssueSubscriberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
IssueSubscriberViewSet.as_view(
{
"get": "subscription_status",
"post": "subscribe",
"delete": "unsubscribe",
}
),
name="project-issue-subscribers",
),
## End Issue Subscribers
# Issue Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-reactions",
),
## End Issue Reactions
# Comment Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-comment-reactions",
),
## End Comment Reactions
## IssueProperty
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
IssueUserDisplayPropertyEndpoint.as_view(),
name="project-issue-display-properties",
),
## IssueProperty End
## Issue Archives
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
IssueArchiveViewSet.as_view(
{
"get": "list",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"post": "unarchive",
}
),
name="project-issue-archive",
),
## End Issue Archives
## Issue Relation
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
IssueRelationViewSet.as_view(
{
"post": "create",
}
),
name="issue-relation",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
IssueRelationViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-relation",
),
## End Issue Relation
## Issue Drafts
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
IssueDraftViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-draft",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
IssueDraftViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-draft",
),
]

View File

@@ -0,0 +1,104 @@
from django.urls import path
from plane.api.views import (
ModuleViewSet,
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
BulkImportModulesEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
ModuleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
ModuleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
ModuleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
ModuleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
ModuleLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
ModuleLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
ModuleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
ModuleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
BulkImportModulesEndpoint.as_view(),
name="bulk-modules-create",
),
]

View File

@ -0,0 +1,66 @@
from django.urls import path
from plane.api.views import (
NotificationViewSet,
UnreadNotificationEndpoint,
MarkAllReadNotificationViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/users/notifications/",
NotificationViewSet.as_view(
{
"get": "list",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/",
NotificationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
NotificationViewSet.as_view(
{
"post": "mark_read",
"delete": "mark_unread",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
NotificationViewSet.as_view(
{
"post": "archive",
"delete": "unarchive",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/unread/",
UnreadNotificationEndpoint.as_view(),
name="unread-notifications",
),
path(
"workspaces/<str:slug>/users/notifications/mark-all-read/",
MarkAllReadNotificationViewSet.as_view(
{
"post": "create",
}
),
name="mark-all-read-notifications",
),
]
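The read/ and archive/ routes pair POST with DELETE on the same URL to toggle state. A hedged client sketch, where the host, slug, notification id, and auth scheme are all placeholders rather than anything defined in this diff:

import requests

BASE = "https://plane.example.com/api"  # assumed deployment URL
HEADERS = {"Authorization": "Bearer <access_token>"}  # assumed auth scheme
READ_URL = f"{BASE}/workspaces/my-team/users/notifications/<uuid>/read/"

requests.post(READ_URL, headers=HEADERS)    # dispatches to mark_read
requests.delete(READ_URL, headers=HEADERS)  # dispatches to mark_unread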

View File

@ -0,0 +1,79 @@
from django.urls import path
from plane.api.views import (
PageViewSet,
PageBlockViewSet,
PageFavoriteViewSet,
CreateIssueFromPageBlockEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
PageViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
PageViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
PageBlockViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
PageBlockViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
PageFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
PageFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
CreateIssueFromPageBlockEndpoint.as_view(),
name="page-block-issues",
),
]

View File

@ -0,0 +1,132 @@
from django.urls import path
from plane.api.views import (
ProjectViewSet,
InviteProjectEndpoint,
ProjectMemberViewSet,
ProjectMemberInvitationsViewset,
ProjectMemberUserEndpoint,
ProjectJoinEndpoint,
AddTeamToProjectEndpoint,
ProjectUserViewsEndpoint,
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/",
ProjectViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project",
),
path(
"workspaces/<str:slug>/projects/<uuid:pk>/",
ProjectViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project",
),
path(
"workspaces/<str:slug>/project-identifiers/",
ProjectIdentifierEndpoint.as_view(),
name="project-identifiers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
InviteProjectEndpoint.as_view(),
name="invite-project",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
ProjectMemberViewSet.as_view({"get": "list", "post": "create"}),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
ProjectMemberViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/join/",
ProjectJoinEndpoint.as_view(),
name="project-join",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
AddTeamToProjectEndpoint.as_view(),
name="projects",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
ProjectMemberInvitationsViewset.as_view({"get": "list"}),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
ProjectMemberInvitationsViewset.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
ProjectUserViewsEndpoint.as_view(),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
ProjectMemberUserEndpoint.as_view(),
name="project-member-view",
),
path(
"workspaces/<str:slug>/user-favorite-projects/",
ProjectFavoritesViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
ProjectFavoritesViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
LeaveProjectEndpoint.as_view(),
name="leave-project",
),
path(
"project-covers/",
ProjectPublicCoverImagesEndpoint.as_view(),
name="project-covers",
),
]
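Note that the favorite delete route is keyed on <uuid:project_id> rather than the favorite's own <uuid:pk>, so the ViewSet presumably resolves the record by project. A hedged sketch of what such a destroy override could look like; the ProjectFavorite model name and its fields are assumptions here, not the actual implementation:

from rest_framework import status, viewsets
from rest_framework.response import Response

class FavoriteByProjectViewSet(viewsets.ViewSet):
    def destroy(self, request, slug, project_id):
        # Look the favorite up by project and requesting user, not by pk
        fav = ProjectFavorite.objects.get(  # model name assumed
            workspace__slug=slug, project_id=project_id, user=request.user
        )
        fav.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)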

View File

@ -0,0 +1,151 @@
from django.urls import path
from plane.api.views import (
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectIssuesPublicEndpoint,
IssueRetrievePublicEndpoint,
IssueCommentPublicViewSet,
IssueReactionPublicViewSet,
CommentReactionPublicViewSet,
InboxIssuePublicViewSet,
IssueVotePublicViewSet,
WorkspaceProjectDeployBoardEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
ProjectDeployBoardViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-deploy-board",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
ProjectDeployBoardViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
name="project-deploy-board-settings",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
ProjectIssuesPublicEndpoint.as_view(),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
IssueRetrievePublicEndpoint.as_view(),
name="workspace-project-boards",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentPublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssuePublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssuePublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
IssueVotePublicViewSet.as_view(
{
"get": "list",
"post": "create",
"delete": "destroy",
}
),
name="issue-vote-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/",
WorkspaceProjectDeployBoardEndpoint.as_view(),
name="workspace-project-boards",
),
]
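Everything under the public/ prefix backs the anonymous deploy-board pages, so a read presumably needs no credentials. A hedged sketch with a placeholder host and project id:

import requests

resp = requests.get(
    "https://plane.example.com/api/public/workspaces/my-team/"
    "project-boards/<project-uuid>/issues/"
)
issues = resp.json()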

View File

@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import ReleaseNotesEndpoint
urlpatterns = [
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
]

View File

@ -0,0 +1,21 @@
from django.urls import path
from plane.api.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/search/",
GlobalSearchEndpoint.as_view(),
name="global-search",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/search-issues/",
IssueSearchEndpoint.as_view(),
name="project-issue-search",
),
]

View File

@ -0,0 +1,30 @@
from django.urls import path
from plane.api.views import StateViewSet
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
StateViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-states",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
StateViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-state",
),
]
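Because every pattern is named, tests and services can resolve these routes with Django's reverse() instead of hard-coding paths. A small sketch against the state routes above, with a placeholder slug and id:

import uuid
from django.urls import reverse

url = reverse(
    "project-states",
    kwargs={"slug": "my-team", "project_id": uuid.uuid4()},
)
# resolves to .../workspaces/my-team/projects/<uuid>/states/ under
# whatever prefix includes these patterns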

View File

@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import UnsplashEndpoint
urlpatterns = [
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="unsplash",
),
]

View File

@ -0,0 +1,113 @@
from django.urls import path
from plane.api.views import (
## User
UserEndpoint,
UpdateUserOnBoardedEndpoint,
UpdateUserTourCompletedEndpoint,
UserActivityEndpoint,
ChangePasswordEndpoint,
## End User
## Workspaces
UserWorkspaceInvitationsEndpoint,
UserWorkSpacesEndpoint,
JoinWorkspaceEndpoint,
UserWorkspaceInvitationsEndpoint,
UserWorkspaceInvitationEndpoint,
UserActivityGraphEndpoint,
UserIssueCompletedGraphEndpoint,
UserWorkspaceDashboardEndpoint,
UserProjectInvitationsViewset,
## End Workspaces
)
urlpatterns = [
# User Profile
path(
"users/me/",
UserEndpoint.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="users",
),
path(
"users/me/settings/",
UserEndpoint.as_view(
{
"get": "retrieve_user_settings",
}
),
name="users",
),
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"users/me/onboard/",
UpdateUserOnBoardedEndpoint.as_view(),
name="user-onboard",
),
path(
"users/me/tour-completed/",
UpdateUserTourCompletedEndpoint.as_view(),
name="user-tour",
),
path(
"users/workspaces/<str:slug>/activities/",
UserActivityEndpoint.as_view(),
name="user-activities",
),
# user workspaces
path(
"users/me/workspaces/",
UserWorkSpacesEndpoint.as_view(),
name="user-workspace",
),
# user workspace invitations
path(
"users/me/invitations/workspaces/",
UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
name="user-workspace-invitations",
),
# user workspace invitation
path(
"users/me/invitations/<uuid:pk>/",
UserWorkspaceInvitationEndpoint.as_view(
{
"get": "retrieve",
}
),
name="user-workspace-invitation",
),
# user join workspace
# User Graphs
path(
"users/me/workspaces/<str:slug>/activity-graph/",
UserActivityGraphEndpoint.as_view(),
name="user-activity-graph",
),
path(
"users/me/workspaces/<str:slug>/issues-completed-graph/",
UserIssueCompletedGraphEndpoint.as_view(),
name="completed-graph",
),
path(
"users/me/workspaces/<str:slug>/dashboard/",
UserWorkspaceDashboardEndpoint.as_view(),
name="user-workspace-dashboard",
),
## End User Graph
path(
"users/me/invitations/workspaces/<str:slug>/<uuid:pk>/join/",
JoinWorkspaceEndpoint.as_view(),
name="user-join-workspace",
),
# user project invitations
path(
"users/me/invitations/projects/",
UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
name="user-project-invitations",
),
]

View File

@ -0,0 +1,85 @@
from django.urls import path
from plane.api.views import (
IssueViewViewSet,
GlobalViewViewSet,
GlobalViewIssuesViewSet,
IssueViewFavoriteViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/",
IssueViewViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:pk>/",
IssueViewViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-view",
),
path(
"workspaces/<str:slug>/views/",
GlobalViewViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="global-view",
),
path(
"workspaces/<str:slug>/views/<uuid:pk>/",
GlobalViewViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="global-view",
),
path(
"workspaces/<str:slug>/issues/",
GlobalViewIssuesViewSet.as_view(
{
"get": "list",
}
),
name="global-view-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
IssueViewFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/<uuid:view_id>/",
IssueViewFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-view",
),
]

View File

@ -0,0 +1,176 @@
from django.urls import path
from plane.api.views import (
WorkSpaceViewSet,
InviteWorkspaceEndpoint,
WorkSpaceMemberViewSet,
WorkspaceInvitationsViewset,
WorkspaceMemberUserEndpoint,
WorkspaceMemberUserViewsEndpoint,
WorkSpaceAvailabilityCheckEndpoint,
TeamMemberViewSet,
UserLastProjectWithWorkspaceEndpoint,
WorkspaceThemeViewSet,
WorkspaceUserProfileStatsEndpoint,
WorkspaceUserActivityEndpoint,
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
LeaveWorkspaceEndpoint,
)
urlpatterns = [
path(
"workspace-slug-check/",
WorkSpaceAvailabilityCheckEndpoint.as_view(),
name="workspace-availability",
),
path(
"workspaces/",
WorkSpaceViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/",
WorkSpaceViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace",
),
path(
"workspaces/<str:slug>/invite/",
InviteWorkspaceEndpoint.as_view(),
name="invite-workspace",
),
path(
"workspaces/<str:slug>/invitations/",
WorkspaceInvitationsViewset.as_view({"get": "list"}),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/invitations/<uuid:pk>/",
WorkspaceInvitationsViewset.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-invitations",
),
path(
"workspaces/<str:slug>/members/",
WorkSpaceMemberViewSet.as_view({"get": "list"}),
name="workspace-member",
),
path(
"workspaces/<str:slug>/members/<uuid:pk>/",
WorkSpaceMemberViewSet.as_view(
{
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-member",
),
path(
"workspaces/<str:slug>/teams/",
TeamMemberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-team-members",
),
path(
"workspaces/<str:slug>/teams/<uuid:pk>/",
TeamMemberViewSet.as_view(
{
"put": "update",
"patch": "partial_update",
"delete": "destroy",
"get": "retrieve",
}
),
name="workspace-team-members",
),
path(
"users/last-visited-workspace/",
UserLastProjectWithWorkspaceEndpoint.as_view(),
name="workspace-project-details",
),
path(
"workspaces/<str:slug>/workspace-members/me/",
WorkspaceMemberUserEndpoint.as_view(),
name="workspace-member-details",
),
path(
"workspaces/<str:slug>/workspace-views/",
WorkspaceMemberUserViewsEndpoint.as_view(),
name="workspace-member-views-details",
),
path(
"workspaces/<str:slug>/workspace-themes/",
WorkspaceThemeViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/workspace-themes/<uuid:pk>/",
WorkspaceThemeViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="workspace-themes",
),
path(
"workspaces/<str:slug>/user-stats/<uuid:user_id>/",
WorkspaceUserProfileStatsEndpoint.as_view(),
name="workspace-user-stats",
),
path(
"workspaces/<str:slug>/user-activity/<uuid:user_id>/",
WorkspaceUserActivityEndpoint.as_view(),
name="workspace-user-activity",
),
path(
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
WorkspaceUserProfileEndpoint.as_view(),
name="workspace-user-profile-page",
),
path(
"workspaces/<str:slug>/user-issues/<uuid:user_id>/",
WorkspaceUserProfileIssuesEndpoint.as_view(),
name="workspace-user-profile-issues",
),
path(
"workspaces/<str:slug>/labels/",
WorkspaceLabelsEndpoint.as_view(),
name="workspace-labels",
),
path(
"workspaces/<str:slug>/members/leave/",
LeaveWorkspaceEndpoint.as_view(),
name="leave-workspace-members",
),
]
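A hedged example of the slug-availability check; the host, auth header, and query-parameter name are assumptions, since only the route itself is defined here:

import requests

resp = requests.get(
    "https://plane.example.com/api/workspace-slug-check/",
    params={"slug": "my-team"},  # parameter name assumed
    headers={"Authorization": "Bearer <access_token>"},
)
print(resp.json())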

View File

@ -1,5 +1,6 @@
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
# Create your urls here.
@ -27,7 +28,6 @@ from plane.api.views import (
## End User
# Workspaces
WorkSpaceViewSet,
UserWorkspaceInvitationsEndpoint,
UserWorkSpacesEndpoint,
InviteWorkspaceEndpoint,
JoinWorkspaceEndpoint,
@ -70,6 +70,7 @@ from plane.api.views import (
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
## End Projects
# Issues
IssueViewSet,
@ -80,7 +81,7 @@ from plane.api.views import (
BulkDeleteIssuesEndpoint,
BulkImportIssuesEndpoint,
ProjectUserViewsEndpoint,
IssuePropertyViewSet,
IssueUserDisplayPropertyEndpoint,
LabelViewSet,
SubIssuesEndpoint,
IssueLinkViewSet,
@ -105,7 +106,6 @@ from plane.api.views import (
GlobalViewViewSet,
GlobalViewIssuesViewSet,
IssueViewViewSet,
ViewIssuesEndpoint,
IssueViewFavoriteViewSet,
## End Views
# Cycles
@ -150,12 +150,11 @@ from plane.api.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
## End Search
# Gpt
# External
GPTIntegrationEndpoint,
## End Gpt
# Release Notes
ReleaseNotesEndpoint,
## End Release Notes
UnsplashEndpoint,
## End External
# Inbox
InboxViewSet,
InboxIssueViewSet,
@ -189,9 +188,15 @@ from plane.api.views import (
# Bulk Issue Operations
BulkIssueOperationsEndpoint,
## End Bulk Issue Operations
# Configuration
ConfigurationEndpoint,
## End Configuration
)
# TODO: Delete this file
# This url file has been deprecated; use the apiserver/plane/urls folder to create new urls
urlpatterns = [
# Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
@ -204,6 +209,7 @@ urlpatterns = [
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
# Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path(
@ -230,6 +236,15 @@ urlpatterns = [
),
name="users",
),
path(
"users/me/settings/",
UserEndpoint.as_view(
{
"get": "retrieve_user_settings",
}
),
name="users",
),
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
@ -557,6 +572,7 @@ urlpatterns = [
"workspaces/<str:slug>/user-favorite-projects/",
ProjectFavoritesViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
@ -576,6 +592,11 @@ urlpatterns = [
LeaveProjectEndpoint.as_view(),
name="project",
),
path(
"project-covers/",
ProjectPublicCoverImagesEndpoint.as_view(),
name="project-covers",
),
# End Projects
# States
path(
@ -652,11 +673,6 @@ urlpatterns = [
),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:view_id>/issues/",
ViewIssuesEndpoint.as_view(),
name="project-view-issues",
),
path(
"workspaces/<str:slug>/views/",
GlobalViewViewSet.as_view(
@ -994,26 +1010,9 @@ urlpatterns = [
## End Comment Reactions
## IssueProperty
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
IssuePropertyViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-roadmap",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/<uuid:pk>/",
IssuePropertyViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-roadmap",
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
IssueUserDisplayPropertyEndpoint.as_view(),
name="project-issue-display-properties",
),
## IssueProperty End
## Issue Archives
@ -1449,20 +1448,23 @@ urlpatterns = [
name="project-issue-search",
),
## End Search
# Gpt
# External
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
## End Gpt
# Release Notes
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
## End Release Notes
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="release-notes",
),
## End External
# Inbox
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
@ -1736,4 +1738,11 @@ urlpatterns = [
BulkIssueOperationsEndpoint.as_view(),
name="bulk-issue-operation",
),
# Configuration
path(
"configs/",
ConfigurationEndpoint.as_view(),
name="configuration",
),
## End Configuration
]
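The new token/refresh/ route exposes simplejwt's stock refresh view, which accepts a refresh token and returns a fresh access token. A minimal client sketch with a placeholder host and token:

import requests

resp = requests.post(
    "https://plane.example.com/api/token/refresh/",  # assumed mount point
    json={"refresh": "<refresh_token>"},
)
access = resp.json()["access"]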

View File

@ -7,16 +7,15 @@ from .project import (
ProjectMemberInvitationsViewset,
ProjectMemberInviteDetailViewSet,
ProjectIdentifierEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
ProjectUserViewsEndpoint,
ProjectMemberUserEndpoint,
ProjectFavoritesViewSet,
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectMemberEndpoint,
WorkspaceProjectDeployBoardEndpoint,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
)
from .user import (
UserEndpoint,
@ -52,11 +51,10 @@ from .workspace import (
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
WorkspaceMembersEndpoint,
LeaveWorkspaceEndpoint,
)
from .state import StateViewSet
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet
from .cycle import (
CycleViewSet,
CycleIssueViewSet,
@ -70,7 +68,7 @@ from .issue import (
WorkSpaceIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
IssuePropertyViewSet,
IssueUserDisplayPropertyEndpoint,
LabelViewSet,
BulkDeleteIssuesEndpoint,
UserWorkSpaceIssues,
@ -148,16 +146,13 @@ from .page import (
from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .gpt import GPTIntegrationEndpoint
from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
from .estimate import (
ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint,
)
from .release import ReleaseNotesEndpoint
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
from .analytic import (
@ -170,4 +165,6 @@ from .analytic import (
from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
from .exporter import ExportIssuesEndpoint
from .config import ConfigurationEndpoint

View File

@ -1,10 +1,5 @@
# Django imports
from django.db.models import (
Count,
Sum,
F,
Q
)
from django.db.models import Count, Sum, F, Q
from django.db.models.functions import ExtractMonth
# Third party imports
@ -28,82 +23,156 @@ class AnalyticsEndpoint(BaseAPIView):
]
def get(self, request, slug):
try:
x_axis = request.GET.get("x_axis", False)
y_axis = request.GET.get("y_axis", False)
x_axis = request.GET.get("x_axis", False)
y_axis = request.GET.get("y_axis", False)
segment = request.GET.get("segment", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
segment = request.GET.get("segment", False)
filters = issue_filters(request.GET, "GET")
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
total_issues = queryset.count()
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
colors = dict()
if x_axis in ["state__name", "state__group"] or segment in [
"state__name",
"state__group",
]:
if x_axis in ["state__name", "state__group"]:
key = "name" if x_axis == "state__name" else "group"
else:
key = "name" if segment == "state__name" else "group"
colors = (
State.objects.filter(
~Q(name="Triage"),
workspace__slug=slug, project_id__in=filters.get("project__in")
).values(key, "color")
if filters.get("project__in", False)
else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
)
if x_axis in ["labels__name"] or segment in ["labels__name"]:
colors = (
Label.objects.filter(
workspace__slug=slug, project_id__in=filters.get("project__in")
).values("name", "color")
if filters.get("project__in", False)
else Label.objects.filter(workspace__slug=slug).values(
"name", "color"
)
)
assignee_details = {}
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = (
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
.order_by("assignees__id")
.distinct("assignees__id")
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
)
valid_xaxis_segment = [
"state_id",
"state__group",
"labels__id",
"assignees__id",
"estimate_point",
"issue_cycle__cycle_id",
"issue_module__module_id",
"priority",
"start_date",
"target_date",
"created_at",
"completed_at",
]
valid_yaxis = [
"issue_count",
"estimate",
]
# Check for x-axis and y-axis as they are required parameters
if (
not x_axis
or not y_axis
or x_axis not in valid_xaxis_segment
or y_axis not in valid_yaxis
):
return Response(
{
"error": "x-axis and y-axis dimensions are required and the values should be valid"
},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(
{
"total": total_issues,
"distribution": distribution,
"extras": {"colors": colors, "assignee_details": assignee_details},
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
# If segment is present it cannot be the same as the x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
return Response(
{
"error": "Segment should be valid and cannot be the same as the x-axis"
},
status=status.HTTP_400_BAD_REQUEST,
)
# Additional filters that need to be applied
filters = issue_filters(request.GET, "GET")
# Get the issues for the workspace with the additional filters applied
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
# Get the total issue count
total_issues = queryset.count()
# Build the graph payload
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
state_details = {}
if x_axis in ["state_id"] or segment in ["state_id"]:
state_details = (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
)
.distinct("state_id")
.order_by("state_id")
.values("state_id", "state__name", "state__color")
)
label_details = {}
if x_axis in ["labels__id"] or segment in ["labels__id"]:
label_details = (
Issue.objects.filter(
workspace__slug=slug, **filters, labels__id__isnull=False
)
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
)
assignee_details = {}
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = (
Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False
)
.order_by("assignees__id")
.distinct("assignees__id")
.values(
"assignees__avatar",
"assignees__display_name",
"assignees__first_name",
"assignees__last_name",
"assignees__id",
)
)
cycle_details = {}
if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
cycle_details = (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_cycle__cycle_id__isnull=False,
)
.distinct("issue_cycle__cycle_id")
.order_by("issue_cycle__cycle_id")
.values(
"issue_cycle__cycle_id",
"issue_cycle__cycle__name",
)
)
module_details = {}
if x_axis in ["issue_module__module_id"] or segment in [
"issue_module__module_id"
]:
module_details = (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_module__module_id__isnull=False,
)
.distinct("issue_module__module_id")
.order_by("issue_module__module_id")
.values(
"issue_module__module_id",
"issue_module__module__name",
)
)
return Response(
{
"total": total_issues,
"distribution": distribution,
"extras": {
"state_details": state_details,
"assignee_details": assignee_details,
"label_details": label_details,
"cycle_details": cycle_details,
"module_details": module_details,
},
},
status=status.HTTP_200_OK,
)
class AnalyticViewViewset(BaseViewSet):
permission_classes = [
@ -128,45 +197,30 @@ class SavedAnalyticEndpoint(BaseAPIView):
]
def get(self, request, slug, analytic_id):
try:
analytic_view = AnalyticView.objects.get(
pk=analytic_id, workspace__slug=slug
)
analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)
filter = analytic_view.query
queryset = Issue.issue_objects.filter(**filter)
filter = analytic_view.query
queryset = Issue.issue_objects.filter(**filter)
x_axis = analytic_view.query_dict.get("x_axis", False)
y_axis = analytic_view.query_dict.get("y_axis", False)
x_axis = analytic_view.query_dict.get("x_axis", False)
y_axis = analytic_view.query_dict.get("y_axis", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
segment = request.GET.get("segment", False)
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
total_issues = queryset.count()
return Response(
{"total": total_issues, "distribution": distribution},
status=status.HTTP_200_OK,
)
except AnalyticView.DoesNotExist:
return Response(
{"error": "Analytic View Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
segment = request.GET.get("segment", False)
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
total_issues = queryset.count()
return Response(
{"total": total_issues, "distribution": distribution},
status=status.HTTP_200_OK,
)
class ExportAnalyticsEndpoint(BaseAPIView):
permission_classes = [
@ -174,33 +228,64 @@ class ExportAnalyticsEndpoint(BaseAPIView):
]
def post(self, request, slug):
try:
x_axis = request.data.get("x_axis", False)
y_axis = request.data.get("y_axis", False)
x_axis = request.data.get("x_axis", False)
y_axis = request.data.get("y_axis", False)
segment = request.data.get("segment", False)
if not x_axis or not y_axis:
return Response(
{"error": "x-axis and y-axis dimensions are required"},
status=status.HTTP_400_BAD_REQUEST,
)
valid_xaxis_segment = [
"state_id",
"state__group",
"labels__id",
"assignees__id",
"estimate_point",
"issue_cycle__cycle_id",
"issue_module__module_id",
"priority",
"start_date",
"target_date",
"created_at",
"completed_at",
]
analytic_export_task.delay(
email=request.user.email, data=request.data, slug=slug
)
valid_yaxis = [
"issue_count",
"estimate",
]
# Check for x-axis and y-axis as they are required parameters
if (
not x_axis
or not y_axis
or x_axis not in valid_xaxis_segment
or y_axis not in valid_yaxis
):
return Response(
{
"error": "x-axis and y-axis dimensions are required and the values should be valid"
},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(
{
"message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
# If segment is present it cannot be the same as the x-axis
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
return Response(
{
"error": "Segment should be valid and cannot be the same as the x-axis"
},
status=status.HTTP_400_BAD_REQUEST,
)
analytic_export_task.delay(
email=request.user.email, data=request.data, slug=slug
)
return Response(
{
"message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
},
status=status.HTTP_200_OK,
)
class DefaultAnalyticsEndpoint(BaseAPIView):
permission_classes = [
@ -208,90 +293,92 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
]
def get(self, request, slug):
try:
filters = issue_filters(request.GET, "GET")
filters = issue_filters(request.GET, "GET")
base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
total_issues = base_issues.count()
total_issues = queryset.count()
state_groups = base_issues.annotate(state_group=F("state__group"))
total_issues_classified = (
queryset.annotate(state_group=F("state__group"))
.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
total_issues_classified = (
state_groups.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
open_issues = queryset.filter(
state__group__in=["backlog", "unstarted", "started"]
).count()
open_issues_groups = ["backlog", "unstarted", "started"]
open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)
open_issues_classified = (
queryset.filter(state__group__in=["backlog", "unstarted", "started"])
.annotate(state_group=F("state__group"))
.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
open_issues = open_issues_queryset.count()
open_issues_classified = (
open_issues_queryset.values("state_group")
.annotate(state_count=Count("state_group"))
.order_by("state_group")
)
issue_completed_month_wise = (
queryset.filter(completed_at__isnull=False)
.annotate(month=ExtractMonth("completed_at"))
.values("month")
.annotate(count=Count("*"))
.order_by("month")
)
most_issue_created_user = (
queryset.exclude(created_by=None)
.values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id")
.annotate(count=Count("id"))
.order_by("-count")
)[:5]
issue_completed_month_wise = (
base_issues.filter(completed_at__isnull=False)
.annotate(month=ExtractMonth("completed_at"))
.values("month")
.annotate(count=Count("*"))
.order_by("month")
)
most_issue_closed_user = (
queryset.filter(completed_at__isnull=False, assignees__isnull=False)
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
.annotate(count=Count("id"))
.order_by("-count")
)[:5]
user_details = [
"created_by__first_name",
"created_by__last_name",
"created_by__avatar",
"created_by__display_name",
"created_by__id",
]
pending_issue_user = (
queryset.filter(completed_at__isnull=True)
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
.annotate(count=Count("id"))
.order_by("-count")
)
most_issue_created_user = (
base_issues.exclude(created_by=None)
.values(*user_details)
.annotate(count=Count("id"))
.order_by("-count")[:5]
)
open_estimate_sum = (
queryset.filter(
state__group__in=["backlog", "unstarted", "started"]
).aggregate(open_estimate_sum=Sum("estimate_point"))
)["open_estimate_sum"]
total_estimate_sum = queryset.aggregate(
total_estimate_sum=Sum("estimate_point")
)["total_estimate_sum"]
user_assignee_details = [
"assignees__first_name",
"assignees__last_name",
"assignees__avatar",
"assignees__display_name",
"assignees__id",
]
return Response(
{
"total_issues": total_issues,
"total_issues_classified": total_issues_classified,
"open_issues": open_issues,
"open_issues_classified": open_issues_classified,
"issue_completed_month_wise": issue_completed_month_wise,
"most_issue_created_user": most_issue_created_user,
"most_issue_closed_user": most_issue_closed_user,
"pending_issue_user": pending_issue_user,
"open_estimate_sum": open_estimate_sum,
"total_estimate_sum": total_estimate_sum,
},
status=status.HTTP_200_OK,
)
most_issue_closed_user = (
base_issues.filter(completed_at__isnull=False)
.exclude(assignees=None)
.values(*user_assignee_details)
.annotate(count=Count("id"))
.order_by("-count")[:5]
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
pending_issue_user = (
base_issues.filter(completed_at__isnull=True)
.values(*user_assignee_details)
.annotate(count=Count("id"))
.order_by("-count")
)
open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
"sum"
]
total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
return Response(
{
"total_issues": total_issues,
"total_issues_classified": total_issues_classified,
"open_issues": open_issues,
"open_issues_classified": open_issues_classified,
"issue_completed_month_wise": issue_completed_month_wise,
"most_issue_created_user": most_issue_created_user,
"most_issue_closed_user": most_issue_closed_user,
"pending_issue_user": pending_issue_user,
"open_estimate_sum": open_estimate_sum,
"total_estimate_sum": total_estimate_sum,
},
status=status.HTTP_200_OK,
)
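The x-axis/y-axis/segment guard above now appears verbatim in both AnalyticsEndpoint and ExportAnalyticsEndpoint; a sketch of how it could be factored into a shared helper (not part of this diff):

def validate_axes(x_axis, y_axis, segment, valid_x, valid_y):
    # Returns an error string, or None when the combination is valid
    if not x_axis or not y_axis or x_axis not in valid_x or y_axis not in valid_y:
        return "x-axis and y-axis dimensions are required and the values should be valid"
    if segment and (segment not in valid_x or segment == x_axis):
        return "Segment should be valid and cannot be the same as the x-axis"
    return None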

View File

@ -14,57 +14,34 @@ from plane.api.serializers import APITokenSerializer
class ApiTokenEndpoint(BaseAPIView):
def post(self, request):
try:
label = request.data.get("label", str(uuid4().hex))
workspace = request.data.get("workspace", False)
label = request.data.get("label", str(uuid4().hex))
workspace = request.data.get("workspace", False)
if not workspace:
return Response(
{"error": "Workspace is required"}, status=status.HTTP_200_OK
)
api_token = APIToken.objects.create(
label=label, user=request.user, workspace_id=workspace
)
serializer = APITokenSerializer(api_token)
# Token will only be visible while creating
if not workspace:
return Response(
{"error": "Workspace is required"}, status=status.HTTP_200_OK
)
return Response(
{"api_token": serializer.data, "token": api_token.token},
status=status.HTTP_201_CREATED,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.create(
label=label, user=request.user, workspace_id=workspace
)
serializer = APITokenSerializer(api_token)
# Token will only be visible while creating
return Response(
{"api_token": serializer.data, "token": api_token.token},
status=status.HTTP_201_CREATED,
)
def get(self, request):
try:
api_tokens = APIToken.objects.filter(user=request.user)
serializer = APITokenSerializer(api_tokens, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
api_tokens = APIToken.objects.filter(user=request.user)
serializer = APITokenSerializer(api_tokens, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def delete(self, request, pk):
try:
api_token = APIToken.objects.get(pk=pk)
api_token.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except APIToken.DoesNotExist:
return Response(
{"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.get(pk=pk)
api_token.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
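Since the raw token is only included in the POST response, callers must capture it immediately. A hedged client sketch; the mount point and auth header are assumptions:

import requests

resp = requests.post(
    "https://plane.example.com/api/api-tokens/",  # assumed mount point
    json={"label": "ci-bot", "workspace": "<workspace-uuid>"},
    headers={"Authorization": "Bearer <access_token>"},
)
token = resp.json()["token"]  # not retrievable again later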

View File

@ -18,108 +18,58 @@ class FileAssetEndpoint(BaseAPIView):
"""
def get(self, request, workspace_id, asset_key):
try:
asset_key = str(workspace_id) + "/" + asset_key
files = FileAsset.objects.filter(asset=asset_key)
if files.exists():
serializer = FileAssetSerializer(files, context={"request": request}, many=True)
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
asset_key = str(workspace_id) + "/" + asset_key
files = FileAsset.objects.filter(asset=asset_key)
if files.exists():
serializer = FileAssetSerializer(files, context={"request": request}, many=True)
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
def post(self, request, slug):
try:
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
# Get the workspace
workspace = Workspace.objects.get(slug=slug)
serializer.save(workspace_id=workspace.id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Workspace.DoesNotExist:
return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
# Get the workspace
workspace = Workspace.objects.get(slug=slug)
serializer.save(workspace_id=workspace.id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, workspace_id, asset_key):
try:
asset_key = str(workspace_id) + "/" + asset_key
file_asset = FileAsset.objects.get(asset=asset_key)
# Delete the file from storage
file_asset.asset.delete(save=False)
# Delete the file object
file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except FileAsset.DoesNotExist:
return Response(
{"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
asset_key = str(workspace_id) + "/" + asset_key
file_asset = FileAsset.objects.get(asset=asset_key)
# Delete the file from storage
file_asset.asset.delete(save=False)
# Delete the file object
file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class UserAssetsEndpoint(BaseAPIView):
parser_classes = (MultiPartParser, FormParser)
def get(self, request, asset_key):
try:
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
if files.exists():
serializer = FileAssetSerializer(files, context={"request": request})
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
else:
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def post(self, request):
try:
serializer = FileAssetSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def delete(self, request, asset_key):
try:
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
# Delete the file from storage
file_asset.asset.delete(save=False)
# Delete the file object
file_asset.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except FileAsset.DoesNotExist:
return Response(
{"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
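UserAssetsEndpoint declares MultiPartParser and FormParser, so uploads are ordinary multipart POSTs. A hedged sketch; the route and the form field name are assumptions based on the serializer's asset field:

import requests

with open("avatar.png", "rb") as fp:
    requests.post(
        "https://plane.example.com/api/users/file-assets/",  # assumed route
        files={"asset": fp},  # field name assumed
        headers={"Authorization": "Bearer <access_token>"},
    )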

View File

@ -9,7 +9,6 @@ from django.utils.encoding import (
DjangoUnicodeDecodeError,
)
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.sites.shortcuts import get_current_site
from django.conf import settings
## Third Party Imports
@ -56,11 +55,11 @@ class VerifyEmailEndpoint(BaseAPIView):
return Response(
{"email": "Successfully activated"}, status=status.HTTP_200_OK
)
except jwt.ExpiredSignatureError as indentifier:
except jwt.ExpiredSignatureError as _identifier:
return Response(
{"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
)
except jwt.exceptions.DecodeError as indentifier:
except jwt.exceptions.DecodeError as _identifier:
return Response(
{"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
)
@ -128,32 +127,25 @@ class ResetPasswordEndpoint(BaseAPIView):
class ChangePasswordEndpoint(BaseAPIView):
def post(self, request):
try:
serializer = ChangePasswordSerializer(data=request.data)
serializer = ChangePasswordSerializer(data=request.data)
user = User.objects.get(pk=request.user.id)
if serializer.is_valid():
# Check old password
if not user.check_password(serializer.data.get("old_password")):
return Response(
{"old_password": ["Wrong password."]},
status=status.HTTP_400_BAD_REQUEST,
)
# set_password also hashes the password that the user will get
user.set_password(serializer.data.get("new_password"))
user.save()
response = {
"status": "success",
"code": status.HTTP_200_OK,
"message": "Password updated successfully",
}
user = User.objects.get(pk=request.user.id)
if serializer.is_valid():
# Check old password
if not user.check_password(serializer.data.get("old_password")):
return Response(
{"old_password": ["Wrong password."]},
status=status.HTTP_400_BAD_REQUEST,
)
# set_password also hashes the password that the user will get
user.set_password(serializer.data.get("new_password"))
user.save()
response = {
"status": "success",
"code": status.HTTP_200_OK,
"message": "Password updated successfully",
}
return Response(response)
return Response(response)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
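For reference, the Django primitives the corrected code relies on: set_password() hashes before storing, and check_password() verifies a candidate against that hash. A minimal standalone sketch:

from django.contrib.auth.hashers import check_password, make_password

hashed = make_password("old-secret")  # what set_password stores internally
assert check_password("old-secret", hashed)
assert not check_password("wrong", hashed)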

View File

@ -40,229 +40,194 @@ class SignUpEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
try:
if not settings.ENABLE_SIGNUP:
return Response(
{
"error": "New account creation is disabled. Please contact your site administrator"
},
status=status.HTTP_400_BAD_REQUEST,
)
email = request.data.get("email", False)
password = request.data.get("password", False)
## Raise exception if any of the above are missing
if not email or not password:
return Response(
{"error": "Both email and password are required"},
status=status.HTTP_400_BAD_REQUEST,
)
email = email.strip().lower()
try:
validate_email(email)
except ValidationError as e:
return Response(
{"error": "Please provide a valid email address."},
status=status.HTTP_400_BAD_REQUEST,
)
# Check if the user already exists
if User.objects.filter(email=email).exists():
return Response(
{"error": "User with this email already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.create(email=email, username=uuid.uuid4().hex)
user.set_password(password)
# set last active for the user
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
"user": serialized_user,
}
# Send Analytics
if settings.ANALYTICS_BASE_API:
_ = requests.post(
settings.ANALYTICS_BASE_API,
headers={
"Content-Type": "application/json",
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
},
json={
"event_id": uuid.uuid4().hex,
"event_data": {
"medium": "email",
},
"user": {"email": email, "id": str(user.id)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get("HTTP_USER_AGENT"),
},
"event_type": "SIGN_UP",
},
)
return Response(data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
if not settings.ENABLE_SIGNUP:
return Response(
{
"error": "New account creation is disabled. Please contact your site administrator"
},
status=status.HTTP_400_BAD_REQUEST,
)
email = request.data.get("email", False)
password = request.data.get("password", False)
## Raise exception if any of the above are missing
if not email or not password:
return Response(
{"error": "Both email and password are required"},
status=status.HTTP_400_BAD_REQUEST,
)
email = email.strip().lower()
try:
validate_email(email)
except ValidationError as e:
return Response(
{"error": "Please provide a valid email address."},
status=status.HTTP_400_BAD_REQUEST,
)
# Check if the user already exists
if User.objects.filter(email=email).exists():
return Response(
{"error": "User with this email already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.create(email=email, username=uuid.uuid4().hex)
user.set_password(password)
# set last active for the user
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
access_token, refresh_token = get_tokens_for_user(user)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
}
# Send Analytics
if settings.ANALYTICS_BASE_API:
_ = requests.post(
settings.ANALYTICS_BASE_API,
headers={
"Content-Type": "application/json",
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
},
json={
"event_id": uuid.uuid4().hex,
"event_data": {
"medium": "email",
},
"user": {"email": email, "id": str(user.id)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get("HTTP_USER_AGENT"),
},
"event_type": "SIGN_UP",
},
)
return Response(data, status=status.HTTP_200_OK)
class SignInEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
try:
email = request.data.get("email", False)
password = request.data.get("password", False)
email = request.data.get("email", False)
password = request.data.get("password", False)
## Raise exception if any of the above are missing
if not email or not password:
return Response(
{"error": "Both email and password are required"},
status=status.HTTP_400_BAD_REQUEST,
)
email = email.strip().lower()
try:
validate_email(email)
except ValidationError as e:
return Response(
{"error": "Please provide a valid email address."},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.filter(email=email).first()
if user is None:
return Response(
{
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
},
status=status.HTTP_403_FORBIDDEN,
)
# Sign up Process
if not user.check_password(password):
return Response(
{
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
},
status=status.HTTP_403_FORBIDDEN,
)
if not user.is_active:
return Response(
{
"error": "Your account has been deactivated. Please contact your site administrator."
},
status=status.HTTP_403_FORBIDDEN,
)
serialized_user = UserSerializer(user).data
# set last active for the user
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
access_token, refresh_token = get_tokens_for_user(user)
# Send Analytics
if settings.ANALYTICS_BASE_API:
_ = requests.post(
settings.ANALYTICS_BASE_API,
headers={
"Content-Type": "application/json",
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
},
json={
"event_id": uuid.uuid4().hex,
"event_data": {
"medium": "email",
},
"user": {"email": email, "id": str(user.id)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get("HTTP_USER_AGENT"),
},
"event_type": "SIGN_IN",
},
)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
"user": serialized_user,
}
return Response(data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
## Raise exception if any of the above are missing
if not email or not password:
return Response(
{"error": "Both email and password are required"},
status=status.HTTP_400_BAD_REQUEST,
)
email = email.strip().lower()
try:
validate_email(email)
except ValidationError as e:
return Response(
{"error": "Please provide a valid email address."},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.filter(email=email).first()
if user is None:
return Response(
{
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
},
status=status.HTTP_403_FORBIDDEN,
)
# Sign up Process
if not user.check_password(password):
return Response(
{
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
},
status=status.HTTP_403_FORBIDDEN,
)
if not user.is_active:
return Response(
{
"error": "Your account has been deactivated. Please contact your site administrator."
},
status=status.HTTP_403_FORBIDDEN,
)
# set last active for the user
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
access_token, refresh_token = get_tokens_for_user(user)
# Send Analytics
if settings.ANALYTICS_BASE_API:
_ = requests.post(
settings.ANALYTICS_BASE_API,
headers={
"Content-Type": "application/json",
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
},
json={
"event_id": uuid.uuid4().hex,
"event_data": {
"medium": "email",
},
"user": {"email": email, "id": str(user.id)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR"),
"user_agent": request.META.get("HTTP_USER_AGENT"),
},
"event_type": "SIGN_IN",
},
)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
}
return Response(data, status=status.HTTP_200_OK)
class SignOutEndpoint(BaseAPIView):
def post(self, request):
try:
refresh_token = request.data.get("refresh_token", False)
refresh_token = request.data.get("refresh_token", False)
if not refresh_token:
capture_message("No refresh token provided")
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.get(pk=request.user.id)
user.last_logout_time = timezone.now()
user.last_logout_ip = request.META.get("REMOTE_ADDR")
user.save()
token = RefreshToken(refresh_token)
token.blacklist()
return Response({"message": "success"}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
if not refresh_token:
capture_message("No refresh token provided")
return Response(
{"error": "No refresh token provided"},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.get(pk=request.user.id)
user.last_logout_time = timezone.now()
user.last_logout_ip = request.META.get("REMOTE_ADDR")
user.save()
token = RefreshToken(refresh_token)
token.blacklist()
return Response({"message": "success"}, status=status.HTTP_200_OK)
class MagicSignInGenerateEndpoint(BaseAPIView):
permission_classes = [
@ -270,74 +235,62 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
]
def post(self, request):
        email = request.data.get("email", False)

        if not email:
            return Response(
                {"error": "Please provide a valid email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
        )

        ri = redis_instance()

        key = "magic_" + str(email)

        # Check if the key already exists in redis
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)
        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        current_site = settings.WEB_URL
        magic_link.delay(email, key, token, current_site)

        return Response({"key": key}, status=status.HTTP_200_OK)
class MagicSignInEndpoint(BaseAPIView):
@ -346,113 +299,99 @@ class MagicSignInEndpoint(BaseAPIView):
]
def post(self, request):
        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", False).strip().lower()

        if not key or user_token == "":
            return Response(
                {"error": "User token and key are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        ri = redis_instance()

        if ri.exists(key):
            data = json.loads(ri.get(key))

            token = data["token"]
            email = data["email"]

            if str(token) == str(user_token):
                if User.objects.filter(email=email).exists():
                    user = User.objects.get(email=email)
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_IN",
                            },
                        )
                else:
                    user = User.objects.create(
                        email=email,
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_UP",
                            },
                        )

                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                }

                return Response(data, status=status.HTTP_200_OK)
            else:
                return Response(
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        else:
            return Response(
                {"error": "The magic code/link has expired please try again"},
                status=status.HTTP_400_BAD_REQUEST,
            )
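
# End-to-end magic sign-in from a client's perspective (illustrative; the
# route paths below are assumptions, since URL routing is not part of this diff):
import requests as _requests

_base = "https://app.example.com/api"
_key = _requests.post(
    f"{_base}/magic-generate/", json={"email": "user@example.com"}
).json()["key"]
# ...the user copies the code from the email sent by magic_link.delay()...
_tokens = _requests.post(
    f"{_base}/magic-sign-in/", json={"key": _key, "token": "ab1c-de2f-gh3i"}
).json()
# _tokens -> {"access_token": "...", "refresh_token": "..."}
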
@ -5,10 +5,14 @@ import zoneinfo
from django.urls import resolve
from django.conf import settings
from django.utils import timezone
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError

# Third party imports
from rest_framework import status
from rest_framework.viewsets import ModelViewSet
from rest_framework.response import Response
from rest_framework.exceptions import APIException
from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
@ -33,8 +37,6 @@ class TimezoneMixin:
timezone.deactivate()
class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
model = None
@ -58,17 +60,50 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
except Exception as e:
capture_exception(e)
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
def handle_exception(self, exc):
"""
Handle any exception that occurs, by returning an appropriate response,
or re-raising the error.
"""
try:
response = super().handle_exception(exc)
return response
except Exception as e:
if isinstance(e, IntegrityError):
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ValidationError):
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
if isinstance(e, KeyError):
capture_exception(e)
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
print(e) if settings.DEBUG else print("Server Error")
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    def dispatch(self, request, *args, **kwargs):
        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            return response
@property
def workspace_slug(self):
@ -104,16 +139,49 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset
def handle_exception(self, exc):
"""
Handle any exception that occurs, by returning an appropriate response,
or re-raising the error.
"""
try:
response = super().handle_exception(exc)
return response
except Exception as e:
if isinstance(e, IntegrityError):
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ValidationError):
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
if isinstance(e, ObjectDoesNotExist):
model_name = str(exc).split(" matching query does not exist.")[0]
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
if isinstance(e, KeyError):
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
print(e) if settings.DEBUG else print("Server Error")
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    def dispatch(self, request, *args, **kwargs):
        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            return response
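
# With the handle_exception override above, view bodies can drop their local
# try/except blocks (as the rest of this diff does) and rely on the base class
# to translate errors, roughly:
#   ObjectDoesNotExist -> 404 {"error": "<Model> does not exist."}
#   IntegrityError     -> 400 {"error": "The payload is not valid"}
#   ValidationError    -> 400 {"error": "Please provide valid detail"}
#   KeyError           -> 400 {"error": "key <k> does not exist"}
#   anything else      -> 500 after capture_exception(...)
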
@property
def workspace_slug(self):
@ -0,0 +1,33 @@
# Python imports
import os
# Django imports
from django.conf import settings
# Third party imports
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from .base import BaseAPIView
class ConfigurationEndpoint(BaseAPIView):
permission_classes = [
AllowAny,
]
def get(self, request):
data = {}
data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
data["magic_login"] = (
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
data["email_password_login"] = (
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
)
return Response(data, status=status.HTTP_200_OK)
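
# Example response shape (illustrative; actual values depend on the
# deployment's environment variables):
#
# {
#     "google": "<google-client-id or null>",
#     "github": "<github-client-id or null>",
#     "github_app_name": "<github-app-name or null>",
#     "magic_login": true,
#     "email_password_login": false
# }
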
File diff suppressed because it is too large
@ -1,6 +1,3 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status
@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
]
    def get(self, request, slug, project_id):
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
if project.estimate_id is not None:
estimate_points = EstimatePoint.objects.filter(
@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
serializer = EstimatePointSerializer(estimate_points, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
        return Response([], status=status.HTTP_200_OK)
class BulkEstimatePointEndpoint(BaseViewSet):
@ -50,204 +40,139 @@ class BulkEstimatePointEndpoint(BaseViewSet):
serializer_class = EstimateSerializer
def list(self, request, slug, project_id):
        estimates = Estimate.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).prefetch_related("points").select_related("workspace", "project")
        serializer = EstimateReadSerializer(estimates, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
        if not request.data.get("estimate", False):
            return Response(
                {"error": "Estimate is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate_points = request.data.get("estimate_points", [])

        if not len(estimate_points) or len(estimate_points) > 8:
            return Response(
                {"error": "Estimate points are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
        if not estimate_serializer.is_valid():
            return Response(
                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        estimate = estimate_serializer.save(project_id=project_id)
        estimate_points = EstimatePoint.objects.bulk_create(
            [
                EstimatePoint(
                    estimate=estimate,
                    key=estimate_point.get("key", 0),
                    value=estimate_point.get("value", ""),
                    description=estimate_point.get("description", ""),
                    project_id=project_id,
                    workspace_id=estimate.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for estimate_point in estimate_points
            ],
            batch_size=10,
            ignore_conflicts=True,
        )
        estimate_point_serializer = EstimatePointSerializer(
            estimate_points, many=True
        )
        return Response(
            {
                "estimate": estimate_serializer.data,
                "estimate_points": estimate_point_serializer.data,
            },
            status=status.HTTP_200_OK,
        )
def retrieve(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        serializer = EstimateReadSerializer(estimate)
        return Response(
            serializer.data,
            status=status.HTTP_200_OK,
        )
def partial_update(self, request, slug, project_id, estimate_id):
        if not request.data.get("estimate", False):
            return Response(
                {"error": "Estimate is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not len(request.data.get("estimate_points", [])):
            return Response(
                {"error": "Estimate points are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate = Estimate.objects.get(pk=estimate_id)

        estimate_serializer = EstimateSerializer(
            estimate, data=request.data.get("estimate"), partial=True
        )
        if not estimate_serializer.is_valid():
            return Response(
                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

        estimate = estimate_serializer.save()

        estimate_points_data = request.data.get("estimate_points", [])

        estimate_points = EstimatePoint.objects.filter(
            pk__in=[
                estimate_point.get("id") for estimate_point in estimate_points_data
            ],
            workspace__slug=slug,
            project_id=project_id,
            estimate_id=estimate_id,
        )

        updated_estimate_points = []
        for estimate_point in estimate_points:
            # Find the data for that estimate point
            estimate_point_data = [
                point
                for point in estimate_points_data
                if point.get("id") == str(estimate_point.id)
            ]
            if len(estimate_point_data):
                estimate_point.value = estimate_point_data[0].get(
                    "value", estimate_point.value
                )
                updated_estimate_points.append(estimate_point)

        EstimatePoint.objects.bulk_update(
            updated_estimate_points, ["value"], batch_size=10,
        )
        estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
        return Response(
            {
                "estimate": estimate_serializer.data,
                "estimate_points": estimate_point_serializer.data,
            },
            status=status.HTTP_200_OK,
        )
def destroy(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        estimate.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@ -20,81 +20,62 @@ class ExportIssuesEndpoint(BaseAPIView):
serializer_class = ExporterHistorySerializer
def post(self, request, slug):
        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        provider = request.data.get("provider", False)
        multiple = request.data.get("multiple", False)
        project_ids = request.data.get("project", [])

        if provider in ["csv", "xlsx", "json"]:
            if not project_ids:
                project_ids = Project.objects.filter(
                    workspace__slug=slug
                ).values_list("id", flat=True)
                project_ids = [str(project_id) for project_id in project_ids]

            exporter = ExporterHistory.objects.create(
                workspace=workspace,
                project=project_ids,
                initiated_by=request.user,
                provider=provider,
            )

            issue_export_task.delay(
                provider=exporter.provider,
                workspace_id=workspace.id,
                project_ids=project_ids,
                token_id=exporter.token,
                multiple=multiple,
                slug=slug,
            )
            return Response(
                {
                    "message": "Once the export is ready you will be able to download it"
                },
                status=status.HTTP_200_OK,
            )
        else:
            return Response(
                {"error": f"Provider '{provider}' not found."},
                status=status.HTTP_400_BAD_REQUEST,
            )
def get(self, request, slug):
        exporter_history = ExporterHistory.objects.filter(
            workspace__slug=slug
        ).select_related("workspace", "initiated_by")

        if request.GET.get("per_page", False) and request.GET.get("cursor", False):
            return self.paginate(
                request=request,
                queryset=exporter_history,
                on_results=lambda exporter_history: ExporterHistorySerializer(
                    exporter_history, many=True
                ).data,
            )
        else:
            return Response(
                {"error": "per_page and cursor are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
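
# Both pagination params are mandatory when listing export history; an
# illustrative request (the URL layout and cursor value are assumptions, as
# routing and the paginator's cursor format are not part of this diff):
#
#   GET /api/workspaces/<slug>/export-issues/?per_page=10&cursor=<opaque-cursor>
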
@ -0,0 +1,92 @@
# Python imports
import requests
# Third party imports
import openai
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny
from sentry_sdk import capture_exception
# Django imports
from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.utils.integrations.github import get_release_notes
class GPTIntegrationEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
return Response(
{"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
prompt = request.data.get("prompt", False)
task = request.data.get("task", False)
if not task:
return Response(
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
final_text = task + "\n" + prompt
openai.api_key = settings.OPENAI_API_KEY
response = openai.ChatCompletion.create(
model=settings.GPT_ENGINE,
messages=[{"role": "user", "content": final_text}],
temperature=0.7,
max_tokens=1024,
)
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
text = response.choices[0].message.content.strip()
text_html = text.replace("\n", "<br/>")
return Response(
{
"response": text,
"response_html": text_html,
"project_detail": ProjectLiteSerializer(project).data,
"workspace_detail": WorkspaceLiteSerializer(workspace).data,
},
status=status.HTTP_200_OK,
)
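
# Illustrative request body for GPTIntegrationEndpoint (shape inferred from
# the reads above): "task" is mandatory, and "prompt" should be a string
# because the two are concatenated into final_text.
#
# {"task": "Summarize this issue", "prompt": "Users report login loops on Safari."}
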
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
release_notes = get_release_notes()
return Response(release_notes, status=status.HTTP_200_OK)
class UnsplashEndpoint(BaseAPIView):
def get(self, request):
query = request.GET.get("query", False)
page = request.GET.get("page", 1)
per_page = request.GET.get("per_page", 20)
url = (
f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
if query
else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
)
headers = {
"Content-Type": "application/json",
}
resp = requests.get(url=url, headers=headers)
return Response(resp.json(), status=status.HTTP_200_OK)
@ -1,75 +0,0 @@
# Python imports
import requests
# Third party imports
from rest_framework.response import Response
from rest_framework import status
import openai
from sentry_sdk import capture_exception
# Django imports
from django.conf import settings
# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
class GPTIntegrationEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
try:
if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
return Response(
{"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
prompt = request.data.get("prompt", False)
task = request.data.get("task", False)
if not task:
return Response(
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
final_text = task + "\n" + prompt
openai.api_key = settings.OPENAI_API_KEY
response = openai.ChatCompletion.create(
model=settings.GPT_ENGINE,
messages=[{"role": "user", "content": final_text}],
temperature=0.7,
max_tokens=1024,
)
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
text = response.choices[0].message.content.strip()
text_html = text.replace("\n", "<br/>")
return Response(
{
"response": text,
"response_html": text_html,
"project_detail": ProjectLiteSerializer(project).data,
"workspace_detail": WorkspaceLiteSerializer(workspace).data,
},
status=status.HTTP_200_OK,
)
except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
return Response(
{"error": "Workspace or Project Does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
@ -39,564 +39,488 @@ from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.api.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service):
        if service == "github":
            owner = request.GET.get("owner", False)
            repo = request.GET.get("repo", False)

            if not owner or not repo:
                return Response(
                    {"error": "Owner and repo are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace_integration = WorkspaceIntegration.objects.get(
                integration__provider="github", workspace__slug=slug
            )

            access_tokens_url = workspace_integration.metadata.get(
                "access_tokens_url", False
            )

            if not access_tokens_url:
                return Response(
                    {
                        "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            issue_count, labels, collaborators = get_github_repo_details(
                access_tokens_url, owner, repo
            )
            return Response(
                {
                    "issue_count": issue_count,
                    "labels": labels,
                    "collaborators": collaborators,
                },
                status=status.HTTP_200_OK,
            )

        if service == "jira":
            # Check for all the keys
            params = {
                "project_key": "Project key is required",
                "api_token": "API token is required",
                "email": "Email is required",
                "cloud_hostname": "Cloud hostname is required",
            }

            for key, error_message in params.items():
                if not request.GET.get(key, False):
                    return Response(
                        {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
                    )

            project_key = request.GET.get("project_key", "")
            api_token = request.GET.get("api_token", "")
            email = request.GET.get("email", "")
            cloud_hostname = request.GET.get("cloud_hostname", "")

            response = jira_project_issue_summary(
                email, api_token, project_key, cloud_hostname
            )
            if "error" in response:
                return Response(response, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(
                    response,
                    status=status.HTTP_200_OK,
                )

        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
        project_id = request.data.get("project_id", False)

        if not project_id:
            return Response(
                {"error": "Project ID is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.get(slug=slug)

        if service == "github":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata or not config:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        if service == "jira":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
def get(self, request, slug):
        imports = (
            Importer.objects.filter(workspace__slug=slug)
            .order_by("-created_at")
            .select_related("initiated_by", "project", "workspace")
        )
        serializer = ImporterSerializer(imports, many=True)
        return Response(serializer.data)
def delete(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )

        if importer.imported_data is not None:
            # Delete all imported Issues
            imported_issues = importer.imported_data.get("issues", [])
            Issue.issue_objects.filter(id__in=imported_issues).delete()

            # Delete all imported Labels
            imported_labels = importer.imported_data.get("labels", [])
            Label.objects.filter(id__in=imported_labels).delete()

            if importer.service == "jira":
                imported_modules = importer.imported_data.get("modules", [])
                Module.objects.filter(id__in=imported_modules).delete()
        importer.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        serializer = ImporterSerializer(importer, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service, importer_id):
        importer = Importer.objects.get(
            pk=importer_id,
            workspace__slug=slug,
            project_id=project_id,
            service=service,
        )
        importer.status = request.data.get("status", "processing")
        importer.save()
        return Response(status=status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
        # Get the project
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        # Get the default state
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=project_id, default=True
        ).first()
        # if there is no default state assign any random state
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=project_id
            ).first()

        # Get the maximum sequence_id
        last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
            largest=Max("sequence")
        )["largest"]

        last_id = 1 if last_id is None else last_id + 1

        # Get the maximum sort order
        largest_sort_order = Issue.objects.filter(
            project_id=project_id, state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]

        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )

        # Get the issues_data
        issues_data = request.data.get("issues_data", [])

        if not len(issues_data):
            return Response(
                {"error": "Issue data is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Issues
        bulk_issues = []
        for issue_data in issues_data:
            bulk_issues.append(
                Issue(
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    state_id=issue_data.get("state")
                    if issue_data.get("state", False)
                    else default_state.id,
                    name=issue_data.get("name", "Issue Created through Bulk"),
                    description_html=issue_data.get("description_html", "<p></p>"),
                    description_stripped=(
                        None
                        if (
                            issue_data.get("description_html") == ""
                            or issue_data.get("description_html") is None
                        )
                        else strip_tags(issue_data.get("description_html"))
                    ),
                    sequence_id=last_id,
                    sort_order=largest_sort_order,
                    start_date=issue_data.get("start_date", None),
                    target_date=issue_data.get("target_date", None),
                    priority=issue_data.get("priority", "none"),
                    created_by=request.user,
                )
            )

            largest_sort_order = largest_sort_order + 10000
            last_id = last_id + 1

        issues = Issue.objects.bulk_create(
            bulk_issues,
            batch_size=100,
            ignore_conflicts=True,
        )

        # Sequences
        _ = IssueSequence.objects.bulk_create(
            [
                IssueSequence(
                    issue=issue,
                    sequence=issue.sequence_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Attach Labels
        bulk_issue_labels = []
        for issue, issue_data in zip(issues, issues_data):
            labels_list = issue_data.get("labels_list", [])
            bulk_issue_labels = bulk_issue_labels + [
                IssueLabel(
                    issue=issue,
                    label_id=label_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for label_id in labels_list
            ]

        _ = IssueLabel.objects.bulk_create(
            bulk_issue_labels, batch_size=100, ignore_conflicts=True
        )

        # Attach Assignees
        bulk_issue_assignees = []
        for issue, issue_data in zip(issues, issues_data):
            assignees_list = issue_data.get("assignees_list", [])
            bulk_issue_assignees = bulk_issue_assignees + [
                IssueAssignee(
                    issue=issue,
                    assignee_id=assignee_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for assignee_id in assignees_list
            ]

        _ = IssueAssignee.objects.bulk_create(
            bulk_issue_assignees, batch_size=100, ignore_conflicts=True
        )

        # Track the issue activities
        IssueActivity.objects.bulk_create(
            [
                IssueActivity(
                    issue=issue,
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Create Comments
        bulk_issue_comments = []
        for issue, issue_data in zip(issues, issues_data):
            comments_list = issue_data.get("comments_list", [])
            bulk_issue_comments = bulk_issue_comments + [
                IssueComment(
                    issue=issue,
                    comment_html=comment.get("comment_html", "<p></p>"),
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for comment in comments_list
            ]

        _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)

        # Attach Links
        _ = IssueLink.objects.bulk_create(
            [
                IssueLink(
                    issue=issue,
                    url=issue_data.get("link", {}).get("url", "https://github.com"),
                    title=issue_data.get("link", {}).get("title", "Original Issue"),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
        )

        return Response(
            {"issues": IssueFlatSerializer(issues, many=True).data},
            status=status.HTTP_201_CREATED,
        )
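
# A minimal illustrative `issues_data` entry for BulkImportIssuesEndpoint
# (keys taken from the reads above; the UUIDs are placeholders):
_example_issues_data = [
    {
        "name": "Imported issue",
        "description_html": "<p>Imported from GitHub</p>",
        "state": None,  # falls back to the default state resolved above
        "priority": "none",
        "labels_list": ["<label-uuid>"],
        "assignees_list": ["<member-uuid>"],
        "comments_list": [{"comment_html": "<p>First comment</p>"}],
        "link": {
            "url": "https://github.com/org/repo/issues/1",
            "title": "Original Issue",
        },
    }
]
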
class BulkImportModulesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        modules = Module.objects.bulk_create(
            [
                Module(
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date", None),
                    target_date=module.get("target_date", None),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        modules = Module.objects.filter(id__in=[module.id for module in modules])

        if len(modules) == len(modules_data):
            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get(
                            "url", "https://plane.so"
                        ),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            bulk_module_issues = []
            for module, module_data in zip(modules, modules_data):
                module_issues_list = module_data.get("module_issues_list", [])
                bulk_module_issues = bulk_module_issues + [
                    ModuleIssue(
                        issue_id=issue,
                        module=module,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for issue in module_issues_list
                ]

            _ = ModuleIssue.objects.bulk_create(
                bulk_module_issues, batch_size=100, ignore_conflicts=True
            )

            serializer = ModuleSerializer(modules, many=True)
            return Response(
                {"modules": serializer.data}, status=status.HTTP_201_CREATED
            )
        else:
            return Response(
                {"message": "Modules created but issues could not be imported"},
                status=status.HTTP_200_OK,
            )
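
# Likewise, an illustrative `modules_data` entry for BulkImportModulesEndpoint
# (keys taken from the reads above; the UUID is a placeholder):
_example_modules_data = [
    {
        "name": "Sprint 1",
        "description": "Imported module",
        "start_date": None,
        "target_date": None,
        "link": {"url": "https://plane.so", "title": "Original Issue"},
        "module_issues_list": ["<issue-uuid>"],
    }
]
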
@ -64,24 +64,17 @@ class InboxViewSet(BaseViewSet):
serializer.save(project_id=self.kwargs.get("project_id"))
def destroy(self, request, slug, project_id, pk):
        inbox = Inbox.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        # Handle default inbox delete
        if inbox.is_default:
            return Response(
                {"error": "You cannot delete the default inbox"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        inbox.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class InboxIssueViewSet(BaseViewSet):
@ -110,276 +103,239 @@ class InboxIssueViewSet(BaseViewSet):
)
def list(self, request, slug, project_id, inbox_id):
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.objects.filter(
issue_inbox__inbox_id=inbox_id,
workspace__slug=slug,
project_id=project_id,
)
.filter(**filters)
.annotate(bridge_id=F("issue_inbox__id"))
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels")
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.prefetch_related(
Prefetch(
"issue_inbox",
queryset=InboxIssue.objects.only(
"status", "duplicate_to", "snoozed_till", "source"
),
)
)
)
issues_data = IssueStateInboxSerializer(issues, many=True).data
return Response(
issues_data,
status=status.HTTP_200_OK,
)
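# bridge_id exposes the linking InboxIssue row's id on each serialized issue,
# and each *_count above is a correlated subquery (OuterRef) computed in SQL
# rather than in Python.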
def create(self, request, slug, project_id, inbox_id):
if not request.data.get("issue", {}).get("name", False):
return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
# Check for valid priority
if not request.data.get("issue", {}).get("priority", "none") in [
"low",
"medium",
"high",
"urgent",
"none",
]:
return Response(
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
)
# Create or get state
state, _ = State.objects.get_or_create(
name="Triage",
group="backlog",
description="Default state for managing all Inbox Issues",
project_id=project_id,
color="#ff7700",
)
# create an issue
issue = Issue.objects.create(
name=request.data.get("issue", {}).get("name"),
description=request.data.get("issue", {}).get("description", {}),
description_html=request.data.get("issue", {}).get(
"description_html", "<p></p>"
),
priority=request.data.get("issue", {}).get("priority", "low"),
project_id=project_id,
state=state,
)
# Create an Issue Activity
issue_activity.delay(
type="issue.activity.created",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
# create an inbox issue
InboxIssue.objects.create(
inbox_id=inbox_id,
project_id=project_id,
issue=issue,
source=request.data.get("source", "in-app"),
)
serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
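# get_or_create treats every keyword above as part of the lookup, so a project
# accumulates at most one "Triage" state; subsequent inbox submissions reuse it.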
def partial_update(self, request, slug, project_id, inbox_id, pk):
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
# Get the project member
project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
# Only project admins, members and the creating user can access this endpoint
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
# Get issue data
issue_data = request.data.pop("issue", False)
if bool(issue_data):
issue = Issue.objects.get(
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
)
# Only allow guests and viewers to edit name and description
if project_member.role <= 10:
issue_data = {
"name": issue_data.get("name", issue.name),
"description_html": issue_data.get("description_html", issue.description_html),
"description": issue_data.get("description", issue.description)
}
issue_serializer = IssueCreateSerializer(
issue, data=issue_data, partial=True
)
if issue_serializer.is_valid():
current_instance = issue
# Log all the updates
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
if issue is not None:
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
issue_serializer.save()
else:
return Response(
issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
# Only project admins and members can edit inbox issue attributes
if project_member.role > 10:
serializer = InboxIssueSerializer(
inbox_issue, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
# Update the issue state if the issue is rejected or marked as duplicate
if serializer.data["status"] in [-1, 2]:
issue = Issue.objects.get(
pk=inbox_issue.issue_id,
workspace__slug=slug,
project_id=project_id,
)
state = State.objects.filter(
group="cancelled", workspace__slug=slug, project_id=project_id
).first()
if state is not None:
issue.state = state
issue.save()
# Update the issue state if it is accepted
if serializer.data["status"] in [1]:
issue = Issue.objects.get(
pk=inbox_issue.issue_id,
workspace__slug=slug,
project_id=project_id,
)
# Update the issue state only if it is in triage state
if issue.state.name == "Triage":
# Move to default state
state = State.objects.filter(
workspace__slug=slug, project_id=project_id, default=True
).first()
if state is not None:
issue.state = state
issue.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
else:
return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
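# Status transitions handled above (values inferred from this handler): -1
# rejected and 2 duplicate push the issue to a "cancelled"-group state, while
# 1 accepted moves an issue still sitting in Triage to the project's default state.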
def retrieve(self, request, slug, project_id, inbox_id, pk):
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
issue = Issue.objects.get(
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
)
serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, pk):
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
# Get the project member
project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
# Check the issue status
if inbox_issue.status in [-2, -1, 0, 2]:
# Delete the issue also
Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
inbox_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
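# A pending (-2), rejected (-1), snoozed (0) or duplicate (2) record still owns
# its underlying Issue, so both rows are deleted; an accepted (1) issue has been
# promoted into the project, so only the inbox record is removed (status
# meanings inferred from the accept/reject flow above).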
class InboxIssuePublicViewSet(BaseViewSet):
@ -404,246 +360,200 @@ class InboxIssuePublicViewSet(BaseViewSet):
)
.select_related("issue", "workspace", "project")
)
return InboxIssue.objects.none()
def list(self, request, slug, project_id, inbox_id):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.objects.filter(
issue_inbox__inbox_id=inbox_id,
workspace__slug=slug,
project_id=project_id,
)
.filter(**filters)
.annotate(bridge_id=F("issue_inbox__id"))
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels")
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.prefetch_related(
Prefetch(
"issue_inbox",
queryset=InboxIssue.objects.only(
"status", "duplicate_to", "snoozed_till", "source"
),
)
)
)
issues_data = IssueStateInboxSerializer(issues, many=True).data
return Response(
issues_data,
status=status.HTTP_200_OK,
)
def create(self, request, slug, project_id, inbox_id):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
if not request.data.get("issue", {}).get("name", False):
return Response(
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
)
# Check for valid priority
if not request.data.get("issue", {}).get("priority", "none") in [
"low",
"medium",
"high",
"urgent",
"none",
]:
return Response(
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
)
# Create or get state
state, _ = State.objects.get_or_create(
name="Triage",
group="backlog",
description="Default state for managing all Inbox Issues",
project_id=project_id,
color="#ff7700",
)
# create an issue
issue = Issue.objects.create(
name=request.data.get("issue", {}).get("name"),
description=request.data.get("issue", {}).get("description", {}),
description_html=request.data.get("issue", {}).get(
"description_html", "<p></p>"
),
priority=request.data.get("issue", {}).get("priority", "low"),
project_id=project_id,
state=state,
)
# Create an Issue Activity
issue_activity.delay(
type="issue.activity.created",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp())
)
# create an inbox issue
InboxIssue.objects.create(
inbox_id=inbox_id,
project_id=project_id,
issue=issue,
source=request.data.get("source", "in-app"),
)
serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, inbox_id, pk):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
# Only the creating user can edit a public inbox issue
if str(inbox_issue.created_by_id) != str(request.user.id):
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
# Get issue data
issue_data = request.data.pop("issue", False)
issue = Issue.objects.get(
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
)
# Public users may only edit name and description
issue_data = {
"name": issue_data.get("name", issue.name),
"description_html": issue_data.get("description_html", issue.description_html),
"description": issue_data.get("description", issue.description)
}
issue_serializer = IssueCreateSerializer(
issue, data=issue_data, partial=True
)
if issue_serializer.is_valid():
current_instance = issue
# Log all the updates
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
if issue is not None:
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
issue_serializer.save()
return Response(issue_serializer.data, status=status.HTTP_200_OK)
return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, inbox_id, pk):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
issue = Issue.objects.get(
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
)
serializer = IssueStateInboxSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, pk):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
if project_deploy_board.inbox is None:
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
inbox_issue = InboxIssue.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
)
if str(inbox_issue.created_by_id) != str(request.user.id):
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
inbox_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -2,7 +2,6 @@
import uuid
# Django imports
from django.db import IntegrityError
from django.contrib.auth.hashers import make_password
# Third party imports
@ -33,66 +32,40 @@ class IntegrationViewSet(BaseViewSet):
model = Integration
def create(self, request):
serializer = IntegrationSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = IntegrationSerializer(
integration, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be deleted"},
status=status.HTTP_400_BAD_REQUEST,
)
integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class WorkspaceIntegrationViewSet(BaseViewSet):
serializer_class = WorkspaceIntegrationSerializer
@ -111,119 +84,81 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
)
def create(self, request, slug, provider):
workspace = Workspace.objects.get(slug=slug)
integration = Integration.objects.get(provider=provider)
config = {}
metadata = {}  # default so providers other than github/slack do not hit a NameError below
if provider == "github":
installation_id = request.data.get("installation_id", None)
if not installation_id:
return Response(
{"error": "Installation ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
metadata = get_github_metadata(installation_id)
config = {"installation_id": installation_id}
if provider == "slack":
metadata = request.data.get("metadata", {})
access_token = metadata.get("access_token", False)
team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id:
return Response(
{"error": "Access token and team id are required"},
status=status.HTTP_400_BAD_REQUEST,
)
config = {"team_id": team_id, "access_token": access_token}
# Create a bot user
bot_user = User.objects.create(
email=f"{uuid.uuid4().hex}@plane.so",
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
is_bot=True,
first_name=integration.title,
avatar=integration.avatar_url
if integration.avatar_url is not None
else "",
)
# Create an API Token for the bot user
api_token = APIToken.objects.create(
user=bot_user,
user_type=1,  # bot user
workspace=workspace,
)
workspace_integration = WorkspaceIntegration.objects.create(
workspace=workspace,
integration=integration,
actor=bot_user,
api_token=api_token,
metadata=metadata,
config=config,
)
# Add bot user as a member of workspace
_ = WorkspaceMember.objects.create(
workspace=workspace_integration.workspace,
member=bot_user,
role=20,
)
return Response(
WorkspaceIntegrationSerializer(workspace_integration).data,
status=status.HTTP_201_CREATED,
)
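# Each integration acts through a dedicated bot account with its own API token
# and workspace membership (role=20, the admin role in this codebase), so its
# actions stay attributable and revocable independently of human members.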
def destroy(self, request, slug, pk):
workspace_integration = WorkspaceIntegration.objects.get(
pk=pk, workspace__slug=slug
)
if workspace_integration.integration.provider == "github":
installation_id = workspace_integration.config.get(
"installation_id", False
)
if installation_id:
delete_github_installation(installation_id=installation_id)
workspace_integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -30,31 +30,25 @@ class GithubRepositoriesEndpoint(BaseAPIView):
]
def get(self, request, slug, workspace_integration_id):
page = request.GET.get("page", 1)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if workspace_integration.integration.provider != "github":
return Response(
{"error": "Not a github integration"},
status=status.HTTP_400_BAD_REQUEST,
)
access_tokens_url = workspace_integration.metadata["access_tokens_url"]
repositories_url = (
workspace_integration.metadata["repositories_url"]
+ f"?per_page=100&page={page}"
)
repositories = get_github_repos(access_tokens_url, repositories_url)
return Response(repositories, status=status.HTTP_200_OK)
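# GitHub's installation-repositories endpoint is paginated and caps per_page at
# 100, so clients walk additional pages via the ?page= query parameter.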
class GithubRepositorySyncViewSet(BaseViewSet):
permission_classes = [
@ -76,89 +70,76 @@ class GithubRepositorySyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
name = request.data.get("name", False)
url = request.data.get("url", False)
config = request.data.get("config", {})
repository_id = request.data.get("repository_id", False)
owner = request.data.get("owner", False)
if not name or not url or not repository_id or not owner:
return Response(
{"error": "Name, url, repository_id and owner are required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Get the workspace integration
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id
)
# Delete the old repository object
GithubRepositorySync.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
GithubRepository.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
# Create repository
repo = GithubRepository.objects.create(
name=name,
url=url,
config=config,
repository_id=repository_id,
owner=owner,
project_id=project_id,
)
# Create a Label for github
label = Label.objects.filter(
name="GitHub",
project_id=project_id,
).first()
if label is None:
label = Label.objects.create(
name="GitHub",
project_id=project_id,
description="Label to sync Plane issues with GitHub issues",
color="#003773",
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
credentials=request.data.get("credentials", {}),
project_id=project_id,
label=label,
)
# Add bot as a member in the project
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
# Return Response
return Response(
GithubRepositorySyncSerializer(repo_sync).data,
status=status.HTTP_201_CREATED,
)
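# Repository sync is one-per-project: existing GithubRepository and
# GithubRepositorySync rows are deleted first, so reconnecting a repository
# replaces the previous link instead of accumulating duplicates.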
class GithubIssueSyncViewSet(BaseViewSet):
permission_classes = [
@ -177,42 +158,30 @@ class GithubIssueSyncViewSet(BaseViewSet):
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
def post(self, request, slug, project_id, repo_sync_id):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
github_issue_syncs = request.data.get("github_issue_syncs", [])
github_issue_syncs = GithubIssueSync.objects.bulk_create(
[
GithubIssueSync(
issue_id=github_issue_sync.get("issue"),
repo_issue_id=github_issue_sync.get("repo_issue_id"),
issue_url=github_issue_sync.get("issue_url"),
github_issue_id=github_issue_sync.get("github_issue_id"),
repository_sync_id=repo_sync_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for github_issue_sync in github_issue_syncs
],
batch_size=100,
ignore_conflicts=True,
)
serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
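# With ignore_conflicts=True, bulk_create cannot report which rows were
# skipped, so the 201 payload mirrors the submitted syncs rather than what was
# actually inserted.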
class GithubCommentSyncViewSet(BaseViewSet):

View File

@ -32,42 +32,25 @@ class SlackProjectSyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
serializer = SlackProjectSyncSerializer(data=request.data)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if serializer.is_valid():
serializer.save(
project_id=project_id,
workspace_integration_id=workspace_integration_id,
)
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id, workspace__slug=slug
)
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

View File

@ -24,8 +24,6 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
from django.conf import settings
from django.db import IntegrityError
# Third Party imports
from rest_framework.response import Response
@ -58,7 +56,6 @@ from plane.api.serializers import (
IssuePublicSerializer,
)
from plane.api.permissions import (
WorkspaceEntityPermission,
ProjectEntityPermission,
WorkSpaceAdminPermission,
ProjectMemberPermission,
@ -82,8 +79,8 @@ from plane.db.models import (
IssueVote,
IssueRelation,
ProjectPublicMember,
IssueLabel,
IssueAssignee,
IssueLabel,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
@ -132,7 +129,7 @@ class IssueViewSet(BaseViewSet):
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
@ -153,7 +150,7 @@ class IssueViewSet(BaseViewSet):
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
@ -284,8 +281,7 @@ class IssueViewSet(BaseViewSet):
if group_by:
return Response(
group_results(issues, group_by, sub_group_by),
status=status.HTTP_200_OK,
group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
@ -321,7 +317,7 @@ class IssueViewSet(BaseViewSet):
issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -338,7 +334,9 @@ class IssueViewSet(BaseViewSet):
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
).get(workspace__slug=slug, project_id=project_id, pk=pk)
).get(
workspace__slug=slug, project_id=project_id, pk=pk
)
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
except Issue.DoesNotExist:
return Response(
@ -470,11 +468,10 @@ class UserWorkSpaceIssues(BaseAPIView):
{"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
if group_by:
return Response(
group_results(issues, group_by, sub_group_by),
status=status.HTTP_200_OK,
group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
@ -579,7 +576,7 @@ class IssueCommentViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
def perform_update(self, serializer):
@ -598,7 +595,7 @@ class IssueCommentViewSet(BaseViewSet):
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
@ -620,7 +617,7 @@ class IssueCommentViewSet(BaseViewSet):
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
@ -716,10 +713,18 @@ class LabelViewSet(BaseViewSet):
ProjectMemberPermission,
]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
)
def create(self, request, slug, project_id):
try:
serializer = LabelSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError:
return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
def get_queryset(self):
return self.filter_queryset(
@ -904,7 +909,7 @@ class IssueLinkViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
def perform_update(self, serializer):
@ -923,7 +928,7 @@ class IssueLinkViewSet(BaseViewSet):
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_update(serializer)
@ -945,7 +950,7 @@ class IssueLinkViewSet(BaseViewSet):
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
@ -1024,7 +1029,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
serializer.data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1047,7 +1052,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(status=status.HTTP_204_NO_CONTENT)
@ -1250,7 +1255,7 @@ class IssueArchiveViewSet(BaseViewSet):
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@ -1455,7 +1460,7 @@ class IssueReactionViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
def destroy(self, request, slug, project_id, issue_id, reaction_code):
@ -1479,7 +1484,7 @@ class IssueReactionViewSet(BaseViewSet):
"identifier": str(issue_reaction.id),
}
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -1528,7 +1533,7 @@ class CommentReactionViewSet(BaseViewSet):
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
def destroy(self, request, slug, project_id, comment_id, reaction_code):
@ -1553,7 +1558,7 @@ class CommentReactionViewSet(BaseViewSet):
"comment_id": str(comment_id),
}
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -1650,7 +1655,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
if not ProjectMember.objects.filter(
project_id=project_id,
@ -1700,7 +1705,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1734,7 +1739,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
comment.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -1809,7 +1814,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1854,7 +1859,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
"identifier": str(issue_reaction.id),
}
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -1928,7 +1933,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1980,7 +1985,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
"comment_id": str(comment_id),
}
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -2044,7 +2049,7 @@ class IssueVotePublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
serializer = IssueVoteSerializer(issue_vote)
return Response(serializer.data, status=status.HTTP_201_CREATED)
@ -2079,7 +2084,7 @@ class IssueVotePublicViewSet(BaseViewSet):
"identifier": str(issue_vote.id),
}
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
issue_vote.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -2113,7 +2118,7 @@ class IssueRelationViewSet(BaseViewSet):
IssueRelationSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
@ -2147,9 +2152,9 @@ class IssueRelationViewSet(BaseViewSet):
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
if relation == "blocking":
return Response(
RelatedIssueSerializer(issue_relation, many=True).data,
@ -2402,26 +2407,7 @@ class IssueDraftViewSet(BaseViewSet):
]
serializer_class = IssueFlatSerializer
model = Issue
def perform_update(self, serializer):
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
current_instance = (
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
issue_activity.delay(
type="issue_draft.activity.updated",
requested_data=requested_data,
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
)
return super().perform_update(serializer)
def perform_destroy(self, instance):
current_instance = (
@ -2439,9 +2425,11 @@ class IssueDraftViewSet(BaseViewSet):
current_instance=json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self):
return (
Issue.objects.annotate(
@ -2467,6 +2455,7 @@ class IssueDraftViewSet(BaseViewSet):
)
)
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
try:
@ -2575,6 +2564,7 @@ class IssueDraftViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, slug, project_id):
try:
project = Project.objects.get(pk=project_id)
@ -2599,7 +2589,7 @@ class IssueDraftViewSet(BaseViewSet):
issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -2609,6 +2599,48 @@ class IssueDraftViewSet(BaseViewSet):
{"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
)
def partial_update(self, request, slug, project_id, pk):
try:
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
serializer = IssueSerializer(
issue, data=request.data, partial=True
)
if serializer.is_valid():
if request.data.get("is_draft") is not None and not request.data.get("is_draft"):
serializer.save(created_at=timezone.now(), updated_at=timezone.now())
else:
serializer.save()
issue_activity.delay(
type="issue_draft.activity.updated",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
IssueSerializer(issue).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Issue.DoesNotExist:
return Response(
{"error": "Issue does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
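A note on partial_update above: an explicit falsy is_draft means the draft is being promoted to a real issue, so the save restamps created_at and updated_at to the promotion time and the issue surfaces as newly created. A minimal sketch of that branch, with data standing in for request.data:

from django.utils import timezone

def save_draft_update(serializer, data):
    # Promotion: is_draft was sent and is falsy, so restamp both
    # timestamps; otherwise save the draft edits as-is.
    if data.get("is_draft") is not None and not data.get("is_draft"):
        serializer.save(created_at=timezone.now(), updated_at=timezone.now())
    else:
        serializer.save()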
def retrieve(self, request, slug, project_id, pk=None):
try:
issue = Issue.objects.get(
@ -2841,3 +2873,5 @@ class BulkIssueOperationsEndpoint(BaseAPIView):
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
View File
@ -40,6 +40,7 @@ from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot
class ModuleViewSet(BaseViewSet):
model = Module
permission_classes = [
@ -78,173 +79,209 @@ class ModuleViewSet(BaseViewSet):
queryset=ModuleLink.objects.select_related("module", "created_by"),
)
)
.annotate(total_issues=Count("issue_module"))
.annotate(
total_issues=Count(
"issue_module",
filter=Q(
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
),
)
.annotate(
completed_issues=Count(
"issue_module__issue__state__group",
filter=Q(issue_module__issue__state__group="completed"),
filter=Q(
issue_module__issue__state__group="completed",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
)
)
.annotate(
cancelled_issues=Count(
"issue_module__issue__state__group",
filter=Q(issue_module__issue__state__group="cancelled"),
filter=Q(
issue_module__issue__state__group="cancelled",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
)
)
.annotate(
started_issues=Count(
"issue_module__issue__state__group",
filter=Q(issue_module__issue__state__group="started"),
filter=Q(
issue_module__issue__state__group="started",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
)
)
.annotate(
unstarted_issues=Count(
"issue_module__issue__state__group",
filter=Q(issue_module__issue__state__group="unstarted"),
filter=Q(
issue_module__issue__state__group="unstarted",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
)
)
.annotate(
backlog_issues=Count(
"issue_module__issue__state__group",
filter=Q(issue_module__issue__state__group="backlog"),
filter=Q(
issue_module__issue__state__group="backlog",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
)
)
.order_by(order_by, "name")
)
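Each Count above now carries a filter, so archived and draft issues no longer inflate module progress; the bare total_issues=Count("issue_module") it replaces counted everything. The filtered-count pattern in isolation, assuming the Module model and issue_module relation used above:

from django.db.models import Count, Q

modules = Module.objects.annotate(
    total_issues=Count(
        "issue_module",
        # count only live, non-draft issues
        filter=Q(
            issue_module__issue__archived_at__isnull=True,
            issue_module__issue__is_draft=False,
        ),
    )
)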
def perform_destroy(self, instance):
module_issues = list(
ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list(
"issue", flat=True
)
def create(self, request, slug, project_id):
project = Project.objects.get(workspace__slug=slug, pk=project_id)
serializer = ModuleWriteSerializer(
data=request.data, context={"project": project}
)
if serializer.is_valid():
serializer.save()
module = Module.objects.get(pk=serializer.data["id"])
serializer = ModuleSerializer(module)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk):
queryset = self.get_queryset().get(pk=pk)
assignee_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(first_name=F("assignees__first_name"))
.annotate(last_name=F("assignees__last_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar"))
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
.annotate(
total_issues=Count(
"assignee_id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
completed_issues=Count(
"assignee_id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"assignee_id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("first_name", "last_name")
)
label_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
"label_id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
),
),
)
.annotate(
completed_issues=Count(
"label_id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"label_id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
data = ModuleSerializer(queryset).data
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if queryset.start_date and queryset.target_date:
data["distribution"]["completion_chart"] = burndown_plot(
queryset=queryset, slug=slug, project_id=project_id, module_id=pk
)
return Response(
data,
status=status.HTTP_200_OK,
)
def destroy(self, request, slug, project_id, pk):
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
module.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("pk")),
"module_id": str(pk),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch = int(timezone.now().timestamp())
epoch=int(timezone.now().timestamp()),
)
return super().perform_destroy(instance)
def create(self, request, slug, project_id):
try:
project = Project.objects.get(workspace__slug=slug, pk=project_id)
serializer = ModuleWriteSerializer(
data=request.data, context={"project": project}
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Project.DoesNotExist:
return Response(
{"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"name": "The module name is already taken"},
status=status.HTTP_410_GONE,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve(self, request, slug, project_id, pk):
try:
queryset = self.get_queryset().get(pk=pk)
assignee_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(first_name=F("assignees__first_name"))
.annotate(last_name=F("assignees__last_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar"))
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
.annotate(total_issues=Count("assignee_id"))
.annotate(
completed_issues=Count(
"assignee_id",
filter=Q(completed_at__isnull=False),
)
)
.annotate(
pending_issues=Count(
"assignee_id",
filter=Q(completed_at__isnull=True),
)
)
.order_by("first_name", "last_name")
)
label_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(total_issues=Count("label_id"))
.annotate(
completed_issues=Count(
"label_id",
filter=Q(completed_at__isnull=False),
)
)
.annotate(
pending_issues=Count(
"label_id",
filter=Q(completed_at__isnull=True),
)
)
.order_by("label_name")
)
data = ModuleSerializer(queryset).data
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if queryset.start_date and queryset.target_date:
data["distribution"]["completion_chart"] = burndown_plot(
queryset=queryset, slug=slug, project_id=project_id, module_id=pk
)
return Response(
data,
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(status=status.HTTP_204_NO_CONTENT)
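For context on the activity calls threaded through this diff: every mutation fans out to the issue_activity Celery task with a JSON payload and an integer Unix epoch, and the trailing-comma churn in the earlier hunks is just normalizing that epoch argument. A sketch of the payload shape, with issue_activity assumed to be the task already imported in this module:

import json
from django.utils import timezone

issue_activity.delay(
    type="module.activity.deleted",           # event name
    requested_data=json.dumps({"module_id": str(pk)}),
    actor_id=str(request.user.id),            # who performed the action
    issue_id=str(pk),
    project_id=str(project_id),
    current_instance=None,                    # pre-change snapshot, if any
    epoch=int(timezone.now().timestamp()),    # integer Unix timestamp
)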
class ModuleIssueViewSet(BaseViewSet):
@ -266,23 +303,6 @@ class ModuleIssueViewSet(BaseViewSet):
module_id=self.kwargs.get("module_id"),
)
def perform_destroy(self, instance):
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(self.kwargs.get("module_id")),
"issues": [str(instance.issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch = int(timezone.now().timestamp())
)
return super().perform_destroy(instance)
def get_queryset(self):
return self.filter_queryset(
super()
@ -308,162 +328,162 @@ class ModuleIssueViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id, module_id):
try:
order_by = request.GET.get("order_by", "created_at")
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(issue_module__module_id=module_id)
.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(bridge_id=F("issue_module__id"))
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.order_by(order_by)
.filter(**filters)
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
order_by = request.GET.get("order_by", "created_at")
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(issue_module__module_id=module_id)
.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
issues_data = IssueStateSerializer(issues, many=True).data
if sub_group_by and sub_group_by == group_by:
return Response(
{"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
if group_by:
return Response(
group_results(issues_data, group_by, sub_group_by),
status=status.HTTP_200_OK,
)
return Response(
issues_data,
status=status.HTTP_200_OK,
.annotate(bridge_id=F("issue_module__id"))
.filter(project_id=project_id)
.filter(workspace__slug=slug)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.order_by(order_by)
.filter(**filters)
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
except Exception as e:
capture_exception(e)
.annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
issues_data = IssueStateSerializer(issues, many=True).data
if sub_group_by and sub_group_by == group_by:
return Response(
{"error": "Something went wrong please try again later"},
{"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
if group_by:
grouped_results = group_results(issues_data, group_by, sub_group_by)
return Response(
grouped_results,
status=status.HTTP_200_OK,
)
return Response(
issues_data, status=status.HTTP_200_OK
)
def create(self, request, slug, project_id, module_id):
try:
issues = request.data.get("issues", [])
if not len(issues):
return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
)
module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=module_id
issues = request.data.get("issues", [])
if not len(issues):
return Response(
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
)
module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=module_id
)
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
update_module_issue_activity = []
records_to_update = []
record_to_create = []
update_module_issue_activity = []
records_to_update = []
record_to_create = []
for issue in issues:
module_issue = [
module_issue
for module_issue in module_issues
if str(module_issue.issue_id) in issues
]
for issue in issues:
module_issue = [
module_issue
for module_issue in module_issues
if str(module_issue.issue_id) in issues
]
if len(module_issue):
if module_issue[0].module_id != module_id:
update_module_issue_activity.append(
{
"old_module_id": str(module_issue[0].module_id),
"new_module_id": str(module_id),
"issue_id": str(module_issue[0].issue_id),
}
)
module_issue[0].module_id = module_id
records_to_update.append(module_issue[0])
else:
record_to_create.append(
ModuleIssue(
module=module,
issue_id=issue,
project_id=project_id,
workspace=module.workspace,
created_by=request.user,
updated_by=request.user,
)
if len(module_issue):
if module_issue[0].module_id != module_id:
update_module_issue_activity.append(
{
"old_module_id": str(module_issue[0].module_id),
"new_module_id": str(module_id),
"issue_id": str(module_issue[0].issue_id),
}
)
module_issue[0].module_id = module_id
records_to_update.append(module_issue[0])
else:
record_to_create.append(
ModuleIssue(
module=module,
issue_id=issue,
project_id=project_id,
workspace=module.workspace,
created_by=request.user,
updated_by=request.user,
)
)
ModuleIssue.objects.bulk_create(
record_to_create,
batch_size=10,
ignore_conflicts=True,
)
ModuleIssue.objects.bulk_create(
record_to_create,
batch_size=10,
ignore_conflicts=True,
)
ModuleIssue.objects.bulk_update(
records_to_update,
["module"],
batch_size=10,
)
ModuleIssue.objects.bulk_update(
records_to_update,
["module"],
batch_size=10,
)
# Capture Issue Activity
issue_activity.delay(
type="module.activity.created",
requested_data=json.dumps({"modules_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{
"updated_module_issues": update_module_issue_activity,
"created_module_issues": serializers.serialize(
"json", record_to_create
),
}
),
epoch = int(timezone.now().timestamp())
)
# Capture Issue Activity
issue_activity.delay(
type="module.activity.created",
requested_data=json.dumps({"modules_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{
"updated_module_issues": update_module_issue_activity,
"created_module_issues": serializers.serialize(
"json", record_to_create
),
}
),
epoch=int(timezone.now().timestamp()),
)
return Response(
ModuleIssueSerializer(self.get_queryset(), many=True).data,
status=status.HTTP_200_OK,
)
except Module.DoesNotExist:
return Response(
{"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(
ModuleIssueSerializer(self.get_queryset(), many=True).data,
status=status.HTTP_200_OK,
)
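The create handler above partitions the incoming issue ids into rows to insert and rows to re-point at the new module, then writes each set in bulk; both bulk calls skip per-row save() and signals, which is why the activity event is emitted explicitly. The two-call pattern in isolation, reusing the lists built above:

# One INSERT per batch; ignore_conflicts skips rows that already exist
ModuleIssue.objects.bulk_create(
    record_to_create,
    batch_size=10,
    ignore_conflicts=True,
)
# One UPDATE per batch, touching only the changed column
ModuleIssue.objects.bulk_update(
    records_to_update,
    ["module"],
    batch_size=10,
)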
def destroy(self, request, slug, project_id, module_id, pk):
module_issue = ModuleIssue.objects.get(
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
)
module_issue.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(module_id),
"issues": [str(module_issue.issue_id)],
}
),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleLinkViewSet(BaseViewSet):
@ -494,7 +514,6 @@ class ModuleLinkViewSet(BaseViewSet):
class ModuleFavoriteViewSet(BaseViewSet):
serializer_class = ModuleFavoriteSerializer
model = ModuleFavorite
@ -508,49 +527,18 @@ class ModuleFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
try:
serializer = ModuleFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The module is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = ModuleFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, module_id):
try:
module_favorite = ModuleFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
module_id=module_id,
)
module_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except ModuleFavorite.DoesNotExist:
return Response(
{"error": "Module is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
module_favorite = ModuleFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
module_id=module_id,
)
module_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
View File
@ -36,328 +36,239 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
)
def list(self, request, slug):
try:
snoozed = request.GET.get("snoozed", "false")
archived = request.GET.get("archived", "false")
read = request.GET.get("read", "true")
# Get query parameters
snoozed = request.GET.get("snoozed", "false")
archived = request.GET.get("archived", "false")
read = request.GET.get("read", "true")
type = request.GET.get("type", "all")
# Filter type
type = request.GET.get("type", "all")
notifications = (
Notification.objects.filter(
workspace__slug=slug, receiver_id=request.user.id
)
.select_related("workspace", "project", "triggered_by", "receiver")
.order_by("snoozed_till", "-created_at")
notifications = (
Notification.objects.filter(
workspace__slug=slug, receiver_id=request.user.id
)
.select_related("workspace", "project", "triggered_by", "receiver")
.order_by("snoozed_till", "-created_at")
)
# Filter for snoozed notifications
if snoozed == "false":
notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
)
# Filters based on query parameters
snoozed_filters = {
"true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
"false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
}
if snoozed == "true":
notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
)
notifications = notifications.filter(snoozed_filters[snoozed])
if read == "false":
notifications = notifications.filter(read_at__isnull=True)
archived_filters = {
"true": Q(archived_at__isnull=False),
"false": Q(archived_at__isnull=True),
}
# Filter for archived or unarchived
if archived == "false":
notifications = notifications.filter(archived_at__isnull=True)
notifications = notifications.filter(archived_filters[archived])
if archived == "true":
notifications = notifications.filter(archived_at__isnull=False)
if read == "false":
notifications = notifications.filter(read_at__isnull=True)
# Subscribed issues
if type == "watching":
issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True)
# Subscribed issues
if type == "watching":
issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Assigned Issues
if type == "assigned":
issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Created issues
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Assigned Issues
if type == "assigned":
issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Created issues
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(
entity_identifier__in=issue_ids
)
# Pagination
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
return self.paginate(
request=request,
queryset=(notifications),
on_results=lambda notifications: NotificationSerializer(
notifications, many=True
).data,
)
serializer = NotificationSerializer(notifications, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
# Pagination
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
return self.paginate(
request=request,
queryset=(notifications),
on_results=lambda notifications: NotificationSerializer(
notifications, many=True
).data,
)
serializer = NotificationSerializer(notifications, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
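The list refactor above swaps per-value if branches for lookup tables keyed by the raw query parameter, keeping the snoozed and archived cases symmetric. A minimal sketch; note the table assumes the parameter is exactly "true" or "false", which the defaults above guarantee, and any other value would raise a KeyError:

from django.db.models import Q
from django.utils import timezone

snoozed_filters = {
    "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
    "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
}
notifications = notifications.filter(snoozed_filters[snoozed])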
def partial_update(self, request, slug, pk):
try:
notification = Notification.objects.get(
workspace__slug=slug, pk=pk, receiver=request.user
)
# Only read_at and snoozed_till can be updated
notification_data = {
"snoozed_till": request.data.get("snoozed_till", None),
}
serializer = NotificationSerializer(
notification, data=notification_data, partial=True
)
notification = Notification.objects.get(
workspace__slug=slug, pk=pk, receiver=request.user
)
# Only read_at and snoozed_till can be updated
notification_data = {
"snoozed_till": request.data.get("snoozed_till", None),
}
serializer = NotificationSerializer(
notification, data=notification_data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def mark_read(self, request, slug, pk):
try:
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.read_at = timezone.now()
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.read_at = timezone.now()
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
def mark_unread(self, request, slug, pk):
try:
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.read_at = None
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.read_at = None
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
def archive(self, request, slug, pk):
try:
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.archived_at = timezone.now()
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.archived_at = timezone.now()
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
def unarchive(self, request, slug, pk):
try:
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.archived_at = None
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
except Notification.DoesNotExist:
return Response(
{"error": "Notification does not exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
notification = Notification.objects.get(
receiver=request.user, workspace__slug=slug, pk=pk
)
notification.archived_at = None
notification.save()
serializer = NotificationSerializer(notification)
return Response(serializer.data, status=status.HTTP_200_OK)
class UnreadNotificationEndpoint(BaseAPIView):
def get(self, request, slug):
try:
# Watching Issues Count
watching_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True),
).count()
# Watching Issues Count
watching_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True),
).count()
# My Issues Count
my_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True),
).count()
# My Issues Count
my_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True),
).count()
# Created Issues Count
created_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True),
).count()
# Created Issues Count
created_issues_count = Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
archived_at__isnull=True,
entity_identifier__in=Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True),
).count()
return Response(
{
"watching_issues": watching_issues_count,
"my_issues": my_issues_count,
"created_issues": created_issues_count,
},
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(
{
"watching_issues": watching_issues_count,
"my_issues": my_issues_count,
"created_issues": created_issues_count,
},
status=status.HTTP_200_OK,
)
class MarkAllReadNotificationViewSet(BaseViewSet):
def create(self, request, slug):
try:
snoozed = request.data.get("snoozed", False)
archived = request.data.get("archived", False)
type = request.data.get("type", "all")
snoozed = request.data.get("snoozed", False)
archived = request.data.get("archived", False)
type = request.data.get("type", "all")
notifications = (
Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
)
.select_related("workspace", "project", "triggered_by", "receiver")
.order_by("snoozed_till", "-created_at")
notifications = (
Notification.objects.filter(
workspace__slug=slug,
receiver_id=request.user.id,
read_at__isnull=True,
)
.select_related("workspace", "project", "triggered_by", "receiver")
.order_by("snoozed_till", "-created_at")
)
# Filter for snoozed notifications
if snoozed:
notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
)
else:
notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
)
# Filter for snoozed notifications
if snoozed:
notifications = notifications.filter(
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
)
else:
notifications = notifications.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
)
# Filter for archived or unarchived
if archived:
notifications = notifications.filter(archived_at__isnull=False)
else:
notifications = notifications.filter(archived_at__isnull=True)
# Filter for archived or unarchived
if archived:
notifications = notifications.filter(archived_at__isnull=False)
else:
notifications = notifications.filter(archived_at__isnull=True)
# Subscribed issues
if type == "watching":
issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Subscribed issues
if type == "watching":
issue_ids = IssueSubscriber.objects.filter(
workspace__slug=slug, subscriber_id=request.user.id
).values_list("issue_id", flat=True)
# Assigned Issues
if type == "assigned":
issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Created issues
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Assigned Issues
if type == "assigned":
issue_ids = IssueAssignee.objects.filter(
workspace__slug=slug, assignee_id=request.user.id
).values_list("issue_id", flat=True)
notifications = notifications.filter(entity_identifier__in=issue_ids)
# Created issues
if type == "created":
if WorkspaceMember.objects.filter(
workspace__slug=slug, member=request.user, role__lt=15
).exists():
notifications = Notification.objects.none()
else:
issue_ids = Issue.objects.filter(
workspace__slug=slug, created_by=request.user
).values_list("pk", flat=True)
notifications = notifications.filter(
entity_identifier__in=issue_ids
)
updated_notifications = []
for notification in notifications:
notification.read_at = timezone.now()
updated_notifications.append(notification)
Notification.objects.bulk_update(
updated_notifications, ["read_at"], batch_size=100
)
return Response({"message": "Successful"}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
updated_notifications = []
for notification in notifications:
notification.read_at = timezone.now()
updated_notifications.append(notification)
Notification.objects.bulk_update(
updated_notifications, ["read_at"], batch_size=100
)
return Response({"message": "Successful"}, status=status.HTTP_200_OK)
View File
@ -11,10 +11,10 @@ from django.conf import settings
from rest_framework.response import Response
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status
from sentry_sdk import capture_exception
# sso authentication
from google.oauth2 import id_token
from google.auth.transport import requests as google_auth_request
@ -112,7 +112,7 @@ def get_user_data(access_token: str) -> dict:
url="https://api.github.com/user/emails", headers=headers
).json()
[
_ = [
user_data.update({"email": item.get("email")})
for item in response
if item.get("primary") is True
@ -146,7 +146,7 @@ class OauthEndpoint(BaseAPIView):
data = get_user_data(access_token)
email = data.get("email", None)
if email == None:
if email is None:
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
@ -157,7 +157,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
user = User.objects.get(email=email)
email = data["email"]
channel = "email"
mobile_number = uuid.uuid4().hex
email_verified = True
else:
@ -181,19 +180,16 @@ class OauthEndpoint(BaseAPIView):
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_medium = f"oauth"
user.last_login_medium = "oauth"
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.is_email_verified = email_verified
user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
"user": serialized_user,
}
SocialLoginConnection.objects.update_or_create(
@ -235,7 +231,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
email = data["email"]
mobile_number = uuid.uuid4().hex
channel = "email"
email_verified = True
else:
return Response(
@ -264,14 +259,11 @@ class OauthEndpoint(BaseAPIView):
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
serialized_user = UserSerializer(user).data
access_token, refresh_token = get_tokens_for_user(user)
data = {
"access_token": access_token,
"refresh_token": refresh_token,
"user": serialized_user,
"permissions": [],
}
if settings.ANALYTICS_BASE_API:
_ = requests.post(
@ -304,11 +296,3 @@ class OauthEndpoint(BaseAPIView):
},
)
return Response(data, status=status.HTTP_201_CREATED)
except Exception as e:
capture_exception(e)
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
},
status=status.HTTP_400_BAD_REQUEST,
)
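The OAuth hunks above are mostly hygiene: email is None instead of == None, a plain "oauth" string instead of an f-string with no placeholders, the unused channel variable dropped, and the blanket except removed. For reference, get_tokens_for_user is conventionally a thin simplejwt wrapper along these lines (an assumption; the project's own implementation lives elsewhere):

from rest_framework_simplejwt.tokens import RefreshToken

def get_tokens_for_user(user):
    refresh = RefreshToken.for_user(user)
    # returns (access, refresh) as serialized JWT strings
    return str(refresh.access_token), str(refresh)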
View File
@ -1,8 +1,7 @@
# Python imports
from datetime import timedelta, datetime, date
from datetime import timedelta, date
# Django imports
from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, Prefetch
from django.utils import timezone
@ -78,104 +77,82 @@ class PageViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
try:
serializer = PageSerializer(
data=request.data,
context={"project_id": project_id, "owned_by_id": request.user.id},
)
serializer = PageSerializer(
data=request.data,
context={"project_id": project_id, "owned_by_id": request.user.id},
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, pk):
try:
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
# Only update access if the page owner is the requesting user
if (
page.access != request.data.get("access", page.access)
and page.owned_by_id != request.user.id
):
return Response(
{
"error": "Access cannot be updated since this page is owned by someone else"
},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = PageSerializer(page, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except Page.DoesNotExist:
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
# Only update access if the page owner is the requesting user
if (
page.access != request.data.get("access", page.access)
and page.owned_by_id != request.user.id
):
return Response(
{"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
{
"error": "Access cannot be updated since this page is owned by someone else"
},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = PageSerializer(page, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
try:
queryset = self.get_queryset()
page_view = request.GET.get("page_view", False)
queryset = self.get_queryset()
page_view = request.GET.get("page_view", False)
if not page_view:
return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
if not page_view:
return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
# All Pages
if page_view == "all":
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# All Pages
if page_view == "all":
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# Recent pages
if page_view == "recent":
current_time = date.today()
day_before = current_time - timedelta(days=1)
todays_pages = queryset.filter(updated_at__date=date.today())
yesterdays_pages = queryset.filter(updated_at__date=day_before)
earlier_this_week = queryset.filter(updated_at__date__range=(
(timezone.now() - timedelta(days=7)),
(timezone.now() - timedelta(days=2)),
))
return Response(
{
"today": PageSerializer(todays_pages, many=True).data,
"yesterday": PageSerializer(yesterdays_pages, many=True).data,
"earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
},
status=status.HTTP_200_OK,
)
# Recent pages
if page_view == "recent":
current_time = date.today()
day_before = current_time - timedelta(days=1)
todays_pages = queryset.filter(updated_at__date=date.today())
yesterdays_pages = queryset.filter(updated_at__date=day_before)
earlier_this_week = queryset.filter(updated_at__date__range=(
(timezone.now() - timedelta(days=7)),
(timezone.now() - timedelta(days=2)),
))
return Response(
{
"today": PageSerializer(todays_pages, many=True).data,
"yesterday": PageSerializer(yesterdays_pages, many=True).data,
"earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
},
status=status.HTTP_200_OK,
)
# Favorite Pages
if page_view == "favorite":
queryset = queryset.filter(is_favorite=True)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# My pages
if page_view == "created_by_me":
queryset = queryset.filter(owned_by=request.user)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# Favorite Pages
if page_view == "favorite":
queryset = queryset.filter(is_favorite=True)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# My pages
if page_view == "created_by_me":
queryset = queryset.filter(owned_by=request.user)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# Created by other Pages
if page_view == "created_by_other":
queryset = queryset.filter(~Q(owned_by=request.user), access=0)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
# Created by other Pages
if page_view == "created_by_other":
queryset = queryset.filter(~Q(owned_by=request.user), access=0)
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
capture_exception(e)
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
class PageBlockViewSet(BaseViewSet):
serializer_class = PageBlockSerializer
@ -225,53 +202,21 @@ class PageFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
try:
serializer = PageFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The page is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = PageFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, page_id):
try:
page_favorite = PageFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
page_id=page_id,
)
page_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except PageFavorite.DoesNotExist:
return Response(
{"error": "Page is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
page_favorite = PageFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
page_id=page_id,
)
page_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class CreateIssueFromPageBlockEndpoint(BaseAPIView):
permission_classes = [
@ -279,43 +224,32 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
]
def post(self, request, slug, project_id, page_id, page_block_id):
try:
page_block = PageBlock.objects.get(
pk=page_block_id,
workspace__slug=slug,
project_id=project_id,
page_id=page_id,
)
issue = Issue.objects.create(
name=page_block.name,
project_id=project_id,
description=page_block.description,
description_html=page_block.description_html,
description_stripped=page_block.description_stripped,
)
_ = IssueAssignee.objects.create(
issue=issue, assignee=request.user, project_id=project_id
)
page_block = PageBlock.objects.get(
pk=page_block_id,
workspace__slug=slug,
project_id=project_id,
page_id=page_id,
)
issue = Issue.objects.create(
name=page_block.name,
project_id=project_id,
description=page_block.description,
description_html=page_block.description_html,
description_stripped=page_block.description_stripped,
)
_ = IssueAssignee.objects.create(
issue=issue, assignee=request.user, project_id=project_id
)
_ = IssueActivity.objects.create(
issue=issue,
actor=request.user,
project_id=project_id,
comment=f"created the issue from {page_block.name} block",
verb="created",
)
_ = IssueActivity.objects.create(
issue=issue,
actor=request.user,
project_id=project_id,
comment=f"created the issue from {page_block.name} block",
verb="created",
)
page_block.issue = issue
page_block.save()
page_block.issue = issue
page_block.save()
return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
except PageBlock.DoesNotExist:
return Response(
{"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
File diff suppressed because it is too large.

View File
@ -1,21 +0,0 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception
# Module imports
from .base import BaseAPIView
from plane.utils.integrations.github import get_release_notes
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
try:
release_notes = get_release_notes()
return Response(release_notes, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
View File
@ -168,126 +168,107 @@ class GlobalSearchEndpoint(BaseAPIView):
)
def get(self, request, slug):
try:
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
project_id = request.query_params.get("project_id", False)
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
project_id = request.query_params.get("project_id", False)
if not query:
return Response(
{
"results": {
"workspace": [],
"project": [],
"issue": [],
"cycle": [],
"module": [],
"issue_view": [],
"page": [],
}
},
status=status.HTTP_200_OK,
)
MODELS_MAPPER = {
"workspace": self.filter_workspaces,
"project": self.filter_projects,
"issue": self.filter_issues,
"cycle": self.filter_cycles,
"module": self.filter_modules,
"issue_view": self.filter_views,
"page": self.filter_pages,
}
results = {}
for model in MODELS_MAPPER.keys():
func = MODELS_MAPPER.get(model, None)
results[model] = func(query, slug, project_id, workspace_search)
return Response({"results": results}, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
if not query:
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
{
"results": {
"workspace": [],
"project": [],
"issue": [],
"cycle": [],
"module": [],
"issue_view": [],
"page": [],
}
},
status=status.HTTP_200_OK,
)
MODELS_MAPPER = {
"workspace": self.filter_workspaces,
"project": self.filter_projects,
"issue": self.filter_issues,
"cycle": self.filter_cycles,
"module": self.filter_modules,
"issue_view": self.filter_views,
"page": self.filter_pages,
}
results = {}
for model in MODELS_MAPPER.keys():
func = MODELS_MAPPER.get(model, None)
results[model] = func(query, slug, project_id, workspace_search)
return Response({"results": results}, status=status.HTTP_200_OK)
class IssueSearchEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
try:
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
parent = request.query_params.get("parent", "false")
issue_relation = request.query_params.get("issue_relation", "false")
cycle = request.query_params.get("cycle", "false")
module = request.query_params.get("module", "false")
sub_issue = request.query_params.get("sub_issue", "false")
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
parent = request.query_params.get("parent", "false")
issue_relation = request.query_params.get("issue_relation", "false")
cycle = request.query_params.get("cycle", "false")
module = request.query_params.get("module", "false")
sub_issue = request.query_params.get("sub_issue", "false")
issue_id = request.query_params.get("issue_id", False)
issue_id = request.query_params.get("issue_id", False)
issues = Issue.issue_objects.filter(
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
)
issues = Issue.issue_objects.filter(
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
)
if workspace_search == "false":
issues = issues.filter(project_id=project_id)
if workspace_search == "false":
issues = issues.filter(project_id=project_id)
if query:
issues = search_issues(query, issues)
if query:
issues = search_issues(query, issues)
if parent == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
).exclude(
pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
"parent_id", flat=True
)
if parent == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
).exclude(
pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
"parent_id", flat=True
)
if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id),
~Q(issue_related__issue=issue),
~Q(issue_relation__related_issue=issue),
)
if sub_issue == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
if issue.parent:
issues = issues.filter(~Q(pk=issue.parent_id))
if cycle == "true":
issues = issues.exclude(issue_cycle__isnull=False)
if module == "true":
issues = issues.exclude(issue_module__isnull=False)
return Response(
issues.values(
"name",
"id",
"sequence_id",
"project__name",
"project__identifier",
"project_id",
"workspace__slug",
"state__name",
"state__group",
"state__color",
),
status=status.HTTP_200_OK,
)
except Issue.DoesNotExist:
return Response(
{"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
)
except Exception as e:
print(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id),
~Q(issue_related__issue=issue),
~Q(issue_relation__related_issue=issue),
)
if sub_issue == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
if issue.parent:
issues = issues.filter(~Q(pk=issue.parent_id))
if cycle == "true":
issues = issues.exclude(issue_cycle__isnull=False)
if module == "true":
issues = issues.exclude(issue_module__isnull=False)
return Response(
issues.values(
"name",
"id",
"sequence_id",
"project__name",
"project__identifier",
"project_id",
"workspace__slug",
"state__name",
"state__group",
"state__color",
),
status=status.HTTP_200_OK,
)
View File
@ -2,7 +2,6 @@
from itertools import groupby
# Django imports
from django.db import IntegrityError
from django.db.models import Q
# Third party imports
@ -41,67 +40,45 @@ class StateViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
try:
serializer = StateSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError:
return Response(
{"error": "State with the name already exists"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = StateSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
try:
state_dict = dict()
states = StateSerializer(self.get_queryset(), many=True).data
state_dict = dict()
states = StateSerializer(self.get_queryset(), many=True).data
for key, value in groupby(
sorted(states, key=lambda state: state["group"]),
lambda state: state.get("group"),
):
state_dict[str(key)] = list(value)
for key, value in groupby(
sorted(states, key=lambda state: state["group"]),
lambda state: state.get("group"),
):
state_dict[str(key)] = list(value)
return Response(state_dict, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
return Response(state_dict, status=status.HTTP_200_OK)
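itertools.groupby only merges adjacent items, which is why the states are sorted by group before grouping above. The same grouping as a dict comprehension, equivalent to the loop above:

from itertools import groupby

states_sorted = sorted(states, key=lambda state: state["group"])
state_dict = {
    str(key): list(values)
    for key, values in groupby(states_sorted, key=lambda state: state["group"])
}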
def destroy(self, request, slug, project_id, pk):
state = State.objects.get(
~Q(name="Triage"),
pk=pk, project_id=project_id, workspace__slug=slug,
)
if state.default:
return Response(
{"error": "Something went wrong please try again later"},
{"error": "Default state cannot be deleted"}, status=False
)
# Check for any issues in the state
issue_exist = Issue.issue_objects.filter(state=pk).exists()
if issue_exist:
return Response(
{
"error": "The state is not empty, only empty states can be deleted"
},
status=status.HTTP_400_BAD_REQUEST,
)
def destroy(self, request, slug, project_id, pk):
try:
state = State.objects.get(
~Q(name="Triage"),
pk=pk, project_id=project_id, workspace__slug=slug,
)
if state.default:
return Response(
{"error": "Default state cannot be deleted"}, status=False
)
# Check for any issues in the state
issue_exist = Issue.issue_objects.filter(state=pk).exists()
if issue_exist:
return Response(
{
"error": "The state is not empty, only empty states can be deleted"
},
status=status.HTTP_400_BAD_REQUEST,
)
state.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except State.DoesNotExist:
return Response({"error": "State does not exists"}, status=status.HTTP_404)
state.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
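
The rewritten destroy() is a guard-clause flow: reject default states first, then non-empty states, and only then delete. A framework-free sketch of the same ordering of checks (the names and the (status, body) return shape are hypothetical):

# Framework-free sketch of the guard-clause flow in destroy() above.
def delete_state(state, issue_count):
    if state.get("default"):
        return 400, {"error": "Default state cannot be deleted"}
    if issue_count > 0:
        return 400, {"error": "The state is not empty, only empty states can be deleted"}
    # safe to delete here
    return 204, None

print(delete_state({"default": False}, issue_count=0))  # (204, None)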

View File

@ -8,6 +8,8 @@ from sentry_sdk import capture_exception
from plane.api.serializers import (
UserSerializer,
IssueActivitySerializer,
UserMeSerializer,
UserMeSettingsSerializer,
)
from plane.api.views.base import BaseViewSet, BaseAPIView
@ -17,7 +19,6 @@ from plane.db.models import (
WorkspaceMemberInvite,
Issue,
IssueActivity,
WorkspaceMember,
)
from plane.utils.paginator import BasePaginator
@ -30,129 +31,43 @@ class UserEndpoint(BaseViewSet):
return self.request.user
def retrieve(self, request):
try:
workspace = Workspace.objects.get(
pk=request.user.last_workspace_id, workspace_member__member=request.user
)
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=request.user.email
).count()
assigned_issues = Issue.issue_objects.filter(
assignees__in=[request.user]
).count()
serialized_data = UserMeSerializer(request.user).data
return Response(
serialized_data,
status=status.HTTP_200_OK,
)
serialized_data = UserSerializer(request.user).data
serialized_data["workspace"] = {
"last_workspace_id": request.user.last_workspace_id,
"last_workspace_slug": workspace.slug,
"fallback_workspace_id": request.user.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"invites": workspace_invites,
}
serialized_data.setdefault("issues", {})[
"assigned_issues"
] = assigned_issues
return Response(
serialized_data,
status=status.HTTP_200_OK,
)
except Workspace.DoesNotExist:
# This exception will be hit even when the `last_workspace_id` is None
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=request.user.email
).count()
assigned_issues = Issue.issue_objects.filter(
assignees__in=[request.user]
).count()
fallback_workspace = (
Workspace.objects.filter(workspace_member__member=request.user)
.order_by("created_at")
.first()
)
serialized_data = UserSerializer(request.user).data
serialized_data["workspace"] = {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
serialized_data.setdefault("issues", {})[
"assigned_issues"
] = assigned_issues
return Response(
serialized_data,
status=status.HTTP_200_OK,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
def retrieve_user_settings(self, request):
serialized_data = UserMeSettingsSerializer(request.user).data
return Response(serialized_data, status=status.HTTP_200_OK)
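
The removed branches above hand-built the payload, choosing a fallback workspace as the oldest one the user belongs to before that logic moved behind UserMeSerializer and UserMeSettingsSerializer. A framework-free sketch of that selection rule, with hypothetical data:

# Sketch of the fallback-workspace rule the serializers now encapsulate:
# the oldest workspace the user belongs to, else None (data hypothetical).
def fallback_workspace(memberships):
    # memberships: (created_at, slug) tuples for the user's workspaces
    return min(memberships, default=None)

print(fallback_workspace([("2023-05-01", "acme"), ("2023-01-15", "base")]))
# ('2023-01-15', 'base')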
class UpdateUserOnBoardedEndpoint(BaseAPIView):
def patch(self, request):
try:
user = User.objects.get(pk=request.user.id)
user.is_onboarded = request.data.get("is_onboarded", False)
user.save()
return Response(
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.get(pk=request.user.id)
user.is_onboarded = request.data.get("is_onboarded", False)
user.save()
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
class UpdateUserTourCompletedEndpoint(BaseAPIView):
def patch(self, request):
try:
user = User.objects.get(pk=request.user.id)
user.is_tour_completed = request.data.get("is_tour_completed", False)
user.save()
return Response(
{"message": "Updated successfully"}, status=status.HTTP_200_OK
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
user = User.objects.get(pk=request.user.id)
user.is_tour_completed = request.data.get("is_tour_completed", False)
user.save()
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
class UserActivityEndpoint(BaseAPIView, BasePaginator):
def get(self, request, slug):
try:
queryset = IssueActivity.objects.filter(
actor=request.user, workspace__slug=slug
).select_related("actor", "workspace", "issue", "project")
queryset = IssueActivity.objects.filter(
actor=request.user, workspace__slug=slug
).select_related("actor", "workspace", "issue", "project")
return self.paginate(
request=request,
queryset=queryset,
on_results=lambda issue_activities: IssueActivitySerializer(
issue_activities, many=True
).data,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
return self.paginate(
request=request,
queryset=queryset,
on_results=lambda issue_activities: IssueActivitySerializer(
issue_activities, many=True
).data,
)
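
The paginate() call above hands the queryset plus an on_results callback that serializes only the sliced page. A hypothetical minimal version of that shape; Plane's actual BasePaginator is cursor-based and more involved:

# Hypothetical minimal version of callback-style pagination.
def paginate(queryset, on_results, per_page=30, offset=0):
    page = list(queryset[offset : offset + per_page])  # slice before serializing
    return {
        "results": on_results(page),  # serialize only the current page
        "next_offset": offset + per_page if len(page) == per_page else None,
    }

data = paginate(range(100), on_results=lambda items: [i * i for i in items])
print(data["next_offset"])  # 30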

View File

@ -13,7 +13,6 @@ from django.db.models import (
)
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
from django.db.models import Prefetch, OuterRef, Exists
# Third party imports
@ -61,7 +60,7 @@ class GlobalViewViewSet(BaseViewSet):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("workspace")
.order_by("-created_at")
.order_by(self.request.GET.get("order_by", "-created_at"))
.distinct()
)
@ -94,123 +93,111 @@ class GlobalViewIssuesViewSet(BaseViewSet):
)
)
@method_decorator(gzip_page)
def list(self, request, slug):
try:
filters = issue_filters(request.query_params, "GET")
filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
order_by_param = request.GET.get("order_by", "-created_at")
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = (
self.get_queryset()
.filter(**filters)
.filter(project__project_projectmember__member=self.request.user)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
issue_queryset = (
self.get_queryset()
.filter(**filters)
.filter(project__project_projectmember__member=self.request.user)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
priority_order = (
priority_order
if order_by_param == "priority"
else priority_order[::-1]
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
priority_order = (
priority_order if order_by_param == "priority" else priority_order[::-1]
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
).order_by("priority_order")
).order_by("priority_order")
# State Ordering
elif order_by_param in [
"state__name",
"state__group",
"-state__name",
"-state__group",
]:
state_order = (
state_order
if order_by_param in ["state__name", "state__group"]
else state_order[::-1]
# State Ordering
elif order_by_param in [
"state__name",
"state__group",
"-state__name",
"-state__group",
]:
state_order = (
state_order
if order_by_param in ["state__name", "state__group"]
else state_order[::-1]
)
issue_queryset = issue_queryset.annotate(
state_order=Case(
*[
When(state__group=state_group, then=Value(i))
for i, state_group in enumerate(state_order)
],
default=Value(len(state_order)),
output_field=CharField(),
)
issue_queryset = issue_queryset.annotate(
state_order=Case(
*[
When(state__group=state_group, then=Value(i))
for i, state_group in enumerate(state_order)
],
default=Value(len(state_order)),
output_field=CharField(),
)
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values"
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
else:
issue_queryset = issue_queryset.order_by(order_by_param)
).order_by(
"-max_values" if order_by_param.startswith("-") else "max_values"
)
else:
issue_queryset = issue_queryset.order_by(order_by_param)
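
The priority and state branches above annotate each row with its index in a hand-rolled order using Case/When, then sort by that annotation. A sketch of building that expression (assumes Django is installed; constructing it needs no database, and while the view uses CharField() as output_field, an integer rank works too):

# Sketch of the Case/When ranking used for priority ordering above.
from django.db.models import Case, IntegerField, Value, When

priority_order = ["urgent", "high", "medium", "low", "none"]

priority_rank = Case(
    *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
    output_field=IntegerField(),
)
# Applied as:
# queryset.annotate(priority_order=priority_rank).order_by("priority_order")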
issues = IssueLiteSerializer(issue_queryset, many=True).data
issues = IssueLiteSerializer(issue_queryset, many=True).data
## Grouping the results
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
if sub_group_by and sub_group_by == group_by:
return Response(
{"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
if group_by:
return Response(
group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
except Exception as e:
capture_exception(e)
## Grouping the results
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
if sub_group_by and sub_group_by == group_by:
return Response(
{"error": "Something went wrong please try again later"},
{"error": "Group by and sub group by cannot be same"},
status=status.HTTP_400_BAD_REQUEST,
)
if group_by:
grouped_results = group_results(issues, group_by, sub_group_by)
return Response(
grouped_results,
status=status.HTTP_200_OK,
)
return Response(issues, status=status.HTTP_200_OK)
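
group_results() buckets the serialized issues by a field, with an optional second level of bucketing. A hypothetical minimal version of that helper (the real one also handles list-valued fields, ordering, and more):

# Hypothetical minimal version of the grouping group_results() performs above.
from collections import defaultdict

def group_results(issues, group_by, sub_group_by=False):
    grouped = defaultdict(list)
    for issue in issues:
        grouped[str(issue.get(group_by))].append(issue)
    if not sub_group_by:
        return dict(grouped)
    return {key: group_results(values, sub_group_by) for key, values in grouped.items()}

issues = [
    {"state": "todo", "priority": "high"},
    {"state": "todo", "priority": "low"},
]
print(group_results(issues, "state", "priority"))
# {'todo': {'high': [...], 'low': [...]}}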
class IssueViewViewSet(BaseViewSet):
serializer_class = IssueViewSerializer
@ -243,51 +230,6 @@ class IssueViewViewSet(BaseViewSet):
)
class ViewIssuesEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id, view_id):
try:
view = IssueView.objects.get(pk=view_id)
queries = view.query
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(
**queries, project_id=project_id, workspace__slug=slug
)
.filter(**filters)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.prefetch_related(
Prefetch(
"issue_reactions",
queryset=IssueReaction.objects.select_related("actor"),
)
)
)
serializer = IssueLiteSerializer(issues, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
except IssueView.DoesNotExist:
return Response(
{"error": "Issue View does not exist"}, status=status.HTTP_404_NOT_FOUND
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
class IssueViewFavoriteViewSet(BaseViewSet):
serializer_class = IssueViewFavoriteSerializer
model = IssueViewFavorite
@ -302,49 +244,18 @@ class IssueViewFavoriteViewSet(BaseViewSet):
)
def create(self, request, slug, project_id):
try:
serializer = IssueViewFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "The view is already added to favorites"},
status=status.HTTP_410_GONE,
)
else:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = IssueViewFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, view_id):
try:
view_favourite = IssueViewFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
view_id=view_id,
)
view_favourite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except IssueViewFavorite.DoesNotExist:
return Response(
{"error": "View is not in favorites"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
view_favourite = IssueViewFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
view_id=view_id,
)
view_favourite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

File diff suppressed because it is too large

View File

@ -20,8 +20,8 @@ from plane.utils.issue_filters import issue_filters
row_mapping = {
"state__name": "State",
"state__group": "State Group",
"labels__name": "Label",
"assignees__display_name": "Assignee Name",
"labels__id": "Label",
"assignees__id": "Assignee Name",
"start_date": "Start Date",
"target_date": "Due Date",
"completed_at": "Completed At",
@ -29,8 +29,321 @@ row_mapping = {
"issue_count": "Issue Count",
"priority": "Priority",
"estimate": "Estimate",
"issue_cycle__cycle_id": "Cycle",
"issue_module__module_id": "Module"
}
ASSIGNEE_ID = "assignees__id"
LABEL_ID = "labels__id"
STATE_ID = "state_id"
CYCLE_ID = "issue_cycle__cycle_id"
MODULE_ID = "issue_module__module_id"
def send_export_email(email, slug, csv_buffer):
"""Helper function to send export email."""
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, [email])
msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
msg.send(fail_silently=False)
def get_assignee_details(slug, filters):
"""Fetch assignee details if required."""
return (
Issue.issue_objects.filter(
workspace__slug=slug, **filters, assignees__avatar__isnull=False
)
.distinct("assignees__id")
.order_by("assignees__id")
.values(
"assignees__avatar",
"assignees__display_name",
"assignees__first_name",
"assignees__last_name",
"assignees__id",
)
)
def get_label_details(slug, filters):
"""Fetch label details if required"""
return (
Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False)
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
)
def get_state_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
)
.distinct("state_id")
.order_by("state_id")
.values("state_id", "state__name", "state__color")
)
def get_module_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_module__module_id__isnull=False,
)
.distinct("issue_module__module_id")
.order_by("issue_module__module_id")
.values(
"issue_module__module_id",
"issue_module__module__name",
)
)
def get_cycle_details(slug, filters):
return (
Issue.issue_objects.filter(
workspace__slug=slug,
**filters,
issue_cycle__cycle_id__isnull=False,
)
.distinct("issue_cycle__cycle_id")
.order_by("issue_cycle__cycle_id")
.values(
"issue_cycle__cycle_id",
"issue_cycle__cycle__name",
)
)
def generate_csv_from_rows(rows):
"""Generate CSV buffer from rows."""
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
[writer.writerow(row) for row in rows]
return csv_buffer
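
The refactor splits the export into row generation, CSV buffering, and mailing. A short sketch of how the helpers above compose; generate_csv_from_rows() runs anywhere, while send_export_email() needs configured Django email settings:

# Composing the helpers defined above.
rows = [
    ("State", "Issue Count"),
    ("Backlog", 3),
    ("Done", 7),
]
csv_buffer = generate_csv_from_rows(rows)
print(csv_buffer.getvalue())
# send_export_email("user@example.com", "my-workspace", csv_buffer)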
def generate_segmented_rows(
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
segment_zero = list(
set(
item.get("segment") for sublist in distribution.values() for item in sublist
)
)
segmented = segment
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
] + segment_zero
rows = []
for item, data in distribution.items():
generated_row = [
item,
sum(obj.get(key) for obj in data if obj.get(key) is not None),
]
for segment in segment_zero:
value = next((x.get(key) for x in data if x.get("segment") == segment), "0")
generated_row.append(value)
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
generated_row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
generated_row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
generated_row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
generated_row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
generated_row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(generated_row))
if segmented == ASSIGNEE_ID:
for index, segm in enumerate(row_zero[2:]):
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(segm)
),
None,
)
if assignee:
row_zero[
index + 2
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)),
None,
)
if label:
row_zero[index + 2] = label["labels__name"]
if segmented == STATE_ID:
for index, segm in enumerate(row_zero[2:]):
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(segm)),
None,
)
if state:
row_zero[index + 2] = state["state__name"]
if segmented == MODULE_ID:
for index, segm in enumerate(row_zero[2:]):
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(segm)),
None,
)
if module:
row_zero[index + 2] = module["issue_module__module__name"]
if segmented == CYCLE_ID:
for index, segm in enumerate(row_zero[2:]):
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)),
None,
)
if cycle:
row_zero[index + 2] = cycle["issue_cycle__cycle__name"]
return [tuple(row_zero)] + rows
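
A worked example of the row shape generate_segmented_rows() builds, using a hand-made distribution that mirrors build_graph_plot's output shape (the segment column order shown is one possibility only, since segment_zero comes from a set):

# Hand-made distribution for illustration.
distribution = {
    "backlog": [{"segment": "high", "count": 2}, {"segment": "low", "count": 1}],
    "started": [{"segment": "high", "count": 4}],
}
# Header row:        ("State Group", "Issue Count", "high", "low")
# Row for "backlog": ("backlog", 3, 2, 1)    -> total first, then per-segment values
# Row for "started": ("started", 4, 4, "0")  -> missing segments fall back to "0"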
def generate_non_segmented_rows(
distribution,
x_axis,
y_axis,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
):
rows = []
for item, data in distribution.items():
row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")]
if x_axis == ASSIGNEE_ID:
assignee = next(
(
user
for user in assignee_details
if str(user[ASSIGNEE_ID]) == str(item)
),
None,
)
if assignee:
row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
None,
)
if label:
row[0] = f"{label['labels__name']}"
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
None,
)
if state:
row[0] = f"{state['state__name']}"
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
None,
)
if cycle:
row[0] = f"{cycle['issue_cycle__cycle__name']}"
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
None,
)
if module:
row[0] = f"{module['issue_module__module__name']}"
rows.append(tuple(row))
row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")]
return [tuple(row_zero)] + rows
@shared_task
def analytic_export_task(email, data, slug):
@ -43,134 +356,70 @@ def analytic_export_task(email, data, slug):
segment = data.get("segment", False)
distribution = build_graph_plot(
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
)
key = "count" if y_axis == "issue_count" else "estimate"
segmented = segment
assignee_details = (
get_assignee_details(slug, filters)
if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID
else {}
)
assignee_details = {}
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
assignee_details = (
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
.order_by("assignees__id")
.distinct("assignees__id")
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
)
label_details = (
get_label_details(slug, filters)
if x_axis == LABEL_ID or segment == LABEL_ID
else {}
)
state_details = (
get_state_details(slug, filters)
if x_axis == STATE_ID or segment == STATE_ID
else {}
)
cycle_details = (
get_cycle_details(slug, filters)
if x_axis == CYCLE_ID or segment == CYCLE_ID
else {}
)
module_details = (
get_module_details(slug, filters)
if x_axis == MODULE_ID or segment == MODULE_ID
else {}
)
if segment:
segment_zero = []
for item in distribution:
current_dict = distribution.get(item)
for current in current_dict:
segment_zero.append(current.get("segment"))
segment_zero = list(set(segment_zero))
row_zero = (
[
row_mapping.get(x_axis, "X-Axis"),
]
+ [
row_mapping.get(y_axis, "Y-Axis"),
]
+ segment_zero
rows = generate_segmented_rows(
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
)
rows = []
for item in distribution:
generated_row = [
item,
]
data = distribution.get(item)
# Add y axis values
generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))
for segment in segment_zero:
value = [x for x in data if x.get("segment") == segment]
if len(value):
generated_row.append(value[0].get(key))
else:
generated_row.append("0")
# x-axis replacement for names
if x_axis in ["assignees__id"]:
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
if len(assignee):
generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
rows.append(tuple(generated_row))
# If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names
if segmented in ["assignees__id"]:
for index, segm in enumerate(row_zero[2:]):
# find the name of the user
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
if len(assignee):
row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
else:
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
]
rows = []
for item in distribution:
row = [
item,
distribution.get(item)[0].get("count")
if y_axis == "issue_count"
else distribution.get(item)[0].get("estimate "),
]
# x-axis replacement to names
if x_axis in ["assignees__id"]:
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
if len(assignee):
row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
rows.append(tuple(row))
rows = [tuple(row_zero)] + rows
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
# Write CSV data to the buffer
for row in rows:
writer.writerow(row)
subject = "Your Export is ready"
html_content = render_to_string("emails/exports/analytics.html", {})
text_content = strip_tags(html_content)
csv_buffer.seek(0)
msg = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_FROM, [email]
rows = generate_non_segmented_rows(
distribution,
x_axis,
y_axis,
key,
assignee_details,
label_details,
state_details,
cycle_details,
module_details,
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
msg.send(fail_silently=False)
csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer)
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
return

View File

@ -23,7 +23,7 @@ def email_verification(first_name, email, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Verify your Email!"
subject = "Verify your Email!"
context = {
"first_name": first_name,

View File

@ -4,7 +4,6 @@ import io
import json
import boto3
import zipfile
from urllib.parse import urlparse, urlunparse
# Django imports
from django.conf import settings

View File

@ -32,7 +32,7 @@ def delete_old_s3_link():
else:
s3 = boto3.client(
"s3",
region_name="ap-south-1",
region_name=settings.AWS_REGION,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"),

View File

@ -8,20 +8,18 @@ from django.conf import settings
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import User
@shared_task
def forgot_password(first_name, email, uidb64, token, current_site):
try:
realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}"
realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}"
abs_url = current_site + realtivelink
from_email_string = settings.EMAIL_FROM
subject = f"Reset Your Password - Plane"
subject = "Reset Your Password - Plane"
context = {
"first_name": first_name,

View File

@ -2,8 +2,6 @@
import json
import requests
import uuid
import jwt
from datetime import datetime
# Django imports
from django.conf import settings
@ -25,8 +23,8 @@ from plane.db.models import (
WorkspaceIntegration,
Label,
User,
IssueProperty,
)
from .workspace_invitation_task import workspace_invitation
from plane.bgtasks.user_welcome_task import send_welcome_slack
@ -57,7 +55,7 @@ def service_importer(service, importer_id):
ignore_conflicts=True,
)
[
_ = [
send_welcome_slack.delay(
str(user.id),
True,
@ -103,6 +101,20 @@ def service_importer(service, importer_id):
ignore_conflicts=True,
)
IssueProperty.objects.bulk_create(
[
IssueProperty(
project_id=importer.project_id,
workspace_id=importer.workspace_id,
user=user,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
# Check if sync config is on for github importers
if service == "github" and importer.config.get("sync", False):
name = importer.metadata.get("name", False)
@ -142,7 +154,7 @@ def service_importer(service, importer_id):
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
_ = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
@ -164,7 +176,7 @@ def service_importer(service, importer_id):
ImporterSerializer(importer).data,
cls=DjangoJSONEncoder,
)
res = requests.post(
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
json=import_data_json,
headers=headers,

File diff suppressed because it is too large

View File

@ -58,28 +58,31 @@ def archive_old_issues():
# Check if Issues
if issues:
# Set the archive time to current time
archive_at = timezone.now().date()
issues_to_update = []
for issue in issues:
issue.archived_at = timezone.now()
issue.archived_at = archive_at
issues_to_update.append(issue)
# Bulk Update the issues and log the activity
if issues_to_update:
updated_issues = Issue.objects.bulk_update(
Issue.objects.bulk_update(
issues_to_update, ["archived_at"], batch_size=100
)
[
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
requested_data=json.dumps({"archived_at": str(archive_at)}),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
current_instance=None,
current_instance=json.dumps({"archived_at": None}),
subscriber=False,
epoch = int(timezone.now().timestamp())
epoch=int(timezone.now().timestamp())
)
for issue in updated_issues
for issue in issues_to_update
]
return
except Exception as e:
@ -139,7 +142,7 @@ def close_old_issues():
# Bulk Update the issues and log the activity
if issues_to_update:
updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
[
issue_activity.delay(
type="issue.activity.updated",
@ -149,9 +152,9 @@ def close_old_issues():
project_id=project_id,
current_instance=None,
subscriber=False,
epoch = int(timezone.now().timestamp())
epoch=int(timezone.now().timestamp())
)
for issue in updated_issues
for issue in issues_to_update
]
return
except Exception as e:

View File

@ -17,7 +17,7 @@ def magic_link(email, key, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Login for Plane"
subject = "Login for Plane"
context = {"magic_url": abs_url, "code": token}

View File

@ -0,0 +1,274 @@
# Python imports
import json
# Django imports
from django.utils import timezone
# Module imports
from plane.db.models import (
IssueMention,
IssueSubscriber,
Project,
User,
IssueAssignee,
Issue,
Notification,
IssueComment,
)
# Third Party imports
from celery import shared_task
from bs4 import BeautifulSoup
def get_new_mentions(requested_instance, current_instance):
# requested_data is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both instances of the data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
return new_mentions
# Get removed mentions
def get_removed_mentions(requested_instance, current_instance):
# requested_data is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both instances of the data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
removed_mentions = [
mention for mention in mentions_older if mention not in mentions_newer]
return removed_mentions
# Adds mentions as subscribers
def extract_mentions_as_subscribers(project_id, issue_id, mentions):
# mentions is an array of User IDs representing the FILTERED set of mentioned users
bulk_mention_subscribers = []
for mention_id in mentions:
# If the mentioned user is not already subscribed to the issue, they must be subscribed and sent a mention notification
if not IssueSubscriber.objects.filter(
issue_id=issue_id,
subscriber=mention_id,
project=project_id,
).exists():
mentioned_user = User.objects.get(pk=mention_id)
project = Project.objects.get(pk=project_id)
issue = Issue.objects.get(pk=issue_id)
bulk_mention_subscribers.append(IssueSubscriber(
workspace=project.workspace,
project=project,
issue=issue,
subscriber=mentioned_user,
))
return bulk_mention_subscribers
# Parse the issue description and extract mentions
def extract_mentions(issue_instance):
try:
# issue_instance is a JSON-encoded string containing description_html and other activity data
mentions = []
# Convert string to dictionary
data = json.loads(issue_instance)
html = data.get("description_html")
soup = BeautifulSoup(html, 'html.parser')
mention_tags = soup.find_all(
'mention-component', attrs={'target': 'users'})
mentions = [mention_tag['id'] for mention_tag in mention_tags]
return list(set(mentions))
except Exception as e:
return []
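
A quick check of extract_mentions() above (requires beautifulsoup4): it loads the JSON payload, parses description_html, and collects the id attribute of every <mention-component target="users"> tag:

import json

payload = json.dumps({
    "description_html": (
        '<p>Ping <mention-component target="users" id="u-1"></mention-component>'
        ' and <mention-component target="users" id="u-2"></mention-component></p>'
    )
})
print(extract_mentions(payload))  # e.g. ['u-1', 'u-2'] (set-based, order varies)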
@shared_task
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
issue_activities_created = (
json.loads(
issue_activities_created) if issue_activities_created is not None else None
)
if type not in [
"cycle.activity.created",
"cycle.activity.deleted",
"module.activity.created",
"module.activity.deleted",
"issue_reaction.activity.created",
"issue_reaction.activity.deleted",
"comment_reaction.activity.created",
"comment_reaction.activity.deleted",
"issue_vote.activity.created",
"issue_vote.activity.deleted",
"issue_draft.activity.created",
"issue_draft.activity.updated",
"issue_draft.activity.deleted",
]:
# Create Notifications
bulk_notifications = []
"""
Mention Tasks
1. Perform Diffing and Extract the mentions, that mention notification needs to be sent
2. From the latest set of mentions, extract the users which are not a subscribers & make them subscribers
"""
# Get new mentions from the newer instance
new_mentions = get_new_mentions(
requested_instance=requested_data, current_instance=current_instance)
removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance)
# Get New Subscribers from the mentions of the newer instance
requested_mentions = extract_mentions(
issue_instance=requested_data)
mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id, issue_id=issue_id, mentions=requested_mentions)
issue_subscribers = list(
IssueSubscriber.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id__in=list(new_mentions + [actor_id]))
.values_list("subscriber", flat=True)
)
issue_assignees = list(
IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(assignee_id=actor_id)
.values_list("assignee", flat=True)
)
issue_subscribers = issue_subscribers + issue_assignees
issue = Issue.objects.filter(pk=issue_id).first()
if subscriber:
# add the user to issue subscriber
try:
_ = IssueSubscriber.objects.get_or_create(
issue_id=issue_id, subscriber_id=actor_id
)
except Exception as e:
pass
project = Project.objects.get(pk=project_id)
for subscriber in list(set(issue_subscribers)):
for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment")
if issue_comment is not None:
issue_comment = IssueComment.objects.get(id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id)
bulk_notifications.append(
Notification(
workspace=project.workspace,
sender="in_app:issue_activities",
triggered_by_id=actor_id,
receiver_id=subscriber,
entity_identifier=issue_id,
entity_name="issue",
project=project,
title=issue_activity.get("comment"),
data={
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(issue_activity.get("id")),
"verb": str(issue_activity.get("verb")),
"field": str(issue_activity.get("field")),
"actor": str(issue_activity.get("actor_id")),
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
"issue_comment": str(
issue_comment.comment_stripped
if issue_activity.get("issue_comment") is not None
else ""
),
},
},
)
)
# Add Mentioned as Issue Subscribers
IssueSubscriber.objects.bulk_create(
mention_subscribers, batch_size=100)
for mention_id in new_mentions:
if (mention_id != actor_id):
for issue_activity in issue_activities_created:
bulk_notifications.append(
Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mention",
triggered_by_id=actor_id,
receiver_id=mention_id,
entity_identifier=issue_id,
entity_name="issue",
project=project,
message=f"You have been mentioned in the issue {issue.name}",
data={
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(issue_activity.get("id")),
"verb": str(issue_activity.get("verb")),
"field": str(issue_activity.get("field")),
"actor": str(issue_activity.get("actor_id")),
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
},
},
)
)
# Create New Mentions Here
aggregated_issue_mentions = []
for mention_id in new_mentions:
mentioned_user = User.objects.get(pk=mention_id)
aggregated_issue_mentions.append(
IssueMention(
mention=mentioned_user,
issue=issue,
project=project,
workspace=project.workspace
)
)
IssueMention.objects.bulk_create(
aggregated_issue_mentions, batch_size=100)
IssueMention.objects.filter(
issue=issue, mention__in=removed_mention).delete()
# Bulk create notifications
Notification.objects.bulk_create(bulk_notifications, batch_size=100)

View File

@ -11,7 +11,7 @@ from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Module imports
from plane.db.models import Workspace, User, WorkspaceMemberInvite
from plane.db.models import Workspace, WorkspaceMemberInvite
@shared_task

View File

@ -29,4 +29,4 @@ app.conf.beat_schedule = {
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'

View File

@ -33,9 +33,8 @@ def create_issue_relation(apps, schema_editor):
def update_issue_priority_choice(apps, schema_editor):
IssueModel = apps.get_model("db", "Issue")
updated_issues = []
for obj in IssueModel.objects.all():
if obj.priority is None:
obj.priority = "none"
for obj in IssueModel.objects.filter(priority=None):
obj.priority = "none"
updated_issues.append(obj)
IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100)
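
The data migrations in this commit follow one pattern: fetch only the rows that need the change, mutate them in memory, then write back in batches with bulk_update. A sketch of that shape as a complete migration file (the app label, model, and dependency below are placeholders, not Plane's real migration graph):

from django.db import migrations

def backfill_priority(apps, schema_editor):
    Issue = apps.get_model("db", "Issue")  # historical model state, not the live model
    updated = []
    for issue in Issue.objects.filter(priority=None):
        issue.priority = "none"
        updated.append(issue)
    Issue.objects.bulk_update(updated, ["priority"], batch_size=100)

class Migration(migrations.Migration):
    dependencies = [("db", "0001_initial")]  # placeholder dependency
    operations = [migrations.RunPython(backfill_priority, migrations.RunPython.noop)]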

View File

@ -26,19 +26,19 @@ def workspace_member_props(old_props):
"calendar_date_range": old_props.get("calendarDateRange", ""),
},
"display_properties": {
"assignee": old_props.get("properties", {}).get("assignee",None),
"attachment_count": old_props.get("properties", {}).get("attachment_count", None),
"created_on": old_props.get("properties", {}).get("created_on", None),
"due_date": old_props.get("properties", {}).get("due_date", None),
"estimate": old_props.get("properties", {}).get("estimate", None),
"key": old_props.get("properties", {}).get("key", None),
"labels": old_props.get("properties", {}).get("labels", None),
"link": old_props.get("properties", {}).get("link", None),
"priority": old_props.get("properties", {}).get("priority", None),
"start_date": old_props.get("properties", {}).get("start_date", None),
"state": old_props.get("properties", {}).get("state", None),
"sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", None),
"updated_on": old_props.get("properties", {}).get("updated_on", None),
"assignee": old_props.get("properties", {}).get("assignee", True),
"attachment_count": old_props.get("properties", {}).get("attachment_count", True),
"created_on": old_props.get("properties", {}).get("created_on", True),
"due_date": old_props.get("properties", {}).get("due_date", True),
"estimate": old_props.get("properties", {}).get("estimate", True),
"key": old_props.get("properties", {}).get("key", True),
"labels": old_props.get("properties", {}).get("labels", True),
"link": old_props.get("properties", {}).get("link", True),
"priority": old_props.get("properties", {}).get("priority", True),
"start_date": old_props.get("properties", {}).get("start_date", True),
"state": old_props.get("properties", {}).get("state", True),
"sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", True),
"updated_on": old_props.get("properties", {}).get("updated_on", True),
},
}
return new_props

View File

@ -1,24 +0,0 @@
# Generated by Django 4.2.3 on 2023-09-15 06:55
from django.db import migrations
def update_issue_activity(apps, schema_editor):
IssueActivityModel = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivityModel.objects.all():
if obj.field == "blocks":
obj.field = "blocked_by"
updated_issue_activity.append(obj)
IssueActivityModel.objects.bulk_update(updated_issue_activity, ["field"], batch_size=100)
class Migration(migrations.Migration):
dependencies = [
('db', '0044_auto_20230913_0709'),
]
operations = [
migrations.RunPython(update_issue_activity),
]

View File

@ -1,28 +1,47 @@
# Generated by Django 4.2.3 on 2023-09-19 14:21
# Generated by Django 4.2.5 on 2023-09-29 10:14
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.workspace
import uuid
def update_epoch(apps, schema_editor):
IssueActivity = apps.get_model('db', 'IssueActivity')
def update_issue_activity_priority(apps, schema_editor):
IssueActivity = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivity.objects.all():
obj.epoch = int(obj.created_at.timestamp())
for obj in IssueActivity.objects.filter(field="priority"):
# Set the old and new values to "none" when they are empty for priority activities
obj.new_value = obj.new_value or "none"
obj.old_value = obj.old_value or "none"
updated_issue_activity.append(obj)
IssueActivity.objects.bulk_update(updated_issue_activity, ["epoch"], batch_size=100)
IssueActivity.objects.bulk_update(
updated_issue_activity,
["new_value", "old_value"],
batch_size=2000,
)
def update_issue_activity_blocked(apps, schema_editor):
IssueActivity = apps.get_model("db", "IssueActivity")
updated_issue_activity = []
for obj in IssueActivity.objects.filter(field="blocks"):
# Set the field to blocked_by
obj.field = "blocked_by"
updated_issue_activity.append(obj)
IssueActivity.objects.bulk_update(
updated_issue_activity,
["field"],
batch_size=1000,
)
class Migration(migrations.Migration):
dependencies = [
('db', '0045_auto_20230915_0655'),
('db', '0044_auto_20230913_0709'),
]
operations = [
migrations.CreateModel(
migrations.CreateModel(
name='GlobalView',
fields=[
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
@ -33,6 +52,7 @@ class Migration(migrations.Migration):
('query', models.JSONField(verbose_name='View Query')),
('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)),
('query_data', models.JSONField(default=dict)),
('sort_order', models.FloatField(default=65535)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')),
@ -44,10 +64,16 @@ class Migration(migrations.Migration):
'ordering': ('-created_at',),
},
),
migrations.AddField(
model_name='workspacemember',
name='issue_props',
field=models.JSONField(default=plane.db.models.workspace.get_issue_props),
),
migrations.AddField(
model_name='issueactivity',
name='epoch',
field=models.FloatField(null=True),
),
migrations.RunPython(update_epoch),
migrations.RunPython(update_issue_activity_priority),
migrations.RunPython(update_issue_activity_blocked),
]

View File

@ -0,0 +1,41 @@
# Generated by Django 4.2.5 on 2023-10-18 12:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.issue
import uuid
class Migration(migrations.Migration):
dependencies = [
('db', '0045_issueactivity_epoch_workspacemember_issue_props_and_more'),
]
operations = [
migrations.CreateModel(
name="issue_mentions",
fields=[
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
('id', models.UUIDField(db_index=True, default=uuid.uuid4,editable=False, primary_key=True, serialize=False, unique=True)),
('mention', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL,related_name='issuemention_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to='db.issue')),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_issuemention', to='db.project')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuemention_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_issuemention', to='db.workspace')),
],
options={
'verbose_name': 'IssueMention',
'verbose_name_plural': 'IssueMentions',
'db_table': 'issue_mentions',
'ordering': ('-created_at',),
},
),
migrations.AlterField(
model_name='issueproperty',
name='properties',
field=models.JSONField(default=plane.db.models.issue.get_default_properties),
),
]

Some files were not shown because too many files have changed in this diff