forked from github/plane
Compare commits: preview...fix/typogr (222 commits)
Commit SHA1s (author, date, and message columns were not captured):
9cc69a0933 4f32831b8e 7b32ccbad0 882c43dcc6 bacddcb348 6ab462d1e7
f7e140bf68 7397313543 8cbf77e993 a510ffcdf9 2b11e7771f c40e45528e
dfb0c49070 318b984978 dc66d34655 7f3388b83a f8585e87f8 ddb6f8ed47
87dffe26c2 06ffcd67d1 b3cd08cc41 0b01b439a0 ef01f01191 30be08307a
576f3761b4 6ded663af1 9b8a8722c5 6274745284 1c82354a76 5f880029bd
fe0e4433dd 97d9a40d2d 763fd4dffc a4d231b39a 6e834ada66 023bf8a0a5
14f8d380c6 ced6bc1b19 a4b2ccf6b2 5fc6370231 baa3669a19 e05a6e34c8
657d8e97da 199357560d 0ce63ec29e aa15a36693 66b728db90 581c89fb5f
4336cfa0a5 108fc27a6e ca8e685c9c ef14f00777 0ff5f9ef62 8d021389a0
81bccc1884 e6ee7e01be 819fcc77e7 6fb9c2c3fc d7be4788de beab501c96
ace9ca0e57 7d3fbbec63 6075bef96a b67da79e2d b2cbbd80d3 733e89ca76
329e93cd43 c5cced1f19 ea69d82ea5 380b00c1a2 d55b4ed2e8 46a1556f83
796fd2d6c8 0ffa075110 876cdc987c 377ab63989 7edbc194f1 a1adee47b8
6db28a234a 223984d900 8662305f0a 43e0369f04 d34486aa37 9289bcbe9f
1549a3f5dc 66bc1cb167 a184b72056 e53781053b 443522691d aae54fb69f
3400c119bc d5853405ca db510dcfcd 62c0615012 3914a75334 0cbb201348
f7264364bd 35f8ffa5ab 6607caade7 8ee8270697 41ab962dd7 c22c6bb9b2
67de6d0729 f361cd045e 06d3cd7e73 10c52bf89b b717518fbe f48cd6f50c
3203ae6549 b8f603f920 96ff76af94 830675741f e489ad50dc 3dc18bc8fd
2d04917951 2bf7e63625 eb78fd6088 202ecd21df b2ac7b9ac6 51dff31926
e89f152779 3c9f57f8f4 1bc859c68c 11d57a5bf0 2980c7b00d 5c6a59ba35
a3ea7c8f10 cb922fb113 06564ee856 c7e6118804 069b8b3ed9 38a5b7bec0
236caaafe8 a6d5eab634 8d76c96a6f 97be4b60ae dece103873 c6125876be
1f85bf2302 20baba3bb0 85907b32d1 ef2bef83dc 6e7a96394a 5726f6955c
82665a35ee 4efd225599 2481706581 a17b08dd15 a7d6b528bd 9ba724b78d
c2da9783a3 784be47e91 0fdd9c28bf 644b06749b dd8c7a7487 e6a1f34713
1dff6b63f8 59dbbb29cd 6cb3939835 021c0675b7 67000892e5 3df4794e77
42ccd1de58 c8c89007c0 4cf3e69e22 fb1f65c2c1 d91b4e6fa1 561223ea71
982eba0bd1 7aaf840fb1 15927c9cae d46d70fcd5 de581102e3 b903126e5a
f44f70168f 3c10f00b04 f1de05e4de 61d4e2e016 c1eb5055e5 8d942e28da
f7461af3f5 29f3e02adc 9a704458b3 668dfd2e38 3b3f94ed03 e945aa9b71
6595a387d0 8839e42dc0 9db6312081 779ef2a4aa 51e17643a2 4c2074b6ff
c9ffc9465f 2b6c489513 0c63f21718 a987df38f4 878707f444 9369ee5008
0a88db975a dd60dec887 0c1097592e bed66235f2 26b1e9d5f1 79347ec62b
7b965179d8 fc51ffc589 96f6e37cc5 29774ce84a 8cbe9c26fc 7f42566207
b60237b676 1fe09d369f b7757c6b1a 1a25bacce1 6797df239d 43e7c10eb7
bdc9c9c2a8 f0c72bf249 a8904bfc48 b31041726b e6f947ad90 7963993171
@@ -1,5 +1,11 @@
 version = 1
 
+exclude_patterns = [
+    "bin/**",
+    "**/node_modules/",
+    "**/*.min.js"
+]
+
 [[analyzers]]
 name = "shell"
 
@@ -2,5 +2,16 @@
 *.pyc
 .env
 venv
-node_modules
+node_modules/
+**/node_modules/
 npm-debug.log
+.next/
+**/.next/
+.turbo/
+**/.turbo/
+build/
+**/build/
+out/
+**/out/
+dist/
+**/dist/
10 .env.example
@@ -21,15 +21,15 @@ AWS_S3_BUCKET_NAME="uploads"
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
 # Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
 
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
 
111 .github/workflows/build-branch.yml (vendored)
@@ -1,4 +1,3 @@
-
 name: Branch Build
 
 on:
@@ -7,16 +6,18 @@ on:
       - closed
     branches:
       - master
-      - release
+      - preview
       - qa
       - develop
+  release:
+    types: [released, prereleased]
 
 env:
-  TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
+  TARGET_BRANCH: ${{ github.event.pull_request.base.ref || github.event.release.target_commitish }}
 
 jobs:
-  branch_build_and_push:
-    if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
+  branch_build_setup:
+    if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) || github.event_name == 'release' }}
     name: Build-Push Web/Space/API/Proxy Docker Image
     runs-on: ubuntu-20.04
 
@@ -24,39 +25,6 @@ jobs:
       - name: Check out the repo
        uses: actions/checkout@v3.3.0
 
-      # - name: Set Target Branch Name on PR close
-      #   if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
-      #   run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
-
-      # - name: Set Target Branch Name on other than PR close
-      #   if: ${{ github.event_name == 'push' }}
-      #   run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
-
-      - uses: ASzc/change-string-case-action@v2
-        id: gh_branch_upper_lower
-        with:
-          string: ${{env.TARGET_BRANCH}}
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_replace_slash
-        with:
-          source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
-          find: '/'
-          replace: '-'
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_replace_dot
-        with:
-          source: ${{ steps.gh_branch_replace_slash.outputs.value }}
-          find: '.'
-          replace: ''
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_clean
-        with:
-          source: ${{ steps.gh_branch_replace_dot.outputs.value }}
-          find: '_'
-          replace: ''
       - name: Uploading Proxy Source
        uses: actions/upload-artifact@v3
        with:
@@ -77,7 +45,6 @@ jobs:
          !./nginx
          !./deploy
          !./space
-
      - name: Uploading Space Source
        uses: actions/upload-artifact@v3
        with:
@@ -89,12 +56,24 @@ jobs:
          !./deploy
          !./web
    outputs:
-      gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
+      gh_branch_name: ${{ env.TARGET_BRANCH }}
 
  branch_build_push_frontend:
    runs-on: ubuntu-20.04
-    needs: [ branch_build_and_push ]
+    needs: [branch_build_setup]
+    env:
+      FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
    steps:
+      - name: Set Frontend Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable
+          else
+            TAG=${{ env.FRONTEND_TAG }}
+          fi
+          echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.5.0
 
@@ -114,7 +93,7 @@ jobs:
          context: .
          file: ./web/Dockerfile.web
          platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.FRONTEND_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
@@ -123,8 +102,20 @@ jobs:
 
  branch_build_push_space:
    runs-on: ubuntu-20.04
-    needs: [ branch_build_and_push ]
+    needs: [branch_build_setup]
+    env:
+      SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
    steps:
+      - name: Set Space Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable
+          else
+            TAG=${{ env.SPACE_TAG }}
+          fi
+          echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.5.0
 
@@ -144,7 +135,7 @@ jobs:
          context: .
          file: ./space/Dockerfile.space
          platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.SPACE_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
@@ -153,8 +144,20 @@ jobs:
 
  branch_build_push_backend:
    runs-on: ubuntu-20.04
-    needs: [ branch_build_and_push ]
+    needs: [branch_build_setup]
+    env:
+      BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
    steps:
+      - name: Set Backend Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable
+          else
+            TAG=${{ env.BACKEND_TAG }}
+          fi
+          echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.5.0
 
@@ -175,7 +178,7 @@ jobs:
          file: ./Dockerfile.api
          platforms: linux/amd64
          push: true
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.BACKEND_TAG }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -183,8 +186,20 @@ jobs:
 
  branch_build_push_proxy:
    runs-on: ubuntu-20.04
-    needs: [ branch_build_and_push ]
+    needs: [branch_build_setup]
+    env:
+      PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
    steps:
+      - name: Set Proxy Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable
+          else
+            TAG=${{ env.PROXY_TAG }}
+          fi
+          echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.5.0
 
@@ -205,7 +220,7 @@ jobs:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.PROXY_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
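As a worked example of the tag-selection logic introduced above (derived only from the if/elif/else blocks in this workflow): a pull request merged into `preview` pushes `<DOCKERHUB_USERNAME>/plane-frontend:preview`; a GitHub release cut from `master` pushes both `plane-frontend:latest` and `plane-frontend:<release tag>`; any other merge into `master` pushes `plane-frontend:stable`. The space, backend, and proxy jobs follow the same pattern with their own image names.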
65 .github/workflows/codeql.yml (vendored, new file)
@@ -0,0 +1,65 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ 'develop', 'hot-fix', 'stage-release' ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ 'develop' ]
+  schedule:
+    - cron: '53 19 * * 5'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'python', 'javascript' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+        # Use only 'java' to analyze code written in Java, Kotlin or both
+        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
+        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v2
+        with:
+          languages: ${{ matrix.language }}
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+
+          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+          # queries: security-extended,security-and-quality
+
+
+      # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+      # If this step fails, then you should remove it and run the build manually (see below)
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v2
+
+      # ℹ️ Command-line programs to run using the OS shell.
+      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+      # If the Autobuild fails above, remove it and uncomment the following three lines.
+      # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+
+      # - run: |
+      #     echo "Run, Build Application using script"
+      #     ./location_of_script_within_repo/buildscript.sh
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v2
+        with:
+          category: "/language:${{matrix.language}}"
107 .github/workflows/update-docker-images.yml (vendored, deleted)
@@ -1,107 +0,0 @@
-name: Update Docker Images for Plane on Release
-
-on:
-  release:
-    types: [released, prereleased]
-
-jobs:
-  build_push_backend:
-    name: Build and Push Api Server Docker Image
-    runs-on: ubuntu-20.04
-
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v3.3.0
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
-        id: metaFrontend
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
-          tags: |
-            type=ref,event=tag
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
-        id: metaBackend
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
-          tags: |
-            type=ref,event=tag
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
-        id: metaSpace
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
-          tags: |
-            type=ref,event=tag
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
-        id: metaProxy
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
-          tags: |
-            type=ref,event=tag
-
-      - name: Build and Push Frontend to Docker Container Registry
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: .
-          file: ./web/Dockerfile.web
-          platforms: linux/amd64
-          tags: ${{ steps.metaFrontend.outputs.tags }}
-          push: true
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Build and Push Backend to Docker Hub
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: ./apiserver
-          file: ./apiserver/Dockerfile.api
-          platforms: linux/amd64
-          push: true
-          tags: ${{ steps.metaBackend.outputs.tags }}
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Build and Push Plane-Deploy to Docker Hub
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: .
-          file: ./space/Dockerfile.space
-          platforms: linux/amd64
-          push: true
-          tags: ${{ steps.metaSpace.outputs.tags }}
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Build and Push Plane-Proxy to Docker Hub
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: ./nginx
-          file: ./nginx/Dockerfile
-          platforms: linux/amd64
-          push: true
-          tags: ${{ steps.metaProxy.outputs.tags }}
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
1 .gitignore (vendored)
@@ -79,3 +79,4 @@ pnpm-workspace.yaml
 tmp/
 ## packages
 dist
+.temp/
@@ -43,8 +43,6 @@ FROM python:3.11.1-alpine3.17 AS backend
 ENV PYTHONDONTWRITEBYTECODE 1
 ENV PYTHONUNBUFFERED 1
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
-ENV DJANGO_SETTINGS_MODULE plane.settings.production
-ENV DOCKERIZED 1
 
 WORKDIR /code
 
@@ -81,7 +79,6 @@ COPY apiserver/manage.py manage.py
 COPY apiserver/plane plane/
 COPY apiserver/templates templates/
 
-COPY apiserver/gunicorn.config.py ./
 RUN apk --no-cache add "bash~=5.2"
 COPY apiserver/bin ./bin/
 
23 ENV_SETUP.md
@@ -31,12 +31,10 @@ AWS_S3_BUCKET_NAME="uploads"
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
-# Settings related to Docker
-DOCKERIZED=1
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
@@ -78,7 +76,6 @@ NEXT_PUBLIC_ENABLE_OAUTH=0
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
 
 # Error logs
 SENTRY_DSN=""
@@ -115,24 +112,22 @@ AWS_S3_BUCKET_NAME="uploads"
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
+# Settings related to Docker
+DOCKERIZED=1 # Deprecated
+
 # Github
 GITHUB_CLIENT_SECRET="" # For fetching release notes
 
-# Settings related to Docker
-DOCKERIZED=1
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
 
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
-
 # SignUps
 ENABLE_SIGNUP="1"
@@ -57,10 +57,6 @@ Setting up local environment is extremely easy and straight forward. Follow the
 1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
 1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
 
-```bash
-./setup.sh
-```
-
 You are ready to make changes to the code. Do not forget to refresh the browser (in case id does not auto-reload)
 
 Thats it!
@@ -1,10 +1,11 @@
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.production"
+CORS_ALLOWED_ORIGINS=""
 
 # Error logs
 SENTRY_DSN=""
+SENTRY_ENVIRONMENT="development"
 
 # Database Settings
 PGUSER="plane"
@@ -13,20 +14,16 @@ PGHOST="plane-db"
 PGDATABASE="plane"
 DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
 
+# Oauth variables
+GOOGLE_CLIENT_ID=""
+GITHUB_CLIENT_ID=""
+GITHUB_CLIENT_SECRET=""
+
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"
 
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane <team@mailer.plane.so>"
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -38,29 +35,26 @@ AWS_S3_BUCKET_NAME="uploads"
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
 # Github
 GITHUB_CLIENT_SECRET="" # For fetching release notes
 
 # Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
 
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
 
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
 
 # SignUps
 ENABLE_SIGNUP="1"
 
 
 # Enable Email/Password Signup
 ENABLE_EMAIL_PASSWORD="1"
 
@@ -70,6 +64,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
 # Email redirections and minio domain settings
 WEB_URL="http://localhost"
 
 
 # Gunicorn Workers
 GUNICORN_WORKERS=2
@@ -43,8 +43,7 @@ USER captain
 COPY manage.py manage.py
 COPY plane plane/
 COPY templates templates/
-COPY gunicorn.config.py ./
+COPY package.json package.json
 USER root
 RUN apk --no-cache add "bash~=5.2"
 COPY ./bin ./bin/
@@ -3,7 +3,28 @@ set -e
 python manage.py wait_for_db
 python manage.py migrate
 
-# Create a Default User
-python bin/user_script.py
+# Create the default bucket
+#!/bin/bash
 
-exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance $MACHINE_SIGNATURE
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
+
+exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
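For illustration only, a rough Python equivalent of the machine-signature step in the script above; it is a sketch that assumes the same Linux commands and files are available, and it omits the MAC-address lookup for brevity:

```python
import hashlib
import subprocess


def machine_signature() -> str:
    # Concatenate system information and hash it, loosely mirroring
    # echo "$HOSTNAME$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum
    parts = [
        subprocess.run(["hostname"], capture_output=True, text=True).stdout,
        open("/proc/cpuinfo").read(),
        subprocess.run(["free", "-h"], capture_output=True, text=True).stdout,
        subprocess.run(["df", "-h"], capture_output=True, text=True).stdout,
    ]
    return hashlib.sha256("".join(parts).encode()).hexdigest()
```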
@@ -1,28 +0,0 @@
-import os, sys
-import uuid
-
-sys.path.append("/code")
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
-import django
-
-django.setup()
-
-from plane.db.models import User
-
-
-def populate():
-    default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
-    default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
-
-    if not User.objects.filter(email=default_email).exists():
-        user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
-        user.set_password(default_password)
-        user.save()
-        print(f"User created with an email: {default_email}")
-    else:
-        print(f"User already exists with the default email: {default_email}")
-
-
-if __name__ == "__main__":
-    populate()
0 apiserver/file.txt (Normal file, new empty file)
@@ -1,6 +0,0 @@
-from psycogreen.gevent import patch_psycopg
-
-
-def post_fork(server, worker):
-    patch_psycopg()
-    worker.log.info("Made Psycopg2 Green")
4 apiserver/package.json (Normal file, new file)
@@ -0,0 +1,4 @@
+{
+    "name": "plane-api",
+    "version": "0.13.2"
+}
0 apiserver/plane/api/middleware/__init__.py (Normal file, new empty file)
47 apiserver/plane/api/middleware/api_authentication.py (Normal file, new file)
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+    """
+    Authentication with an API Key
+    """
+
+    www_authenticate_realm = "api"
+    media_type = "application/json"
+    auth_header_name = "X-Api-Key"
+
+    def get_api_token(self, request):
+        return request.headers.get(self.auth_header_name)
+
+    def validate_api_token(self, token):
+        try:
+            api_token = APIToken.objects.get(
+                Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+                token=token,
+                is_active=True,
+            )
+        except APIToken.DoesNotExist:
+            raise AuthenticationFailed("Given API token is not valid")
+
+        # save api token last used
+        api_token.last_used = timezone.now()
+        api_token.save(update_fields=["last_used"])
+        return (api_token.user, api_token.token)
+
+    def authenticate(self, request):
+        token = self.get_api_token(request=request)
+        if not token:
+            return None
+
+        # Validate the API token
+        user, token = self.validate_api_token(token)
+        return user, token
@@ -1,2 +0,0 @@
-from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
-from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
41 apiserver/plane/api/rate_limit.py (Normal file, new file)
@@ -0,0 +1,41 @@
+from rest_framework.throttling import SimpleRateThrottle
+
+class ApiKeyRateThrottle(SimpleRateThrottle):
+    scope = 'api_key'
+    rate = '60/minute'
+
+    def get_cache_key(self, request, view):
+        # Retrieve the API key from the request header
+        api_key = request.headers.get('X-Api-Key')
+        if not api_key:
+            return None  # Allow the request if there's no API key
+
+        # Use the API key as part of the cache key
+        return f'{self.scope}:{api_key}'
+
+    def allow_request(self, request, view):
+        allowed = super().allow_request(request, view)
+
+        if allowed:
+            now = self.timer()
+            # Calculate the remaining limit and reset time
+            history = self.cache.get(self.key, [])
+
+            # Remove old histories
+            while history and history[-1] <= now - self.duration:
+                history.pop()
+
+            # Calculate the requests
+            num_requests = len(history)
+
+            # Check available requests
+            available = self.num_requests - num_requests
+
+            # Unix timestamp for when the rate limit will reset
+            reset_time = int(now + self.duration)
+
+            # Add headers
+            request.META['X-RateLimit-Remaining'] = max(0, available)
+            request.META['X-RateLimit-Reset'] = reset_time
+
+        return allowed
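A minimal sketch of attaching the throttle above to a view; the view name is hypothetical and the diff does not show where `ApiKeyRateThrottle` is actually registered:

```python
from rest_framework.response import Response
from rest_framework.views import APIView

# Path inferred from apiserver/plane/api/rate_limit.py (assumption).
from plane.api.rate_limit import ApiKeyRateThrottle


class ThrottledEndpoint(APIView):
    # Requests sharing one X-Api-Key value share the 60/minute budget defined above;
    # requests without the header bypass this throttle (get_cache_key returns None).
    throttle_classes = [ApiKeyRateThrottle]

    def get(self, request):
        return Response({"ok": True})
```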
@@ -1,102 +1,17 @@
-from .base import BaseSerializer
-from .user import (
-    UserSerializer,
-    UserLiteSerializer,
-    ChangePasswordSerializer,
-    ResetPasswordSerializer,
-    UserAdminLiteSerializer,
-    UserMeSerializer,
-    UserMeSettingsSerializer,
-)
-from .workspace import (
-    WorkSpaceSerializer,
-    WorkSpaceMemberSerializer,
-    TeamSerializer,
-    WorkSpaceMemberInviteSerializer,
-    WorkspaceLiteSerializer,
-    WorkspaceThemeSerializer,
-    WorkspaceMemberAdminSerializer,
-    WorkspaceMemberMeSerializer,
-)
-from .project import (
-    ProjectSerializer,
-    ProjectListSerializer,
-    ProjectDetailSerializer,
-    ProjectMemberSerializer,
-    ProjectMemberInviteSerializer,
-    ProjectIdentifierSerializer,
-    ProjectFavoriteSerializer,
-    ProjectLiteSerializer,
-    ProjectMemberLiteSerializer,
-    ProjectDeployBoardSerializer,
-    ProjectMemberAdminSerializer,
-    ProjectPublicMemberSerializer,
-)
-from .state import StateSerializer, StateLiteSerializer
-from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import (
-    CycleSerializer,
-    CycleIssueSerializer,
-    CycleFavoriteSerializer,
-    CycleWriteSerializer,
-)
-from .asset import FileAssetSerializer
+from .user import UserLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectSerializer, ProjectLiteSerializer
 from .issue import (
-    IssueCreateSerializer,
-    IssueActivitySerializer,
-    IssueCommentSerializer,
-    IssuePropertySerializer,
-    IssueAssigneeSerializer,
-    LabelSerializer,
     IssueSerializer,
-    IssueFlatSerializer,
-    IssueStateSerializer,
+    LabelSerializer,
     IssueLinkSerializer,
-    IssueLiteSerializer,
     IssueAttachmentSerializer,
-    IssueSubscriberSerializer,
-    IssueReactionSerializer,
-    CommentReactionSerializer,
-    IssueVoteSerializer,
-    IssueRelationSerializer,
-    RelatedIssueSerializer,
-    IssuePublicSerializer,
+    IssueCommentSerializer,
+    IssueAttachmentSerializer,
+    IssueActivitySerializer,
+    IssueExpandSerializer,
 )
-from .module import (
-    ModuleWriteSerializer,
-    ModuleSerializer,
-    ModuleIssueSerializer,
-    ModuleLinkSerializer,
-    ModuleFavoriteSerializer,
-)
-
-from .api_token import APITokenSerializer
-
-from .integration import (
-    IntegrationSerializer,
-    WorkspaceIntegrationSerializer,
-    GithubIssueSyncSerializer,
-    GithubRepositorySerializer,
-    GithubRepositorySyncSerializer,
-    GithubCommentSyncSerializer,
-    SlackProjectSyncSerializer,
-)
-
-from .importer import ImporterSerializer
-
-from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
-
-from .estimate import (
-    EstimateSerializer,
-    EstimatePointSerializer,
-    EstimateReadSerializer,
-)
-
-from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
-
-from .analytic import AnalyticViewSerializer
-
-from .notification import NotificationSerializer
-
-from .exporter import ExporterHistorySerializer
+from .state import StateLiteSerializer, StateSerializer
+from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
+from .inbox import InboxIssueSerializer
@@ -1,14 +0,0 @@
-from .base import BaseSerializer
-from plane.db.models import APIToken
-
-
-class APITokenSerializer(BaseSerializer):
-    class Meta:
-        model = APIToken
-        fields = [
-            "label",
-            "user",
-            "user_type",
-            "workspace",
-            "created_at",
-        ]
@@ -1,22 +1,22 @@
+# Third party imports
 from rest_framework import serializers
 
 
 class BaseSerializer(serializers.ModelSerializer):
     id = serializers.PrimaryKeyRelatedField(read_only=True)
 
-class DynamicBaseSerializer(BaseSerializer):
-
     def __init__(self, *args, **kwargs):
         # If 'fields' is provided in the arguments, remove it and store it separately.
         # This is done so as not to pass this custom argument up to the superclass.
-        fields = kwargs.pop("fields", None)
+        fields = kwargs.pop("fields", [])
+        self.expand = kwargs.pop("expand", []) or []
 
         # Call the initialization of the superclass.
         super().__init__(*args, **kwargs)
 
         # If 'fields' was provided, filter the fields of the serializer accordingly.
-        if fields is not None:
-            self.fields = self._filter_fields(fields)
+        if fields:
+            self.fields = self._filter_fields(fields=fields)
 
     def _filter_fields(self, fields):
         """
@@ -52,7 +52,54 @@ class DynamicBaseSerializer(BaseSerializer):
         allowed = set(allowed)
 
         # Remove fields from the serializer that aren't in the 'allowed' list.
-        for field_name in (existing - allowed):
+        for field_name in existing - allowed:
             self.fields.pop(field_name)
 
         return self.fields
+
+    def to_representation(self, instance):
+        response = super().to_representation(instance)
+
+        # Ensure 'expand' is iterable before processing
+        if self.expand:
+            for expand in self.expand:
+                if expand in self.fields:
+                    # Import all the expandable serializers
+                    from . import (
+                        WorkspaceLiteSerializer,
+                        ProjectLiteSerializer,
+                        UserLiteSerializer,
+                        StateLiteSerializer,
+                        IssueSerializer,
+                    )
+
+                    # Expansion mapper
+                    expansion = {
+                        "user": UserLiteSerializer,
+                        "workspace": WorkspaceLiteSerializer,
+                        "project": ProjectLiteSerializer,
+                        "default_assignee": UserLiteSerializer,
+                        "project_lead": UserLiteSerializer,
+                        "state": StateLiteSerializer,
+                        "created_by": UserLiteSerializer,
+                        "issue": IssueSerializer,
+                        "actor": UserLiteSerializer,
+                        "owned_by": UserLiteSerializer,
+                        "members": UserLiteSerializer,
+                    }
+                    # Check if field in expansion then expand the field
+                    if expand in expansion:
+                        if isinstance(response.get(expand), list):
+                            exp_serializer = expansion[expand](
+                                getattr(instance, expand), many=True
+                            )
+                        else:
+                            exp_serializer = expansion[expand](
+                                getattr(instance, expand)
+                            )
+                        response[expand] = exp_serializer.data
+                    else:
+                        # You might need to handle this case differently
+                        response[expand] = getattr(instance, f"{expand}_id", None)
+
+        return response
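A minimal sketch of the `fields` / `expand` keyword arguments handled above; the serializer import path and the `issue` instance are illustrative only, not something this diff shows:

```python
from plane.api.serializers import IssueSerializer  # path assumed for illustration

# 'issue' stands for an existing Issue instance fetched elsewhere (illustrative).
serializer = IssueSerializer(
    issue,
    fields=["id", "name", "state"],   # keep only these keys in the output
    expand=["state"],                 # render 'state' via StateLiteSerializer instead of a bare id
)
data = serializer.data
```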
@@ -3,43 +3,19 @@ from rest_framework import serializers
 
 # Module imports
 from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .issue import IssueStateSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import Cycle, CycleIssue, CycleFavorite
-
-
-class CycleWriteSerializer(BaseSerializer):
-    def validate(self, data):
-        if (
-            data.get("start_date", None) is not None
-            and data.get("end_date", None) is not None
-            and data.get("start_date", None) > data.get("end_date", None)
-        ):
-            raise serializers.ValidationError("Start date cannot exceed end date")
-        return data
-
-    class Meta:
-        model = Cycle
-        fields = "__all__"
+from plane.db.models import Cycle, CycleIssue
 
 
 class CycleSerializer(BaseSerializer):
-    owned_by = UserLiteSerializer(read_only=True)
-    is_favorite = serializers.BooleanField(read_only=True)
     total_issues = serializers.IntegerField(read_only=True)
     cancelled_issues = serializers.IntegerField(read_only=True)
     completed_issues = serializers.IntegerField(read_only=True)
     started_issues = serializers.IntegerField(read_only=True)
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
-    assignees = serializers.SerializerMethodField(read_only=True)
     total_estimates = serializers.IntegerField(read_only=True)
     completed_estimates = serializers.IntegerField(read_only=True)
     started_estimates = serializers.IntegerField(read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
 
     def validate(self, data):
         if (
@@ -50,30 +26,15 @@ class CycleSerializer(BaseSerializer):
             raise serializers.ValidationError("Start date cannot exceed end date")
         return data
 
-    def get_assignees(self, obj):
-        members = [
-            {
-                "avatar": assignee.avatar,
-                "display_name": assignee.display_name,
-                "id": assignee.id,
-            }
-            for issue_cycle in obj.issue_cycle.prefetch_related(
-                "issue__assignees"
-            ).all()
-            for assignee in issue_cycle.issue.assignees.all()
-        ]
-        # Use a set comprehension to return only the unique objects
-        unique_objects = {frozenset(item.items()) for item in members}
-
-        # Convert the set back to a list of dictionaries
-        unique_list = [dict(item) for item in unique_objects]
-
-        return unique_list
-
     class Meta:
         model = Cycle
         fields = "__all__"
         read_only_fields = [
+            "id",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
             "workspace",
             "project",
             "owned_by",
@@ -81,7 +42,6 @@ class CycleSerializer(BaseSerializer):
 
 
 class CycleIssueSerializer(BaseSerializer):
-    issue_detail = IssueStateSerializer(read_only=True, source="issue")
     sub_issues_count = serializers.IntegerField(read_only=True)
 
     class Meta:
@@ -94,14 +54,8 @@ class CycleIssueSerializer(BaseSerializer):
         ]
 
 
-class CycleFavoriteSerializer(BaseSerializer):
-    cycle_detail = CycleSerializer(source="cycle", read_only=True)
-
+class CycleLiteSerializer(BaseSerializer):
     class Meta:
-        model = CycleFavorite
+        model = Cycle
         fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "user",
-        ]
@@ -1,57 +1,19 @@
-# Third party frameworks
-from rest_framework import serializers
-
-# Module imports
+# Module improts
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
-from .project import ProjectLiteSerializer
-from .state import StateLiteSerializer
-from .user import UserLiteSerializer
-from plane.db.models import Inbox, InboxIssue, Issue
-
-
-class InboxSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    pending_issue_count = serializers.IntegerField(read_only=True)
-
-    class Meta:
-        model = Inbox
-        fields = "__all__"
-        read_only_fields = [
-            "project",
-            "workspace",
-        ]
+from plane.db.models import InboxIssue
 
 
 class InboxIssueSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
     class Meta:
         model = InboxIssue
         fields = "__all__"
         read_only_fields = [
-            "project",
+            "id",
             "workspace",
+            "project",
+            "issue",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
         ]
-
-
-class InboxIssueLiteSerializer(BaseSerializer):
-    class Meta:
-        model = InboxIssue
-        fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
-        read_only_fields = fields
-
-
-class IssueStateInboxSerializer(BaseSerializer):
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
-    issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
-
-    class Meta:
-        model = Issue
-        fields = "__all__"
@@ -1,95 +1,53 @@
+from lxml import html
+
 # Django imports
 from django.utils import timezone

-# Third Party imports
+# Third party imports
 from rest_framework import serializers

 # Module imports
-from .base import BaseSerializer, DynamicBaseSerializer
-from .user import UserLiteSerializer
-from .state import StateSerializer, StateLiteSerializer
-from .project import ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
 from plane.db.models import (
     User,
     Issue,
-    IssueActivity,
-    IssueComment,
-    IssueProperty,
+    State,
     IssueAssignee,
-    IssueSubscriber,
-    IssueLabel,
     Label,
-    CycleIssue,
-    Cycle,
-    Module,
-    ModuleIssue,
+    IssueLabel,
     IssueLink,
+    IssueComment,
     IssueAttachment,
-    IssueReaction,
-    CommentReaction,
-    IssueVote,
-    IssueRelation,
+    IssueActivity,
+    ProjectMember,
 )
+from .base import BaseSerializer
+from .cycle import CycleSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleLiteSerializer
+from .user import UserLiteSerializer
+from .state import StateLiteSerializer


-class IssueFlatSerializer(BaseSerializer):
-    ## Contain only flat fields
-
-    class Meta:
-        model = Issue
-        fields = [
-            "id",
-            "name",
-            "description",
-            "description_html",
-            "priority",
-            "start_date",
-            "target_date",
-            "sequence_id",
-            "sort_order",
-            "is_draft",
-        ]
-
-
-class IssueProjectLiteSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
-    class Meta:
-        model = Issue
-        fields = [
-            "id",
-            "project_detail",
-            "name",
-            "sequence_id",
-        ]
-        read_only_fields = fields
-
-
-##TODO: Find a better way to write this serializer
-## Find a better approach to save manytomany?
-class IssueCreateSerializer(BaseSerializer):
-    state_detail = StateSerializer(read_only=True, source="state")
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
+class IssueSerializer(BaseSerializer):
     assignees = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+        child=serializers.PrimaryKeyRelatedField(
+            queryset=User.objects.values_list("id", flat=True)
+        ),
         write_only=True,
         required=False,
     )

     labels = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+        child=serializers.PrimaryKeyRelatedField(
+            queryset=Label.objects.values_list("id", flat=True)
+        ),
         write_only=True,
         required=False,
     )

     class Meta:
         model = Issue
-        fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
             "created_by",
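Note on the assignees and labels fields in the hunk above: both are write-only lists of primary keys, and with values_list("id", flat=True) as the queryset the field validates raw ids instead of loading full User or Label rows. A minimal sketch of the payload shape such a serializer expects; the field names come from the diff, the UUIDs are placeholders, not real records.

# Hypothetical request body for the issue serializer shown above.
payload = {
    "name": "Fix login redirect",
    "priority": "high",
    # write-only lists of ids; never echoed back as-is in the response
    "assignees": ["0f6c1c1e-0000-0000-0000-000000000001"],
    "labels": ["0f6c1c1e-0000-0000-0000-000000000002"],
}
print(sorted(payload))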
@@ -97,12 +55,10 @@ class IssueCreateSerializer(BaseSerializer):
             "created_at",
             "updated_at",
         ]
-
-    def to_representation(self, instance):
-        data = super().to_representation(instance)
-        data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
-        data['labels'] = [str(label.id) for label in instance.labels.all()]
-        return data
+        exclude = [
+            "description",
+            "description_stripped",
+        ]

     def validate(self, data):
         if (
@@ -111,6 +67,53 @@ class IssueCreateSerializer(BaseSerializer):
             and data.get("start_date", None) > data.get("target_date", None)
         ):
             raise serializers.ValidationError("Start date cannot exceed target date")

+        try:
+            if(data.get("description_html", None) is not None):
+                parsed = html.fromstring(data["description_html"])
+                parsed_str = html.tostring(parsed, encoding='unicode')
+                data["description_html"] = parsed_str
+
+        except Exception as e:
+            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+
+        # Validate assignees are from project
+        if data.get("assignees", []):
+            data["assignees"] = ProjectMember.objects.filter(
+                project_id=self.context.get("project_id"),
+                is_active=True,
+                member_id__in=data["assignees"],
+            ).values_list("member_id", flat=True)
+
+        # Validate labels are from project
+        if data.get("labels", []):
+            data["labels"] = Label.objects.filter(
+                project_id=self.context.get("project_id"),
+                id__in=data["labels"],
+            ).values_list("id", flat=True)
+
+        # Check state is from the project only else raise validation error
+        if (
+            data.get("state")
+            and not State.objects.filter(
+                project_id=self.context.get("project_id"), pk=data.get("state")
+            ).exists()
+        ):
+            raise serializers.ValidationError(
+                "State is not valid please pass a valid state_id"
+            )
+
+        # Check parent issue is from workspace as it can be cross workspace
+        if (
+            data.get("parent")
+            and not Issue.objects.filter(
+                workspace_id=self.context.get("workspace_id"), pk=data.get("parent")
+            ).exists()
+        ):
+            raise serializers.ValidationError(
+                "Parent is not valid issue_id please pass a valid issue_id"
+            )
+
         return data

     def create(self, validated_data):
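The validate() additions above round-trip description_html through lxml so that malformed markup is rejected before save. A standalone sketch of that round-trip, assuming only that the lxml package from the file's first import is installed:

from lxml import html

def normalize_html(fragment: str) -> str:
    # Parse and re-serialize; lxml raises on markup it cannot parse,
    # and the output is a normalized unicode string.
    parsed = html.fromstring(fragment)
    return html.tostring(parsed, encoding="unicode")

print(normalize_html("<p>hello <b>world</b></p>"))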
@@ -131,14 +134,14 @@ class IssueCreateSerializer(BaseSerializer):
             IssueAssignee.objects.bulk_create(
                 [
                     IssueAssignee(
-                        assignee=user,
+                        assignee_id=assignee_id,
                         issue=issue,
                         project_id=project_id,
                         workspace_id=workspace_id,
                         created_by_id=created_by_id,
                         updated_by_id=updated_by_id,
                     )
-                    for user in assignees
+                    for assignee_id in assignees
                 ],
                 batch_size=10,
             )
@@ -158,14 +161,14 @@ class IssueCreateSerializer(BaseSerializer):
             IssueLabel.objects.bulk_create(
                 [
                     IssueLabel(
-                        label=label,
+                        label_id=label_id,
                         issue=issue,
                         project_id=project_id,
                         workspace_id=workspace_id,
                         created_by_id=created_by_id,
                         updated_by_id=updated_by_id,
                     )
-                    for label in labels
+                    for label_id in labels
                 ],
                 batch_size=10,
             )
@@ -187,14 +190,14 @@ class IssueCreateSerializer(BaseSerializer):
             IssueAssignee.objects.bulk_create(
                 [
                     IssueAssignee(
-                        assignee=user,
+                        assignee_id=assignee_id,
                         issue=instance,
                         project_id=project_id,
                         workspace_id=workspace_id,
                         created_by_id=created_by_id,
                         updated_by_id=updated_by_id,
                     )
-                    for user in assignees
+                    for assignee_id in assignees
                 ],
                 batch_size=10,
             )
@@ -204,14 +207,14 @@ class IssueCreateSerializer(BaseSerializer):
             IssueLabel.objects.bulk_create(
                 [
                     IssueLabel(
-                        label=label,
+                        label_id=label_id,
                         issue=instance,
                         project_id=project_id,
                         workspace_id=workspace_id,
                         created_by_id=created_by_id,
                         updated_by_id=updated_by_id,
                     )
-                    for label in labels
+                    for label_id in labels
                 ],
                 batch_size=10,
             )
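The create()/update() hunks above change the bulk_create comprehensions from model instances (assignee=user, label=label) to raw ids (assignee_id=assignee_id, label_id=label_id), which matches the values_list querysets and avoids fetching User or Label rows at all. A framework-free illustration of the same pattern; the dataclass is a stand-in for the through model, not the project's actual code:

from dataclasses import dataclass
from uuid import UUID, uuid4

@dataclass
class IssueAssigneeRow:          # stand-in for the IssueAssignee through model
    assignee_id: UUID
    issue_id: UUID

issue_id = uuid4()
assignee_ids = [uuid4(), uuid4()]   # ids arriving straight from validated_data

# Same shape as the bulk_create comprehension: rows are built from ids only.
rows = [IssueAssigneeRow(assignee_id=a_id, issue_id=issue_id) for a_id in assignee_ids]
print(len(rows))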
@@ -220,157 +223,34 @@ class IssueCreateSerializer(BaseSerializer):
             instance.updated_at = timezone.now()
         return super().update(instance, validated_data)

+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        if "assignees" in self.fields:
+            if "assignees" in self.expand:
+                from .user import UserLiteSerializer
+
+                data["assignees"] = UserLiteSerializer(
+                    instance.assignees.all(), many=True
+                ).data
+            else:
+                data["assignees"] = [
+                    str(assignee.id) for assignee in instance.assignees.all()
+                ]
+        if "labels" in self.fields:
+            if "labels" in self.expand:
+                data["labels"] = LabelSerializer(instance.labels.all(), many=True).data
+            else:
+                data["labels"] = [str(label.id) for label in instance.labels.all()]
+
+        return data
-
-
-class IssueActivitySerializer(BaseSerializer):
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
-    class Meta:
-        model = IssueActivity
-        fields = "__all__"
-
-
-class IssuePropertySerializer(BaseSerializer):
-    class Meta:
-        model = IssueProperty
-        fields = "__all__"
-        read_only_fields = [
-            "user",
-            "workspace",
-            "project",
-        ]
-
-
 class LabelSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
     class Meta:
         model = Label
         fields = "__all__"
         read_only_fields = [
-            "workspace",
-            "project",
-        ]
-
-
-class LabelLiteSerializer(BaseSerializer):
-    class Meta:
-        model = Label
-        fields = [
             "id",
-            "name",
-            "color",
-        ]
-
-
-class IssueLabelSerializer(BaseSerializer):
-
-    class Meta:
-        model = IssueLabel
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-        ]
-
-
-class IssueRelationSerializer(BaseSerializer):
-    issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
-
-    class Meta:
-        model = IssueRelation
-        fields = [
-            "issue_detail",
-            "relation_type",
-            "related_issue",
-            "issue",
-            "id"
-        ]
-        read_only_fields = [
-            "workspace",
-            "project",
-        ]
-
-
-class RelatedIssueSerializer(BaseSerializer):
-    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
-
-    class Meta:
-        model = IssueRelation
-        fields = [
-            "issue_detail",
-            "relation_type",
-            "related_issue",
-            "issue",
-            "id"
-        ]
-        read_only_fields = [
-            "workspace",
-            "project",
-        ]
-
-
-class IssueAssigneeSerializer(BaseSerializer):
-    assignee_details = UserLiteSerializer(read_only=True, source="assignee")
-
-    class Meta:
-        model = IssueAssignee
-        fields = "__all__"
-
-
-class CycleBaseSerializer(BaseSerializer):
-    class Meta:
-        model = Cycle
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
-class IssueCycleDetailSerializer(BaseSerializer):
-    cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
-
-    class Meta:
-        model = CycleIssue
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
-class ModuleBaseSerializer(BaseSerializer):
-    class Meta:
-        model = Module
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
-class IssueModuleDetailSerializer(BaseSerializer):
-    module_detail = ModuleBaseSerializer(read_only=True, source="module")
-
-    class Meta:
-        model = ModuleIssue
-        fields = "__all__"
-        read_only_fields = [
             "workspace",
             "project",
             "created_by",
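The to_representation() added above returns nested objects when a field name appears in self.expand (an attribute supplied by the serializer's base class in this codebase, per the diff) and falls back to plain id strings otherwise. A framework-free sketch of that branching with made-up data:

def represent_assignees(assignees, expand):
    # assignees: list of (id, display_name) tuples standing in for User rows.
    if "assignees" in expand:
        # expanded form: nested objects, as a lite user serializer would produce
        return [{"id": str(i), "display_name": name} for i, name in assignees]
    # default form: bare id strings
    return [str(i) for i, _ in assignees]

data = [(1, "alice"), (2, "bob")]
print(represent_assignees(data, expand=[]))
print(represent_assignees(data, expand=["assignees"]))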
@@ -381,19 +261,18 @@ class IssueModuleDetailSerializer(BaseSerializer):


 class IssueLinkSerializer(BaseSerializer):
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
     class Meta:
         model = IssueLink
         fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
+            "issue",
             "created_by",
             "updated_by",
             "created_at",
             "updated_at",
-            "issue",
         ]

     # Validation if url already exists
@@ -412,73 +291,24 @@ class IssueAttachmentSerializer(BaseSerializer):
         model = IssueAttachment
         fields = "__all__"
         read_only_fields = [
+            "id",
+            "workspace",
+            "project",
+            "issue",
             "created_by",
             "updated_by",
             "created_at",
             "updated_at",
-            "workspace",
-            "project",
-            "issue",
         ]


-class IssueReactionSerializer(BaseSerializer):
-
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
-    class Meta:
-        model = IssueReaction
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "issue",
-            "actor",
-        ]
-
-
-class CommentReactionLiteSerializer(BaseSerializer):
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
-    class Meta:
-        model = CommentReaction
-        fields = [
-            "id",
-            "reaction",
-            "comment",
-            "actor_detail",
-        ]
-
-
-class CommentReactionSerializer(BaseSerializer):
-    class Meta:
-        model = CommentReaction
-        fields = "__all__"
-        read_only_fields = ["workspace", "project", "comment", "actor"]
-
-
-class IssueVoteSerializer(BaseSerializer):
-
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
-    class Meta:
-        model = IssueVote
-        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
-        read_only_fields = fields
-
-
 class IssueCommentSerializer(BaseSerializer):
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
     is_member = serializers.BooleanField(read_only=True)

     class Meta:
         model = IssueComment
-        fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
             "issue",
@@ -487,58 +317,73 @@ class IssueCommentSerializer(BaseSerializer):
             "created_at",
             "updated_at",
         ]
+        exclude = [
+            "comment_stripped",
+            "comment_json",
+        ]
+
+    def validate(self, data):
+        try:
+            if(data.get("comment_html", None) is not None):
+                parsed = html.fromstring(data["comment_html"])
+                parsed_str = html.tostring(parsed, encoding='unicode')
+                data["comment_html"] = parsed_str
+
+        except Exception as e:
+            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        return data


-class IssueStateFlatSerializer(BaseSerializer):
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
-    class Meta:
-        model = Issue
-        fields = [
-            "id",
-            "sequence_id",
-            "name",
-            "state_detail",
-            "project_detail",
-        ]
-
-
-# Issue Serializer with state details
-class IssueStateSerializer(BaseSerializer):
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
-    attachment_count = serializers.IntegerField(read_only=True)
-    link_count = serializers.IntegerField(read_only=True)
-
-    class Meta:
-        model = Issue
-        fields = "__all__"
-
-
-class IssueSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateSerializer(read_only=True, source="state")
-    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
-    label_details = LabelSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
-    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
-    issue_cycle = IssueCycleDetailSerializer(read_only=True)
-    issue_module = IssueModuleDetailSerializer(read_only=True)
-    issue_link = IssueLinkSerializer(read_only=True, many=True)
-    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+class IssueActivitySerializer(BaseSerializer):
+    class Meta:
+        model = IssueActivity
+        exclude = [
+            "created_by",
+            "updated_by",
+        ]
+
+
+class CycleIssueSerializer(BaseSerializer):
+    cycle = CycleSerializer(read_only=True)
+
+    class Meta:
+        fields = [
+            "cycle",
+        ]
+
+
+class ModuleIssueSerializer(BaseSerializer):
+    module = ModuleSerializer(read_only=True)
+
+    class Meta:
+        fields = [
+            "module",
+        ]
+
+
+class LabelLiteSerializer(BaseSerializer):
+    class Meta:
+        model = Label
+        fields = [
+            "id",
+            "name",
+            "color",
+        ]
+
+
+class IssueExpandSerializer(BaseSerializer):
+    cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+    module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
+    labels = LabelLiteSerializer(read_only=True, many=True)
+    assignees = UserLiteSerializer(read_only=True, many=True)
+    state = StateLiteSerializer(read_only=True)

     class Meta:
         model = Issue
         fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
             "created_by",
@@ -546,70 +391,3 @@ class IssueSerializer(BaseSerializer):
             "created_at",
             "updated_at",
         ]
-
-
-class IssueLiteSerializer(DynamicBaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    cycle_id = serializers.UUIDField(read_only=True)
-    module_id = serializers.UUIDField(read_only=True)
-    attachment_count = serializers.IntegerField(read_only=True)
-    link_count = serializers.IntegerField(read_only=True)
-    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
-
-    class Meta:
-        model = Issue
-        fields = "__all__"
-        read_only_fields = [
-            "start_date",
-            "target_date",
-            "completed_at",
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
-class IssuePublicSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
-    votes = IssueVoteSerializer(read_only=True, many=True)
-
-    class Meta:
-        model = Issue
-        fields = [
-            "id",
-            "name",
-            "description_html",
-            "sequence_id",
-            "state",
-            "state_detail",
-            "project",
-            "project_detail",
-            "workspace",
-            "priority",
-            "target_date",
-            "reactions",
-            "votes",
-        ]
-        read_only_fields = fields
-
-
-class IssueSubscriberSerializer(BaseSerializer):
-    class Meta:
-        model = IssueSubscriber
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "issue",
-        ]
@@ -1,36 +1,38 @@
-# Third Party imports
+# Third party imports
 from rest_framework import serializers

 # Module imports
 from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .project import ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer

 from plane.db.models import (
     User,
     Module,
+    ModuleLink,
     ModuleMember,
     ModuleIssue,
-    ModuleLink,
-    ModuleFavorite,
+    ProjectMember,
 )


-class ModuleWriteSerializer(BaseSerializer):
+class ModuleSerializer(BaseSerializer):
     members = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+        child=serializers.PrimaryKeyRelatedField(
+            queryset=User.objects.values_list("id", flat=True)
+        ),
         write_only=True,
         required=False,
     )
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    total_issues = serializers.IntegerField(read_only=True)
+    cancelled_issues = serializers.IntegerField(read_only=True)
+    completed_issues = serializers.IntegerField(read_only=True)
+    started_issues = serializers.IntegerField(read_only=True)
+    unstarted_issues = serializers.IntegerField(read_only=True)
+    backlog_issues = serializers.IntegerField(read_only=True)

     class Meta:
         model = Module
         fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
             "created_by",
@@ -41,12 +43,23 @@ class ModuleWriteSerializer(BaseSerializer):

     def to_representation(self, instance):
         data = super().to_representation(instance)
-        data['members'] = [str(member.id) for member in instance.members.all()]
+        data["members"] = [str(member.id) for member in instance.members.all()]
         return data

     def validate(self, data):
-        if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
+        if (
+            data.get("start_date", None) is not None
+            and data.get("target_date", None) is not None
+            and data.get("start_date", None) > data.get("target_date", None)
+        ):
             raise serializers.ValidationError("Start date cannot exceed target date")
+
+        if data.get("members", []):
+            data["members"] = ProjectMember.objects.filter(
+                project_id=self.context.get("project_id"),
+                member_id__in=data["members"],
+            ).values_list("member_id", flat=True)
+
         return data

     def create(self, validated_data):
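Both this ModuleSerializer.validate() and the issue serializer earlier apply the same guard: a start_date later than target_date is rejected. A standalone illustration of that check using datetime.date:

from datetime import date

def check_dates(start_date, target_date):
    if start_date is not None and target_date is not None and start_date > target_date:
        raise ValueError("Start date cannot exceed target date")

check_dates(date(2023, 9, 1), date(2023, 9, 30))       # accepted
try:
    check_dates(date(2023, 10, 1), date(2023, 9, 30))  # rejected
except ValueError as exc:
    print(exc)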
@@ -99,23 +112,7 @@ class ModuleWriteSerializer(BaseSerializer):
         return super().update(instance, validated_data)


-class ModuleFlatSerializer(BaseSerializer):
-    class Meta:
-        model = Module
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
 class ModuleIssueSerializer(BaseSerializer):
-    module_detail = ModuleFlatSerializer(read_only=True, source="module")
-    issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
     sub_issues_count = serializers.IntegerField(read_only=True)

     class Meta:
@@ -133,8 +130,6 @@ class ModuleIssueSerializer(BaseSerializer):


 class ModuleLinkSerializer(BaseSerializer):
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
     class Meta:
         model = ModuleLink
         fields = "__all__"
@@ -159,40 +154,8 @@ class ModuleLinkSerializer(BaseSerializer):
         return ModuleLink.objects.create(**validated_data)


-class ModuleSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    lead_detail = UserLiteSerializer(read_only=True, source="lead")
-    members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
-    link_module = ModuleLinkSerializer(read_only=True, many=True)
-    is_favorite = serializers.BooleanField(read_only=True)
-    total_issues = serializers.IntegerField(read_only=True)
-    cancelled_issues = serializers.IntegerField(read_only=True)
-    completed_issues = serializers.IntegerField(read_only=True)
-    started_issues = serializers.IntegerField(read_only=True)
-    unstarted_issues = serializers.IntegerField(read_only=True)
-    backlog_issues = serializers.IntegerField(read_only=True)
-
+class ModuleLiteSerializer(BaseSerializer):
     class Meta:
         model = Module
         fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-
-class ModuleFavoriteSerializer(BaseSerializer):
-    module_detail = ModuleFlatSerializer(source="module", read_only=True)
-
-    class Meta:
-        model = ModuleFavorite
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "user",
-        ]
@@ -2,30 +2,60 @@
 from rest_framework import serializers

 # Module imports
-from .base import BaseSerializer, DynamicBaseSerializer
-from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
-from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
-from plane.db.models import (
-    Project,
-    ProjectMember,
-    ProjectMemberInvite,
-    ProjectIdentifier,
-    ProjectFavorite,
-    ProjectDeployBoard,
-    ProjectPublicMember,
-)
+from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate
+from .base import BaseSerializer


 class ProjectSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    total_members = serializers.IntegerField(read_only=True)
+    total_cycles = serializers.IntegerField(read_only=True)
+    total_modules = serializers.IntegerField(read_only=True)
+    is_member = serializers.BooleanField(read_only=True)
+    sort_order = serializers.FloatField(read_only=True)
+    member_role = serializers.IntegerField(read_only=True)
+    is_deployed = serializers.BooleanField(read_only=True)

     class Meta:
         model = Project
         fields = "__all__"
         read_only_fields = [
+            "id",
+            'emoji',
             "workspace",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
         ]

+    def validate(self, data):
+        # Check project lead should be a member of the workspace
+        if (
+            data.get("project_lead", None) is not None
+            and not WorkspaceMember.objects.filter(
+                workspace_id=self.context["workspace_id"],
+                member_id=data.get("project_lead"),
+            ).exists()
+        ):
+            raise serializers.ValidationError(
+                "Project lead should be a user in the workspace"
+            )
+
+        # Check default assignee should be a member of the workspace
+        if (
+            data.get("default_assignee", None) is not None
+            and not WorkspaceMember.objects.filter(
+                workspace_id=self.context["workspace_id"],
+                member_id=data.get("default_assignee"),
+            ).exists()
+        ):
+            raise serializers.ValidationError(
+                "Default assignee should be a user in the workspace"
+            )
+
+        return data
+
     def create(self, validated_data):
         identifier = validated_data.get("identifier", "").strip().upper()
         if identifier == "":
@@ -35,6 +65,7 @@ class ProjectSerializer(BaseSerializer):
             name=identifier, workspace_id=self.context["workspace_id"]
         ).exists():
             raise serializers.ValidationError(detail="Project Identifier is taken")
+
         project = Project.objects.create(
             **validated_data, workspace_id=self.context["workspace_id"]
         )
@@ -45,36 +76,6 @@ class ProjectSerializer(BaseSerializer):
         )
         return project

-    def update(self, instance, validated_data):
-        identifier = validated_data.get("identifier", "").strip().upper()
-
-        # If identifier is not passed update the project and return
-        if identifier == "":
-            project = super().update(instance, validated_data)
-            return project
-
-        # If no Project Identifier is found create it
-        project_identifier = ProjectIdentifier.objects.filter(
-            name=identifier, workspace_id=instance.workspace_id
-        ).first()
-        if project_identifier is None:
-            project = super().update(instance, validated_data)
-            project_identifier = ProjectIdentifier.objects.filter(
-                project=project
-            ).first()
-            if project_identifier is not None:
-                project_identifier.name = identifier
-                project_identifier.save()
-            return project
-        # If found check if the project_id to be updated and identifier project id is same
-        if project_identifier.project_id == instance.id:
-            # If same pass update
-            project = super().update(instance, validated_data)
-            return project
-
-        # If not same fail update
-        raise serializers.ValidationError(detail="Project Identifier is already taken")
-

 class ProjectLiteSerializer(BaseSerializer):
     class Meta:
@@ -89,126 +90,3 @@ class ProjectLiteSerializer(BaseSerializer):
             "description",
         ]
         read_only_fields = fields
-
-
-class ProjectListSerializer(DynamicBaseSerializer):
-    is_favorite = serializers.BooleanField(read_only=True)
-    total_members = serializers.IntegerField(read_only=True)
-    total_cycles = serializers.IntegerField(read_only=True)
-    total_modules = serializers.IntegerField(read_only=True)
-    is_member = serializers.BooleanField(read_only=True)
-    sort_order = serializers.FloatField(read_only=True)
-    member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
-    members = serializers.SerializerMethodField()
-
-    def get_members(self, obj):
-        project_members = ProjectMember.objects.filter(project_id=obj.id).values(
-            "id",
-            "member_id",
-            "member__display_name",
-            "member__avatar",
-        )
-        return project_members
-
-    class Meta:
-        model = Project
-        fields = "__all__"
-
-
-class ProjectDetailSerializer(BaseSerializer):
-    # workspace = WorkSpaceSerializer(read_only=True)
-    default_assignee = UserLiteSerializer(read_only=True)
-    project_lead = UserLiteSerializer(read_only=True)
-    is_favorite = serializers.BooleanField(read_only=True)
-    total_members = serializers.IntegerField(read_only=True)
-    total_cycles = serializers.IntegerField(read_only=True)
-    total_modules = serializers.IntegerField(read_only=True)
-    is_member = serializers.BooleanField(read_only=True)
-    sort_order = serializers.FloatField(read_only=True)
-    member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
-
-    class Meta:
-        model = Project
-        fields = "__all__"
-
-
-class ProjectMemberSerializer(BaseSerializer):
-    workspace = WorkspaceLiteSerializer(read_only=True)
-    project = ProjectLiteSerializer(read_only=True)
-    member = UserLiteSerializer(read_only=True)
-
-    class Meta:
-        model = ProjectMember
-        fields = "__all__"
-
-
-class ProjectMemberAdminSerializer(BaseSerializer):
-    workspace = WorkspaceLiteSerializer(read_only=True)
-    project = ProjectLiteSerializer(read_only=True)
-    member = UserAdminLiteSerializer(read_only=True)
-
-    class Meta:
-        model = ProjectMember
-        fields = "__all__"
-
-
-class ProjectMemberInviteSerializer(BaseSerializer):
-    project = ProjectLiteSerializer(read_only=True)
-    workspace = WorkspaceLiteSerializer(read_only=True)
-
-    class Meta:
-        model = ProjectMemberInvite
-        fields = "__all__"
-
-
-class ProjectIdentifierSerializer(BaseSerializer):
-    class Meta:
-        model = ProjectIdentifier
-        fields = "__all__"
-
-
-class ProjectFavoriteSerializer(BaseSerializer):
-    class Meta:
-        model = ProjectFavorite
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "user",
-        ]
-
-
-class ProjectMemberLiteSerializer(BaseSerializer):
-    member = UserLiteSerializer(read_only=True)
-    is_subscribed = serializers.BooleanField(read_only=True)
-
-    class Meta:
-        model = ProjectMember
-        fields = ["member", "id", "is_subscribed"]
-        read_only_fields = fields
-
-
-class ProjectDeployBoardSerializer(BaseSerializer):
-    project_details = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
-    class Meta:
-        model = ProjectDeployBoard
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "anchor",
-        ]
-
-
-class ProjectPublicMemberSerializer(BaseSerializer):
-    class Meta:
-        model = ProjectPublicMember
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "member",
-        ]
@@ -1,17 +1,26 @@
 # Module imports
 from .base import BaseSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer

 from plane.db.models import State


 class StateSerializer(BaseSerializer):
+    def validate(self, data):
+        # If the default is being provided then make all other states default False
+        if data.get("default", False):
+            State.objects.filter(project_id=self.context.get("project_id")).update(
+                default=False
+            )
+        return data
+
     class Meta:
         model = State
         fields = "__all__"
         read_only_fields = [
+            "id",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
             "workspace",
             "project",
         ]
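The validate() added to StateSerializer above keeps at most one default state per project by clearing the flag on every other state before the new default is saved. A small in-memory sketch of the same invariant, with plain dicts standing in for State rows:

states = [
    {"name": "Backlog", "default": True},
    {"name": "Todo", "default": False},
]

def set_default(states, name):
    # Clear the flag everywhere first, then set it on the chosen state,
    # mirroring the filter(...).update(default=False) call in the serializer.
    for s in states:
        s["default"] = False
    for s in states:
        if s["name"] == name:
            s["default"] = True

set_default(states, "Todo")
print([s["name"] for s in states if s["default"]])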
@@ -1,111 +1,6 @@
-# Third party imports
-from rest_framework import serializers
-
-# Module import
+# Module imports
+from plane.db.models import User
 from .base import BaseSerializer
-from plane.db.models import User, Workspace, WorkspaceMemberInvite
-
-
-class UserSerializer(BaseSerializer):
-    class Meta:
-        model = User
-        fields = "__all__"
-        read_only_fields = [
-            "id",
-            "created_at",
-            "updated_at",
-            "is_superuser",
-            "is_staff",
-            "last_active",
-            "last_login_time",
-            "last_logout_time",
-            "last_login_ip",
-            "last_logout_ip",
-            "last_login_uagent",
-            "token_updated_at",
-            "is_onboarded",
-            "is_bot",
-        ]
-        extra_kwargs = {"password": {"write_only": True}}
-
-    # If the user has already filled first name or last name then he is onboarded
-    def get_is_onboarded(self, obj):
-        return bool(obj.first_name) or bool(obj.last_name)
-
-
-class UserMeSerializer(BaseSerializer):
-    class Meta:
-        model = User
-        fields = [
-            "id",
-            "avatar",
-            "cover_image",
-            "date_joined",
-            "display_name",
-            "email",
-            "first_name",
-            "last_name",
-            "is_active",
-            "is_bot",
-            "is_email_verified",
-            "is_managed",
-            "is_onboarded",
-            "is_tour_completed",
-            "mobile_number",
-            "role",
-            "onboarding_step",
-            "user_timezone",
-            "username",
-            "theme",
-            "last_workspace_id",
-        ]
-        read_only_fields = fields
-
-
-class UserMeSettingsSerializer(BaseSerializer):
-    workspace = serializers.SerializerMethodField()
-
-    class Meta:
-        model = User
-        fields = [
-            "id",
-            "email",
-            "workspace",
-        ]
-        read_only_fields = fields
-
-    def get_workspace(self, obj):
-        workspace_invites = WorkspaceMemberInvite.objects.filter(
-            email=obj.email
-        ).count()
-        if obj.last_workspace_id is not None:
-            workspace = Workspace.objects.filter(
-                pk=obj.last_workspace_id, workspace_member__member=obj.id
-            ).first()
-            return {
-                "last_workspace_id": obj.last_workspace_id,
-                "last_workspace_slug": workspace.slug if workspace is not None else "",
-                "fallback_workspace_id": obj.last_workspace_id,
-                "fallback_workspace_slug": workspace.slug if workspace is not None else "",
-                "invites": workspace_invites,
-            }
-        else:
-            fallback_workspace = (
-                Workspace.objects.filter(workspace_member__member_id=obj.id)
-                .order_by("created_at")
-                .first()
-            )
-            return {
-                "last_workspace_id": None,
-                "last_workspace_slug": None,
-                "fallback_workspace_id": fallback_workspace.id
-                if fallback_workspace is not None
-                else None,
-                "fallback_workspace_slug": fallback_workspace.slug
-                if fallback_workspace is not None
-                else None,
-                "invites": workspace_invites,
-            }
-

 class UserLiteSerializer(BaseSerializer):
@@ -116,48 +11,6 @@ class UserLiteSerializer(BaseSerializer):
             "first_name",
             "last_name",
             "avatar",
-            "is_bot",
             "display_name",
         ]
-        read_only_fields = [
-            "id",
-            "is_bot",
-        ]
-
-
-class UserAdminLiteSerializer(BaseSerializer):
-    class Meta:
-        model = User
-        fields = [
-            "id",
-            "first_name",
-            "last_name",
-            "avatar",
-            "is_bot",
-            "display_name",
-            "email",
-        ]
-        read_only_fields = [
-            "id",
-            "is_bot",
-        ]
-
-
-class ChangePasswordSerializer(serializers.Serializer):
-    model = User
-
-    """
-    Serializer for password change endpoint.
-    """
-    old_password = serializers.CharField(required=True)
-    new_password = serializers.CharField(required=True)
-
-
-class ResetPasswordSerializer(serializers.Serializer):
-    model = User
-
-    """
-    Serializer for password change endpoint.
-    """
-    new_password = serializers.CharField(required=True)
-    confirm_password = serializers.CharField(required=True)
+        read_only_fields = fields
@@ -1,39 +1,10 @@
-# Third party imports
-from rest_framework import serializers
-
 # Module imports
+from plane.db.models import Workspace
 from .base import BaseSerializer
-from .user import UserLiteSerializer, UserAdminLiteSerializer
-
-from plane.db.models import (
-    User,
-    Workspace,
-    WorkspaceMember,
-    Team,
-    TeamMember,
-    WorkspaceMemberInvite,
-    WorkspaceTheme,
-)
-
-
-class WorkSpaceSerializer(BaseSerializer):
-    owner = UserLiteSerializer(read_only=True)
-    total_members = serializers.IntegerField(read_only=True)
-    total_issues = serializers.IntegerField(read_only=True)
-
-    class Meta:
-        model = Workspace
-        fields = "__all__"
-        read_only_fields = [
-            "id",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-            "owner",
-        ]
-

 class WorkspaceLiteSerializer(BaseSerializer):
+    """Lite serializer with only required fields"""
+
     class Meta:
         model = Workspace
         fields = [
@@ -42,95 +13,3 @@ class WorkspaceLiteSerializer(BaseSerializer):
             "id",
         ]
         read_only_fields = fields
-
-
-class WorkSpaceMemberSerializer(BaseSerializer):
-    member = UserLiteSerializer(read_only=True)
-    workspace = WorkspaceLiteSerializer(read_only=True)
-
-    class Meta:
-        model = WorkspaceMember
-        fields = "__all__"
-
-
-class WorkspaceMemberMeSerializer(BaseSerializer):
-
-    class Meta:
-        model = WorkspaceMember
-        fields = "__all__"
-
-
-class WorkspaceMemberAdminSerializer(BaseSerializer):
-    member = UserAdminLiteSerializer(read_only=True)
-    workspace = WorkspaceLiteSerializer(read_only=True)
-
-    class Meta:
-        model = WorkspaceMember
-        fields = "__all__"
-
-
-class WorkSpaceMemberInviteSerializer(BaseSerializer):
-    workspace = WorkSpaceSerializer(read_only=True)
-    total_members = serializers.IntegerField(read_only=True)
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
-    class Meta:
-        model = WorkspaceMemberInvite
-        fields = "__all__"
-
-
-class TeamSerializer(BaseSerializer):
-    members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
-    members = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
-        write_only=True,
-        required=False,
-    )
-
-    class Meta:
-        model = Team
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
-        ]
-
-    def create(self, validated_data, **kwargs):
-        if "members" in validated_data:
-            members = validated_data.pop("members")
-            workspace = self.context["workspace"]
-            team = Team.objects.create(**validated_data, workspace=workspace)
-            team_members = [
-                TeamMember(member=member, team=team, workspace=workspace)
-                for member in members
-            ]
-            TeamMember.objects.bulk_create(team_members, batch_size=10)
-            return team
-        team = Team.objects.create(**validated_data)
-        return team
-
-    def update(self, instance, validated_data):
-        if "members" in validated_data:
-            members = validated_data.pop("members")
-            TeamMember.objects.filter(team=instance).delete()
-            team_members = [
-                TeamMember(member=member, team=instance, workspace=instance.workspace)
-                for member in members
-            ]
-            TeamMember.objects.bulk_create(team_members, batch_size=10)
-            return super().update(instance, validated_data)
-        return super().update(instance, validated_data)
-
-
-class WorkspaceThemeSerializer(BaseSerializer):
-    class Meta:
-        model = WorkspaceTheme
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "actor",
-        ]
@@ -1,46 +1,15 @@
-from .analytic import urlpatterns as analytic_urls
-from .asset import urlpatterns as asset_urls
-from .authentication import urlpatterns as authentication_urls
-from .config import urlpatterns as configuration_urls
-from .cycle import urlpatterns as cycle_urls
-from .estimate import urlpatterns as estimate_urls
-from .external import urlpatterns as external_urls
-from .importer import urlpatterns as importer_urls
-from .inbox import urlpatterns as inbox_urls
-from .integration import urlpatterns as integration_urls
-from .issue import urlpatterns as issue_urls
-from .module import urlpatterns as module_urls
-from .notification import urlpatterns as notification_urls
-from .page import urlpatterns as page_urls
-from .project import urlpatterns as project_urls
-from .public_board import urlpatterns as public_board_urls
-from .search import urlpatterns as search_urls
-from .state import urlpatterns as state_urls
-from .user import urlpatterns as user_urls
-from .views import urlpatterns as view_urls
-from .workspace import urlpatterns as workspace_urls
+from .project import urlpatterns as project_patterns
+from .state import urlpatterns as state_patterns
+from .issue import urlpatterns as issue_patterns
+from .cycle import urlpatterns as cycle_patterns
+from .module import urlpatterns as module_patterns
+from .inbox import urlpatterns as inbox_patterns


 urlpatterns = [
-    *analytic_urls,
-    *asset_urls,
-    *authentication_urls,
-    *configuration_urls,
-    *cycle_urls,
-    *estimate_urls,
-    *external_urls,
-    *importer_urls,
-    *inbox_urls,
-    *integration_urls,
-    *issue_urls,
-    *module_urls,
-    *notification_urls,
-    *page_urls,
-    *project_urls,
-    *public_board_urls,
-    *search_urls,
-    *state_urls,
-    *user_urls,
-    *view_urls,
-    *workspace_urls,
+    *project_patterns,
+    *state_patterns,
+    *issue_patterns,
+    *cycle_patterns,
+    *module_patterns,
+    *inbox_patterns,
 ]
@@ -1,87 +1,35 @@
(was: ViewSet-based cycle routing -- CycleViewSet, CycleIssueViewSet, CycleDateCheckEndpoint,
CycleFavoriteViewSet and TransferCycleIssueEndpoint bound through per-verb action maps, plus
the cycles/date-check/ and user-favorite-cycles/ routes)

from django.urls import path

from plane.api.views.cycle import (
    CycleAPIEndpoint,
    CycleIssueAPIEndpoint,
    TransferCycleIssueAPIEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleAPIEndpoint.as_view(),
        name="cycles",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleAPIEndpoint.as_view(),
        name="cycles",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueAPIEndpoint.as_view(),
        name="cycle-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
        CycleIssueAPIEndpoint.as_view(),
        name="cycle-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueAPIEndpoint.as_view(),
        name="transfer-issues",
    ),
]
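The change repeated throughout these URL hunks is the move from DRF ViewSets, whose as_view() takes an explicit action map, to plain view classes whose as_view() takes no arguments and dispatches on the HTTP method. A minimal, self-contained sketch of the two styles; the Demo* names are placeholders, not classes from this repository:

    from rest_framework.response import Response
    from rest_framework.views import APIView
    from rest_framework.viewsets import ViewSet


    class DemoViewSet(ViewSet):
        # Old style: the URLConf binds verbs to named actions, e.g.
        # DemoViewSet.as_view({"get": "list", "post": "create"}).
        def list(self, request):
            return Response([])

        def create(self, request):
            return Response(status=201)


    class DemoAPIEndpoint(APIView):
        # New style: one class per URL shape; as_view() takes no action map and
        # dispatch() routes GET to get(), POST to post(), and so on.
        def get(self, request, slug, project_id):
            return Response([])

        def post(self, request, slug, project_id):
            return Response(status=201)

Either style plugs into path() the same way; only the as_view() call changes.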
@@ -1,53 +1,17 @@
(was: InboxViewSet and InboxIssueViewSet routes for inboxes/, inboxes/<uuid:pk>/ and the nested
inboxes/<uuid:inbox_id>/inbox-issues/ collection and detail endpoints)

from django.urls import path

from plane.api.views import InboxIssueAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        InboxIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
        InboxIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
]
@@ -1,327 +1,62 @@
(was: IssueViewSet, IssueListEndpoint, IssueListGroupedEndpoint, LabelViewSet,
BulkCreateIssueLabelsEndpoint, BulkDeleteIssuesEndpoint, BulkImportIssuesEndpoint,
UserWorkSpaceIssues, SubIssuesEndpoint, IssueLinkViewSet, IssueAttachmentEndpoint,
ExportIssuesEndpoint, IssueActivityEndpoint, IssueCommentViewSet, IssueSubscriberViewSet,
IssueReactionViewSet, CommentReactionViewSet, IssueUserDisplayPropertyEndpoint,
IssueArchiveViewSet, IssueRelationViewSet and IssueDraftViewSet routes covering issues,
v2/v3 issue lists, issue-labels, bulk label and issue operations, my-issues, sub-issues,
issue-links, issue-attachments, export-issues, history, comments, subscribers, reactions,
comment reactions, display properties, archived issues, issue relations and issue drafts)

from django.urls import path

from plane.api.views import (
    IssueAPIEndpoint,
    LabelAPIEndpoint,
    IssueLinkAPIEndpoint,
    IssueCommentAPIEndpoint,
    IssueActivityAPIEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueAPIEndpoint.as_view(),
        name="issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueAPIEndpoint.as_view(),
        name="issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/labels/",
        LabelAPIEndpoint.as_view(),
        name="label",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/labels/<uuid:pk>/",
        LabelAPIEndpoint.as_view(),
        name="label",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/",
        IssueLinkAPIEndpoint.as_view(),
        name="link",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/<uuid:pk>/",
        IssueLinkAPIEndpoint.as_view(),
        name="link",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentAPIEndpoint.as_view(),
        name="comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentAPIEndpoint.as_view(),
        name="comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/",
        IssueActivityAPIEndpoint.as_view(),
        name="activity",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/<uuid:pk>/",
        IssueActivityAPIEndpoint.as_view(),
        name="activity",
    ),
]
@@ -1,104 +1,26 @@
(was: ModuleViewSet, ModuleIssueViewSet, ModuleLinkViewSet, ModuleFavoriteViewSet and
BulkImportModulesEndpoint routes for modules, module-issues, module-links,
user-favorite-modules and bulk-import-modules)

from django.urls import path

from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleAPIEndpoint.as_view(),
        name="modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleAPIEndpoint.as_view(),
        name="modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
        ModuleIssueAPIEndpoint.as_view(),
        name="module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:issue_id>/",
        ModuleIssueAPIEndpoint.as_view(),
        name="module-issues",
    ),
]
@@ -1,79 +0,0 @@
(file removed: page routes -- PageViewSet, PageBlockViewSet, PageFavoriteViewSet and
CreateIssueFromPageBlockEndpoint, covering pages list/create and detail, page-blocks
list/create and detail, user-favorite-pages, and page-block-to-issue creation)
@@ -1,132 +1,16 @@
(was: ProjectViewSet, InviteProjectEndpoint, ProjectMemberViewSet,
ProjectMemberInvitationsViewset, ProjectMemberUserEndpoint, ProjectJoinEndpoint,
AddTeamToProjectEndpoint, ProjectUserViewsEndpoint, ProjectIdentifierEndpoint,
ProjectFavoritesViewSet, LeaveProjectEndpoint and ProjectPublicCoverImagesEndpoint routes
covering projects, project-identifiers, invites, members, join, team-invite, invitations,
project-views, project-members/me, user-favorite-projects, members/leave and project-covers)

from django.urls import path

from plane.api.views import ProjectAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/",
        ProjectAPIEndpoint.as_view(),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/",
        ProjectAPIEndpoint.as_view(),
        name="project",
    ),
]
@@ -1,151 +0,0 @@
(file removed: public deploy-board routes -- ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint, ProjectIssuesPublicEndpoint,
IssueRetrievePublicEndpoint, IssueCommentPublicViewSet, IssueReactionPublicViewSet,
CommentReactionPublicViewSet, InboxIssuePublicViewSet, IssueVotePublicViewSet and
WorkspaceProjectDeployBoardEndpoint, covering project-deploy-boards CRUD plus the
public/workspaces/<str:slug>/project-boards/ settings, issues, issue detail, comments,
issue reactions, comment reactions, inbox-issues, votes and board-list endpoints)
@@ -1,38 +1,16 @@
(was: StateViewSet routes for states list/create, states/<uuid:pk>/ retrieve/patch/delete
and states/<uuid:pk>/mark-default/)

from django.urls import path

from plane.api.views import StateAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/",
        StateAPIEndpoint.as_view(),
        name="states",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:state_id>/",
        StateAPIEndpoint.as_view(),
        name="states",
    ),
]
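A quick client-side sketch of how the flattened routes above would be called. The host, the /api/v1/ prefix and the x-api-key header are assumptions made for illustration; none of them appears in this diff, and the IDs are placeholders:

    import requests

    BASE = "https://plane.example.com/api/v1"      # hypothetical host and prefix
    HEADERS = {"x-api-key": "<api-token>"}         # hypothetical auth header

    # List the states of a project (path shape taken from the new urlpatterns).
    states = requests.get(
        f"{BASE}/workspaces/acme/projects/11111111-2222-3333-4444-555555555555/states/",
        headers=HEADERS,
    ).json()

    # Fetch one state via the <uuid:state_id> detail route.
    state = requests.get(
        f"{BASE}/workspaces/acme/projects/11111111-2222-3333-4444-555555555555/states/"
        "66666666-7777-8888-9999-000000000000/",
        headers=HEADERS,
    ).json()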
@@ -1,180 +1,21 @@
(was: 180 lines of re-exports pulling every internal ViewSet and endpoint from .project,
.user, .oauth, .base, .workspace, .state, .view, .cycle, .asset, .issue, .auth_extended,
.authentication, .module, .api_token, .integration, .importer, .page, .search, .external,
.estimate, .inbox, .analytic, .notification, .exporter and .config)

from .project import ProjectAPIEndpoint

from .state import StateAPIEndpoint

from .issue import (
    IssueAPIEndpoint,
    LabelAPIEndpoint,
    IssueLinkAPIEndpoint,
    IssueCommentAPIEndpoint,
    IssueActivityAPIEndpoint,
)

from .cycle import (
    CycleAPIEndpoint,
    CycleIssueAPIEndpoint,
    TransferCycleIssueAPIEndpoint,
)

from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint

from .inbox import InboxIssueAPIEndpoint
@@ -1,47 +0,0 @@
(file removed: the ApiTokenEndpoint view)

# Python import
from uuid import uuid4

# Third party
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception

# Module import
from .base import BaseAPIView
from plane.db.models import APIToken
from plane.api.serializers import APITokenSerializer


class ApiTokenEndpoint(BaseAPIView):
    def post(self, request):
        label = request.data.get("label", str(uuid4().hex))
        workspace = request.data.get("workspace", False)

        if not workspace:
            return Response(
                {"error": "Workspace is required"}, status=status.HTTP_200_OK
            )

        api_token = APIToken.objects.create(
            label=label, user=request.user, workspace_id=workspace
        )

        serializer = APITokenSerializer(api_token)
        # Token will be only visible while creating
        return Response(
            {"api_token": serializer.data, "token": api_token.token},
            status=status.HTTP_201_CREATED,
        )

    def get(self, request):
        api_tokens = APIToken.objects.filter(user=request.user)
        serializer = APITokenSerializer(api_tokens, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def delete(self, request, pk):
        api_token = APIToken.objects.get(pk=pk)
        api_token.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1,151 +0,0 @@
(file removed: extended auth views -- RequestEmailVerificationEndpoint, VerifyEmailEndpoint,
ForgotPasswordEndpoint, ResetPasswordEndpoint and ChangePasswordEndpoint, built on BaseAPIView
with jwt, Django's PasswordResetTokenGenerator and urlsafe_base64 helpers, DRF, and the
email_verification and forgot_password background tasks; the endpoints send the verification
email, decode the token and flag is_email_verified, mail the uidb64/token reset link, check the
reset token before set_password, and verify the old password before saving a new one)
@ -1,397 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
import uuid
|
|
||||||
import random
|
|
||||||
import string
|
|
||||||
import json
|
|
||||||
import requests
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.core.exceptions import ValidationError
|
|
||||||
from django.core.validators import validate_email
|
|
||||||
from django.conf import settings
|
|
||||||
from django.contrib.auth.hashers import make_password
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.permissions import AllowAny
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework_simplejwt.tokens import RefreshToken
|
|
||||||
|
|
||||||
from sentry_sdk import capture_exception, capture_message
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from . import BaseAPIView
|
|
||||||
from plane.db.models import User
|
|
||||||
from plane.api.serializers import UserSerializer
|
|
||||||
from plane.settings.redis import redis_instance
|
|
||||||
from plane.bgtasks.magic_link_code_task import magic_link
|
|
||||||
|
|
||||||
|
|
||||||
def get_tokens_for_user(user):
|
|
||||||
refresh = RefreshToken.for_user(user)
|
|
||||||
return (
|
|
||||||
str(refresh.access_token),
|
|
||||||
str(refresh),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class SignUpEndpoint(BaseAPIView):
|
|
||||||
permission_classes = (AllowAny,)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
if not settings.ENABLE_SIGNUP:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "New account creation is disabled. Please contact your site administrator"
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = request.data.get("email", False)
|
|
||||||
password = request.data.get("password", False)
|
|
||||||
|
|
||||||
## Raise exception if any of the above are missing
|
|
||||||
if not email or not password:
|
|
||||||
return Response(
|
|
||||||
{"error": "Both email and password are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = email.strip().lower()
|
|
||||||
|
|
||||||
try:
|
|
||||||
validate_email(email)
|
|
||||||
except ValidationError as e:
|
|
||||||
return Response(
|
|
||||||
{"error": "Please provide a valid email address."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if the user already exists
|
|
||||||
if User.objects.filter(email=email).exists():
|
|
||||||
return Response(
|
|
||||||
{"error": "User with this email already exists"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User.objects.create(email=email, username=uuid.uuid4().hex)
|
|
||||||
user.set_password(password)
|
|
||||||
|
|
||||||
# settings last actives for the user
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Send Analytics
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "email",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_UP",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class SignInEndpoint(BaseAPIView):
|
|
||||||
permission_classes = (AllowAny,)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
email = request.data.get("email", False)
|
|
||||||
password = request.data.get("password", False)
|
|
||||||
|
|
||||||
## Raise exception if any of the above are missing
|
|
||||||
if not email or not password:
|
|
||||||
return Response(
|
|
||||||
{"error": "Both email and password are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = email.strip().lower()
|
|
||||||
|
|
||||||
try:
|
|
||||||
validate_email(email)
|
|
||||||
except ValidationError as e:
|
|
||||||
return Response(
|
|
||||||
{"error": "Please provide a valid email address."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User.objects.filter(email=email).first()
|
|
||||||
|
|
||||||
if user is None:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Sign up Process
|
|
||||||
if not user.check_password(password):
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
if not user.is_active:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Your account has been deactivated. Please contact your site administrator."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
|
|
||||||
# settings last active for the user
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
# Send Analytics
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "email",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_IN",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class SignOutEndpoint(BaseAPIView):
    def post(self, request):
        refresh_token = request.data.get("refresh_token", False)

        if not refresh_token:
            capture_message("No refresh token provided")
            return Response(
                {"error": "No refresh token provided"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user = User.objects.get(pk=request.user.id)

        user.last_logout_time = timezone.now()
        user.last_logout_ip = request.META.get("REMOTE_ADDR")

        user.save()

        token = RefreshToken(refresh_token)
        token.blacklist()
        return Response({"message": "success"}, status=status.HTTP_200_OK)


class MagicSignInGenerateEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        email = request.data.get("email", False)

        if not email:
            return Response(
                {"error": "Please provide a valid email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
        )

        ri = redis_instance()

        key = "magic_" + str(email)

        # Check if the key already exists in python
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        current_site = settings.WEB_URL
        magic_link.delay(email, key, token, current_site)

        return Response({"key": key}, status=status.HTTP_200_OK)


class MagicSignInEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", False).strip().lower()

        if not key or user_token == "":
            return Response(
                {"error": "User token and key are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        ri = redis_instance()

        if ri.exists(key):
            data = json.loads(ri.get(key))

            token = data["token"]
            email = data["email"]

            if str(token) == str(user_token):
                if User.objects.filter(email=email).exists():
                    user = User.objects.get(email=email)
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_IN",
                            },
                        )
                else:
                    user = User.objects.create(
                        email=email,
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    # Send event to Jitsu for tracking
                    if settings.ANALYTICS_BASE_API:
                        _ = requests.post(
                            settings.ANALYTICS_BASE_API,
                            headers={
                                "Content-Type": "application/json",
                                "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                            },
                            json={
                                "event_id": uuid.uuid4().hex,
                                "event_data": {
                                    "medium": "code",
                                },
                                "user": {"email": email, "id": str(user.id)},
                                "device_ctx": {
                                    "ip": request.META.get("REMOTE_ADDR"),
                                    "user_agent": request.META.get("HTTP_USER_AGENT"),
                                },
                                "event_type": "SIGN_UP",
                            },
                        )

                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                }

                return Response(data, status=status.HTTP_200_OK)

            else:
                return Response(
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

        else:
            return Response(
                {"error": "The magic code/link has expired please try again"},
                status=status.HTTP_400_BAD_REQUEST,
            )
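Illustration only, not part of the source above: a minimal client-side sketch of the two-step magic sign-in flow. The host and URL paths below are assumptions, not values taken from the repository.

# Hypothetical usage of the magic sign-in endpoints (host and paths are assumed).
import requests

BASE = "https://plane.example.com"

# Step 1: request a magic code; the server stores it in Redis under "magic_<email>"
# for 600 seconds and returns that key.
gen = requests.post(f"{BASE}/api/magic-generate/", json={"email": "user@example.com"})
key = gen.json()["key"]

# Step 2: exchange the emailed code plus the key for JWT tokens.
code = input("Enter the code from the email: ")
signin = requests.post(f"{BASE}/api/magic-sign-in/", json={"key": key, "token": code})
tokens = signin.json()  # {"access_token": ..., "refresh_token": ...}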
@@ -1,27 +1,25 @@
 # Python imports
 import zoneinfo
+import json
 
 # Django imports
-from django.urls import resolve
 from django.conf import settings
-from django.utils import timezone
 from django.db import IntegrityError
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.utils import timezone
 
-# Third part imports
+# Third party imports
-from rest_framework import status
-from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.response import Response
-from rest_framework.exceptions import APIException
 from rest_framework.views import APIView
-from rest_framework.filters import SearchFilter
+from rest_framework.response import Response
 from rest_framework.permissions import IsAuthenticated
+from rest_framework import status
 from sentry_sdk import capture_exception
-from django_filters.rest_framework import DjangoFilterBackend
 
 # Module imports
+from plane.api.middleware.api_authentication import APIKeyAuthentication
+from plane.api.rate_limit import ApiKeyRateThrottle
 from plane.utils.paginator import BasePaginator
+from plane.bgtasks.webhook_task import send_webhook
 
 
 class TimezoneMixin:
@@ -29,6 +27,7 @@ class TimezoneMixin:
     This enables timezone conversion according
     to the user set timezone
     """
+
     def initial(self, request, *args, **kwargs):
         super().initial(request, *args, **kwargs)
         if request.user.is_authenticated:
@@ -37,109 +36,50 @@ class TimezoneMixin:
             timezone.deactivate()
 
 
-class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
+class WebhookMixin:
+    webhook_event = None
+    bulk = False
 
-    model = None
+    def finalize_response(self, request, response, *args, **kwargs):
+        response = super().finalize_response(request, response, *args, **kwargs)
 
-    permission_classes = [
+        # Check for the case should webhook be sent
-        IsAuthenticated,
+        if (
-    ]
+            self.webhook_event
+            and self.request.method in ["POST", "PATCH", "DELETE"]
+            and response.status_code in [200, 201, 204]
+        ):
+            # Push the object to delay
+            send_webhook.delay(
+                event=self.webhook_event,
+                payload=response.data,
+                kw=self.kwargs,
+                action=self.request.method,
+                slug=self.workspace_slug,
+                bulk=self.bulk,
+            )
 
-    filter_backends = (
+        return response
-        DjangoFilterBackend,
-        SearchFilter,
-    )
-
-    filterset_fields = []
-
-    search_fields = []
-
-    def get_queryset(self):
-        try:
-            return self.model.objects.all()
-        except Exception as e:
-            capture_exception(e)
-            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
-
-    def handle_exception(self, exc):
-        """
-        Handle any exception that occurs, by returning an appropriate response,
-        or re-raising the error.
-        """
-        try:
-            response = super().handle_exception(exc)
-            return response
-        except Exception as e:
-            if isinstance(e, IntegrityError):
-                return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
-
-            if isinstance(e, ValidationError):
-                return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
-
-            if isinstance(e, ObjectDoesNotExist):
-                model_name = str(exc).split(" matching query does not exist.")[0]
-                return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
-
-            if isinstance(e, KeyError):
-                capture_exception(e)
-                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
-
-            print(e) if settings.DEBUG else print("Server Error")
-            capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-    def dispatch(self, request, *args, **kwargs):
-        try:
-            response = super().dispatch(request, *args, **kwargs)
-
-            if settings.DEBUG:
-                from django.db import connection
-
-                print(
-                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
-                )
-            return response
-
-        except Exception as exc:
-            response = self.handle_exception(exc)
-            return exc
-
-    @property
-    def workspace_slug(self):
-        return self.kwargs.get("slug", None)
-
-    @property
-    def project_id(self):
-        project_id = self.kwargs.get("project_id", None)
-        if project_id:
-            return project_id
-
-        if resolve(self.request.path_info).url_name == "project":
-            return self.kwargs.get("pk", None)
-
 
 class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
+    authentication_classes = [
+        APIKeyAuthentication,
+    ]
+
     permission_classes = [
         IsAuthenticated,
     ]
 
-    filter_backends = (
+    throttle_classes = [
-        DjangoFilterBackend,
+        ApiKeyRateThrottle,
-        SearchFilter,
+    ]
-    )
-
-    filterset_fields = []
-
-    search_fields = []
 
     def filter_queryset(self, queryset):
        for backend in list(self.filter_backends):
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset
 
     def handle_exception(self, exc):
         """
         Handle any exception that occurs, by returning an appropriate response,
@@ -150,27 +90,43 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
             return response
         except Exception as e:
             if isinstance(e, IntegrityError):
-                return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    {"error": "The payload is not valid"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
             if isinstance(e, ValidationError):
-                return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    {
+                        "error": "The provided payload is not valid please try with a valid payload"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
             if isinstance(e, ObjectDoesNotExist):
                 model_name = str(exc).split(" matching query does not exist.")[0]
-                return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
+                return Response(
+                    {"error": f"{model_name} does not exist."},
+                    status=status.HTTP_404_NOT_FOUND,
+                )
 
             if isinstance(e, KeyError):
-                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    {"error": f"key {e} does not exist"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
-            print(e) if settings.DEBUG else print("Server Error")
+            if settings.DEBUG:
+                print(e)
             capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
 
     def dispatch(self, request, *args, **kwargs):
         try:
             response = super().dispatch(request, *args, **kwargs)
 
             if settings.DEBUG:
                 from django.db import connection
 
@@ -178,11 +134,25 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
                     f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                 )
             return response
 
         except Exception as exc:
             response = self.handle_exception(exc)
             return exc
 
+    def finalize_response(self, request, response, *args, **kwargs):
+        # Call super to get the default response
+        response = super().finalize_response(request, response, *args, **kwargs)
+
+        # Add custom headers if they exist in the request META
+        ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
+        if ratelimit_remaining is not None:
+            response["X-RateLimit-Remaining"] = ratelimit_remaining
+
+        ratelimit_reset = request.META.get("X-RateLimit-Reset")
+        if ratelimit_reset is not None:
+            response["X-RateLimit-Reset"] = ratelimit_reset
+
+        return response
+
     @property
     def workspace_slug(self):
         return self.kwargs.get("slug", None)
@@ -190,3 +160,17 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
     @property
     def project_id(self):
         return self.kwargs.get("project_id", None)
+
+    @property
+    def fields(self):
+        fields = [
+            field for field in self.request.GET.get("fields", "").split(",") if field
+        ]
+        return fields if fields else None
+
+    @property
+    def expand(self):
+        expand = [
+            expand for expand in self.request.GET.get("expand", "").split(",") if expand
+        ]
+        return expand if expand else None
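Illustration only, not taken from the commits above: a minimal sketch of how an endpoint could opt into the new WebhookMixin. The LabelAPIEndpoint name and its payload are assumptions made up for this example.

# Hypothetical endpoint using WebhookMixin (class name and payload are assumed).
class LabelAPIEndpoint(WebhookMixin, BaseAPIView):
    webhook_event = "label"  # forwarded to send_webhook.delay on POST/PATCH/DELETE
    bulk = False             # set True when the response payload is a list of objects

    def post(self, request, slug, project_id):
        # Any 200/201/204 response returned here is also pushed to the workspace's
        # webhooks by WebhookMixin.finalize_response.
        return Response({"name": request.data.get("name")}, status=status.HTTP_201_CREATED)

Responses from BaseAPIView additionally echo the X-RateLimit-Remaining and X-RateLimit-Reset headers whenever the throttle has placed them in request.META.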
@@ -1,37 +0,0 @@
-# Python imports
-import os
-
-# Django imports
-from django.conf import settings
-
-# Third party imports
-from rest_framework.permissions import AllowAny
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from .base import BaseAPIView
-
-
-class ConfigurationEndpoint(BaseAPIView):
-    permission_classes = [
-        AllowAny,
-    ]
-
-    def get(self, request):
-        data = {}
-        data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None)
-        data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None)
-        data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
-        data["magic_login"] = (
-            bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
-        ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
-        data["email_password_login"] = (
-            os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
-        )
-        data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None)
-        data["posthog_api_key"] = os.environ.get("POSTHOG_API_KEY", None)
-        data["posthog_host"] = os.environ.get("POSTHOG_HOST", None)
-        data["has_unsplash_configured"] = bool(settings.UNSPLASH_ACCESS_KEY)
-        return Response(data, status=status.HTTP_200_OK)
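Illustration only, not from the commits above: a sketch of how a client might have consumed the removed configuration payload. The /api/configs/ path and host are assumptions.

# Hypothetical consumer of the removed configuration payload (URL is assumed).
import requests

cfg = requests.get("https://plane.example.com/api/configs/").json()
if cfg.get("magic_login"):
    print("Magic-link sign in is enabled")
if cfg.get("email_password_login"):
    print("Email/password sign in is enabled")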
@@ -2,81 +2,47 @@
 import json
 
 # Django imports
-from django.db.models import (
+from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
-    Func,
-    F,
-    Q,
-    Exists,
-    OuterRef,
-    Count,
-    Prefetch,
-    Sum,
-)
-from django.core import serializers
 from django.utils import timezone
-from django.utils.decorators import method_decorator
+from django.core import serializers
-from django.views.decorators.gzip import gzip_page
 
 # Third party imports
 from rest_framework.response import Response
 from rest_framework import status
-from sentry_sdk import capture_exception
 
 # Module imports
-from . import BaseViewSet, BaseAPIView
+from .base import BaseAPIView, WebhookMixin
+from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment
+from plane.app.permissions import ProjectEntityPermission
 from plane.api.serializers import (
     CycleSerializer,
     CycleIssueSerializer,
-    CycleFavoriteSerializer,
-    IssueStateSerializer,
-    CycleWriteSerializer,
-)
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
-    User,
-    Cycle,
-    CycleIssue,
-    Issue,
-    CycleFavorite,
-    IssueLink,
-    IssueAttachment,
-    Label,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.utils.analytics_plot import burndown_plot
 
 
-class CycleViewSet(BaseViewSet):
+class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This viewset automatically provides `list`, `create`, `retrieve`,
+    `update` and `destroy` actions related to cycle.
+
+    """
+
     serializer_class = CycleSerializer
     model = Cycle
+    webhook_event = "cycle"
     permission_classes = [
         ProjectEntityPermission,
     ]
 
-    def perform_create(self, serializer):
-        serializer.save(
-            project_id=self.kwargs.get("project_id"), owned_by=self.request.user
-        )
-
     def get_queryset(self):
-        subquery = CycleFavorite.objects.filter(
+        return (
-            user=self.request.user,
+            Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
-            cycle_id=OuterRef("pk"),
-            project_id=self.kwargs.get("project_id"),
-            workspace__slug=self.kwargs.get("slug"),
-        )
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
             .filter(project__project_projectmember__member=self.request.user)
             .select_related("project")
             .select_related("workspace")
             .select_related("owned_by")
-            .annotate(is_favorite=Exists(subquery))
             .annotate(
                 total_issues=Count(
                     "issue_cycle",
@@ -157,142 +123,62 @@ class CycleViewSet(BaseViewSet):
                    ),
                )
            )
-            .prefetch_related(
+            .order_by(self.kwargs.get("order_by", "-created_at"))
-                Prefetch(
-                    "issue_cycle__issue__assignees",
-                    queryset=User.objects.only("avatar", "first_name", "id").distinct(),
-                )
-            )
-            .prefetch_related(
-                Prefetch(
-                    "issue_cycle__issue__labels",
-                    queryset=Label.objects.only("name", "color", "id").distinct(),
-                )
-            )
-            .order_by("-is_favorite", "name")
             .distinct()
         )
 
-    def list(self, request, slug, project_id):
+    def get(self, request, slug, project_id, pk=None):
+        if pk:
+            queryset = self.get_queryset().get(pk=pk)
+            data = CycleSerializer(
+                queryset,
+                fields=self.fields,
+                expand=self.expand,
+            ).data
+            return Response(
+                data,
+                status=status.HTTP_200_OK,
+            )
         queryset = self.get_queryset()
         cycle_view = request.GET.get("cycle_view", "all")
 
-        queryset = queryset.order_by("-is_favorite","-created_at")
-
         # Current Cycle
         if cycle_view == "current":
             queryset = queryset.filter(
                 start_date__lte=timezone.now(),
                 end_date__gte=timezone.now(),
             )
+            data = CycleSerializer(
-            data = CycleSerializer(queryset, many=True).data
+                queryset, many=True, fields=self.fields, expand=self.expand
+            ).data
-            if len(data):
-                assignee_distribution = (
-                    Issue.objects.filter(
-                        issue_cycle__cycle_id=data[0]["id"],
-                        workspace__slug=slug,
-                        project_id=project_id,
-                    )
-                    .annotate(display_name=F("assignees__display_name"))
-                    .annotate(assignee_id=F("assignees__id"))
-                    .annotate(avatar=F("assignees__avatar"))
-                    .values("display_name", "assignee_id", "avatar")
-                    .annotate(
-                        total_issues=Count(
-                            "assignee_id",
-                            filter=Q(archived_at__isnull=True, is_draft=False),
-                        ),
-                    )
-                    .annotate(
-                        completed_issues=Count(
-                            "assignee_id",
-                            filter=Q(
-                                completed_at__isnull=False,
-                                archived_at__isnull=True,
-                                is_draft=False,
-                            ),
-                        )
-                    )
-                    .annotate(
-                        pending_issues=Count(
-                            "assignee_id",
-                            filter=Q(
-                                completed_at__isnull=True,
-                                archived_at__isnull=True,
-                                is_draft=False,
-                            ),
-                        )
-                    )
-                    .order_by("display_name")
-                )
-
-                label_distribution = (
-                    Issue.objects.filter(
-                        issue_cycle__cycle_id=data[0]["id"],
-                        workspace__slug=slug,
-                        project_id=project_id,
-                    )
-                    .annotate(label_name=F("labels__name"))
-                    .annotate(color=F("labels__color"))
-                    .annotate(label_id=F("labels__id"))
-                    .values("label_name", "color", "label_id")
-                    .annotate(
-                        total_issues=Count(
-                            "label_id",
-                            filter=Q(archived_at__isnull=True, is_draft=False),
-                        )
-                    )
-                    .annotate(
-                        completed_issues=Count(
-                            "label_id",
-                            filter=Q(
-                                completed_at__isnull=False,
-                                archived_at__isnull=True,
-                                is_draft=False,
-                            ),
-                        )
-                    )
-                    .annotate(
-                        pending_issues=Count(
-                            "label_id",
-                            filter=Q(
-                                completed_at__isnull=True,
-                                archived_at__isnull=True,
-                                is_draft=False,
-                            ),
-                        )
-                    )
-                    .order_by("label_name")
-                )
-                data[0]["distribution"] = {
-                    "assignees": assignee_distribution,
-                    "labels": label_distribution,
-                    "completion_chart": {},
-                }
-                if data[0]["start_date"] and data[0]["end_date"]:
-                    data[0]["distribution"]["completion_chart"] = burndown_plot(
-                        queryset=queryset.first(),
-                        slug=slug,
-                        project_id=project_id,
-                        cycle_id=data[0]["id"],
-                    )
-
             return Response(data, status=status.HTTP_200_OK)
 
         # Upcoming Cycles
         if cycle_view == "upcoming":
             queryset = queryset.filter(start_date__gt=timezone.now())
-            return Response(
+            return self.paginate(
-                CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+                request=request,
+                queryset=(queryset),
+                on_results=lambda cycles: CycleSerializer(
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
             )
 
         # Completed Cycles
         if cycle_view == "completed":
             queryset = queryset.filter(end_date__lt=timezone.now())
-            return Response(
+            return self.paginate(
-                CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+                request=request,
+                queryset=(queryset),
+                on_results=lambda cycles: CycleSerializer(
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
             )
 
         # Draft Cycles
@@ -301,9 +187,15 @@ class CycleViewSet(BaseViewSet):
                 end_date=None,
                 start_date=None,
             )
+            return self.paginate(
-            return Response(
+                request=request,
-                CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+                queryset=(queryset),
+                on_results=lambda cycles: CycleSerializer(
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
             )
 
         # Incomplete Cycles
@@ -311,16 +203,28 @@ class CycleViewSet(BaseViewSet):
             queryset = queryset.filter(
                 Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
             )
-            return Response(
+            return self.paginate(
-                CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+                request=request,
+                queryset=(queryset),
+                on_results=lambda cycles: CycleSerializer(
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
             )
+        return self.paginate(
-        # If no matching view is found return all cycles
+            request=request,
-        return Response(
+            queryset=(queryset),
-            CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+            on_results=lambda cycles: CycleSerializer(
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+            ).data,
         )
 
-    def create(self, request, slug, project_id):
+    def post(self, request, slug, project_id):
         if (
             request.data.get("start_date", None) is None
             and request.data.get("end_date", None) is None
@@ -344,7 +248,7 @@ class CycleViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )
 
-    def partial_update(self, request, slug, project_id, pk):
+    def patch(self, request, slug, project_id, pk):
         cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
 
         request_data = request.data
@@ -363,115 +267,13 @@ class CycleViewSet(BaseViewSet):
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
-        serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
+        serializer = CycleSerializer(cycle, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 
-    def retrieve(self, request, slug, project_id, pk):
+    def delete(self, request, slug, project_id, pk):
-        queryset = self.get_queryset().get(pk=pk)
-
-        # Assignee Distribution
-        assignee_distribution = (
-            Issue.objects.filter(
-                issue_cycle__cycle_id=pk,
-                workspace__slug=slug,
-                project_id=project_id,
-            )
-            .annotate(first_name=F("assignees__first_name"))
-            .annotate(last_name=F("assignees__last_name"))
-            .annotate(assignee_id=F("assignees__id"))
-            .annotate(avatar=F("assignees__avatar"))
-            .annotate(display_name=F("assignees__display_name"))
-            .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
-            .annotate(
-                total_issues=Count(
-                    "assignee_id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
-                ),
-            )
-            .annotate(
-                completed_issues=Count(
-                    "assignee_id",
-                    filter=Q(
-                        completed_at__isnull=False,
-                        archived_at__isnull=True,
-                        is_draft=False,
-                    ),
-                )
-            )
-            .annotate(
-                pending_issues=Count(
-                    "assignee_id",
-                    filter=Q(
-                        completed_at__isnull=True,
-                        archived_at__isnull=True,
-                        is_draft=False,
-                    ),
-                )
-            )
-            .order_by("first_name", "last_name")
-        )
-
-        # Label Distribution
-        label_distribution = (
-            Issue.objects.filter(
-                issue_cycle__cycle_id=pk,
-                workspace__slug=slug,
-                project_id=project_id,
-            )
-            .annotate(label_name=F("labels__name"))
-            .annotate(color=F("labels__color"))
-            .annotate(label_id=F("labels__id"))
-            .values("label_name", "color", "label_id")
-            .annotate(
-                total_issues=Count(
-                    "label_id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
-                ),
-            )
-            .annotate(
-                completed_issues=Count(
-                    "label_id",
-                    filter=Q(
-                        completed_at__isnull=False,
-                        archived_at__isnull=True,
-                        is_draft=False,
-                    ),
-                )
-            )
-            .annotate(
-                pending_issues=Count(
-                    "label_id",
-                    filter=Q(
-                        completed_at__isnull=True,
-                        archived_at__isnull=True,
-                        is_draft=False,
-                    ),
-                )
-            )
-            .order_by("label_name")
-        )
-
-        data = CycleSerializer(queryset).data
-        data["distribution"] = {
-            "assignees": assignee_distribution,
-            "labels": label_distribution,
-            "completion_chart": {},
-        }
-
-        if queryset.start_date and queryset.end_date:
-            data["distribution"]["completion_chart"] = burndown_plot(
-                queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk
-            )
-
-        return Response(
-            data,
-            status=status.HTTP_200_OK,
-        )
-
-    def destroy(self, request, slug, project_id, pk):
         cycle_issues = list(
             CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
                 "issue", flat=True
@@ -489,7 +291,7 @@ class CycleViewSet(BaseViewSet):
                 }
             ),
             actor_id=str(request.user.id),
-            issue_id=str(pk),
+            issue_id=None,
             project_id=str(project_id),
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
@@ -499,24 +301,24 @@ class CycleViewSet(BaseViewSet):
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class CycleIssueViewSet(BaseViewSet):
+class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This viewset automatically provides `list`, `create`,
+    and `destroy` actions related to cycle issues.
+
+    """
+
     serializer_class = CycleIssueSerializer
     model = CycleIssue
+    webhook_event = "cycle_issue"
+    bulk = True
     permission_classes = [
         ProjectEntityPermission,
     ]
 
-    filterset_fields = [
-        "issue__labels__id",
-        "issue__assignees__id",
-    ]
-
     def get_queryset(self):
-        return self.filter_queryset(
+        return (
-            super()
+            CycleIssue.objects.annotate(
-            .get_queryset()
-            .annotate(
                 sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
@@ -531,15 +333,12 @@ class CycleIssueViewSet(BaseViewSet):
             .select_related("cycle")
             .select_related("issue", "issue__state", "issue__project")
             .prefetch_related("issue__assignees", "issue__labels")
+            .order_by(self.kwargs.get("order_by", "-created_at"))
             .distinct()
         )
 
-    @method_decorator(gzip_page)
+    def get(self, request, slug, project_id, cycle_id):
-    def list(self, request, slug, project_id, cycle_id):
         order_by = request.GET.get("order_by", "created_at")
-        group_by = request.GET.get("group_by", False)
-        sub_group_by = request.GET.get("sub_group_by", False)
-        filters = issue_filters(request.query_params, "GET")
         issues = (
             Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
             .annotate(
@@ -558,7 +357,6 @@ class CycleIssueViewSet(BaseViewSet):
             .prefetch_related("assignees")
             .prefetch_related("labels")
             .order_by(order_by)
-            .filter(**filters)
             .annotate(
                 link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                 .order_by()
@@ -573,29 +371,21 @@ class CycleIssueViewSet(BaseViewSet):
             )
         )
 
-        issues_data = IssueStateSerializer(issues, many=True).data
+        return self.paginate(
+            request=request,
-        if sub_group_by and sub_group_by == group_by:
+            queryset=(issues),
-            return Response(
+            on_results=lambda issues: CycleSerializer(
-                {"error": "Group by and sub group by cannot be same"},
+                issues,
-                status=status.HTTP_400_BAD_REQUEST,
+                many=True,
-            )
+                fields=self.fields,
+                expand=self.expand,
-        if group_by:
+            ).data,
-            grouped_results = group_results(issues_data, group_by, sub_group_by)
-            return Response(
-                grouped_results,
-                status=status.HTTP_200_OK,
-            )
-
-        return Response(
-            issues_data, status=status.HTTP_200_OK
        )
 
-    def create(self, request, slug, project_id, cycle_id):
+    def post(self, request, slug, project_id, cycle_id):
         issues = request.data.get("issues", [])
 
-        if not len(issues):
+        if not issues:
             return Response(
                 {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
             )
@@ -612,6 +402,10 @@ class CycleIssueViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )
 
+        issues = Issue.objects.filter(
+            pk__in=issues, workspace__slug=slug, project_id=project_id
+        ).values_list("id", flat=True)
+
         # Get all CycleIssues already created
         cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
         update_cycle_issue_activity = []
@@ -662,7 +456,7 @@ class CycleIssueViewSet(BaseViewSet):
         # Capture Issue Activity
         issue_activity.delay(
             type="cycle.activity.created",
-            requested_data=json.dumps({"cycles_list": issues}),
+            requested_data=json.dumps({"cycles_list": str(issues)}),
             actor_id=str(self.request.user.id),
             issue_id=None,
             project_id=str(self.kwargs.get("project_id", None)),
@@ -683,9 +477,9 @@ class CycleIssueViewSet(BaseViewSet):
             status=status.HTTP_200_OK,
         )
 
-    def destroy(self, request, slug, project_id, cycle_id, pk):
+    def delete(self, request, slug, project_id, cycle_id, issue_id):
         cycle_issue = CycleIssue.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
+            issue_id=issue_id, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
         )
         issue_id = cycle_issue.issue_id
         cycle_issue.delete()
@@ -698,7 +492,7 @@ class CycleIssueViewSet(BaseViewSet):
                 }
             ),
             actor_id=str(self.request.user.id),
-            issue_id=str(self.kwargs.get("pk", None)),
+            issue_id=str(issue_id),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
@@ -706,74 +500,12 @@ class CycleIssueViewSet(BaseViewSet):
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class CycleDateCheckEndpoint(BaseAPIView):
+class TransferCycleIssueAPIEndpoint(BaseAPIView):
-    permission_classes = [
+    """
-        ProjectEntityPermission,
+    This viewset provides `create` actions for transfering the issues into a particular cycle.
-    ]
 
-    def post(self, request, slug, project_id):
+    """
-        start_date = request.data.get("start_date", False)
-        end_date = request.data.get("end_date", False)
-        cycle_id = request.data.get("cycle_id")
-        if not start_date or not end_date:
-            return Response(
-                {"error": "Start date and end date both are required"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
-        cycles = Cycle.objects.filter(
-            Q(workspace__slug=slug)
-            & Q(project_id=project_id)
-            & (
-                Q(start_date__lte=start_date, end_date__gte=start_date)
-                | Q(start_date__lte=end_date, end_date__gte=end_date)
-                | Q(start_date__gte=start_date, end_date__lte=end_date)
-            )
-        ).exclude(pk=cycle_id)
-
-        if cycles.exists():
-            return Response(
-                {
-                    "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates",
-                    "status": False,
-                }
-            )
-        else:
-            return Response({"status": True}, status=status.HTTP_200_OK)
-
-
-class CycleFavoriteViewSet(BaseViewSet):
-    serializer_class = CycleFavoriteSerializer
-    model = CycleFavorite
-
-    def get_queryset(self):
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(user=self.request.user)
-            .select_related("cycle", "cycle__owned_by")
-        )
-
-    def create(self, request, slug, project_id):
-        serializer = CycleFavoriteSerializer(data=request.data)
-        if serializer.is_valid():
-            serializer.save(user=request.user, project_id=project_id)
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
-    def destroy(self, request, slug, project_id, cycle_id):
-        cycle_favorite = CycleFavorite.objects.get(
-            project=project_id,
-            user=request.user,
-            workspace__slug=slug,
-            cycle_id=cycle_id,
-        )
-        cycle_favorite.delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-
-class TransferCycleIssueEndpoint(BaseAPIView):
     permission_classes = [
         ProjectEntityPermission,
     ]
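Illustration only, not from the commits above: a minimal sketch of calling the new cycles endpoint with the cycle_view, fields and expand query parameters. The URL path, workspace slug, project id and API key are assumptions.

# Hypothetical request against CycleAPIEndpoint (URL and API key are assumed).
import requests

resp = requests.get(
    "https://plane.example.com/api/v1/workspaces/acme/projects/1234/cycles/",
    headers={"X-Api-Key": "plane_api_xxxxxxxx"},  # checked by APIKeyAuthentication
    params={
        "cycle_view": "current",         # current | upcoming | completed | draft | incomplete
        "fields": "id,name,start_date",  # trims serializer output via the fields property
        "expand": "owned_by",            # expands related objects via the expand property
    },
)
print(resp.headers.get("X-RateLimit-Remaining"), resp.json())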
@ -1,83 +1,30 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# Django import
|
# Django improts
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
|
from django.db.models import Q
|
||||||
from django.core.serializers.json import DjangoJSONEncoder
|
from django.core.serializers.json import DjangoJSONEncoder
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseViewSet
|
from .base import BaseAPIView
|
||||||
from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
|
from plane.app.permissions import ProjectLitePermission
|
||||||
from plane.db.models import (
|
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
|
||||||
Inbox,
|
from plane.db.models import InboxIssue, Issue, State, ProjectMember, Project, Inbox
|
||||||
InboxIssue,
|
|
||||||
Issue,
|
|
||||||
State,
|
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
ProjectMember,
|
|
||||||
ProjectDeployBoard,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import (
|
|
||||||
IssueSerializer,
|
|
||||||
InboxSerializer,
|
|
||||||
InboxIssueSerializer,
|
|
||||||
IssueCreateSerializer,
|
|
||||||
IssueStateInboxSerializer,
|
|
||||||
)
|
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
|
|
||||||
|
|
||||||
class InboxViewSet(BaseViewSet):
|
class InboxIssueAPIEndpoint(BaseAPIView):
|
||||||
permission_classes = [
|
"""
|
||||||
ProjectBasePermission,
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
]
|
`update` and `destroy` actions related to inbox issues.
|
||||||
|
|
||||||
serializer_class = InboxSerializer
|
"""
|
||||||
model = Inbox
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return (
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issue_count=Count(
|
|
||||||
"issue_inbox",
|
|
||||||
filter=Q(issue_inbox__status=-2),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.select_related("workspace", "project")
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(project_id=self.kwargs.get("project_id"))
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, pk):
|
|
||||||
inbox = Inbox.objects.get(
|
|
||||||
workspace__slug=slug, project_id=project_id, pk=pk
|
|
||||||
)
|
|
||||||
# Handle default inbox delete
|
|
||||||
if inbox.is_default:
|
|
||||||
return Response(
|
|
||||||
{"error": "You cannot delete the default inbox"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
inbox.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
class InboxIssueViewSet(BaseViewSet):
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectLitePermission,
|
ProjectLitePermission,
|
||||||
]
|
]
|
||||||
@ -90,73 +37,77 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
]
|
]
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
inbox = Inbox.objects.filter(
|
||||||
super()
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
.get_queryset()
|
project_id=self.kwargs.get("project_id"),
|
||||||
.filter(
|
).first()
|
||||||
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id")
|
||||||
|
)
|
||||||
|
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return InboxIssue.objects.none()
|
||||||
|
|
||||||
|
return (
|
||||||
|
InboxIssue.objects.filter(
|
||||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
project_id=self.kwargs.get("project_id"),
|
project_id=self.kwargs.get("project_id"),
|
||||||
inbox_id=self.kwargs.get("inbox_id"),
|
inbox_id=inbox.id,
|
||||||
)
|
)
|
||||||
.select_related("issue", "workspace", "project")
|
.select_related("issue", "workspace", "project")
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
)
|
)
|
||||||
|
|
||||||
def list(self, request, slug, project_id, inbox_id):
|
def get(self, request, slug, project_id, issue_id=None):
|
||||||
filters = issue_filters(request.query_params, "GET")
|
if issue_id:
|
||||||
issues = (
|
inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id)
|
||||||
Issue.objects.filter(
|
inbox_issue_data = InboxIssueSerializer(
|
||||||
issue_inbox__inbox_id=inbox_id,
|
inbox_issue_queryset,
|
||||||
workspace__slug=slug,
|
fields=self.fields,
|
||||||
project_id=project_id,
|
expand=self.expand,
|
||||||
|
).data
|
||||||
|
return Response(
|
||||||
|
inbox_issue_data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
.filter(**filters)
|
issue_queryset = self.get_queryset()
|
||||||
.annotate(bridge_id=F("issue_inbox__id"))
|
return self.paginate(
|
||||||
.select_related("workspace", "project", "state", "parent")
|
request=request,
|
||||||
.prefetch_related("assignees", "labels")
|
queryset=(issue_queryset),
|
||||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
on_results=lambda inbox_issues: InboxIssueSerializer(
|
||||||
.annotate(
|
inbox_issues,
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
many=True,
|
||||||
.order_by()
|
fields=self.fields,
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
expand=self.expand,
|
||||||
.values("count")
|
).data,
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
attachment_count=IssueAttachment.objects.filter(
|
|
||||||
issue=OuterRef("id")
|
|
||||||
)
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"issue_inbox",
|
|
||||||
queryset=InboxIssue.objects.only(
|
|
||||||
"status", "duplicate_to", "snoozed_till", "source"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
|
||||||
return Response(
|
|
||||||
issues_data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id):
|
||||||
def create(self, request, slug, project_id, inbox_id):
|
|
||||||
if not request.data.get("issue", {}).get("name", False):
|
if not request.data.get("issue", {}).get("name", False):
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
)
|
)
|
||||||
|
|
||||||
|
inbox = Inbox.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
pk=project_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Inbox view
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Check for valid priority
|
# Check for valid priority
|
||||||
if not request.data.get("issue", {}).get("priority", "none") in [
|
if not request.data.get("issue", {}).get("priority", "none") in [
|
||||||
"low",
|
"low",
|
||||||
@ -198,48 +149,83 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
issue_id=str(issue.id),
|
issue_id=str(issue.id),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp())
|
epoch=int(timezone.now().timestamp()),
|
||||||
)
|
)
|
||||||
|
|
||||||
# create an inbox issue
|
# create an inbox issue
|
||||||
InboxIssue.objects.create(
|
inbox_issue = InboxIssue.objects.create(
|
||||||
inbox_id=inbox_id,
|
inbox_id=inbox.id,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
source=request.data.get("source", "in-app"),
|
source=request.data.get("source", "in-app"),
|
||||||
)
|
)
|
||||||
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
serializer = InboxIssueSerializer(inbox_issue)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, inbox_id, pk):
|
def patch(self, request, slug, project_id, issue_id):
|
||||||
inbox_issue = InboxIssue.objects.get(
|
inbox = Inbox.objects.filter(
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
workspace__slug=slug, project_id=project_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
pk=project_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Inbox view
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the inbox issue
|
||||||
|
inbox_issue = InboxIssue.objects.get(
|
||||||
|
issue_id=issue_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
inbox_id=inbox.id,
|
||||||
|
)
|
||||||
|
|
||||||
# Get the project member
|
# Get the project member
|
||||||
project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
|
project_member = ProjectMember.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
|
||||||
# Only project members admins and created_by users can access this endpoint
|
# Only project members admins and created_by users can access this endpoint
|
||||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
|
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||||
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
|
request.user.id
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{"error": "You cannot edit inbox issues"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Get issue data
|
# Get issue data
|
||||||
issue_data = request.data.pop("issue", False)
|
issue_data = request.data.pop("issue", False)
|
||||||
|
|
||||||
if bool(issue_data):
|
if bool(issue_data):
|
||||||
issue = Issue.objects.get(
|
issue = Issue.objects.get(
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
pk=issue_id, workspace__slug=slug, project_id=project_id
|
||||||
)
|
)
|
||||||
# Only allow guests and viewers to edit name and description
|
# Only allow guests and viewers to edit name and description
|
||||||
if project_member.role <= 10:
|
if project_member.role <= 10:
|
||||||
# viewers and guests since only viewers and guests
|
# viewers and guests since only viewers and guests
|
||||||
issue_data = {
|
issue_data = {
|
||||||
"name": issue_data.get("name", issue.name),
|
"name": issue_data.get("name", issue.name),
|
||||||
"description_html": issue_data.get("description_html", issue.description_html),
|
"description_html": issue_data.get(
|
||||||
"description": issue_data.get("description", issue.description)
|
"description_html", issue.description_html
|
||||||
|
),
|
||||||
|
"description": issue_data.get("description", issue.description),
|
||||||
}
|
}
|
||||||
|
|
||||||
issue_serializer = IssueCreateSerializer(
|
issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
|
||||||
issue, data=issue_data, partial=True
|
|
||||||
)
|
|
||||||
|
|
||||||
if issue_serializer.is_valid():
|
if issue_serializer.is_valid():
|
||||||
current_instance = issue
|
current_instance = issue
|
||||||
@@ -250,13 +236,13 @@ class InboxIssueViewSet(BaseViewSet):
 type="issue.activity.updated",
 requested_data=requested_data,
 actor_id=str(request.user.id),
-issue_id=str(issue.id),
+issue_id=str(issue_id),
 project_id=str(project_id),
 current_instance=json.dumps(
 IssueSerializer(current_instance).data,
 cls=DjangoJSONEncoder,
 ),
-epoch=int(timezone.now().timestamp())
+epoch=int(timezone.now().timestamp()),
 )
 issue_serializer.save()
 else:
@@ -275,7 +261,7 @@ class InboxIssueViewSet(BaseViewSet):
 # Update the issue state if the issue is rejected or marked as duplicate
 if serializer.data["status"] in [-1, 2]:
 issue = Issue.objects.get(
-pk=inbox_issue.issue_id,
+pk=issue_id,
 workspace__slug=slug,
 project_id=project_id,
 )
@@ -289,7 +275,7 @@ class InboxIssueViewSet(BaseViewSet):
 # Update the issue state if it is accepted
 if serializer.data["status"] in [1]:
 issue = Issue.objects.get(
-pk=inbox_issue.issue_id,
+pk=issue_id,
 workspace__slug=slug,
 project_id=project_id,
 )
@@ -307,253 +293,60 @@ class InboxIssueViewSet(BaseViewSet):
 return Response(serializer.data, status=status.HTTP_200_OK)
 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 else:
-return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
+return Response(
+InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
+)

-def retrieve(self, request, slug, project_id, inbox_id, pk):
+def delete(self, request, slug, project_id, issue_id):
-inbox_issue = InboxIssue.objects.get(
+inbox = Inbox.objects.filter(
-pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+workspace__slug=slug, project_id=project_id
-)
+).first()
-issue = Issue.objects.get(
-pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
-)
-serializer = IssueStateInboxSerializer(issue)
-return Response(serializer.data, status=status.HTTP_200_OK)

-def destroy(self, request, slug, project_id, inbox_id, pk):
+project = Project.objects.get(
-inbox_issue = InboxIssue.objects.get(
+workspace__slug=slug,
-pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+pk=project_id,
 )

+# Inbox view
+if inbox is None and not project.inbox_view:
+return Response(
+{
+"error": "Inbox is not enabled for this project enable it through the project's api"
+},
+status=status.HTTP_400_BAD_REQUEST,
+)

+# Get the inbox issue
+inbox_issue = InboxIssue.objects.get(
+issue_id=issue_id,
+workspace__slug=slug,
+project_id=project_id,
+inbox_id=inbox.id,
+)

 # Get the project member
-project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
+project_member = ProjectMember.objects.get(
+workspace__slug=slug,
+project_id=project_id,
+member=request.user,
+is_active=True,
+)

-if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
+# Check the inbox issue created
-return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
+if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
+request.user.id
+):
+return Response(
+{"error": "You cannot delete inbox issue"},
+status=status.HTTP_400_BAD_REQUEST,
+)

 # Check the issue status
 if inbox_issue.status in [-2, -1, 0, 2]:
 # Delete the issue also
-Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()

-inbox_issue.delete()
-return Response(status=status.HTTP_204_NO_CONTENT)

class InboxIssuePublicViewSet(BaseViewSet):
|
|
||||||
serializer_class = InboxIssueSerializer
|
|
||||||
model = InboxIssue
|
|
||||||
|
|
||||||
filterset_fields = [
|
|
||||||
"status",
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"))
|
|
||||||
if project_deploy_board is not None:
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(
|
|
||||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
inbox_id=self.kwargs.get("inbox_id"),
|
|
||||||
)
|
|
||||||
.select_related("issue", "workspace", "project")
|
|
||||||
)
|
|
||||||
return InboxIssue.objects.none()
|
|
||||||
|
|
||||||
def list(self, request, slug, project_id, inbox_id):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
filters = issue_filters(request.query_params, "GET")
|
|
||||||
issues = (
|
|
||||||
Issue.objects.filter(
|
Issue.objects.filter(
|
||||||
issue_inbox__inbox_id=inbox_id,
|
workspace__slug=slug, project_id=project_id, pk=issue_id
|
||||||
workspace__slug=slug,
|
).delete()
|
||||||
project_id=project_id,
|
|
||||||
)
|
|
||||||
.filter(**filters)
|
|
||||||
.annotate(bridge_id=F("issue_inbox__id"))
|
|
||||||
.select_related("workspace", "project", "state", "parent")
|
|
||||||
.prefetch_related("assignees", "labels")
|
|
||||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
|
||||||
.annotate(
|
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
attachment_count=IssueAttachment.objects.filter(
|
|
||||||
issue=OuterRef("id")
|
|
||||||
)
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"issue_inbox",
|
|
||||||
queryset=InboxIssue.objects.only(
|
|
||||||
"status", "duplicate_to", "snoozed_till", "source"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
|
||||||
return Response(
|
|
||||||
issues_data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id, inbox_id):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
if not request.data.get("issue", {}).get("name", False):
|
|
||||||
return Response(
|
|
||||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check for valid priority
|
|
||||||
if not request.data.get("issue", {}).get("priority", "none") in [
|
|
||||||
"low",
|
|
||||||
"medium",
|
|
||||||
"high",
|
|
||||||
"urgent",
|
|
||||||
"none",
|
|
||||||
]:
|
|
||||||
return Response(
|
|
||||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create or get state
|
|
||||||
state, _ = State.objects.get_or_create(
|
|
||||||
name="Triage",
|
|
||||||
group="backlog",
|
|
||||||
description="Default state for managing all Inbox Issues",
|
|
||||||
project_id=project_id,
|
|
||||||
color="#ff7700",
|
|
||||||
)
|
|
||||||
|
|
||||||
# create an issue
|
|
||||||
issue = Issue.objects.create(
|
|
||||||
name=request.data.get("issue", {}).get("name"),
|
|
||||||
description=request.data.get("issue", {}).get("description", {}),
|
|
||||||
description_html=request.data.get("issue", {}).get(
|
|
||||||
"description_html", "<p></p>"
|
|
||||||
),
|
|
||||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
|
||||||
project_id=project_id,
|
|
||||||
state=state,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create an Issue Activity
|
|
||||||
issue_activity.delay(
|
|
||||||
type="issue.activity.created",
|
|
||||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
|
||||||
actor_id=str(request.user.id),
|
|
||||||
issue_id=str(issue.id),
|
|
||||||
project_id=str(project_id),
|
|
||||||
current_instance=None,
|
|
||||||
epoch=int(timezone.now().timestamp())
|
|
||||||
)
|
|
||||||
# create an inbox issue
|
|
||||||
InboxIssue.objects.create(
|
|
||||||
inbox_id=inbox_id,
|
|
||||||
project_id=project_id,
|
|
||||||
issue=issue,
|
|
||||||
source=request.data.get("source", "in-app"),
|
|
||||||
)
|
|
||||||
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
# Get the project member
|
|
||||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
|
||||||
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
# Get issue data
|
|
||||||
issue_data = request.data.pop("issue", False)
|
|
||||||
|
|
||||||
|
|
||||||
issue = Issue.objects.get(
|
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
|
||||||
)
|
|
||||||
# viewers and guests since only viewers and guests
|
|
||||||
issue_data = {
|
|
||||||
"name": issue_data.get("name", issue.name),
|
|
||||||
"description_html": issue_data.get("description_html", issue.description_html),
|
|
||||||
"description": issue_data.get("description", issue.description)
|
|
||||||
}
|
|
||||||
|
|
||||||
issue_serializer = IssueCreateSerializer(
|
|
||||||
issue, data=issue_data, partial=True
|
|
||||||
)
|
|
||||||
|
|
||||||
if issue_serializer.is_valid():
|
|
||||||
current_instance = issue
|
|
||||||
# Log all the updates
|
|
||||||
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
|
|
||||||
if issue is not None:
|
|
||||||
issue_activity.delay(
|
|
||||||
type="issue.activity.updated",
|
|
||||||
requested_data=requested_data,
|
|
||||||
actor_id=str(request.user.id),
|
|
||||||
issue_id=str(issue.id),
|
|
||||||
project_id=str(project_id),
|
|
||||||
current_instance=json.dumps(
|
|
||||||
IssueSerializer(current_instance).data,
|
|
||||||
cls=DjangoJSONEncoder,
|
|
||||||
),
|
|
||||||
epoch=int(timezone.now().timestamp())
|
|
||||||
)
|
|
||||||
issue_serializer.save()
|
|
||||||
return Response(issue_serializer.data, status=status.HTTP_200_OK)
|
|
||||||
return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def retrieve(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
issue = Issue.objects.get(
|
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
|
||||||
)
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
|
|
||||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
|
||||||
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue.delete()
|
inbox_issue.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
File diff suppressed because it is too large
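The inbox diff above gates edits and deletes on the member's role: members at or below role 10 (viewers and guests) may only act on inbox issues they created, and when they do edit, only the name and description fields are kept. Below is a minimal, self-contained sketch of that rule; the helper name restrict_issue_payload and the GUEST_ROLE_CEILING constant are illustrative, not part of the codebase.

# Illustrative sketch of the role check used by the inbox PATCH handler above.
GUEST_ROLE_CEILING = 10  # assumption: mirrors the `role <= 10` checks in the diff

def restrict_issue_payload(role, created_by_id, user_id, payload, issue):
    """Return the payload a member may apply, or None when the edit is forbidden."""
    if role <= GUEST_ROLE_CEILING and str(created_by_id) != str(user_id):
        return None  # the view answers with HTTP 400 in this case
    if role <= GUEST_ROLE_CEILING:
        # viewers and guests may only change the name and description fields
        return {
            "name": payload.get("name", issue.name),
            "description_html": payload.get("description_html", issue.description_html),
            "description": payload.get("description", issue.description),
        }
    return payload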
@@ -1,73 +1,53 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# Django Imports
|
# Django imports
|
||||||
|
from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.db import IntegrityError
|
|
||||||
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
|
|
||||||
from django.core import serializers
|
from django.core import serializers
|
||||||
from django.utils.decorators import method_decorator
|
|
||||||
from django.views.decorators.gzip import gzip_page
|
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from sentry_sdk import capture_exception
|
from rest_framework.response import Response
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from . import BaseViewSet
|
from .base import BaseAPIView, WebhookMixin
|
||||||
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
|
from plane.db.models import (
|
||||||
|
Project,
|
||||||
|
Module,
|
||||||
|
ModuleLink,
|
||||||
|
Issue,
|
||||||
|
ModuleIssue,
|
||||||
|
IssueAttachment,
|
||||||
|
IssueLink,
|
||||||
|
)
|
||||||
from plane.api.serializers import (
|
from plane.api.serializers import (
|
||||||
ModuleWriteSerializer,
|
|
||||||
ModuleSerializer,
|
ModuleSerializer,
|
||||||
ModuleIssueSerializer,
|
ModuleIssueSerializer,
|
||||||
ModuleLinkSerializer,
|
IssueSerializer,
|
||||||
ModuleFavoriteSerializer,
|
|
||||||
IssueStateSerializer,
|
|
||||||
)
|
|
||||||
from plane.api.permissions import ProjectEntityPermission
|
|
||||||
from plane.db.models import (
|
|
||||||
Module,
|
|
||||||
ModuleIssue,
|
|
||||||
Project,
|
|
||||||
Issue,
|
|
||||||
ModuleLink,
|
|
||||||
ModuleFavorite,
|
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
)
|
)
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
from plane.utils.grouper import group_results
|
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
from plane.utils.analytics_plot import burndown_plot
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleViewSet(BaseViewSet):
|
class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
|
"""
|
||||||
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
|
`update` and `destroy` actions related to module.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
model = Module
|
model = Module
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
serializer_class = ModuleSerializer
|
||||||
def get_serializer_class(self):
|
webhook_event = "module"
|
||||||
return (
|
|
||||||
ModuleWriteSerializer
|
|
||||||
if self.action in ["create", "update", "partial_update"]
|
|
||||||
else ModuleSerializer
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
|
|
||||||
subquery = ModuleFavorite.objects.filter(
|
|
||||||
user=self.request.user,
|
|
||||||
module_id=OuterRef("pk"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
)
|
|
||||||
return (
|
return (
|
||||||
super()
|
Module.objects.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.get_queryset()
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.annotate(is_favorite=Exists(subquery))
|
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
.select_related("lead")
|
.select_related("lead")
|
||||||
@@ -137,130 +117,51 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.order_by("-is_favorite","-created_at")
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
def post(self, request, slug, project_id):
|
||||||
project = Project.objects.get(workspace__slug=slug, pk=project_id)
|
project = Project.objects.get(workspace__slug=slug, pk=project_id)
|
||||||
serializer = ModuleWriteSerializer(
|
serializer = ModuleSerializer(data=request.data, context={"project": project})
|
||||||
data=request.data, context={"project": project}
|
|
||||||
)
|
|
||||||
|
|
||||||
if serializer.is_valid():
|
if serializer.is_valid():
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
module = Module.objects.get(pk=serializer.data["id"])
|
module = Module.objects.get(pk=serializer.data["id"])
|
||||||
serializer = ModuleSerializer(module)
|
serializer = ModuleSerializer(module)
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
def retrieve(self, request, slug, project_id, pk):
|
def patch(self, request, slug, project_id, pk):
|
||||||
queryset = self.get_queryset().get(pk=pk)
|
module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
|
||||||
|
serializer = ModuleSerializer(module, data=request.data)
|
||||||
|
if serializer.is_valid():
|
||||||
|
serializer.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
assignee_distribution = (
|
def get(self, request, slug, project_id, pk=None):
|
||||||
Issue.objects.filter(
|
if pk:
|
||||||
issue_module__module_id=pk,
|
queryset = self.get_queryset().get(pk=pk)
|
||||||
workspace__slug=slug,
|
data = ModuleSerializer(
|
||||||
project_id=project_id,
|
queryset,
|
||||||
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
|
).data
|
||||||
|
return Response(
|
||||||
|
data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
.annotate(first_name=F("assignees__first_name"))
|
return self.paginate(
|
||||||
.annotate(last_name=F("assignees__last_name"))
|
request=request,
|
||||||
.annotate(assignee_id=F("assignees__id"))
|
queryset=(self.get_queryset()),
|
||||||
.annotate(display_name=F("assignees__display_name"))
|
on_results=lambda modules: ModuleSerializer(
|
||||||
.annotate(avatar=F("assignees__avatar"))
|
modules,
|
||||||
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
|
many=True,
|
||||||
.annotate(
|
fields=self.fields,
|
||||||
total_issues=Count(
|
expand=self.expand,
|
||||||
"assignee_id",
|
).data,
|
||||||
filter=Q(
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
completed_issues=Count(
|
|
||||||
"assignee_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=False,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issues=Count(
|
|
||||||
"assignee_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=True,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by("first_name", "last_name")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
label_distribution = (
|
def delete(self, request, slug, project_id, pk):
|
||||||
Issue.objects.filter(
|
|
||||||
issue_module__module_id=pk,
|
|
||||||
workspace__slug=slug,
|
|
||||||
project_id=project_id,
|
|
||||||
)
|
|
||||||
.annotate(label_name=F("labels__name"))
|
|
||||||
.annotate(color=F("labels__color"))
|
|
||||||
.annotate(label_id=F("labels__id"))
|
|
||||||
.values("label_name", "color", "label_id")
|
|
||||||
.annotate(
|
|
||||||
total_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
completed_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=False,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=True,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by("label_name")
|
|
||||||
)
|
|
||||||
|
|
||||||
data = ModuleSerializer(queryset).data
|
|
||||||
data["distribution"] = {
|
|
||||||
"assignees": assignee_distribution,
|
|
||||||
"labels": label_distribution,
|
|
||||||
"completion_chart": {},
|
|
||||||
}
|
|
||||||
|
|
||||||
if queryset.start_date and queryset.target_date:
|
|
||||||
data["distribution"]["completion_chart"] = burndown_plot(
|
|
||||||
queryset=queryset, slug=slug, project_id=project_id, module_id=pk
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, pk):
|
|
||||||
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||||
module_issues = list(
|
module_issues = list(
|
||||||
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
|
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
|
||||||
@@ -275,7 +176,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
actor_id=str(request.user.id),
|
actor_id=str(request.user.id),
|
||||||
issue_id=str(pk),
|
issue_id=None,
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp()),
|
epoch=int(timezone.now().timestamp()),
|
||||||
@@ -284,24 +185,25 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class ModuleIssueViewSet(BaseViewSet):
|
class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
|
"""
|
||||||
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
|
`update` and `destroy` actions related to module issues.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
serializer_class = ModuleIssueSerializer
|
serializer_class = ModuleIssueSerializer
|
||||||
model = ModuleIssue
|
model = ModuleIssue
|
||||||
|
webhook_event = "module_issue"
|
||||||
filterset_fields = [
|
bulk = True
|
||||||
"issue__labels__id",
|
|
||||||
"issue__assignees__id",
|
|
||||||
]
|
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
return (
|
||||||
super()
|
ModuleIssue.objects.annotate(
|
||||||
.get_queryset()
|
|
||||||
.annotate(
|
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
|
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
@@ -317,15 +219,12 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.select_related("issue", "issue__state", "issue__project")
|
.select_related("issue", "issue__state", "issue__project")
|
||||||
.prefetch_related("issue__assignees", "issue__labels")
|
.prefetch_related("issue__assignees", "issue__labels")
|
||||||
.prefetch_related("module__members")
|
.prefetch_related("module__members")
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
@method_decorator(gzip_page)
|
def get(self, request, slug, project_id, module_id):
|
||||||
def list(self, request, slug, project_id, module_id):
|
|
||||||
order_by = request.GET.get("order_by", "created_at")
|
order_by = request.GET.get("order_by", "created_at")
|
||||||
group_by = request.GET.get("group_by", False)
|
|
||||||
sub_group_by = request.GET.get("sub_group_by", False)
|
|
||||||
filters = issue_filters(request.query_params, "GET")
|
|
||||||
issues = (
|
issues = (
|
||||||
Issue.issue_objects.filter(issue_module__module_id=module_id)
|
Issue.issue_objects.filter(issue_module__module_id=module_id)
|
||||||
.annotate(
|
.annotate(
|
||||||
@@ -344,7 +243,6 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.prefetch_related("assignees")
|
.prefetch_related("assignees")
|
||||||
.prefetch_related("labels")
|
.prefetch_related("labels")
|
||||||
.order_by(order_by)
|
.order_by(order_by)
|
||||||
.filter(**filters)
|
|
||||||
.annotate(
|
.annotate(
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||||
.order_by()
|
.order_by()
|
||||||
@@ -358,26 +256,18 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.values("count")
|
.values("count")
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
issues_data = IssueStateSerializer(issues, many=True).data
|
return self.paginate(
|
||||||
|
request=request,
|
||||||
if sub_group_by and sub_group_by == group_by:
|
queryset=(issues),
|
||||||
return Response(
|
on_results=lambda issues: IssueSerializer(
|
||||||
{"error": "Group by and sub group by cannot be same"},
|
issues,
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
many=True,
|
||||||
)
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
if group_by:
|
).data,
|
||||||
grouped_results = group_results(issues_data, group_by, sub_group_by)
|
|
||||||
return Response(
|
|
||||||
grouped_results,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
issues_data, status=status.HTTP_200_OK
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id, module_id):
|
def post(self, request, slug, project_id, module_id):
|
||||||
issues = request.data.get("issues", [])
|
issues = request.data.get("issues", [])
|
||||||
if not len(issues):
|
if not len(issues):
|
||||||
return Response(
|
return Response(
|
||||||
@@ -387,6 +277,10 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
workspace__slug=slug, project_id=project_id, pk=module_id
|
workspace__slug=slug, project_id=project_id, pk=module_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
issues = Issue.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk__in=issues
|
||||||
|
).values_list("id", flat=True)
|
||||||
|
|
||||||
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
|
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
|
||||||
|
|
||||||
update_module_issue_activity = []
|
update_module_issue_activity = []
|
||||||
@@ -438,7 +332,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
# Capture Issue Activity
|
# Capture Issue Activity
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
type="module.activity.created",
|
type="module.activity.created",
|
||||||
requested_data=json.dumps({"modules_list": issues}),
|
requested_data=json.dumps({"modules_list": str(issues)}),
|
||||||
actor_id=str(self.request.user.id),
|
actor_id=str(self.request.user.id),
|
||||||
issue_id=None,
|
issue_id=None,
|
||||||
project_id=str(self.kwargs.get("project_id", None)),
|
project_id=str(self.kwargs.get("project_id", None)),
|
||||||
@@ -458,9 +352,9 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, module_id, pk):
|
def delete(self, request, slug, project_id, module_id, issue_id):
|
||||||
module_issue = ModuleIssue.objects.get(
|
module_issue = ModuleIssue.objects.get(
|
||||||
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
|
workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id
|
||||||
)
|
)
|
||||||
module_issue.delete()
|
module_issue.delete()
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
@@ -472,67 +366,9 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
actor_id=str(request.user.id),
|
actor_id=str(request.user.id),
|
||||||
issue_id=str(pk),
|
issue_id=str(issue_id),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp()),
|
epoch=int(timezone.now().timestamp()),
|
||||||
)
|
)
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class ModuleLinkViewSet(BaseViewSet):
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
model = ModuleLink
|
|
||||||
serializer_class = ModuleLinkSerializer
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
module_id=self.kwargs.get("module_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return (
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(module_id=self.kwargs.get("module_id"))
|
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
|
||||||
.order_by("-created_at")
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleFavoriteViewSet(BaseViewSet):
|
|
||||||
serializer_class = ModuleFavoriteSerializer
|
|
||||||
model = ModuleFavorite
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(user=self.request.user)
|
|
||||||
.select_related("module")
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
|
||||||
serializer = ModuleFavoriteSerializer(data=request.data)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save(user=request.user, project_id=project_id)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, module_id):
|
|
||||||
module_favorite = ModuleFavorite.objects.get(
|
|
||||||
project=project_id,
|
|
||||||
user=request.user,
|
|
||||||
workspace__slug=slug,
|
|
||||||
module_id=module_id,
|
|
||||||
)
|
|
||||||
module_favorite.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
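The module queryset above repeats one ORM idiom several times to count related rows: a correlated subquery built from OuterRef, an empty order_by(), and Func(F("id"), function="Count"). A minimal sketch of that pattern, using the Issue and IssueLink models referenced in the diff (standalone, for illustration only):

# Correlated-count annotation, as used for link_count/attachment_count/sub_issues_count above.
from django.db.models import F, Func, OuterRef

from plane.db.models import Issue, IssueLink

link_count = (
    IssueLink.objects.filter(issue=OuterRef("id"))
    .order_by()  # clear default ordering so the subquery collapses to a single count row
    .annotate(count=Func(F("id"), function="Count"))
    .values("count")
)

issues = Issue.issue_objects.annotate(link_count=link_count)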
|
@@ -1,255 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
from datetime import timedelta, date
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.db.models import Exists, OuterRef, Q, Prefetch
|
|
||||||
from django.utils import timezone
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from .base import BaseViewSet, BaseAPIView
|
|
||||||
from plane.api.permissions import ProjectEntityPermission
|
|
||||||
from plane.db.models import (
|
|
||||||
Page,
|
|
||||||
PageBlock,
|
|
||||||
PageFavorite,
|
|
||||||
Issue,
|
|
||||||
IssueAssignee,
|
|
||||||
IssueActivity,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import (
|
|
||||||
PageSerializer,
|
|
||||||
PageBlockSerializer,
|
|
||||||
PageFavoriteSerializer,
|
|
||||||
IssueLiteSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class PageViewSet(BaseViewSet):
|
|
||||||
serializer_class = PageSerializer
|
|
||||||
model = Page
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
search_fields = [
|
|
||||||
"name",
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
subquery = PageFavorite.objects.filter(
|
|
||||||
user=self.request.user,
|
|
||||||
page_id=OuterRef("pk"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
)
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
|
||||||
.filter(Q(owned_by=self.request.user) | Q(access=0))
|
|
||||||
.select_related("project")
|
|
||||||
.select_related("workspace")
|
|
||||||
.select_related("owned_by")
|
|
||||||
.annotate(is_favorite=Exists(subquery))
|
|
||||||
.order_by(self.request.GET.get("order_by", "-created_at"))
|
|
||||||
.prefetch_related("labels")
|
|
||||||
.order_by("name", "-is_favorite")
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"blocks",
|
|
||||||
queryset=PageBlock.objects.select_related(
|
|
||||||
"page", "issue", "workspace", "project"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"), owned_by=self.request.user
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
|
||||||
serializer = PageSerializer(
|
|
||||||
data=request.data,
|
|
||||||
context={"project_id": project_id, "owned_by_id": request.user.id},
|
|
||||||
)
|
|
||||||
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, pk):
|
|
||||||
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
|
|
||||||
# Only update access if the page owner is the requesting user
|
|
||||||
if (
|
|
||||||
page.access != request.data.get("access", page.access)
|
|
||||||
and page.owned_by_id != request.user.id
|
|
||||||
):
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Access cannot be updated since this page is owned by someone else"
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
serializer = PageSerializer(page, data=request.data, partial=True)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def list(self, request, slug, project_id):
|
|
||||||
queryset = self.get_queryset()
|
|
||||||
page_view = request.GET.get("page_view", False)
|
|
||||||
|
|
||||||
if not page_view:
|
|
||||||
return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
# All Pages
|
|
||||||
if page_view == "all":
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# Recent pages
|
|
||||||
if page_view == "recent":
|
|
||||||
current_time = date.today()
|
|
||||||
day_before = current_time - timedelta(days=1)
|
|
||||||
todays_pages = queryset.filter(updated_at__date=date.today())
|
|
||||||
yesterdays_pages = queryset.filter(updated_at__date=day_before)
|
|
||||||
earlier_this_week = queryset.filter( updated_at__date__range=(
|
|
||||||
(timezone.now() - timedelta(days=7)),
|
|
||||||
(timezone.now() - timedelta(days=2)),
|
|
||||||
))
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"today": PageSerializer(todays_pages, many=True).data,
|
|
||||||
"yesterday": PageSerializer(yesterdays_pages, many=True).data,
|
|
||||||
"earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
|
|
||||||
},
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Favorite Pages
|
|
||||||
if page_view == "favorite":
|
|
||||||
queryset = queryset.filter(is_favorite=True)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# My pages
|
|
||||||
if page_view == "created_by_me":
|
|
||||||
queryset = queryset.filter(owned_by=request.user)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# Created by other Pages
|
|
||||||
if page_view == "created_by_other":
|
|
||||||
queryset = queryset.filter(~Q(owned_by=request.user), access=0)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
class PageBlockViewSet(BaseViewSet):
|
|
||||||
serializer_class = PageBlockSerializer
|
|
||||||
model = PageBlock
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(page_id=self.kwargs.get("page_id"))
|
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
|
||||||
.select_related("project")
|
|
||||||
.select_related("workspace")
|
|
||||||
.select_related("page")
|
|
||||||
.select_related("issue")
|
|
||||||
.order_by("sort_order")
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
page_id=self.kwargs.get("page_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class PageFavoriteViewSet(BaseViewSet):
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
serializer_class = PageFavoriteSerializer
|
|
||||||
model = PageFavorite
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(user=self.request.user)
|
|
||||||
.select_related("page", "page__owned_by")
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
|
||||||
serializer = PageFavoriteSerializer(data=request.data)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save(user=request.user, project_id=project_id)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, page_id):
|
|
||||||
page_favorite = PageFavorite.objects.get(
|
|
||||||
project=project_id,
|
|
||||||
user=request.user,
|
|
||||||
workspace__slug=slug,
|
|
||||||
page_id=page_id,
|
|
||||||
)
|
|
||||||
page_favorite.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
class CreateIssueFromPageBlockEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
def post(self, request, slug, project_id, page_id, page_block_id):
|
|
||||||
page_block = PageBlock.objects.get(
|
|
||||||
pk=page_block_id,
|
|
||||||
workspace__slug=slug,
|
|
||||||
project_id=project_id,
|
|
||||||
page_id=page_id,
|
|
||||||
)
|
|
||||||
issue = Issue.objects.create(
|
|
||||||
name=page_block.name,
|
|
||||||
project_id=project_id,
|
|
||||||
description=page_block.description,
|
|
||||||
description_html=page_block.description_html,
|
|
||||||
description_stripped=page_block.description_stripped,
|
|
||||||
)
|
|
||||||
_ = IssueAssignee.objects.create(
|
|
||||||
issue=issue, assignee=request.user, project_id=project_id
|
|
||||||
)
|
|
||||||
|
|
||||||
_ = IssueActivity.objects.create(
|
|
||||||
issue=issue,
|
|
||||||
actor=request.user,
|
|
||||||
project_id=project_id,
|
|
||||||
comment=f"created the issue from {page_block.name} block",
|
|
||||||
verb="created",
|
|
||||||
)
|
|
||||||
|
|
||||||
page_block.issue = issue
|
|
||||||
page_block.save()
|
|
||||||
|
|
||||||
return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
|
|
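The deleted PageViewSet above grouped "recent" pages into today, yesterday, and earlier-this-week buckets by updated_at. A small sketch of those filters, assuming a Page queryset as in the removed list() method (the function name is illustrative):

# Date bucketing from the removed `page_view == "recent"` branch above.
from datetime import date, timedelta

from django.utils import timezone

def recent_page_buckets(queryset):
    today = date.today()
    yesterday = today - timedelta(days=1)
    return {
        "today": queryset.filter(updated_at__date=today),
        "yesterday": queryset.filter(updated_at__date=yesterday),
        "earlier_this_week": queryset.filter(
            updated_at__date__range=(
                timezone.now() - timedelta(days=7),
                timezone.now() - timedelta(days=2),
            )
        ),
    }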
@@ -1,121 +1,63 @@
|
|||||||
# Python imports
|
|
||||||
import jwt
|
|
||||||
import boto3
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.core.exceptions import ValidationError
|
|
||||||
from django.db import IntegrityError
|
from django.db import IntegrityError
|
||||||
from django.db.models import (
|
from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
|
||||||
Prefetch,
|
|
||||||
Q,
|
|
||||||
Exists,
|
|
||||||
OuterRef,
|
|
||||||
F,
|
|
||||||
Func,
|
|
||||||
Subquery,
|
|
||||||
)
|
|
||||||
from django.core.validators import validate_email
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
# Third Party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework import serializers
|
from rest_framework.response import Response
|
||||||
from rest_framework.permissions import AllowAny
|
from rest_framework.serializers import ValidationError
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseViewSet, BaseAPIView
|
|
||||||
from plane.api.serializers import (
|
|
||||||
ProjectSerializer,
|
|
||||||
ProjectListSerializer,
|
|
||||||
ProjectMemberSerializer,
|
|
||||||
ProjectDetailSerializer,
|
|
||||||
ProjectMemberInviteSerializer,
|
|
||||||
ProjectFavoriteSerializer,
|
|
||||||
ProjectDeployBoardSerializer,
|
|
||||||
ProjectMemberAdminSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from plane.api.permissions import (
|
|
||||||
ProjectBasePermission,
|
|
||||||
ProjectEntityPermission,
|
|
||||||
ProjectMemberPermission,
|
|
||||||
ProjectLitePermission,
|
|
||||||
)
|
|
||||||
|
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
Project,
|
|
||||||
ProjectMember,
|
|
||||||
Workspace,
|
Workspace,
|
||||||
ProjectMemberInvite,
|
Project,
|
||||||
User,
|
|
||||||
WorkspaceMember,
|
|
||||||
State,
|
|
||||||
TeamMember,
|
|
||||||
ProjectFavorite,
|
ProjectFavorite,
|
||||||
ProjectIdentifier,
|
ProjectMember,
|
||||||
Module,
|
|
||||||
Cycle,
|
|
||||||
CycleFavorite,
|
|
||||||
ModuleFavorite,
|
|
||||||
PageFavorite,
|
|
||||||
IssueViewFavorite,
|
|
||||||
Page,
|
|
||||||
IssueAssignee,
|
|
||||||
ModuleMember,
|
|
||||||
Inbox,
|
|
||||||
ProjectDeployBoard,
|
ProjectDeployBoard,
|
||||||
|
State,
|
||||||
|
Cycle,
|
||||||
|
Module,
|
||||||
IssueProperty,
|
IssueProperty,
|
||||||
|
Inbox,
|
||||||
)
|
)
|
||||||
|
from plane.app.permissions import ProjectBasePermission
|
||||||
from plane.bgtasks.project_invitation_task import project_invitation
|
from plane.api.serializers import ProjectSerializer
|
||||||
|
from .base import BaseAPIView, WebhookMixin
|
||||||
|
|
||||||
|
|
||||||
class ProjectViewSet(BaseViewSet):
|
class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
|
"""Project Endpoints to create, update, list, retrieve and delete endpoint"""
|
||||||
|
|
||||||
serializer_class = ProjectSerializer
|
serializer_class = ProjectSerializer
|
||||||
model = Project
|
model = Project
|
||||||
|
webhook_event = "project"
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectBasePermission,
|
ProjectBasePermission,
|
||||||
]
|
]
|
||||||
|
|
||||||
def get_serializer_class(self, *args, **kwargs):
|
|
||||||
if self.action in ["update", "partial_update"]:
|
|
||||||
return ProjectSerializer
|
|
||||||
return ProjectDetailSerializer
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
return (
|
||||||
super()
|
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
|
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
|
||||||
.select_related(
|
.select_related(
|
||||||
"workspace", "workspace__owner", "default_assignee", "project_lead"
|
"workspace", "workspace__owner", "default_assignee", "project_lead"
|
||||||
)
|
)
|
||||||
.annotate(
|
|
||||||
is_favorite=Exists(
|
|
||||||
ProjectFavorite.objects.filter(
|
|
||||||
user=self.request.user,
|
|
||||||
project_id=OuterRef("pk"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.annotate(
|
.annotate(
|
||||||
is_member=Exists(
|
is_member=Exists(
|
||||||
ProjectMember.objects.filter(
|
ProjectMember.objects.filter(
|
||||||
member=self.request.user,
|
member=self.request.user,
|
||||||
project_id=OuterRef("pk"),
|
project_id=OuterRef("pk"),
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
|
is_active=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
total_members=ProjectMember.objects.filter(
|
total_members=ProjectMember.objects.filter(
|
||||||
project_id=OuterRef("id"), member__is_bot=False
|
project_id=OuterRef("id"),
|
||||||
|
member__is_bot=False,
|
||||||
|
is_active=True,
|
||||||
)
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
@@ -137,6 +79,7 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
member_role=ProjectMember.objects.filter(
|
member_role=ProjectMember.objects.filter(
|
||||||
project_id=OuterRef("pk"),
|
project_id=OuterRef("pk"),
|
||||||
member_id=self.request.user.id,
|
member_id=self.request.user.id,
|
||||||
|
is_active=True,
|
||||||
).values("role")
|
).values("role")
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@@ -147,49 +90,46 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
def list(self, request, slug):
|
def get(self, request, slug, project_id=None):
|
||||||
fields = [field for field in request.GET.get("fields", "").split(",") if field]
|
if project_id is None:
|
||||||
|
sort_order_query = ProjectMember.objects.filter(
|
||||||
sort_order_query = ProjectMember.objects.filter(
|
member=request.user,
|
||||||
member=request.user,
|
project_id=OuterRef("pk"),
|
||||||
project_id=OuterRef("pk"),
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
is_active=True,
|
||||||
).values("sort_order")
|
).values("sort_order")
|
||||||
projects = (
|
projects = (
|
||||||
self.get_queryset()
|
self.get_queryset()
|
||||||
.annotate(sort_order=Subquery(sort_order_query))
|
.annotate(sort_order=Subquery(sort_order_query))
|
||||||
.prefetch_related(
|
.prefetch_related(
|
||||||
Prefetch(
|
Prefetch(
|
||||||
"project_projectmember",
|
"project_projectmember",
|
||||||
queryset=ProjectMember.objects.filter(
|
queryset=ProjectMember.objects.filter(
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
).select_related("member"),
|
is_active=True,
|
||||||
|
).select_related("member"),
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
.order_by(request.GET.get("order_by", "sort_order"))
|
||||||
)
|
)
|
||||||
.order_by("sort_order", "name")
|
|
||||||
)
|
|
||||||
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
|
|
||||||
return self.paginate(
|
return self.paginate(
|
||||||
request=request,
|
request=request,
|
||||||
queryset=(projects),
|
queryset=(projects),
|
||||||
on_results=lambda projects: ProjectListSerializer(
|
on_results=lambda projects: ProjectSerializer(
|
||||||
projects, many=True
|
projects, many=True, fields=self.fields, expand=self.expand,
|
||||||
).data,
|
).data,
|
||||||
)
|
)
|
||||||
|
project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
|
||||||
|
serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
return Response(
|
def post(self, request, slug):
|
||||||
ProjectListSerializer(
|
|
||||||
projects, many=True, fields=fields if fields else None
|
|
||||||
).data
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug):
|
|
||||||
try:
|
try:
|
||||||
workspace = Workspace.objects.get(slug=slug)
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
|
|
||||||
serializer = ProjectSerializer(
|
serializer = ProjectSerializer(
|
||||||
data={**request.data}, context={"workspace_id": workspace.id}
|
data={**request.data}, context={"workspace_id": workspace.id}
|
||||||
)
|
)
|
||||||
@@ -272,7 +212,7 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
)
|
)
|
||||||
|
|
||||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
||||||
serializer = ProjectListSerializer(project)
|
serializer = ProjectSerializer(project)
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
return Response(
|
return Response(
|
||||||
serializer.errors,
|
serializer.errors,
|
||||||
@@ -288,17 +228,16 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
return Response(
|
return Response(
|
||||||
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
|
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
|
||||||
)
|
)
|
||||||
except serializers.ValidationError as e:
|
except ValidationError as e:
|
||||||
return Response(
|
return Response(
|
||||||
{"identifier": "The project identifier is already taken"},
|
{"identifier": "The project identifier is already taken"},
|
||||||
status=status.HTTP_410_GONE,
|
status=status.HTTP_410_GONE,
|
||||||
)
|
)
|
||||||
|
|
||||||
def partial_update(self, request, slug, pk=None):
|
def patch(self, request, slug, project_id=None):
|
||||||
try:
|
try:
|
||||||
workspace = Workspace.objects.get(slug=slug)
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
|
project = Project.objects.get(pk=project_id)
|
||||||
project = Project.objects.get(pk=pk)
|
|
||||||
|
|
||||||
serializer = ProjectSerializer(
|
serializer = ProjectSerializer(
|
||||||
project,
|
project,
|
||||||
@@ -319,15 +258,14 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
name="Triage",
|
name="Triage",
|
||||||
group="backlog",
|
group="backlog",
|
||||||
description="Default state for managing all Inbox Issues",
|
description="Default state for managing all Inbox Issues",
|
||||||
project_id=pk,
|
project_id=project_id,
|
||||||
color="#ff7700",
|
color="#ff7700",
|
||||||
)
|
)
|
||||||
|
|
||||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
||||||
serializer = ProjectListSerializer(project)
|
serializer = ProjectSerializer(project)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
except IntegrityError as e:
|
except IntegrityError as e:
|
||||||
if "already exists" in str(e):
|
if "already exists" in str(e):
|
||||||
return Response(
|
return Response(
|
||||||
@@ -338,710 +276,13 @@ class ProjectViewSet(BaseViewSet):
|
|||||||
return Response(
|
return Response(
|
||||||
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
|
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
|
||||||
)
|
)
|
||||||
except serializers.ValidationError as e:
|
except ValidationError as e:
|
||||||
return Response(
|
return Response(
|
||||||
{"identifier": "The project identifier is already taken"},
|
{"identifier": "The project identifier is already taken"},
|
||||||
status=status.HTTP_410_GONE,
|
status=status.HTTP_410_GONE,
|
||||||
)
|
)
|
||||||
-class InviteProjectEndpoint(BaseAPIView):
-    permission_classes = [ProjectBasePermission]
-
-    def post(self, request, slug, project_id): ...
-
-
-class UserProjectInvitationsViewset(BaseViewSet):
-    serializer_class = ProjectMemberInviteSerializer
-    model = ProjectMemberInvite
-
-    def get_queryset(self): ...
-
-    def create(self, request): ...
-
-
-class ProjectMemberViewSet(BaseViewSet):
-    serializer_class = ProjectMemberAdminSerializer
-    model = ProjectMember
-    permission_classes = [ProjectMemberPermission]
-    search_fields = ["member__display_name", "member__first_name"]
-
-    def get_queryset(self): ...
-
-    def create(self, request, slug, project_id): ...
-
-    def list(self, request, slug, project_id): ...
-
-    def partial_update(self, request, slug, project_id, pk): ...
-
-    def destroy(self, request, slug, project_id, pk): ...
-
-
-class AddTeamToProjectEndpoint(BaseAPIView):
-    permission_classes = [ProjectBasePermission]
-
-    def post(self, request, slug, project_id): ...
-
-
-class ProjectMemberInvitationsViewset(BaseViewSet):
-    serializer_class = ProjectMemberInviteSerializer
-    model = ProjectMemberInvite
-    search_fields = []
-    permission_classes = [ProjectBasePermission]
-
-    def get_queryset(self): ...
-
-
-class ProjectMemberInviteDetailViewSet(BaseViewSet):
-    serializer_class = ProjectMemberInviteSerializer
-    model = ProjectMemberInvite
-    search_fields = []
-    permission_classes = [ProjectBasePermission]
-
-    def get_queryset(self): ...
-
-
-class ProjectIdentifierEndpoint(BaseAPIView):
-    permission_classes = [ProjectBasePermission]
-
-    def get(self, request, slug): ...
-
-    def delete(self, request, slug): ...
-
-
-class ProjectJoinEndpoint(BaseAPIView):
-    def post(self, request, slug): ...
-
-
-class ProjectUserViewsEndpoint(BaseAPIView):
-    def post(self, request, slug, project_id): ...
-
-
-class ProjectMemberUserEndpoint(BaseAPIView):
-    def get(self, request, slug, project_id): ...
-
-
-class ProjectFavoritesViewSet(BaseViewSet):
-    serializer_class = ProjectFavoriteSerializer
-    model = ProjectFavorite
-
-    def get_queryset(self): ...
-
-    def perform_create(self, serializer): ...
-
-    def create(self, request, slug): ...
-
-    def destroy(self, request, slug, project_id): ...
-
-
-class ProjectDeployBoardViewSet(BaseViewSet):
-    permission_classes = [ProjectMemberPermission]
-    serializer_class = ProjectDeployBoardSerializer
-    model = ProjectDeployBoard
-
-    def get_queryset(self): ...
-
-    def create(self, request, slug, project_id): ...
-
-
-class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
-    permission_classes = [AllowAny]
-
-    def get(self, request, slug, project_id): ...
-
-
-class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
-    permission_classes = [AllowAny]
-
-    def get(self, request, slug): ...
-class LeaveProjectEndpoint(BaseAPIView):
-    permission_classes = [
-        ProjectLitePermission,
-    ]
-
-    def delete(self, request, slug, project_id):
-        project_member = ProjectMember.objects.get(
-            workspace__slug=slug,
-            member=request.user,
-            project_id=project_id,
-        )
-
-        # Only Admin case
-        if (
-            project_member.role == 20
-            and ProjectMember.objects.filter(
-                workspace__slug=slug,
-                role=20,
-                project_id=project_id,
-            ).count()
-            == 1
-        ):
-            return Response(
-                {
-                    "error": "You cannot leave the project since you are the only admin of the project you should delete the project"
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        # Delete the member from workspace
-        project_member.delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
+    def delete(self, request, slug, project_id):
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        project.delete()
+        return Response(status=status.HTTP_204_NO_CONTENT)


-class ProjectPublicCoverImagesEndpoint(BaseAPIView):
-    permission_classes = [
-        AllowAny,
-    ]
-
-    def get(self, request):
-        files = []
-        s3 = boto3.client(
-            "s3",
-            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        )
-        params = {
-            "Bucket": settings.AWS_S3_BUCKET_NAME,
-            "Prefix": "static/project-cover/",
-        }
-
-        response = s3.list_objects_v2(**params)
-        # Extracting file keys from the response
-        if "Contents" in response:
-            for content in response["Contents"]:
-                if not content["Key"].endswith(
-                    "/"
-                ):  # This line ensures we're only getting files, not "sub-folders"
-                    files.append(
-                        f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
-                    )
-
-        return Response(files, status=status.HTTP_200_OK)
@@ -7,30 +7,24 @@ from django.db.models import Q
 # Third party imports
 from rest_framework.response import Response
 from rest_framework import status
-from sentry_sdk import capture_exception

 # Module imports
-from . import BaseViewSet, BaseAPIView
+from .base import BaseAPIView
 from plane.api.serializers import StateSerializer
-from plane.api.permissions import ProjectEntityPermission
+from plane.app.permissions import ProjectEntityPermission
 from plane.db.models import State, Issue


-class StateViewSet(BaseViewSet):
+class StateAPIEndpoint(BaseAPIView):
    serializer_class = StateSerializer
    model = State
    permission_classes = [
        ProjectEntityPermission,
    ]

-    def perform_create(self, serializer):
-        serializer.save(project_id=self.kwargs.get("project_id"))
-
    def get_queryset(self):
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(workspace__slug=self.kwargs.get("slug"))
+        return (
+            State.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(project__project_projectmember__member=self.request.user)
            .filter(~Q(name="Triage"))
@@ -39,49 +33,41 @@ class StateViewSet(BaseViewSet):
            .distinct()
        )

-    def create(self, request, slug, project_id):
-        serializer = StateSerializer(data=request.data)
+    def post(self, request, slug, project_id):
+        serializer = StateSerializer(data=request.data, context={"project_id": project_id})
        if serializer.is_valid():
            serializer.save(project_id=project_id)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

-    def list(self, request, slug, project_id):
-        states = StateSerializer(self.get_queryset(), many=True).data
-        grouped = request.GET.get("grouped", False)
-        if grouped == "true":
-            state_dict = {}
-            for key, value in groupby(
-                sorted(states, key=lambda state: state["group"]),
-                lambda state: state.get("group"),
-            ):
-                state_dict[str(key)] = list(value)
-            return Response(state_dict, status=status.HTTP_200_OK)
-        return Response(states, status=status.HTTP_200_OK)
+    def get(self, request, slug, project_id, state_id=None):
+        if state_id:
+            serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return self.paginate(
+            request=request,
+            queryset=(self.get_queryset()),
+            on_results=lambda states: StateSerializer(
+                states,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+            ).data,
+        )

-    def mark_as_default(self, request, slug, project_id, pk):
-        # Select all the states which are marked as default
-        _ = State.objects.filter(
-            workspace__slug=slug, project_id=project_id, default=True
-        ).update(default=False)
-        _ = State.objects.filter(
-            workspace__slug=slug, project_id=project_id, pk=pk
-        ).update(default=True)
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-    def destroy(self, request, slug, project_id, pk):
+    def delete(self, request, slug, project_id, state_id):
        state = State.objects.get(
            ~Q(name="Triage"),
-            pk=pk,
+            pk=state_id,
            project_id=project_id,
            workspace__slug=slug,
        )

        if state.default:
-            return Response({"error": "Default state cannot be deleted"}, status=False)
+            return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST)

        # Check for any issues in the state
-        issue_exist = Issue.issue_objects.filter(state=pk).exists()
+        issue_exist = Issue.issue_objects.filter(state=state_id).exists()

        if issue_exist:
            return Response(
@@ -91,3 +77,11 @@ class StateViewSet(BaseViewSet):

        state.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

+    def patch(self, request, slug, project_id, state_id=None):
+        state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id)
+        serializer = StateSerializer(state, data=request.data, partial=True)
+        if serializer.is_valid():
+            serializer.save()
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1,73 +0,0 @@
-# Third party imports
-from rest_framework.response import Response
-from rest_framework import status
-
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.serializers import (
-    UserSerializer,
-    IssueActivitySerializer,
-    UserMeSerializer,
-    UserMeSettingsSerializer,
-)
-
-from plane.api.views.base import BaseViewSet, BaseAPIView
-from plane.db.models import (
-    User,
-    Workspace,
-    WorkspaceMemberInvite,
-    Issue,
-    IssueActivity,
-)
-from plane.utils.paginator import BasePaginator
-
-
-class UserEndpoint(BaseViewSet):
-    serializer_class = UserSerializer
-    model = User
-
-    def get_object(self):
-        return self.request.user
-
-    def retrieve(self, request):
-        serialized_data = UserMeSerializer(request.user).data
-        return Response(
-            serialized_data,
-            status=status.HTTP_200_OK,
-        )
-
-    def retrieve_user_settings(self, request):
-        serialized_data = UserMeSettingsSerializer(request.user).data
-        return Response(serialized_data, status=status.HTTP_200_OK)
-
-
-class UpdateUserOnBoardedEndpoint(BaseAPIView):
-    def patch(self, request):
-        user = User.objects.get(pk=request.user.id)
-        user.is_onboarded = request.data.get("is_onboarded", False)
-        user.save()
-        return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
-
-
-class UpdateUserTourCompletedEndpoint(BaseAPIView):
-    def patch(self, request):
-        user = User.objects.get(pk=request.user.id)
-        user.is_tour_completed = request.data.get("is_tour_completed", False)
-        user.save()
-        return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
-
-
-class UserActivityEndpoint(BaseAPIView, BasePaginator):
-    def get(self, request, slug):
-        queryset = IssueActivity.objects.filter(
-            actor=request.user, workspace__slug=slug
-        ).select_related("actor", "workspace", "issue", "project")
-
-        return self.paginate(
-            request=request,
-            queryset=queryset,
-            on_results=lambda issue_activities: IssueActivitySerializer(
-                issue_activities, many=True
-            ).data,
-        )
0  apiserver/plane/app/__init__.py  Normal file

5  apiserver/plane/app/apps.py  Normal file
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class AppApiConfig(AppConfig):
+    name = "plane.app"

0  apiserver/plane/app/middleware/__init__.py  Normal file

47  apiserver/plane/app/middleware/api_authentication.py  Normal file
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+    """
+    Authentication with an API Key
+    """
+
+    www_authenticate_realm = "api"
+    media_type = "application/json"
+    auth_header_name = "X-Api-Key"
+
+    def get_api_token(self, request):
+        return request.headers.get(self.auth_header_name)
+
+    def validate_api_token(self, token):
+        try:
+            api_token = APIToken.objects.get(
+                Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+                token=token,
+                is_active=True,
+            )
+        except APIToken.DoesNotExist:
+            raise AuthenticationFailed("Given API token is not valid")
+
+        # save api token last used
+        api_token.last_used = timezone.now()
+        api_token.save(update_fields=["last_used"])
+        return (api_token.user, api_token.token)
+
+    def authenticate(self, request):
+        token = self.get_api_token(request=request)
+        if not token:
+            return None
+
+        # Validate the API token
+        user, token = self.validate_api_token(token)
+        return user, token
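The authentication class added above only takes effect once DRF is told to use it. A minimal sketch of that wiring is shown below; the settings module path and the decision to keep SessionAuthentication alongside it are assumptions, not part of this diff.

# Hypothetical snippet for a Django settings module; adjust the dotted path to
# wherever APIKeyAuthentication actually lives in the deployment.
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "rest_framework.authentication.SessionAuthentication",
        "plane.app.middleware.api_authentication.APIKeyAuthentication",
    ),
}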
17  apiserver/plane/app/permissions/__init__.py  Normal file
@@ -0,0 +1,17 @@
+
+from .workspace import (
+    WorkSpaceBasePermission,
+    WorkspaceOwnerPermission,
+    WorkSpaceAdminPermission,
+    WorkspaceEntityPermission,
+    WorkspaceViewerPermission,
+    WorkspaceUserPermission,
+)
+from .project import (
+    ProjectBasePermission,
+    ProjectEntityPermission,
+    ProjectMemberPermission,
+    ProjectLitePermission,
+)
+
+
@@ -13,14 +13,15 @@ Guest = 5
 class ProjectBasePermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

        ## Safe Methods -> Handle the filtering logic in queryset
        if request.method in SAFE_METHODS:
            return WorkspaceMember.objects.filter(
-                workspace__slug=view.workspace_slug, member=request.user
+                workspace__slug=view.workspace_slug,
+                member=request.user,
+                is_active=True,
            ).exists()

        ## Only workspace owners or admins can create the projects
@@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                role__in=[Admin, Member],
+                is_active=True,
            ).exists()

        ## Only Project Admins can update project attributes
@@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission):
            member=request.user,
            role=Admin,
            project_id=view.project_id,
+            is_active=True,
        ).exists()


 class ProjectMemberPermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

        ## Safe Methods -> Handle the filtering logic in queryset
        if request.method in SAFE_METHODS:
            return ProjectMember.objects.filter(
-                workspace__slug=view.workspace_slug, member=request.user
+                workspace__slug=view.workspace_slug,
+                member=request.user,
+                is_active=True,
            ).exists()
        ## Only workspace owners or admins can create the projects
        if request.method == "POST":
@@ -57,6 +61,7 @@ class ProjectMemberPermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                role__in=[Admin, Member],
+                is_active=True,
            ).exists()

        ## Only Project Admins can update project attributes
@@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission):
            member=request.user,
            role__in=[Admin, Member],
            project_id=view.project_id,
+            is_active=True,
        ).exists()


 class ProjectEntityPermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

@@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                project_id=view.project_id,
+                is_active=True,
            ).exists()

        ## Only project members or admins can create and edit the project attributes
@@ -88,11 +94,11 @@ class ProjectEntityPermission(BasePermission):
            member=request.user,
            role__in=[Admin, Member],
            project_id=view.project_id,
+            is_active=True,
        ).exists()


 class ProjectLitePermission(BasePermission):

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
@@ -101,4 +107,5 @@ class ProjectLitePermission(BasePermission):
            workspace__slug=view.workspace_slug,
            member=request.user,
            project_id=view.project_id,
+            is_active=True,
        ).exists()
@@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission):
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
+            is_active=True,
        ).exists()

        # allow only owner to delete the workspace
        if request.method == "DELETE":
            return WorkspaceMember.objects.filter(
-                member=request.user, workspace__slug=view.workspace_slug, role=Owner
+                member=request.user,
+                workspace__slug=view.workspace_slug,
+                role=Owner,
+                is_active=True,
            ).exists()


+class WorkspaceOwnerPermission(BasePermission):
+    def has_permission(self, request, view):
+        if request.user.is_anonymous:
+            return False
+
+        return WorkspaceMember.objects.filter(
+            workspace__slug=view.workspace_slug,
+            member=request.user,
+            role=Owner,
+        ).exists()
+
+
 class WorkSpaceAdminPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
@@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission):
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
+            is_active=True,
        ).exists()

@@ -63,12 +80,14 @@ class WorkspaceEntityPermission(BasePermission):
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=request.user,
+                is_active=True,
            ).exists()

        return WorkspaceMember.objects.filter(
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
+            is_active=True,
        ).exists()

@@ -78,5 +97,19 @@ class WorkspaceViewerPermission(BasePermission):
            return False

        return WorkspaceMember.objects.filter(
-            member=request.user, workspace__slug=view.workspace_slug, role__gte=10
+            member=request.user,
+            workspace__slug=view.workspace_slug,
+            is_active=True,
        ).exists()
+
+
+class WorkspaceUserPermission(BasePermission):
+    def has_permission(self, request, view):
+        if request.user.is_anonymous:
+            return False
+
+        return WorkspaceMember.objects.filter(
+            member=request.user,
+            workspace__slug=view.workspace_slug,
+            is_active=True,
+        ).exists()
104  apiserver/plane/app/serializers/__init__.py  Normal file
@@ -0,0 +1,104 @@
+from .base import BaseSerializer
+from .user import (
+    UserSerializer,
+    UserLiteSerializer,
+    ChangePasswordSerializer,
+    ResetPasswordSerializer,
+    UserAdminLiteSerializer,
+    UserMeSerializer,
+    UserMeSettingsSerializer,
+)
+from .workspace import (
+    WorkSpaceSerializer,
+    WorkSpaceMemberSerializer,
+    TeamSerializer,
+    WorkSpaceMemberInviteSerializer,
+    WorkspaceLiteSerializer,
+    WorkspaceThemeSerializer,
+    WorkspaceMemberAdminSerializer,
+    WorkspaceMemberMeSerializer,
+)
+from .project import (
+    ProjectSerializer,
+    ProjectListSerializer,
+    ProjectDetailSerializer,
+    ProjectMemberSerializer,
+    ProjectMemberInviteSerializer,
+    ProjectIdentifierSerializer,
+    ProjectFavoriteSerializer,
+    ProjectLiteSerializer,
+    ProjectMemberLiteSerializer,
+    ProjectDeployBoardSerializer,
+    ProjectMemberAdminSerializer,
+    ProjectPublicMemberSerializer,
+)
+from .state import StateSerializer, StateLiteSerializer
+from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
+from .cycle import (
+    CycleSerializer,
+    CycleIssueSerializer,
+    CycleFavoriteSerializer,
+    CycleWriteSerializer,
+)
+from .asset import FileAssetSerializer
+from .issue import (
+    IssueCreateSerializer,
+    IssueActivitySerializer,
+    IssueCommentSerializer,
+    IssuePropertySerializer,
+    IssueAssigneeSerializer,
+    LabelSerializer,
+    IssueSerializer,
+    IssueFlatSerializer,
+    IssueStateSerializer,
+    IssueLinkSerializer,
+    IssueLiteSerializer,
+    IssueAttachmentSerializer,
+    IssueSubscriberSerializer,
+    IssueReactionSerializer,
+    CommentReactionSerializer,
+    IssueVoteSerializer,
+    IssueRelationSerializer,
+    RelatedIssueSerializer,
+    IssuePublicSerializer,
+)
+
+from .module import (
+    ModuleWriteSerializer,
+    ModuleSerializer,
+    ModuleIssueSerializer,
+    ModuleLinkSerializer,
+    ModuleFavoriteSerializer,
+)
+
+from .api import APITokenSerializer, APITokenReadSerializer
+
+from .integration import (
+    IntegrationSerializer,
+    WorkspaceIntegrationSerializer,
+    GithubIssueSyncSerializer,
+    GithubRepositorySerializer,
+    GithubRepositorySyncSerializer,
+    GithubCommentSyncSerializer,
+    SlackProjectSyncSerializer,
+)
+
+from .importer import ImporterSerializer
+
+from .page import PageSerializer, PageLogSerializer, SubPageSerializer, PageFavoriteSerializer
+
+from .estimate import (
+    EstimateSerializer,
+    EstimatePointSerializer,
+    EstimateReadSerializer,
+)
+
+from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
+
+from .analytic import AnalyticViewSerializer
+
+from .notification import NotificationSerializer
+
+from .exporter import ExporterHistorySerializer
+
+from .webhook import WebhookSerializer, WebhookLogSerializer
31  apiserver/plane/app/serializers/api.py  Normal file
@@ -0,0 +1,31 @@
+from .base import BaseSerializer
+from plane.db.models import APIToken, APIActivityLog
+
+
+class APITokenSerializer(BaseSerializer):
+
+    class Meta:
+        model = APIToken
+        fields = "__all__"
+        read_only_fields = [
+            "token",
+            "expired_at",
+            "created_at",
+            "updated_at",
+            "workspace",
+            "user",
+        ]
+
+
+class APITokenReadSerializer(BaseSerializer):
+
+    class Meta:
+        model = APIToken
+        exclude = ('token',)
+
+
+class APIActivityLogSerializer(BaseSerializer):
+
+    class Meta:
+        model = APIActivityLog
+        fields = "__all__"
58  apiserver/plane/app/serializers/base.py  Normal file
@@ -0,0 +1,58 @@
+from rest_framework import serializers
+
+
+class BaseSerializer(serializers.ModelSerializer):
+    id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+class DynamicBaseSerializer(BaseSerializer):
+
+    def __init__(self, *args, **kwargs):
+        # If 'fields' is provided in the arguments, remove it and store it separately.
+        # This is done so as not to pass this custom argument up to the superclass.
+        fields = kwargs.pop("fields", None)
+
+        # Call the initialization of the superclass.
+        super().__init__(*args, **kwargs)
+
+        # If 'fields' was provided, filter the fields of the serializer accordingly.
+        if fields is not None:
+            self.fields = self._filter_fields(fields)
+
+    def _filter_fields(self, fields):
+        """
+        Adjust the serializer's fields based on the provided 'fields' list.
+
+        :param fields: List or dictionary specifying which fields to include in the serializer.
+        :return: The updated fields for the serializer.
+        """
+        # Check each field_name in the provided fields.
+        for field_name in fields:
+            # If the field is a dictionary (indicating nested fields),
+            # loop through its keys and values.
+            if isinstance(field_name, dict):
+                for key, value in field_name.items():
+                    # If the value of this nested field is a list,
+                    # perform a recursive filter on it.
+                    if isinstance(value, list):
+                        self._filter_fields(self.fields[key], value)
+
+        # Create a list to store allowed fields.
+        allowed = []
+        for item in fields:
+            # If the item is a string, it directly represents a field's name.
+            if isinstance(item, str):
+                allowed.append(item)
+            # If the item is a dictionary, it represents a nested field.
+            # Add the key of this dictionary to the allowed list.
+            elif isinstance(item, dict):
+                allowed.append(list(item.keys())[0])
+
+        # Convert the current serializer's fields and the allowed fields to sets.
+        existing = set(self.fields)
+        allowed = set(allowed)
+
+        # Remove fields from the serializer that aren't in the 'allowed' list.
+        for field_name in (existing - allowed):
+            self.fields.pop(field_name)
+
+        return self.fields
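A short usage sketch for the fields argument handled by DynamicBaseSerializer.__init__; it assumes some concrete serializer (here called ExampleSerializer) subclasses DynamicBaseSerializer, which this hunk does not show.

# Hypothetical: serialize only a subset of fields.
serializer = ExampleSerializer(
    items,
    many=True,
    fields=["id", "name", "identifier"],  # popped and applied in DynamicBaseSerializer.__init__
)
data = serializer.data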
107  apiserver/plane/app/serializers/cycle.py  Normal file
@@ -0,0 +1,107 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer
+from .issue import IssueStateSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectLiteSerializer
+from plane.db.models import Cycle, CycleIssue, CycleFavorite
+
+
+class CycleWriteSerializer(BaseSerializer):
+    def validate(self, data):
+        if (
+            data.get("start_date", None) is not None
+            and data.get("end_date", None) is not None
+            and data.get("start_date", None) > data.get("end_date", None)
+        ):
+            raise serializers.ValidationError("Start date cannot exceed end date")
+        return data
+
+    class Meta:
+        model = Cycle
+        fields = "__all__"
+
+
+class CycleSerializer(BaseSerializer):
+    owned_by = UserLiteSerializer(read_only=True)
+    is_favorite = serializers.BooleanField(read_only=True)
+    total_issues = serializers.IntegerField(read_only=True)
+    cancelled_issues = serializers.IntegerField(read_only=True)
+    completed_issues = serializers.IntegerField(read_only=True)
+    started_issues = serializers.IntegerField(read_only=True)
+    unstarted_issues = serializers.IntegerField(read_only=True)
+    backlog_issues = serializers.IntegerField(read_only=True)
+    assignees = serializers.SerializerMethodField(read_only=True)
+    total_estimates = serializers.IntegerField(read_only=True)
+    completed_estimates = serializers.IntegerField(read_only=True)
+    started_estimates = serializers.IntegerField(read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+
+    def validate(self, data): ...
+
+    def get_assignees(self, obj): ...
+
+    class Meta:
+        model = Cycle
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "owned_by",
+        ]
+
+
+class CycleIssueSerializer(BaseSerializer):
+    issue_detail = IssueStateSerializer(read_only=True, source="issue")
+    sub_issues_count = serializers.IntegerField(read_only=True)
+
+    class Meta:
+        model = CycleIssue
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "cycle",
+        ]
+
+
+class CycleFavoriteSerializer(BaseSerializer):
+    cycle_detail = CycleSerializer(source="cycle", read_only=True)
+
+    class Meta:
+        model = CycleFavorite
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "user",
+        ]
@@ -2,7 +2,7 @@
 from .base import BaseSerializer

 from plane.db.models import Estimate, EstimatePoint
-from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
+from plane.app.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer


 class EstimateSerializer(BaseSerializer):
57  apiserver/plane/app/serializers/inbox.py  Normal file
@@ -0,0 +1,57 @@
+# Third party frameworks
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .project import ProjectLiteSerializer
+from .state import StateLiteSerializer
+from .user import UserLiteSerializer
+from plane.db.models import Inbox, InboxIssue, Issue
+
+
+class InboxSerializer(BaseSerializer):
+    project_detail = ProjectLiteSerializer(source="project", read_only=True)
+    pending_issue_count = serializers.IntegerField(read_only=True)
+
+    class Meta:
+        model = Inbox
+        fields = "__all__"
+        read_only_fields = [
+            "project",
+            "workspace",
+        ]
+
+
+class InboxIssueSerializer(BaseSerializer):
+    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
+    project_detail = ProjectLiteSerializer(source="project", read_only=True)
+
+    class Meta:
+        model = InboxIssue
+        fields = "__all__"
+        read_only_fields = [
+            "project",
+            "workspace",
+        ]
+
+
+class InboxIssueLiteSerializer(BaseSerializer):
+    class Meta:
+        model = InboxIssue
+        fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
+        read_only_fields = fields
+
+
+class IssueStateInboxSerializer(BaseSerializer):
+    state_detail = StateLiteSerializer(read_only=True, source="state")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+    sub_issues_count = serializers.IntegerField(read_only=True)
+    bridge_id = serializers.UUIDField(read_only=True)
+    issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
+
+    class Meta:
+        model = Issue
+        fields = "__all__"
@@ -1,5 +1,5 @@
 # Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
 from plane.db.models import Integration, WorkspaceIntegration

@@ -1,5 +1,5 @@
 # Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
 from plane.db.models import (
     GithubIssueSync,
     GithubRepository,
@@ -1,5 +1,5 @@
 # Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
 from plane.db.models import SlackProjectSync

apiserver/plane/app/serializers/issue.py (new file, 616 lines)
@@ -0,0 +1,616 @@
# Django imports
from django.utils import timezone

# Third Party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
    User,
    Issue,
    IssueActivity,
    IssueComment,
    IssueProperty,
    IssueAssignee,
    IssueSubscriber,
    IssueLabel,
    Label,
    CycleIssue,
    Cycle,
    Module,
    ModuleIssue,
    IssueLink,
    IssueAttachment,
    IssueReaction,
    CommentReaction,
    IssueVote,
    IssueRelation,
)


class IssueFlatSerializer(BaseSerializer):
    ## Contain only flat fields

    class Meta:
        model = Issue
        fields = [
            "id",
            "name",
            "description",
            "description_html",
            "priority",
            "start_date",
            "target_date",
            "sequence_id",
            "sort_order",
            "is_draft",
        ]


class IssueProjectLiteSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(source="project", read_only=True)

    class Meta:
        model = Issue
        fields = [
            "id",
            "project_detail",
            "name",
            "sequence_id",
        ]
        read_only_fields = fields


##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer):
    state_detail = StateSerializer(read_only=True, source="state")
    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")

    assignees = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = Issue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
        data['labels'] = [str(label.id) for label in instance.labels.all()]
        return data

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

    def create(self, validated_data):
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        project_id = self.context["project_id"]
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        issue = Issue.objects.create(**validated_data, project_id=project_id)

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee=user,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )
        else:
            # Then assign it to default assignee
            if default_assignee_id is not None:
                IssueAssignee.objects.create(
                    assignee_id=default_assignee_id,
                    issue=issue,
                    project_id=project_id,
                    workspace_id=workspace_id,
                    created_by_id=created_by_id,
                    updated_by_id=updated_by_id,
                )

        if labels is not None and len(labels):
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label=label,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        return issue

    def update(self, instance, validated_data):
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        # Related models
        project_id = instance.project_id
        workspace_id = instance.workspace_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if assignees is not None:
            IssueAssignee.objects.filter(issue=instance).delete()
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee=user,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )

        if labels is not None:
            IssueLabel.objects.filter(issue=instance).delete()
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label=label,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        # The timestamp is updated even when only related models change
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)
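A hypothetical view-side call for the serializer above, shown only to make the expected context keys explicit; the placeholder ids are assumptions, not part of the diff.

# Hypothetical usage sketch: the context keys mirror the ones read in
# create() above; project_id, workspace_id and default_assignee_id are
# placeholders that a real view would resolve from the request.
serializer = IssueCreateSerializer(
    data={"name": "Fix login redirect"},
    context={
        "project_id": project_id,
        "workspace_id": workspace_id,
        "default_assignee_id": default_assignee_id,
    },
)
if serializer.is_valid():
    issue = serializer.save()  # with no assignees supplied, the default assignee (if any) is attached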


class IssueActivitySerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")

    class Meta:
        model = IssueActivity
        fields = "__all__"


class IssuePropertySerializer(BaseSerializer):
    class Meta:
        model = IssueProperty
        fields = "__all__"
        read_only_fields = [
            "user",
            "workspace",
            "project",
        ]


class LabelSerializer(BaseSerializer):
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)

    class Meta:
        model = Label
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
        ]


class LabelLiteSerializer(BaseSerializer):
    class Meta:
        model = Label
        fields = [
            "id",
            "name",
            "color",
        ]


class IssueLabelSerializer(BaseSerializer):

    class Meta:
        model = IssueLabel
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
        ]


class IssueRelationSerializer(BaseSerializer):
    issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")

    class Meta:
        model = IssueRelation
        fields = [
            "issue_detail",
            "relation_type",
            "related_issue",
            "issue",
            "id"
        ]
        read_only_fields = [
            "workspace",
            "project",
        ]


class RelatedIssueSerializer(BaseSerializer):
    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")

    class Meta:
        model = IssueRelation
        fields = [
            "issue_detail",
            "relation_type",
            "related_issue",
            "issue",
            "id"
        ]
        read_only_fields = [
            "workspace",
            "project",
        ]


class IssueAssigneeSerializer(BaseSerializer):
    assignee_details = UserLiteSerializer(read_only=True, source="assignee")

    class Meta:
        model = IssueAssignee
        fields = "__all__"


class CycleBaseSerializer(BaseSerializer):
    class Meta:
        model = Cycle
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueCycleDetailSerializer(BaseSerializer):
    cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")

    class Meta:
        model = CycleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class ModuleBaseSerializer(BaseSerializer):
    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueModuleDetailSerializer(BaseSerializer):
    module_detail = ModuleBaseSerializer(read_only=True, source="module")

    class Meta:
        model = ModuleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueLinkSerializer(BaseSerializer):
    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")

    class Meta:
        model = IssueLink
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "issue",
        ]

    # Validation if url already exists
    def create(self, validated_data):
        if IssueLink.objects.filter(
            url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
        ).exists():
            raise serializers.ValidationError(
                {"error": "URL already exists for this Issue"}
            )
        return IssueLink.objects.create(**validated_data)


class IssueAttachmentSerializer(BaseSerializer):
    class Meta:
        model = IssueAttachment
        fields = "__all__"
        read_only_fields = [
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "workspace",
            "project",
            "issue",
        ]


class IssueReactionSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = IssueReaction
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "actor",
        ]


class CommentReactionLiteSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = CommentReaction
        fields = [
            "id",
            "reaction",
            "comment",
            "actor_detail",
        ]


class CommentReactionSerializer(BaseSerializer):
    class Meta:
        model = CommentReaction
        fields = "__all__"
        read_only_fields = ["workspace", "project", "comment", "actor"]


class IssueVoteSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = IssueVote
        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
        read_only_fields = fields


class IssueCommentSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
    is_member = serializers.BooleanField(read_only=True)

    class Meta:
        model = IssueComment
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueStateFlatSerializer(BaseSerializer):
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Issue
        fields = [
            "id",
            "sequence_id",
            "name",
            "state_detail",
            "project_detail",
        ]


# Issue Serializer with state details
class IssueStateSerializer(DynamicBaseSerializer):
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    bridge_id = serializers.UUIDField(read_only=True)
    attachment_count = serializers.IntegerField(read_only=True)
    link_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = Issue
        fields = "__all__"


class IssueSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateSerializer(read_only=True, source="state")
    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
    label_details = LabelSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
    issue_cycle = IssueCycleDetailSerializer(read_only=True)
    issue_module = IssueModuleDetailSerializer(read_only=True)
    issue_link = IssueLinkSerializer(read_only=True, many=True)
    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    issue_reactions = IssueReactionSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueLiteSerializer(DynamicBaseSerializer):
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateLiteSerializer(read_only=True, source="state")
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    cycle_id = serializers.UUIDField(read_only=True)
    module_id = serializers.UUIDField(read_only=True)
    attachment_count = serializers.IntegerField(read_only=True)
    link_count = serializers.IntegerField(read_only=True)
    issue_reactions = IssueReactionSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
        fields = "__all__"
        read_only_fields = [
            "start_date",
            "target_date",
            "completed_at",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssuePublicSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateLiteSerializer(read_only=True, source="state")
    reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
    votes = IssueVoteSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
        fields = [
            "id",
            "name",
            "description_html",
            "sequence_id",
            "state",
            "state_detail",
            "project",
            "project_detail",
            "workspace",
            "priority",
            "target_date",
            "reactions",
            "votes",
        ]
        read_only_fields = fields


class IssueSubscriberSerializer(BaseSerializer):
    class Meta:
        model = IssueSubscriber
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
        ]
apiserver/plane/app/serializers/module.py (new file, 198 lines)
@@ -0,0 +1,198 @@
# Third Party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer

from plane.db.models import (
    User,
    Module,
    ModuleMember,
    ModuleIssue,
    ModuleLink,
    ModuleFavorite,
)


class ModuleWriteSerializer(BaseSerializer):
    members = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data['members'] = [str(member.id) for member in instance.members.all()]
        return data

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

    def create(self, validated_data):
        members = validated_data.pop("members", None)

        project = self.context["project"]

        module = Module.objects.create(**validated_data, project=project)

        if members is not None:
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=module,
                        member=member,
                        project=project,
                        workspace=project.workspace,
                        created_by=module.created_by,
                        updated_by=module.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                ignore_conflicts=True,
            )

        return module

    def update(self, instance, validated_data):
        members = validated_data.pop("members", None)

        if members is not None:
            ModuleMember.objects.filter(module=instance).delete()
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=instance,
                        member=member,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                ignore_conflicts=True,
            )

        return super().update(instance, validated_data)


class ModuleFlatSerializer(BaseSerializer):
    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class ModuleIssueSerializer(BaseSerializer):
    module_detail = ModuleFlatSerializer(read_only=True, source="module")
    issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = ModuleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]


class ModuleLinkSerializer(BaseSerializer):
    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")

    class Meta:
        model = ModuleLink
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]

    # Validation if url already exists
    def create(self, validated_data):
        if ModuleLink.objects.filter(
            url=validated_data.get("url"), module_id=validated_data.get("module_id")
        ).exists():
            raise serializers.ValidationError(
                {"error": "URL already exists for this Issue"}
            )
        return ModuleLink.objects.create(**validated_data)


class ModuleSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    lead_detail = UserLiteSerializer(read_only=True, source="lead")
    members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
    link_module = ModuleLinkSerializer(read_only=True, many=True)
    is_favorite = serializers.BooleanField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)

    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class ModuleFavoriteSerializer(BaseSerializer):
    module_detail = ModuleFlatSerializer(source="module", read_only=True)

    class Meta:
        model = ModuleFavorite
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "user",
        ]
@@ -6,28 +6,7 @@ from .base import BaseSerializer
 from .issue import IssueFlatSerializer, LabelLiteSerializer
 from .workspace import WorkspaceLiteSerializer
 from .project import ProjectLiteSerializer
-from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
+from plane.db.models import Page, PageLog, PageFavorite, PageLabel, Label, Issue, Module


-class PageBlockSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
-    class Meta:
-        model = PageBlock
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "page",
-        ]
-
-
-class PageBlockLiteSerializer(BaseSerializer):
-
-    class Meta:
-        model = PageBlock
-        fields = "__all__"
-
-
 class PageSerializer(BaseSerializer):
@@ -38,7 +17,6 @@ class PageSerializer(BaseSerializer):
         write_only=True,
         required=False,
     )
-    blocks = PageBlockLiteSerializer(read_only=True, many=True)
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
     workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

@@ -102,6 +80,41 @@ class PageSerializer(BaseSerializer):
         return super().update(instance, validated_data)


+class SubPageSerializer(BaseSerializer):
+    entity_details = serializers.SerializerMethodField()
+
+    class Meta:
+        model = PageLog
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "page",
+        ]
+
+    def get_entity_details(self, obj):
+        entity_name = obj.entity_name
+        if entity_name == 'forward_link' or entity_name == 'back_link':
+            try:
+                page = Page.objects.get(pk=obj.entity_identifier)
+                return PageSerializer(page).data
+            except Page.DoesNotExist:
+                return None
+        return None
+
+
+class PageLogSerializer(BaseSerializer):
+
+    class Meta:
+        model = PageLog
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "page",
+        ]
+
+
 class PageFavoriteSerializer(BaseSerializer):
     page_detail = PageSerializer(source="page", read_only=True)

apiserver/plane/app/serializers/project.py (new file, 220 lines)
@@ -0,0 +1,220 @@
# Third party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from plane.app.serializers.workspace import WorkspaceLiteSerializer
from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
    Project,
    ProjectMember,
    ProjectMemberInvite,
    ProjectIdentifier,
    ProjectFavorite,
    ProjectDeployBoard,
    ProjectPublicMember,
)


class ProjectSerializer(BaseSerializer):
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

    class Meta:
        model = Project
        fields = "__all__"
        read_only_fields = [
            "workspace",
        ]

    def create(self, validated_data):
        identifier = validated_data.get("identifier", "").strip().upper()
        if identifier == "":
            raise serializers.ValidationError(detail="Project Identifier is required")

        if ProjectIdentifier.objects.filter(
            name=identifier, workspace_id=self.context["workspace_id"]
        ).exists():
            raise serializers.ValidationError(detail="Project Identifier is taken")
        project = Project.objects.create(
            **validated_data, workspace_id=self.context["workspace_id"]
        )
        _ = ProjectIdentifier.objects.create(
            name=project.identifier,
            project=project,
            workspace_id=self.context["workspace_id"],
        )
        return project

    def update(self, instance, validated_data):
        identifier = validated_data.get("identifier", "").strip().upper()

        # If identifier is not passed update the project and return
        if identifier == "":
            project = super().update(instance, validated_data)
            return project

        # If no Project Identifier is found create it
        project_identifier = ProjectIdentifier.objects.filter(
            name=identifier, workspace_id=instance.workspace_id
        ).first()
        if project_identifier is None:
            project = super().update(instance, validated_data)
            project_identifier = ProjectIdentifier.objects.filter(
                project=project
            ).first()
            if project_identifier is not None:
                project_identifier.name = identifier
                project_identifier.save()
            return project
        # If found check if the project_id to be updated and identifier project id is same
        if project_identifier.project_id == instance.id:
            # If same pass update
            project = super().update(instance, validated_data)
            return project

        # If not same fail update
        raise serializers.ValidationError(detail="Project Identifier is already taken")
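A hypothetical usage sketch of the create path above; workspace_id is assumed to be supplied by the calling view and is not part of the diff.

# Hypothetical usage sketch: workspace_id is a placeholder the view would
# normally derive from the workspace slug in the URL.
serializer = ProjectSerializer(
    data={"name": "Docs", "identifier": "docs"},
    context={"workspace_id": workspace_id},
)
if serializer.is_valid():
    project = serializer.save()  # also records the matching ProjectIdentifier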


class ProjectLiteSerializer(BaseSerializer):
    class Meta:
        model = Project
        fields = [
            "id",
            "identifier",
            "name",
            "cover_image",
            "icon_prop",
            "emoji",
            "description",
        ]
        read_only_fields = fields


class ProjectListSerializer(DynamicBaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    is_deployed = serializers.BooleanField(read_only=True)
    members = serializers.SerializerMethodField()

    def get_members(self, obj):
        project_members = getattr(obj, "members_list", None)
        if project_members is not None:
            # Filter members by the project ID
            return [
                {
                    "id": member.id,
                    "member_id": member.member_id,
                    "member__display_name": member.member.display_name,
                    "member__avatar": member.member.avatar,
                }
                for member in project_members
            ]
        return []

    class Meta:
        model = Project
        fields = "__all__"


class ProjectDetailSerializer(BaseSerializer):
    # workspace = WorkSpaceSerializer(read_only=True)
    default_assignee = UserLiteSerializer(read_only=True)
    project_lead = UserLiteSerializer(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    is_deployed = serializers.BooleanField(read_only=True)

    class Meta:
        model = Project
        fields = "__all__"


class ProjectMemberSerializer(BaseSerializer):
    workspace = WorkspaceLiteSerializer(read_only=True)
    project = ProjectLiteSerializer(read_only=True)
    member = UserLiteSerializer(read_only=True)

    class Meta:
        model = ProjectMember
        fields = "__all__"


class ProjectMemberAdminSerializer(BaseSerializer):
    workspace = WorkspaceLiteSerializer(read_only=True)
    project = ProjectLiteSerializer(read_only=True)
    member = UserAdminLiteSerializer(read_only=True)

    class Meta:
        model = ProjectMember
        fields = "__all__"


class ProjectMemberInviteSerializer(BaseSerializer):
    project = ProjectLiteSerializer(read_only=True)
    workspace = WorkspaceLiteSerializer(read_only=True)

    class Meta:
        model = ProjectMemberInvite
        fields = "__all__"


class ProjectIdentifierSerializer(BaseSerializer):
    class Meta:
        model = ProjectIdentifier
        fields = "__all__"


class ProjectFavoriteSerializer(BaseSerializer):
    class Meta:
        model = ProjectFavorite
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "user",
        ]


class ProjectMemberLiteSerializer(BaseSerializer):
    member = UserLiteSerializer(read_only=True)
    is_subscribed = serializers.BooleanField(read_only=True)

    class Meta:
        model = ProjectMember
        fields = ["member", "id", "is_subscribed"]
        read_only_fields = fields


class ProjectDeployBoardSerializer(BaseSerializer):
    project_details = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")

    class Meta:
        model = ProjectDeployBoard
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "anchor",
        ]


class ProjectPublicMemberSerializer(BaseSerializer):
    class Meta:
        model = ProjectPublicMember
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "member",
        ]
apiserver/plane/app/serializers/state.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Module imports
from .base import BaseSerializer

from plane.db.models import State


class StateSerializer(BaseSerializer):

    class Meta:
        model = State
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
        ]


class StateLiteSerializer(BaseSerializer):
    class Meta:
        model = State
        fields = [
            "id",
            "name",
            "color",
            "group",
        ]
        read_only_fields = fields
apiserver/plane/app/serializers/user.py (new file, 193 lines)
@@ -0,0 +1,193 @@
# Third party imports
from rest_framework import serializers

# Module import
from .base import BaseSerializer
from plane.db.models import User, Workspace, WorkspaceMemberInvite
from plane.license.models import InstanceAdmin, Instance


class UserSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = "__all__"
        read_only_fields = [
            "id",
            "created_at",
            "updated_at",
            "is_superuser",
            "is_staff",
            "last_active",
            "last_login_time",
            "last_logout_time",
            "last_login_ip",
            "last_logout_ip",
            "last_login_uagent",
            "token_updated_at",
            "is_onboarded",
            "is_bot",
            "is_password_autoset",
            "is_email_verified",
        ]
        extra_kwargs = {"password": {"write_only": True}}

    # If the user has already filled in a first or last name, they are considered onboarded
    def get_is_onboarded(self, obj):
        return bool(obj.first_name) or bool(obj.last_name)


class UserMeSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = [
            "id",
            "avatar",
            "cover_image",
            "date_joined",
            "display_name",
            "email",
            "first_name",
            "last_name",
            "is_active",
            "is_bot",
            "is_email_verified",
            "is_managed",
            "is_onboarded",
            "is_tour_completed",
            "mobile_number",
            "role",
            "onboarding_step",
            "user_timezone",
            "username",
            "theme",
            "last_workspace_id",
            "use_case",
            "is_password_autoset",
            "is_email_verified",
        ]
        read_only_fields = fields


class UserMeSettingsSerializer(BaseSerializer):
    workspace = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = [
            "id",
            "email",
            "workspace",
        ]
        read_only_fields = fields

    def get_workspace(self, obj):
        workspace_invites = WorkspaceMemberInvite.objects.filter(
            email=obj.email
        ).count()
        if (
            obj.last_workspace_id is not None
            and Workspace.objects.filter(
                pk=obj.last_workspace_id,
                workspace_member__member=obj.id,
                workspace_member__is_active=True,
            ).exists()
        ):
            workspace = Workspace.objects.filter(
                pk=obj.last_workspace_id,
                workspace_member__member=obj.id,
                workspace_member__is_active=True,
            ).first()
            return {
                "last_workspace_id": obj.last_workspace_id,
                "last_workspace_slug": workspace.slug if workspace is not None else "",
                "fallback_workspace_id": obj.last_workspace_id,
                "fallback_workspace_slug": workspace.slug
                if workspace is not None
                else "",
                "invites": workspace_invites,
            }
        else:
            fallback_workspace = (
                Workspace.objects.filter(
                    workspace_member__member_id=obj.id, workspace_member__is_active=True
                )
                .order_by("created_at")
                .first()
            )
            return {
                "last_workspace_id": None,
                "last_workspace_slug": None,
                "fallback_workspace_id": fallback_workspace.id
                if fallback_workspace is not None
                else None,
                "fallback_workspace_slug": fallback_workspace.slug
                if fallback_workspace is not None
                else None,
                "invites": workspace_invites,
            }


class UserLiteSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = [
            "id",
            "first_name",
            "last_name",
            "avatar",
            "is_bot",
            "display_name",
        ]
        read_only_fields = [
            "id",
            "is_bot",
        ]


class UserAdminLiteSerializer(BaseSerializer):
    class Meta:
        model = User
        fields = [
            "id",
            "first_name",
            "last_name",
            "avatar",
            "is_bot",
            "display_name",
            "email",
        ]
        read_only_fields = [
            "id",
            "is_bot",
        ]


class ChangePasswordSerializer(serializers.Serializer):
    model = User

    """
    Serializer for password change endpoint.
    """
    old_password = serializers.CharField(required=True)
    new_password = serializers.CharField(required=True, min_length=8)
    confirm_password = serializers.CharField(required=True, min_length=8)

    def validate(self, data):
        if data.get("old_password") == data.get("new_password"):
            raise serializers.ValidationError(
                {"error": "New password cannot be same as old password."}
            )

        if data.get("new_password") != data.get("confirm_password"):
            raise serializers.ValidationError(
                {"error": "Confirm password should be same as the new password."}
            )

        return data
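A minimal sketch of driving the validation above from a view; the password strings are placeholders, not values from the diff.

# Minimal usage sketch; the literal passwords are placeholders.
serializer = ChangePasswordSerializer(
    data={
        "old_password": "old-secret",
        "new_password": "new-secret-123",
        "confirm_password": "new-secret-123",
    }
)
serializer.is_valid(raise_exception=True)  # raises if the checks above fail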


class ResetPasswordSerializer(serializers.Serializer):
    """
    Serializer for password change endpoint.
    """
    new_password = serializers.CharField(required=True, min_length=8)
apiserver/plane/app/serializers/webhook.py (new file, 106 lines)
@@ -0,0 +1,106 @@
# Python imports
import urllib
import socket
import ipaddress
from urllib.parse import urlparse

# Third party imports
from rest_framework import serializers

# Module imports
from .base import DynamicBaseSerializer
from plane.db.models import Webhook, WebhookLog
from plane.db.models.webhook import validate_domain, validate_schema


class WebhookSerializer(DynamicBaseSerializer):
    url = serializers.URLField(validators=[validate_schema, validate_domain])

    def create(self, validated_data):
        url = validated_data.get("url", None)

        # Extract the hostname from the URL
        hostname = urlparse(url).hostname
        if not hostname:
            raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})

        # Resolve the hostname to IP addresses
        try:
            ip_addresses = socket.getaddrinfo(hostname, None)
        except socket.gaierror:
            raise serializers.ValidationError({"url": "Hostname could not be resolved."})

        if not ip_addresses:
            raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})

        for addr in ip_addresses:
            ip = ipaddress.ip_address(addr[4][0])
            if ip.is_private or ip.is_loopback:
                raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})

        # Additional validation for multiple request domains and their subdomains
        request = self.context.get('request')
        disallowed_domains = ['plane.so',]  # Add your disallowed domains here
        if request:
            request_host = request.get_host().split(':')[0]  # Remove port if present
            disallowed_domains.append(request_host)

        # Check if hostname is a subdomain or exact match of any disallowed domain
        if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
            raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})

        return Webhook.objects.create(**validated_data)

    def update(self, instance, validated_data):
        url = validated_data.get("url", None)
        if url:
            # Extract the hostname from the URL
            hostname = urlparse(url).hostname
            if not hostname:
                raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})

            # Resolve the hostname to IP addresses
            try:
                ip_addresses = socket.getaddrinfo(hostname, None)
            except socket.gaierror:
                raise serializers.ValidationError({"url": "Hostname could not be resolved."})

            if not ip_addresses:
                raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})

            for addr in ip_addresses:
                ip = ipaddress.ip_address(addr[4][0])
                if ip.is_private or ip.is_loopback:
                    raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})

            # Additional validation for multiple request domains and their subdomains
            request = self.context.get('request')
            disallowed_domains = ['plane.so',]  # Add your disallowed domains here
            if request:
                request_host = request.get_host().split(':')[0]  # Remove port if present
                disallowed_domains.append(request_host)

            # Check if hostname is a subdomain or exact match of any disallowed domain
            if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
                raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})

        return super().update(instance, validated_data)

    class Meta:
        model = Webhook
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "secret_key",
        ]


class WebhookLogSerializer(DynamicBaseSerializer):

    class Meta:
        model = WebhookLog
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "webhook"
        ]
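The hostname screening in WebhookSerializer can be read as a small standalone check; the sketch below restates it outside the serializer, with the example hostname chosen only for illustration.

# Standalone restatement of the IP screening performed in
# WebhookSerializer.create()/update(); the hostname used here is an example only.
import socket
import ipaddress

def resolves_to_blocked_ip(hostname: str) -> bool:
    try:
        ip_addresses = socket.getaddrinfo(hostname, None)
    except socket.gaierror:
        return True  # unresolvable hostnames are treated as blocked
    return any(
        ipaddress.ip_address(addr[4][0]).is_private
        or ipaddress.ip_address(addr[4][0]).is_loopback
        for addr in ip_addresses
    )

print(resolves_to_blocked_ip("localhost"))  # True: resolves to a loopback address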
163  apiserver/plane/app/serializers/workspace.py  Normal file
@@ -0,0 +1,163 @@
# Third party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer, UserAdminLiteSerializer

from plane.db.models import (
    User,
    Workspace,
    WorkspaceMember,
    Team,
    TeamMember,
    WorkspaceMemberInvite,
    WorkspaceTheme,
)


class WorkSpaceSerializer(BaseSerializer):
    owner = UserLiteSerializer(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)

    def validated(self, data):
        if data.get("slug") in [
            "404",
            "accounts",
            "api",
            "create-workspace",
            "god-mode",
            "installations",
            "invitations",
            "onboarding",
            "profile",
            "spaces",
            "workspace-invitations",
            "password",
        ]:
            raise serializers.ValidationError({"slug": "Slug is not valid"})

    class Meta:
        model = Workspace
        fields = "__all__"
        read_only_fields = [
            "id",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "owner",
        ]


class WorkspaceLiteSerializer(BaseSerializer):
    class Meta:
        model = Workspace
        fields = [
            "name",
            "slug",
            "id",
        ]
        read_only_fields = fields


class WorkSpaceMemberSerializer(BaseSerializer):
    member = UserLiteSerializer(read_only=True)
    workspace = WorkspaceLiteSerializer(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"


class WorkspaceMemberMeSerializer(BaseSerializer):
    class Meta:
        model = WorkspaceMember
        fields = "__all__"


class WorkspaceMemberAdminSerializer(BaseSerializer):
    member = UserAdminLiteSerializer(read_only=True)
    workspace = WorkspaceLiteSerializer(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"


class WorkSpaceMemberInviteSerializer(BaseSerializer):
    workspace = WorkSpaceSerializer(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")

    class Meta:
        model = WorkspaceMemberInvite
        fields = "__all__"
        read_only_fields = [
            "id",
            "email",
            "token",
            "workspace",
            "message",
            "responded_at",
            "created_at",
            "updated_at",
        ]


class TeamSerializer(BaseSerializer):
    members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
    members = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = Team
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def create(self, validated_data, **kwargs):
        if "members" in validated_data:
            members = validated_data.pop("members")
            workspace = self.context["workspace"]
            team = Team.objects.create(**validated_data, workspace=workspace)
            team_members = [
                TeamMember(member=member, team=team, workspace=workspace)
                for member in members
            ]
            TeamMember.objects.bulk_create(team_members, batch_size=10)
            return team
        team = Team.objects.create(**validated_data)
        return team

    def update(self, instance, validated_data):
        if "members" in validated_data:
            members = validated_data.pop("members")
            TeamMember.objects.filter(team=instance).delete()
            team_members = [
                TeamMember(member=member, team=instance, workspace=instance.workspace)
                for member in members
            ]
            TeamMember.objects.bulk_create(team_members, batch_size=10)
            return super().update(instance, validated_data)
        return super().update(instance, validated_data)


class WorkspaceThemeSerializer(BaseSerializer):
    class Meta:
        model = WorkspaceTheme
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "actor",
        ]
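Note on WorkSpaceSerializer above: the reserved-slug check is defined as `validated`, but DRF only calls an object-level hook named `validate`, and that hook is expected to return the validated data. If the intent is to reject reserved slugs on write, the conventional form would look like this minimal sketch (a standalone illustration of the DRF hook, not the committed code; the class and the abridged slug set are hypothetical):

from rest_framework import serializers

RESERVED_SLUGS = {"404", "accounts", "api", "create-workspace"}  # abridged; full list in the diff above


class WorkspaceValidationSketch(serializers.Serializer):
    slug = serializers.CharField()

    def validate(self, data):
        # DRF invokes validate() (not validated()) and expects the data back.
        if data.get("slug") in RESERVED_SLUGS:
            raise serializers.ValidationError({"slug": "Slug is not valid"})
        return data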
48  apiserver/plane/app/urls/__init__.py  Normal file
@@ -0,0 +1,48 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .config import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
from .api import urlpatterns as api_urls
from .webhook import urlpatterns as webhook_urls


urlpatterns = [
    *analytic_urls,
    *asset_urls,
    *authentication_urls,
    *configuration_urls,
    *cycle_urls,
    *estimate_urls,
    *external_urls,
    *importer_urls,
    *inbox_urls,
    *integration_urls,
    *issue_urls,
    *module_urls,
    *notification_urls,
    *page_urls,
    *project_urls,
    *search_urls,
    *state_urls,
    *user_urls,
    *view_urls,
    *workspace_urls,
    *api_urls,
    *webhook_urls,
]
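The package-level urls/__init__.py simply re-exports each feature module's urlpatterns and splices them into one list, so the project URLconf only has to include a single module. Extending the pattern for a new feature would look like the sketch below (the dashboard module and its name are hypothetical, shown only to illustrate the convention):

# apiserver/plane/app/urls/__init__.py -- hypothetical addition
from .dashboard import urlpatterns as dashboard_urls  # hypothetical feature module

urlpatterns = [
    # ...existing *_urls entries from the file above...
    *dashboard_urls,
]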
apiserver/plane/app/urls/analytic.py
@@ -1,7 +1,7 @@
 from django.urls import path

-from plane.api.views import (
+from plane.app.views import (
     AnalyticsEndpoint,
     AnalyticViewViewset,
     SavedAnalyticEndpoint,
17  apiserver/plane/app/urls/api.py  Normal file
@@ -0,0 +1,17 @@
from django.urls import path
from plane.app.views import ApiTokenEndpoint


urlpatterns = [
    # API Tokens
    path(
        "workspaces/<str:slug>/api-tokens/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
    path(
        "workspaces/<str:slug>/api-tokens/<uuid:pk>/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
    ## End API Tokens
]
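Both api-token routes share the route name "api-tokens". Django allows duplicate names: reverse() tries the candidates and returns the first whose keyword arguments fit, so the list and detail URLs are distinguished purely by whether a pk is supplied. A usage sketch, assuming these patterns are reachable without a namespace inside a configured Django project (slug and pk values are illustrative):

from django.urls import reverse

list_url = reverse("api-tokens", kwargs={"slug": "acme"})
detail_url = reverse(
    "api-tokens",
    kwargs={"slug": "acme", "pk": "9b3f5c1e-2d4a-4f6b-8a7c-1e2d3f4a5b6c"},
)
# list_url   -> .../workspaces/acme/api-tokens/
# detail_url -> .../workspaces/acme/api-tokens/9b3f5c1e-2d4a-4f6b-8a7c-1e2d3f4a5b6c/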
apiserver/plane/app/urls/asset.py
@@ -1,9 +1,10 @@
 from django.urls import path

-from plane.api.views import (
+from plane.app.views import (
     FileAssetEndpoint,
     UserAssetsEndpoint,
+    FileAssetViewSet,
 )

@@ -28,4 +29,13 @@ urlpatterns = [
         UserAssetsEndpoint.as_view(),
         name="user-file-assets",
     ),
+    path(
+        "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/restore/",
+        FileAssetViewSet.as_view(
+            {
+                "post": "restore",
+            }
+        ),
+        name="file-assets-restore",
+    ),
 ]
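The new restore route binds POST to a restore action on FileAssetViewSet. In DRF, the dictionary passed to as_view() maps HTTP verbs to method names on the viewset, so the class is expected to define a method with that name and receive the URL kwargs by keyword. A minimal sketch of such a binding, under the assumption that the real FileAssetViewSet behaves along these lines (the class and return value here are hypothetical):

from rest_framework import viewsets
from rest_framework.response import Response


class ExampleAssetViewSet(viewsets.ViewSet):
    # Hypothetical action; the actual FileAssetViewSet lives in plane.app.views.
    def restore(self, request, workspace_id=None, asset_key=None):
        # The <uuid:workspace_id> and <str:asset_key> URL kwargs arrive here by name.
        return Response({"restored": asset_key})


# In urls.py:
# path(".../restore/", ExampleAssetViewSet.as_view({"post": "restore"}), name="...")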
apiserver/plane/app/urls/authentication.py
@@ -3,20 +3,18 @@
 from rest_framework_simplejwt.views import TokenRefreshView

-from plane.api.views import (
+from plane.app.views import (
     # Authentication
-    SignUpEndpoint,
     SignInEndpoint,
     SignOutEndpoint,
+    MagicGenerateEndpoint,
     MagicSignInEndpoint,
-    MagicSignInGenerateEndpoint,
     OauthEndpoint,
+    EmailCheckEndpoint,
     ## End Authentication
     # Auth Extended
     ForgotPasswordEndpoint,
-    VerifyEmailEndpoint,
     ResetPasswordEndpoint,
-    RequestEmailVerificationEndpoint,
     ChangePasswordEndpoint,
     ## End Auth Extender
     # API Tokens
@@ -27,24 +25,15 @@ from plane.api.views import (
 urlpatterns = [
     # Social Auth
+    path("email-check/", EmailCheckEndpoint.as_view(), name="email"),
     path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
     # Auth
-    path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
     path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
     path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
-    # Magic Sign In/Up
-    path(
-        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
-    ),
+    # magic sign in
+    path("magic-generate/", MagicGenerateEndpoint.as_view(), name="magic-generate"),
     path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
     path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
-    # Email verification
-    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
-    path(
-        "request-email-verify/",
-        RequestEmailVerificationEndpoint.as_view(),
-        name="request-reset-email",
-    ),
     # Password Manipulation
     path(
         "users/me/change-password/",
apiserver/plane/app/urls/config.py
@@ -1,7 +1,7 @@
 from django.urls import path

-from plane.api.views import ConfigurationEndpoint
+from plane.app.views import ConfigurationEndpoint

 urlpatterns = [
     path(
87  apiserver/plane/app/urls/cycle.py  Normal file
@@ -0,0 +1,87 @@
from django.urls import path


from plane.app.views import (
    CycleViewSet,
    CycleIssueViewSet,
    CycleDateCheckEndpoint,
    CycleFavoriteViewSet,
    TransferCycleIssueEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
        CycleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
        CycleDateCheckEndpoint.as_view(),
        name="project-cycle-date",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
        CycleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
        CycleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueEndpoint.as_view(),
        name="transfer-issues",
    ),
]
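One detail worth noting in cycle.py: the literal route "cycles/date-check/" is declared after the parameterised "cycles/<uuid:pk>/" route. Django matches patterns in declaration order, but this still resolves correctly because the <uuid:...> path converter only accepts canonical UUID strings, so "date-check" can never be captured by the detail route. A small standalone illustration of that converter behaviour (not Plane code; it only exercises Django's built-in UUID converter regex):

import re

from django.urls.converters import UUIDConverter

pattern = re.compile(UUIDConverter().regex)
print(bool(pattern.fullmatch("0f6b2b2a-9c1e-4f1d-8b1a-3c2d4e5f6a7b")))  # True  -> matches <uuid:pk>
print(bool(pattern.fullmatch("date-check")))                            # False -> falls through to the literal route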
apiserver/plane/app/urls/estimate.py
@@ -1,7 +1,7 @@
 from django.urls import path

-from plane.api.views import (
+from plane.app.views import (
     ProjectEstimatePointEndpoint,
     BulkEstimatePointEndpoint,
 )
apiserver/plane/app/urls/external.py
@@ -1,9 +1,9 @@
 from django.urls import path

-from plane.api.views import UnsplashEndpoint
-from plane.api.views import ReleaseNotesEndpoint
-from plane.api.views import GPTIntegrationEndpoint
+from plane.app.views import UnsplashEndpoint
+from plane.app.views import ReleaseNotesEndpoint
+from plane.app.views import GPTIntegrationEndpoint


 urlpatterns = [
apiserver/plane/app/urls/importer.py
@@ -1,7 +1,7 @@
 from django.urls import path

-from plane.api.views import (
+from plane.app.views import (
     ServiceIssueImportSummaryEndpoint,
     ImportServiceEndpoint,
     UpdateServiceImportStatusEndpoint,
53  apiserver/plane/app/urls/inbox.py  Normal file
@@ -0,0 +1,53 @@
from django.urls import path


from plane.app.views import (
    InboxViewSet,
    InboxIssueViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        InboxViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        InboxViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
]
apiserver/plane/app/urls/integration.py
@@ -1,7 +1,7 @@
 from django.urls import path

-from plane.api.views import (
+from plane.app.views import (
     IntegrationViewSet,
     WorkspaceIntegrationViewSet,
     GithubRepositoriesEndpoint,
315  apiserver/plane/app/urls/issue.py  Normal file
@@ -0,0 +1,315 @@
from django.urls import path


from plane.app.views import (
    IssueViewSet,
    LabelViewSet,
    BulkCreateIssueLabelsEndpoint,
    BulkDeleteIssuesEndpoint,
    BulkImportIssuesEndpoint,
    UserWorkSpaceIssues,
    SubIssuesEndpoint,
    IssueLinkViewSet,
    IssueAttachmentEndpoint,
    ExportIssuesEndpoint,
    IssueActivityEndpoint,
    IssueCommentViewSet,
    IssueSubscriberViewSet,
    IssueReactionViewSet,
    CommentReactionViewSet,
    IssueUserDisplayPropertyEndpoint,
    IssueArchiveViewSet,
    IssueRelationViewSet,
    IssueDraftViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
        LabelViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
        LabelViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
        BulkCreateIssueLabelsEndpoint.as_view(),
        name="project-bulk-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/my-issues/",
        UserWorkSpaceIssues.as_view(),
        name="workspace-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
        SubIssuesEndpoint.as_view(),
        name="sub-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
        IssueLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
        IssueLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),
        name="export-issues",
    ),
    ## End Issues
    ## Issue Activity
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
        IssueActivityEndpoint.as_view(),
        name="project-issue-history",
    ),
    ## Issue Activity
    ## IssueComments
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-comment",
    ),
    ## End IssueComments
    # Issue Subscribers
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
        IssueSubscriberViewSet.as_view({"delete": "destroy"}),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "subscription_status",
                "post": "subscribe",
                "delete": "unsubscribe",
            }
        ),
        name="project-issue-subscribers",
    ),
    ## End Issue Subscribers
    # Issue Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-reactions",
    ),
    ## End Issue Reactions
    # Comment Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
    ## IssueProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
        IssueArchiveViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "post": "unarchive",
            }
        ),
        name="project-issue-archive",
    ),
    ## End Issue Archives
    ## Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        IssueRelationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
        IssueRelationViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-relation",
    ),
    ## End Issue Relation
    ## Issue Drafts
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
        IssueDraftViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-draft",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
        IssueDraftViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-draft",
    ),
]
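The subscribe route above binds three HTTP verbs on a single URL to three custom viewset actions: GET to subscription_status, POST to subscribe, and DELETE to unsubscribe. From a client's point of view that means the same path toggles the subscription depending on the verb. A hedged client-side sketch using the requests library (host, URL prefix, authentication scheme, slug, and IDs are all hypothetical placeholders, not Plane's documented API surface):

import requests

BASE = "https://plane.example.com"  # hypothetical deployment; the real URL prefix may differ
HEADERS = {"Authorization": "Bearer <api-token>"}  # authentication shown schematically
issue = (
    f"{BASE}/workspaces/acme/projects/"
    "1f7a2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d/issues/"
    "9a8b7c6d-5e4f-3a2b-1c0d-9e8f7a6b5c4d/"
)

requests.get(issue + "subscribe/", headers=HEADERS)     # "get": "subscription_status"
requests.post(issue + "subscribe/", headers=HEADERS)    # "post": "subscribe"
requests.delete(issue + "subscribe/", headers=HEADERS)  # "delete": "unsubscribe"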
104  apiserver/plane/app/urls/module.py  Normal file
@@ -0,0 +1,104 @@
from django.urls import path


from plane.app.views import (
    ModuleViewSet,
    ModuleIssueViewSet,
    ModuleLinkViewSet,
    ModuleFavoriteViewSet,
    BulkImportModulesEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
        ModuleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
        ModuleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
        ModuleLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-module-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
        ModuleLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-module-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
        ModuleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
        ModuleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
        BulkImportModulesEndpoint.as_view(),
        name="bulk-modules-create",
    ),
]
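Like the favorite-cycle routes, the favorite-module DELETE route is keyed by <uuid:module_id> rather than by the favorite row's own pk, so the viewset's destroy action is expected to look the record up from that kwarg. A minimal sketch of such an action under that assumption (class, lookup, and model access are hypothetical; the real ModuleFavoriteViewSet lives in plane.app.views):

from rest_framework import status, viewsets
from rest_framework.response import Response


class ExampleFavoriteViewSet(viewsets.ViewSet):
    def destroy(self, request, slug=None, project_id=None, module_id=None):
        # Hypothetical lookup: delete the requesting user's favorite for this module, e.g.
        # Favorite.objects.filter(user=request.user, module_id=module_id).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)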
Some files were not shown because too many files have changed in this diff.