diff --git a/.deepsource.toml b/.deepsource.toml
deleted file mode 100644
index 2b40af672..000000000
--- a/.deepsource.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-version = 1
-
-exclude_patterns = [
- "bin/**",
- "**/node_modules/",
- "**/*.min.js"
-]
-
-[[analyzers]]
-name = "shell"
-
-[[analyzers]]
-name = "javascript"
-
- [analyzers.meta]
- plugins = ["react"]
- environment = ["nodejs"]
-
-[[analyzers]]
-name = "python"
-
- [analyzers.meta]
- runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml
new file mode 100644
index 000000000..ed3814532
--- /dev/null
+++ b/.github/workflows/auto-merge.yml
@@ -0,0 +1,84 @@
+name: Auto Merge or Create PR on Push
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - "sync/**"
+
+env:
+ CURRENT_BRANCH: ${{ github.ref_name }}
+ SOURCE_BRANCH: ${{ secrets.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce"
+  TARGET_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The target branch to merge changes into, such as "develop"
+ GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
+ REVIEWER: ${{ secrets.SYNC_PR_REVIEWER }}
+
+jobs:
+ Check_Branch:
+ runs-on: ubuntu-latest
+ outputs:
+ BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }}
+ steps:
+ - name: Check if current branch matches the secret
+ id: check-branch
+ run: |
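+          # Proceed with the sync flow only when the pushed branch matches the configured source branch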
+ if [ "$CURRENT_BRANCH" = "$SOURCE_BRANCH" ]; then
+ echo "MATCH=true" >> $GITHUB_OUTPUT
+ else
+ echo "MATCH=false" >> $GITHUB_OUTPUT
+ fi
+
+ Auto_Merge:
+ if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
+ needs: [Check_Branch]
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ contents: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4.1.1
+ with:
+ fetch-depth: 0 # Fetch all history for all branches and tags
+
+ - name: Setup Git
+ run: |
+ git config user.name "GitHub Actions"
+ git config user.email "actions@github.com"
+
+ - name: Setup GH CLI and Git Config
+ run: |
+ type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
+ curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+ sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
+ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
+ sudo apt update
+ sudo apt install gh -y
+
+ - name: Check for merge conflicts
+ id: conflicts
+ run: |
+ git fetch origin $TARGET_BRANCH
+ git checkout $TARGET_BRANCH
+          # Attempt to merge the source branch into the target branch
+          if git merge --no-commit --no-ff "$SOURCE_BRANCH"; then
+ echo "No merge conflicts detected."
+ echo "HAS_CONFLICTS=false" >> $GITHUB_ENV
+ else
+ echo "Merge conflicts detected."
+ echo "HAS_CONFLICTS=true" >> $GITHUB_ENV
+ git merge --abort
+ fi
+
+ - name: Merge Change to Target Branch
+ if: env.HAS_CONFLICTS == 'false'
+ run: |
+ git commit -m "Merge branch '$SOURCE_BRANCH' into $TARGET_BRANCH"
+ git push origin $TARGET_BRANCH
+
+ - name: Create PR to Target Branch
+ if: env.HAS_CONFLICTS == 'true'
+ run: |
+          # Open a PR from the source branch so the conflicts can be resolved manually; the reviewer comes from the SYNC_PR_REVIEWER secret
+ PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "" --reviewer $REVIEWER)
+ echo "Pull Request created: $PR_URL"
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
index 44bae0efa..306f92957 100644
--- a/.github/workflows/build-branch.yml
+++ b/.github/workflows/build-branch.yml
@@ -6,7 +6,6 @@ on:
branches:
- master
- preview
- - develop
release:
types: [released, prereleased]
@@ -18,7 +17,7 @@ jobs:
name: Build-Push Web/Space/API/Proxy Docker Image
runs-on: ubuntu-latest
outputs:
- gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+ gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
@@ -74,7 +73,7 @@ jobs:
- nginx/**
branch_build_push_frontend:
- if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -126,7 +125,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
- if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -178,7 +177,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_backend:
- if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -230,7 +229,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
- if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -280,4 +279,3 @@ jobs:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml
index 83ed41625..e0014f696 100644
--- a/.github/workflows/build-test-pull-request.yml
+++ b/.github/workflows/build-test-pull-request.yml
@@ -1,27 +1,19 @@
-name: Build Pull Request Contents
+name: Build and Lint on Pull Request
on:
+ workflow_dispatch:
pull_request:
types: ["opened", "synchronize"]
jobs:
- build-pull-request-contents:
- name: Build Pull Request Contents
- runs-on: ubuntu-20.04
- permissions:
- pull-requests: read
-
+ get-changed-files:
+ runs-on: ubuntu-latest
+ outputs:
+ apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
+ web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
+ space_changed: ${{ steps.changed-files.outputs.deploy_any_changed }}
steps:
- - name: Checkout Repository to Actions
- uses: actions/checkout@v3.3.0
- with:
- token: ${{ secrets.ACCESS_TOKEN }}
-
- - name: Setup Node.js 18.x
- uses: actions/setup-node@v2
- with:
- node-version: 18.x
-
+ - uses: actions/checkout@v3
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v41
@@ -31,17 +23,82 @@ jobs:
- apiserver/**
web:
- web/**
+ - packages/**
+ - 'package.json'
+ - 'yarn.lock'
+ - 'tsconfig.json'
+ - 'turbo.json'
deploy:
- space/**
+ - packages/**
+ - 'package.json'
+ - 'yarn.lock'
+ - 'tsconfig.json'
+ - 'turbo.json'
- - name: Build Plane's Main App
- if: steps.changed-files.outputs.web_any_changed == 'true'
- run: |
- yarn
- yarn build --filter=web
+ lint-apiserver:
+ needs: get-changed-files
+ runs-on: ubuntu-latest
+ if: needs.get-changed-files.outputs.apiserver_changed == 'true'
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.x' # Specify the Python version you need
+      - name: Install Ruff
+ run: python -m pip install ruff
+ - name: Install Apiserver Dependencies
+ run: cd apiserver && pip install -r requirements.txt
+ - name: Lint apiserver
+ run: ruff check --fix apiserver
- - name: Build Plane's Deploy App
- if: steps.changed-files.outputs.deploy_any_changed == 'true'
- run: |
- yarn
- yarn build --filter=space
+ lint-web:
+ needs: get-changed-files
+ if: needs.get-changed-files.outputs.web_changed == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup Node.js
+ uses: actions/setup-node@v2
+ with:
+ node-version: 18.x
+ - run: yarn install
+ - run: yarn lint --filter=web
+
+ lint-space:
+ needs: get-changed-files
+ if: needs.get-changed-files.outputs.space_changed == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup Node.js
+ uses: actions/setup-node@v2
+ with:
+ node-version: 18.x
+ - run: yarn install
+ - run: yarn lint --filter=space
+
+ build-web:
+ needs: lint-web
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup Node.js
+ uses: actions/setup-node@v2
+ with:
+ node-version: 18.x
+ - run: yarn install
+ - run: yarn build --filter=web
+
+ build-space:
+ needs: lint-space
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup Node.js
+ uses: actions/setup-node@v2
+ with:
+ node-version: 18.x
+ - run: yarn install
+ - run: yarn build --filter=space
diff --git a/.github/workflows/check-version.yml b/.github/workflows/check-version.yml
new file mode 100644
index 000000000..ca8b6f8b3
--- /dev/null
+++ b/.github/workflows/check-version.yml
@@ -0,0 +1,45 @@
+name: Version Change Before Release
+
+on:
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ check-version:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ github.head_ref }}
+ fetch-depth: 0
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '18'
+
+ - name: Get PR Branch version
+ run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
+
+ - name: Fetch base branch
+ run: git fetch origin master:master
+
+ - name: Get Master Branch version
+ run: |
+ git checkout master
+ echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
+
+      - name: Compare PR and master versions
+ run: |
+ echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
+ if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
+ echo "Version in PR branch is the same as in master. Failing the CI."
+ exit 1
+ else
+ echo "Version check passed. Versions are different."
+ fi
+ env:
+ PR_VERSION: ${{ env.PR_VERSION }}
+ MASTER_VERSION: ${{ env.MASTER_VERSION }}
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 9f6ab1bfb..d7b94d245 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,13 +1,13 @@
name: "CodeQL"
on:
+ workflow_dispatch:
push:
- branches: [ 'develop', 'preview', 'master' ]
+ branches: ["develop", "preview", "master"]
pull_request:
- # The branches below must be a subset of the branches above
- branches: [ 'develop', 'preview', 'master' ]
+ branches: ["develop", "preview", "master"]
schedule:
- - cron: '53 19 * * 5'
+ - cron: "53 19 * * 5"
jobs:
analyze:
@@ -21,45 +21,44 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'python', 'javascript' ]
+ language: ["python", "javascript"]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Use only 'java' to analyze code written in Java, Kotlin or both
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
+ - name: Checkout repository
+ uses: actions/checkout@v3
- # Initializes the CodeQL tools for scanning.
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v2
- with:
- languages: ${{ matrix.language }}
- # If you wish to specify custom queries, you can do so here or in a config file.
- # By default, queries listed here will override any specified in a config file.
- # Prefix the list here with "+" to use these queries and those in the config file.
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
- # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
- # queries: security-extended,security-and-quality
+ # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # queries: security-extended,security-and-quality
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v2
- # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
- # If this step fails, then you should remove it and run the build manually (see below)
- - name: Autobuild
- uses: github/codeql-action/autobuild@v2
+      # ℹ️ Command-line programs to run using the OS shell.
+      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
-    # ℹ️ Command-line programs to run using the OS shell.
-    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
+ # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
- # If the Autobuild fails above, remove it and uncomment the following three lines.
- # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+ # - run: |
+ # echo "Run, Build Application using script"
+ # ./location_of_script_within_repo/buildscript.sh
- # - run: |
- # echo "Run, Build Application using script"
- # ./location_of_script_within_repo/buildscript.sh
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
- with:
- category: "/language:${{matrix.language}}"
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/feature-deployment.yml b/.github/workflows/feature-deployment.yml
new file mode 100644
index 000000000..c5eec3cd3
--- /dev/null
+++ b/.github/workflows/feature-deployment.yml
@@ -0,0 +1,199 @@
+name: Feature Preview
+
+on:
+ workflow_dispatch:
+ inputs:
+ web-build:
+ required: false
+ description: 'Build Web'
+ type: boolean
+ default: true
+ space-build:
+ required: false
+ description: 'Build Space'
+ type: boolean
+ default: false
+
+env:
+ BUILD_WEB: ${{ github.event.inputs.web-build }}
+ BUILD_SPACE: ${{ github.event.inputs.space-build }}
+
+jobs:
+ setup-feature-build:
+ name: Feature Build Setup
+ runs-on: ubuntu-latest
+ steps:
+      - name: Print Build Inputs
+ run: |
+ echo "BUILD_WEB=$BUILD_WEB"
+ echo "BUILD_SPACE=$BUILD_SPACE"
+ outputs:
+      web-build: ${{ env.BUILD_WEB }}
+      space-build: ${{ env.BUILD_SPACE }}
+
+ feature-build-web:
+ if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
+ needs: setup-feature-build
+ name: Feature Build Web
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
+ AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
+ NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
+ steps:
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '18'
+ - name: Install AWS cli
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y python3-pip
+ pip3 install awscli
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ path: plane
+ - name: Install Dependencies
+ run: |
+ cd $GITHUB_WORKSPACE/plane
+ yarn install
+ - name: Build Web
+ id: build-web
+ run: |
+ cd $GITHUB_WORKSPACE/plane
+ yarn build --filter=web
+ cd $GITHUB_WORKSPACE
+
+ TAR_NAME="web.tar.gz"
+ tar -czf $TAR_NAME ./plane
+
+ FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
+ aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
+
+ feature-build-space:
+ if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
+ needs: setup-feature-build
+ name: Feature Build Space
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
+ AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
+ NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1
+ NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
+ outputs:
+ do-build: ${{ needs.setup-feature-build.outputs.space-build }}
+ s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
+ steps:
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '18'
+ - name: Install AWS cli
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y python3-pip
+ pip3 install awscli
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ path: plane
+ - name: Install Dependencies
+ run: |
+ cd $GITHUB_WORKSPACE/plane
+ yarn install
+ - name: Build Space
+ id: build-space
+ run: |
+ cd $GITHUB_WORKSPACE/plane
+ yarn build --filter=space
+ cd $GITHUB_WORKSPACE
+
+ TAR_NAME="space.tar.gz"
+ tar -czf $TAR_NAME ./plane
+
+ FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
+ aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
+
+ feature-deploy:
+ if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true') }}
+ needs: [feature-build-web, feature-build-space]
+ name: Feature Deploy
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
+ AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
+ KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
+ steps:
+ - name: Install AWS cli
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y python3-pip
+ pip3 install awscli
+ - name: Tailscale
+ uses: tailscale/github-action@v2
+ with:
+ oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
+ oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
+ tags: tag:ci
+ - name: Kubectl Setup
+ run: |
+ curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
+ chmod +x kubectl
+
+ mkdir -p ~/.kube
+ echo "$KUBE_CONFIG_FILE" > ~/.kube/config
+ chmod 600 ~/.kube/config
+ - name: HELM Setup
+ run: |
+ curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
+ chmod 700 get_helm.sh
+ ./get_helm.sh
+ - name: App Deploy
+ run: |
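+          # Generate short-lived presigned URLs for the uploaded build artifacts so the Helm release can pull them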
+ WEB_S3_URL=""
+ if [ ${{ env.BUILD_WEB }} == true ]; then
+ WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600)
+ fi
+
+ SPACE_S3_URL=""
+ if [ ${{ env.BUILD_SPACE }} == true ]; then
+ SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600)
+ fi
+
+ if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ]; then
+
+ helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
+
+ APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
+ DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"
+
+ METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
+ --generate-name \
+ --namespace $APP_NAMESPACE \
+ --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
+ --set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
+ --set web.enabled=${{ env.BUILD_WEB || false }} \
+ --set web.artifact_url=$WEB_S3_URL \
+ --set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
+ --set space.enabled=${{ env.BUILD_SPACE || false }} \
+ --set space.artifact_url=$SPACE_S3_URL \
+ --set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
+ --set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \
+ --output json \
+ --timeout 1000s)
+
+ APP_NAME=$(echo $METADATA | jq -r '.name')
+
+ INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
+ -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
+ jq -r '.spec.rules[0].host')
+
+ echo "****************************************"
+ echo "APP NAME ::: $APP_NAME"
+ echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
+ echo "****************************************"
+ fi
diff --git a/.gitignore b/.gitignore
index 0b655bd0e..3989f4356 100644
--- a/.gitignore
+++ b/.gitignore
@@ -51,6 +51,7 @@ staticfiles
mediafiles
.env
.DS_Store
+logs/
node_modules/
assets/dist/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 148568d76..f40c1a244 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -50,7 +50,6 @@ chmod +x setup.sh
docker compose -f docker-compose-local.yml up
```
-
## Missing a Feature?
If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "đ Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
index bfc300196..df05683ef 100644
--- a/ENV_SETUP.md
+++ b/ENV_SETUP.md
@@ -53,7 +53,6 @@ NGINX_PORT=80
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
```
-
## {PROJECT_FOLDER}/apiserver/.env
â
diff --git a/README.md b/README.md
index 52ccda474..ece8ff1e2 100644
--- a/README.md
+++ b/README.md
@@ -17,10 +17,10 @@
- Website •
- Releases •
- Twitter •
- Documentation
+ Website •
+ Releases •
+ Twitter •
+ Documentation
@@ -40,30 +40,28 @@
-Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
-
-> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
-
+Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
+> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
## ⥠Installation
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
-If you want more control over your data prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
+If you want more control over your data and prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
-| Installation Methods | Documentation Link |
-|-----------------|----------------------------------------------------------------------------------------------------------|
-| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/docker-compose) |
-| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
+| Installation Methods | Documentation Link |
+| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
+| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
-`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
+`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
## 🚀 Features
-- **Issues**: Quickly create issues and add details using a powerful, rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
+- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
-- **Cycles**
+- **Cycles**:
Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
@@ -74,15 +72,13 @@ If you want more control over your data prefer to self-host Plane, please refer
- **Analytics**: Get insights into all your Plane data in real-time. Visualize issue data to spot trends, remove blockers, and progress your work.
-- **Drive** (*coming soon*): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
+- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
-
-
-## 🛠️ Contributors Quick Start
+## 🛠️ Quick start for contributors
> Development system must have docker engine installed and running.
-Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
+Setting up your local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute -
1. Clone the code locally using:
```
@@ -101,10 +97,10 @@ Setting up local environment is extremely easy and straight forward. Follow the
./setup.sh
```
5. Open the code on VSCode or similar equivalent IDE.
-6. Review the `.env` files available in various folders.
+6. Review the `.env` files available in various folders.
Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system.
7. Run the docker command to initiate services:
- ```
+ ```
docker compose -f docker-compose-local.yml up -d
```
@@ -119,6 +115,7 @@ The Plane community can be found on [GitHub Discussions](https://github.com/orgs
Ask questions, report bugs, join discussions, voice ideas, make feature requests, or share your projects.
### Repo Activity
+
![Plane Repo Activity](https://repobeats.axiom.co/api/embed/2523c6ed2f77c082b7908c33e2ab208981d76c39.svg "Repobeats analytics image")
## 📸 Screenshots
@@ -181,20 +178,21 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
## ⛓️ Security
-If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
+If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
Email squawk@plane.so to disclose any security vulnerabilities.
## ❤️ Contribute
-There are many ways to contribute to Plane, including:
-- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
-- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
-- Speaking or writing about Plane or any other ecosystem integration and [letting us know](https://discord.com/invite/A92xrEGCge)!
-- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
+There are many ways to contribute to Plane, including:
+
+- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
+- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
+- Speaking or writing about Plane or any other ecosystem integration and [letting us know](https://discord.com/invite/A92xrEGCge)!
+- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
### We couldn't have done this without you.
-
\ No newline at end of file
+
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 42b0e32e5..d8554f400 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -14,10 +14,6 @@ POSTGRES_HOST="plane-db"
POSTGRES_DB="plane"
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}/${POSTGRES_DB}
-# Oauth variables
-GOOGLE_CLIENT_ID=""
-GITHUB_CLIENT_ID=""
-GITHUB_CLIENT_SECRET=""
# Redis Settings
REDIS_HOST="plane-redis"
@@ -34,11 +30,6 @@ AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
-# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
-OPENAI_API_KEY="sk-" # deprecated
-GPT_ENGINE="gpt-3.5-turbo" # deprecated
-
# Settings related to Docker
DOCKERIZED=1 # deprecated
@@ -48,19 +39,8 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-
-# SignUps
-ENABLE_SIGNUP="1"
-
-# Enable Email/Password Signup
-ENABLE_EMAIL_PASSWORD="1"
-
-# Enable Magic link Login
-ENABLE_MAGIC_LINK_LOGIN="0"
-
# Email redirections and minio domain settings
WEB_URL="http://localhost"
# Gunicorn Workers
GUNICORN_WORKERS=2
-
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 0e4e0ac50..34a50334a 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -48,8 +48,10 @@ USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
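+# Create the log directory ahead of time so the API server can write log files after switching to the non-root user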
+RUN mkdir -p /code/plane/logs
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
+RUN chown -R captain:plane /code
USER captain
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
index bd6684fd5..06f15231c 100644
--- a/apiserver/Dockerfile.dev
+++ b/apiserver/Dockerfile.dev
@@ -35,6 +35,7 @@ RUN addgroup -S plane && \
COPY . .
+RUN mkdir -p /code/plane/logs
RUN chown -R captain.plane /code
RUN chmod -R +x /code/bin
RUN chmod -R 777 /code
diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py
index a0e45416a..328b9db2b 100644
--- a/apiserver/back_migration.py
+++ b/apiserver/back_migration.py
@@ -182,7 +182,7 @@ def update_label_color():
labels = Label.objects.filter(color="")
updated_labels = []
for label in labels:
- label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
+        label.color = f"#{random.randint(0, 0xFFFFFF):06X}"
updated_labels.append(label)
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index efea53f87..5a1da1570 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -21,11 +21,15 @@ SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256
export MACHINE_SIGNATURE=$SIGNATURE
# Register instance
-python manage.py register_instance $MACHINE_SIGNATURE
+python manage.py register_instance "$MACHINE_SIGNATURE"
+
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
-exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+# Clear Cache before starting to remove stale values
+python manage.py clear_cache
+
+exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/takeoff.local b/apiserver/bin/takeoff.local
index 8f62370ec..3194009b2 100755
--- a/apiserver/bin/takeoff.local
+++ b/apiserver/bin/takeoff.local
@@ -21,12 +21,15 @@ SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256
export MACHINE_SIGNATURE=$SIGNATURE
# Register instance
-python manage.py register_instance $MACHINE_SIGNATURE
+python manage.py register_instance "$MACHINE_SIGNATURE"
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
+# Clear Cache before starting to remove stale values
+python manage.py clear_cache
+
python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
diff --git a/apiserver/package.json b/apiserver/package.json
index 060944406..2474aa2f2 100644
--- a/apiserver/package.json
+++ b/apiserver/package.json
@@ -1,4 +1,4 @@
{
"name": "plane-api",
- "version": "0.16.0"
+ "version": "0.17.0"
}
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 4c8d6e815..c78b109ef 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -1,31 +1,33 @@
-from lxml import html
-
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
# Django imports
from django.utils import timezone
+from lxml import html
# Third party imports
from rest_framework import serializers
# Module imports
from plane.db.models import (
- User,
Issue,
- State,
+ IssueActivity,
IssueAssignee,
- Label,
+ IssueAttachment,
+ IssueComment,
IssueLabel,
IssueLink,
- IssueComment,
- IssueAttachment,
- IssueActivity,
+ Label,
ProjectMember,
+ State,
+ User,
)
+
from .base import BaseSerializer
-from .cycle import CycleSerializer, CycleLiteSerializer
-from .module import ModuleSerializer, ModuleLiteSerializer
-from .user import UserLiteSerializer
+from .cycle import CycleLiteSerializer, CycleSerializer
+from .module import ModuleLiteSerializer, ModuleSerializer
from .state import StateLiteSerializer
+from .user import UserLiteSerializer
class IssueSerializer(BaseSerializer):
@@ -78,7 +80,7 @@ class IssueSerializer(BaseSerializer):
data["description_html"] = parsed_str
except Exception as e:
- raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+ raise serializers.ValidationError("Invalid HTML passed")
# Validate assignees are from project
if data.get("assignees", []):
@@ -284,6 +286,20 @@ class IssueLinkSerializer(BaseSerializer):
"updated_at",
]
+ def validate_url(self, value):
+ # Check URL format
+ validate_url = URLValidator()
+ try:
+ validate_url(value)
+ except ValidationError:
+ raise serializers.ValidationError("Invalid URL format.")
+
+ # Check URL scheme
+ if not value.startswith(("http://", "https://")):
+ raise serializers.ValidationError("Invalid URL scheme.")
+
+ return value
+
# Validation if url already exists
def create(self, validated_data):
if IssueLink.objects.filter(
@@ -295,6 +311,17 @@ class IssueLinkSerializer(BaseSerializer):
)
return IssueLink.objects.create(**validated_data)
+ def update(self, instance, validated_data):
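+        # Reject updates that would point this link at a URL already attached to the issue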
+ if IssueLink.objects.filter(
+ url=validated_data.get("url"),
+ issue_id=instance.issue_id,
+ ).exists():
+ raise serializers.ValidationError(
+ {"error": "URL already exists for this Issue"}
+ )
+
+ return super().update(instance, validated_data)
+
class IssueAttachmentSerializer(BaseSerializer):
class Meta:
@@ -340,7 +367,7 @@ class IssueCommentSerializer(BaseSerializer):
data["comment_html"] = parsed_str
except Exception as e:
- raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+ raise serializers.ValidationError("Invalid HTML passed")
return data
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 342cc1a81..9dd4c9b85 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -6,8 +6,6 @@ from plane.db.models import (
Project,
ProjectIdentifier,
WorkspaceMember,
- State,
- Estimate,
)
from .base import BaseSerializer
diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py
index 593e501bf..0a775454b 100644
--- a/apiserver/plane/api/urls/cycle.py
+++ b/apiserver/plane/api/urls/cycle.py
@@ -4,6 +4,7 @@ from plane.api.views.cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
@@ -32,4 +33,14 @@ urlpatterns = [
TransferCycleIssueAPIEndpoint.as_view(),
name="transfer-issues",
),
+ path(
+ "workspaces//projects//cycles//archive/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py
index 4309f44e9..a131f4d4f 100644
--- a/apiserver/plane/api/urls/module.py
+++ b/apiserver/plane/api/urls/module.py
@@ -1,6 +1,10 @@
from django.urls import path
-from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from plane.api.views import (
+ ModuleAPIEndpoint,
+ ModuleIssueAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
urlpatterns = [
path(
@@ -23,4 +27,14 @@ urlpatterns = [
ModuleIssueAPIEndpoint.as_view(),
name="module-issues",
),
+ path(
+ "workspaces//projects//modules//archive/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-modules/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py
index 1ed450c86..490371cca 100644
--- a/apiserver/plane/api/urls/project.py
+++ b/apiserver/plane/api/urls/project.py
@@ -1,6 +1,9 @@
from django.urls import path
-from plane.api.views import ProjectAPIEndpoint
+from plane.api.views import (
+ ProjectAPIEndpoint,
+ ProjectArchiveUnarchiveAPIEndpoint,
+)
urlpatterns = [
path(
@@ -13,4 +16,9 @@ urlpatterns = [
ProjectAPIEndpoint.as_view(),
name="project",
),
+ path(
+ "workspaces//projects//archive/",
+ ProjectArchiveUnarchiveAPIEndpoint.as_view(),
+ name="project-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 0da79566f..574ec69b6 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -1,4 +1,4 @@
-from .project import ProjectAPIEndpoint
+from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
from .state import StateAPIEndpoint
@@ -14,8 +14,13 @@ from .cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
)
-from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from .module import (
+ ModuleAPIEndpoint,
+ ModuleIssueAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
from .inbox import InboxIssueAPIEndpoint
diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py
index edb89f9b1..0cf5e8731 100644
--- a/apiserver/plane/api/views/base.py
+++ b/apiserver/plane/api/views/base.py
@@ -1,27 +1,26 @@
# Python imports
-import zoneinfo
-import json
from urllib.parse import urlparse
+import zoneinfo
# Django imports
from django.conf import settings
-from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
from django.utils import timezone
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
# Third party imports
from rest_framework.views import APIView
-from rest_framework.response import Response
-from rest_framework.permissions import IsAuthenticated
-from rest_framework import status
-from sentry_sdk import capture_exception
# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle
-from plane.utils.paginator import BasePaginator
from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator
class TimezoneMixin:
@@ -107,27 +106,23 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
if isinstance(e, ValidationError):
return Response(
- {
- "error": "The provided payload is not valid please try with a valid payload"
- },
+ {"error": "Please provide valid detail"},
status=status.HTTP_400_BAD_REQUEST,
)
if isinstance(e, ObjectDoesNotExist):
return Response(
- {"error": f"The required object does not exist."},
+ {"error": "The requested resource does not exist."},
status=status.HTTP_404_NOT_FOUND,
)
if isinstance(e, KeyError):
return Response(
- {"error": f" The required key does not exist."},
+ {"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index 84931f46b..637d713c3 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -2,7 +2,7 @@
import json
# Django imports
-from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
+from django.db.models import Q, Count, Sum, F, OuterRef, Func
from django.utils import timezone
from django.core import serializers
@@ -140,7 +140,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = (
+ self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ )
data = CycleSerializer(
queryset,
fields=self.fields,
@@ -150,7 +152,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
data,
status=status.HTTP_200_OK,
)
- queryset = self.get_queryset()
+ queryset = (
+ self.get_queryset().filter(archived_at__isnull=True)
+ )
cycle_view = request.GET.get("cycle_view", "all")
# Current Cycle
@@ -291,6 +295,11 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
+ if cycle.archived_at:
+ return Response(
+ {"error": "Archived cycle cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
request_data = request.data
@@ -321,7 +330,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
and Cycle.objects.filter(
project_id=project_id,
workspace__slug=slug,
- external_source=request.data.get("external_source", cycle.external_source),
+ external_source=request.data.get(
+ "external_source", cycle.external_source
+ ),
external_id=request.data.get("external_id"),
).exists()
):
@@ -366,6 +377,139 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
+class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
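+    """
+    List archived cycles (GET), archive a cycle (POST), and unarchive it
+    (DELETE) by setting or clearing `archived_at`.
+    """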
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ total_estimates=Sum("issue_cycle__issue__estimate_point")
+ )
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ def get(self, request, slug, project_id):
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, pk):
+ cycle = Cycle.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ cycle.archived_at = timezone.now()
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id, pk):
+ cycle = Cycle.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ cycle.archived_at = None
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
"""
This viewset automatically provides `list`, `create`,
diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py
index c1079345a..fb36ea2a9 100644
--- a/apiserver/plane/api/views/inbox.py
+++ b/apiserver/plane/api/views/inbox.py
@@ -119,7 +119,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
)
# Check for valid priority
- if not request.data.get("issue", {}).get("priority", "none") in [
+ if request.data.get("issue", {}).get("priority", "none") not in [
"low",
"medium",
"high",
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index bf3313779..4b59dc020 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -1,22 +1,22 @@
# Python imports
import json
-from itertools import chain
+
+from django.core.serializers.json import DjangoJSONEncoder
# Django imports
from django.db import IntegrityError
from django.db.models import (
- OuterRef,
- Func,
- Q,
- F,
Case,
- When,
- Value,
CharField,
- Max,
Exists,
+ F,
+ Func,
+ Max,
+ OuterRef,
+ Q,
+ Value,
+ When,
)
-from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
# Third party imports
@@ -24,30 +24,31 @@ from rest_framework import status
from rest_framework.response import Response
# Module imports
-from .base import BaseAPIView, WebhookMixin
-from plane.app.permissions import (
- ProjectEntityPermission,
- ProjectMemberPermission,
- ProjectLitePermission,
-)
-from plane.db.models import (
- Issue,
- IssueAttachment,
- IssueLink,
- Project,
- Label,
- ProjectMember,
- IssueComment,
- IssueActivity,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
from plane.api.serializers import (
+ IssueActivitySerializer,
+ IssueCommentSerializer,
+ IssueLinkSerializer,
IssueSerializer,
LabelSerializer,
- IssueLinkSerializer,
- IssueCommentSerializer,
- IssueActivitySerializer,
)
+from plane.app.permissions import (
+ ProjectEntityPermission,
+ ProjectLitePermission,
+ ProjectMemberPermission,
+)
+from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+ Issue,
+ IssueActivity,
+ IssueAttachment,
+ IssueComment,
+ IssueLink,
+ Label,
+ Project,
+ ProjectMember,
+)
+
+from .base import BaseAPIView, WebhookMixin
class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -356,6 +357,7 @@ class LabelAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("parent")
@@ -488,6 +490,7 @@ class IssueLinkAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
@@ -617,6 +620,7 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("workspace", "project", "issue", "actor")
.annotate(
is_member=Exists(
@@ -653,7 +657,6 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
)
def post(self, request, slug, project_id, issue_id):
-
# Validation check if the issue already exists
if (
request.data.get("external_id")
@@ -679,7 +682,6 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
status=status.HTTP_409_CONFLICT,
)
-
serializer = IssueCommentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
@@ -717,7 +719,10 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
# Validation check if the issue already exists
if (
request.data.get("external_id")
- and (issue_comment.external_id != str(request.data.get("external_id")))
+ and (
+ issue_comment.external_id
+ != str(request.data.get("external_id"))
+ )
and IssueComment.objects.filter(
project_id=project_id,
workspace__slug=slug,
@@ -735,7 +740,6 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
status=status.HTTP_409_CONFLICT,
)
-
serializer = IssueCommentSerializer(
issue_comment, data=request.data, partial=True
)
@@ -792,6 +796,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 2e5bb85e2..643221dca 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -67,6 +67,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
),
)
.annotate(
@@ -77,6 +78,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -87,6 +89,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -97,6 +100,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -107,6 +111,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -117,6 +122,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.order_by(self.kwargs.get("order_by", "-created_at"))
@@ -165,6 +171,11 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
module = Module.objects.get(
pk=pk, project_id=project_id, workspace__slug=slug
)
+ if module.archived_at:
+ return Response(
+ {"error": "Archived module cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
serializer = ModuleSerializer(
module,
data=request.data,
@@ -178,7 +189,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
and Module.objects.filter(
project_id=project_id,
workspace__slug=slug,
- external_source=request.data.get("external_source", module.external_source),
+ external_source=request.data.get(
+ "external_source", module.external_source
+ ),
external_id=request.data.get("external_id"),
).exists()
):
@@ -195,7 +208,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = (
+ self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ )
data = ModuleSerializer(
queryset,
fields=self.fields,
@@ -207,7 +222,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
)
return self.paginate(
request=request,
- queryset=(self.get_queryset()),
+ queryset=(self.get_queryset().filter(archived_at__isnull=True)),
on_results=lambda modules: ModuleSerializer(
modules,
many=True,
@@ -277,6 +292,7 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("module")
@@ -444,3 +460,123 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
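+    """
+    List archived modules (GET), archive a module (POST), and unarchive it
+    (DELETE) by setting or clearing `archived_at`.
+    """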
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ return (
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related(
+ "module", "created_by"
+ ),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ def get(self, request, slug, project_id):
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda modules: ModuleSerializer(
+ modules,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, pk):
+ module = Module.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ module.archived_at = timezone.now()
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id, pk):
+ module = Module.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ module.archived_at = None
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
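The distinct=True added to each filtered Count above is what keeps the per-state totals correct: the annotations all join through issue_module, and without distinct the multiplied join rows inflate every count.

A minimal usage sketch of the new ModuleArchiveUnarchiveAPIEndpoint (illustration only, not part of the patch). Its API route is not shown in this diff, so the paths below assume it mirrors the app-level ".../archive/" and ".../archived-modules/" routes registered further down; the host, auth header, and IDs are placeholders.

import requests

BASE = "https://plane.example.com/api/v1"   # assumed host and prefix
HEADERS = {"X-API-Key": "<api-token>"}      # assumed auth header name
slug, project_id, module_id = "my-workspace", "<project-uuid>", "<module-uuid>"

archive_url = f"{BASE}/workspaces/{slug}/projects/{project_id}/modules/{module_id}/archive/"

# POST stamps archived_at = timezone.now() on the module and returns 204
requests.post(archive_url, headers=HEADERS)

# GET lists only modules whose archived_at is set (paginated by the endpoint)
requests.get(
    f"{BASE}/workspaces/{slug}/projects/{project_id}/archived-modules/",
    headers=HEADERS,
)

# DELETE clears archived_at (unarchive) and returns 204
requests.delete(archive_url, headers=HEADERS)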
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index cb1f7dc7b..e0bce5514 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -1,4 +1,5 @@
# Django imports
+from django.utils import timezone
from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
@@ -11,7 +12,6 @@ from rest_framework.serializers import ValidationError
from plane.db.models import (
Workspace,
Project,
- ProjectFavorite,
ProjectMember,
ProjectDeployBoard,
State,
@@ -40,7 +40,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
return (
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(
- Q(project_projectmember__member=self.request.user)
+ Q(
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
+ )
| Q(network=2)
)
.select_related(
@@ -150,7 +153,7 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
serializer.save()
# Add the user as Administrator to the project
- project_member = ProjectMember.objects.create(
+ _ = ProjectMember.objects.create(
project_id=serializer.data["id"],
member=request.user,
role=20,
@@ -245,12 +248,12 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- except Workspace.DoesNotExist as e:
+ except Workspace.DoesNotExist:
return Response(
{"error": "Workspace does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
- except ValidationError as e:
+ except ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
@@ -261,6 +264,12 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
+ if project.archived_at:
+ return Response(
+ {"error": "Archived project cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
serializer = ProjectSerializer(
project,
data={**request.data},
@@ -307,7 +316,7 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
{"error": "Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
- except ValidationError as e:
+ except ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
@@ -317,3 +326,22 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
project.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectBasePermission,
+ ]
+
+ def post(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = timezone.now()
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = None
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
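A sketch of what project archiving means for the rest of this patch (illustration only, model names as used above): ProjectArchiveUnarchiveAPIEndpoint only toggles archived_at, the PATCH handler above rejects edits on archived projects, and dependent querysets such as states and module issues now exclude archived projects via project__archived_at__isnull=True.

# Django shell-style sketch, assuming an existing project row.
from django.utils import timezone
from plane.db.models import Project, State

project = Project.objects.get(pk="<project-uuid>", workspace__slug="my-workspace")

project.archived_at = timezone.now()    # what POST .../projects/<project_id>/archive/ does
project.save()

State.objects.filter(
    workspace__slug="my-workspace",
    project_id=project.id,
    project__archived_at__isnull=True,  # filter added in state.py below
).exists()                              # False while the project is archived

project.archived_at = None              # what DELETE .../projects/<project_id>/archive/ does
project.save()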
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index ec10f9bab..4ee899831 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -28,6 +28,7 @@ class StateAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")
@@ -66,8 +67,10 @@ class StateAPIEndpoint(BaseAPIView):
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
+ return Response(
+ serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+ except IntegrityError:
state = State.objects.filter(
workspace__slug=slug,
project_id=project_id,
@@ -136,7 +139,9 @@ class StateAPIEndpoint(BaseAPIView):
and State.objects.filter(
project_id=project_id,
workspace__slug=slug,
- external_source=request.data.get("external_source", state.external_source),
+ external_source=request.data.get(
+ "external_source", state.external_source
+ ),
external_id=request.data.get("external_id"),
).exists()
):
diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py
index 9bdd4baaf..22673dabc 100644
--- a/apiserver/plane/app/serializers/__init__.py
+++ b/apiserver/plane/app/serializers/__init__.py
@@ -86,16 +86,6 @@ from .module import (
from .api import APITokenSerializer, APITokenReadSerializer
-from .integration import (
- IntegrationSerializer,
- WorkspaceIntegrationSerializer,
- GithubIssueSyncSerializer,
- GithubRepositorySerializer,
- GithubRepositorySyncSerializer,
- GithubCommentSyncSerializer,
- SlackProjectSyncSerializer,
-)
-
from .importer import ImporterSerializer
from .page import (
@@ -121,7 +111,10 @@ from .inbox import (
from .analytic import AnalyticViewSerializer
-from .notification import NotificationSerializer, UserNotificationPreferenceSerializer
+from .notification import (
+ NotificationSerializer,
+ UserNotificationPreferenceSerializer,
+)
from .exporter import ExporterHistorySerializer
diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py
index a273b349c..13d321780 100644
--- a/apiserver/plane/app/serializers/cycle.py
+++ b/apiserver/plane/app/serializers/cycle.py
@@ -11,6 +11,7 @@ from plane.db.models import (
CycleUserProperties,
)
+
class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
if (
@@ -30,6 +31,7 @@ class CycleWriteSerializer(BaseSerializer):
"workspace",
"project",
"owned_by",
+ "archived_at",
]
@@ -47,7 +49,6 @@ class CycleSerializer(BaseSerializer):
# active | draft | upcoming | completed
status = serializers.CharField(read_only=True)
-
class Meta:
model = Cycle
fields = [
diff --git a/apiserver/plane/app/serializers/dashboard.py b/apiserver/plane/app/serializers/dashboard.py
index 8fca3c906..b0ed8841b 100644
--- a/apiserver/plane/app/serializers/dashboard.py
+++ b/apiserver/plane/app/serializers/dashboard.py
@@ -18,9 +18,4 @@ class WidgetSerializer(BaseSerializer):
class Meta:
model = Widget
- fields = [
- "id",
- "key",
- "is_visible",
- "widget_filters"
- ]
\ No newline at end of file
+ fields = ["id", "key", "is_visible", "widget_filters"]
diff --git a/apiserver/plane/app/serializers/estimate.py b/apiserver/plane/app/serializers/estimate.py
index 675390080..d28f38c75 100644
--- a/apiserver/plane/app/serializers/estimate.py
+++ b/apiserver/plane/app/serializers/estimate.py
@@ -74,5 +74,3 @@ class WorkspaceEstimateSerializer(BaseSerializer):
"name",
"description",
]
-
-
diff --git a/apiserver/plane/app/serializers/integration/__init__.py b/apiserver/plane/app/serializers/integration/__init__.py
deleted file mode 100644
index 112ff02d1..000000000
--- a/apiserver/plane/app/serializers/integration/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
-from .github import (
- GithubRepositorySerializer,
- GithubRepositorySyncSerializer,
- GithubIssueSyncSerializer,
- GithubCommentSyncSerializer,
-)
-from .slack import SlackProjectSyncSerializer
diff --git a/apiserver/plane/app/serializers/integration/base.py b/apiserver/plane/app/serializers/integration/base.py
deleted file mode 100644
index 01e484ed0..000000000
--- a/apiserver/plane/app/serializers/integration/base.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import Integration, WorkspaceIntegration
-
-
-class IntegrationSerializer(BaseSerializer):
- class Meta:
- model = Integration
- fields = "__all__"
- read_only_fields = [
- "verified",
- ]
-
-
-class WorkspaceIntegrationSerializer(BaseSerializer):
- integration_detail = IntegrationSerializer(
- read_only=True, source="integration"
- )
-
- class Meta:
- model = WorkspaceIntegration
- fields = "__all__"
diff --git a/apiserver/plane/app/serializers/integration/github.py b/apiserver/plane/app/serializers/integration/github.py
deleted file mode 100644
index 850bccf1b..000000000
--- a/apiserver/plane/app/serializers/integration/github.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import (
- GithubIssueSync,
- GithubRepository,
- GithubRepositorySync,
- GithubCommentSync,
-)
-
-
-class GithubRepositorySerializer(BaseSerializer):
- class Meta:
- model = GithubRepository
- fields = "__all__"
-
-
-class GithubRepositorySyncSerializer(BaseSerializer):
- repo_detail = GithubRepositorySerializer(source="repository")
-
- class Meta:
- model = GithubRepositorySync
- fields = "__all__"
-
-
-class GithubIssueSyncSerializer(BaseSerializer):
- class Meta:
- model = GithubIssueSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "repository_sync",
- ]
-
-
-class GithubCommentSyncSerializer(BaseSerializer):
- class Meta:
- model = GithubCommentSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "repository_sync",
- "issue_sync",
- ]
diff --git a/apiserver/plane/app/serializers/integration/slack.py b/apiserver/plane/app/serializers/integration/slack.py
deleted file mode 100644
index 9c461c5b9..000000000
--- a/apiserver/plane/app/serializers/integration/slack.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import SlackProjectSync
-
-
-class SlackProjectSyncSerializer(BaseSerializer):
- class Meta:
- model = SlackProjectSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "workspace_integration",
- ]
diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py
index 411c5b73f..fc0e6f838 100644
--- a/apiserver/plane/app/serializers/issue.py
+++ b/apiserver/plane/app/serializers/issue.py
@@ -1,5 +1,7 @@
# Django imports
from django.utils import timezone
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError
# Third Party imports
from rest_framework import serializers
@@ -7,7 +9,7 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
-from .state import StateSerializer, StateLiteSerializer
+from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
@@ -31,7 +33,6 @@ from plane.db.models import (
IssueVote,
IssueRelation,
State,
- Project,
)
@@ -432,6 +433,20 @@ class IssueLinkSerializer(BaseSerializer):
"issue",
]
+ def validate_url(self, value):
+ # Check URL format
+        url_validator = URLValidator()
+        try:
+            url_validator(value)
+ except ValidationError:
+ raise serializers.ValidationError("Invalid URL format.")
+
+ # Check URL scheme
+        if not value.startswith(("http://", "https://")):
+ raise serializers.ValidationError("Invalid URL scheme.")
+
+ return value
+
# Validation if url already exists
def create(self, validated_data):
if IssueLink.objects.filter(
@@ -443,9 +458,19 @@ class IssueLinkSerializer(BaseSerializer):
)
return IssueLink.objects.create(**validated_data)
+ def update(self, instance, validated_data):
+ if IssueLink.objects.filter(
+ url=validated_data.get("url"),
+ issue_id=instance.issue_id,
+ ).exists():
+ raise serializers.ValidationError(
+ {"error": "URL already exists for this Issue"}
+ )
+
+ return super().update(instance, validated_data)
+
class IssueLinkLiteSerializer(BaseSerializer):
-
class Meta:
model = IssueLink
fields = [
@@ -476,7 +501,6 @@ class IssueAttachmentSerializer(BaseSerializer):
class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
-
class Meta:
model = IssueAttachment
fields = [
@@ -505,13 +529,12 @@ class IssueReactionSerializer(BaseSerializer):
class IssueReactionLiteSerializer(DynamicBaseSerializer):
-
class Meta:
model = IssueReaction
fields = [
"id",
- "actor_id",
- "issue_id",
+ "actor",
+ "issue",
"reaction",
]
@@ -601,15 +624,18 @@ class IssueSerializer(DynamicBaseSerializer):
# ids
cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
module_ids = serializers.ListField(
- child=serializers.UUIDField(), required=False,
+ child=serializers.UUIDField(),
+ required=False,
)
# Many to many
label_ids = serializers.ListField(
- child=serializers.UUIDField(), required=False,
+ child=serializers.UUIDField(),
+ required=False,
)
assignee_ids = serializers.ListField(
- child=serializers.UUIDField(), required=False,
+ child=serializers.UUIDField(),
+ required=False,
)
# Count items
@@ -649,19 +675,7 @@ class IssueSerializer(DynamicBaseSerializer):
read_only_fields = fields
-class IssueDetailSerializer(IssueSerializer):
- description_html = serializers.CharField()
- is_subscribed = serializers.BooleanField(read_only=True)
-
- class Meta(IssueSerializer.Meta):
- fields = IssueSerializer.Meta.fields + [
- "description_html",
- "is_subscribed",
- ]
-
-
class IssueLiteSerializer(DynamicBaseSerializer):
-
class Meta:
model = Issue
fields = [
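A small sketch of the new IssueLinkSerializer behaviour added above (illustration only): validate_url rejects malformed values and non-http(s) schemes, and update() now refuses a URL that already exists on the same issue, mirroring the existing check in create().

from rest_framework import serializers
from plane.app.serializers.issue import IssueLinkSerializer

link_serializer = IssueLinkSerializer()

link_serializer.validate_url("https://example.com/spec")    # returned unchanged

try:
    link_serializer.validate_url("ftp://example.com/spec")   # well-formed, but wrong scheme
except serializers.ValidationError as exc:
    print(exc.detail)                                        # contains "Invalid URL scheme."

try:
    link_serializer.validate_url("not a url")
except serializers.ValidationError as exc:
    print(exc.detail)                                        # contains "Invalid URL format."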
diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py
index 4aabfc50e..dfdd265cd 100644
--- a/apiserver/plane/app/serializers/module.py
+++ b/apiserver/plane/app/serializers/module.py
@@ -3,7 +3,6 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
-from .user import UserLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import (
@@ -40,6 +39,7 @@ class ModuleWriteSerializer(BaseSerializer):
"updated_by",
"created_at",
"updated_at",
+ "archived_at",
]
def to_representation(self, instance):
@@ -142,7 +142,6 @@ class ModuleIssueSerializer(BaseSerializer):
class ModuleLinkSerializer(BaseSerializer):
-
class Meta:
model = ModuleLink
fields = "__all__"
@@ -215,13 +214,12 @@ class ModuleSerializer(DynamicBaseSerializer):
read_only_fields = fields
-
class ModuleDetailSerializer(ModuleSerializer):
-
link_module = ModuleLinkSerializer(read_only=True, many=True)
+ sub_issues = serializers.IntegerField(read_only=True)
class Meta(ModuleSerializer.Meta):
- fields = ModuleSerializer.Meta.fields + ['link_module']
+ fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues"]
class ModuleFavoriteSerializer(BaseSerializer):
diff --git a/apiserver/plane/app/serializers/notification.py b/apiserver/plane/app/serializers/notification.py
index 2152fcf0f..c6713a354 100644
--- a/apiserver/plane/app/serializers/notification.py
+++ b/apiserver/plane/app/serializers/notification.py
@@ -15,7 +15,6 @@ class NotificationSerializer(BaseSerializer):
class UserNotificationPreferenceSerializer(BaseSerializer):
-
class Meta:
model = UserNotificationPreference
fields = "__all__"
diff --git a/apiserver/plane/app/serializers/page.py b/apiserver/plane/app/serializers/page.py
index a0f5986d6..4dfe6ea9d 100644
--- a/apiserver/plane/app/serializers/page.py
+++ b/apiserver/plane/app/serializers/page.py
@@ -3,7 +3,7 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import LabelLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import (
@@ -12,8 +12,6 @@ from plane.db.models import (
PageFavorite,
PageLabel,
Label,
- Issue,
- Module,
)
diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py
index 999233442..a0c2318e3 100644
--- a/apiserver/plane/app/serializers/project.py
+++ b/apiserver/plane/app/serializers/project.py
@@ -95,14 +95,19 @@ class ProjectLiteSerializer(BaseSerializer):
"identifier",
"name",
"cover_image",
- "icon_prop",
- "emoji",
+ "logo_props",
"description",
]
read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
+ total_issues = serializers.IntegerField(read_only=True)
+ archived_issues = serializers.IntegerField(read_only=True)
+ archived_sub_issues = serializers.IntegerField(read_only=True)
+ draft_issues = serializers.IntegerField(read_only=True)
+ draft_sub_issues = serializers.IntegerField(read_only=True)
+ sub_issues = serializers.IntegerField(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
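The new read-only integer fields on ProjectListSerializer (total_issues, archived_issues, draft_issues, sub_issues, and so on) only carry values when the view annotates them onto the queryset. A hedged sketch of that pattern follows; the "project_issue" relation name is an assumption for illustration, not taken from this diff.

from django.db.models import Count, Q
from plane.db.models import Project
from plane.app.serializers.project import ProjectListSerializer

projects = Project.objects.filter(workspace__slug="my-workspace").annotate(
    total_issues=Count(
        "project_issue",                              # assumed reverse relation name
        filter=Q(
            project_issue__archived_at__isnull=True,
            project_issue__is_draft=False,
        ),
        distinct=True,
    ),
)

# Restrict output to the annotated fields; every other count field would need
# an analogous annotation before it could be serialized.
data = ProjectListSerializer(
    projects, many=True, fields=["id", "name", "total_issues"]
).data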
diff --git a/apiserver/plane/app/serializers/user.py b/apiserver/plane/app/serializers/user.py
index 8cd48827e..d6c15ee7f 100644
--- a/apiserver/plane/app/serializers/user.py
+++ b/apiserver/plane/app/serializers/user.py
@@ -4,7 +4,6 @@ from rest_framework import serializers
# Module import
from .base import BaseSerializer
from plane.db.models import User, Workspace, WorkspaceMemberInvite
-from plane.license.models import InstanceAdmin, Instance
class UserSerializer(BaseSerializer):
@@ -99,13 +98,13 @@ class UserMeSettingsSerializer(BaseSerializer):
).first()
return {
"last_workspace_id": obj.last_workspace_id,
- "last_workspace_slug": workspace.slug
- if workspace is not None
- else "",
+ "last_workspace_slug": (
+ workspace.slug if workspace is not None else ""
+ ),
"fallback_workspace_id": obj.last_workspace_id,
- "fallback_workspace_slug": workspace.slug
- if workspace is not None
- else "",
+ "fallback_workspace_slug": (
+ workspace.slug if workspace is not None else ""
+ ),
"invites": workspace_invites,
}
else:
@@ -120,12 +119,16 @@ class UserMeSettingsSerializer(BaseSerializer):
return {
"last_workspace_id": None,
"last_workspace_slug": None,
- "fallback_workspace_id": fallback_workspace.id
- if fallback_workspace is not None
- else None,
- "fallback_workspace_slug": fallback_workspace.slug
- if fallback_workspace is not None
- else None,
+ "fallback_workspace_id": (
+ fallback_workspace.id
+ if fallback_workspace is not None
+ else None
+ ),
+ "fallback_workspace_slug": (
+ fallback_workspace.slug
+ if fallback_workspace is not None
+ else None
+ ),
"invites": workspace_invites,
}
diff --git a/apiserver/plane/app/serializers/webhook.py b/apiserver/plane/app/serializers/webhook.py
index 95ca149ff..175dea304 100644
--- a/apiserver/plane/app/serializers/webhook.py
+++ b/apiserver/plane/app/serializers/webhook.py
@@ -1,5 +1,4 @@
# Python imports
-import urllib
import socket
import ipaddress
from urllib.parse import urlparse
diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py
index f2b11f127..40b96687d 100644
--- a/apiserver/plane/app/urls/__init__.py
+++ b/apiserver/plane/app/urls/__init__.py
@@ -6,9 +6,7 @@ from .cycle import urlpatterns as cycle_urls
from .dashboard import urlpatterns as dashboard_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
-from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
-from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
@@ -32,9 +30,7 @@ urlpatterns = [
*dashboard_urls,
*estimate_urls,
*external_urls,
- *importer_urls,
*inbox_urls,
- *integration_urls,
*issue_urls,
*module_urls,
*notification_urls,
diff --git a/apiserver/plane/app/urls/cycle.py b/apiserver/plane/app/urls/cycle.py
index 740b0ab43..2e1779420 100644
--- a/apiserver/plane/app/urls/cycle.py
+++ b/apiserver/plane/app/urls/cycle.py
@@ -8,6 +8,7 @@ from plane.app.views import (
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint,
+ CycleArchiveUnarchiveEndpoint,
)
@@ -90,4 +91,14 @@ urlpatterns = [
CycleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
+ path(
+ "workspaces//projects//cycles//archive/",
+ CycleArchiveUnarchiveEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles/",
+ CycleArchiveUnarchiveEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/urls/external.py b/apiserver/plane/app/urls/external.py
index 774e6fb7c..8db87a249 100644
--- a/apiserver/plane/app/urls/external.py
+++ b/apiserver/plane/app/urls/external.py
@@ -2,7 +2,6 @@ from django.urls import path
from plane.app.views import UnsplashEndpoint
-from plane.app.views import ReleaseNotesEndpoint
from plane.app.views import GPTIntegrationEndpoint
@@ -12,11 +11,6 @@ urlpatterns = [
UnsplashEndpoint.as_view(),
name="unsplash",
),
- path(
- "release-notes/",
- ReleaseNotesEndpoint.as_view(),
- name="release-notes",
- ),
path(
"workspaces//projects//ai-assistant/",
GPTIntegrationEndpoint.as_view(),
diff --git a/apiserver/plane/app/urls/importer.py b/apiserver/plane/app/urls/importer.py
deleted file mode 100644
index f3a018d78..000000000
--- a/apiserver/plane/app/urls/importer.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from django.urls import path
-
-
-from plane.app.views import (
- ServiceIssueImportSummaryEndpoint,
- ImportServiceEndpoint,
- UpdateServiceImportStatusEndpoint,
-)
-
-
-urlpatterns = [
- path(
- "workspaces//importers//",
- ServiceIssueImportSummaryEndpoint.as_view(),
- name="importer-summary",
- ),
- path(
- "workspaces//projects/importers//",
- ImportServiceEndpoint.as_view(),
- name="importer",
- ),
- path(
- "workspaces//importers/",
- ImportServiceEndpoint.as_view(),
- name="importer",
- ),
- path(
- "workspaces//importers///",
- ImportServiceEndpoint.as_view(),
- name="importer",
- ),
- path(
- "workspaces//projects//service//importers//",
- UpdateServiceImportStatusEndpoint.as_view(),
- name="importer-status",
- ),
-]
diff --git a/apiserver/plane/app/urls/integration.py b/apiserver/plane/app/urls/integration.py
deleted file mode 100644
index cf3f82d5a..000000000
--- a/apiserver/plane/app/urls/integration.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from django.urls import path
-
-
-from plane.app.views import (
- IntegrationViewSet,
- WorkspaceIntegrationViewSet,
- GithubRepositoriesEndpoint,
- GithubRepositorySyncViewSet,
- GithubIssueSyncViewSet,
- GithubCommentSyncViewSet,
- BulkCreateGithubIssueSyncEndpoint,
- SlackProjectSyncViewSet,
-)
-
-
-urlpatterns = [
- path(
- "integrations/",
- IntegrationViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="integrations",
- ),
- path(
- "integrations//",
- IntegrationViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="integrations",
- ),
- path(
- "workspaces//workspace-integrations/",
- WorkspaceIntegrationViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="workspace-integrations",
- ),
- path(
- "workspaces//workspace-integrations//",
- WorkspaceIntegrationViewSet.as_view(
- {
- "post": "create",
- }
- ),
- name="workspace-integrations",
- ),
- path(
- "workspaces//workspace-integrations//provider/",
- WorkspaceIntegrationViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="workspace-integrations",
- ),
- # Github Integrations
- path(
- "workspaces//workspace-integrations//github-repositories/",
- GithubRepositoriesEndpoint.as_view(),
- ),
- path(
- "workspaces//projects//workspace-integrations//github-repository-sync/",
- GithubRepositorySyncViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- ),
- path(
- "workspaces//projects//workspace-integrations//github-repository-sync//",
- GithubRepositorySyncViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- ),
- path(
- "workspaces//projects//github-repository-sync//github-issue-sync/",
- GithubIssueSyncViewSet.as_view(
- {
- "post": "create",
- "get": "list",
- }
- ),
- ),
- path(
- "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/",
- BulkCreateGithubIssueSyncEndpoint.as_view(),
- ),
- path(
- "workspaces//projects//github-repository-sync//github-issue-sync//",
- GithubIssueSyncViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- ),
- path(
- "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/",
- GithubCommentSyncViewSet.as_view(
- {
- "post": "create",
- "get": "list",
- }
- ),
- ),
- path(
- "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//",
- GithubCommentSyncViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- ),
- ## End Github Integrations
- # Slack Integration
- path(
- "workspaces//projects//workspace-integrations//project-slack-sync/",
- SlackProjectSyncViewSet.as_view(
- {
- "post": "create",
- "get": "list",
- }
- ),
- ),
- path(
- "workspaces//projects//workspace-integrations//project-slack-sync//",
- SlackProjectSyncViewSet.as_view(
- {
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- ),
- ## End Slack Integration
-]
diff --git a/apiserver/plane/app/urls/issue.py b/apiserver/plane/app/urls/issue.py
index 4ee70450b..0d3b9e063 100644
--- a/apiserver/plane/app/urls/issue.py
+++ b/apiserver/plane/app/urls/issue.py
@@ -1,30 +1,26 @@
from django.urls import path
-
from plane.app.views import (
- IssueListEndpoint,
- IssueViewSet,
- LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
- BulkImportIssuesEndpoint,
- UserWorkSpaceIssues,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
+ CommentReactionViewSet,
ExportIssuesEndpoint,
IssueActivityEndpoint,
- IssueCommentViewSet,
- IssueSubscriberViewSet,
- IssueReactionViewSet,
- CommentReactionViewSet,
- IssueUserDisplayPropertyEndpoint,
IssueArchiveViewSet,
- IssueRelationViewSet,
+ IssueCommentViewSet,
IssueDraftViewSet,
+ IssueListEndpoint,
+ IssueReactionViewSet,
+ IssueRelationViewSet,
+ IssueSubscriberViewSet,
+ IssueUserDisplayPropertyEndpoint,
+ IssueViewSet,
+ LabelViewSet,
)
-
urlpatterns = [
path(
"workspaces//projects//issues/list/",
@@ -85,18 +81,7 @@ urlpatterns = [
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
- path(
- "workspaces//projects//bulk-import-issues//",
- BulkImportIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- # deprecated endpoint TODO: remove once confirmed
- path(
- "workspaces//my-issues/",
- UserWorkSpaceIssues.as_view(),
- name="workspace-issues",
- ),
- ##
+ ##
path(
"workspaces//projects//issues//sub-issues/",
SubIssuesEndpoint.as_view(),
diff --git a/apiserver/plane/app/urls/module.py b/apiserver/plane/app/urls/module.py
index 5e9f4f123..a730fcd50 100644
--- a/apiserver/plane/app/urls/module.py
+++ b/apiserver/plane/app/urls/module.py
@@ -6,8 +6,8 @@ from plane.app.views import (
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
- BulkImportModulesEndpoint,
ModuleUserPropertiesEndpoint,
+ ModuleArchiveUnarchiveEndpoint,
)
@@ -106,14 +106,19 @@ urlpatterns = [
),
name="user-favorite-module",
),
- path(
- "workspaces//projects//bulk-import-modules//",
- BulkImportModulesEndpoint.as_view(),
- name="bulk-modules-create",
- ),
path(
"workspaces//projects//modules//user-properties/",
ModuleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
+ path(
+ "workspaces//projects//modules//archive/",
+ ModuleArchiveUnarchiveEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-modules/",
+ ModuleArchiveUnarchiveEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/urls/project.py b/apiserver/plane/app/urls/project.py
index f8ecac4c0..7ea636df8 100644
--- a/apiserver/plane/app/urls/project.py
+++ b/apiserver/plane/app/urls/project.py
@@ -14,6 +14,7 @@ from plane.app.views import (
ProjectPublicCoverImagesEndpoint,
ProjectDeployBoardViewSet,
UserProjectRolesEndpoint,
+ ProjectArchiveUnarchiveEndpoint,
)
@@ -175,4 +176,9 @@ urlpatterns = [
),
name="project-deploy-board",
),
+ path(
+ "workspaces//projects//archive/",
+ ProjectArchiveUnarchiveEndpoint.as_view(),
+ name="project-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/urls/workspace.py b/apiserver/plane/app/urls/workspace.py
index a70ff18e5..8b21bb9e1 100644
--- a/apiserver/plane/app/urls/workspace.py
+++ b/apiserver/plane/app/urls/workspace.py
@@ -22,6 +22,7 @@ from plane.app.views import (
WorkspaceUserPropertiesEndpoint,
WorkspaceStatesEndpoint,
WorkspaceEstimatesEndpoint,
+ ExportWorkspaceUserActivityEndpoint,
WorkspaceModulesEndpoint,
WorkspaceCyclesEndpoint,
)
@@ -191,6 +192,11 @@ urlpatterns = [
WorkspaceUserActivityEndpoint.as_view(),
name="workspace-user-activity",
),
+ path(
+ "workspaces//user-activity//export/",
+ ExportWorkspaceUserActivityEndpoint.as_view(),
+ name="export-workspace-user-activity",
+ ),
path(
"workspaces//user-profile//",
WorkspaceUserProfileEndpoint.as_view(),
diff --git a/apiserver/plane/app/urls_deprecated.py b/apiserver/plane/app/urls_deprecated.py
deleted file mode 100644
index 2a47285aa..000000000
--- a/apiserver/plane/app/urls_deprecated.py
+++ /dev/null
@@ -1,1810 +0,0 @@
-from django.urls import path
-
-from rest_framework_simplejwt.views import TokenRefreshView
-
-# Create your urls here.
-
-from plane.app.views import (
- # Authentication
- SignUpEndpoint,
- SignInEndpoint,
- SignOutEndpoint,
- MagicSignInEndpoint,
- MagicSignInGenerateEndpoint,
- OauthEndpoint,
- ## End Authentication
- # Auth Extended
- ForgotPasswordEndpoint,
- VerifyEmailEndpoint,
- ResetPasswordEndpoint,
- RequestEmailVerificationEndpoint,
- ChangePasswordEndpoint,
- ## End Auth Extender
- # User
- UserEndpoint,
- UpdateUserOnBoardedEndpoint,
- UpdateUserTourCompletedEndpoint,
- UserActivityEndpoint,
- ## End User
- # Workspaces
- WorkSpaceViewSet,
- UserWorkSpacesEndpoint,
- InviteWorkspaceEndpoint,
- JoinWorkspaceEndpoint,
- WorkSpaceMemberViewSet,
- WorkspaceMembersEndpoint,
- WorkspaceInvitationsViewset,
- UserWorkspaceInvitationsEndpoint,
- WorkspaceMemberUserEndpoint,
- WorkspaceMemberUserViewsEndpoint,
- WorkSpaceAvailabilityCheckEndpoint,
- TeamMemberViewSet,
- AddTeamToProjectEndpoint,
- UserLastProjectWithWorkspaceEndpoint,
- UserWorkspaceInvitationEndpoint,
- UserActivityGraphEndpoint,
- UserIssueCompletedGraphEndpoint,
- UserWorkspaceDashboardEndpoint,
- WorkspaceThemeViewSet,
- WorkspaceUserProfileStatsEndpoint,
- WorkspaceUserActivityEndpoint,
- WorkspaceUserProfileEndpoint,
- WorkspaceUserProfileIssuesEndpoint,
- WorkspaceLabelsEndpoint,
- LeaveWorkspaceEndpoint,
- ## End Workspaces
- # File Assets
- FileAssetEndpoint,
- UserAssetsEndpoint,
- ## End File Assets
- # Projects
- ProjectViewSet,
- InviteProjectEndpoint,
- ProjectMemberViewSet,
- ProjectMemberEndpoint,
- ProjectMemberInvitationsViewset,
- ProjectMemberUserEndpoint,
- AddMemberToProjectEndpoint,
- ProjectJoinEndpoint,
- UserProjectInvitationsViewset,
- ProjectIdentifierEndpoint,
- ProjectFavoritesViewSet,
- LeaveProjectEndpoint,
- ProjectPublicCoverImagesEndpoint,
- ## End Projects
- # Issues
- IssueViewSet,
- WorkSpaceIssuesEndpoint,
- IssueActivityEndpoint,
- IssueCommentViewSet,
- UserWorkSpaceIssues,
- BulkDeleteIssuesEndpoint,
- BulkImportIssuesEndpoint,
- ProjectUserViewsEndpoint,
- IssueUserDisplayPropertyEndpoint,
- LabelViewSet,
- SubIssuesEndpoint,
- IssueLinkViewSet,
- BulkCreateIssueLabelsEndpoint,
- IssueAttachmentEndpoint,
- IssueArchiveViewSet,
- IssueSubscriberViewSet,
- IssueCommentPublicViewSet,
- IssueReactionViewSet,
- IssueRelationViewSet,
- CommentReactionViewSet,
- IssueDraftViewSet,
- ## End Issues
- # States
- StateViewSet,
- ## End States
- # Estimates
- ProjectEstimatePointEndpoint,
- BulkEstimatePointEndpoint,
- ## End Estimates
- # Views
- GlobalViewViewSet,
- GlobalViewIssuesViewSet,
- IssueViewViewSet,
- IssueViewFavoriteViewSet,
- ## End Views
- # Cycles
- CycleViewSet,
- CycleIssueViewSet,
- CycleDateCheckEndpoint,
- CycleFavoriteViewSet,
- TransferCycleIssueEndpoint,
- ## End Cycles
- # Modules
- ModuleViewSet,
- ModuleIssueViewSet,
- ModuleFavoriteViewSet,
- ModuleLinkViewSet,
- BulkImportModulesEndpoint,
- ## End Modules
- # Pages
- PageViewSet,
- PageLogEndpoint,
- SubPagesEndpoint,
- PageFavoriteViewSet,
- CreateIssueFromBlockEndpoint,
- ## End Pages
- # Api Tokens
- ApiTokenEndpoint,
- ## End Api Tokens
- # Integrations
- IntegrationViewSet,
- WorkspaceIntegrationViewSet,
- GithubRepositoriesEndpoint,
- GithubRepositorySyncViewSet,
- GithubIssueSyncViewSet,
- GithubCommentSyncViewSet,
- BulkCreateGithubIssueSyncEndpoint,
- SlackProjectSyncViewSet,
- ## End Integrations
- # Importer
- ServiceIssueImportSummaryEndpoint,
- ImportServiceEndpoint,
- UpdateServiceImportStatusEndpoint,
- ## End importer
- # Search
- GlobalSearchEndpoint,
- IssueSearchEndpoint,
- ## End Search
- # External
- GPTIntegrationEndpoint,
- ReleaseNotesEndpoint,
- UnsplashEndpoint,
- ## End External
- # Inbox
- InboxViewSet,
- InboxIssueViewSet,
- ## End Inbox
- # Analytics
- AnalyticsEndpoint,
- AnalyticViewViewset,
- SavedAnalyticEndpoint,
- ExportAnalyticsEndpoint,
- DefaultAnalyticsEndpoint,
- ## End Analytics
- # Notification
- NotificationViewSet,
- UnreadNotificationEndpoint,
- MarkAllReadNotificationViewSet,
- ## End Notification
- # Public Boards
- ProjectDeployBoardViewSet,
- ProjectIssuesPublicEndpoint,
- ProjectDeployBoardPublicSettingsEndpoint,
- IssueReactionPublicViewSet,
- CommentReactionPublicViewSet,
- InboxIssuePublicViewSet,
- IssueVotePublicViewSet,
- WorkspaceProjectDeployBoardEndpoint,
- IssueRetrievePublicEndpoint,
- ## End Public Boards
- ## Exporter
- ExportIssuesEndpoint,
- ## End Exporter
- # Configuration
- ConfigurationEndpoint,
- ## End Configuration
-)
-
-
-# TODO: Delete this file
-# This url file has been deprecated use apiserver/plane/urls folder to create new urls
-
-urlpatterns = [
- # Social Auth
- path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
- # Auth
- path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
- path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
- path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
- # Magic Sign In/Up
- path(
- "magic-generate/",
- MagicSignInGenerateEndpoint.as_view(),
- name="magic-generate",
- ),
- path(
- "magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"
- ),
- path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
- # Email verification
- path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
- path(
- "request-email-verify/",
- RequestEmailVerificationEndpoint.as_view(),
- name="request-reset-email",
- ),
- # Password Manipulation
- path(
- "reset-password///",
- ResetPasswordEndpoint.as_view(),
- name="password-reset",
- ),
- path(
- "forgot-password/",
- ForgotPasswordEndpoint.as_view(),
- name="forgot-password",
- ),
- # User Profile
- path(
- "users/me/",
- UserEndpoint.as_view(
- {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
- ),
- name="users",
- ),
- path(
- "users/me/settings/",
- UserEndpoint.as_view(
- {
- "get": "retrieve_user_settings",
- }
- ),
- name="users",
- ),
- path(
- "users/me/change-password/",
- ChangePasswordEndpoint.as_view(),
- name="change-password",
- ),
- path(
- "users/me/onboard/",
- UpdateUserOnBoardedEndpoint.as_view(),
- name="user-onboard",
- ),
- path(
- "users/me/tour-completed/",
- UpdateUserTourCompletedEndpoint.as_view(),
- name="user-tour",
- ),
- path(
- "users/workspaces//activities/",
- UserActivityEndpoint.as_view(),
- name="user-activities",
- ),
- # user workspaces
- path(
- "users/me/workspaces/",
- UserWorkSpacesEndpoint.as_view(),
- name="user-workspace",
- ),
- # user workspace invitations
- path(
- "users/me/invitations/workspaces/",
- UserWorkspaceInvitationsEndpoint.as_view(
- {"get": "list", "post": "create"}
- ),
- name="user-workspace-invitations",
- ),
- # user workspace invitation
- path(
- "users/me/invitations//",
- UserWorkspaceInvitationEndpoint.as_view(
- {
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- # user join workspace
- # User Graphs
- path(
- "users/me/workspaces//activity-graph/",
- UserActivityGraphEndpoint.as_view(),
- name="user-activity-graph",
- ),
- path(
- "users/me/workspaces//issues-completed-graph/",
- UserIssueCompletedGraphEndpoint.as_view(),
- name="completed-graph",
- ),
- path(
- "users/me/workspaces//dashboard/",
- UserWorkspaceDashboardEndpoint.as_view(),
- name="user-workspace-dashboard",
- ),
- ## User Graph
- path(
- "users/me/invitations/workspaces///join/",
- JoinWorkspaceEndpoint.as_view(),
- name="user-join-workspace",
- ),
- # user project invitations
- path(
- "users/me/invitations/projects/",
- UserProjectInvitationsViewset.as_view(
- {"get": "list", "post": "create"}
- ),
- name="user-project-invitaions",
- ),
- ## Workspaces ##
- path(
- "workspace-slug-check/",
- WorkSpaceAvailabilityCheckEndpoint.as_view(),
- name="workspace-availability",
- ),
- path(
- "workspaces/",
- WorkSpaceViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//",
- WorkSpaceViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//invite/",
- InviteWorkspaceEndpoint.as_view(),
- name="workspace",
- ),
- path(
- "workspaces//invitations/",
- WorkspaceInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//invitations//",
- WorkspaceInvitationsViewset.as_view(
- {
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//members/",
- WorkSpaceMemberViewSet.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//members//",
- WorkSpaceMemberViewSet.as_view(
- {
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//workspace-members/",
- WorkspaceMembersEndpoint.as_view(),
- name="workspace-members",
- ),
- path(
- "workspaces//teams/",
- TeamMemberViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//teams//",
- TeamMemberViewSet.as_view(
- {
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "users/last-visited-workspace/",
- UserLastProjectWithWorkspaceEndpoint.as_view(),
- name="workspace-project-details",
- ),
- path(
- "workspaces//workspace-members/me/",
- WorkspaceMemberUserEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-views/",
- WorkspaceMemberUserViewsEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-themes/",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//workspace-themes//",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//user-stats//",
- WorkspaceUserProfileStatsEndpoint.as_view(),
- name="workspace-user-stats",
- ),
- path(
- "workspaces//user-activity//",
- WorkspaceUserActivityEndpoint.as_view(),
- name="workspace-user-activity",
- ),
- path(
- "workspaces//user-profile//",
- WorkspaceUserProfileEndpoint.as_view(),
- name="workspace-user-profile-page",
- ),
- path(
- "workspaces//user-issues//",
- WorkspaceUserProfileIssuesEndpoint.as_view(),
- name="workspace-user-profile-issues",
- ),
- path(
- "workspaces//labels/",
- WorkspaceLabelsEndpoint.as_view(),
- name="workspace-labels",
- ),
- path(
- "workspaces//members/leave/",
- LeaveWorkspaceEndpoint.as_view(),
- name="workspace-labels",
- ),
- ## End Workspaces ##
- # Projects
- path(
- "workspaces//projects/",
- ProjectViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//",
- ProjectViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//project-identifiers/",
- ProjectIdentifierEndpoint.as_view(),
- name="project-identifiers",
- ),
- path(
- "workspaces//projects//invite/",
- InviteProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/",
- ProjectMemberViewSet.as_view({"get": "list"}),
- name="project",
- ),
- path(
- "workspaces//projects//members//",
- ProjectMemberViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-members/",
- ProjectMemberEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/add/",
- AddMemberToProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects/join/",
- ProjectJoinEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//team-invite/",
- AddTeamToProjectEndpoint.as_view(),
- name="projects",
- ),
- path(
- "workspaces//projects//invitations/",
- ProjectMemberInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//projects//invitations//",
- ProjectMemberInvitationsViewset.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-views/",
- ProjectUserViewsEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//projects//project-members/me/",
- ProjectMemberUserEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//user-favorite-projects/",
- ProjectFavoritesViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//user-favorite-projects//",
- ProjectFavoritesViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//members/leave/",
- LeaveProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "project-covers/",
- ProjectPublicCoverImagesEndpoint.as_view(),
- name="project-covers",
- ),
- # End Projects
- # States
- path(
- "workspaces//projects//states/",
- StateViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-states",
- ),
- path(
- "workspaces//projects//states//",
- StateViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-state",
- ),
- # End States ##
- # Estimates
- path(
- "workspaces//projects//project-estimates/",
- ProjectEstimatePointEndpoint.as_view(),
- name="project-estimate-points",
- ),
- path(
- "workspaces//projects//estimates/",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- path(
- "workspaces//projects//estimates//",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- # End Estimates ##
- # Views
- path(
- "workspaces//projects//views/",
- IssueViewViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//projects//views//",
- IssueViewViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//views/",
- GlobalViewViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="global-view",
- ),
- path(
- "workspaces//views//",
- GlobalViewViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="global-view",
- ),
- path(
- "workspaces//issues/",
- GlobalViewIssuesViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="global-view-issues",
- ),
- path(
- "workspaces//projects//user-favorite-views/",
- IssueViewFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-view",
- ),
- path(
- "workspaces//projects//user-favorite-views//",
- IssueViewFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-view",
- ),
- ## End Views
- ## Cycles
- path(
- "workspaces//projects//cycles/",
- CycleViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//",
- CycleViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues/",
- CycleIssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues//",
- CycleIssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles/date-check/",
- CycleDateCheckEndpoint.as_view(),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles/",
- CycleFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles//",
- CycleFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//cycles//transfer-issues/",
- TransferCycleIssueEndpoint.as_view(),
- name="transfer-issues",
- ),
- ## End Cycles
- # Issue
- path(
- "workspaces//projects//issues/",
- IssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//projects//issues//",
- IssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//projects//issue-labels/",
- LabelViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//issue-labels//",
- LabelViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//bulk-create-labels/",
- BulkCreateIssueLabelsEndpoint.as_view(),
- name="project-bulk-labels",
- ),
- path(
- "workspaces//projects//bulk-delete-issues/",
- BulkDeleteIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//projects//bulk-import-issues//",
- BulkImportIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//my-issues/",
- UserWorkSpaceIssues.as_view(),
- name="workspace-issues",
- ),
- path(
- "workspaces//projects//issues//sub-issues/",
- SubIssuesEndpoint.as_view(),
- name="sub-issues",
- ),
- path(
- "workspaces//projects//issues//issue-links/",
- IssueLinkViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-links//",
- IssueLinkViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-attachments/",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//projects//issues//issue-attachments//",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//export-issues/",
- ExportIssuesEndpoint.as_view(),
- name="export-issues",
- ),
- ## End Issues
- ## Issue Activity
- path(
- "workspaces//projects//issues//history/",
- IssueActivityEndpoint.as_view(),
- name="project-issue-history",
- ),
- ## Issue Activity
- ## IssueComments
- path(
- "workspaces/