diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml index 38694a62e..603f08e94 100644 --- a/.github/workflows/build-branch.yml +++ b/.github/workflows/build-branch.yml @@ -2,11 +2,6 @@ name: Branch Build on: workflow_dispatch: - inputs: - branch_name: - description: "Branch Name" - required: true - default: "preview" push: branches: - master @@ -16,49 +11,71 @@ on: types: [released, prereleased] env: - TARGET_BRANCH: ${{ inputs.branch_name || github.ref_name || github.event.release.target_commitish }} + TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }} jobs: branch_build_setup: name: Build-Push Web/Space/API/Proxy Docker Image - runs-on: ubuntu-20.04 - steps: - - name: Check out the repo - uses: actions/checkout@v3.3.0 + runs-on: ubuntu-latest outputs: - gh_branch_name: ${{ env.TARGET_BRANCH }} + gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }} + gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }} + gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }} + gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }} + gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }} + + steps: + - id: set_env_variables + name: Set Environment Variables + run: | + if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then + echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT + echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT + echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT + echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT + else + echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT + echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT + echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT + echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT + fi + echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT branch_build_push_frontend: runs-on: ubuntu-20.04 needs: [branch_build_setup] env: FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }} + TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }} + BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }} + BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }} + BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }} + BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }} steps: - name: Set Frontend Docker Tag run: | - if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }} - elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable else TAG=${{ env.FRONTEND_TAG }} fi echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV - - name: Docker Setup QEMU - uses: docker/setup-qemu-action@v3.0.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 - with: - platforms: linux/amd64,linux/arm64 - buildkitd-flags: "--allow-insecure-entitlement security.insecure" - name: Login to Docker Hub - uses: docker/login-action@v3.0.0 + uses: docker/login-action@v3 with: username: ${{ 
secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: ${{ env.BUILDX_DRIVER }} + version: ${{ env.BUILDX_VERSION }} + endpoint: ${{ env.BUILDX_ENDPOINT }} + - name: Check out the repo uses: actions/checkout@v4.1.1 @@ -67,7 +84,7 @@ jobs: with: context: . file: ./web/Dockerfile.web - platforms: linux/amd64 + platforms: ${{ env.BUILDX_PLATFORMS }} tags: ${{ env.FRONTEND_TAG }} push: true env: @@ -80,33 +97,36 @@ jobs: needs: [branch_build_setup] env: SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }} + TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }} + BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }} + BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }} + BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }} + BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }} steps: - name: Set Space Docker Tag run: | - if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }} - elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable else TAG=${{ env.SPACE_TAG }} fi echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV - - name: Docker Setup QEMU - uses: docker/setup-qemu-action@v3.0.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 - with: - platforms: linux/amd64,linux/arm64 - buildkitd-flags: "--allow-insecure-entitlement security.insecure" - - name: Login to Docker Hub - uses: docker/login-action@v3.0.0 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: ${{ env.BUILDX_DRIVER }} + version: ${{ env.BUILDX_VERSION }} + endpoint: ${{ env.BUILDX_ENDPOINT }} + - name: Check out the repo uses: actions/checkout@v4.1.1 @@ -115,7 +135,7 @@ jobs: with: context: . 
file: ./space/Dockerfile.space - platforms: linux/amd64 + platforms: ${{ env.BUILDX_PLATFORMS }} tags: ${{ env.SPACE_TAG }} push: true env: @@ -128,33 +148,36 @@ jobs: needs: [branch_build_setup] env: BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }} + TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }} + BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }} + BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }} + BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }} + BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }} steps: - name: Set Backend Docker Tag run: | - if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }} - elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable else TAG=${{ env.BACKEND_TAG }} fi echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV - - name: Docker Setup QEMU - uses: docker/setup-qemu-action@v3.0.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 - with: - platforms: linux/amd64,linux/arm64 - buildkitd-flags: "--allow-insecure-entitlement security.insecure" - - name: Login to Docker Hub - uses: docker/login-action@v3.0.0 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: ${{ env.BUILDX_DRIVER }} + version: ${{ env.BUILDX_VERSION }} + endpoint: ${{ env.BUILDX_ENDPOINT }} + - name: Check out the repo uses: actions/checkout@v4.1.1 @@ -163,7 +186,7 @@ jobs: with: context: ./apiserver file: ./apiserver/Dockerfile.api - platforms: linux/amd64 + platforms: ${{ env.BUILDX_PLATFORMS }} push: true tags: ${{ env.BACKEND_TAG }} env: @@ -171,38 +194,42 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + branch_build_push_proxy: runs-on: ubuntu-20.04 needs: [branch_build_setup] env: PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }} + TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }} + BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }} + BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }} + BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }} + BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }} steps: - name: Set Proxy Docker Tag run: | - if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }} - elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then TAG=${{ 
secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable else TAG=${{ env.PROXY_TAG }} fi echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV - - name: Docker Setup QEMU - uses: docker/setup-qemu-action@v3.0.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 - with: - platforms: linux/amd64,linux/arm64 - buildkitd-flags: "--allow-insecure-entitlement security.insecure" - - name: Login to Docker Hub - uses: docker/login-action@v3.0.0 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: ${{ env.BUILDX_DRIVER }} + version: ${{ env.BUILDX_VERSION }} + endpoint: ${{ env.BUILDX_ENDPOINT }} + - name: Check out the repo uses: actions/checkout@v4.1.1 @@ -211,10 +238,11 @@ jobs: with: context: ./nginx file: ./nginx/Dockerfile - platforms: linux/amd64 + platforms: ${{ env.BUILDX_PLATFORMS }} tags: ${{ env.PROXY_TAG }} push: true env: DOCKER_BUILDKIT: 1 DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + diff --git a/apiserver/package.json b/apiserver/package.json index 120314ed3..fb4f8441d 100644 --- a/apiserver/package.json +++ b/apiserver/package.json @@ -1,4 +1,4 @@ { "name": "plane-api", - "version": "0.15.0" + "version": "0.15.1" } diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py index b069ef78c..edb89f9b1 100644 --- a/apiserver/plane/api/views/base.py +++ b/apiserver/plane/api/views/base.py @@ -1,6 +1,8 @@ # Python imports import zoneinfo import json +from urllib.parse import urlparse + # Django imports from django.conf import settings @@ -51,6 +53,11 @@ class WebhookMixin: and self.request.method in ["POST", "PATCH", "DELETE"] and response.status_code in [200, 201, 204] ): + url = request.build_absolute_uri() + parsed_url = urlparse(url) + # Extract the scheme and netloc + scheme = parsed_url.scheme + netloc = parsed_url.netloc # Push the object to delay send_webhook.delay( event=self.webhook_event, @@ -59,6 +66,7 @@ class WebhookMixin: action=self.request.method, slug=self.workspace_slug, bulk=self.bulk, + current_site=f"{scheme}://{netloc}", ) return response diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index c296bb111..6f66c373e 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -243,6 +243,29 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView): ): serializer = CycleSerializer(data=request.data) if serializer.is_valid(): + if ( + request.data.get("external_id") + and request.data.get("external_source") + and Cycle.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + cycle = Cycle.objects.filter( + workspace__slug=slug, + project_id=project_id, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).first() + return Response( + { + "error": "Cycle with the same external id and external source already exists", + "id": str(cycle.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save( project_id=project_id, owned_by=request.user, @@ -289,6 +312,23 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView): serializer = CycleSerializer(cycle, data=request.data, partial=True) if serializer.is_valid(): + if ( + request.data.get("external_id") + and (cycle.external_id != 
request.data.get("external_id")) + and Cycle.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source", cycle.external_source), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "Cycle with the same external id and external source already exists", + "id": str(cycle.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index e91f2a5f6..a759b15f6 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -220,6 +220,30 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView): ) if serializer.is_valid(): + if ( + request.data.get("external_id") + and request.data.get("external_source") + and Issue.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + issue = Issue.objects.filter( + workspace__slug=slug, + project_id=project_id, + external_id=request.data.get("external_id"), + external_source=request.data.get("external_source"), + ).first() + return Response( + { + "error": "Issue with the same external id and external source already exists", + "id": str(issue.id), + }, + status=status.HTTP_409_CONFLICT, + ) + serializer.save() # Track the issue @@ -256,6 +280,26 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView): partial=True, ) if serializer.is_valid(): + if ( + str(request.data.get("external_id")) + and (issue.external_id != str(request.data.get("external_id"))) + and Issue.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get( + "external_source", issue.external_source + ), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "Issue with the same external id and external source already exists", + "id": str(issue.id), + }, + status=status.HTTP_409_CONFLICT, + ) + serializer.save() issue_activity.delay( type="issue.activity.updated", @@ -263,6 +307,8 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView): actor_id=str(request.user.id), issue_id=str(pk), project_id=str(project_id), + external_id__isnull=False, + external_source__isnull=False, current_instance=current_instance, epoch=int(timezone.now().timestamp()), ) @@ -318,6 +364,30 @@ class LabelAPIEndpoint(BaseAPIView): try: serializer = LabelSerializer(data=request.data) if serializer.is_valid(): + if ( + request.data.get("external_id") + and request.data.get("external_source") + and Label.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + label = Label.objects.filter( + workspace__slug=slug, + project_id=project_id, + external_id=request.data.get("external_id"), + external_source=request.data.get("external_source"), + ).first() + return Response( + { + "error": "Label with the same external id and external source already exists", + "id": str(label.id), + }, + status=status.HTTP_409_CONFLICT, + ) + serializer.save(project_id=project_id) return Response( serializer.data, status=status.HTTP_201_CREATED @@ -326,11 +396,17 @@ class LabelAPIEndpoint(BaseAPIView): serializer.errors, status=status.HTTP_400_BAD_REQUEST ) except 
IntegrityError: + label = Label.objects.filter( + workspace__slug=slug, + project_id=project_id, + name=request.data.get("name"), + ).first() return Response( { - "error": "Label with the same name already exists in the project" + "error": "Label with the same name already exists in the project", + "id": str(label.id), }, - status=status.HTTP_400_BAD_REQUEST, + status=status.HTTP_409_CONFLICT, ) def get(self, request, slug, project_id, pk=None): @@ -357,6 +433,25 @@ label = self.get_queryset().get(pk=pk) serializer = LabelSerializer(label, data=request.data, partial=True) if serializer.is_valid(): + if ( + str(request.data.get("external_id")) + and (label.external_id != str(request.data.get("external_id"))) + and Label.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get( + "external_source", label.external_source + ), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "Label with the same external id and external source already exists", + "id": str(label.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 1a9a21a3c..d509a53c7 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -132,6 +132,29 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView): }, ) if serializer.is_valid(): + if ( + request.data.get("external_id") + and request.data.get("external_source") + and Module.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + module = Module.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).first() + return Response( + { + "error": "Module with the same external id and external source already exists", + "id": str(module.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save() module = Module.objects.get(pk=serializer.data["id"]) serializer = ModuleSerializer(module) @@ -149,8 +172,25 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView): partial=True, ) if serializer.is_valid(): + if ( + request.data.get("external_id") + and (module.external_id != request.data.get("external_id")) + and Module.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source", module.external_source), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "Module with the same external id and external source already exists", + "id": str(module.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def get(self, request, slug, project_id, pk=None): diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py index f931c2ed2..0a262a071 100644 --- a/apiserver/plane/api/views/state.py +++ b/apiserver/plane/api/views/state.py @@ -38,6 +38,30 @@ class StateAPIEndpoint(BaseAPIView): data=request.data, context={"project_id":
project_id} ) if serializer.is_valid(): + if ( + request.data.get("external_id") + and request.data.get("external_source") + and State.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + state = State.objects.filter( + workspace__slug=slug, + project_id=project_id, + external_id=request.data.get("external_id"), + external_source=request.data.get("external_source"), + ).first() + return Response( + { + "error": "State with the same external id and external source already exists", + "id": str(state.id), + }, + status=status.HTTP_409_CONFLICT, + ) + serializer.save(project_id=project_id) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -91,6 +115,23 @@ class StateAPIEndpoint(BaseAPIView): ) serializer = StateSerializer(state, data=request.data, partial=True) if serializer.is_valid(): + if ( + str(request.data.get("external_id")) + and (state.external_id != str(request.data.get("external_id"))) + and State.objects.filter( + project_id=project_id, + workspace__slug=slug, + external_source=request.data.get("external_source", state.external_source), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "State with the same external id and external source already exists", + "id": str(state.id), + }, + status=status.HTTP_409_CONFLICT, + ) serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py index 0d72f9192..28e881060 100644 --- a/apiserver/plane/app/serializers/__init__.py +++ b/apiserver/plane/app/serializers/__init__.py @@ -68,6 +68,7 @@ from .issue import ( IssueRelationSerializer, RelatedIssueSerializer, IssuePublicSerializer, + IssueDetailSerializer, ) from .module import ( diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index be98bc312..90069bd41 100644 --- a/apiserver/plane/app/serializers/issue.py +++ b/apiserver/plane/app/serializers/issue.py @@ -586,7 +586,6 @@ class IssueSerializer(DynamicBaseSerializer): "id", "name", "state_id", - "description_html", "sort_order", "completed_at", "estimate_point", @@ -618,6 +617,13 @@ class IssueSerializer(DynamicBaseSerializer): return [module for module in obj.issue_module.values_list("module_id", flat=True)] +class IssueDetailSerializer(IssueSerializer): + description_html = serializers.CharField() + + class Meta(IssueSerializer.Meta): + fields = IssueSerializer.Meta.fields + ['description_html'] + + class IssueLiteSerializer(DynamicBaseSerializer): workspace_detail = WorkspaceLiteSerializer( read_only=True, source="workspace" diff --git a/apiserver/plane/app/views/auth_extended.py b/apiserver/plane/app/views/auth_extended.py index 501f47657..29cb43e38 100644 --- a/apiserver/plane/app/views/auth_extended.py +++ b/apiserver/plane/app/views/auth_extended.py @@ -401,8 +401,8 @@ class EmailCheckEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", - medium="MAGIC_LINK", + event_name="Sign up", + medium="Magic link", first_time=True, ) key, token, current_attempt = generate_magic_token(email=email) @@ -438,8 +438,8 @@ class EmailCheckEndpoint(BaseAPIView): 
email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", - medium="MAGIC_LINK", + event_name="Sign in", + medium="Magic link", first_time=False, ) @@ -468,8 +468,8 @@ class EmailCheckEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", - medium="EMAIL", + event_name="Sign in", + medium="Email", first_time=False, ) diff --git a/apiserver/plane/app/views/authentication.py b/apiserver/plane/app/views/authentication.py index a41200d61..c2b3e0b7e 100644 --- a/apiserver/plane/app/views/authentication.py +++ b/apiserver/plane/app/views/authentication.py @@ -274,8 +274,8 @@ class SignInEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", - medium="EMAIL", + event_name="Sign in", + medium="Email", first_time=False, ) @@ -349,8 +349,8 @@ class MagicSignInEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", - medium="MAGIC_LINK", + event_name="Sign in", + medium="Magic link", first_time=False, ) diff --git a/apiserver/plane/app/views/base.py b/apiserver/plane/app/views/base.py index e07cb811c..fa1e7559b 100644 --- a/apiserver/plane/app/views/base.py +++ b/apiserver/plane/app/views/base.py @@ -64,6 +64,7 @@ class WebhookMixin: action=self.request.method, slug=self.workspace_slug, bulk=self.bulk, + current_site=request.META.get("HTTP_ORIGIN"), ) return response diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle.py index 23a227fef..63d8d28ae 100644 --- a/apiserver/plane/app/views/cycle.py +++ b/apiserver/plane/app/views/cycle.py @@ -20,6 +20,7 @@ from django.core import serializers from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page +from django.core.serializers.json import DjangoJSONEncoder # Third party imports from rest_framework.response import Response @@ -242,13 +243,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet): .values("display_name", "assignee_id", "avatar") .annotate( total_issues=Count( - "assignee_id", + "id", filter=Q(archived_at__isnull=True, is_draft=False), ), ) .annotate( completed_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -258,7 +259,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -281,13 +282,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet): .values("label_name", "color", "label_id") .annotate( total_issues=Count( - "label_id", + "id", filter=Q(archived_at__isnull=True, is_draft=False), ) ) .annotate( completed_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -297,7 +298,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -312,6 +313,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet): "labels": label_distribution, "completion_chart": {}, } + if data[0]["start_date"] and data[0]["end_date"]: data[0]["distribution"][ "completion_chart" @@ -419,13 +421,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet): ) .annotate( total_issues=Count( - "assignee_id", + "id", 
filter=Q(archived_at__isnull=True, is_draft=False), ), ) .annotate( completed_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -435,7 +437,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -459,13 +461,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet): .values("label_name", "color", "label_id") .annotate( total_issues=Count( - "label_id", + "id", filter=Q(archived_at__isnull=True, is_draft=False), ), ) .annotate( completed_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -475,7 +477,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -840,10 +842,230 @@ class TransferCycleIssueEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - new_cycle = Cycle.objects.get( + new_cycle = Cycle.objects.filter( workspace__slug=slug, project_id=project_id, pk=new_cycle_id + ).first() + + old_cycle = ( + Cycle.objects.filter( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + total_estimates=Sum("issue_cycle__issue__estimate_point") + ) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) ) + # Pass the new_cycle queryset to burndown_plot + completion_chart = burndown_plot( + queryset=old_cycle.first(), + slug=slug, + project_id=project_id, + cycle_id=cycle_id, + ) + + assignee_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=cycle_id, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(display_name=F("assignees__display_name")) + 
.annotate(assignee_id=F("assignees__id")) + .annotate(avatar=F("assignees__avatar")) + .values("display_name", "assignee_id", "avatar") + .annotate( + total_issues=Count( + "id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("display_name") + ) + + label_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=cycle_id, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "id", + filter=Q(archived_at__isnull=True, is_draft=False), + ) + ) + .annotate( + completed_issues=Count( + "id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + + assignee_distribution_data = [ + { + "display_name": item["display_name"], + "assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None, + "avatar": item["avatar"], + "total_issues": item["total_issues"], + "completed_issues": item["completed_issues"], + "pending_issues": item["pending_issues"], + } + for item in assignee_distribution + ] + + label_distribution_data = [ + { + "label_name": item["label_name"], + "color": item["color"], + "label_id": str(item["label_id"]) if item["label_id"] else None, + "total_issues": item["total_issues"], + "completed_issues": item["completed_issues"], + "pending_issues": item["pending_issues"], + } + for item in label_distribution + ] + + current_cycle = Cycle.objects.filter( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ).first() + + current_cycle.progress_snapshot = { + "total_issues": old_cycle.first().total_issues, + "completed_issues": old_cycle.first().completed_issues, + "cancelled_issues": old_cycle.first().cancelled_issues, + "started_issues": old_cycle.first().started_issues, + "unstarted_issues": old_cycle.first().unstarted_issues, + "backlog_issues": old_cycle.first().backlog_issues, + "total_estimates": old_cycle.first().total_estimates, + "completed_estimates": old_cycle.first().completed_estimates, + "started_estimates": old_cycle.first().started_estimates, + "distribution":{ + "labels": label_distribution_data, + "assignees": assignee_distribution_data, + "completion_chart": completion_chart, + }, + } + current_cycle.save(update_fields=["progress_snapshot"]) + if ( new_cycle.end_date is not None and new_cycle.end_date < timezone.now().date() diff --git a/apiserver/plane/app/views/dashboard.py b/apiserver/plane/app/views/dashboard.py index 47fae2c9c..1366a2886 100644 --- a/apiserver/plane/app/views/dashboard.py +++ b/apiserver/plane/app/views/dashboard.py @@ -145,6 +145,23 @@ def dashboard_assigned_issues(self, request, slug): ) ).order_by("priority_order") + if issue_type == "pending": + pending_issues_count = assigned_issues.filter( + state__group__in=["backlog", "started", "unstarted"] + ).count() + pending_issues = assigned_issues.filter( + state__group__in=["backlog", "started", "unstarted"] + )[:5] + return Response( + { + 
"issues": IssueSerializer( + pending_issues, many=True, expand=self.expand + ).data, + "count": pending_issues_count, + }, + status=status.HTTP_200_OK, + ) + if issue_type == "completed": completed_issues_count = assigned_issues.filter( state__group__in=["completed"] @@ -257,6 +274,23 @@ def dashboard_created_issues(self, request, slug): ) ).order_by("priority_order") + if issue_type == "pending": + pending_issues_count = created_issues.filter( + state__group__in=["backlog", "started", "unstarted"] + ).count() + pending_issues = created_issues.filter( + state__group__in=["backlog", "started", "unstarted"] + )[:5] + return Response( + { + "issues": IssueSerializer( + pending_issues, many=True, expand=self.expand + ).data, + "count": pending_issues_count, + }, + status=status.HTTP_200_OK, + ) + if issue_type == "completed": completed_issues_count = created_issues.filter( state__group__in=["completed"] diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py index 0b5c612d3..c8845150a 100644 --- a/apiserver/plane/app/views/issue.py +++ b/apiserver/plane/app/views/issue.py @@ -50,6 +50,7 @@ from plane.app.serializers import ( CommentReactionSerializer, IssueRelationSerializer, RelatedIssueSerializer, + IssueDetailSerializer, ) from plane.app.permissions import ( ProjectEntityPermission, @@ -267,7 +268,7 @@ class IssueViewSet(WebhookMixin, BaseViewSet): def retrieve(self, request, slug, project_id, pk=None): issue = self.get_queryset().filter(pk=pk).first() return Response( - IssueSerializer( + IssueDetailSerializer( issue, fields=self.fields, expand=self.expand ).data, status=status.HTTP_200_OK, @@ -1668,15 +1669,9 @@ class IssueDraftViewSet(BaseViewSet): def get_queryset(self): return ( - Issue.objects.annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + Issue.objects.filter( + project_id=self.kwargs.get("project_id") ) - .filter(project_id=self.kwargs.get("project_id")) .filter(workspace__slug=self.kwargs.get("slug")) .filter(is_draft=True) .select_related("workspace", "project", "state", "parent") @@ -1710,7 +1705,7 @@ class IssueDraftViewSet(BaseViewSet): .annotate(count=Func(F("id"), function="Count")) .values("count") ) - ) + ).distinct() @method_decorator(gzip_page) def list(self, request, slug, project_id): @@ -1832,7 +1827,10 @@ class IssueDraftViewSet(BaseViewSet): notification=True, origin=request.META.get("HTTP_ORIGIN"), ) - return Response(serializer.data, status=status.HTTP_201_CREATED) + issue = ( + self.get_queryset().filter(pk=serializer.data["id"]).first() + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def partial_update(self, request, slug, project_id, pk): @@ -1868,10 +1866,13 @@ class IssueDraftViewSet(BaseViewSet): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, pk=None): - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True + issue = self.get_queryset().filter(pk=pk).first() + return Response( + IssueSerializer( + issue, fields=self.fields, expand=self.expand + ).data, + status=status.HTTP_200_OK, ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) def destroy(self, request, slug, project_id, pk=None): issue = Issue.objects.get( diff --git a/apiserver/plane/app/views/module.py 
b/apiserver/plane/app/views/module.py index 1f055129a..4792a1f79 100644 --- a/apiserver/plane/app/views/module.py +++ b/apiserver/plane/app/views/module.py @@ -197,7 +197,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): ) .annotate( total_issues=Count( - "assignee_id", + "id", filter=Q( archived_at__isnull=True, is_draft=False, @@ -206,7 +206,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): ) .annotate( completed_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -216,7 +216,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "assignee_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -239,7 +239,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): .values("label_name", "color", "label_id") .annotate( total_issues=Count( - "label_id", + "id", filter=Q( archived_at__isnull=True, is_draft=False, @@ -248,7 +248,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): ) .annotate( completed_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=False, archived_at__isnull=True, @@ -258,7 +258,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet): ) .annotate( pending_issues=Count( - "label_id", + "id", filter=Q( completed_at__isnull=True, archived_at__isnull=True, @@ -334,7 +334,7 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet): def get_queryset(self): return ( - Issue.objects.filter( + Issue.issue_objects.filter( project_id=self.kwargs.get("project_id"), workspace__slug=self.kwargs.get("slug"), issue_module__module_id=self.kwargs.get("module_id") diff --git a/apiserver/plane/app/views/oauth.py b/apiserver/plane/app/views/oauth.py index de90e4337..8152fb0ee 100644 --- a/apiserver/plane/app/views/oauth.py +++ b/apiserver/plane/app/views/oauth.py @@ -296,7 +296,7 @@ class OauthEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", + event_name="Sign in", medium=medium.upper(), first_time=False, ) @@ -427,7 +427,7 @@ class OauthEndpoint(BaseAPIView): email=email, user_agent=request.META.get("HTTP_USER_AGENT"), ip=request.META.get("REMOTE_ADDR"), - event_name="SIGN_IN", + event_name="Sign up", medium=medium.upper(), first_time=True, ) diff --git a/apiserver/plane/bgtasks/email_notification_task.py b/apiserver/plane/bgtasks/email_notification_task.py index 713835033..9e9b348e1 100644 --- a/apiserver/plane/bgtasks/email_notification_task.py +++ b/apiserver/plane/bgtasks/email_notification_task.py @@ -1,5 +1,6 @@ -import json from datetime import datetime +from bs4 import BeautifulSoup + # Third party imports from celery import shared_task @@ -9,7 +10,6 @@ from django.utils import timezone from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings # Module imports from plane.db.models import EmailNotificationLog, User, Issue @@ -40,7 +40,7 @@ def stack_email_notification(): processed_notifications = [] # Loop through all the issues to create the emails for receiver_id in receivers: - # Notifcation triggered for the receiver + # Notification triggered for the receiver receiver_notifications = [ notification for notification in email_notifications @@ -124,119 +124,153 @@ def create_payload(notification_data): return data +def process_mention(mention_component): + soup = BeautifulSoup(mention_component, 'html.parser') + mentions = 
soup.find_all('mention-component') + for mention in mentions: + user_id = mention['id'] + user = User.objects.get(pk=user_id) + user_name = user.display_name + highlighted_name = f"@{user_name}" + mention.replace_with(highlighted_name) + return str(soup) + +def process_html_content(content): + processed_content_list = [] + for html_content in content: + processed_content = process_mention(html_content) + processed_content_list.append(processed_content) + return processed_content_list @shared_task def send_email_notification( issue_id, notification_data, receiver_id, email_notification_ids ): - ri = redis_instance() - base_api = (ri.get(str(issue_id)).decode()) - data = create_payload(notification_data=notification_data) - - # Get email configurations - ( - EMAIL_HOST, - EMAIL_HOST_USER, - EMAIL_HOST_PASSWORD, - EMAIL_PORT, - EMAIL_USE_TLS, - EMAIL_FROM, - ) = get_email_configuration() - - receiver = User.objects.get(pk=receiver_id) - issue = Issue.objects.get(pk=issue_id) - template_data = [] - total_changes = 0 - comments = [] - actors_involved = [] - for actor_id, changes in data.items(): - actor = User.objects.get(pk=actor_id) - total_changes = total_changes + len(changes) - comment = changes.pop("comment", False) - actors_involved.append(actor_id) - if comment: - comments.append( - { - "actor_comments": comment, - "actor_detail": { - "avatar_url": actor.avatar, - "first_name": actor.first_name, - "last_name": actor.last_name, - }, - } - ) - activity_time = changes.pop("activity_time") - # Parse the input string into a datetime object - formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p") - - if changes: - template_data.append( - { - "actor_detail": { - "avatar_url": actor.avatar, - "first_name": actor.first_name, - "last_name": actor.last_name, - }, - "changes": changes, - "issue_details": { - "name": issue.name, - "identifier": f"{issue.project.identifier}-{issue.sequence_id}", - }, - "activity_time": str(formatted_time), - } - ) - - summary = "Updates were made to the issue by" - - # Send the mail - subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}" - context = { - "data": template_data, - "summary": summary, - "actors_involved": len(set(actors_involved)), - "issue": { - "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}", - "name": issue.name, - "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}", - }, - "receiver": { - "email": receiver.email, - }, - "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}", - "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/", - "workspace":str(issue.project.workspace.slug), - "project": str(issue.project.name), - "user_preference": f"{base_api}/profile/preferences/email", - "comments": comments, - } - html_content = render_to_string( - "emails/notifications/issue-updates.html", context - ) - text_content = strip_tags(html_content) - try: - connection = get_connection( - host=EMAIL_HOST, - port=int(EMAIL_PORT), - username=EMAIL_HOST_USER, - password=EMAIL_HOST_PASSWORD, - use_tls=EMAIL_USE_TLS == "1", - ) + ri = redis_instance() + base_api = (ri.get(str(issue_id)).decode()) + data = create_payload(notification_data=notification_data) - msg = EmailMultiAlternatives( - subject=subject, - body=text_content, - from_email=EMAIL_FROM, - to=[receiver.email], - connection=connection, - ) 
- msg.attach_alternative(html_content, "text/html") - msg.send() + # Get email configurations + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() - EmailNotificationLog.objects.filter( - pk__in=email_notification_ids - ).update(sent_at=timezone.now()) - return - except Exception as e: - print(e) + receiver = User.objects.get(pk=receiver_id) + issue = Issue.objects.get(pk=issue_id) + template_data = [] + total_changes = 0 + comments = [] + actors_involved = [] + for actor_id, changes in data.items(): + actor = User.objects.get(pk=actor_id) + total_changes = total_changes + len(changes) + comment = changes.pop("comment", False) + mention = changes.pop("mention", False) + actors_involved.append(actor_id) + if comment: + comments.append( + { + "actor_comments": comment, + "actor_detail": { + "avatar_url": actor.avatar, + "first_name": actor.first_name, + "last_name": actor.last_name, + }, + } + ) + if mention: + mention["new_value"] = process_html_content(mention.get("new_value")) + mention["old_value"] = process_html_content(mention.get("old_value")) + comments.append( + { + "actor_comments": mention, + "actor_detail": { + "avatar_url": actor.avatar, + "first_name": actor.first_name, + "last_name": actor.last_name, + }, + } + ) + activity_time = changes.pop("activity_time") + # Parse the input string into a datetime object + formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p") + + if changes: + template_data.append( + { + "actor_detail": { + "avatar_url": actor.avatar, + "first_name": actor.first_name, + "last_name": actor.last_name, + }, + "changes": changes, + "issue_details": { + "name": issue.name, + "identifier": f"{issue.project.identifier}-{issue.sequence_id}", + }, + "activity_time": str(formatted_time), + } + ) + + summary = "Updates were made to the issue by" + + # Send the mail + subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}" + context = { + "data": template_data, + "summary": summary, + "actors_involved": len(set(actors_involved)), + "issue": { + "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}", + "name": issue.name, + "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}", + }, + "receiver": { + "email": receiver.email, + }, + "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}", + "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/", + "workspace":str(issue.project.workspace.slug), + "project": str(issue.project.name), + "user_preference": f"{base_api}/profile/preferences/email", + "comments": comments, + } + html_content = render_to_string( + "emails/notifications/issue-updates.html", context + ) + text_content = strip_tags(html_content) + + try: + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=EMAIL_USE_TLS == "1", + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[receiver.email], + connection=connection, + ) + msg.attach_alternative(html_content, "text/html") + msg.send() + + EmailNotificationLog.objects.filter( + pk__in=email_notification_ids + ).update(sent_at=timezone.now()) + return + except Exception as e: + print(e) + return + except Issue.DoesNotExist: return 
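The process_mention helper added above turns each <mention-component> tag in the notification HTML into plain "@display_name" text before the email template is rendered. A minimal standalone sketch of that transformation; the data-display-name attribute here is an assumption for illustration, since the actual task resolves the name from the tag's id via User.objects.get:

    from bs4 import BeautifulSoup

    def process_mention_standalone(html):
        # Swap every custom <mention-component> tag for "@<display name>" text.
        # Reading the name from a hypothetical data-display-name attribute keeps
        # the sketch runnable without a database.
        soup = BeautifulSoup(html, "html.parser")
        for mention in soup.find_all("mention-component"):
            mention.replace_with(f"@{mention.get('data-display-name', 'unknown')}")
        return str(soup)

    html = '<p>Ping <mention-component id="u1" data-display-name="priya"></mention-component>!</p>'
    print(process_mention_standalone(html))  # <p>Ping @priya!</p>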
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index b9f6bd411..b86ab5e78 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -353,13 +353,18 @@ def track_assignees( issue_activities, epoch, ): - requested_assignees = set( - [str(asg) for asg in requested_data.get("assignee_ids", [])] + requested_assignees = ( + set([str(asg) for asg in requested_data.get("assignee_ids", [])]) + if requested_data is not None + else set() ) - current_assignees = set( - [str(asg) for asg in current_instance.get("assignee_ids", [])] + current_assignees = ( + set([str(asg) for asg in current_instance.get("assignee_ids", [])]) + if current_instance is not None + else set() ) + added_assignees = requested_assignees - current_assignees dropped_assginees = current_assignees - requested_assignees @@ -547,6 +552,20 @@ def create_issue_activity( epoch=epoch, ) ) + requested_data = ( + json.loads(requested_data) if requested_data is not None else None + ) + if requested_data is not None and requested_data.get("assignee_ids") is not None: + track_assignees( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, + ) def update_issue_activity( diff --git a/apiserver/plane/bgtasks/notification_task.py b/apiserver/plane/bgtasks/notification_task.py index 6cfbec72a..0a843e4a6 100644 --- a/apiserver/plane/bgtasks/notification_task.py +++ b/apiserver/plane/bgtasks/notification_task.py @@ -515,7 +515,7 @@ def notifications( bulk_email_logs.append( EmailNotificationLog( triggered_by_id=actor_id, - receiver_id=subscriber, + receiver_id=mention_id, entity_identifier=issue_id, entity_name="issue", data={ @@ -552,6 +552,7 @@ "old_value": str( issue_activity.get("old_value") ), + "activity_time": issue_activity.get("created_at"), }, }, ) @@ -639,6 +640,7 @@ "old_value": str( last_activity.old_value ), + "activity_time": issue_activity.get("created_at"), }, }, ) @@ -695,6 +697,7 @@ "old_value" ) ), + "activity_time": issue_activity.get("created_at"), }, }, ) diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py index 34bba0cf8..605f48dd9 100644 --- a/apiserver/plane/bgtasks/webhook_task.py +++ b/apiserver/plane/bgtasks/webhook_task.py @@ -7,6 +7,9 @@ import hmac # Django imports from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder +from django.core.mail import EmailMultiAlternatives, get_connection +from django.template.loader import render_to_string +from django.utils.html import strip_tags # Third party imports from celery import shared_task @@ -22,10 +25,10 @@ from plane.db.models import ( ModuleIssue, CycleIssue, IssueComment, + User, ) from plane.api.serializers import ( ProjectSerializer, - IssueSerializer, CycleSerializer, ModuleSerializer, CycleIssueSerializer, @@ -34,6 +37,9 @@ from plane.api.serializers import ( IssueExpandSerializer, ) +# Module imports +from plane.license.utils.instance_value import get_email_configuration + SERIALIZER_MAPPER = { "project": ProjectSerializer, "issue": IssueExpandSerializer, @@ -72,7 +78,7 @@ def get_model_data(event, event_id, many=False): max_retries=5, retry_jitter=True, ) -def webhook_task(self, webhook, slug, event, event_data, action): +def webhook_task(self, webhook, slug, event, event_data, action, current_site): try: webhook = Webhook.objects.get(id=webhook, workspace__slug=slug) @@
-151,7 +157,18 @@ def webhook_task(self, webhook, slug, event, event_data, action): response_body=str(e), retry_count=str(self.request.retries), ) - + # Retry logic + if self.request.retries >= self.max_retries: + Webhook.objects.filter(pk=webhook.id).update(is_active=False) + if webhook: + # send email for the deactivation of the webhook + send_webhook_deactivation_email( + webhook_id=webhook.id, + receiver_id=webhook.created_by_id, + reason=str(e), + current_site=current_site, + ) + return raise requests.RequestException() except Exception as e: @@ -162,7 +179,7 @@ def webhook_task(self, webhook, slug, event, event_data, action): @shared_task() -def send_webhook(event, payload, kw, action, slug, bulk): +def send_webhook(event, payload, kw, action, slug, bulk, current_site): try: webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True) @@ -216,6 +233,7 @@ def send_webhook(event, payload, kw, action, slug, bulk): event=event, event_data=data, action=action, + current_site=current_site, ) except Exception as e: @@ -223,3 +241,56 @@ def send_webhook(event, payload, kw, action, slug, bulk): print(e) capture_exception(e) return + + +@shared_task +def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reason): + # Get email configurations + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() + + receiver = User.objects.get(pk=receiver_id) + webhook = Webhook.objects.get(pk=webhook_id) + subject="Webhook Deactivated" + message=f"Webhook {webhook.url} has been deactivated due to failed requests." + + # Send the mail + context = { + "email": receiver.email, + "message": message, + "webhook_url":f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}", + } + html_content = render_to_string( + "emails/notifications/webhook-deactivate.html", context + ) + text_content = strip_tags(html_content) + + try: + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=EMAIL_USE_TLS == "1", + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[receiver.email], + connection=connection, + ) + msg.attach_alternative(html_content, "text/html") + msg.send() + + return + except Exception as e: + print(e) + return diff --git a/apiserver/plane/db/migrations/0059_auto_20240208_0957.py b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py new file mode 100644 index 000000000..c4c43fa4b --- /dev/null +++ b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.7 on 2024-02-08 09:57 + +from django.db import migrations + + +def widgets_filter_change(apps, schema_editor): + Widget = apps.get_model("db", "Widget") + widgets_to_update = [] + + # Define the filter dictionaries for each widget key + filters_mapping = { + "assigned_issues": {"duration": "none", "tab": "pending"}, + "created_issues": {"duration": "none", "tab": "pending"}, + "issues_by_state_groups": {"duration": "none"}, + "issues_by_priority": {"duration": "none"}, + } + + # Iterate over widgets and update filters if applicable + for widget in Widget.objects.all(): + if widget.key in filters_mapping: + widget.filters = filters_mapping[widget.key] + widgets_to_update.append(widget) + + # Bulk update the widgets + Widget.objects.bulk_update(widgets_to_update, ["filters"], batch_size=10) + +class Migration(migrations.Migration): + 
dependencies = [ + ('db', '0058_alter_moduleissue_issue_and_more'), + ] + operations = [ + migrations.RunPython(widgets_filter_change) + ] diff --git a/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py new file mode 100644 index 000000000..074e20a16 --- /dev/null +++ b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.7 on 2024-02-08 09:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0059_auto_20240208_0957'), + ] + + operations = [ + migrations.AddField( + model_name='cycle', + name='progress_snapshot', + field=models.JSONField(default=dict), + ), + ] diff --git a/apiserver/plane/db/models/cycle.py b/apiserver/plane/db/models/cycle.py index 5251c68ec..d802dbc1e 100644 --- a/apiserver/plane/db/models/cycle.py +++ b/apiserver/plane/db/models/cycle.py @@ -68,6 +68,7 @@ class Cycle(ProjectBaseModel): sort_order = models.FloatField(default=65535) external_source = models.CharField(max_length=255, null=True, blank=True) external_id = models.CharField(max_length=255, blank=True, null=True) + progress_snapshot = models.JSONField(default=dict) class Meta: verbose_name = "Cycle" diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py index 6f8a82e56..f254a3cb7 100644 --- a/apiserver/plane/db/models/user.py +++ b/apiserver/plane/db/models/user.py @@ -172,4 +172,9 @@ def create_user_notification(sender, instance, created, **kwargs): from plane.db.models import UserNotificationPreference UserNotificationPreference.objects.create( user=instance, + property_change=False, + state_change=False, + comment=False, + mention=False, + issue_completed=False, ) diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 444248382..f03209250 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -282,10 +282,8 @@ if REDIS_SSL: redis_url = os.environ.get("REDIS_URL") broker_url = f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" CELERY_BROKER_URL = broker_url - CELERY_RESULT_BACKEND = broker_url else: CELERY_BROKER_URL = REDIS_URL - CELERY_RESULT_BACKEND = REDIS_URL CELERY_IMPORTS = ( "plane.bgtasks.issue_automation_task", diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 0e7a18fa8..194bf8d90 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -1,6 +1,6 @@ # base requirements -Django==4.2.7 +Django==4.2.10 psycopg==3.1.12 djangorestframework==3.14.0 redis==4.6.0 @@ -30,7 +30,7 @@ openpyxl==3.1.2 beautifulsoup4==4.12.2 dj-database-url==2.1.0 posthog==3.0.2 -cryptography==41.0.6 +cryptography==42.0.0 lxml==4.9.3 boto3==1.28.40 diff --git a/apiserver/templates/emails/notifications/issue-updates.html b/apiserver/templates/emails/notifications/issue-updates.html index fa50631c5..3c561f37a 100644 --- a/apiserver/templates/emails/notifications/issue-updates.html +++ b/apiserver/templates/emails/notifications/issue-updates.html @@ -66,7 +66,7 @@ style="margin-left: 30px; margin-bottom: 20px; margin-top: 20px" > - {% if actors_involved == 1 %} -

- {{summary}} - - {{ data.0.actor_detail.first_name}} - {{data.0.actor_detail.last_name}} - . -

- {% else %} -

- {{summary}} - - {{ data.0.actor_detail.first_name}} - {{data.0.actor_detail.last_name }} - and others. -

- {% endif %} - - + {% if actors_involved == 1 %} +
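{# When the digest carries no activity rows, the actor shown below falls back to the first comment #} +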

+ {{ summary }} + + {% if data|length > 0 %} + {{ data.0.actor_detail.first_name }} + {{ data.0.actor_detail.last_name }} + {% else %} + {{ comments.0.actor_detail.first_name }} + {{ comments.0.actor_detail.last_name }} + {% endif %} + . +

+ {% else %} +

+ {{ summary }} + + {% if data|length > 0 %} + {{ data.0.actor_detail.first_name }} + {{ data.0.actor_detail.last_name }} + {% else %} + {{ comments.0.actor_detail.first_name }} + {{ comments.0.actor_detail.last_name }} + {% endif %} + and others. +

+ {% endif %} + + + + + + + + + + diff --git a/deploy/1-click/install.sh b/deploy/1-click/install.sh index f32be504d..917d08fdf 100644 --- a/deploy/1-click/install.sh +++ b/deploy/1-click/install.sh @@ -1,5 +1,6 @@ #!/bin/bash +# Fetch the plane-app command into /usr/local/bin if command -v curl &> /dev/null; then sudo curl -sSL \ -o /usr/local/bin/plane-app \ @@ -11,6 +12,6 @@ else fi sudo chmod +x /usr/local/bin/plane-app -sudo sed -i 's/export BRANCH=${BRANCH:-master}/export BRANCH='${BRANCH:-master}'/' /usr/local/bin/plane-app +sudo sed -i 's/export DEPLOY_BRANCH=${BRANCH:-master}/export DEPLOY_BRANCH='${BRANCH:-master}'/' /usr/local/bin/plane-app -sudo plane-app --help \ No newline at end of file +plane-app --help diff --git a/deploy/1-click/plane-app b/deploy/1-click/plane-app index 445f39d69..2d6ef0a6f 100644 --- a/deploy/1-click/plane-app +++ b/deploy/1-click/plane-app @@ -17,7 +17,7 @@ Project management tool from the future EOF } -function update_env_files() { +function update_env_file() { config_file=$1 key=$2 value=$3 @@ -25,14 +25,16 @@ function update_env_files() { # Check if the config file exists if [ ! -f "$config_file" ]; then echo "Config file not found. Creating a new one..." >&2 - touch "$config_file" + sudo touch "$config_file" fi # Check if the key already exists in the config file - if grep -q "^$key=" "$config_file"; then - awk -v key="$key" -v value="$value" -F '=' '{if ($1 == key) $2 = value} 1' OFS='=' "$config_file" > "$config_file.tmp" && mv "$config_file.tmp" "$config_file" + if sudo grep -q "^$key=" "$config_file"; then + sudo awk -v key="$key" -v value="$value" -F '=' '{if ($1 == key) $2 = value} 1' OFS='=' "$config_file" | sudo tee "$config_file.tmp" > /dev/null + sudo mv "$config_file.tmp" "$config_file" &> /dev/null else - echo "$key=$value" >> "$config_file" + # a plain "sudo echo ... >> file" cannot elevate the redirection, so append via sudo tee + echo -e "$key=$value" | sudo tee -a "$config_file" > /dev/null fi } function read_env_file() { @@ -42,12 +44,12 @@ config_file=$1 key=$2 # Check if the config file exists if [ ! -f "$config_file" ]; then echo "Config file not found. Creating a new one..." >&2 - touch "$config_file" + sudo touch "$config_file" fi # Check if the key already exists in the config file - if grep -q "^$key=" "$config_file"; then - value=$(awk -v key="$key" -F '=' '{if ($1 == key) print $2}' "$config_file") + if sudo grep -q "^$key=" "$config_file"; then + value=$(sudo awk -v key="$key" -F '=' '{if ($1 == key) print $2}' "$config_file") echo "$value" else echo "" @@ -55,19 +57,19 @@ } function update_config() { config_file="$PLANE_INSTALL_DIR/config.env" - update_env_files "$config_file" "$1" "$2" + update_env_file "$config_file" "$1" "$2" } function read_config() { config_file="$PLANE_INSTALL_DIR/config.env" - read_env_file "$config_file" "$1" + read_env_file "$config_file" "$1" } function update_env() { config_file="$PLANE_INSTALL_DIR/.env" - update_env_files "$config_file" "$1" "$2" + update_env_file "$config_file" "$1" "$2" } function read_env() { config_file="$PLANE_INSTALL_DIR/.env" - read_env_file "$config_file" "$1" + read_env_file "$config_file" "$1" } function show_message() { print_header @@ -87,14 +89,14 @@ function prepare_environment() { show_message "Prepare Environment..."
>&2 show_message "- Updating OS with required tools ✋" >&2 - sudo apt-get update -y &> /dev/null - sudo apt-get upgrade -y &> /dev/null + sudo "$PACKAGE_MANAGER" update -y + sudo "$PACKAGE_MANAGER" upgrade -y - required_tools=("curl" "awk" "wget" "nano" "dialog" "git") + local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap") for tool in "${required_tools[@]}"; do if ! command -v $tool &> /dev/null; then - sudo apt install -y $tool &> /dev/null + sudo "$PACKAGE_MANAGER" install -y $tool fi done @@ -103,11 +105,30 @@ function prepare_environment() { # Install Docker if not installed if ! command -v docker &> /dev/null; then show_message "- Installing Docker ✋" >&2 - sudo curl -o- https://get.docker.com | bash - + # curl -o- https://get.docker.com | bash - - if [ "$EUID" -ne 0 ]; then - dockerd-rootless-setuptool.sh install &> /dev/null + if [ "$PACKAGE_MANAGER" == "yum" ]; then + sudo $PACKAGE_MANAGER install -y yum-utils + sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo &> /dev/null + elif [ "$PACKAGE_MANAGER" == "apt-get" ]; then + # Add Docker's official GPG key: + sudo $PACKAGE_MANAGER update + sudo $PACKAGE_MANAGER install ca-certificates curl &> /dev/null + sudo install -m 0755 -d /etc/apt/keyrings &> /dev/null + sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc &> /dev/null + sudo chmod a+r /etc/apt/keyrings/docker.asc &> /dev/null + + # Add the repository to Apt sources: + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + sudo $PACKAGE_MANAGER update fi + + sudo $PACKAGE_MANAGER install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin -y + show_message "- Docker Installed ✅" "replace_last_line" >&2 else show_message "- Docker is already installed ✅" >&2 @@ -127,17 +148,17 @@ function prepare_environment() { function download_plane() { # Download Docker Compose File from github url show_message "Downloading Plane Setup Files ✋" >&2 - curl -H 'Cache-Control: no-cache, no-store' \ + sudo curl -H 'Cache-Control: no-cache, no-store' \ -s -o $PLANE_INSTALL_DIR/docker-compose.yaml \ - https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s) + https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s) - curl -H 'Cache-Control: no-cache, no-store' \ + sudo curl -H 'Cache-Control: no-cache, no-store' \ -s -o $PLANE_INSTALL_DIR/variables-upgrade.env \ - https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s) + https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s) # if .env does not exists rename variables-upgrade.env to .env if [ ! 
-f "$PLANE_INSTALL_DIR/.env" ]; then - mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env + sudo mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env fi show_message "Plane Setup Files Downloaded ✅" "replace_last_line" >&2 @@ -186,7 +207,7 @@ function build_local_image() { PLANE_TEMP_CODE_DIR=$PLANE_INSTALL_DIR/temp sudo rm -rf $PLANE_TEMP_CODE_DIR > /dev/null - sudo git clone $REPO $PLANE_TEMP_CODE_DIR --branch $BRANCH --single-branch -q > /dev/null + sudo git clone $REPO $PLANE_TEMP_CODE_DIR --branch $DEPLOY_BRANCH --single-branch -q > /dev/null sudo cp $PLANE_TEMP_CODE_DIR/deploy/selfhost/build.yml $PLANE_TEMP_CODE_DIR/build.yml @@ -199,25 +220,26 @@ function check_for_docker_images() { show_message "" >&2 # show_message "Building Plane Images" >&2 - update_env "DOCKERHUB_USER" "makeplane" - update_env "PULL_POLICY" "always" CURR_DIR=$(pwd) - if [ "$BRANCH" == "master" ]; then + if [ "$DEPLOY_BRANCH" == "master" ]; then update_env "APP_RELEASE" "latest" export APP_RELEASE=latest else - update_env "APP_RELEASE" "$BRANCH" - export APP_RELEASE=$BRANCH + update_env "APP_RELEASE" "$DEPLOY_BRANCH" + export APP_RELEASE=$DEPLOY_BRANCH fi - if [ $CPU_ARCH == "amd64" ] || [ $CPU_ARCH == "x86_64" ]; then + if [ $USE_GLOBAL_IMAGES == 1 ]; then # show_message "Building Plane Images for $CPU_ARCH is not required. Skipping... ✅" "replace_last_line" >&2 + export DOCKERHUB_USER=makeplane + update_env "DOCKERHUB_USER" "$DOCKERHUB_USER" + update_env "PULL_POLICY" "always" echo "Building Plane Images for $CPU_ARCH is not required. Skipping..." else export DOCKERHUB_USER=myplane show_message "Building Plane Images for $CPU_ARCH " >&2 - update_env "DOCKERHUB_USER" "myplane" + update_env "DOCKERHUB_USER" "$DOCKERHUB_USER" update_env "PULL_POLICY" "never" build_local_image @@ -233,7 +255,7 @@ function check_for_docker_images() { sudo sed -i "s|- uploads:|- $DATA_DIR/minio:|g" $PLANE_INSTALL_DIR/docker-compose.yaml show_message "Downloading Plane Images for $CPU_ARCH ✋" >&2 - docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml --env-file=$PLANE_INSTALL_DIR/.env pull + sudo docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml --env-file=$PLANE_INSTALL_DIR/.env pull show_message "Plane Images Downloaded ✅" "replace_last_line" >&2 } function configure_plane() { @@ -453,9 +475,11 @@ function install() { show_message "" if [ "$(uname)" == "Linux" ]; then OS="linux" - OS_NAME=$(awk -F= '/^ID=/{print $2}' /etc/os-release) - # check the OS - if [ "$OS_NAME" == "ubuntu" ]; then + OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release) + OS_NAME=$(echo "$OS_NAME" | tr -d '"') + print_header + if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || + [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then OS_SUPPORTED=true show_message "******** Installing Plane ********" show_message "" @@ -488,7 +512,8 @@ function install() { fi else - PROGRESS_MSG="❌❌❌ Unsupported OS Detected ❌❌❌" + OS_SUPPORTED=false + PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌" show_message "" exit 1 fi @@ -499,12 +524,17 @@ function install() { fi } function upgrade() { + print_header if [ "$(uname)" == "Linux" ]; then OS="linux" - OS_NAME=$(awk -F= '/^ID=/{print $2}' /etc/os-release) - # check the OS - if [ "$OS_NAME" == "ubuntu" ]; then + OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release) + OS_NAME=$(echo "$OS_NAME" | tr -d '"') + if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || + [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then + 
OS_SUPPORTED=true + show_message "******** Upgrading Plane ********" + show_message "" prepare_environment @@ -528,53 +558,49 @@ function upgrade() { exit 1 fi else - PROGRESS_MSG="Unsupported OS Detected" + PROGRESS_MSG="❌❌ Unsupported OS Variant Detected : $OS_NAME ❌❌" show_message "" exit 1 fi else - PROGRESS_MSG="Unsupported OS Detected : $(uname)" + PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌" show_message "" exit 1 fi } function uninstall() { + print_header if [ "$(uname)" == "Linux" ]; then OS="linux" OS_NAME=$(awk -F= '/^ID=/{print $2}' /etc/os-release) - # check the OS - if [ "$OS_NAME" == "ubuntu" ]; then + OS_NAME=$(echo "$OS_NAME" | tr -d '"') + if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || + [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then + OS_SUPPORTED=true show_message "******** Uninstalling Plane ********" show_message "" stop_server - # CHECK IF PLANE SERVICE EXISTS - # if [ -f "/etc/systemd/system/plane.service" ]; then - # sudo systemctl stop plane.service &> /dev/null - # sudo systemctl disable plane.service &> /dev/null - # sudo rm /etc/systemd/system/plane.service &> /dev/null - # sudo systemctl daemon-reload &> /dev/null - # fi - # show_message "- Plane Service removed ✅" if ! [ -x "$(command -v docker)" ]; then echo "DOCKER_NOT_INSTALLED" &> /dev/null else # Ask of user input to confirm uninstall docker ? - CONFIRM_DOCKER_PURGE=$(dialog --title "Uninstall Docker" --yesno "Are you sure you want to uninstall docker ?" 8 60 3>&1 1>&2 2>&3) + CONFIRM_DOCKER_PURGE=$(dialog --title "Uninstall Docker" --defaultno --yesno "Are you sure you want to uninstall docker ?" 8 60 3>&1 1>&2 2>&3) if [ $? -eq 0 ]; then show_message "- Uninstalling Docker ✋" - sudo apt-get purge -y docker-engine docker docker.io docker-ce docker-ce-cli docker-compose-plugin &> /dev/null - sudo apt-get autoremove -y --purge docker-engine docker docker.io docker-ce docker-compose-plugin &> /dev/null + sudo docker images -q | xargs -r sudo docker rmi -f &> /dev/null + sudo "$PACKAGE_MANAGER" remove -y docker-engine docker docker.io docker-ce docker-ce-cli docker-compose-plugin &> /dev/null + sudo "$PACKAGE_MANAGER" autoremove -y docker-engine docker docker.io docker-ce docker-compose-plugin &> /dev/null show_message "- Docker Uninstalled ✅" "replace_last_line" >&2 fi fi - rm $PLANE_INSTALL_DIR/.env &> /dev/null - rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null - rm $PLANE_INSTALL_DIR/config.env &> /dev/null - rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null + sudo rm $PLANE_INSTALL_DIR/.env &> /dev/null + sudo rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null + sudo rm $PLANE_INSTALL_DIR/config.env &> /dev/null + sudo rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null # rm -rf $PLANE_INSTALL_DIR &> /dev/null show_message "- Configuration Cleaned ✅" @@ -593,12 +619,12 @@ function uninstall() { show_message "" show_message "" else - PROGRESS_MSG="Unsupported OS Detected : $(uname) ❌" + PROGRESS_MSG="❌❌ Unsupported OS Variant Detected : $OS_NAME ❌❌" show_message "" exit 1 fi else - PROGRESS_MSG="Unsupported OS Detected : $(uname) ❌" + PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌" show_message "" exit 1 fi } @@ -608,15 +634,15 @@ function start_server() { env_file="$PLANE_INSTALL_DIR/.env" # check if both the files exits if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Starting Plane Server ✋" - docker compose -f $docker_compose_file --env-file=$env_file up -d + show_message "Starting Plane Server
($APP_RELEASE) ✋" + sudo docker compose -f $docker_compose_file --env-file=$env_file up -d # Wait for containers to be running echo "Waiting for containers to start..." - while ! docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do + while ! sudo docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do sleep 1 done - show_message "Plane Server Started ✅" "replace_last_line" >&2 + show_message "Plane Server Started ($APP_RELEASE) ✅" "replace_last_line" >&2 else show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2 fi @@ -626,11 +652,11 @@ function stop_server() { env_file="$PLANE_INSTALL_DIR/.env" # check if both the files exits if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Stopping Plane Server ✋" - docker compose -f $docker_compose_file --env-file=$env_file down - show_message "Plane Server Stopped ✅" "replace_last_line" >&2 + show_message "Stopping Plane Server ($APP_RELEASE) ✋" + sudo docker compose -f $docker_compose_file --env-file=$env_file down + show_message "Plane Server Stopped ($APP_RELEASE) ✅" "replace_last_line" >&2 else - show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2 + show_message "Plane Server not installed [Skipping] ✅" "replace_last_line" >&2 fi } function restart_server() { @@ -638,9 +664,9 @@ function restart_server() { env_file="$PLANE_INSTALL_DIR/.env" # check if both the files exits if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Restarting Plane Server ✋" - docker compose -f $docker_compose_file --env-file=$env_file restart - show_message "Plane Server Restarted ✅" "replace_last_line" >&2 + show_message "Restarting Plane Server ($APP_RELEASE) ✋" + sudo docker compose -f $docker_compose_file --env-file=$env_file restart + show_message "Plane Server Restarted ($APP_RELEASE) ✅" "replace_last_line" >&2 else show_message "Plane Server not installed. 
Please install Plane first ❌" "replace_last_line" >&2 fi @@ -666,28 +692,45 @@ function show_help() { } function update_installer() { show_message "Updating Plane Installer ✋" >&2 - curl -H 'Cache-Control: no-cache, no-store' \ + sudo curl -H 'Cache-Control: no-cache, no-store' \ -s -o /usr/local/bin/plane-app \ - https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/1-click/install.sh?token=$(date +%s) + https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s) - chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null + sudo chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null show_message "Plane Installer Updated ✅" "replace_last_line" >&2 } -export BRANCH=${BRANCH:-master} -export APP_RELEASE=$BRANCH +export DEPLOY_BRANCH=${BRANCH:-master} +export APP_RELEASE=$DEPLOY_BRANCH export DOCKERHUB_USER=makeplane export PULL_POLICY=always +if [ "$DEPLOY_BRANCH" == "master" ]; then + export APP_RELEASE=latest +fi + PLANE_INSTALL_DIR=/opt/plane DATA_DIR=$PLANE_INSTALL_DIR/data LOG_DIR=$PLANE_INSTALL_DIR/log OS_SUPPORTED=false CPU_ARCH=$(uname -m) PROGRESS_MSG="" -USE_GLOBAL_IMAGES=1 +USE_GLOBAL_IMAGES=0 +PACKAGE_MANAGER="" -mkdir -p $PLANE_INSTALL_DIR/{data,log} +if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $DEPLOY_BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then + USE_GLOBAL_IMAGES=1 +fi + +sudo mkdir -p $PLANE_INSTALL_DIR/{data,log} + +if command -v apt-get &> /dev/null; then + PACKAGE_MANAGER="apt-get" +elif command -v yum &> /dev/null; then + PACKAGE_MANAGER="yum" +elif command -v apk &> /dev/null; then + PACKAGE_MANAGER="apk" +fi if [ "$1" == "start" ]; then start_server @@ -704,7 +747,7 @@ elif [ "$1" == "--upgrade" ] || [ "$1" == "-up" ]; then upgrade elif [ "$1" == "--uninstall" ] || [ "$1" == "-un" ]; then uninstall -elif [ "$1" == "--update-installer" ] || [ "$1" == "-ui" ] ; then +elif [ "$1" == "--update-installer" ] || [ "$1" == "-ui" ]; then update_installer elif [ "$1" == "--help" ] || [ "$1" == "-h" ]; then show_help diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml index b223e722a..60861878c 100644 --- a/deploy/selfhost/docker-compose.yml +++ b/deploy/selfhost/docker-compose.yml @@ -38,10 +38,6 @@ x-app-env : &app-env - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} - # OPENAI SETTINGS - Deprecated can be configured through admin panel - - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - - OPENAI_API_KEY=${OPENAI_API_KEY:-""} - - GPT_ENGINE=${GPT_ENGINE:-"gpt-3.5-turbo"} # LOGIN/SIGNUP SETTINGS - Deprecated can be configured through admin panel - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1} diff --git a/deploy/selfhost/install.sh b/deploy/selfhost/install.sh index 3f306c559..30f2d15d7 100755 --- a/deploy/selfhost/install.sh +++ b/deploy/selfhost/install.sh @@ -20,8 +20,8 @@ function buildLocalImage() { DO_BUILD="2" else printf "\n" >&2 - printf "${YELLOW}You are on ${ARCH} cpu architecture. ${NC}\n" >&2 - printf "${YELLOW}Since the prebuilt ${ARCH} compatible docker images are not available for, we will be running the docker build on this system. ${NC} \n" >&2 + printf "${YELLOW}You are on ${CPU_ARCH} cpu architecture. ${NC}\n" >&2 + printf "${YELLOW}Since the prebuilt ${CPU_ARCH} compatible docker images are not available for, we will be running the docker build on this system. 
${NC} \n" >&2 printf "${YELLOW}This might take ${YELLOW}5-30 min based on your system's hardware configuration. \n ${NC} \n" >&2 printf "\n" >&2 printf "${GREEN}Select an option to proceed: ${NC}\n" >&2 @@ -49,7 +49,7 @@ function buildLocalImage() { cd $PLANE_TEMP_CODE_DIR if [ "$BRANCH" == "master" ]; then - APP_RELEASE=latest + export APP_RELEASE=latest fi docker compose -f build.yml build --no-cache >&2 @@ -149,7 +149,7 @@ function upgrade() { function askForAction() { echo echo "Select a Action you want to perform:" - echo " 1) Install (${ARCH})" + echo " 1) Install (${CPU_ARCH})" echo " 2) Start" echo " 3) Stop" echo " 4) Restart" @@ -193,8 +193,8 @@ function askForAction() { } # CPU ARCHITECHTURE BASED SETTINGS -ARCH=$(uname -m) -if [ $ARCH == "amd64" ] || [ $ARCH == "x86_64" ]; +CPU_ARCH=$(uname -m) +if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then USE_GLOBAL_IMAGES=1 DOCKERHUB_USER=makeplane @@ -205,6 +205,11 @@ else PULL_POLICY=never fi +if [ "$BRANCH" == "master" ]; +then + export APP_RELEASE=latest +fi + # REMOVE SPECIAL CHARACTERS FROM BRANCH NAME if [ "$BRANCH" != "master" ]; then diff --git a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env index 4a3781811..6d2cde0ff 100644 --- a/deploy/selfhost/variables.env +++ b/deploy/selfhost/variables.env @@ -8,13 +8,13 @@ NGINX_PORT=80 WEB_URL=http://localhost DEBUG=0 NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces -SENTRY_DSN="" -SENTRY_ENVIRONMENT="production" -GOOGLE_CLIENT_ID="" -GITHUB_CLIENT_ID="" -GITHUB_CLIENT_SECRET="" +SENTRY_DSN= +SENTRY_ENVIRONMENT=production +GOOGLE_CLIENT_ID= +GITHUB_CLIENT_ID= +GITHUB_CLIENT_SECRET= DOCKERIZED=1 # deprecated -CORS_ALLOWED_ORIGINS="http://localhost" +CORS_ALLOWED_ORIGINS=http://localhost #DB SETTINGS PGHOST=plane-db @@ -31,19 +31,14 @@ REDIS_PORT=6379 REDIS_URL=redis://${REDIS_HOST}:6379/ # EMAIL SETTINGS -EMAIL_HOST="" -EMAIL_HOST_USER="" -EMAIL_HOST_PASSWORD="" +EMAIL_HOST= +EMAIL_HOST_USER= +EMAIL_HOST_PASSWORD= EMAIL_PORT=587 -EMAIL_FROM="Team Plane " +EMAIL_FROM=Team Plane EMAIL_USE_TLS=1 EMAIL_USE_SSL=0 -# OPENAI SETTINGS -OPENAI_API_BASE=https://api.openai.com/v1 # deprecated -OPENAI_API_KEY="sk-" # deprecated -GPT_ENGINE="gpt-3.5-turbo" # deprecated - # LOGIN/SIGNUP SETTINGS ENABLE_SIGNUP=1 ENABLE_EMAIL_PASSWORD=1 @@ -52,13 +47,13 @@ SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5 # DATA STORE SETTINGS USE_MINIO=1 -AWS_REGION="" -AWS_ACCESS_KEY_ID="access-key" -AWS_SECRET_ACCESS_KEY="secret-key" +AWS_REGION= +AWS_ACCESS_KEY_ID=access-key +AWS_SECRET_ACCESS_KEY=secret-key AWS_S3_ENDPOINT_URL=http://plane-minio:9000 AWS_S3_BUCKET_NAME=uploads -MINIO_ROOT_USER="access-key" -MINIO_ROOT_PASSWORD="secret-key" +MINIO_ROOT_USER=access-key +MINIO_ROOT_PASSWORD=secret-key BUCKET_NAME=uploads FILE_SIZE_LIMIT=5242880 diff --git a/package.json b/package.json index 64bd22058..762ce322a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "repository": "https://github.com/makeplane/plane.git", - "version": "0.15.0", + "version": "0.15.1", "license": "AGPL-3.0", "private": true, "workspaces": [ @@ -34,4 +34,4 @@ "@types/react": "18.2.42" }, "packageManager": "yarn@1.22.19" -} \ No newline at end of file +} diff --git a/packages/editor/core/package.json b/packages/editor/core/package.json index 8b31acdaf..7f7f4831a 100644 --- a/packages/editor/core/package.json +++ b/packages/editor/core/package.json @@ -1,6 +1,6 @@ { "name": "@plane/editor-core", - "version": "0.15.0", 
+ "version": "0.15.1", "description": "Core Editor that powers Plane", "private": true, "main": "./dist/index.mjs", diff --git a/packages/editor/document-editor/package.json b/packages/editor/document-editor/package.json index 7a3e9bdad..b33bc12fb 100644 --- a/packages/editor/document-editor/package.json +++ b/packages/editor/document-editor/package.json @@ -1,6 +1,6 @@ { "name": "@plane/document-editor", - "version": "0.15.0", + "version": "0.15.1", "description": "Package that powers Plane's Pages Editor", "main": "./dist/index.mjs", "module": "./dist/index.mjs", diff --git a/packages/editor/document-editor/src/ui/components/content-browser.tsx b/packages/editor/document-editor/src/ui/components/content-browser.tsx index 18a50a5a8..97231ea96 100644 --- a/packages/editor/document-editor/src/ui/components/content-browser.tsx +++ b/packages/editor/document-editor/src/ui/components/content-browser.tsx @@ -6,10 +6,16 @@ import { scrollSummary } from "src/utils/editor-summary-utils"; interface ContentBrowserProps { editor: Editor; markings: IMarking[]; + setSidePeekVisible?: (sidePeekState: boolean) => void; } export const ContentBrowser = (props: ContentBrowserProps) => { - const { editor, markings } = props; + const { editor, markings, setSidePeekVisible } = props; + + const handleOnClick = (marking: IMarking) => { + scrollSummary(editor, marking); + if (setSidePeekVisible) setSidePeekVisible(false); + } return (
@@ -18,11 +24,11 @@ export const ContentBrowser = (props: ContentBrowserProps) => { {markings.length !== 0 ? ( markings.map((marking) => marking.level === 1 ? ( - scrollSummary(editor, marking)} heading={marking.text} /> + handleOnClick(marking)} heading={marking.text} /> ) : marking.level === 2 ? ( - scrollSummary(editor, marking)} subHeading={marking.text} /> + handleOnClick(marking)} subHeading={marking.text} /> ) : ( - scrollSummary(editor, marking)} /> + handleOnClick(marking)} /> ) ) ) : ( diff --git a/packages/editor/document-editor/src/ui/components/editor-header.tsx b/packages/editor/document-editor/src/ui/components/editor-header.tsx index 3501785a7..a322ddddc 100644 --- a/packages/editor/document-editor/src/ui/components/editor-header.tsx +++ b/packages/editor/document-editor/src/ui/components/editor-header.tsx @@ -42,8 +42,8 @@ export const EditorHeader = (props: IEditorHeader) => { } = props; return ( -
-
+
+
{ />
-
+
{!readonly && uploadFile && ( )} diff --git a/packages/editor/document-editor/src/ui/components/page-renderer.tsx b/packages/editor/document-editor/src/ui/components/page-renderer.tsx index c60ac0e7a..06b9e70ff 100644 --- a/packages/editor/document-editor/src/ui/components/page-renderer.tsx +++ b/packages/editor/document-editor/src/ui/components/page-renderer.tsx @@ -152,7 +152,7 @@ export const PageRenderer = (props: IPageRenderer) => { ); return ( -
+
{!readonly ? ( handlePageTitleChange(e.target.value)} diff --git a/packages/editor/document-editor/src/ui/components/summary-popover.tsx b/packages/editor/document-editor/src/ui/components/summary-popover.tsx index d3ec64f1c..6ad7cad83 100644 --- a/packages/editor/document-editor/src/ui/components/summary-popover.tsx +++ b/packages/editor/document-editor/src/ui/components/summary-popover.tsx @@ -33,23 +33,36 @@ export const SummaryPopover: React.FC = (props) => { - {!sidePeekVisible && ( -
- -
- )} +
+ {sidePeekVisible && ( +
+ +
+ )} +
+
+ {!sidePeekVisible && ( +
+ +
+ )} +
); }; diff --git a/packages/editor/document-editor/src/ui/index.tsx b/packages/editor/document-editor/src/ui/index.tsx index d1bdbc935..2491e04c7 100644 --- a/packages/editor/document-editor/src/ui/index.tsx +++ b/packages/editor/document-editor/src/ui/index.tsx @@ -10,6 +10,7 @@ import { DocumentDetails } from "src/types/editor-types"; import { PageRenderer } from "src/ui/components/page-renderer"; import { getMenuOptions } from "src/utils/menu-options"; import { useRouter } from "next/router"; +import { FixedMenu } from "src"; interface IDocumentEditor { // document info @@ -149,11 +150,14 @@ const DocumentEditor = ({ documentDetails={documentDetails} isSubmitting={isSubmitting} /> +
+ {uploadFile && } +
-
+
-
+
{ } return ( -
+
{basicMarkItems.map((item) => ( @@ -114,7 +129,8 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => { @@ -138,10 +155,12 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => { ? "cursor-not-allowed text-custom-text-200" : "cursor-pointer hover:bg-custom-background-80" } ${buttonClassName}`} - onClick={() => { + onClick={(e) => { + e.stopPropagation(); openDropdown(); if (menuButtonOnClick) menuButtonOnClick(); }} + tabIndex={customButtonTabIndex} > {label} {!noChevron && } @@ -159,6 +178,7 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => { const MenuItem: React.FC = (props) => { const { children, onClick, className = "" } = props; + return ( {({ active, close }) => ( diff --git a/packages/ui/src/dropdowns/helper.tsx b/packages/ui/src/dropdowns/helper.tsx index 06f1c44c0..930f332b9 100644 --- a/packages/ui/src/dropdowns/helper.tsx +++ b/packages/ui/src/dropdowns/helper.tsx @@ -3,6 +3,7 @@ import { Placement } from "@blueprintjs/popover2"; export interface IDropdownProps { customButtonClassName?: string; + customButtonTabIndex?: number; buttonClassName?: string; className?: string; customButton?: JSX.Element; @@ -23,6 +24,7 @@ export interface ICustomMenuDropdownProps extends IDropdownProps { noBorder?: boolean; verticalEllipsis?: boolean; menuButtonOnClick?: (...args: any) => void; + onMenuClose?: () => void; closeOnSelect?: boolean; portalElement?: Element | null; } diff --git a/packages/ui/src/form-fields/checkbox.tsx b/packages/ui/src/form-fields/checkbox.tsx new file mode 100644 index 000000000..09b90b03b --- /dev/null +++ b/packages/ui/src/form-fields/checkbox.tsx @@ -0,0 +1,67 @@ +import * as React from "react"; + +export interface CheckboxProps extends React.InputHTMLAttributes { + intermediate?: boolean; + className?: string; +} + +const Checkbox = React.forwardRef((props, ref) => { + const { id, name, checked, intermediate = false, disabled, className = "", ...rest } = props; + + return ( +
+ + + + + + + +
+ ); +}); +Checkbox.displayName = "form-checkbox-field"; + +export { Checkbox }; diff --git a/packages/ui/src/form-fields/index.ts b/packages/ui/src/form-fields/index.ts index 9cac73428..f19adcdc5 100644 --- a/packages/ui/src/form-fields/index.ts +++ b/packages/ui/src/form-fields/index.ts @@ -1,3 +1,4 @@ export * from "./input"; export * from "./textarea"; export * from "./input-color-picker"; +export * from "./checkbox"; diff --git a/packages/ui/src/hooks/use-dropdown-key-down.tsx b/packages/ui/src/hooks/use-dropdown-key-down.tsx index 1bb861477..b93a4d551 100644 --- a/packages/ui/src/hooks/use-dropdown-key-down.tsx +++ b/packages/ui/src/hooks/use-dropdown-key-down.tsx @@ -1,16 +1,23 @@ import { useCallback } from "react"; type TUseDropdownKeyDown = { - (onOpen: () => void, onClose: () => void, isOpen: boolean): (event: React.KeyboardEvent) => void; + ( + onOpen: () => void, + onClose: () => void, + isOpen: boolean, + selectActiveItem?: () => void + ): (event: React.KeyboardEvent) => void; }; -export const useDropdownKeyDown: TUseDropdownKeyDown = (onOpen, onClose, isOpen) => { +export const useDropdownKeyDown: TUseDropdownKeyDown = (onOpen, onClose, isOpen, selectActiveItem?) => { const handleKeyDown = useCallback( (event: React.KeyboardEvent) => { if (event.key === "Enter") { - event.stopPropagation(); if (!isOpen) { + event.stopPropagation(); onOpen(); + } else { + selectActiveItem && selectActiveItem(); } } else if (event.key === "Escape" && isOpen) { event.stopPropagation(); diff --git a/space/package.json b/space/package.json index 7d180d5ff..9ee7279cd 100644 --- a/space/package.json +++ b/space/package.json @@ -1,6 +1,6 @@ { "name": "space", - "version": "0.15.0", + "version": "0.15.1", "private": true, "scripts": { "dev": "turbo run develop", diff --git a/web/components/account/deactivate-account-modal.tsx b/web/components/account/deactivate-account-modal.tsx index 307a65ad2..701db6ad9 100644 --- a/web/components/account/deactivate-account-modal.tsx +++ b/web/components/account/deactivate-account-modal.tsx @@ -89,8 +89,8 @@ export const DeactivateAccountModal: React.FC = (props) => {
-
-