diff --git a/apps/app/.env.example b/.env.example
similarity index 66%
rename from apps/app/.env.example
rename to .env.example
index 9e41ba88d..118a94883 100644
--- a/apps/app/.env.example
+++ b/.env.example
@@ -1,5 +1,4 @@
 # Replace with your instance Public IP
-# NEXT_PUBLIC_API_BASE_URL = "http://localhost"
 NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
 NEXT_PUBLIC_GOOGLE_CLIENTID=""
 NEXT_PUBLIC_GITHUB_APP_NAME=""
@@ -10,3 +9,12 @@ NEXT_PUBLIC_ENABLE_SENTRY=0
 NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
 NEXT_PUBLIC_TRACK_EVENTS=0
 NEXT_PUBLIC_SLACK_CLIENT_ID=""
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+AWS_REGION=""
+AWS_ACCESS_KEY_ID=""
+AWS_SECRET_ACCESS_KEY=""
+AWS_S3_BUCKET_NAME=""
+OPENAI_API_KEY=""
+GPT_ENGINE=""
\ No newline at end of file
diff --git a/.github/workflows/push-image-backend.yml b/.github/workflows/push-image-backend.yml
index abb833922..95d93f813 100644
--- a/.github/workflows/push-image-backend.yml
+++ b/.github/workflows/push-image-backend.yml
@@ -1,4 +1,4 @@
-name: Build Api Server Docker Image
+name: Build and Push Backend Docker Image

 on:
   push:
@@ -10,11 +10,8 @@ on:

 jobs:
   build_push_backend:
-    name: Build Api Server Docker Image
+    name: Build and Push Api Server Docker Image
     runs-on: ubuntu-20.04
-    permissions:
-      contents: read
-      packages: write

     steps:
       - name: Check out the repo
@@ -28,20 +25,33 @@ jobs:
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

-      - name: Login to Github Container Registry
+      - name: Login to GitHub Container Registry
         uses: docker/login-action@v2.1.0
         with:
           registry: "ghcr.io"
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          registry: "registry.hub.docker.com"
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
+        id: dkrmeta
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: makeplane/plane-backend
+
+      - name: Extract metadata (tags, labels) for Docker (Github)
+        id: ghmeta
        uses: docker/metadata-action@v4.3.0
         with:
           images: ghcr.io/${{ github.repository }}-backend

-      - name: Build Api Server
+      - name: Build and Push to GitHub Container Registry
         uses: docker/build-push-action@v4.0.0
         with:
           context: ./apiserver
@@ -50,5 +60,18 @@ jobs:
           push: true
           cache-from: type=gha
           cache-to: type=gha
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ steps.ghmeta.outputs.tags }}
+          labels: ${{ steps.ghmeta.outputs.labels }}
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: ./apiserver
+          file: ./apiserver/Dockerfile.api
+          platforms: linux/arm64,linux/amd64
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha
+          tags: ${{ steps.dkrmeta.outputs.tags }}
+          labels: ${{ steps.dkrmeta.outputs.labels }}
+
diff --git a/.github/workflows/push-image-frontend.yml b/.github/workflows/push-image-frontend.yml
index c6a3bf1b8..cbd742511 100644
--- a/.github/workflows/push-image-frontend.yml
+++ b/.github/workflows/push-image-frontend.yml
@@ -1,4 +1,4 @@
-name: Build Frontend Docker Image
+name: Build and Push Frontend Docker Image

 on:
   push:
@@ -12,9 +12,6 @@ jobs:
   build_push_frontend:
     name: Build Frontend Docker Image
     runs-on: ubuntu-20.04
-    permissions:
-      contents: read
-      packages: write

     steps:
       - name: Check out the repo
@@ -35,13 +32,26 @@
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Extract metadata (tags, labels) for Docker
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          registry: "registry.hub.docker.com"
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
+        id: dkrmeta
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: makeplane/plane-frontend
+
+      - name: Extract metadata (tags, labels) for Docker (Github)
         id: meta
         uses: docker/metadata-action@v4.3.0
         with:
           images: ghcr.io/${{ github.repository }}-frontend

-      - name: Build Frontend Server
+      - name: Build and Push to GitHub Container Registry
         uses: docker/build-push-action@v4.0.0
         with:
           context: .
@@ -50,5 +60,18 @@ jobs:
           push: true
           cache-from: type=gha
           cache-to: type=gha
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./apps/app/Dockerfile.web
+          platforms: linux/arm64,linux/amd64
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha
+          tags: ${{ steps.dkrmeta.outputs.tags }}
+          labels: ${{ steps.dkrmeta.outputs.labels }}
+
diff --git a/Dockerfile b/Dockerfile
index 094d628e3..cb7ef6887 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,6 +3,7 @@ RUN apk add --no-cache libc6-compat
 RUN apk update
 # Set working directory
 WORKDIR /app
+ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER

 RUN yarn global add turbo
 COPY . .
@@ -16,7 +17,7 @@ FROM node:18-alpine AS installer
 RUN apk add --no-cache libc6-compat
 RUN apk update
 WORKDIR /app
-
+ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
 # First install the dependencies (as they change less often)
 COPY .gitignore .gitignore
 COPY --from=builder /app/out/json/ .
@@ -26,9 +27,16 @@ RUN yarn install
 # Build the project
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
+COPY replace-env-vars.sh /usr/local/bin/
+USER root
+RUN chmod +x /usr/local/bin/replace-env-vars.sh

 RUN yarn turbo run build --filter=app

+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}

 FROM python:3.11.1-alpine3.17 AS backend

@@ -108,6 +116,16 @@ COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf

 COPY nginx/supervisor.conf /code/supervisor.conf

+ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+USER root
+COPY replace-env-vars.sh /usr/local/bin/
+COPY start.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/replace-env-vars.sh
+RUN chmod +x /usr/local/bin/start.sh
+
 CMD ["supervisord","-c","/code/supervisor.conf"]
diff --git a/README.md b/README.md
index 102739e4e..827a2b146 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@

-Meet Plane. An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️. +Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️. > Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases. @@ -58,11 +58,18 @@ cd plane > If running in a cloud env replace localhost with public facing IP address of the VM +- Export Environment Variables + +```bash +set -a +source .env +set +a +``` - Run Docker compose up ```bash -docker-compose up +docker-compose -f docker-compose-hub.yml up ``` You can use the default email and password for your first login `captain@plane.so` and `password123`. @@ -128,7 +135,7 @@ To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/m The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects. -To chat with other community members you can join the [Plane Discord](https://discord.com/invite/q9HKAdau). +To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge). Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels. diff --git a/apiserver/.env.example b/apiserver/.env.example deleted file mode 100644 index 8a7c76ffa..000000000 --- a/apiserver/.env.example +++ /dev/null @@ -1,28 +0,0 @@ -DJANGO_SETTINGS_MODULE="plane.settings.production" -# Database -DATABASE_URL=postgres://plane:xyzzyspoon@db:5432/plane -# Cache -REDIS_URL=redis://redis:6379/ -# SMTP -EMAIL_HOST="" -EMAIL_HOST_USER="" -EMAIL_HOST_PASSWORD="" -EMAIL_PORT="587" -EMAIL_USE_TLS="1" -EMAIL_FROM="Team Plane " -# AWS -AWS_REGION="" -AWS_ACCESS_KEY_ID="" -AWS_SECRET_ACCESS_KEY="" -AWS_S3_BUCKET_NAME="" -AWS_S3_ENDPOINT_URL="" -# FE -WEB_URL="localhost/" -# OAUTH -GITHUB_CLIENT_SECRET="" -# Flags -DISABLE_COLLECTSTATIC=1 -DOCKERIZED=1 -# GPT Envs -OPENAI_API_KEY=0 -GPT_ENGINE=0 diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py index 0f272755f..c04ee7771 100644 --- a/apiserver/back_migration.py +++ b/apiserver/back_migration.py @@ -204,7 +204,21 @@ def update_integration_verified(): Integration.objects.bulk_update( updated_integrations, ["verified"], batch_size=10 ) - print("Sucess") + print("Success") + except Exception as e: + print(e) + print("Failed") + + +def update_start_date(): + try: + issues = Issue.objects.filter(state__group__in=["started", "completed"]) + updated_issues = [] + for issue in issues: + issue.start_date = issue.created_at.date() + updated_issues.append(issue) + Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500) + print("Success") except Exception as e: print(e) print("Failed") diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index 79014c53d..505a9978d 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -70,3 +70,5 @@ from .importer import ImporterSerializer from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer from .estimate import EstimateSerializer, EstimatePointSerializer, EstimateReadSerializer + +from .analytic 
import AnalyticViewSerializer
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py
new file mode 100644
index 000000000..5f35e1117
--- /dev/null
+++ b/apiserver/plane/api/serializers/analytic.py
@@ -0,0 +1,29 @@
+from .base import BaseSerializer
+from plane.db.models import AnalyticView
+from plane.utils.issue_filters import issue_filters
+
+
+class AnalyticViewSerializer(BaseSerializer):
+    class Meta:
+        model = AnalyticView
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "query",
+        ]
+
+    def create(self, validated_data):
+        query_params = validated_data.get("query_dict", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        return AnalyticView.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        query_params = validated_data.get("query_dict", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "PATCH")
+        else:
+            validated_data["query"] = dict()
+        return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index 5c06a28e7..5c1c68fb8 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -19,10 +19,32 @@ class CycleSerializer(BaseSerializer):
     started_issues = serializers.IntegerField(read_only=True)
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
+    assignees = serializers.SerializerMethodField(read_only=True)
+    total_estimates = serializers.IntegerField(read_only=True)
+    completed_estimates = serializers.IntegerField(read_only=True)
+    started_estimates = serializers.IntegerField(read_only=True)
     workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")

+    def get_assignees(self, obj):
+        members = [
+            {
+                "avatar": assignee.avatar,
+                "first_name": assignee.first_name,
+                "id": assignee.id,
+            }
+            for issue_cycle in obj.issue_cycle.all()
+            for assignee in issue_cycle.issue.assignees.all()
+        ]
+        # Use a set comprehension to return only the unique objects
+        unique_objects = {frozenset(item.items()) for item in members}
+
+        # Convert the set back to a list of dictionaries
+        unique_list = [dict(item) for item in unique_objects]
+
+        return unique_list
+
     class Meta:
         model = Cycle
         fields = "__all__"
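The `get_assignees` dedup above hashes each member dict as a `frozenset` of its items, which works because every value in the dict is hashable, though it does not preserve order. A minimal standalone sketch of the same idea, with invented sample data (not from the Plane models):

```python
# Dedup a list of dicts the way CycleSerializer.get_assignees does above.
members = [
    {"id": 1, "first_name": "Ada", "avatar": ""},
    {"id": 2, "first_name": "Grace", "avatar": ""},
    {"id": 1, "first_name": "Ada", "avatar": ""},  # duplicate assignee
]

# Freeze each dict into a hashable form so a set can drop duplicates.
unique_objects = {frozenset(item.items()) for item in members}

# Thaw back into dicts; note the original ordering is not preserved.
unique_list = [dict(item) for item in unique_objects]

assert len(unique_list) == 2
```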
diff --git a/apiserver/plane/api/serializers/estimate.py b/apiserver/plane/api/serializers/estimate.py
index 360275562..3cb0e4713 100644
--- a/apiserver/plane/api/serializers/estimate.py
+++ b/apiserver/plane/api/serializers/estimate.py
@@ -2,9 +2,13 @@
 from .base import BaseSerializer

 from plane.db.models import Estimate, EstimatePoint
+from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer


 class EstimateSerializer(BaseSerializer):
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+
     class Meta:
         model = Estimate
         fields = "__all__"
@@ -27,6 +31,8 @@ class EstimatePointSerializer(BaseSerializer):

 class EstimateReadSerializer(BaseSerializer):
     points = EstimatePointSerializer(read_only=True, many=True)
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")

     class Meta:
         model = Estimate
diff --git a/apiserver/plane/api/serializers/importer.py b/apiserver/plane/api/serializers/importer.py
index fcc7da6ce..8997f6392 100644
--- a/apiserver/plane/api/serializers/importer.py
+++ b/apiserver/plane/api/serializers/importer.py
@@ -2,12 +2,14 @@
 from .base import BaseSerializer
 from .user import UserLiteSerializer
 from .project import ProjectLiteSerializer
+from .workspace import WorkspaceLiteSerializer

 from plane.db.models import Importer


 class ImporterSerializer(BaseSerializer):
     initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

     class Meta:
         model = Importer
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index bf06b3fa2..0a8ad1cf8 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -82,6 +82,9 @@ class ProjectDetailSerializer(BaseSerializer):
     default_assignee = UserLiteSerializer(read_only=True)
     project_lead = UserLiteSerializer(read_only=True)
     is_favorite = serializers.BooleanField(read_only=True)
+    total_members = serializers.IntegerField(read_only=True)
+    total_cycles = serializers.IntegerField(read_only=True)
+    total_modules = serializers.IntegerField(read_only=True)

     class Meta:
         model = Project
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index a88744b4a..93af9d762 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -148,6 +148,13 @@ from plane.api.views import (
     # Release Notes
     ReleaseNotesEndpoint,
     ## End Release Notes
+    # Analytics
+    AnalyticsEndpoint,
+    AnalyticViewViewset,
+    SavedAnalyticEndpoint,
+    ExportAnalyticsEndpoint,
+    DefaultAnalyticsEndpoint,
+    ## End Analytics
 )


@@ -308,7 +315,6 @@ urlpatterns = [
         "workspaces/<str:slug>/members/<uuid:pk>/",
         WorkSpaceMemberViewSet.as_view(
             {
-                "put": "update",
                 "patch": "partial_update",
                 "delete": "destroy",
                 "get": "retrieve",
@@ -418,7 +424,6 @@ urlpatterns = [
         ProjectMemberViewSet.as_view(
             {
                 "get": "retrieve",
-                "put": "update",
                 "patch": "partial_update",
                 "delete": "destroy",
             }
@@ -1285,4 +1290,38 @@ urlpatterns = [
         name="release-notes",
     ),
     ## End Release Notes
+    # Analytics
+    path(
+        "workspaces/<str:slug>/analytics/",
+        AnalyticsEndpoint.as_view(),
+        name="plane-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/analytic-view/",
+        AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
+        name="analytic-view",
+    ),
+    path(
+        "workspaces/<str:slug>/analytic-view/<uuid:pk>/",
+        AnalyticViewViewset.as_view(
+            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
+        ),
+        name="analytic-view",
+    ),
+    path(
+        "workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
+        SavedAnalyticEndpoint.as_view(),
+        name="saved-analytic-view",
+    ),
+    path(
+        "workspaces/<str:slug>/export-analytics/",
+        ExportAnalyticsEndpoint.as_view(),
+        name="export-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/default-analytics/",
+        DefaultAnalyticsEndpoint.as_view(),
+        name="default-analytics",
+    ),
+    ## End Analytics
 ]
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 536fd83bf..65554f529 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -140,3 +140,11 @@ from .estimate import (

 from .release import ReleaseNotesEndpoint
+
+from .analytic import (
+    AnalyticsEndpoint,
+    AnalyticViewViewset,
+    SavedAnalyticEndpoint,
+    
ExportAnalyticsEndpoint, + DefaultAnalyticsEndpoint, +) diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py new file mode 100644 index 000000000..56ca12bae --- /dev/null +++ b/apiserver/plane/api/views/analytic.py @@ -0,0 +1,295 @@ +# Django imports +from django.db.models import ( + Count, + Sum, + F, +) +from django.db.models.functions import ExtractMonth + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from sentry_sdk import capture_exception + +# Module imports +from plane.api.views import BaseAPIView, BaseViewSet +from plane.api.permissions import WorkSpaceAdminPermission +from plane.db.models import Issue, AnalyticView, Workspace, State, Label +from plane.api.serializers import AnalyticViewSerializer +from plane.utils.analytics_plot import build_graph_plot +from plane.bgtasks.analytic_plot_export import analytic_export_task +from plane.utils.issue_filters import issue_filters + + +class AnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug): + try: + x_axis = request.GET.get("x_axis", False) + y_axis = request.GET.get("y_axis", False) + + if not x_axis or not y_axis: + return Response( + {"error": "x-axis and y-axis dimensions are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + segment = request.GET.get("segment", False) + filters = issue_filters(request.GET, "GET") + + queryset = Issue.objects.filter(workspace__slug=slug, **filters) + + total_issues = queryset.count() + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + + colors = dict() + if x_axis in ["state__name", "state__group"] or segment in [ + "state__name", + "state__group", + ]: + if x_axis in ["state__name", "state__group"]: + key = "name" if x_axis == "state__name" else "group" + else: + key = "name" if segment == "state__name" else "group" + + colors = ( + State.objects.filter( + workspace__slug=slug, project_id__in=filters.get("project__in") + ).values(key, "color") + if filters.get("project__in", False) + else State.objects.filter(workspace__slug=slug).values(key, "color") + ) + + if x_axis in ["labels__name"] or segment in ["labels__name"]: + colors = ( + Label.objects.filter( + workspace__slug=slug, project_id__in=filters.get("project__in") + ).values("name", "color") + if filters.get("project__in", False) + else Label.objects.filter(workspace__slug=slug).values( + "name", "color" + ) + ) + + assignee_details = {} + if x_axis in ["assignees__email"] or segment in ["assignees__email"]: + assignee_details = ( + Issue.objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) + .order_by("assignees__id") + .distinct("assignees__id") + .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name") + ) + + + return Response( + { + "total": total_issues, + "distribution": distribution, + "extras": {"colors": colors, "assignee_details": assignee_details}, + }, + status=status.HTTP_200_OK, + ) + + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class AnalyticViewViewset(BaseViewSet): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = AnalyticView + serializer_class = AnalyticViewSerializer + + def perform_create(self, serializer): + workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) + 
serializer.save(workspace_id=workspace.id) + + def get_queryset(self): + return self.filter_queryset( + super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) + ) + + +class SavedAnalyticEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug, analytic_id): + try: + analytic_view = AnalyticView.objects.get( + pk=analytic_id, workspace__slug=slug + ) + + filter = analytic_view.query + queryset = Issue.objects.filter(**filter) + + x_axis = analytic_view.query_dict.get("x_axis", False) + y_axis = analytic_view.query_dict.get("y_axis", False) + + if not x_axis or not y_axis: + return Response( + {"error": "x-axis and y-axis dimensions are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + segment = request.GET.get("segment", False) + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + total_issues = queryset.count() + return Response( + {"total": total_issues, "distribution": distribution}, + status=status.HTTP_200_OK, + ) + + except AnalyticView.DoesNotExist: + return Response( + {"error": "Analytic View Does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ExportAnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def post(self, request, slug): + try: + x_axis = request.data.get("x_axis", False) + y_axis = request.data.get("y_axis", False) + + if not x_axis or not y_axis: + return Response( + {"error": "x-axis and y-axis dimensions are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + analytic_export_task.delay( + email=request.user.email, data=request.data, slug=slug + ) + + return Response( + { + "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" + }, + status=status.HTTP_200_OK, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class DefaultAnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug): + try: + filters = issue_filters(request.GET, "GET") + + queryset = Issue.objects.filter(workspace__slug=slug, **filters) + + total_issues = queryset.count() + + total_issues_classified = ( + queryset.annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + open_issues = queryset.filter( + state__group__in=["backlog", "unstarted", "started"] + ).count() + + open_issues_classified = ( + queryset.filter(state__group__in=["backlog", "unstarted", "started"]) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + issue_completed_month_wise = ( + queryset.filter(completed_at__isnull=False) + .annotate(month=ExtractMonth("completed_at")) + .values("month") + .annotate(count=Count("*")) + .order_by("month") + ) + most_issue_created_user = ( + queryset.exclude(created_by=None) + .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__email") + .annotate(count=Count("id")) + .order_by("-count") + )[:5] + + most_issue_closed_user = ( + queryset.filter(completed_at__isnull=False, 
assignees__isnull=False)
+                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
+                .annotate(count=Count("id"))
+                .order_by("-count")
+            )[:5]
+
+            pending_issue_user = (
+                queryset.filter(completed_at__isnull=True)
+                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
+                .annotate(count=Count("id"))
+                .order_by("-count")
+            )
+
+            open_estimate_sum = (
+                queryset.filter(
+                    state__group__in=["backlog", "unstarted", "started"]
+                ).aggregate(open_estimate_sum=Sum("estimate_point"))
+            )["open_estimate_sum"]
+
+            total_estimate_sum = queryset.aggregate(
+                total_estimate_sum=Sum("estimate_point")
+            )["total_estimate_sum"]
+
+            return Response(
+                {
+                    "total_issues": total_issues,
+                    "total_issues_classified": total_issues_classified,
+                    "open_issues": open_issues,
+                    "open_issues_classified": open_issues_classified,
+                    "issue_completed_month_wise": issue_completed_month_wise,
+                    "most_issue_created_user": most_issue_created_user,
+                    "most_issue_closed_user": most_issue_closed_user,
+                    "pending_issue_user": pending_issue_user,
+                    "open_estimate_sum": open_estimate_sum,
+                    "total_estimate_sum": total_estimate_sum,
+                },
+                status=status.HTTP_200_OK,
+            )
+
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index 9265aca00..b12b49b2f 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -3,7 +3,17 @@ import json

 # Django imports
 from django.db import IntegrityError
-from django.db.models import OuterRef, Func, F, Q, Exists, OuterRef, Count, Prefetch
+from django.db.models import (
+    OuterRef,
+    Func,
+    F,
+    Q,
+    Exists,
+    OuterRef,
+    Count,
+    Prefetch,
+    Sum,
+)
 from django.core import serializers
 from django.utils import timezone
 from django.utils.decorators import method_decorator
@@ -24,6 +34,7 @@ from plane.api.serializers import (
 )
 from plane.api.permissions import ProjectEntityPermission
 from plane.db.models import (
+    User,
     Cycle,
     CycleIssue,
     Issue,
@@ -118,6 +129,25 @@ class CycleViewSet(BaseViewSet):
                     filter=Q(issue_cycle__issue__state__group="backlog"),
                 )
             )
+            .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+            .annotate(
+                completed_estimates=Sum(
+                    "issue_cycle__issue__estimate_point",
+                    filter=Q(issue_cycle__issue__state__group="completed"),
+                )
+            )
+            .annotate(
+                started_estimates=Sum(
+                    "issue_cycle__issue__estimate_point",
+                    filter=Q(issue_cycle__issue__state__group="started"),
+                )
+            )
+            .prefetch_related(
+                Prefetch(
+                    "issue_cycle__issue__assignees",
+                    queryset=User.objects.only("avatar", "first_name", "id").distinct(),
+                )
+            )
             .order_by("-is_favorite", "name")
             .distinct()
         )
@@ -413,7 +443,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
         try:
             start_date = request.data.get("start_date", False)
             end_date = request.data.get("end_date", False)
-
+            cycle_id = request.data.get("cycle_id")
             if not start_date or not end_date:
                 return Response(
                     {"error": "Start date and end date both are required"},
@@ -421,12 +451,14 @@
                 )

             cycles = Cycle.objects.filter(
-                Q(start_date__lte=start_date, end_date__gte=start_date)
-                | Q(start_date__lte=end_date, end_date__gte=end_date)
-                | Q(start_date__gte=start_date, end_date__lte=end_date),
-                workspace__slug=slug,
-                project_id=project_id,
-            )
+                Q(workspace__slug=slug)
+ & Q(project_id=project_id) + & ( + Q(start_date__lte=start_date, end_date__gte=start_date) + | Q(start_date__lte=end_date, end_date__gte=end_date) + | Q(start_date__gte=start_date, end_date__lte=end_date) + ) + ).exclude(pk=cycle_id) if cycles.exists(): return Response( @@ -501,6 +533,27 @@ class CurrentUpcomingCyclesEndpoint(BaseAPIView): filter=Q(issue_cycle__issue__state__group="backlog"), ) ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="completed"), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="started"), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only( + "avatar", "first_name", "id" + ).distinct(), + ) + ) .order_by("name", "-is_favorite") ) @@ -545,6 +598,27 @@ class CurrentUpcomingCyclesEndpoint(BaseAPIView): filter=Q(issue_cycle__issue__state__group="backlog"), ) ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="completed"), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="started"), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only( + "avatar", "first_name", "id" + ).distinct(), + ) + ) .order_by("name", "-is_favorite") ) @@ -618,6 +692,27 @@ class CompletedCyclesEndpoint(BaseAPIView): filter=Q(issue_cycle__issue__state__group="backlog"), ) ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="completed"), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="started"), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only( + "avatar", "first_name", "id" + ).distinct(), + ) + ) .order_by("name", "-is_favorite") ) @@ -693,6 +788,27 @@ class DraftCyclesEndpoint(BaseAPIView): filter=Q(issue_cycle__issue__state__group="backlog"), ) ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="completed"), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q(issue_cycle__issue__state__group="started"), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only( + "avatar", "first_name", "id" + ).distinct(), + ) + ) .order_by("name", "-is_favorite") ) diff --git a/apiserver/plane/api/views/estimate.py b/apiserver/plane/api/views/estimate.py index e878ccafc..68de54d7a 100644 --- a/apiserver/plane/api/views/estimate.py +++ b/apiserver/plane/api/views/estimate.py @@ -53,11 +53,11 @@ class BulkEstimatePointEndpoint(BaseViewSet): try: estimates = Estimate.objects.filter( workspace__slug=slug, project_id=project_id - ).prefetch_related("points") + ).prefetch_related("points").select_related("workspace", "project") serializer = EstimateReadSerializer(estimates, many=True) return Response(serializer.data, status=status.HTTP_200_OK) 
except Exception as e: - print(e) + capture_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, @@ -211,7 +211,7 @@ class BulkEstimatePointEndpoint(BaseViewSet): try: EstimatePoint.objects.bulk_update( - updated_estimate_points, ["value"], batch_size=10 + updated_estimate_points, ["value"], batch_size=10, ) except IntegrityError as e: return Response( diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py index b9a7fe0c5..2e0f1cec0 100644 --- a/apiserver/plane/api/views/importer.py +++ b/apiserver/plane/api/views/importer.py @@ -363,6 +363,7 @@ class BulkImportIssuesEndpoint(BaseAPIView): start_date=issue_data.get("start_date", None), target_date=issue_data.get("target_date", None), priority=issue_data.get("priority", None), + created_by=request.user, ) ) @@ -400,7 +401,6 @@ class BulkImportIssuesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for label_id in labels_list ] @@ -420,7 +420,6 @@ class BulkImportIssuesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for assignee_id in assignees_list ] @@ -439,6 +438,7 @@ class BulkImportIssuesEndpoint(BaseAPIView): workspace_id=project.workspace_id, comment=f"{request.user.email} importer the issue from {service}", verb="created", + created_by=request.user, ) for issue in issues ], @@ -457,7 +457,6 @@ class BulkImportIssuesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for comment in comments_list ] @@ -474,7 +473,6 @@ class BulkImportIssuesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for issue, issue_data in zip(issues, issues_data) ] @@ -512,7 +510,6 @@ class BulkImportModulesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for module in modules_data ], @@ -536,7 +533,6 @@ class BulkImportModulesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for module, module_data in zip(modules, modules_data) ], @@ -554,7 +550,6 @@ class BulkImportModulesEndpoint(BaseAPIView): project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, - updated_by=request.user, ) for issue in module_issues_list ] diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 869bd15c9..f6c4ed87d 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -5,7 +5,7 @@ from datetime import datetime # Django imports from django.core.exceptions import ValidationError from django.db import IntegrityError -from django.db.models import Q, Exists, OuterRef +from django.db.models import Q, Exists, OuterRef, Func, F from django.core.validators import validate_email from django.conf import settings @@ -46,6 +46,8 @@ from plane.db.models import ( ProjectMemberInvite, User, ProjectIdentifier, + Cycle, + Module, ) from plane.bgtasks.project_invitation_task import project_invitation @@ -92,6 +94,26 @@ class ProjectViewSet(BaseViewSet): self.get_queryset() .annotate(is_favorite=Exists(subquery)) .order_by("-is_favorite", "name") + .annotate( + 
total_members=ProjectMember.objects.filter( + project_id=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_modules=Module.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) ) return Response(ProjectDetailSerializer(projects, many=True).data) except Exception as e: @@ -161,6 +183,7 @@ class ProjectViewSet(BaseViewSet): workspace=serializer.instance.workspace, group=state["group"], default=state.get("default", False), + created_by=request.user, ) for state in states ] @@ -344,6 +367,7 @@ class UserProjectInvitationsViewset(BaseViewSet): workspace=invitation.project.workspace, member=request.user, role=invitation.role, + created_by=request.user, ) for invitation in project_invitations ] @@ -385,6 +409,41 @@ class ProjectMemberViewSet(BaseViewSet): .select_related("workspace", "workspace__owner") ) + def partial_update(self, request, slug, project_id, pk): + try: + project_member = ProjectMember.objects.get(pk=pk, workspace__slug=slug, project_id=project_id) + if request.user.id == project_member.member_id: + return Response( + {"error": "You cannot update your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if request.data.get("role", 10) > project_member.role: + return Response( + { + "error": "You cannot update a role that is higher than your own role" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = ProjectMemberSerializer( + project_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except ProjectMember.DoesNotExist: + return Response( + {"error": "Project Member does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) + + class AddMemberToProjectEndpoint(BaseAPIView): permission_classes = [ @@ -465,6 +524,7 @@ class AddTeamToProjectEndpoint(BaseAPIView): project_id=project_id, member_id=member, workspace=workspace, + created_by=request.user, ) ) @@ -612,6 +672,7 @@ class ProjectJoinEndpoint(BaseAPIView): if workspace_role >= 15 else (15 if workspace_role == 10 else workspace_role), workspace=workspace, + created_by=request.user, ) for project_id in project_ids ], diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py index b4e300dcb..1b6fb42cc 100644 --- a/apiserver/plane/api/views/view.py +++ b/apiserver/plane/api/views/view.py @@ -18,10 +18,6 @@ from plane.api.permissions import ProjectEntityPermission from plane.db.models import ( IssueView, Issue, - IssueBlocker, - IssueLink, - CycleIssue, - ModuleIssue, IssueViewFavorite, ) from plane.utils.issue_filters import issue_filters diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index 8a2791e3b..dcb8941a1 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py @@ -223,6 +223,7 @@ class InviteWorkspaceEndpoint(BaseAPIView): algorithm="HS256", ), role=email.get("role", 10), + created_by=request.user, ) ) except ValidationError: @@ -381,6 +382,7 @@ class 
UserWorkspaceInvitationsEndpoint(BaseViewSet):
                         workspace=invitation.workspace,
                         member=request.user,
                         role=invitation.role,
+                        created_by=request.user,
                     )
                     for invitation in workspace_invitations
                 ],
@@ -421,6 +423,43 @@ class WorkSpaceMemberViewSet(BaseViewSet):
             .select_related("member")
         )

+    def partial_update(self, request, slug, pk):
+        try:
+            workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
+            if request.user.id == workspace_member.member_id:
+                return Response(
+                    {"error": "You cannot update your own role"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if request.data.get("role", 10) > workspace_member.role:
+                return Response(
+                    {
+                        "error": "You cannot update a role that is higher than your own role"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            serializer = WorkSpaceMemberSerializer(
+                workspace_member, data=request.data, partial=True
+            )
+
+            if serializer.is_valid():
+                serializer.save()
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except WorkspaceMember.DoesNotExist:
+            return Response(
+                {"error": "Workspace Member does not exist"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+

 class TeamMemberViewSet(BaseViewSet):
     serializer_class = TeamSerializer
@@ -783,4 +822,3 @@ class WorkspaceThemeViewSet(BaseViewSet):
                 {"error": "Something went wrong please try again later"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
-
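The two `partial_update` handlers above (project members and workspace members) share the same guard logic. A standalone sketch of that check with invented IDs; note that, as in the code, the requested role is compared against the member being updated:

```python
# Sketch of the role guard used by the partial_update handlers above.
# Role values are illustrative integers (higher means more privileged).
def can_update_role(actor_id, member_id, member_role, requested_role):
    if actor_id == member_id:
        return False  # "You cannot update your own role"
    if requested_role > member_role:
        return False  # may not grant a role above the member's current one
    return True

assert can_update_role(actor_id=1, member_id=2, member_role=15, requested_role=10)
assert not can_update_role(actor_id=1, member_id=1, member_role=20, requested_role=15)
assert not can_update_role(actor_id=1, member_id=2, member_role=10, requested_role=20)
```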
"assignees__last_name") + ) + + if segment: + segment_zero = [] + for item in distribution: + current_dict = distribution.get(item) + for current in current_dict: + segment_zero.append(current.get("segment")) + + segment_zero = list(set(segment_zero)) + row_zero = ( + [ + row_mapping.get(x_axis, "X-Axis"), + ] + + [ + row_mapping.get(y_axis, "Y-Axis"), + ] + + segment_zero + ) + rows = [] + for item in distribution: + generated_row = [ + item, + ] + + data = distribution.get(item) + # Add y axis values + generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None)) + + for segment in segment_zero: + value = [x for x in data if x.get("segment") == segment] + if len(value): + generated_row.append(value[0].get(key)) + else: + generated_row.append("0") + # x-axis replacement for names + if x_axis in ["assignees__email"]: + assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)] + if len(assignee): + generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) + rows.append(tuple(generated_row)) + + # If segment is ["assignees__email"] then replace segment_zero rows with first and last names + if segmented in ["assignees__email"]: + for index, segm in enumerate(row_zero[2:]): + # find the name of the user + assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(segm)] + if len(assignee): + row_zero[index] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) + + rows = [tuple(row_zero)] + rows + csv_buffer = io.StringIO() + writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + + # Write CSV data to the buffer + for row in rows: + writer.writerow(row) + + subject = "Your Export is ready" + + html_content = render_to_string("emails/exports/analytics.html", {}) + + text_content = strip_tags(html_content) + csv_buffer.seek(0) + msg = EmailMultiAlternatives( + subject, text_content, settings.EMAIL_FROM, [email] + ) + msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) + msg.send(fail_silently=False) + + else: + row_zero = [ + row_mapping.get(x_axis, "X-Axis"), + row_mapping.get(y_axis, "Y-Axis"), + ] + rows = [] + for item in distribution: + row = [ + item, + distribution.get(item)[0].get("count") + if y_axis == "issue_count" + else distribution.get(item)[0].get("estimate "), + ] + # x-axis replacement to names + if x_axis in ["assignees__email"]: + assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)] + if len(assignee): + row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) + + rows.append(tuple(row)) + rows = [tuple(row_zero)] + rows + csv_buffer = io.StringIO() + writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + + # Write CSV data to the buffer + for row in rows: + writer.writerow(row) + + subject = "Your Export is ready" + + html_content = render_to_string("emails/exports/analytics.html", {}) + + text_content = strip_tags(html_content) + + csv_buffer.seek(0) + msg = EmailMultiAlternatives( + subject, text_content, settings.EMAIL_FROM, [email] + ) + msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) + msg.send(fail_silently=False) + + except Exception as e: + print(e) + capture_exception(e) + return diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py index 291b71be3..85ac1c89b 100644 --- 
a/apiserver/plane/bgtasks/importer_task.py +++ b/apiserver/plane/bgtasks/importer_task.py @@ -27,7 +27,7 @@ from plane.db.models import ( User, ) from .workspace_invitation_task import workspace_invitation -from plane.bgtasks.user_welcome_task import send_welcome_email +from plane.bgtasks.user_welcome_task import send_welcome_slack @shared_task @@ -58,7 +58,7 @@ def service_importer(service, importer_id): ) [ - send_welcome_email.delay( + send_welcome_slack.delay( str(user.id), True, f"{user.email} was imported to Plane from {service}", @@ -78,7 +78,11 @@ def service_importer(service, importer_id): # Add new users to Workspace and project automatically WorkspaceMember.objects.bulk_create( [ - WorkspaceMember(member=user, workspace_id=importer.workspace_id) + WorkspaceMember( + member=user, + workspace_id=importer.workspace_id, + created_by=importer.created_by, + ) for user in workspace_users ], batch_size=100, @@ -91,6 +95,7 @@ def service_importer(service, importer_id): project_id=importer.project_id, workspace_id=importer.workspace_id, member=user, + created_by=importer.created_by, ) for user in workspace_users ], diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index c749d9c15..417fe2324 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -136,7 +136,6 @@ def track_priority( comment=f"{actor.email} updated the priority to {requested_data.get('priority')}", ) ) - print(issue_activities) # Track chnages in state of the issue diff --git a/apiserver/plane/bgtasks/user_welcome_task.py b/apiserver/plane/bgtasks/user_welcome_task.py index c042d0a0b..bea2ee33d 100644 --- a/apiserver/plane/bgtasks/user_welcome_task.py +++ b/apiserver/plane/bgtasks/user_welcome_task.py @@ -1,8 +1,5 @@ # Django imports from django.conf import settings -from django.core.mail import EmailMultiAlternatives -from django.template.loader import render_to_string -from django.utils.html import strip_tags # Third party imports from celery import shared_task @@ -15,31 +12,11 @@ from plane.db.models import User @shared_task -def send_welcome_email(user_id, created, message): +def send_welcome_slack(user_id, created, message): try: instance = User.objects.get(pk=user_id) if created and not instance.is_bot: - first_name = instance.first_name.capitalize() - to_email = instance.email - from_email_string = settings.EMAIL_FROM - - subject = f"Welcome to Plane ✈️!" 
- - context = {"first_name": first_name, "email": instance.email} - - html_content = render_to_string( - "emails/auth/user_welcome_email.html", context - ) - - text_content = strip_tags(html_content) - - msg = EmailMultiAlternatives( - subject, text_content, from_email_string, [to_email] - ) - msg.attach_alternative(html_content, "text/html") - msg.send() - # Send message on slack as well if settings.SLACK_BOT_TOKEN: client = WebClient(token=settings.SLACK_BOT_TOKEN) diff --git a/apiserver/plane/db/migrations/0030_alter_estimatepoint_unique_together.py b/apiserver/plane/db/migrations/0030_alter_estimatepoint_unique_together.py new file mode 100644 index 000000000..bfc1da530 --- /dev/null +++ b/apiserver/plane/db/migrations/0030_alter_estimatepoint_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.18 on 2023-05-05 14:17 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0029_auto_20230502_0126'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='estimatepoint', + unique_together=set(), + ), + ] diff --git a/apiserver/plane/db/migrations/0031_analyticview.py b/apiserver/plane/db/migrations/0031_analyticview.py new file mode 100644 index 000000000..7e02b78b2 --- /dev/null +++ b/apiserver/plane/db/migrations/0031_analyticview.py @@ -0,0 +1,37 @@ +# Generated by Django 3.2.18 on 2023-05-12 11:31 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0030_alter_estimatepoint_unique_together'), + ] + + operations = [ + migrations.CreateModel( + name='AnalyticView', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('name', models.CharField(max_length=255)), + ('description', models.TextField(blank=True)), + ('query', models.JSONField()), + ('query_dict', models.JSONField(default=dict)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics', to='db.workspace')), + ], + options={ + 'verbose_name': 'Analytic', + 'verbose_name_plural': 'Analytics', + 'db_table': 'analytic_views', + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/apiserver/plane/db/migrations/0032_auto_20230520_2015.py b/apiserver/plane/db/migrations/0032_auto_20230520_2015.py new file mode 100644 index 000000000..27c13537e --- /dev/null +++ b/apiserver/plane/db/migrations/0032_auto_20230520_2015.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.19 on 2023-05-20 14:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0031_analyticview'), + ] + + operations = [ + migrations.RenameField( + model_name='project', + old_name='icon', + new_name='emoji', + ), + migrations.AddField( + model_name='project', + name='icon_prop', + 
field=models.JSONField(null=True), + ), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index e32d768e0..53b501716 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -67,3 +67,5 @@ from .importer import Importer from .page import Page, PageBlock, PageFavorite, PageLabel from .estimate import Estimate, EstimatePoint + +from .analytic import AnalyticView \ No newline at end of file diff --git a/apiserver/plane/db/models/analytic.py b/apiserver/plane/db/models/analytic.py new file mode 100644 index 000000000..d097051af --- /dev/null +++ b/apiserver/plane/db/models/analytic.py @@ -0,0 +1,25 @@ +# Django models +from django.db import models +from django.conf import settings + +from .base import BaseModel + + +class AnalyticView(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", related_name="analytics", on_delete=models.CASCADE + ) + name = models.CharField(max_length=255) + description = models.TextField(blank=True) + query = models.JSONField() + query_dict = models.JSONField(default=dict) + + class Meta: + verbose_name = "Analytic" + verbose_name_plural = "Analytics" + db_table = "analytic_views" + ordering = ("-created_at",) + + def __str__(self): + """Return name of the analytic view""" + return f"{self.name} <{self.workspace.name}>" diff --git a/apiserver/plane/db/models/estimate.py b/apiserver/plane/db/models/estimate.py index f163a1407..d95a86316 100644 --- a/apiserver/plane/db/models/estimate.py +++ b/apiserver/plane/db/models/estimate.py @@ -39,7 +39,6 @@ class EstimatePoint(ProjectBaseModel): return f"{self.estimate.name} <{self.key}> <{self.value}>" class Meta: - unique_together = ["value", "estimate"] verbose_name = "Estimate Point" verbose_name_plural = "Estimate Points" db_table = "estimate_points" diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index fed946a61..6e264566d 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -85,8 +85,13 @@ class Issue(ProjectBaseModel): ).first() # if there is no default state assign any random state if default_state is None: - self.state = State.objects.filter(project=self.project).first() + random_state = State.objects.filter(project=self.project).first() + self.state = random_state + if random_state.group == "started": + self.start_date = timezone.now().date() else: + if default_state.group == "started": + self.start_date = timezone.now().date() self.state = default_state except ImportError: pass @@ -94,18 +99,15 @@ class Issue(ProjectBaseModel): try: from plane.db.models import State, PageBlock - # Get the completed states of the project - completed_states = State.objects.filter( - group="completed", project=self.project - ).values_list("pk", flat=True) # Check if the current issue state and completed state id are same - if self.state.id in completed_states: + if self.state.group == "completed": self.completed_at = timezone.now() # check if there are any page blocks PageBlock.objects.filter(issue_id=self.id).filter().update( completed_at=timezone.now() ) - + elif self.state.group == "started": + self.start_date = timezone.now().date() else: PageBlock.objects.filter(issue_id=self.id).filter().update( completed_at=None @@ -116,7 +118,6 @@ class Issue(ProjectBaseModel): pass if self._state.adding: # Get the maximum display_id value from the database - last_id = IssueSequence.objects.filter(project=self.project).aggregate( largest=models.Max("sequence") )["largest"] @@ 
-131,6 +132,9 @@
         if largest_sort_order is not None:
             self.sort_order = largest_sort_order + 10000

+            # If adding it to started state
+            if self.state.group == "started":
+                self.start_date = timezone.now().date()
         # Strip the html tags using html parser
         self.description_stripped = (
             None
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index 04435cadf..41b1ac654 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -63,7 +63,8 @@ class Project(BaseModel):
         null=True,
         blank=True,
     )
-    icon = models.CharField(max_length=255, null=True, blank=True)
+    emoji = models.CharField(max_length=255, null=True, blank=True)
+    icon_prop = models.JSONField(null=True)
     module_view = models.BooleanField(default=True)
     cycle_view = models.BooleanField(default=True)
     issue_views_view = models.BooleanField(default=True)
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index 5a4f487c1..b0ab72159 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -104,29 +104,9 @@ class User(AbstractBaseUser, PermissionsMixin):


 @receiver(post_save, sender=User)
-def send_welcome_email(sender, instance, created, **kwargs):
+def send_welcome_slack(sender, instance, created, **kwargs):
     try:
         if created and not instance.is_bot:
-            first_name = instance.first_name.capitalize()
-            to_email = instance.email
-            from_email_string = settings.EMAIL_FROM
-
-            subject = f"Welcome to Plane ✈️!"
-
-            context = {"first_name": first_name, "email": instance.email}
-
-            html_content = render_to_string(
-                "emails/auth/user_welcome_email.html", context
-            )
-
-            text_content = strip_tags(html_content)
-
-            msg = EmailMultiAlternatives(
-                subject, text_content, from_email_string, [to_email]
-            )
-            msg.attach_alternative(html_content, "text/html")
-            msg.send()
-
             # Send message on slack as well
             if settings.SLACK_BOT_TOKEN:
                 client = WebClient(token=settings.SLACK_BOT_TOKEN)
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
new file mode 100644
index 000000000..161f6497e
--- /dev/null
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -0,0 +1,76 @@
+# Python imports
+from itertools import groupby
+
+# Django import
+from django.db import models
+from django.db.models import Count, F, Sum, Value, Case, When, CharField
+from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Concat
+
+
+def build_graph_plot(queryset, x_axis, y_axis, segment=None):
+
+    temp_axis = x_axis
+
+    if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+        year = ExtractYear(x_axis)
+        month = ExtractMonth(x_axis)
+        dimension = Concat(year, Value("-"), month, output_field=CharField())
+        queryset = queryset.annotate(dimension=dimension)
+        x_axis = "dimension"
+    else:
+        queryset = queryset.annotate(dimension=F(x_axis))
+        x_axis = "dimension"
+
+    if temp_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+        queryset = queryset.exclude(dimension__isnull=True)
+
+    if segment in ["created_at", "start_date", "target_date", "completed_at"]:
+        year = ExtractYear(segment)
+        month = ExtractMonth(segment)
+        dimension = Concat(year, Value("-"), month, output_field=CharField())
+        queryset = queryset.annotate(segmented=dimension)
+        segment = "segmented"
+
+    queryset = queryset.values(x_axis)
+
+    # Group queryset by x_axis field
+
+    if y_axis == "issue_count":
+        queryset = queryset.annotate(
+            is_null=Case(
+                When(dimension__isnull=True, then=Value("None")),
+                default=Value("not_null"),
+                output_field=models.CharField(max_length=8),
+            ),
+            dimension_ex=Coalesce("dimension", Value("null")),
+        ).values("dimension")
+        if segment:
+            queryset = queryset.annotate(segment=F(segment)).values(
+                "dimension", "segment"
+            )
+        else:
+            queryset = queryset.values("dimension")
+
+        queryset = queryset.annotate(count=Count("*")).order_by("dimension")
+
+    if y_axis == "estimate":
+        queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
+        if segment:
+            queryset = queryset.annotate(segment=F(segment)).values(
+                "dimension", "segment", "estimate"
+            )
+        else:
+            queryset = queryset.values("dimension", "estimate")
+
+    result_values = list(queryset)
+    grouped_data = {}
+    for key, items in groupby(result_values, key=lambda x: x[str("dimension")]):
+        grouped_data[str(key)] = list(items)
+
+    sorted_data = grouped_data
+    if temp_axis == "priority":
+        order = ["low", "medium", "high", "urgent", "None"]
+        sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
+    else:
+        sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0])))
+    return sorted_data
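The tail of `build_graph_plot` leans on `itertools.groupby`, which only merges adjacent rows, and on a sort key that pushes the `"None"` bucket last. A standalone sketch of that grouping with invented rows (the real function feeds it ORM `values()` dicts, pre-ordered by `.order_by("dimension")`):

```python
from itertools import groupby

# Invented rows standing in for queryset.values("dimension", ...) results.
result_values = [
    {"dimension": "backlog", "count": 3},
    {"dimension": "None", "count": 1},
    {"dimension": "started", "count": 2},
]

# groupby only merges adjacent items, so order by the grouping key first.
result_values.sort(key=lambda x: x["dimension"])

grouped_data = {
    str(key): list(items)
    for key, items in groupby(result_values, key=lambda x: x["dimension"])
}

# Sort buckets alphabetically while forcing the "None" bucket to the end.
sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0])))
assert list(sorted_data) == ["backlog", "started", "None"]
```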
diff --git a/apiserver/plane/db/models/estimate.py b/apiserver/plane/db/models/estimate.py
index f163a1407..d95a86316 100644
--- a/apiserver/plane/db/models/estimate.py
+++ b/apiserver/plane/db/models/estimate.py
@@ -39,7 +39,6 @@ class EstimatePoint(ProjectBaseModel):
         return f"{self.estimate.name} <{self.key}> <{self.value}>"
 
     class Meta:
-        unique_together = ["value", "estimate"]
         verbose_name = "Estimate Point"
         verbose_name_plural = "Estimate Points"
         db_table = "estimate_points"
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index fed946a61..6e264566d 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -85,8 +85,13 @@ class Issue(ProjectBaseModel):
                 ).first()
                 # if there is no default state assign any random state
                 if default_state is None:
-                    self.state = State.objects.filter(project=self.project).first()
+                    random_state = State.objects.filter(project=self.project).first()
+                    self.state = random_state
+                    if random_state is not None and random_state.group == "started":
+                        self.start_date = timezone.now().date()
                 else:
+                    if default_state.group == "started":
+                        self.start_date = timezone.now().date()
                     self.state = default_state
             except ImportError:
                 pass
@@ -94,18 +99,15 @@ class Issue(ProjectBaseModel):
             try:
                 from plane.db.models import State, PageBlock
 
-                # Get the completed states of the project
-                completed_states = State.objects.filter(
-                    group="completed", project=self.project
-                ).values_list("pk", flat=True)
                 # Check if the current issue state and completed state id are same
-                if self.state.id in completed_states:
+                if self.state.group == "completed":
                     self.completed_at = timezone.now()
                     # check if there are any page blocks
                     PageBlock.objects.filter(issue_id=self.id).filter().update(
                         completed_at=timezone.now()
                     )
-
+                elif self.state.group == "started":
+                    self.start_date = timezone.now().date()
                 else:
                     PageBlock.objects.filter(issue_id=self.id).filter().update(
                         completed_at=None
@@ -116,7 +118,6 @@ class Issue(ProjectBaseModel):
                 pass
         if self._state.adding:
             # Get the maximum display_id value from the database
-
             last_id = IssueSequence.objects.filter(project=self.project).aggregate(
                 largest=models.Max("sequence")
             )["largest"]
@@ -131,6 +132,9 @@ class Issue(ProjectBaseModel):
             if largest_sort_order is not None:
                 self.sort_order = largest_sort_order + 10000
 
+            # Stamp the start date when an issue is created directly in a "started" state
+            if self.state and self.state.group == "started":
+                self.start_date = timezone.now().date()
             # Strip the html tags using html parser
             self.description_stripped = (
                 None
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index 04435cadf..41b1ac654 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -63,7 +63,8 @@ class Project(BaseModel):
         null=True,
         blank=True,
     )
-    icon = models.CharField(max_length=255, null=True, blank=True)
+    emoji = models.CharField(max_length=255, null=True, blank=True)
+    icon_prop = models.JSONField(null=True)
     module_view = models.BooleanField(default=True)
     cycle_view = models.BooleanField(default=True)
     issue_views_view = models.BooleanField(default=True)
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index 5a4f487c1..b0ab72159 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -104,29 +104,9 @@ class User(AbstractBaseUser, PermissionsMixin):
 
 
 @receiver(post_save, sender=User)
-def send_welcome_email(sender, instance, created, **kwargs):
+def send_welcome_slack(sender, instance, created, **kwargs):
    try:
        if created and not instance.is_bot:
-            first_name = instance.first_name.capitalize()
-            to_email = instance.email
-            from_email_string = settings.EMAIL_FROM
-
-            subject = f"Welcome to Plane ✈️!"
-
-            context = {"first_name": first_name, "email": instance.email}
-
-            html_content = render_to_string(
-                "emails/auth/user_welcome_email.html", context
-            )
-
-            text_content = strip_tags(html_content)
-
-            msg = EmailMultiAlternatives(
-                subject, text_content, from_email_string, [to_email]
-            )
-            msg.attach_alternative(html_content, "text/html")
-            msg.send()
-
            # Send message on slack as well
            if settings.SLACK_BOT_TOKEN:
                client = WebClient(token=settings.SLACK_BOT_TOKEN)
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
new file mode 100644
index 000000000..161f6497e
--- /dev/null
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -0,0 +1,76 @@
+# Python imports
+from itertools import groupby
+
+# Django imports
+from django.db import models
+from django.db.models import Count, F, Sum, Value, Case, When, CharField
+from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Concat
+
+
+def build_graph_plot(queryset, x_axis, y_axis, segment=None):
+
+    temp_axis = x_axis
+
+    if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+        year = ExtractYear(x_axis)
+        month = ExtractMonth(x_axis)
+        dimension = Concat(year, Value("-"), month, output_field=CharField())
+        queryset = queryset.annotate(dimension=dimension)
+        x_axis = "dimension"
+    else:
+        queryset = queryset.annotate(dimension=F(x_axis))
+        x_axis = "dimension"
+
+    # Drop rows where the date axis is null; x_axis has already been rebound
+    # to "dimension", so check the original field name via temp_axis
+    if temp_axis in ["created_at", "start_date", "target_date", "completed_at"]:
+        queryset = queryset.exclude(**{f"{temp_axis}__isnull": True})
+
+    if segment in ["created_at", "start_date", "target_date", "completed_at"]:
+        year = ExtractYear(segment)
+        month = ExtractMonth(segment)
+        dimension = Concat(year, Value("-"), month, output_field=CharField())
+        queryset = queryset.annotate(segmented=dimension)
+        segment = "segmented"
+
+    queryset = queryset.values(x_axis)
+
+    # Group queryset by x_axis field
+    if y_axis == "issue_count":
+        queryset = queryset.annotate(
+            is_null=Case(
+                When(dimension__isnull=True, then=Value("None")),
+                default=Value("not_null"),
+                output_field=models.CharField(max_length=8),
+            ),
+            dimension_ex=Coalesce("dimension", Value("null")),
+        ).values("dimension")
+        if segment:
+            queryset = queryset.annotate(segment=F(segment)).values(
+                "dimension", "segment"
+            )
+        else:
+            queryset = queryset.values("dimension")
+
+        queryset = queryset.annotate(count=Count("*")).order_by("dimension")
+
+    if y_axis == "estimate":
+        queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
+        if segment:
+            queryset = queryset.annotate(segment=F(segment)).values(
+                "dimension", "segment", "estimate"
+            )
+        else:
+            queryset = queryset.values("dimension", "estimate")
+
+    result_values = list(queryset)
+    grouped_data = {}
+    for key, items in groupby(result_values, key=lambda x: x["dimension"]):
+        grouped_data[str(key)] = list(items)
+
+    sorted_data = grouped_data
+    if temp_axis == "priority":
+        order = ["low", "medium", "high", "urgent", "None"]
+        sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
+    else:
+        sorted_data = dict(
+            sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0]))
+        )
+    return sorted_data
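Note (editorial, not part of the patch): build_graph_plot above feeds the new analytics endpoints. A hedged usage sketch; the queryset and the "state__group" segment name are assumptions about what callers pass, not taken from this diff.

from plane.db.models import Issue
from plane.utils.analytics_plot import build_graph_plot

issues = Issue.objects.filter(workspace__slug="acme")

# Issue count per priority, segmented by state group. The result is a dict
# keyed by the stringified dimension, each value a list of row dicts
# ({"dimension", "segment", "count"}); priorities are ordered low..urgent,
# with "None" last, per the ordering logic at the end of the function.
data = build_graph_plot(
    issues, x_axis="priority", y_axis="issue_count", segment="state__group"
)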
diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py
index 8b62da722..944906f92 100644
--- a/apiserver/plane/utils/issue_filters.py
+++ b/apiserver/plane/utils/issue_filters.py
@@ -198,6 +198,39 @@ def filter_issue_state_type(params, filter, method):
     return filter
 
 
+def filter_project(params, filter, method):
+    if method == "GET":
+        projects = params.get("project").split(",")
+        if len(projects) and "" not in projects:
+            filter["project__in"] = projects
+    else:
+        if params.get("project", None) and len(params.get("project")):
+            filter["project__in"] = params.get("project")
+    return filter
+
+
+def filter_cycle(params, filter, method):
+    if method == "GET":
+        cycles = params.get("cycle").split(",")
+        if len(cycles) and "" not in cycles:
+            filter["issue_cycle__cycle_id__in"] = cycles
+    else:
+        if params.get("cycle", None) and len(params.get("cycle")):
+            filter["issue_cycle__cycle_id__in"] = params.get("cycle")
+    return filter
+
+
+def filter_module(params, filter, method):
+    if method == "GET":
+        modules = params.get("module").split(",")
+        if len(modules) and "" not in modules:
+            filter["issue_module__module_id__in"] = modules
+    else:
+        if params.get("module", None) and len(params.get("module")):
+            filter["issue_module__module_id__in"] = params.get("module")
+    return filter
+
+
 def issue_filters(query_params, method):
     filter = dict()
 
@@ -216,6 +249,9 @@ def issue_filters(query_params, method):
         "target_date": filter_target_date,
         "completed_at": filter_completed_at,
         "type": filter_issue_state_type,
+        "project": filter_project,
+        "cycle": filter_cycle,
+        "module": filter_module,
     }
 
     for key, value in ISSUE_FILTER.items():
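Note (editorial, not part of the patch): the three new filters follow this module's existing GET convention of comma-separated values. A small sketch, assuming (as the surrounding code suggests) that issue_filters only invokes a filter when its key is present in the query params; the id values are made up.

from plane.utils.issue_filters import issue_filters

query_params = {"project": "id-1,id-2", "cycle": "id-9"}
filters = issue_filters(query_params, "GET")
# filters == {
#     "project__in": ["id-1", "id-2"],
#     "issue_cycle__cycle_id__in": ["id-9"],
# }
# ...ready to apply in one go: Issue.objects.filter(**filters)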
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index e3e58450c..2bc109968 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,6 +1,6 @@
 # base requirements
 
-Django==3.2.18
+Django==3.2.19
 django-braces==1.15.0
 django-taggit==3.1.0
 psycopg2==2.9.5
diff --git a/apiserver/templates/emails/auth/user_welcome_email.html b/apiserver/templates/emails/auth/user_welcome_email.html
deleted file mode 100644
index af4e60d99..000000000
--- a/apiserver/templates/emails/auth/user_welcome_email.html
+++ /dev/null
@@ -1,481 +0,0 @@
-[Deleted: the 481-line "Welcome to Plane ✈️!" HTML email template. Its markup was stripped during extraction; only the title survives here.]
\ No newline at end of file
diff --git a/apiserver/templates/emails/exports/analytics.html b/apiserver/templates/emails/exports/analytics.html
new file mode 100644
index 000000000..f5611705e
--- /dev/null
+++ b/apiserver/templates/emails/exports/analytics.html
@@ -0,0 +1,8 @@
[The template's HTML markup was stripped during extraction; its text content follows.]
+Hey there,
+Your requested data export from Plane Analytics is now ready. The data has been compiled into CSV format for your convenience.
+Please find the CSV file attached; it can easily be imported into any spreadsheet program for further analysis.
+If you require any assistance or have any questions, please do not hesitate to contact us.
+Thank you
diff --git a/apps/app/Dockerfile.web b/apps/app/Dockerfile.web
index 11bf98bd4..0b3e45f7a 100644
--- a/apps/app/Dockerfile.web
+++ b/apps/app/Dockerfile.web
@@ -3,6 +3,7 @@ RUN apk add --no-cache libc6-compat
 RUN apk update
 # Set working directory
 WORKDIR /app
+ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
 
 RUN yarn global add turbo
 COPY . .
@@ -12,10 +13,10 @@ RUN turbo prune --scope=app --docker
 
 # Add lockfile and package.json's of isolated subworkspace
 FROM node:18-alpine AS installer
-
 RUN apk add --no-cache libc6-compat
 RUN apk update
 WORKDIR /app
+ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
 
 # First install the dependencies (as they change less often)
 COPY .gitignore .gitignore
@@ -26,9 +27,17 @@ RUN yarn install
 # Build the project
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
+COPY replace-env-vars.sh /usr/local/bin/
+USER root
+RUN chmod +x /usr/local/bin/replace-env-vars.sh
 
 RUN yarn turbo run build --filter=app
 
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
+
 FROM node:18-alpine AS runner
 WORKDIR /app
 
@@ -43,8 +52,20 @@ COPY --from=installer /app/apps/app/package.json .
 # Automatically leverage output traces to reduce image size
 # https://nextjs.org/docs/advanced-features/output-file-tracing
 COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
-# COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone/node_modules ./apps/app/node_modules
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
+
+COPY --from=installer --chown=captain:plane /app/apps/app/.next ./apps/app/.next
+
+ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+USER root
+COPY replace-env-vars.sh /usr/local/bin/
+COPY start.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/replace-env-vars.sh
+RUN chmod +x /usr/local/bin/start.sh
+
+USER captain
 
 ENV NEXT_TELEMETRY_DISABLED 1
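Note (editorial, not part of the patch): replace-env-vars.sh is copied and made executable in several images here, but the script body is not included in this diff. Below is a rough Python sketch of the substitution such a script presumably performs, rewriting the placeholder URL baked into the built Next.js output so one image can serve any NEXT_PUBLIC_API_BASE_URL at runtime; the directory and file set are assumptions.

import os

def replace_env_vars(old: str, new: str, root: str = "apps/app/.next") -> None:
    # Walk the built output and swap the baked-in placeholder for the real URL.
    for dirpath, _dirs, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith((".js", ".html", ".json")):
                continue
            path = os.path.join(dirpath, name)
            with open(path, encoding="utf-8", errors="ignore") as f:
                content = f.read()
            if old in content:
                with open(path, "w", encoding="utf-8") as f:
                    f.write(content.replace(old, new))

replace_env_vars(
    "http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER",
    os.environ.get("NEXT_PUBLIC_API_BASE_URL", "http://localhost:8000"),
)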
diff --git a/apps/app/components/analytics/custom-analytics/create-update-analytics-modal.tsx b/apps/app/components/analytics/custom-analytics/create-update-analytics-modal.tsx
new file mode 100644
index 000000000..afaaf1b98
--- /dev/null
+++ b/apps/app/components/analytics/custom-analytics/create-update-analytics-modal.tsx
@@ -0,0 +1,158 @@
+import React from "react";
+
+import { useRouter } from "next/router";
+
+// react-hook-form
+import { useForm } from "react-hook-form";
+// headless ui
+import { Dialog, Transition } from "@headlessui/react";
+// services
+import analyticsService from "services/analytics.service";
+// hooks
+import useToast from "hooks/use-toast";
+// ui
+import { Input, PrimaryButton, SecondaryButton, TextArea } from "components/ui";
+// types
+import { IAnalyticsParams, ISaveAnalyticsFormData } from "types";
+
+// types
+type Props = {
+  isOpen: boolean;
+  handleClose: () => void;
+  params?: IAnalyticsParams;
+};
+
+type FormValues = {
+  name: string;
+  description: string;
+};
+
+const defaultValues: FormValues = {
+  name: "",
+  description: "",
+};
+
+export const CreateUpdateAnalyticsModal: React.FC<Props> = ({ isOpen, handleClose, params }) => {
+  const router = useRouter();
+  const { workspaceSlug } = router.query;
+
+  const { setToastAlert } = useToast();
+
+  const {
+    register,
+    formState: { errors, isSubmitting },
+    handleSubmit,
+    reset,
+  } = useForm<FormValues>({
+    defaultValues,
+  });
+
+  const onClose = () => {
+    handleClose();
+    reset(defaultValues);
+  };
+
+  const onSubmit = async (formData: FormValues) => {
+    if (!workspaceSlug) return;
+
+    const payload: ISaveAnalyticsFormData = {
+      name: formData.name,
+      description: formData.description,
+      query_dict: {
+        x_axis: "priority",
+        y_axis: "issue_count",
+        ...params,
+        project: params?.project ?? [],
+      },
+    };
+
+    await analyticsService
+      .saveAnalytics(workspaceSlug.toString(), payload)
+      .then(() => {
+        setToastAlert({
+          type: "success",
+          title: "Success!",
+          message: "Analytics saved successfully.",
+        });
+        onClose();
+      })
+      .catch(() =>
+        setToastAlert({
+          type: "error",
+          title: "Error!",
+          message: "Analytics could not be saved. Please try again.",
+        })
+      );
+  };
+
+  return (
+    [The modal's JSX was stripped during extraction and is not reproduced here.
+     Recoverable structure: a Transition.Root / Dialog wrapper around a form
+     that registers "name" (Input) and "description" (TextArea) via
+     react-hook-form, a SecondaryButton (presumably wired to onClose), and a
+     PrimaryButton labelled "Save Analytics" that reflects isSubmitting.]
+  );
+};
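Note (editorial, not part of the patch): the interesting detail in the payload above is the merge order inside query_dict: the priority/issue_count defaults come first, the live params spread over them, and project is then coerced to a list. The same semantics in a small Python sketch; the param values are made up.

params = {"x_axis": "state__group"}  # whatever the dashboard currently shows

query_dict = {"x_axis": "priority", "y_axis": "issue_count", **params}
query_dict["project"] = params.get("project") or []

assert query_dict == {
    "x_axis": "state__group",  # the live param overrides the default
    "y_axis": "issue_count",   # the default survives
    "project": [],             # always a list, even when absent
}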