diff --git a/apiserver/Procfile b/apiserver/Procfile
index 30d734913..694c49df4 100644
--- a/apiserver/Procfile
+++ b/apiserver/Procfile
@@ -1,2 +1,3 @@
 web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --config gunicorn.config.py --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
-worker: celery -A plane worker -l info
\ No newline at end of file
+worker: celery -A plane worker -l info
+beat: celery -A plane beat -l INFO
\ No newline at end of file
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index 34e711be6..1958f5c18 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -76,6 +76,7 @@ from plane.api.views import (
     IssueLinkViewSet,
     BulkCreateIssueLabelsEndpoint,
     IssueAttachmentEndpoint,
+    IssueArchiveViewSet,
     IssueSubscriberViewSet,
     ## End Issues
     # States
@@ -853,6 +854,36 @@ urlpatterns = [
         name="project-issue-roadmap",
     ),
     ## IssueProperty Ebd
+    ## Issue Archives
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "list",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "retrieve",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
+        IssueArchiveViewSet.as_view(
+            {
+                "post": "unarchive",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    ## End Issue Archives
     ## File Assets
     path(
         "workspaces/<str:slug>/file-assets/",
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 327dd6037..9eba0868a 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -65,6 +65,7 @@ from .issue import (
     IssueLinkViewSet,
     BulkCreateIssueLabelsEndpoint,
     IssueAttachmentEndpoint,
+    IssueArchiveViewSet,
     IssueSubscriberViewSet,
 )
 
diff --git a/apiserver/plane/api/views/issue.py
b/apiserver/plane/api/views/issue.py index d96441c75..415e7e2fa 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -914,6 +914,197 @@ class IssueAttachmentEndpoint(BaseAPIView): ) +class IssueArchiveViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + def get_queryset(self): + return ( + Issue.objects.annotate( + sub_issues_count=Issue.objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(archived_at__isnull=False) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + try: + filters = issue_filters(request.query_params, "GET") + show_sub_issues = request.GET.get("show_sub_issues", "true") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", None] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__id")) + .annotate(module_id=F("issue_module__id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if 
order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issue_queryset = ( + issue_queryset + if show_sub_issues == "true" + else issue_queryset.filter(parent__isnull=True) + ) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + return Response( + group_results(issues, group_by), status=status.HTTP_200_OK + ) + + return Response(issues, status=status.HTTP_200_OK) + + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def retrieve(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, 
+ pk=pk, + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + except Issue.DoesNotExist: + return Response( + {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def unarchive(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + issue.archived_at = None + issue.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"archived_in": None}), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + ) + + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + except Issue.DoesNotExist: + return Response( + {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong, please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class IssueSubscriberViewSet(BaseViewSet): serializer_class = IssueSubscriberSerializer model = IssueSubscriber diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 7bb6010dd..26f617033 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -5,6 +5,7 @@ import requests # Django imports from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder +from django.utils import timezone # Third Party imports from celery import shared_task @@ -557,6 +558,22 @@ def track_estimate_points( ) +def track_archive_in( + requested_data, current_instance, issue_id, project, actor, issue_activities +): + issue_activities.append( + IssueActivity( + 
issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"{actor.email} has restored the issue",
+            verb="updated",
+            actor=actor,
+            field="archived_at",
+        )
+    )
+
+
 def update_issue_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities
 ):
@@ -573,6 +590,7 @@ def update_issue_activity(
         "blocks_list": track_blocks,
         "blockers_list": track_blockings,
         "estimate_point": track_estimate_points,
+        "archived_in": track_archive_in,
     }
 
     requested_data = json.loads(requested_data) if requested_data is not None else None
@@ -950,6 +968,7 @@ def delete_attachment_activity(
     )
 
 
+
 # Receive message from room group
 @shared_task
 def issue_activity(
@@ -961,6 +980,11 @@
     actor = User.objects.get(pk=actor_id)
     project = Project.objects.get(pk=project_id)
 
+    issue = Issue.objects.filter(pk=issue_id).first()
+    if issue is not None:
+        issue.updated_at = timezone.now()
+        issue.save()
+
     # add the user to issue subscriber
     try:
         _ = IssueSubscriber.objects.create(issue_id=issue_id, subscriber=actor)
diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py
new file mode 100644
index 000000000..c52994a43
--- /dev/null
+++ b/apiserver/plane/bgtasks/issue_automation_task.py
@@ -0,0 +1,146 @@
+# Python imports
+from datetime import timedelta
+
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+from django.conf import settings
+
+# Third party imports
+from celery import shared_task
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.db.models import Issue, Project, IssueActivity, State
+
+
+@shared_task
+def archive_and_close_old_issues():
+    archive_old_issues()
+    close_old_issues()
+
+def archive_old_issues():
+    try:
+        # Get all the projects whose archive_in is greater than 0
+        projects = Project.objects.filter(archive_in__gt=0)
+
+        for project in projects:
+            project_id = project.id
+            archive_in = project.archive_in
+
+            # Get all the issues whose updated_at is less than the archive_in months
+            issues = Issue.objects.filter(
+                Q(
+                    project=project_id,
+                    archived_at__isnull=True,
+                    updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)),
+                    state__group__in=["completed", "cancelled"],
+                ),
+                Q(issue_cycle__isnull=True)
+                | (
+                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+                    & Q(issue_cycle__isnull=False)
+                ),
+                Q(issue_module__isnull=True)
+                | (
+                    Q(issue_module__module__target_date__lt=timezone.now().date())
+                    & Q(issue_module__isnull=False)
+                ),
+            )
+
+            # Check if Issues
+            if issues:
+                issues_to_update = []
+                for issue in issues:
+                    issue.archived_at = timezone.now()
+                    issues_to_update.append(issue)
+
+                # Bulk Update the issues and log the activity
+                Issue.objects.bulk_update(issues_to_update, ["archived_at"], batch_size=100)
+                IssueActivity.objects.bulk_create(
+                    [
+                        IssueActivity(
+                            issue_id=issue.id,
+                            actor=project.created_by,
+                            verb="updated",
+                            field="archived_at",
+                            project=project,
+                            workspace=project.workspace,
+                            comment="Plane archived the issue",
+                        )
+                        for issue in issues_to_update
+                    ],
+                    batch_size=100,
+                )
+        return
+    except Exception as e:
+        if settings.DEBUG:
+            print(e)
+        capture_exception(e)
+        return
+
+def close_old_issues():
+    try:
+        # Get all the projects whose close_in is greater than 0
+        projects = Project.objects.filter(close_in__gt=0).select_related("default_state")
+
+        for project in projects:
+            project_id = project.id
+            close_in = project.close_in
+
+            # Get all the issues whose updated_at is less than the close_in months
+            issues = Issue.objects.filter(
+                Q(
+                    project=project_id,
+                    archived_at__isnull=True,
+                    updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)),
+                    state__group__in=["backlog", "unstarted", "started"],
+                ),
+                Q(issue_cycle__isnull=True)
+                | (
+                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+                    & Q(issue_cycle__isnull=False)
+                ),
+                Q(issue_module__isnull=True)
+                | (
+                    Q(issue_module__module__target_date__lt=timezone.now().date())
+                    & Q(issue_module__isnull=False)
+                ),
+            )
+
+            # Check if Issues
+            if issues:
+                if project.default_state is not None:
+                    close_state = project.default_state
+                else:
+                    close_state = State.objects.filter(project=project_id, group="cancelled").first()
+
+
+                issues_to_update = []
+                for issue in issues:
+                    issue.state = close_state
+                    issues_to_update.append(issue)
+
+                # Bulk Update the issues and log the activity
+                Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
+                IssueActivity.objects.bulk_create(
+                    [
+                        IssueActivity(
+                            issue_id=issue.id,
+                            actor=project.created_by,
+                            verb="updated",
+                            field="state",
+                            project=project,
+                            workspace=project.workspace,
+                            comment="Plane cancelled the issue",
+                        )
+                        for issue in issues_to_update
+                    ],
+                    batch_size=100,
+                )
+        return
+    except Exception as e:
+        if settings.DEBUG:
+            print(e)
+        capture_exception(e)
+        return
\ No newline at end of file
diff --git a/apiserver/plane/celery.py b/apiserver/plane/celery.py
index 1fbbdd732..ed0dc419e 100644
--- a/apiserver/plane/celery.py
+++ b/apiserver/plane/celery.py
@@ -1,6 +1,7 @@
 import os
 from celery import Celery
 from plane.settings.redis import redis_instance
+from celery.schedules import crontab
 
 # Set the default Django settings module for the 'celery' program.
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@@ -13,5 +14,15 @@ app = Celery("plane")
 # pickle the object when using Windows.
 app.config_from_object("django.conf:settings", namespace="CELERY")
 
+app.conf.beat_schedule = {
+    # Executes every day at 12 AM
+    "check-every-day-to-archive-and-close": {
+        "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues",
+        "schedule": crontab(hour=0, minute=0),
+    },
+}
+
 # Load task modules from all registered Django app configs.
app.autodiscover_tasks()
+
+app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'
\ No newline at end of file
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index 4b765a516..f301d4191 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -28,6 +28,7 @@ class IssueManager(models.Manager):
                 | models.Q(issue_inbox__status=2)
                 | models.Q(issue_inbox__isnull=True)
             )
+            .filter(archived_at__isnull=True)
         )
 
 
@@ -81,6 +82,7 @@
     )
     sort_order = models.FloatField(default=65535)
     completed_at = models.DateTimeField(null=True)
+    archived_at = models.DateTimeField(null=True)
     objects = models.Manager()
     issue_objects = IssueManager()
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index 0b6c4b50d..b28cbc69e 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -4,6 +4,7 @@ from django.conf import settings
 from django.template.defaultfilters import slugify
 from django.db.models.signals import post_save
 from django.dispatch import receiver
+from django.core.validators import MinValueValidator, MaxValueValidator
 
 # Modeule imports
 from plane.db.mixins import AuditModel
@@ -74,6 +75,15 @@ class Project(BaseModel):
     estimate = models.ForeignKey(
         "db.Estimate", on_delete=models.SET_NULL, related_name="projects", null=True
     )
+    archive_in = models.IntegerField(
+        default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
+    )
+    close_in = models.IntegerField(
+        default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
+    )
+    default_state = models.ForeignKey(
+        "db.State", on_delete=models.SET_NULL, null=True, related_name="default_state"
+    )
 
     def __str__(self):
         """Return name of the project"""
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index 2e0266159..e3a918c18 100644
---
a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -35,6 +35,7 @@ INSTALLED_APPS = [ "rest_framework_simplejwt.token_blacklist", "corsheaders", "taggit", + "django_celery_beat", ] MIDDLEWARE = [ @@ -213,3 +214,4 @@ SIMPLE_JWT = { CELERY_TIMEZONE = TIME_ZONE CELERY_TASK_SERIALIZER = 'json' CELERY_ACCEPT_CONTENT = ['application/json'] +CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task",) diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 537564828..c4fa8ef2c 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -28,4 +28,5 @@ uvicorn==0.22.0 channels==4.0.0 openai==0.27.8 slack-sdk==3.21.3 -celery==5.3.1 \ No newline at end of file +celery==5.3.1 +django_celery_beat==2.5.0