Mirror of https://github.com/makeplane/plane (synced 2024-06-14 14:31:34 +00:00)

commit 7554988164 (parent 7087b1b5f2)

feat: issue archival and close (#1474)

* chore: added issue archive using celery beat
* chore: changed the file name
* fix: created API and updated logic for achived-issues
* chore: added issue activity message
* chore: added the beat scheduler command
* feat: added unarchive issue functionality
* feat: auto issue close
* dev: refactor endpoints and issue archive activity
* dev: update manager for global filtering
* fix: added id in issue unarchive url
* dev: update auto close to include default close state
* fix: updated the list and retrive function
* fix: added the prefetch fields
* dev: update unarchive

Co-authored-by: pablohashescobar <nikhilschacko@gmail.com>
@@ -1,2 +1,3 @@
 web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --config gunicorn.config.py --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
 worker: celery -A plane worker -l info
+beat: celery -A plane beat -l INFO
@@ -76,6 +76,7 @@ from plane.api.views import (
     IssueLinkViewSet,
     BulkCreateIssueLabelsEndpoint,
     IssueAttachmentEndpoint,
+    IssueArchiveViewSet,
     IssueSubscriberViewSet,
     ## End Issues
     # States
@@ -853,6 +854,36 @@ urlpatterns = [
         name="project-issue-roadmap",
     ),
     ## IssueProperty Ebd
+    ## Issue Archives
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "list",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "retrieve",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
+        IssueArchiveViewSet.as_view(
+            {
+                "post": "unarchive",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    ## End Issue Archives
     ## File Assets
     path(
         "workspaces/<str:slug>/file-assets/",
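The three routes above give archived issues their own list and detail endpoints plus an unarchive action, all backed by IssueArchiveViewSet (shown further down in this diff). A minimal sketch of exercising them with requests follows; the base URL, slug, ids, and the bearer-token header are hypothetical placeholders, and the exact auth scheme depends on the deployment.

import requests

# Hypothetical placeholders: adjust base URL, slug, ids and auth to your deployment.
BASE = "https://plane.example.com/api"
HEADERS = {"Authorization": "Bearer <token>"}
slug, project_id, issue_id = "my-workspace", "<project-uuid>", "<issue-uuid>"

# List archived issues (the list view also honours order_by / group_by query params).
r = requests.get(f"{BASE}/workspaces/{slug}/projects/{project_id}/archived-issues/", headers=HEADERS)

# Retrieve a single archived issue.
r = requests.get(f"{BASE}/workspaces/{slug}/projects/{project_id}/archived-issues/{issue_id}/", headers=HEADERS)

# Restore (unarchive) it.
r = requests.post(f"{BASE}/workspaces/{slug}/projects/{project_id}/unarchive/{issue_id}/", headers=HEADERS)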
@@ -65,6 +65,7 @@ from .issue import (
     IssueLinkViewSet,
     BulkCreateIssueLabelsEndpoint,
     IssueAttachmentEndpoint,
+    IssueArchiveViewSet,
     IssueSubscriberViewSet,
 )

@@ -914,6 +914,197 @@ class IssueAttachmentEndpoint(BaseAPIView):
             )


+class IssueArchiveViewSet(BaseViewSet):
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+    serializer_class = IssueFlatSerializer
+    model = Issue
+
+    def get_queryset(self):
+        return (
+            Issue.objects.annotate(
+                sub_issues_count=Issue.objects.filter(parent=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .filter(archived_at__isnull=False)
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("state")
+            .select_related("parent")
+            .prefetch_related("assignees")
+            .prefetch_related("labels")
+        )
+
+    @method_decorator(gzip_page)
+    def list(self, request, slug, project_id):
+        try:
+            filters = issue_filters(request.query_params, "GET")
+            show_sub_issues = request.GET.get("show_sub_issues", "true")
+
+            # Custom ordering for priority and state
+            priority_order = ["urgent", "high", "medium", "low", None]
+            state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
+            order_by_param = request.GET.get("order_by", "-created_at")
+
+            issue_queryset = (
+                self.get_queryset()
+                .filter(**filters)
+                .annotate(cycle_id=F("issue_cycle__id"))
+                .annotate(module_id=F("issue_module__id"))
+                .annotate(
+                    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+                    .order_by()
+                    .annotate(count=Func(F("id"), function="Count"))
+                    .values("count")
+                )
+                .annotate(
+                    attachment_count=IssueAttachment.objects.filter(
+                        issue=OuterRef("id")
+                    )
+                    .order_by()
+                    .annotate(count=Func(F("id"), function="Count"))
+                    .values("count")
+                )
+            )
+
+            # Priority Ordering
+            if order_by_param == "priority" or order_by_param == "-priority":
+                priority_order = (
+                    priority_order
+                    if order_by_param == "priority"
+                    else priority_order[::-1]
+                )
+                issue_queryset = issue_queryset.annotate(
+                    priority_order=Case(
+                        *[
+                            When(priority=p, then=Value(i))
+                            for i, p in enumerate(priority_order)
+                        ],
+                        output_field=CharField(),
+                    )
+                ).order_by("priority_order")
+
+            # State Ordering
+            elif order_by_param in [
+                "state__name",
+                "state__group",
+                "-state__name",
+                "-state__group",
+            ]:
+                state_order = (
+                    state_order
+                    if order_by_param in ["state__name", "state__group"]
+                    else state_order[::-1]
+                )
+                issue_queryset = issue_queryset.annotate(
+                    state_order=Case(
+                        *[
+                            When(state__group=state_group, then=Value(i))
+                            for i, state_group in enumerate(state_order)
+                        ],
+                        default=Value(len(state_order)),
+                        output_field=CharField(),
+                    )
+                ).order_by("state_order")
+            # assignee and label ordering
+            elif order_by_param in [
+                "labels__name",
+                "-labels__name",
+                "assignees__first_name",
+                "-assignees__first_name",
+            ]:
+                issue_queryset = issue_queryset.annotate(
+                    max_values=Max(
+                        order_by_param[1::]
+                        if order_by_param.startswith("-")
+                        else order_by_param
+                    )
+                ).order_by(
+                    "-max_values" if order_by_param.startswith("-") else "max_values"
+                )
+            else:
+                issue_queryset = issue_queryset.order_by(order_by_param)
+
+            issue_queryset = (
+                issue_queryset
+                if show_sub_issues == "true"
+                else issue_queryset.filter(parent__isnull=True)
+            )
+
+            issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+            ## Grouping the results
+            group_by = request.GET.get("group_by", False)
+            if group_by:
+                return Response(
+                    group_results(issues, group_by), status=status.HTTP_200_OK
+                )
+
+            return Response(issues, status=status.HTTP_200_OK)
+
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def retrieve(self, request, slug, project_id, pk=None):
+        try:
+            issue = Issue.objects.get(
+                workspace__slug=slug,
+                project_id=project_id,
+                archived_at__isnull=False,
+                pk=pk,
+            )
+            return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+        except Issue.DoesNotExist:
+            return Response(
+                {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
+            )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def unarchive(self, request, slug, project_id, pk=None):
+        try:
+            issue = Issue.objects.get(
+                workspace__slug=slug,
+                project_id=project_id,
+                archived_at__isnull=False,
+                pk=pk,
+            )
+            issue.archived_at = None
+            issue.save()
+            issue_activity.delay(
+                type="issue.activity.updated",
+                requested_data=json.dumps({"archived_in": None}),
+                actor_id=str(request.user.id),
+                issue_id=str(issue.id),
+                project_id=str(project_id),
+                current_instance=None,
+            )
+
+            return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+        except Issue.DoesNotExist:
+            return Response(
+                {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND)
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong, please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+
 class IssueSubscriberViewSet(BaseViewSet):
     serializer_class = IssueSubscriberSerializer
     model = IssueSubscriber
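The priority branch in list() builds a Case/When expression that maps each priority to its index in priority_order, reverses the list for "-priority", and sorts on that synthetic column. A plain-Python sketch of the same ranking idea follows, handy for convincing yourself what order the annotation produces; the sample issue dicts are made up.

# Pure-Python illustration of the index-based ordering used above.
priority_order = ["urgent", "high", "medium", "low", None]

def sort_key(order):
    rank = {p: i for i, p in enumerate(order)}
    return lambda issue: rank[issue["priority"]]

issues = [{"id": 1, "priority": "low"}, {"id": 2, "priority": "urgent"}, {"id": 3, "priority": None}]
ascending = sorted(issues, key=sort_key(priority_order))          # order_by=priority
descending = sorted(issues, key=sort_key(priority_order[::-1]))   # order_by=-priority
print([i["id"] for i in ascending], [i["id"] for i in descending])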
@@ -5,6 +5,7 @@ import requests
 # Django imports
 from django.conf import settings
 from django.core.serializers.json import DjangoJSONEncoder
+from django.utils import timezone

 # Third Party imports
 from celery import shared_task
@@ -557,6 +558,22 @@ def track_estimate_points(
     )


+def track_archive_in(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    issue_activities.append(
+        IssueActivity(
+            issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"{actor.email} has restored the issue",
+            verb="updated",
+            actor=actor,
+            field="archvied_at",
+        )
+    )
+
+
 def update_issue_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities
 ):
@@ -573,6 +590,7 @@ def update_issue_activity(
         "blocks_list": track_blocks,
         "blockers_list": track_blockings,
         "estimate_point": track_estimate_points,
+        "archived_in": track_archive_in,
     }

     requested_data = json.loads(requested_data) if requested_data is not None else None
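The hunk above only shows the new mapping entry; the rest of update_issue_activity is not part of this diff. As a rough, self-contained sketch of how a field-to-handler table like this is typically consumed (the names and the dispatch loop here are illustrative assumptions, not code from the commit):

# Illustrative only: dispatch each requested field to its tracking handler.
def track_archived_field(requested_data, issue_activities):
    issue_activities.append(("archived_in", requested_data["archived_in"]))

FIELD_MAPPER = {"archived_in": track_archived_field}

def dispatch(requested_data, issue_activities):
    for field, handler in FIELD_MAPPER.items():
        if field in requested_data:
            handler(requested_data, issue_activities)

acts = []
dispatch({"archived_in": None}, acts)
print(acts)  # [('archived_in', None)]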
@@ -950,6 +968,7 @@ def delete_attachment_activity(
     )


+
 # Receive message from room group
 @shared_task
 def issue_activity(
@@ -961,6 +980,11 @@ def issue_activity(
     actor = User.objects.get(pk=actor_id)
     project = Project.objects.get(pk=project_id)

+    issue = Issue.objects.filter(pk=issue_id).first()
+    if issue is not None:
+        issue.updated_at = timezone.now()
+        issue.save()
+
     # add the user to issue subscriber
     try:
         _ = IssueSubscriber.objects.create(issue_id=issue_id, subscriber=actor)
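The added lines touch issue.updated_at whenever activity is recorded, which matters because the automation task added later in this commit compares updated_at against a months-as-30-days cutoff. A small stdlib sketch of that arithmetic, with a hypothetical archive_in value:

from datetime import datetime, timedelta, timezone

archive_in = 6  # hypothetical project setting, in months
cutoff = datetime.now(timezone.utc) - timedelta(days=archive_in * 30)
# Issues whose updated_at is older than `cutoff` become candidates for archival.
print(cutoff.isoformat())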
apiserver/plane/bgtasks/issue_automation_task.py (new file, 146 lines)
@@ -0,0 +1,146 @@
+# Python imports
+from datetime import timedelta
+
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+from django.conf import settings
+
+# Third party imports
+from celery import shared_task
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.db.models import Issue, Project, IssueActivity, State
+
+
+@shared_task
+def archive_and_close_old_issues():
+    archive_old_issues()
+    close_old_issues()
+
+
+def archive_old_issues():
+    try:
+        # Get all the projects whose archive_in is greater than 0
+        projects = Project.objects.filter(archive_in__gt=0)
+
+        for project in projects:
+            project_id = project.id
+            archive_in = project.archive_in
+
+            # Get all the issues whose updated_at is older than the archive_in months
+            issues = Issue.objects.filter(
+                Q(
+                    project=project_id,
+                    archived_at__isnull=True,
+                    updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)),
+                    state__group__in=["completed", "cancelled"],
+                ),
+                Q(issue_cycle__isnull=True)
+                | (
+                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+                    & Q(issue_cycle__isnull=False)
+                ),
+                Q(issue_module__isnull=True)
+                | (
+                    Q(issue_module__module__target_date__lt=timezone.now().date())
+                    & Q(issue_module__isnull=False)
+                ),
+            )
+
+            # Check if Issues
+            if issues:
+                issues_to_update = []
+                for issue in issues:
+                    issue.archived_at = timezone.now()
+                    issues_to_update.append(issue)
+
+                # Bulk Update the issues and log the activity
+                Issue.objects.bulk_update(issues_to_update, ["archived_at"], batch_size=100)
+                IssueActivity.objects.bulk_create(
+                    [
+                        IssueActivity(
+                            issue_id=issue.id,
+                            actor=project.created_by,
+                            verb="updated",
+                            field="archived_at",
+                            project=project,
+                            workspace=project.workspace,
+                            comment="Plane archived the issue",
+                        )
+                        for issue in issues_to_update
+                    ],
+                    batch_size=100,
+                )
+        return
+    except Exception as e:
+        if settings.DEBUG:
+            print(e)
+        capture_exception(e)
+        return
+
+
+def close_old_issues():
+    try:
+        # Get all the projects whose close_in is greater than 0
+        projects = Project.objects.filter(close_in__gt=0).select_related("default_state")
+
+        for project in projects:
+            project_id = project.id
+            close_in = project.close_in
+
+            # Get all the issues whose updated_at is older than the close_in months
+            issues = Issue.objects.filter(
+                Q(
+                    project=project_id,
+                    archived_at__isnull=True,
+                    updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)),
+                    state__group__in=["backlog", "unstarted", "started"],
+                ),
+                Q(issue_cycle__isnull=True)
+                | (
+                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+                    & Q(issue_cycle__isnull=False)
+                ),
+                Q(issue_module__isnull=True)
+                | (
+                    Q(issue_module__module__target_date__lt=timezone.now().date())
+                    & Q(issue_module__isnull=False)
+                ),
+            )
+
+            # Check if Issues
+            if issues:
+                if project.default_state is None:
+                    close_state = project.default_state
+                else:
+                    close_state = State.objects.filter(group="cancelled").first()
+
+                issues_to_update = []
+                for issue in issues:
+                    issue.state = close_state
+                    issues_to_update.append(issue)
+
+                # Bulk Update the issues and log the activity
+                Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
+                IssueActivity.objects.bulk_create(
+                    [
+                        IssueActivity(
+                            issue_id=issue.id,
+                            actor=project.created_by,
+                            verb="updated",
+                            field="state",
+                            project=project,
+                            workspace=project.workspace,
+                            comment="Plane cancelled the issue",
+                        )
+                        for issue in issues_to_update
+                    ],
+                    batch_size=100,
+                )
+        return
+    except Exception as e:
+        if settings.DEBUG:
+            print(e)
+        capture_exception(e)
+        return
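Note that, as committed, the close_old_issues conditional assigns close_state = project.default_state only when default_state is None, which leaves close_state empty in exactly the case it should not. A minimal plain-Python sketch of the selection the commit message ("update auto close to include default close state") appears to intend; this is an editorial reading, not code from the commit:

# Sketch only (plain stand-ins, not the Django models): prefer the project's
# configured default close state when it is set, otherwise fall back to a
# "cancelled" state.
def resolve_close_state(default_state, cancelled_states):
    if default_state is not None:
        return default_state
    return cancelled_states[0] if cancelled_states else None

print(resolve_close_state(None, ["Cancelled"]))    # 'Cancelled'
print(resolve_close_state("Done", ["Cancelled"]))  # 'Done'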
@@ -1,6 +1,7 @@
 import os
 from celery import Celery
 from plane.settings.redis import redis_instance
+from celery.schedules import crontab

 # Set the default Django settings module for the 'celery' program.
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@@ -13,5 +14,15 @@ app = Celery("plane")
 # pickle the object when using Windows.
 app.config_from_object("django.conf:settings", namespace="CELERY")

+app.conf.beat_schedule = {
+    # Executes every day at 12 AM
+    "check-every-day-to-archive-and-close": {
+        "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues",
+        "schedule": crontab(hour=0, minute=0),
+    },
+}
+
 # Load task modules from all registered Django app configs.
 app.autodiscover_tasks()
+
+app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'
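With the beat schedule above, the archival task fires once a day at midnight. For a quick check without waiting for the crontab, the task can also be invoked directly from a Django shell on the running deployment; a sketch, assuming the worker and broker are up:

# Run inside `python manage.py shell` for the plane API server.
from plane.bgtasks.issue_automation_task import archive_and_close_old_issues

archive_and_close_old_issues.delay()  # enqueue on the Celery worker
# or, for debugging, run it synchronously in the shell process:
archive_and_close_old_issues()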
@@ -28,6 +28,8 @@ class IssueManager(models.Manager):
                 | models.Q(issue_inbox__status=2)
                 | models.Q(issue_inbox__isnull=True)
             )
+            .filter(archived_at__isnull=True)
+            .exclude(archived_at__isnull=False)
         )


@@ -81,6 +83,7 @@ class Issue(ProjectBaseModel):
     )
     sort_order = models.FloatField(default=65535)
     completed_at = models.DateTimeField(null=True)
+    archived_at = models.DateField(null=True)

     objects = models.Manager()
     issue_objects = IssueManager()
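With the manager change above, archived issues stay reachable through the default objects manager but drop out of issue_objects, which the regular issue views use for listings. A quick Django-shell sketch; the queries are illustrative, and issue_objects also applies the inbox filters shown in the hunk:

# Django shell sketch.
from plane.db.models import Issue

Issue.objects.filter(archived_at__isnull=False).count()  # archived issues, still queryable
Issue.issue_objects.count()                              # excludes archived (and inbox-pending) issues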
@@ -4,6 +4,7 @@ from django.conf import settings
 from django.template.defaultfilters import slugify
 from django.db.models.signals import post_save
 from django.dispatch import receiver
+from django.core.validators import MinValueValidator, MaxValueValidator

 # Modeule imports
 from plane.db.mixins import AuditModel
@@ -74,6 +75,15 @@ class Project(BaseModel):
     estimate = models.ForeignKey(
         "db.Estimate", on_delete=models.SET_NULL, related_name="projects", null=True
     )
+    archive_in = models.IntegerField(
+        default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
+    )
+    close_in = models.IntegerField(
+        default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
+    )
+    default_state = models.ForeignKey(
+        "db.State", on_delete=models.SET_NULL, null=True, related_name="default_state"
+    )

     def __str__(self):
         """Return name of the project"""
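archive_in and close_in are expressed in months (0 disables the behaviour, 12 is the cap enforced by the validators), and default_state is where auto-closed issues should land. A Django-shell sketch of opting a project in; the chosen values are arbitrary examples:

# Django shell sketch with arbitrary example values.
from plane.db.models import Project, State

project = Project.objects.first()
project.archive_in = 6      # archive completed/cancelled issues idle for ~6 months
project.close_in = 3        # auto-close open issues idle for ~3 months
project.default_state = State.objects.filter(project=project, group="cancelled").first()
project.save()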
@@ -35,6 +35,7 @@ INSTALLED_APPS = [
     "rest_framework_simplejwt.token_blacklist",
     "corsheaders",
     "taggit",
+    "django_celery_beat",
 ]

 MIDDLEWARE = [
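django_celery_beat is a Django app, so it ships migrations; once they are applied, the DatabaseScheduler configured in celery.py keeps its schedules in the database. A small Django-shell sketch for inspecting them, assuming the migrations have been run:

# Django shell sketch.
from django_celery_beat.models import CrontabSchedule, PeriodicTask

print(PeriodicTask.objects.count(), CrontabSchedule.objects.count())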
@@ -213,3 +214,4 @@ SIMPLE_JWT = {
 CELERY_TIMEZONE = TIME_ZONE
 CELERY_TASK_SERIALIZER = 'json'
 CELERY_ACCEPT_CONTENT = ['application/json']
+CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task",)
@@ -29,3 +29,4 @@ channels==4.0.0
 openai==0.27.8
 slack-sdk==3.21.3
 celery==5.3.1
+django_celery_beat==2.5.0