dev: update automatic issue archival and close to send notifications

pablohashescobar 2023-07-11 23:58:22 +05:30
parent e545d49142
commit 0f61013bb5
3 changed files with 71 additions and 54 deletions

View File

@ -1087,7 +1087,7 @@ class IssueArchiveViewSet(BaseViewSet):
issue.save()
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_in": None}),
requested_data=json.dumps({"archived_at": None}),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),

View File

@ -558,20 +558,34 @@ def track_estimate_points(
)
def track_archive_in(
def track_archive_at(
requested_data, current_instance, issue_id, project, actor, issue_activities
):
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project=project,
workspace=project.workspace,
comment=f"{actor.email} has restored the issue",
verb="updated",
actor=actor,
field="archvied_at",
if requested_data.get("archived_at") is None:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project=project,
workspace=project.workspace,
comment=f"{actor.email} has restored the issue",
verb="updated",
actor=actor,
field="archvied_at",
)
)
else:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project=project,
workspace=project.workspace,
comment=f"Plane has archived the issue",
verb="updated",
actor=actor,
field="archvied_at",
)
)
)
def update_issue_activity(
@ -590,7 +604,7 @@ def update_issue_activity(
"blocks_list": track_blocks,
"blockers_list": track_blockings,
"estimate_point": track_estimate_points,
"archived_in": track_archive_in,
"archived_at": track_archive_at,
}
requested_data = json.loads(requested_data) if requested_data is not None else None
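The mapper entry above is what makes the key rename in the first file matter: the tracker is selected by the exact key found in the request payload, so the stale "archived_in" key would never reach track_archive_at. A minimal dispatch sketch, assuming update_issue_activity simply walks the parsed payload and calls every matching tracker (only the mapper keys come from the diff; the loop is illustrative):

# Hypothetical dispatch step; the mapper mirrors the dict above, the
# iteration itself is an assumption made for illustration.
def dispatch_trackers(requested_data, current_instance, issue_id,
                      project, actor, issue_activities, mapper):
    for key, tracker in mapper.items():
        if requested_data is not None and key in requested_data:
            tracker(requested_data, current_instance, issue_id,
                    project, actor, issue_activities)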
@ -972,7 +986,7 @@ def delete_attachment_activity(
# Receive message from room group
@shared_task
def issue_activity(
type, requested_data, current_instance, issue_id, actor_id, project_id
type, requested_data, current_instance, issue_id, actor_id, project_id, subscriber=True
):
try:
issue_activities = []
@ -984,12 +998,14 @@ def issue_activity(
if issue is not None:
issue.updated_at = timezone.now()
issue.save()
# add the user to issue subscriber
try:
_ = IssueSubscriber.objects.create(issue_id=issue_id, subscriber=actor)
except Exception as e:
pass
if subscriber:
# add the user to issue subscriber
try:
_ = IssueSubscriber.objects.get_or_create(issue_id=issue_id, subscriber=actor)
except Exception as e:
pass
ACTIVITY_MAPPER = {
"issue.activity.created": create_issue_activity,

View File

@ -1,4 +1,5 @@
# Python improts
# Python imports
import json
from datetime import timedelta
# Django imports
@ -12,6 +13,7 @@ from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Issue, Project, IssueActivity, State
from plane.bgtasks.issue_activites_task import issue_activity
@shared_task
@ -19,6 +21,7 @@ def archive_and_close_old_issues():
archive_old_issues()
close_old_issues()
def archive_old_issues():
try:
# Get all the projects whose archive_in is greater than 0
@ -56,22 +59,21 @@ def archive_old_issues():
issues_to_update.append(issue)
# Bulk Update the issues and log the activity
Issue.objects.bulk_update(issues_to_update, ["archived_at"], batch_size=100)
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue_id=issue.id,
actor=project.created_by,
verb="updated",
field="archived_at",
project=project,
workspace=project.workspace,
comment="Plane archived the issue",
)
for issue in issues_to_update
],
batch_size=100,
Issue.objects.bulk_update(
issues_to_update, ["archived_at"], batch_size=100
)
[
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archive_at": issue.archived_at}),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
current_instance=None,
subscriber=False,
)
for issue in issues_to_update
]
return
except Exception as e:
if settings.DEBUG:
@ -79,10 +81,13 @@ def archive_old_issues():
capture_exception(e)
return
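Both automation functions now fan the notification work out through the Celery task instead of bulk-inserting IssueActivity rows directly, which is what actually produces notifications. The list comprehension above is used purely for its side effects; an equivalent plain loop, shown as a sketch only (default=str is an assumption so the archived_at datetime survives json.dumps, and json is assumed imported as in the file above):

# Equivalent explicit loop for the side-effect comprehension above.
for issue in issues_to_update:
    issue_activity.delay(
        type="issue.activity.updated",
        requested_data=json.dumps({"archived_at": issue.archived_at}, default=str),
        actor_id=str(project.created_by_id),
        issue_id=str(issue.id),
        project_id=str(project_id),
        current_instance=None,
        subscriber=False,
    )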
def close_old_issues():
try:
# Get all the projects whose close_in is greater than 0
projects = Project.objects.filter(close_in__gt=0).select_related("default_state")
projects = Project.objects.filter(close_in__gt=0).select_related(
"default_state"
)
for project in projects:
project_id = project.id
@ -115,7 +120,6 @@ def close_old_issues():
else:
close_state = State.objects.filter(group="cancelled").first()
issues_to_update = []
for issue in issues:
issue.state = close_state
@ -123,24 +127,21 @@ def close_old_issues():
# Bulk Update the issues and log the activity
Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue_id=issue.id,
actor=project.created_by,
verb="updated",
field="state",
project=project,
workspace=project.workspace,
comment="Plane cancelled the issue",
)
for issue in issues_to_update
],
batch_size=100,
)
[
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"state": issue.state}),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
current_instance=None,
subscriber=False,
)
for issue in issues_to_update
]
return
except Exception as e:
if settings.DEBUG:
print(e)
capture_exception(e)
return
return
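archive_and_close_old_issues is a plain @shared_task, so something still has to schedule it; a Celery beat entry is the usual way to run it once a day. The following is a sketch only, not Plane's actual configuration (the module path and schedule are assumptions; only the task name comes from the file above):

# Hypothetical Celery beat schedule entry.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "archive-and-close-old-issues": {
        "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues",
        "schedule": crontab(hour=0, minute=0),  # once a day
    },
}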