diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index 9814ace37..9a5b04b77 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -58,3 +58,5 @@ from .integration import ( GithubRepositorySyncSerializer, GithubCommentSyncSerializer, ) + +from .importer import ImporterSerializer diff --git a/apiserver/plane/api/serializers/importer.py b/apiserver/plane/api/serializers/importer.py new file mode 100644 index 000000000..28f2153c8 --- /dev/null +++ b/apiserver/plane/api/serializers/importer.py @@ -0,0 +1,12 @@ +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer +from plane.db.models import Importer + + +class ImporterSerializer(BaseSerializer): + initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True) + + class Meta: + model = Importer + fields = "__all__" diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py index 93dd9b90c..ab3a2f9d8 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/api/urls.py @@ -63,13 +63,14 @@ from plane.api.views import ( IssueCommentViewSet, UserWorkSpaceIssues, BulkDeleteIssuesEndpoint, + BulkImportIssuesEndpoint, ProjectUserViewsEndpoint, TimeLineIssueViewSet, IssuePropertyViewSet, LabelViewSet, SubIssuesEndpoint, IssueLinkViewSet, - ModuleLinkViewSet, + BulkCreateIssueLabelsEndpoint, ## End Issues # States StateViewSet, @@ -93,6 +94,7 @@ from plane.api.views import ( ModuleViewSet, ModuleIssueViewSet, ModuleFavoriteViewSet, + ModuleLinkViewSet, ## End Modules # Api Tokens ApiTokenEndpoint, @@ -104,7 +106,13 @@ from plane.api.views import ( GithubRepositorySyncViewSet, GithubIssueSyncViewSet, GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, ## End Integrations + # Importer + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, + ## End importer ) @@ -622,9 +630,20 @@ urlpatterns = [ ), 
name="project-issue-labels", ), + path( + "workspaces//projects//bulk-create-labels/", + BulkCreateIssueLabelsEndpoint.as_view(), + name="project-bulk-labels", + ), path( "workspaces//projects//bulk-delete-issues/", BulkDeleteIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//projects//bulk-import-issues//", + BulkImportIssuesEndpoint.as_view(), + name="project-issues-bulk", ), path( "workspaces//my-issues/", @@ -923,6 +942,10 @@ urlpatterns = [ } ), ), + path( + "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", + BulkCreateGithubIssueSyncEndpoint.as_view(), + ), path( "workspaces//projects//github-repository-sync//github-issue-sync//", GithubIssueSyncViewSet.as_view( @@ -952,4 +975,26 @@ urlpatterns = [ ), ## End Github Integrations ## End Integrations + # Importer + path( + "workspaces//importers//", + ServiceIssueImportSummaryEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects/importers//", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers/", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects//service//importers//", + UpdateServiceImportStatusEndpoint.as_view(), + name="importer", + ), + ## End Importer ] diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index c59a4f92f..d7cfb2bb4 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -64,6 +64,7 @@ from .issue import ( UserWorkSpaceIssues, SubIssuesEndpoint, IssueLinkViewSet, + BulkCreateIssueLabelsEndpoint, ) from .auth_extended import ( @@ -98,4 +99,12 @@ from .integration import ( GithubRepositorySyncViewSet, GithubCommentSyncViewSet, GithubRepositoriesEndpoint, + BulkCreateGithubIssueSyncEndpoint, +) + +from .importer import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, + BulkImportIssuesEndpoint, ) diff 
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception

# Django imports
from django.db.models import Max

# Module imports
from plane.api.views import BaseAPIView
from plane.db.models import (
    WorkspaceIntegration,
    Importer,
    APIToken,
    Project,
    State,
    IssueSequence,
    Issue,
    IssueActivity,
    IssueComment,
    IssueLink,
    IssueLabel,
    Workspace,
)
from plane.api.serializers import ImporterSerializer, IssueFlatSerializer
from plane.utils.integrations.github import get_github_repo_details
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags


class ServiceIssueImportSummaryEndpoint(BaseAPIView):
    """Pre-import summary for an external service.

    GET returns the repository's open issue count, label count and
    collaborators so the user can review them before starting an import.
    Only the ``github`` service is supported for now.
    """

    def get(self, request, slug, service):
        try:
            if service == "github":
                # Requires the GitHub integration to be installed on the workspace.
                workspace_integration = WorkspaceIntegration.objects.get(
                    integration__provider="github", workspace__slug=slug
                )

                access_tokens_url = workspace_integration.metadata["access_tokens_url"]
                owner = request.GET.get("owner")
                repo = request.GET.get("repo")

                issue_count, labels, collaborators = get_github_repo_details(
                    access_tokens_url, owner, repo
                )
                return Response(
                    {
                        "issue_count": issue_count,
                        "labels": labels,
                        "collaborators": collaborators,
                    },
                    status=status.HTTP_200_OK,
                )

            return Response(
                {"error": "Service not supported yet"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except WorkspaceIntegration.DoesNotExist:
            return Response(
                {"error": "Requested integration was not installed in the workspace"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class BulkImportIssuesEndpoint(BaseAPIView):
    """Bulk-create issues (plus sequences, labels, activities, comments and
    links) from an importer payload for a single project."""

    def post(self, request, slug, project_id, service):
        try:
            # Scope the project lookup to the workspace in the URL.
            project = Project.objects.get(pk=project_id, workspace__slug=slug)

            # Default state for the new issues; fall back to any state when
            # the project has no default configured.
            default_state = State.objects.filter(
                project_id=project_id, default=True
            ).first()
            if default_state is None:
                default_state = State.objects.filter(project_id=project_id).first()

            # Next sequence number for the project.
            last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
                largest=Max("sequence")
            )["largest"]
            last_id = 1 if last_id is None else last_id + 1

            # Next sort order within the target state.
            largest_sort_order = Issue.objects.filter(
                project_id=project_id, state=default_state
            ).aggregate(largest=Max("sort_order"))["largest"]
            largest_sort_order = (
                65535 if largest_sort_order is None else largest_sort_order + 10000
            )

            issues_data = request.data.get("issues_data", [])
            if not issues_data:
                return Response(
                    {"error": "Issue data is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Build the issues in memory first, then create them in bulk.
            bulk_issues = []
            for issue_data in issues_data:
                # NOTE(review): the empty-body default ("<p></p>") was
                # reconstructed — the original literal was mangled during
                # extraction; confirm against the Issue model's default.
                bulk_issues.append(
                    Issue(
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        state=default_state,
                        name=issue_data.get("name", "Issue Created through Bulk"),
                        description_html=issue_data.get("description_html", "<p></p>"),
                        description_stripped=(
                            None
                            if (
                                issue_data.get("description_html") == ""
                                or issue_data.get("description_html") is None
                            )
                            else strip_tags(issue_data.get("description_html"))
                        ),
                        sequence_id=last_id,
                        sort_order=largest_sort_order,
                        start_date=issue_data.get("start_date", None),
                        target_date=issue_data.get("target_date", None),
                    )
                )
                largest_sort_order = largest_sort_order + 10000
                last_id = last_id + 1

            issues = Issue.objects.bulk_create(
                bulk_issues,
                batch_size=100,
                ignore_conflicts=True,
            )

            # Sequences
            _ = IssueSequence.objects.bulk_create(
                [
                    IssueSequence(
                        issue=issue,
                        sequence=issue.sequence_id,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                    )
                    for issue in issues
                ],
                batch_size=100,
            )

            # Attach labels
            bulk_issue_labels = []
            for issue, issue_data in zip(issues, issues_data):
                labels_list = issue_data.get("labels_list", [])
                bulk_issue_labels = bulk_issue_labels + [
                    IssueLabel(
                        issue=issue,
                        label_id=label_id,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for label_id in labels_list
                ]
            _ = IssueLabel.objects.bulk_create(bulk_issue_labels, batch_size=100)

            # Track the issue activities
            IssueActivity.objects.bulk_create(
                [
                    IssueActivity(
                        issue=issue,
                        actor=request.user,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        # fixed wording: "importer the issue" -> "imported the issue"
                        comment=f"{request.user.email} imported the issue from {service}",
                        verb="created",
                    )
                    for issue in issues
                ],
                batch_size=100,
            )

            # Create comments
            bulk_issue_comments = []
            for issue, issue_data in zip(issues, issues_data):
                comments_list = issue_data.get("comments_list", [])
                bulk_issue_comments = bulk_issue_comments + [
                    IssueComment(
                        issue=issue,
                        # NOTE(review): reconstructed default, same caveat as
                        # description_html above.
                        comment_html=comment.get("comment_html", "<p></p>"),
                        actor=request.user,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for comment in comments_list
                ]
            _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)

            # Attach links
            _ = IssueLink.objects.bulk_create(
                [
                    IssueLink(
                        issue=issue,
                        url=issue_data.get("link", {}).get("url", "https://github.com"),
                        title=issue_data.get("link", {}).get("title", "Original Issue"),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for issue, issue_data in zip(issues, issues_data)
                ]
            )

            return Response(
                {"issues": IssueFlatSerializer(issues, many=True).data},
                status=status.HTTP_201_CREATED,
            )
        except Project.DoesNotExist:
            return Response(
                {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class ImportServiceEndpoint(BaseAPIView):
    """Create (POST) or list (GET) import jobs for a workspace."""

    def post(self, request, slug, service):
        try:
            project_id = request.data.get("project_id", False)
            if not project_id:
                return Response(
                    {"error": "Project ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace = Workspace.objects.get(slug=slug)

            if service == "github":
                data = request.data.get("data", False)
                metadata = request.data.get("metadata", False)
                config = request.data.get("config", False)
                if not data or not metadata or not config:
                    return Response(
                        {"error": "Data, config and metadata are required"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )

                # Reuse the user's API token, or mint one for the importer.
                api_token = APIToken.objects.filter(user=request.user).first()
                if api_token is None:
                    api_token = APIToken.objects.create(
                        user=request.user,
                        label="Importer",
                        workspace=workspace,
                    )

                importer = Importer.objects.create(
                    service=service,
                    project_id=project_id,
                    status="queued",
                    initiated_by=request.user,
                    data=data,
                    metadata=metadata,
                    token=api_token,
                    config=config,
                    created_by=request.user,
                    updated_by=request.user,
                )

                # Hand the heavy lifting off to the background worker.
                service_importer.delay(service, importer.id)
                serializer = ImporterSerializer(importer)
                return Response(serializer.data, status=status.HTTP_200_OK)

            # fixed typo: "Servivce" -> "Service"
            return Response(
                {"error": "Service not supported yet"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except (Workspace.DoesNotExist, WorkspaceIntegration.DoesNotExist):
            return Response(
                {"error": "Workspace Integration does not exist"},
                status=status.HTTP_404_NOT_FOUND,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def get(self, request, slug):
        try:
            imports = Importer.objects.filter(workspace__slug=slug)
            serializer = ImporterSerializer(imports, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UpdateServiceImportStatusEndpoint(BaseAPIView):
    """Endpoint for the import worker to report an importer's status."""

    def post(self, request, slug, project_id, service, importer_id):
        try:
            importer = Importer.objects.get(
                pk=importer_id,
                workspace__slug=slug,
                project_id=project_id,
                service=service,
            )
            # Default to "processing" when the caller omits an explicit status.
            importer.status = request.data.get("status", "processing")
            importer.save()
            # fixed: the status code was passed positionally and therefore
            # serialized as the response *body*; it must be the status kwarg.
            return Response(status=status.HTTP_200_OK)
        except Importer.DoesNotExist:
            return Response(
                {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
WorkspaceIntegrationViewSet from .github import ( GithubRepositorySyncViewSet, GithubIssueSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, GithubCommentSyncViewSet, GithubRepositoriesEndpoint, ) diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py index 85e1efa7e..4cf07c705 100644 --- a/apiserver/plane/api/views/integration/github.py +++ b/apiserver/plane/api/views/integration/github.py @@ -13,6 +13,7 @@ from plane.db.models import ( ProjectMember, Label, GithubCommentSync, + Project, ) from plane.api.serializers import ( GithubIssueSyncSerializer, @@ -34,6 +35,13 @@ class GithubRepositoriesEndpoint(BaseAPIView): workspace_integration = WorkspaceIntegration.objects.get( workspace__slug=slug, pk=workspace_integration_id ) + + if workspace_integration.integration.provider != "github": + return Response( + {"error": "Not a github integration"}, + status=status.HTTP_400_BAD_REQUEST, + ) + access_tokens_url = workspace_integration.metadata["access_tokens_url"] repositories_url = ( workspace_integration.metadata["repositories_url"] @@ -93,10 +101,6 @@ class GithubRepositorySyncViewSet(BaseViewSet): GithubRepository.objects.filter( project_id=project_id, workspace__slug=slug ).delete() - # Project member delete - ProjectMember.objects.filter( - member=workspace_integration.actor, role=20, project_id=project_id - ).delete() # Create repository repo = GithubRepository.objects.create( @@ -133,7 +137,7 @@ class GithubRepositorySyncViewSet(BaseViewSet): ) # Add bot as a member in the project - _ = ProjectMember.objects.create( + _ = ProjectMember.objects.get_or_create( member=workspace_integration.actor, role=20, project_id=project_id ) @@ -171,6 +175,46 @@ class GithubIssueSyncViewSet(BaseViewSet): ) +class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): + def post(self, request, slug, project_id, repo_sync_id): + try: + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + github_issue_syncs = 
request.data.get("github_issue_syncs", []) + github_issue_syncs = GithubIssueSync.objects.bulk_create( + [ + GithubIssueSync( + issue_id=github_issue_sync.get("issue"), + repo_issue_id=github_issue_sync.get("repo_issue_id"), + issue_url=github_issue_sync.get("issue_url"), + github_issue_id=github_issue_sync.get("github_issue_id"), + repository_sync_id=repo_sync_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for github_issue_sync in github_issue_syncs + ], + batch_size=100, + ignore_conflicts=True, + ) + + serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + except Project.DoesNotExist: + return Response( + {"error": "Project does not exist"}, + status=status.HTTP_404_NOT_FOUND, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + class GithubCommentSyncViewSet(BaseViewSet): permission_classes = [ diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index ca40606ec..0d16565ca 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -1,5 +1,6 @@ # Python imports import json +import random from itertools import groupby, chain # Django imports @@ -717,3 +718,42 @@ class IssueLinkViewSet(BaseViewSet): .filter(project__project_projectmember__member=self.request.user) .distinct() ) + + +class BulkCreateIssueLabelsEndpoint(BaseAPIView): + def post(self, request, slug, project_id): + try: + label_data = request.data.get("label_data", []) + project = Project.objects.get(pk=project_id) + + labels = Label.objects.bulk_create( + [ + Label( + name=label.get("name", "Migrated"), + description=label.get("description", "Migrated Issue"), + color="#" + "%06x" % random.randint(0, 0xFFFFFF), + project_id=project_id, + 
workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for label in label_data + ], + batch_size=50, + ignore_conflicts=True, + ) + + return Response( + {"labels": LabelSerializer(labels, many=True).data}, + status=status.HTTP_201_CREATED, + ) + except Project.DoesNotExist: + return Response( + {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py new file mode 100644 index 000000000..fdb32a81b --- /dev/null +++ b/apiserver/plane/bgtasks/importer_task.py @@ -0,0 +1,147 @@ +# Python imports +import json +import requests +import jwt +from datetime import datetime + +# Django imports +from django.conf import settings +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from django_rq import job +from sentry_sdk import capture_exception + +# Module imports +from plane.api.serializers import ImporterSerializer +from plane.db.models import ( + Importer, + WorkspaceMemberInvite, + GithubRepositorySync, + GithubRepository, + ProjectMember, + WorkspaceIntegration, + Label, +) +from .workspace_invitation_task import workspace_invitation + + +@job("default") +def service_importer(service, importer_id): + try: + importer = Importer.objects.get(pk=importer_id) + importer.status = "processing" + importer.save() + + users = importer.data.get("users", []) + + workspace_invitations = WorkspaceMemberInvite.objects.bulk_create( + [ + WorkspaceMemberInvite( + email=user.get("email").strip().lower(), + workspace_id=importer.workspace_id, + token=jwt.encode( + { + "email": user.get("email").strip().lower(), + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=10, + ) + for user in 
users + if user.get("import", False) == "invite" + or user.get("import", False) == "map" + ], + batch_size=100, + ignore_conflicts=True, + ) + + # Send the invites + [ + workspace_invitation.delay( + invitation.email, + importer.workspace_id, + invitation.token, + settings.WEB_URL, + importer.initiated_by.email, + ) + for invitation in workspace_invitations + ] + + # Check if sync config is on for github importers + if service == "github" and importer.config.get("sync", False): + name = importer.metadata.get("name", False) + url = importer.metadata.get("url", False) + config = importer.metadata.get("config", {}) + owner = importer.metadata.get("owner", False) + repository_id = importer.metadata.get("repository_id", False) + + workspace_integration = WorkspaceIntegration.objects.get( + workspace_id=importer.workspace_id, integration__provider="github" + ) + + # Delete the old repository object + GithubRepositorySync.objects.filter(project_id=importer.project_id).delete() + GithubRepository.objects.filter(project_id=importer.project_id).delete() + # Project member delete + + # Create a Label for github + label = Label.objects.filter( + name="GitHub", project_id=importer.project_id + ).first() + + if label is None: + label = Label.objects.create( + name="GitHub", + project_id=importer.project_id, + description="Label to sync Plane issues with GitHub issues", + color="#003773", + ) + # Create repository + repo = GithubRepository.objects.create( + name=name, + url=url, + config=config, + repository_id=repository_id, + owner=owner, + project_id=importer.project_id, + ) + + # Create repo sync + repo_sync = GithubRepositorySync.objects.create( + repository=repo, + workspace_integration=workspace_integration, + actor=workspace_integration.actor, + credentials=importer.data.get("credentials", {}), + project_id=importer.project_id, + label=label, + ) + + # Add bot as a member in the project + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, + 
role=20, + project_id=importer.project_id, + ) + + if settings.PROXY_BASE_URL: + headers = {"Content-Type": "application/json"} + import_data_json = json.dumps( + ImporterSerializer(importer).data, + cls=DjangoJSONEncoder, + ) + res = requests.post( + f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/", + json=import_data_json, + headers=headers, + ) + + return + except Exception as e: + importer = Importer.objects.get(pk=importer_id) + importer.status = "failed" + importer.save() + capture_exception(e) + return diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index a9bf30712..7b65ca4e7 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -742,7 +742,11 @@ def issue_activity(event): try: issue_activities = [] type = event.get("type") - requested_data = json.loads(event.get("requested_data")) + requested_data = ( + json.loads(event.get("requested_data")) + if event.get("requested_data") is not None + else None + ) current_instance = ( json.loads(event.get("current_instance")) if event.get("current_instance") is not None diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index 09b44b422..edd65cbc9 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -31,6 +31,7 @@ from .issue import ( Label, IssueBlocker, IssueLink, + IssueSequence, ) from .asset import FileAsset @@ -57,3 +58,5 @@ from .integration import ( GithubIssueSync, GithubCommentSync, ) + +from .importer import Importer diff --git a/apiserver/plane/db/models/importer.py b/apiserver/plane/db/models/importer.py new file mode 100644 index 000000000..d3f55b750 --- /dev/null +++ b/apiserver/plane/db/models/importer.py @@ -0,0 +1,39 @@ +# Django imports +from django.db import models +from django.conf import settings + +# 
Module imports +from . import ProjectBaseModel + + +class Importer(ProjectBaseModel): + service = models.CharField(max_length=50, choices=(("github", "GitHub"),)) + status = models.CharField( + max_length=50, + choices=( + ("queued", "Queued"), + ("processing", "Processing"), + ("completed", "Completed"), + ("failed", "Failed"), + ), + default="queued", + ) + initiated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="imports" + ) + metadata = models.JSONField(default=dict) + config = models.JSONField(default=dict) + data = models.JSONField(default=dict) + token = models.ForeignKey( + "db.APIToken", on_delete=models.CASCADE, related_name="importer" + ) + + class Meta: + verbose_name = "Importer" + verbose_name_plural = "Importers" + db_table = "importers" + ordering = ("-created_at",) + + def __str__(self): + """Return name of the service""" + return f"{self.service} <{self.project.name}>" diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index fc9971000..f5e8b5c20 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -307,6 +307,7 @@ class Label(ProjectBaseModel): color = models.CharField(max_length=255, blank=True) class Meta: + unique_together = ["name", "project"] verbose_name = "Label" verbose_name_plural = "Labels" db_table = "labels" diff --git a/apiserver/plane/utils/integrations/github.py b/apiserver/plane/utils/integrations/github.py index e06ac31f7..d9185cb10 100644 --- a/apiserver/plane/utils/integrations/github.py +++ b/apiserver/plane/utils/integrations/github.py @@ -1,6 +1,7 @@ import os import jwt import requests +from urllib.parse import urlparse, parse_qs from datetime import datetime, timedelta from cryptography.hazmat.primitives.serialization import load_pem_private_key from cryptography.hazmat.backends import default_backend @@ -30,7 +31,7 @@ def get_github_metadata(installation_id): url = 
f"https://api.github.com/app/installations/{installation_id}" headers = { - "Authorization": "Bearer " + token, + "Authorization": "Bearer " + str(token), "Accept": "application/vnd.github+json", } response = requests.get(url, headers=headers).json() @@ -41,7 +42,7 @@ def get_github_repos(access_tokens_url, repositories_url): token = get_jwt_token() headers = { - "Authorization": "Bearer " + token, + "Authorization": "Bearer " + str(token), "Accept": "application/vnd.github+json", } @@ -50,9 +51,9 @@ def get_github_repos(access_tokens_url, repositories_url): headers=headers, ).json() - oauth_token = oauth_response.get("token") + oauth_token = oauth_response.get("token", "") headers = { - "Authorization": "Bearer " + oauth_token, + "Authorization": "Bearer " + str(oauth_token), "Accept": "application/vnd.github+json", } response = requests.get( @@ -67,8 +68,63 @@ def delete_github_installation(installation_id): url = f"https://api.github.com/app/installations/{installation_id}" headers = { - "Authorization": "Bearer " + token, + "Authorization": "Bearer " + str(token), "Accept": "application/vnd.github+json", } response = requests.delete(url, headers=headers) return response + + +def get_github_repo_details(access_tokens_url, owner, repo): + token = get_jwt_token() + + headers = { + "Authorization": "Bearer " + str(token), + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + oauth_response = requests.post( + access_tokens_url, + headers=headers, + ).json() + + oauth_token = oauth_response.get("token") + headers = { + "Authorization": "Bearer " + oauth_token, + "Accept": "application/vnd.github+json", + } + open_issues = requests.get( + f"https://api.github.com/repos/{owner}/{repo}", + headers=headers, + ).json()["open_issues_count"] + + total_labels = 0 + + labels_response = requests.get( + f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1", + headers=headers, + ) + + # Check if there are more pages + if 
len(labels_response.links.keys()): + # get the query parameter of last + last_url = labels_response.links.get("last").get("url") + parsed_url = urlparse(last_url) + last_page_value = parse_qs(parsed_url.query)["page"][0] + total_labels = total_labels + 100 * (int(last_page_value) - 1) + + # Get labels in last page + last_page_labels = requests.get(last_url, headers=headers).json() + total_labels = total_labels + len(last_page_labels) + else: + total_labels = len(labels_response.json()) + + # Currently only supporting up to 100 collaborators + # TODO: Update this function to fetch all collaborators + collaborators = requests.get( + f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1", + headers=headers, + ).json() + + return open_issues, total_labels, collaborators