From 8d4e4a7485a5fdf56be20d50258b9c6d7a773011 Mon Sep 17 00:00:00 2001
From: pablohashescobar
Date: Tue, 31 Oct 2023 12:38:52 +0530
Subject: [PATCH] dev: enable global level log settings

---
 .gitignore                             |  1 +
 apiserver/plane/settings/common.py     | 43 ++++++++++++++++++++++++++
 apiserver/plane/settings/selfhosted.py | 36 ---------------------
 deploy/selfhost/docker-compose.yml     |  9 +++++++++
 4 files changed, 53 insertions(+), 36 deletions(-)

diff --git a/.gitignore b/.gitignore
index 7568602d3..f48505350 100644
--- a/.gitignore
+++ b/.gitignore
@@ -51,6 +51,7 @@ staticfiles
 mediafiles
 .env
 .DS_Store
+logs/
 
 node_modules/
 assets/dist/
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index 27da44d9c..6f233a06c 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -215,3 +215,46 @@ CELERY_TIMEZONE = TIME_ZONE
 CELERY_TASK_SERIALIZER = 'json'
 CELERY_ACCEPT_CONTENT = ['application/json']
 CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task","plane.bgtasks.exporter_expired_task")
+
+LOG_DIR = os.path.join(BASE_DIR, 'logs')
+
+if not os.path.exists(LOG_DIR):
+    os.makedirs(LOG_DIR, exist_ok=True)
+
+
+LOGGING = {
+    "version": 1,
+    "disable_existing_loggers": False,
+    "formatters": {
+        "verbose": {
+            "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
+            "style": "{",
+        },
+        "json": {
+            "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
+            "fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
+        },
+    },
+    "handlers": {
+        "console": {
+            "class": "logging.StreamHandler",
+            "formatter": "verbose",
+        },
+        "file": {
+            "class": "logging.handlers.TimedRotatingFileHandler",
+            "filename": os.path.join(LOG_DIR, 'debug.log' if DEBUG else 'error.log'),
+            "when": "midnight",
+            "interval": 1,  # One day
+            "backupCount": 5,  # Keep last 5 days of logs
+            "formatter": "json",
+        },
+    },
+    "loggers": {
+        "plane": {
+            "level": "DEBUG" if DEBUG else "ERROR",
+            "handlers": ["console", "file"],
+            "propagate": False,
+        },
+    },
+}
+
diff --git a/apiserver/plane/settings/selfhosted.py b/apiserver/plane/settings/selfhosted.py
index 43c4dcb26..799d2b5e7 100644
--- a/apiserver/plane/settings/selfhosted.py
+++ b/apiserver/plane/settings/selfhosted.py
@@ -125,39 +125,3 @@ OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
 GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
 
-
-LOGGING = {
-    "version": 1,
-    "disable_existing_loggers": False,
-    "formatters": {
-        "verbose": {
-            "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
-            "style": "{",
-        },
-        "json": {
-            "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
-            "fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
-        },
-    },
-    "handlers": {
-        "console": {
-            "class": "logging.StreamHandler",
-            "formatter": "verbose",
-        },
-        "file": {
-            "class": "logging.handlers.TimedRotatingFileHandler",
-            "filename": "debug.log",
-            "when": "midnight",
-            "interval": 1,  # One day
-            "backupCount": 5,  # Keep last 5 days of logs,
-            "formatter": "json",
-        },
-    },
-    "loggers": {
-        "plane": {
-            "level": "ERROR",
-            "handlers": ["console", "file"],
-            "propagate": False,
-        },
-    },
-}
diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml
index 33a0f6673..9a369a65f 100644
--- a/deploy/selfhost/docker-compose.yml
+++ b/deploy/selfhost/docker-compose.yml
@@ -87,6 +87,8 @@ services:
     command: ./bin/takeoff
     deploy:
       replicas: ${API_REPLICAS:-1}
+    volumes:
+      - logs_api:/code/plane/logs
     depends_on:
       - plane-db
       - plane-redis
@@ -98,6 +100,8 @@ services:
     image: makeplane/plane-backend:${APP_RELEASE:-latest}
     restart: unless-stopped
     command: ./bin/worker
+    volumes:
+      - logs_worker:/code/plane/logs
     depends_on:
       - api
       - plane-db
@@ -110,6 +114,8 @@ services:
     image: makeplane/plane-backend:${APP_RELEASE:-latest}
     restart: unless-stopped
     command: ./bin/beat
+    volumes:
+      - logs_beat-worker:/code/plane/logs
     depends_on:
       - api
       - plane-db
@@ -166,3 +172,6 @@ volumes:
   pgdata:
   redisdata:
   uploads:
+  logs_api:
+  logs_worker:
+  logs_beat-worker: