dev: update apiserver configuration files (#2348)

* dev: update apiserver configuration files

* dev: add email and minio redirection urls
Nikhil 2023-10-04 17:39:12 +05:30 committed by GitHub
parent 981e2aafdd
commit eacf543439
2 changed files with 116 additions and 76 deletions


@@ -1,7 +1,7 @@
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+DJANGO_SETTINGS_MODULE="plane.settings.production"

 # Error logs
 SENTRY_DSN=""

@@ -59,3 +59,6 @@ DEFAULT_PASSWORD="password123"
 # SignUps
 ENABLE_SIGNUP="1"
+
+# Email redirections and minio domain settings
+WEB_URL="http://localhost"
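The added WEB_URL value is what the settings file in the second half of this commit parses to build the MinIO domain and the email redirection links. A minimal sketch of the lookup, assuming the variable is exported into the apiserver process environment:

import os

# Same default as the .env entry above; the production settings pass this
# value through urlparse() to derive the MinIO custom domain and redirect URLs.
WEB_URL = os.environ.get("WEB_URL", "http://localhost")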


@@ -7,6 +7,7 @@ import dj_database_url
 import sentry_sdk
 from sentry_sdk.integrations.django import DjangoIntegration
 from sentry_sdk.integrations.redis import RedisIntegration
+from urllib.parse import urlparse

 from .common import *  # noqa
@@ -89,90 +90,112 @@ if bool(os.environ.get("SENTRY_DSN", False)):
         profiles_sample_rate=1.0,
     )

-# The AWS region to connect to.
-AWS_REGION = os.environ.get("AWS_REGION", "")
-
-# The AWS access key to use.
-AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
-
-# The AWS secret access key to use.
-AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
-
-# The optional AWS session token to use.
-# AWS_SESSION_TOKEN = ""
-
-# The name of the bucket to store files in.
-AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
-
-# How to construct S3 URLs ("auto", "path", "virtual").
-AWS_S3_ADDRESSING_STYLE = "auto"
-
-# The full URL to the S3 endpoint. Leave blank to use the default region URL.
-AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "")
-
-# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
-AWS_S3_KEY_PREFIX = ""
-
-# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
-# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
-# and their permissions will be set to "public-read".
-AWS_S3_BUCKET_AUTH = False
-
-# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
-# is True. It also affects the "Cache-Control" header of the files.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hours.
-
-# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
-# cannot be used with `AWS_S3_BUCKET_AUTH`.
-AWS_S3_PUBLIC_URL = ""
-
-# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
-# understand the consequences before enabling.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_REDUCED_REDUNDANCY = False
-
-# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_DISPOSITION = ""
-
-# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_LANGUAGE = ""
-
-# A mapping of custom metadata for each file. Each value can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_METADATA = {}
-
-# If True, then files will be stored using AES256 server-side encryption.
-# If this is a string value (e.g., "aws:kms"), that encryption type will be used.
-# Otherwise, server-side encryption is not be enabled.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_ENCRYPT_KEY = False
-
-# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
-# This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
-# AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
-
-# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
-# compressed size is smaller than their uncompressed size.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_GZIP = True
-
-# The signature version to use for S3 requests.
-AWS_S3_SIGNATURE_VERSION = None
-
-# If True, then files with the same name will overwrite each other. By default it's set to False to have
-# extra characters appended.
-AWS_S3_FILE_OVERWRITE = False
-
-STORAGES["default"] = {
-    "BACKEND": "django_s3_storage.storage.S3Storage",
-}
+if DOCKERIZED and USE_MINIO:
+    INSTALLED_APPS += ("storages",)
+    STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"}
+    # The AWS access key to use.
+    AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
+    # The AWS secret access key to use.
+    AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
+    # The name of the bucket to store files in.
+    AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
+    # The full URL to the S3 endpoint. Leave blank to use the default region URL.
+    AWS_S3_ENDPOINT_URL = os.environ.get(
+        "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000"
+    )
+    # Default permissions
+    AWS_DEFAULT_ACL = "public-read"
+    AWS_QUERYSTRING_AUTH = False
+    AWS_S3_FILE_OVERWRITE = False
+
+    # Custom Domain settings
+    parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
+    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
+    AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
+else:
+    # The AWS region to connect to.
+    AWS_REGION = os.environ.get("AWS_REGION", "")
+
+    # The AWS access key to use.
+    AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
+
+    # The AWS secret access key to use.
+    AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
+
+    # The optional AWS session token to use.
+    # AWS_SESSION_TOKEN = ""
+
+    # The name of the bucket to store files in.
+    AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
+
+    # How to construct S3 URLs ("auto", "path", "virtual").
+    AWS_S3_ADDRESSING_STYLE = "auto"
+
+    # The full URL to the S3 endpoint. Leave blank to use the default region URL.
+    AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "")
+
+    # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
+    AWS_S3_KEY_PREFIX = ""
+
+    # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
+    # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
+    # and their permissions will be set to "public-read".
+    AWS_S3_BUCKET_AUTH = False
+
+    # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
+    # is True. It also affects the "Cache-Control" header of the files.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hours.
+
+    # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
+    # cannot be used with `AWS_S3_BUCKET_AUTH`.
+    AWS_S3_PUBLIC_URL = ""
+
+    # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
+    # understand the consequences before enabling.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_REDUCED_REDUNDANCY = False
+
+    # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
+    # single `name` argument.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_CONTENT_DISPOSITION = ""
+
+    # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
+    # single `name` argument.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_CONTENT_LANGUAGE = ""
+
+    # A mapping of custom metadata for each file. Each value can be a string, or a function taking a
+    # single `name` argument.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_METADATA = {}
+
+    # If True, then files will be stored using AES256 server-side encryption.
+    # If this is a string value (e.g., "aws:kms"), that encryption type will be used.
+    # Otherwise, server-side encryption is not be enabled.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_ENCRYPT_KEY = False
+
+    # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
+    # This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
+    # AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
+
+    # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
+    # compressed size is smaller than their uncompressed size.
+    # Important: Changing this setting will not affect existing files.
+    AWS_S3_GZIP = True
+
+    # The signature version to use for S3 requests.
+    AWS_S3_SIGNATURE_VERSION = None
+
+    # If True, then files with the same name will overwrite each other. By default it's set to False to have
+    # extra characters appended.
+    AWS_S3_FILE_OVERWRITE = False
+
+    STORAGES["default"] = {
+        "BACKEND": "django_s3_storage.storage.S3Storage",
+    }
 # AWS Settings End

 # Enable Connection Pooling (if desired)
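The DOCKERIZED and USE_MINIO branch above derives the public object URL from WEB_URL instead of an AWS region. A minimal sketch of that derivation, assuming django-storages composes object URLs from the protocol, the custom domain, and the object key; the helper name below is illustrative only:

from urllib.parse import urlparse

def minio_domain_settings(web_url: str, bucket: str = "uploads") -> dict:
    # Mirrors the custom-domain lines in the hunk above (illustrative only).
    parsed_url = urlparse(web_url)
    return {
        "AWS_S3_CUSTOM_DOMAIN": f"{parsed_url.netloc}/{bucket}",
        "AWS_S3_URL_PROTOCOL": f"{parsed_url.scheme}:",
    }

# With the .env default WEB_URL="http://localhost" this yields
# {'AWS_S3_CUSTOM_DOMAIN': 'localhost/uploads', 'AWS_S3_URL_PROTOCOL': 'http:'},
# so uploads should be addressed through the same host as the web app,
# roughly http://localhost/uploads/<object-key>.
print(minio_domain_settings("http://localhost"))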
@@ -193,16 +216,27 @@ CSRF_COOKIE_SECURE = True
 REDIS_URL = os.environ.get("REDIS_URL")

-CACHES = {
-    "default": {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": REDIS_URL,
-        "OPTIONS": {
-            "CLIENT_CLASS": "django_redis.client.DefaultClient",
-            "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
-        },
-    }
-}
+if DOCKERIZED:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_redis.cache.RedisCache",
+            "LOCATION": REDIS_URL,
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_redis.client.DefaultClient",
+            },
+        }
+    }
+else:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_redis.cache.RedisCache",
+            "LOCATION": REDIS_URL,
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_redis.client.DefaultClient",
+                "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
+            },
+        }
+    }

 WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so")
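The two cache branches above differ only in whether CONNECTION_POOL_KWARGS relaxes certificate verification, which matters for a managed rediss:// instance but not for the local Redis container. A minimal, non-duplicated sketch of the same selection, assuming DOCKERIZED is a boolean flag provided by the .common star import (its actual definition is not part of this diff):

import os

# Assumption: DOCKERIZED is derived from an environment variable of the same name;
# in the real settings it is imported from .common.
DOCKERIZED = os.environ.get("DOCKERIZED", "1") == "1"
REDIS_URL = os.environ.get("REDIS_URL")

cache_options = {"CLIENT_CLASS": "django_redis.client.DefaultClient"}
if not DOCKERIZED:
    # Managed Redis over TLS: relax certificate verification for the connection pool.
    cache_options["CONNECTION_POOL_KWARGS"] = {"ssl_cert_reqs": False}

CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": REDIS_URL,
        "OPTIONS": cache_options,
    }
}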
@@ -225,8 +259,12 @@ broker_url = (
     f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
 )

-CELERY_RESULT_BACKEND = broker_url
-CELERY_BROKER_URL = broker_url
+if DOCKERIZED:
+    CELERY_BROKER_URL = REDIS_URL
+    CELERY_RESULT_BACKEND = REDIS_URL
+else:
+    CELERY_BROKER_URL = broker_url
+    CELERY_RESULT_BACKEND = broker_url

 GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
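Outside Docker, the Celery broker keeps the TLS query parameters shown in the context lines of the hunk above. A minimal sketch of the selection, assuming redis_url points at a managed rediss:// instance; the helper name is illustrative only:

import ssl

import certifi

def build_broker_url(redis_url: str, dockerized: bool) -> str:
    # Illustrative helper mirroring the branch in the hunk above.
    if dockerized:
        # Local Redis container: the plain REDIS_URL is used as-is.
        return redis_url
    # Managed Redis over TLS: skip certificate verification, pin certifi's CA bundle.
    return f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"

# e.g. build_broker_url("rediss://:password@redis-host:6379", dockerized=False)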
@@ -237,4 +275,3 @@ ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
 SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
 SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
 SCOUT_NAME = "Plane"