2
0
mirror of https://github.com/inventree/InvenTree.git synced 2026-04-25 04:23:33 +00:00

Ensure configuration settings are documented (#11674)

* Export configuration keys to JSON

* Support rendering of config options in docs

* Check for missing config settings

* Simplify macro

* Initial tests

* Fix collisions

* Updates

* Ensure values are stringified

* Ensure null values are exported correctly

* Refactor database config

- Observability on the config settings

* More docs updates

* Updates

* Add observability of cache settings

* More updates

* Observability

* Set env config for RTD

* Revert RTD config file

- Handled by ENV VAR on RTD account

* Visibility on background worker settings

* Tweaks

* Tweaks

* Split tracing settings out into separate file

- Improved discovery
- declutter settings.py

* Cleanup LDAP settings

* Social providers docs

* More updates

* Refactor ldap setup into own module

* Tweaks

* Formatting tweaks

* Tweak logic

* Fix INVENTREE_SESSION_COOKIE_SECURE setting

* Fix wrapping

* Add custom default
This commit is contained in:
Oliver
2026-04-21 12:23:53 +10:00
committed by GitHub
parent 3c9b014939
commit d81a87225c
20 changed files with 754 additions and 635 deletions
+1 -1
View File
@@ -297,7 +297,7 @@ class InfoView(APIView):
'instance': InvenTree.version.inventreeInstanceName(),
'apiVersion': InvenTree.version.inventreeApiVersion(),
'worker_running': is_worker_running(),
'worker_count': settings.BACKGROUND_WORKER_COUNT,
'worker_count': settings.Q_CLUSTER['workers'],
'worker_pending_tasks': self.worker_pending_tasks(),
'plugins_enabled': settings.PLUGINS_ENABLED,
'plugins_install_disabled': settings.PLUGINS_INSTALL_DISABLED,
+39 -39
View File
@@ -94,51 +94,51 @@ def get_cache_config(global_cache: bool) -> dict:
Returns:
A dictionary containing the cache configuration options.
"""
if global_cache:
# Build Redis URL with optional password
password = cache_password()
user = cache_user() or ''
# Build Redis URL with optional password
password = cache_password()
user = cache_user() or ''
host = cache_host()
port = cache_port()
db = cache_db()
if password:
redis_url = (
f'redis://{user}:{password}@{cache_host()}:{cache_port()}/{cache_db()}'
)
else:
redis_url = f'redis://{cache_host()}:{cache_port()}/{cache_db()}'
if password:
redis_url = f'redis://{user}:{password}@{host}:{port}/{db}'
else:
redis_url = f'redis://{host}:{port}/{db}'
keepalive_options = {
'TCP_KEEPCNT': cache_setting('keepalive_count', 5, typecast=int),
'TCP_KEEPIDLE': cache_setting('keepalive_idle', 1, typecast=int),
'TCP_KEEPINTVL': cache_setting('keepalive_interval', 1, typecast=int),
'TCP_USER_TIMEOUT': cache_setting('user_timeout', 1000, typecast=int),
}
keepalive_options = {
'TCP_KEEPCNT': cache_setting('keepalive_count', 5, typecast=int),
'TCP_KEEPIDLE': cache_setting('keepalive_idle', 1, typecast=int),
'TCP_KEEPINTVL': cache_setting('keepalive_interval', 1, typecast=int),
'TCP_USER_TIMEOUT': cache_setting('user_timeout', 1000, typecast=int),
}
return {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': redis_url,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'SOCKET_CONNECT_TIMEOUT': cache_setting(
'connect_timeout', 5, typecast=int
),
'SOCKET_TIMEOUT': cache_setting('timeout', 3, typecast=int),
'CONNECTION_POOL_KWARGS': {
'socket_keepalive': cache_setting(
'tcp_keepalive', True, typecast=bool
),
'socket_keepalive_options': {
# Only include options which are available on this platform
# e.g. MacOS does not have TCP_KEEPIDLE and TCP_USER_TIMEOUT
getattr(socket, key): value
for key, value in keepalive_options.items()
if hasattr(socket, key)
},
global_cache_config = {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': redis_url,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'SOCKET_CONNECT_TIMEOUT': cache_setting('connect_timeout', 5, typecast=int),
'SOCKET_TIMEOUT': cache_setting('timeout', 3, typecast=int),
'CONNECTION_POOL_KWARGS': {
'socket_keepalive': cache_setting('tcp_keepalive', True, typecast=bool),
'socket_keepalive_options': {
# Only include options which are available on this platform
# e.g. MacOS does not have TCP_KEEPIDLE and TCP_USER_TIMEOUT
getattr(socket, key): value
for key, value in keepalive_options.items()
if hasattr(socket, key)
},
},
}
},
}
# Default: Use django local memory cache
return {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}
if global_cache:
# Only return the global cache configuration if the global cache is enabled
return global_cache_config
else:
# Default: Use django local memory cache
return {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}
def create_session_cache(request) -> None:
+8 -4
View File
@@ -270,10 +270,13 @@ def get_setting(env_var=None, config_key=None, default_value=None, typecast=None
def set_metadata(source: str):
"""Set lookup metadata for the setting."""
global CONFIG_LOOKUPS
key = env_var or config_key
CONFIG_LOOKUPS[key] = {
'env_var': env_var,
'config_key': config_key,
'default_value': default_value,
'source': source,
'accessed': datetime.datetime.now(),
}
@@ -544,13 +547,14 @@ def get_frontend_settings(debug=True):
'INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict
)
base_url = get_setting(
'INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'web', typecast=str
)
# Set the base URL for the user interface
# This is the UI path e.g. '/web/'
if 'base_url' not in frontend_settings:
frontend_settings['base_url'] = (
get_setting('INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'web')
or 'web'
)
frontend_settings['base_url'] = base_url
# If provided, specify the API host
api_host = frontend_settings.get('api_host', None) or get_setting(
@@ -24,8 +24,9 @@ class Command(BaseCommand):
def handle(self, *args, **kwargs):
"""Export settings information to a JSON file."""
from common.models import InvenTreeSetting, InvenTreeUserSetting
from InvenTree.config import CONFIG_LOOKUPS
settings = {'global': {}, 'user': {}}
settings = {'global': {}, 'user': {}, 'config': {}}
# Global settings
for key, setting in InvenTreeSetting.SETTINGS.items():
@@ -45,6 +46,18 @@ class Command(BaseCommand):
'units': str(setting.get('units', '')),
}
# Configuration settings
for key, config in CONFIG_LOOKUPS.items():
default_value = config.get('default_value', None)
if default_value is not None:
default_value = str(default_value)
settings['config'][key] = {
'env_var': str(config.get('env_var', '')),
'config_key': str(config.get('config_key', '')),
'default_value': default_value,
}
filename = kwargs.get('filename', 'inventree_settings.json')
with open(filename, 'w', encoding='utf-8') as f:
@@ -1,5 +1,7 @@
"""Configuration settings specific to a particular database backend."""
from pathlib import Path
import structlog
from InvenTree.config import get_boolean_setting, get_setting
@@ -7,6 +9,62 @@ from InvenTree.config import get_boolean_setting, get_setting
logger = structlog.get_logger('inventree')
def get_db_backend():
    """Return the database backend configuration.

    Reads all database options via get_setting (environment variable first,
    then config file), normalizes the engine name, and returns a dict
    suitable for use as a Django DATABASES entry.

    Returns:
        dict: Database configuration with ENGINE / NAME / USER / PASSWORD /
        HOST / PORT / OPTIONS / TEST keys.

    Raises:
        ValueError: If a required configuration key (ENGINE, NAME) is missing.
    """
    db_config = {
        'ENGINE': get_setting('INVENTREE_DB_ENGINE', 'database.engine', None),
        'NAME': get_setting('INVENTREE_DB_NAME', 'database.name', None),
        'USER': get_setting('INVENTREE_DB_USER', 'database.user', None),
        'PASSWORD': get_setting('INVENTREE_DB_PASSWORD', 'database.password', None),
        'HOST': get_setting('INVENTREE_DB_HOST', 'database.host', None),
        'PORT': get_setting('INVENTREE_DB_PORT', 'database.port', 5432, typecast=int),
        # Backend-specific options; 'or {}' guards against an explicit null value
        'OPTIONS': get_setting(
            'INVENTREE_DB_OPTIONS', 'database.options', {}, typecast=dict
        )
        or {},
    }

    # Check for required keys
    required_keys = ['ENGINE', 'NAME']

    for key in required_keys:
        if not db_config[key]:
            raise ValueError(
                f'Missing required database configuration key: INVENTREE_DB_{key}'
            )

    DB_ENGINE = db_config['ENGINE'].lower()

    # Correct common misspelling
    if DB_ENGINE == 'sqlite':
        DB_ENGINE = 'sqlite3'  # pragma: no cover

    if DB_ENGINE in ['sqlite3', 'postgresql', 'mysql']:
        # Prepend the required python module string
        # (engine may be given as e.g. 'sqlite3' or 'django.db.backends.sqlite3')
        DB_ENGINE = f'django.db.backends.{DB_ENGINE}'
        db_config['ENGINE'] = DB_ENGINE

    if 'sqlite' in DB_ENGINE:
        # SQLite database path must be absolute for reliable operation
        db_name = str(Path(db_config['NAME']).resolve())
        db_config['NAME'] = db_name

    logger.info('DB_ENGINE: %s', DB_ENGINE)
    logger.info('DB_NAME: %s', db_config['NAME'])
    logger.info('DB_HOST: %s', db_config.get('HOST', "''"))

    # Set testing options for the database
    db_config['TEST'] = {'CHARSET': 'utf8'}

    # Set collation option for mysql test database
    if 'mysql' in DB_ENGINE:
        db_config['TEST']['COLLATION'] = 'utf8_general_ci'  # pragma: no cover

    # Specify database specific configuration
    set_db_options(DB_ENGINE, db_config['OPTIONS'])

    return db_config
def set_db_options(engine: str, db_options: dict):
"""Update database options based on the specified database backend.
@@ -0,0 +1,139 @@
"""Configuration of LDAP support for InvenTree."""
from InvenTree.config import get_boolean_setting, get_setting
def get_ldap_config(debug: bool = False) -> dict:
    """Return a dictionary of LDAP configuration settings.

    The returned settings will be updated into the globals() object,
    and will be used to configure the LDAP authentication backend.

    Args:
        debug: If True, print the resolved ldap.global_options for troubleshooting.

    Returns:
        dict: Mapping of AUTH_LDAP_* setting names to their configured values.
    """
    # Imported lazily: these packages are optional and only needed when LDAP is enabled
    import django_auth_ldap.config  # type: ignore[unresolved-import]
    import ldap  # type: ignore[unresolved-import]

    # get global options from dict and use ldap.OPT_* as keys and values
    global_options_dict = get_setting(
        'INVENTREE_LDAP_GLOBAL_OPTIONS',
        'ldap.global_options',
        default_value=None,
        typecast=dict,
    )

    global_options = {}

    # NOTE(review): assumes get_setting(typecast=dict) never returns None here
    # (otherwise .items() below would raise) — TODO confirm
    for k, v in global_options_dict.items():
        # keys are always ldap.OPT_* constants
        k_attr = getattr(ldap, k, None)
        if not k.startswith('OPT_') or k_attr is None:
            print(f"[LDAP] ldap.global_options, key '{k}' not found, skipping...")
            continue

        # values can also be other strings, e.g. paths
        v_attr = v
        if v.startswith('OPT_'):
            v_attr = getattr(ldap, v, None)

        if v_attr is None:
            print(f"[LDAP] ldap.global_options, value key '{v}' not found, skipping...")
            continue

        global_options[k_attr] = v_attr

    if debug:
        print('[LDAP] ldap.global_options =', global_options)

    # Group settings are resolved up-front because they are referenced more than once below
    group_type_class = get_setting(
        'INVENTREE_LDAP_GROUP_TYPE_CLASS',
        'ldap.group_type_class',
        'GroupOfUniqueNamesType',
        str,
    )
    group_type_class_args = get_setting(
        'INVENTREE_LDAP_GROUP_TYPE_CLASS_ARGS', 'ldap.group_type_class_args', [], list
    )
    group_type_class_kwargs = get_setting(
        'INVENTREE_LDAP_GROUP_TYPE_CLASS_KWARGS',
        'ldap.group_type_class_kwargs',
        {'name_attr': 'cn'},
        dict,
    )
    group_object_class = get_setting(
        'INVENTREE_LDAP_GROUP_OBJECT_CLASS',
        'ldap.group_object_class',
        'groupOfUniqueNames',
        str,
    )

    ldap_config = {
        'AUTH_LDAP_GLOBAL_OPTIONS': global_options,
        'AUTH_LDAP_SERVER_URI': get_setting(
            'INVENTREE_LDAP_SERVER_URI', 'ldap.server_uri'
        ),
        'AUTH_LDAP_START_TLS': get_boolean_setting(
            'INVENTREE_LDAP_START_TLS', 'ldap.start_tls', False
        ),
        'AUTH_LDAP_BIND_DN': get_setting('INVENTREE_LDAP_BIND_DN', 'ldap.bind_dn'),
        'AUTH_LDAP_BIND_PASSWORD': get_setting(
            'INVENTREE_LDAP_BIND_PASSWORD', 'ldap.bind_password'
        ),
        'AUTH_LDAP_USER_SEARCH': django_auth_ldap.config.LDAPSearch(
            get_setting('INVENTREE_LDAP_SEARCH_BASE_DN', 'ldap.search_base_dn'),
            ldap.SCOPE_SUBTREE,
            # NOTE(review): default filter contains a space after 'uid=' —
            # preserved from the legacy settings; confirm this is intentional
            str(
                get_setting(
                    'INVENTREE_LDAP_SEARCH_FILTER_STR',
                    'ldap.search_filter_str',
                    '(uid= %(user)s)',
                )
            ),
        ),
        'AUTH_LDAP_USER_DN_TEMPLATE': get_setting(
            'INVENTREE_LDAP_USER_DN_TEMPLATE', 'ldap.user_dn_template'
        ),
        'AUTH_LDAP_USER_ATTR_MAP': get_setting(
            'INVENTREE_LDAP_USER_ATTR_MAP',
            'ldap.user_attr_map',
            {'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'},
            dict,
        ),
        'AUTH_LDAP_ALWAYS_UPDATE_USER': get_boolean_setting(
            'INVENTREE_LDAP_ALWAYS_UPDATE_USER', 'ldap.always_update_user', True
        ),
        'AUTH_LDAP_CACHE_TIMEOUT': get_setting(
            'INVENTREE_LDAP_CACHE_TIMEOUT', 'ldap.cache_timeout', 3600, int
        ),
        'AUTH_LDAP_MIRROR_GROUPS': get_boolean_setting(
            'INVENTREE_LDAP_MIRROR_GROUPS', 'ldap.mirror_groups', False
        ),
        'AUTH_LDAP_GROUP_OBJECT_CLASS': group_object_class,
        'AUTH_LDAP_GROUP_SEARCH': django_auth_ldap.config.LDAPSearch(
            get_setting('INVENTREE_LDAP_GROUP_SEARCH', 'ldap.group_search'),
            ldap.SCOPE_SUBTREE,
            f'(objectClass={group_object_class})',
        ),
        'AUTH_LDAP_GROUP_TYPE_CLASS': group_type_class,
        # Copy args/kwargs so the exported config does not alias the settings objects
        'AUTH_LDAP_GROUP_TYPE_CLASS_ARGS': [*group_type_class_args],
        'AUTH_LDAP_GROUP_TYPE_CLASS_KWARGS': {**group_type_class_kwargs},
        # Instantiate the configured group type class from django_auth_ldap.config
        'AUTH_LDAP_GROUP_TYPE': getattr(django_auth_ldap.config, group_type_class)(
            *group_type_class_args, **group_type_class_kwargs
        ),
        'AUTH_LDAP_REQUIRE_GROUP': get_setting(
            'INVENTREE_LDAP_REQUIRE_GROUP', 'ldap.require_group'
        ),
        'AUTH_LDAP_DENY_GROUP': get_setting(
            'INVENTREE_LDAP_DENY_GROUP', 'ldap.deny_group'
        ),
        'AUTH_LDAP_USER_FLAGS_BY_GROUP': get_setting(
            'INVENTREE_LDAP_USER_FLAGS_BY_GROUP',
            'ldap.user_flags_by_group',
            default_value=None,
            typecast=dict,
        ),
        'AUTH_LDAP_FIND_GROUP_PERMS': True,
    }

    return ldap_config
@@ -0,0 +1,61 @@
"""Start-up configuration options for InvenTree tracing integrations."""
import structlog
from InvenTree.config import get_boolean_setting, get_setting
from InvenTree.tracing import setup_instruments, setup_tracing
logger = structlog.get_logger('inventree')
def configure_tracing(db_engine: str, enabled: bool, tags: dict) -> dict:
    """Configure tracing integrations for InvenTree.

    Arguments:
        db_engine: The database engine being used
        enabled: Whether tracing is enabled
        tags: A dictionary of tags to be included in tracing spans, with keys
            prefixed by 'inventree.env.'

    Returns:
        The tracing configuration details dict, or None if tracing is disabled.
        Tracing is only actually initialized when both 'enabled' is True and an
        endpoint has been configured.
    """
    endpoint = get_setting('INVENTREE_TRACING_ENDPOINT', 'tracing.endpoint', None)
    headers = (
        get_setting('INVENTREE_TRACING_HEADERS', 'tracing.headers', None, typecast=dict)
        or {}
    )

    if not enabled:
        return None

    # Build the details dict unconditionally (not only when an endpoint is set),
    # so the return statement below never references an unbound name
    TRACING_DETAILS = {
        'endpoint': endpoint,
        'headers': headers,
        'resources_input': {
            **{'inventree.env.' + k: v for k, v in tags.items()},
            # 'or {}' guards against a None result (cannot ** splat None)
            **(
                get_setting(
                    'INVENTREE_TRACING_RESOURCES',
                    'tracing.resources',
                    default_value=None,
                    typecast=dict,
                )
                or {}
            ),
        },
        'console': get_boolean_setting(
            'INVENTREE_TRACING_CONSOLE', 'tracing.console', False
        ),
        'auth': get_setting(
            'INVENTREE_TRACING_AUTH', 'tracing.auth', default_value=None, typecast=dict
        ),
        'is_http': get_setting('INVENTREE_TRACING_IS_HTTP', 'tracing.is_http', True),
        'append_http': get_boolean_setting(
            'INVENTREE_TRACING_APPEND_HTTP', 'tracing.append_http', True
        ),
    }

    if endpoint:
        logger.info('Tracing enabled with endpoint: %s', endpoint)
        # Run tracing/logging instrumentation
        setup_tracing(**TRACING_DETAILS)
        setup_instruments(db_engine)
    else:
        logger.warning('OpenTelemetry tracing not enabled because endpoint is not set')

    return TRACING_DETAILS
@@ -0,0 +1,88 @@
"""Configuration settings for the InvenTree background worker process."""
import sys
from InvenTree.config import get_setting
def get_worker_config(
    db_engine: str,
    global_cache: bool = False,
    sentry_dsn: str = '',
    debug: bool = False,
) -> dict:
    """Return a dictionary of configuration settings for the background worker.

    Arguments:
        db_engine: The database engine being used (e.g. 'sqlite', 'postgresql', 'mysql')
        global_cache: Whether a global redis cache is enabled
        sentry_dsn: The DSN for sentry.io integration (if enabled)
        debug: Whether the application is running in debug mode

    Ref: https://django-q2.readthedocs.io/en/master/configure.html
    """
    worker_timeout = int(
        get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)
    )

    # The retry interval must exceed the worker timeout by a margin,
    # so that a task has time to time out before it is retried
    worker_retry = max(
        int(get_setting('INVENTREE_BACKGROUND_RETRY', 'background.retry', 300)),
        worker_timeout + 120,
    )

    worker_attempts = int(
        get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)
    )

    worker_count = int(
        get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)
    )

    # Without a shared (redis) cache, multiple workers would produce scheduling
    # conflicts; SQLite likewise cannot tolerate concurrent workers (db locking)
    if not global_cache or 'sqlite' in db_engine:
        worker_count = 1

    # In debug mode (with --noreload), a '--sync' command-line flag
    # forces background tasks to run synchronously
    sync_tasks = debug and '--sync' in sys.argv and '--noreload' in sys.argv

    if sync_tasks:
        # Strip the custom flag so Django's argument parser does not reject it
        sys.argv.remove('--sync')

    # django-q background worker configuration
    worker_settings = {
        'name': 'InvenTree',
        'label': 'Background Tasks',
        'workers': worker_count,
        'timeout': worker_timeout,
        'retry': worker_retry,
        'max_attempts': worker_attempts,
        'save_limit': 1000,
        'queue_limit': 50,
        'catch_up': False,
        'bulk': 10,
        'orm': 'default',
        'cache': 'default',
        'sync': sync_tasks,
        'poll': 1.5,
    }

    if global_cache:
        # If using external redis cache, make the cache the broker for Django Q
        worker_settings['django_redis'] = 'worker'

    if sentry_dsn:
        # If sentry is enabled, configure django-q to report errors to sentry
        worker_settings['error_reporter'] = {'sentry': {'dsn': sentry_dsn}}

    return worker_settings
+55 -347
View File
@@ -12,7 +12,6 @@ database setup in this file.
import logging
import os
import sys
from pathlib import Path
from typing import Optional
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
@@ -33,7 +32,16 @@ from InvenTree.version import checkMinPythonVersion, inventreeCommitHash
from users.oauth2_scopes import oauth2_scopes
from . import config
from .setting import db_backend, locales, markdown, spectacular, storages
from .setting import (
db_backend,
ldap,
locales,
markdown,
spectacular,
storages,
tracing,
worker,
)
try:
import django_stubs_ext
@@ -81,6 +89,8 @@ DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', False)
# Internal flag to determine if we are running in docker mode
DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)
AUTO_UPDATE = get_boolean_setting('INVENTREE_AUTO_UPDATE', 'auto_update', False)
# Configure logging settings
LOG_LEVEL = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING')
JSON_LOG = get_boolean_setting('INVENTREE_JSON_LOG', 'json_log', False)
@@ -222,9 +232,7 @@ PLUGIN_TESTING_EVENTS_ASYNC = False # Flag if events are tested asynchronously
PLUGIN_TESTING_RELOAD = False # Flag if plugin reloading is in testing (check_reload)
# Plugin development settings
PLUGIN_DEV_SLUG = (
get_setting('INVENTREE_PLUGIN_DEV_SLUG', 'plugin_dev.slug') if DEBUG else None
)
PLUGIN_DEV_SLUG = get_setting('INVENTREE_PLUGIN_DEV_SLUG', 'plugin_dev.slug')
PLUGIN_DEV_HOST = get_setting(
'INVENTREE_PLUGIN_DEV_HOST', 'plugin_dev.host', 'http://localhost:5174'
@@ -386,8 +394,11 @@ MIDDLEWARE = CONFIG.get(
# In DEBUG mode, add support for django-silk
# Ref: https://silk.readthedocs.io/en/latest/
DJANGO_SILK_ENABLED = DEBUG and get_boolean_setting( # pragma: no cover
'INVENTREE_DEBUG_SILK', 'debug_silk', False
DJANGO_SILK_ENABLED = (
get_boolean_setting( # pragma: no cover
'INVENTREE_DEBUG_SILK', 'debug_silk', False
)
and DEBUG
)
if DJANGO_SILK_ENABLED: # pragma: no cover
@@ -401,8 +412,11 @@ if DJANGO_SILK_ENABLED: # pragma: no cover
# In DEBUG mode, add support for django-querycount
# Ref: https://github.com/bradmontgomery/django-querycount
if DEBUG and get_boolean_setting( # pragma: no cover
'INVENTREE_DEBUG_QUERYCOUNT', 'debug_querycount', False
if (
get_boolean_setting( # pragma: no cover
'INVENTREE_DEBUG_QUERYCOUNT', 'debug_querycount', False
)
and DEBUG
):
MIDDLEWARE.append('querycount.middleware.QueryCountMiddleware')
logger.debug('Running with debug_querycount middleware enabled')
@@ -437,14 +451,14 @@ AUTHENTICATION_BACKENDS = (
# LDAP support
LDAP_AUTH = get_boolean_setting('INVENTREE_LDAP_ENABLED', 'ldap.enabled', False)
if LDAP_AUTH: # pragma: no cover
import django_auth_ldap.config # type: ignore[unresolved-import]
import ldap # type: ignore[unresolved-import]
LDAP_DEBUG = (
get_boolean_setting('INVENTREE_LDAP_DEBUG', 'ldap.debug', False) and LDAP_AUTH
)
if LDAP_AUTH: # pragma: no cover
AUTHENTICATION_BACKENDS.append('django_auth_ldap.backend.LDAPBackend')
# debug mode to troubleshoot configuration
LDAP_DEBUG = get_boolean_setting('INVENTREE_LDAP_DEBUG', 'ldap.debug', False)
if LDAP_DEBUG:
if 'loggers' not in LOGGING:
LOGGING['loggers'] = {}
@@ -453,113 +467,10 @@ if LDAP_AUTH: # pragma: no cover
'handlers': DEFAULT_LOG_HANDLER,
}
# get global options from dict and use ldap.OPT_* as keys and values
global_options_dict = get_setting(
'INVENTREE_LDAP_GLOBAL_OPTIONS',
'ldap.global_options',
default_value=None,
typecast=dict,
)
global_options = {}
for k, v in global_options_dict.items():
# keys are always ldap.OPT_* constants
k_attr = getattr(ldap, k, None)
if not k.startswith('OPT_') or k_attr is None:
print(f"[LDAP] ldap.global_options, key '{k}' not found, skipping...")
continue
# Determine LDAP settings
ldap_settings = ldap.get_ldap_config(debug=LDAP_DEBUG)
globals().update(ldap_settings)
# values can also be other strings, e.g. paths
v_attr = v
if v.startswith('OPT_'):
v_attr = getattr(ldap, v, None)
if v_attr is None:
print(f"[LDAP] ldap.global_options, value key '{v}' not found, skipping...")
continue
global_options[k_attr] = v_attr
AUTH_LDAP_GLOBAL_OPTIONS = global_options
if LDAP_DEBUG:
print('[LDAP] ldap.global_options =', global_options)
AUTH_LDAP_SERVER_URI = get_setting('INVENTREE_LDAP_SERVER_URI', 'ldap.server_uri')
AUTH_LDAP_START_TLS = get_boolean_setting(
'INVENTREE_LDAP_START_TLS', 'ldap.start_tls', False
)
AUTH_LDAP_BIND_DN = get_setting('INVENTREE_LDAP_BIND_DN', 'ldap.bind_dn')
AUTH_LDAP_BIND_PASSWORD = get_setting(
'INVENTREE_LDAP_BIND_PASSWORD', 'ldap.bind_password'
)
AUTH_LDAP_USER_SEARCH = django_auth_ldap.config.LDAPSearch(
get_setting('INVENTREE_LDAP_SEARCH_BASE_DN', 'ldap.search_base_dn'),
ldap.SCOPE_SUBTREE,
str(
get_setting(
'INVENTREE_LDAP_SEARCH_FILTER_STR',
'ldap.search_filter_str',
'(uid= %(user)s)',
)
),
)
AUTH_LDAP_USER_DN_TEMPLATE = get_setting(
'INVENTREE_LDAP_USER_DN_TEMPLATE', 'ldap.user_dn_template'
)
AUTH_LDAP_USER_ATTR_MAP = get_setting(
'INVENTREE_LDAP_USER_ATTR_MAP',
'ldap.user_attr_map',
{'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'},
dict,
)
AUTH_LDAP_ALWAYS_UPDATE_USER = get_boolean_setting(
'INVENTREE_LDAP_ALWAYS_UPDATE_USER', 'ldap.always_update_user', True
)
AUTH_LDAP_CACHE_TIMEOUT = get_setting(
'INVENTREE_LDAP_CACHE_TIMEOUT', 'ldap.cache_timeout', 3600, int
)
AUTH_LDAP_MIRROR_GROUPS = get_boolean_setting(
'INVENTREE_LDAP_MIRROR_GROUPS', 'ldap.mirror_groups', False
)
AUTH_LDAP_GROUP_OBJECT_CLASS = get_setting(
'INVENTREE_LDAP_GROUP_OBJECT_CLASS',
'ldap.group_object_class',
'groupOfUniqueNames',
str,
)
AUTH_LDAP_GROUP_SEARCH = django_auth_ldap.config.LDAPSearch(
get_setting('INVENTREE_LDAP_GROUP_SEARCH', 'ldap.group_search'),
ldap.SCOPE_SUBTREE,
f'(objectClass={AUTH_LDAP_GROUP_OBJECT_CLASS})',
)
AUTH_LDAP_GROUP_TYPE_CLASS = get_setting(
'INVENTREE_LDAP_GROUP_TYPE_CLASS',
'ldap.group_type_class',
'GroupOfUniqueNamesType',
str,
)
AUTH_LDAP_GROUP_TYPE_CLASS_ARGS = get_setting(
'INVENTREE_LDAP_GROUP_TYPE_CLASS_ARGS', 'ldap.group_type_class_args', [], list
)
AUTH_LDAP_GROUP_TYPE_CLASS_KWARGS = get_setting(
'INVENTREE_LDAP_GROUP_TYPE_CLASS_KWARGS',
'ldap.group_type_class_kwargs',
{'name_attr': 'cn'},
dict,
)
AUTH_LDAP_GROUP_TYPE = getattr(django_auth_ldap.config, AUTH_LDAP_GROUP_TYPE_CLASS)(
*AUTH_LDAP_GROUP_TYPE_CLASS_ARGS, **AUTH_LDAP_GROUP_TYPE_CLASS_KWARGS
)
AUTH_LDAP_REQUIRE_GROUP = get_setting(
'INVENTREE_LDAP_REQUIRE_GROUP', 'ldap.require_group'
)
AUTH_LDAP_DENY_GROUP = get_setting('INVENTREE_LDAP_DENY_GROUP', 'ldap.deny_group')
AUTH_LDAP_USER_FLAGS_BY_GROUP = get_setting(
'INVENTREE_LDAP_USER_FLAGS_BY_GROUP',
'ldap.user_flags_by_group',
default_value=None,
typecast=dict,
)
AUTH_LDAP_FIND_GROUP_PERMS = True
# Allow secure http developer server in debug mode
if DEBUG:
@@ -642,107 +553,11 @@ Configure the database backend based on the user-specified values.
logger.debug('Configuring database backend:')
# Extract database configuration from the config.yaml file
db_config = CONFIG.get('database', None) if CONFIG else None
# Load database configuration from the config file and environment variables
database = db_backend.get_db_backend()
DB_ENGINE = database['ENGINE']
if not db_config:
db_config = {}
# Environment variables take preference over config file!
db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']
for key in db_keys:
# First, check the environment variables
env_key = f'INVENTREE_DB_{key}'
env_var = os.environ.get(env_key, None)
if env_var:
# Make use PORT is int
if key == 'PORT':
try:
env_var = int(env_var)
except ValueError:
logger.exception('Invalid number for %s: %s', env_key, env_var)
# Override configuration value
db_config[key] = env_var
# Check that required database configuration options are specified
required_keys = ['ENGINE', 'NAME']
# Ensure all database keys are upper case
db_config = {key.upper(): value for key, value in db_config.items()}
for key in required_keys:
if key not in db_config: # pragma: no cover
error_msg = (
f'Missing required database configuration value `INVENTREE_DB_{key}`'
)
logger.error(error_msg)
print('Error: ' + error_msg)
sys.exit(-1)
"""
Special considerations for the database 'ENGINE' setting.
It can be specified in config.yaml (or envvar) as either (for example):
- sqlite3
- django.db.backends.sqlite3
- django.db.backends.postgresql
"""
DB_ENGINE = db_config['ENGINE'].lower()
# Correct common misspelling
if DB_ENGINE == 'sqlite':
DB_ENGINE = 'sqlite3' # pragma: no cover
if DB_ENGINE in ['sqlite3', 'postgresql', 'mysql']:
# Prepend the required python module string
DB_ENGINE = f'django.db.backends.{DB_ENGINE}'
db_config['ENGINE'] = DB_ENGINE
db_name = db_config['NAME']
db_host = db_config.get('HOST', "''")
if 'sqlite' in DB_ENGINE:
db_name = str(Path(db_name).resolve())
db_config['NAME'] = db_name
logger.info('DB_ENGINE: %s', DB_ENGINE)
logger.info('DB_NAME: %s', db_name)
logger.info('DB_HOST: %s', db_host)
"""
In addition to base-level database configuration, we may wish to specify specific options to the database backend
Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
"""
# 'OPTIONS' or 'options' can be specified in config.yaml
# Set useful sensible timeouts for a transactional webserver to communicate
# with its database server, that is, if the webserver is having issues
# connecting to the database server (such as a replica failover) don't sit and
# wait for possibly an hour or more, just tell the client something went wrong
# and let the client retry when they want to.
db_options = db_config.get('OPTIONS', db_config.get('options'))
if db_options is None:
db_options = {}
# Set database-specific options
db_backend.set_db_options(DB_ENGINE, db_options)
# Provide OPTIONS dict back to the database configuration dict
db_config['OPTIONS'] = db_options
# Set testing options for the database
db_config['TEST'] = {'CHARSET': 'utf8'}
# Set collation option for mysql test database
if 'mysql' in DB_ENGINE:
db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover
DATABASES = {'default': db_config}
DATABASES = {'default': database}
# login settings
REMOTE_LOGIN = get_boolean_setting(
@@ -776,133 +591,28 @@ if SENTRY_ENABLED and SENTRY_DSN and not TESTING: # pragma: no cover
init_sentry(SENTRY_DSN, SENTRY_SAMPLE_RATE, inventree_tags)
# OpenTelemetry tracing
TRACING_ENABLED = get_boolean_setting(
'INVENTREE_TRACING_ENABLED', 'tracing.enabled', False
TRACING_ENABLED = (
get_boolean_setting('INVENTREE_TRACING_ENABLED', 'tracing.enabled', False)
and not isRunningBackup()
)
TRACING_DETAILS: Optional[dict] = None
if TRACING_ENABLED and not isRunningBackup(): # pragma: no cover
from InvenTree.tracing import setup_instruments, setup_tracing
_t_endpoint = get_setting('INVENTREE_TRACING_ENDPOINT', 'tracing.endpoint', None)
_t_headers = get_setting('INVENTREE_TRACING_HEADERS', 'tracing.headers', None, dict)
if _t_headers is None:
_t_headers = {}
if _t_endpoint:
logger.info('OpenTelemetry tracing enabled')
TRACING_DETAILS = (
TRACING_DETAILS
if TRACING_DETAILS
else {
'endpoint': _t_endpoint,
'headers': _t_headers,
'resources_input': {
**{'inventree.env.' + k: v for k, v in inventree_tags.items()},
**get_setting(
'INVENTREE_TRACING_RESOURCES',
'tracing.resources',
default_value=None,
typecast=dict,
),
},
'console': get_boolean_setting(
'INVENTREE_TRACING_CONSOLE', 'tracing.console', False
),
'auth': get_setting(
'INVENTREE_TRACING_AUTH',
'tracing.auth',
default_value=None,
typecast=dict,
),
'is_http': get_setting(
'INVENTREE_TRACING_IS_HTTP', 'tracing.is_http', True
),
'append_http': get_boolean_setting(
'INVENTREE_TRACING_APPEND_HTTP', 'tracing.append_http', True
),
}
)
# Run tracing/logging instrumentation
setup_tracing(**TRACING_DETAILS)
setup_instruments(DB_ENGINE)
else:
logger.warning('OpenTelemetry tracing not enabled because endpoint is not set')
# endregion
TRACING_DETAILS: Optional[dict] = tracing.configure_tracing(
DB_ENGINE, TRACING_ENABLED, inventree_tags
)
# Cache configuration
GLOBAL_CACHE_ENABLED = is_global_cache_enabled()
CACHES = {'default': get_cache_config(GLOBAL_CACHE_ENABLED)}
BACKGROUND_WORKER_TIMEOUT = int(
get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)
# Background task processing with django-q
Q_CLUSTER = worker.get_worker_config(
DB_ENGINE,
global_cache=GLOBAL_CACHE_ENABLED,
sentry_dsn=SENTRY_DSN if SENTRY_ENABLED and SENTRY_DSN else None,
debug=DEBUG,
)
# Set the retry time for background workers to be slightly longer than the worker timeout, to ensure that workers have time to timeout before being retried
BACKGROUND_WORKER_RETRY = max(
int(get_setting('INVENTREE_BACKGROUND_RETRY', 'background.retry', 300)),
BACKGROUND_WORKER_TIMEOUT + 120,
)
# Prevent running multiple background workers if global cache is disabled
# This is to prevent scheduling conflicts due to the lack of a shared cache
BACKGROUND_WORKER_COUNT = (
int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4))
if GLOBAL_CACHE_ENABLED
else 1
)
# If running with SQLite, limit background worker threads to 1 to prevent database locking issues
if 'sqlite' in DB_ENGINE:
BACKGROUND_WORKER_COUNT = 1
BACKGROUND_WORKER_ATTEMPTS = int(
get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)
)
# Check if '--sync' was passed in the command line
if '--sync' in sys.argv and '--noreload' in sys.argv and DEBUG:
SYNC_TASKS = True
else:
SYNC_TASKS = False
# Clean up sys.argv so Django doesn't complain about an unknown argument
if SYNC_TASKS:
sys.argv.remove('--sync')
# django-q background worker configuration
Q_CLUSTER = {
'name': 'InvenTree',
'label': 'Background Tasks',
'workers': BACKGROUND_WORKER_COUNT,
'timeout': BACKGROUND_WORKER_TIMEOUT,
'retry': BACKGROUND_WORKER_RETRY,
'max_attempts': BACKGROUND_WORKER_ATTEMPTS,
'save_limit': 1000,
'queue_limit': 50,
'catch_up': False,
'bulk': 10,
'orm': 'default',
'cache': 'default',
'sync': SYNC_TASKS,
'poll': 1.5,
}
# Configure django-q sentry integration
if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover
Q_CLUSTER['error_reporter'] = {'sentry': {'dsn': SENTRY_DSN}}
if GLOBAL_CACHE_ENABLED: # pragma: no cover
# If using external redis cache, make the cache the broker for Django Q
# as well
Q_CLUSTER['django_redis'] = 'worker'
SILENCED_SYSTEM_CHECKS = ['templates.E003', 'templates.W003']
# Password validation
@@ -976,10 +686,10 @@ INTERNAL_EMAIL_BACKEND = get_setting(
)
# SMTP backend
EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '')
EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host')
EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int)
EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '')
EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '')
EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username')
EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password')
EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False)
EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False)
# Anymail
@@ -1150,16 +860,14 @@ LANGUAGE_COOKIE_SAMESITE = COOKIE_MODE
- Otherwise, use the value specified in the configuration file (or env var)
"""
COOKIE_SECURE = (
False
if DEBUG
else (
SESSION_COOKIE_SAMESITE == 'None'
or get_boolean_setting(
'INVENTREE_SESSION_COOKIE_SECURE', 'cookie.secure', False
)
)
get_boolean_setting('INVENTREE_SESSION_COOKIE_SECURE', 'cookie.secure', False)
or SESSION_COOKIE_SAMESITE == 'None'
)
# Override COOKIE_SECURE value in DEBUG mode
if DEBUG:
COOKIE_SECURE = False
CSRF_COOKIE_SECURE = COOKIE_SECURE
SESSION_COOKIE_SECURE = COOKIE_SECURE
LANGUAGE_COOKIE_SECURE = COOKIE_SECURE
+1 -2
View File
@@ -29,7 +29,6 @@ from maintenance_mode.core import (
from opentelemetry import trace
from common.settings import get_global_setting, set_global_setting
from InvenTree.config import get_setting
from plugin import registry
from .version import isInvenTreeUpToDate
@@ -822,7 +821,7 @@ def check_for_migrations(force: bool = False, reload_registry: bool = True) -> b
set_pending_migrations(n)
# Test if auto-updates are enabled
if not force and not get_setting('INVENTREE_AUTO_UPDATE', 'auto_update'):
if not force and not settings.AUTO_UPDATE:
logger.info('Auto-update is disabled - skipping migrations')
return False
+10 -1
View File
@@ -381,7 +381,16 @@ class ConfigSerializer(serializers.Serializer):
"""Return the configuration data as a dictionary."""
if not isinstance(instance, str):
instance = list(instance.keys())[0]
return {'key': instance, **self.instance.get(instance)}
data = {'key': instance}
for k, v in self.instance.get(instance, {}).items():
if k == 'default_value':
# Skip sensitive default values
continue
data[k] = v
return data
class NotesImageSerializer(InvenTreeModelSerializer):