mirror of
https://github.com/inventree/InvenTree.git
synced 2026-05-07 02:03:50 +00:00
Code structure refactor (#5582)
* moved docker files to /contrib/container * changed code owners to make more precise * updated CI to use new subdirs * added manual trigger for testing * moved ci files * moved assets into subdir * moved deploy template file to contrib * moved django files to src/backend * updated paths in scripts etc * updated reqs path * fixed version file path * fixed flake8 path * fixed path to node ressources * fixed task paths * added dep path for node * removed unused yarn lockfile * removed unused ci script * updated internal backend paths for tasks * updated translation stats path * fixed source path for coverage * fixed main commit repo path * fit in changes from testing * gather packager improvements (#149) * Matmair/issue5578 (#143) * moved docker files to /contrib/container * changed code owners to make more precise * updated CI to use new subdirs * added manual trigger for testing * moved ci files * moved assets into subdir * moved deploy template file to contrib * moved django files to src/backend * updated paths in scripts etc * updated reqs path * fixed version file path * fixed flake8 path * fixed path to node ressources * fixed task paths * added dep path for node * removed unused yarn lockfile * removed unused ci script * updated internal backend paths for tasks * updated translation stats path * fixed source path for coverage * fixed main commit repo path * fix docker path * use project dir * move project dir command * fixed docker paths * another fix? 
* seperate tasks out * remove tasks * some debugging * ci: add .deepsource.toml * Update .deepsource.toml * also ignore migrations * more debugging * fix path issues * remove debug script * fix style * change locale path * Fixed paths for requirements * Added dummy requirements to fool packager * fixed exec path * remove deepsource --------- Co-authored-by: deepsource-io[bot] <42547082+deepsource-io[bot]@users.noreply.github.com> * Added docs for file structure * Fixed style errors * updated deepsource paths * fix deepsource paths * fixed reqs * merge fixes * move newly added dirs too * fix reqs files * another dep fix * merge upstream/master * revert removal of tags * merge upstream * enabled detection of old config files * adapt coverage src * also detect and support old location for plugins.txt * style fix * fix ~/init.sh location * fix requirements path * fix config to current master * move new folders * fix import order * fix paths for qc_check * fix docs build * fix fix path * set docker project dir * just use a cd * set image path? * set file correct * fix copy path * fix tasks dir * fix init path * fix copy path * set prject dir * fix paths * remove old prod files * fix dev env path * set docker file * Fix devcontainer docker compose file * fix login attempt values * fix init.sh path * Fix pathing for Docker * Docker build fix - Set INVENTREE_BACKEND_DIR separately * Update init.sh * Fix path * Update requirements.txt * merge * fix rq merge * fix docker compose usage --------- Co-authored-by: deepsource-io[bot] <42547082+deepsource-io[bot]@users.noreply.github.com> Co-authored-by: Oliver <oliver.henry.walters@gmail.com>
This commit is contained in:
@@ -0,0 +1,4 @@
|
||||
"""The InvenTree module provides high-level management and functionality.
|
||||
|
||||
It provides a number of helper functions and generic classes which are used by InvenTree apps.
|
||||
"""
|
||||
@@ -0,0 +1,113 @@
|
||||
"""Admin classes."""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.db.models.fields import CharField
|
||||
from django.http.request import HttpRequest
|
||||
|
||||
from djmoney.contrib.exchange.admin import RateAdmin
|
||||
from djmoney.contrib.exchange.models import Rate
|
||||
from import_export.exceptions import ImportExportError
|
||||
from import_export.resources import ModelResource
|
||||
|
||||
|
||||
class InvenTreeResource(ModelResource):
    """Custom subclass of the ModelResource class provided by django-import-export.

    Ensures that exported data are escaped to prevent malicious formula injection.
    Ref: https://owasp.org/www-community/attacks/CSV_Injection
    """

    # Maximum number of rows which may be imported in a single dataset
    MAX_IMPORT_ROWS = 1000

    # Maximum number of columns which may be imported in a single dataset
    MAX_IMPORT_COLS = 100

    # List of fields which should be converted to empty strings if they are null.
    # Subclasses may extend this list; further candidate fields are detected
    # automatically in before_import_row.
    CONVERT_NULL_FIELDS = []

    def import_data_inner(
        self,
        dataset,
        dry_run,
        raise_errors,
        using_transactions,
        collect_failed_rows,
        rollback_on_validation_errors=None,
        **kwargs,
    ):
        """Override the default import_data_inner function to provide better error handling.

        Raises:
            ImportExportError: If the dataset exceeds MAX_IMPORT_ROWS or MAX_IMPORT_COLS.
        """
        if len(dataset) > self.MAX_IMPORT_ROWS:
            raise ImportExportError(
                f'Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})'
            )

        if len(dataset.headers) > self.MAX_IMPORT_COLS:
            raise ImportExportError(
                f'Dataset contains too many columns (max {self.MAX_IMPORT_COLS})'
            )

        return super().import_data_inner(
            dataset,
            dry_run,
            raise_errors,
            using_transactions,
            collect_failed_rows,
            rollback_on_validation_errors=rollback_on_validation_errors,
            **kwargs,
        )

    def export_resource(self, obj):
        """Custom function to override default row export behavior.

        Specifically, strip illegal leading characters to prevent formula injection.
        """
        row = super().export_resource(obj)

        # Characters which, at the start of a cell, may be interpreted as a
        # spreadsheet formula (fix: removed a duplicated '@' entry)
        illegal_start_vals = ['@', '=', '+', '-', '\t', '\r', '\n']

        for idx, val in enumerate(row):
            if isinstance(val, str):
                val = val.strip()

                # If the value starts with certain 'suspicious' values, remove it!
                while len(val) > 0 and val[0] in illegal_start_vals:
                    # Remove the first character
                    val = val[1:]

                row[idx] = val

        return row

    def get_fields(self, **kwargs):
        """Return fields, with some common exclusions."""
        fields = super().get_fields(**kwargs)

        # Internal / tree-structure fields which should never be exported
        fields_to_exclude = ['metadata', 'lft', 'rght', 'tree_id', 'level']

        return [f for f in fields if f.column_name not in fields_to_exclude]

    def before_import_row(self, row, row_number=None, **kwargs):
        """Run custom code before importing each row.

        - Convert any null fields to empty strings, for fields which do not support null values
        """
        # Start from any explicitly configured fields, then automatically
        # determine which other fields might need such a conversion.
        # Note: use a local set rather than mutating the shared class-level
        # CONVERT_NULL_FIELDS list (the original code appended to the class
        # attribute, leaking state across instances and imports).
        convert_fields = set(self.CONVERT_NULL_FIELDS)

        for field in self.Meta.model._meta.fields:
            if isinstance(field, CharField) and field.blank and not field.null:
                convert_fields.add(field.name)

        for field in convert_fields:
            if field in row and row[field] is None:
                row[field] = ''
|
||||
|
||||
|
||||
class CustomRateAdmin(RateAdmin):
    """Customized admin interface for exchange-rate (Rate) objects."""

    def has_add_permission(self, request: HttpRequest) -> bool:
        """Prevent Rate objects from being created via the admin interface."""
        # The 'add' permission is always denied for Rate objects
        return False
|
||||
|
||||
|
||||
# Replace the default admin interface for the Rate model with our customized
# version (which disables the 'add' permission)
admin.site.unregister(Rate)
admin.site.register(Rate, CustomRateAdmin)
|
||||
@@ -0,0 +1,500 @@
|
||||
"""Main JSON interface views."""
|
||||
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.http import JsonResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from django_q.models import OrmQ
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import permissions, serializers
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ValidationError
|
||||
from rest_framework.views import APIView
|
||||
|
||||
import InvenTree.version
|
||||
import users.models
|
||||
from InvenTree.filters import SEARCH_ORDER_FILTER
|
||||
from InvenTree.mixins import ListCreateAPI
|
||||
from InvenTree.permissions import RolePermission
|
||||
from InvenTree.templatetags.inventree_extras import plugins_info
|
||||
from part.models import Part
|
||||
from plugin.serializers import MetadataSerializer
|
||||
from users.models import ApiToken
|
||||
|
||||
from .email import is_email_configured
|
||||
from .mixins import ListAPI, RetrieveUpdateAPI
|
||||
from .status import check_system_health, is_worker_running
|
||||
from .version import inventreeApiText
|
||||
from .views import AjaxView
|
||||
|
||||
|
||||
class VersionViewSerializer(serializers.Serializer):
    """Serializer describing the response of the VersionView endpoint.

    Contains development / up-to-date flags, detailed server version
    information, and a set of related project links.
    """

    class VersionSerializer(serializers.Serializer):
        """Serializer for detailed server version information."""

        server = serializers.CharField()
        api = serializers.IntegerField()
        commit_hash = serializers.CharField()
        commit_date = serializers.CharField()
        commit_branch = serializers.CharField()
        python = serializers.CharField()
        django = serializers.CharField()

    class LinkSerializer(serializers.Serializer):
        """Serializer for all possible links."""

        doc = serializers.URLField()
        code = serializers.URLField()
        credit = serializers.URLField()
        app = serializers.URLField()
        bug = serializers.URLField()

    dev = serializers.BooleanField()
    up_to_date = serializers.BooleanField()
    version = VersionSerializer()
    links = LinkSerializer()
|
||||
|
||||
|
||||
class VersionView(APIView):
    """Simple JSON endpoint for InvenTree version information."""

    # Version details are restricted to admin users
    permission_classes = [permissions.IsAdminUser]

    @extend_schema(responses={200: OpenApiResponse(response=VersionViewSerializer)})
    def get(self, request, *args, **kwargs):
        """Return information about the InvenTree server."""
        # Detailed version information for the running server instance
        version_info = {
            'server': InvenTree.version.inventreeVersion(),
            'api': InvenTree.version.inventreeApiVersion(),
            'commit_hash': InvenTree.version.inventreeCommitHash(),
            'commit_date': InvenTree.version.inventreeCommitDate(),
            'commit_branch': InvenTree.version.inventreeBranch(),
            'python': InvenTree.version.inventreePythonVersion(),
            'django': InvenTree.version.inventreeDjangoVersion(),
        }

        # External links relevant to this InvenTree instance
        link_info = {
            'doc': InvenTree.version.inventreeDocUrl(),
            'code': InvenTree.version.inventreeGithubUrl(),
            'credit': InvenTree.version.inventreeCreditsUrl(),
            'app': InvenTree.version.inventreeAppUrl(),
            'bug': f'{InvenTree.version.inventreeGithubUrl()}issues',
        }

        return JsonResponse({
            'dev': InvenTree.version.isInvenTreeDevelopmentVersion(),
            'up_to_date': InvenTree.version.isInvenTreeUpToDate(),
            'version': version_info,
            'links': link_info,
        })
|
||||
|
||||
|
||||
class VersionInformationSerializer(serializers.Serializer):
    """Serializer for a single version entry (as returned by the VersionTextView endpoint)."""

    version = serializers.CharField()
    date = serializers.CharField()
    gh = serializers.CharField()
    text = serializers.CharField()
    latest = serializers.BooleanField()

    class Meta:
        """Meta class for VersionInformationSerializer."""

        # NOTE(review): 'fields' is normally consumed by ModelSerializer, not a
        # plain Serializer - presumably retained here for schema generation; confirm.
        fields = '__all__'
|
||||
|
||||
|
||||
class VersionApiSerializer(serializers.Serializer):
    """Serializer for the version api endpoint."""

    # NOTE(review): this expression is evaluated but never assigned to a field,
    # so it contributes nothing to the serializer at runtime. Presumably it was
    # intended to declare that the endpoint returns a list of
    # VersionInformationSerializer records - confirm, and bind it to a named
    # field if so.
    VersionInformationSerializer(many=True)
|
||||
|
||||
|
||||
class VersionTextView(ListAPI):
    """Simple JSON endpoint for InvenTree version text."""

    serializer_class = VersionInformationSerializer

    # Restricted to admin users
    permission_classes = [permissions.IsAdminUser]

    @extend_schema(responses={200: OpenApiResponse(response=VersionApiSerializer)})
    def list(self, request, *args, **kwargs):
        """Return version history information about the InvenTree server."""
        version_text = inventreeApiText()

        return JsonResponse(version_text)
|
||||
|
||||
|
||||
class InfoView(AjaxView):
    """Simple JSON endpoint for InvenTree information.

    Use to confirm that the server is running, etc.
    """

    # This endpoint is accessible without authentication
    permission_classes = [permissions.AllowAny]

    def worker_pending_tasks(self):
        """Return the current number of outstanding background tasks."""
        return OrmQ.objects.count()

    def get(self, request, *args, **kwargs):
        """Serve current server information."""
        staff_user = request.user.is_staff

        if request.user.is_anonymous and not staff_user:
            # Might be Token auth - check if so
            staff_user = self.check_auth_header(request)

        data = {
            'server': 'InvenTree',
            'version': InvenTree.version.inventreeVersion(),
            'instance': InvenTree.version.inventreeInstanceName(),
            'apiVersion': InvenTree.version.inventreeApiVersion(),
            'worker_running': is_worker_running(),
            'worker_pending_tasks': self.worker_pending_tasks(),
            'plugins_enabled': settings.PLUGINS_ENABLED,
            'plugins_install_disabled': settings.PLUGINS_INSTALL_DISABLED,
            'active_plugins': plugins_info(),
            'email_configured': is_email_configured(),
            'debug_mode': settings.DEBUG,
            'docker_mode': settings.DOCKER,
            'default_locale': settings.LANGUAGE_CODE,
            # Following fields are only available to staff users
            'system_health': check_system_health() if staff_user else None,
            'database': InvenTree.version.inventreeDatabase() if staff_user else None,
            'platform': InvenTree.version.inventreePlatform() if staff_user else None,
            'installer': InvenTree.version.inventreeInstaller() if staff_user else None,
            'target': InvenTree.version.inventreeTarget() if staff_user else None,
        }

        return JsonResponse(data)

    def check_auth_header(self, request):
        """Check if user is authenticated via a token in the header.

        Returns True only if the token matches an active staff user.
        """
        from InvenTree.middleware import get_token_from_request

        key = get_token_from_request(request)

        if not key:
            return False

        # Does the provided token match a valid user?
        try:
            token = ApiToken.objects.get(key=key)
        except ApiToken.DoesNotExist:
            return False

        # Check if the token is active and the user is a staff member
        return bool(token.active and token.user and token.user.is_staff)
|
||||
|
||||
|
||||
class NotFoundView(AjaxView):
    """Simple JSON view when accessing an invalid API view."""

    permission_classes = [permissions.AllowAny]

    def not_found(self, request):
        """Construct a JSON 404 response for any request."""
        payload = {
            'detail': _('API endpoint not found'),
            'url': request.build_absolute_uri(),
        }

        return JsonResponse(payload, status=404)

    # Every HTTP verb receives the same 404 response

    def options(self, request, *args, **kwargs):
        """Return 404 for an OPTIONS request."""
        return self.not_found(request)

    def get(self, request, *args, **kwargs):
        """Return 404 for a GET request."""
        return self.not_found(request)

    def post(self, request, *args, **kwargs):
        """Return 404 for a POST request."""
        return self.not_found(request)

    def patch(self, request, *args, **kwargs):
        """Return 404 for a PATCH request."""
        return self.not_found(request)

    def put(self, request, *args, **kwargs):
        """Return 404 for a PUT request."""
        return self.not_found(request)

    def delete(self, request, *args, **kwargs):
        """Return 404 for a DELETE request."""
        return self.not_found(request)
|
||||
|
||||
|
||||
class BulkDeleteMixin:
    """Mixin class for enabling 'bulk delete' operations for various models.

    Bulk delete allows for multiple items to be deleted in a single API query,
    rather than using multiple API calls to the various detail endpoints.

    This is implemented for two major reasons:
    - Atomicity (guaranteed that either *all* items are deleted, or *none*)
    - Speed (single API call and DB query)
    """

    def filter_delete_queryset(self, queryset, request):
        """Provide custom filtering for the queryset *before* it is deleted.

        Subclasses may override this to restrict which items can be bulk-deleted.
        """
        return queryset

    @staticmethod
    def _extract_request_value(request, key):
        """Extract a named value from the request body.

        Handles both QueryDict-style bodies (form data, which provide getlist)
        and plain dict bodies (JSON payloads).
        """
        try:
            return request.data.getlist(key, None)
        except AttributeError:
            return request.data.get(key, None)

    def delete(self, request, *args, **kwargs):
        """Perform a DELETE operation against this list endpoint.

        We expect a list of primary-key (ID) values to be supplied as a JSON object, e.g.
        {
            items: [4, 8, 15, 16, 23, 42]
        }

        Raises:
            ValidationError: If neither 'items' nor 'filters' is provided,
                or if either is of the wrong type.
        """
        model = self.serializer_class.Meta.model

        # Extract the items and filters from the request body
        items = self._extract_request_value(request, 'items')
        filters = self._extract_request_value(request, 'filters')

        if not items and not filters:
            raise ValidationError({
                'non_field_errors': [
                    'List of items or filters must be provided for bulk deletion'
                ]
            })

        # Use isinstance (idiomatic) rather than exact type comparison
        if items and not isinstance(items, list):
            raise ValidationError({
                'items': ["'items' must be supplied as a list object"]
            })

        if filters and not isinstance(filters, dict):
            raise ValidationError({
                'filters': ["'filters' must be supplied as a dict object"]
            })

        # Keep track of how many items we deleted
        n_deleted = 0

        # Wrap the entire operation in a transaction, to guarantee atomicity
        with transaction.atomic():
            # Start with *all* models and perform basic filtering
            queryset = model.objects.all()
            queryset = self.filter_delete_queryset(queryset, request)

            # Filter by provided item ID values
            if items:
                queryset = queryset.filter(id__in=items)

            # Filter by provided filters
            if filters:
                queryset = queryset.filter(**filters)

            n_deleted = queryset.count()
            queryset.delete()

        return Response({'success': f'Deleted {n_deleted} items'}, status=204)
|
||||
|
||||
|
||||
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
    """API endpoint combining List and Create operations with BulkDelete support."""
|
||||
|
||||
|
||||
class APIDownloadMixin:
    """Mixin which allows the data from a LIST endpoint to be downloaded as a file.

    To download the data, add the ?export=<fmt> to the query string.

    The implementing class must provided a download_queryset method,
    e.g.

    def download_queryset(self, queryset, export_format):
        dataset = StockItemResource().export(queryset=queryset)

        filedata = dataset.export(export_format)

        filename = 'InvenTree_Stocktake_{date}.{fmt}'.format(
            date=datetime.now().strftime("%d-%b-%Y"),
            fmt=export_format
        )

        return DownloadFile(filedata, filename)
    """

    def get(self, request, *args, **kwargs):
        """Generic handler for a download request."""
        fmt = request.query_params.get('export', None)

        # Note: a None or empty value is never in the supported list
        if fmt in ['csv', 'tsv', 'xls', 'xlsx']:
            filtered = self.filter_queryset(self.get_queryset())
            return self.download_queryset(filtered, fmt)

        # Default to the parent class implementation
        return super().get(request, *args, **kwargs)

    def download_queryset(self, queryset, export_format):
        """This function must be implemented to provide a downloadFile request."""
        raise NotImplementedError('download_queryset method not implemented!')
|
||||
|
||||
|
||||
class AttachmentMixin:
    """Mixin for creating attachment objects, and ensuring the user information is saved correctly."""

    permission_classes = [permissions.IsAuthenticated, RolePermission]

    filter_backends = SEARCH_ORDER_FILTER

    search_fields = ['attachment', 'comment', 'link']

    def perform_create(self, serializer):
        """Record the uploading user against a newly created attachment."""
        # Save first, then associate the requesting user and re-save
        new_attachment = serializer.save()
        new_attachment.user = self.request.user
        new_attachment.save()
|
||||
|
||||
|
||||
class APISearchViewSerializer(serializers.Serializer):
    """Serializer for the APISearchView.

    Declares the pass-through search parameters accepted by the search endpoint
    (these names mirror the pass_through_params used in APISearchView.post).
    """

    search = serializers.CharField()
    search_regex = serializers.BooleanField(default=False, required=False)
    search_whole = serializers.BooleanField(default=False, required=False)
    limit = serializers.IntegerField(default=1, required=False)
    offset = serializers.IntegerField(default=0, required=False)
|
||||
|
||||
|
||||
class APISearchView(GenericAPIView):
    """A general-purpose 'search' API endpoint.

    Returns hits against a number of different models simultaneously,
    to consolidate multiple API requests into a single query.

    Is much more efficient and simplifies code!
    """

    permission_classes = [permissions.IsAuthenticated]
    serializer_class = APISearchViewSerializer

    def get_result_types(self):
        """Construct a list of search types we can return."""
        # Local imports to avoid circular dependencies at module load time
        import build.api
        import company.api
        import order.api
        import part.api
        import stock.api

        return {
            'build': build.api.BuildList,
            'company': company.api.CompanyList,
            'manufacturerpart': company.api.ManufacturerPartList,
            'supplierpart': company.api.SupplierPartList,
            'part': part.api.PartList,
            'partcategory': part.api.CategoryList,
            'purchaseorder': order.api.PurchaseOrderList,
            'returnorder': order.api.ReturnOrderList,
            'salesorder': order.api.SalesOrderList,
            'stockitem': stock.api.StockList,
            'stocklocation': stock.api.StockLocationList,
        }

    def post(self, request, *args, **kwargs):
        """Perform search query against available models.

        Raises:
            ValidationError: If no search term is provided.
        """
        data = request.data

        results = {}

        # These parameters are passed through to the individual queries, with optional default values
        pass_through_params = {
            'search': '',
            'search_regex': False,
            'search_whole': False,
            'limit': 1,
            'offset': 0,
        }

        if 'search' not in data:
            raise ValidationError({'search': 'Search term must be provided'})

        for key, cls in self.get_result_types().items():
            # Only return results which are specifically requested
            if key in data:
                params = data[key]

                # Ignore if the params are wrong.
                # BUG FIX: this check must run *before* params is mutated below;
                # previously a non-dict value (e.g. a string) raised an
                # uncaught TypeError on item assignment instead of being skipped.
                if type(params) is not dict:
                    continue

                for k, v in pass_through_params.items():
                    params[k] = request.data.get(k, v)

                # Enforce json encoding
                params['format'] = 'json'

                view = cls()

                # Override regular query params with specific ones for this search request
                request._request.GET = params
                view.request = request
                view.format_kwarg = 'format'

                # Check permissions and update results dict with particular query
                model = view.serializer_class.Meta.model
                app_label = model._meta.app_label
                model_name = model._meta.model_name
                table = f'{app_label}_{model_name}'

                try:
                    if users.models.RuleSet.check_table_permission(
                        request.user, table, 'view'
                    ):
                        results[key] = view.list(request, *args, **kwargs).data
                    else:
                        results[key] = {
                            'error': _(
                                'User does not have permission to view this model'
                            )
                        }
                except Exception as exc:
                    # Best-effort: report the error for this model, continue with others
                    results[key] = {'error': str(exc)}

        return Response(results)
|
||||
|
||||
|
||||
class MetadataView(RetrieveUpdateAPI):
    """Generic API endpoint for reading and editing metadata for a model."""

    # Name of the view kwarg which carries the target model class
    MODEL_REF = 'model'

    def get_model_type(self):
        """Return the model type associated with this API instance.

        Raises:
            ValidationError: If the view was constructed without a model reference.
        """
        model = self.kwargs.get(self.MODEL_REF)

        if model is None:
            raise ValidationError(
                f"MetadataView called without '{self.MODEL_REF}' parameter"
            )

        return model

    def get_permission_model(self):
        """Return the 'permission' model associated with this view."""
        return self.get_model_type()

    def get_queryset(self):
        """Return the queryset for this endpoint."""
        model = self.get_model_type()

        return model.objects.all()

    def get_serializer(self, *args, **kwargs):
        """Return a MetadataSerializer instance bound to the relevant model."""
        # Detect if we are currently generating the OpenAPI schema;
        # if so, fall back to a concrete model class
        if 'spectacular' in sys.argv:
            return MetadataSerializer(Part, *args, **kwargs)

        return MetadataSerializer(self.get_model_type(), *args, **kwargs)
|
||||
@@ -0,0 +1,667 @@
|
||||
"""InvenTree API version information."""
|
||||
|
||||
# InvenTree API version
|
||||
INVENTREE_API_VERSION = 185
|
||||
"""Increment this API version number whenever there is a significant change to the API that any clients need to know about."""
|
||||
|
||||
INVENTREE_API_TEXT = """
|
||||
|
||||
v185 - 2024-03-24 : https://github.com/inventree/InvenTree/pull/6836
|
||||
- Remove /plugin/activate endpoint
|
||||
- Update docstrings and typing for various API endpoints (no functional changes)
|
||||
|
||||
v184 - 2024-03-17 : https://github.com/inventree/InvenTree/pull/10464
|
||||
- Add additional fields for tests (start/end datetime, test station)
|
||||
|
||||
v183 - 2024-03-14 : https://github.com/inventree/InvenTree/pull/5972
|
||||
- Adds "category_default_location" annotated field to part serializer
|
||||
- Adds "part_detail.category_default_location" annotated field to stock item serializer
|
||||
- Adds "part_detail.category_default_location" annotated field to purchase order line serializer
|
||||
- Adds "parent_default_location" annotated field to category serializer
|
||||
|
||||
v182 - 2024-03-13 : https://github.com/inventree/InvenTree/pull/6714
|
||||
- Expose ReportSnippet model to the /report/snippet/ API endpoint
|
||||
- Expose ReportAsset model to the /report/asset/ API endpoint
|
||||
|
||||
v181 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6541
|
||||
- Adds "width" and "height" fields to the LabelTemplate API endpoint
|
||||
- Adds "page_size" and "landscape" fields to the ReportTemplate API endpoint
|
||||
|
||||
v180 - 2024-03-02 : https://github.com/inventree/InvenTree/pull/6463
|
||||
- Tweaks to API documentation to allow automatic documentation generation
|
||||
|
||||
v179 - 2024-03-01 : https://github.com/inventree/InvenTree/pull/6605
|
||||
- Adds "subcategories" count to PartCategory serializer
|
||||
- Adds "sublocations" count to StockLocation serializer
|
||||
- Adds "image" field to PartBrief serializer
|
||||
- Adds "image" field to CompanyBrief serializer
|
||||
|
||||
v178 - 2024-02-29 : https://github.com/inventree/InvenTree/pull/6604
|
||||
- Adds "external_stock" field to the Part API endpoint
|
||||
- Adds "external_stock" field to the BomItem API endpoint
|
||||
- Adds "external_stock" field to the BuildLine API endpoint
|
||||
- Stock quantities represented in the BuildLine API endpoint are now filtered by Build.source_location
|
||||
|
||||
v177 - 2024-02-27 : https://github.com/inventree/InvenTree/pull/6581
|
||||
- Adds "subcategories" count to PartCategoryTree serializer
|
||||
- Adds "sublocations" count to StockLocationTree serializer
|
||||
|
||||
v176 - 2024-02-26 : https://github.com/inventree/InvenTree/pull/6535
|
||||
- Adds the field "plugins_install_disabled" to the Server info API endpoint
|
||||
|
||||
v175 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6538
|
||||
- Adds "parts" count to PartParameterTemplate serializer
|
||||
|
||||
v174 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6536
|
||||
- Expose PartCategory filters to the API documentation
|
||||
- Expose StockLocation filters to the API documentation
|
||||
|
||||
v173 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6483
|
||||
- Adds "merge_items" to the PurchaseOrderLine create API endpoint
|
||||
- Adds "auto_pricing" to the PurchaseOrderLine create/update API endpoint
|
||||
|
||||
v172 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6526
|
||||
- Adds "enabled" field to the PartTestTemplate API endpoint
|
||||
- Adds "enabled" filter to the PartTestTemplate list
|
||||
- Adds "enabled" filter to the StockItemTestResult list
|
||||
|
||||
v171 - 2024-02-19 : https://github.com/inventree/InvenTree/pull/6516
|
||||
- Adds "key" as a filterable parameter to PartTestTemplate list endpoint
|
||||
|
||||
v170 -> 2024-02-19 : https://github.com/inventree/InvenTree/pull/6514
|
||||
- Adds "has_results" filter to the PartTestTemplate list endpoint
|
||||
|
||||
v169 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/6430
|
||||
- Adds 'key' field to PartTestTemplate API endpoint
|
||||
- Adds annotated 'results' field to PartTestTemplate API endpoint
|
||||
- Adds 'template' field to StockItemTestResult API endpoint
|
||||
|
||||
v168 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/4824
|
||||
- Adds machine CRUD API endpoints
|
||||
- Adds machine settings API endpoints
|
||||
- Adds machine restart API endpoint
|
||||
- Adds machine types/drivers list API endpoints
|
||||
- Adds machine registry status API endpoint
|
||||
- Adds 'required' field to the global Settings API
|
||||
- Discover sub-sub classes of the StatusCode API
|
||||
|
||||
v167 -> 2024-02-07: https://github.com/inventree/InvenTree/pull/6440
|
||||
- Fixes for OpenAPI schema generation
|
||||
|
||||
v166 -> 2024-02-04 : https://github.com/inventree/InvenTree/pull/6400
|
||||
- Adds package_name to plugin API
|
||||
- Adds mechanism for uninstalling plugins via the API
|
||||
|
||||
v165 -> 2024-01-28 : https://github.com/inventree/InvenTree/pull/6040
|
||||
- Adds supplier_part.name, part.creation_user, part.required_for_sales_order
|
||||
|
||||
v164 -> 2024-01-24 : https://github.com/inventree/InvenTree/pull/6343
|
||||
- Adds "building" quantity to BuildLine API serializer
|
||||
|
||||
v163 -> 2024-01-22 : https://github.com/inventree/InvenTree/pull/6314
|
||||
- Extends API endpoint to expose auth configuration information for signin pages
|
||||
|
||||
v162 -> 2024-01-14 : https://github.com/inventree/InvenTree/pull/6230
|
||||
- Adds API endpoints to provide information on background tasks
|
||||
|
||||
v161 -> 2024-01-13 : https://github.com/inventree/InvenTree/pull/6222
|
||||
- Adds API endpoint for system error information
|
||||
|
||||
v160 -> 2023-12-11 : https://github.com/inventree/InvenTree/pull/6072
|
||||
- Adds API endpoint for allocating stock items against a sales order via barcode scan
|
||||
|
||||
v159 -> 2023-12-08 : https://github.com/inventree/InvenTree/pull/6056
|
||||
- Adds API endpoint for reloading plugin registry
|
||||
|
||||
v158 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5953
|
||||
- Adds API endpoint for listing all settings of a particular plugin
|
||||
- Adds API endpoint for registry status (errors)
|
||||
|
||||
v157 -> 2023-12-02 : https://github.com/inventree/InvenTree/pull/6021
|
||||
- Add write-only "existing_image" field to Part API serializer
|
||||
|
||||
v156 -> 2023-11-26 : https://github.com/inventree/InvenTree/pull/5982
|
||||
- Add POST endpoint for report and label creation
|
||||
|
||||
v155 -> 2023-11-24 : https://github.com/inventree/InvenTree/pull/5979
|
||||
- Add "creation_date" field to Part instance serializer
|
||||
|
||||
v154 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5944
|
||||
- Adds "responsible" field to the ProjectCode table
|
||||
|
||||
v153 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5956
|
||||
- Adds override_min and override_max fields to part pricing API
|
||||
|
||||
v152 -> 2023-11-20 : https://github.com/inventree/InvenTree/pull/5949
|
||||
- Adds barcode support for manufacturerpart model
|
||||
- Adds API endpoint for adding parts to purchase order using barcode scan
|
||||
|
||||
v151 -> 2023-11-13 : https://github.com/inventree/InvenTree/pull/5906
|
||||
- Allow user list API to be filtered by user active status
|
||||
- Allow owner list API to be filtered by user active status
|
||||
|
||||
v150 -> 2023-11-07: https://github.com/inventree/InvenTree/pull/5875
|
||||
- Extended user API endpoints to enable ordering
|
||||
- Extended user API endpoints to enable user role changes
|
||||
- Added endpoint to create a new user
|
||||
|
||||
v149 -> 2023-11-07 : https://github.com/inventree/InvenTree/pull/5876
|
||||
- Add 'building' quantity to BomItem serializer
|
||||
- Add extra ordering options for the BomItem list API
|
||||
|
||||
v148 -> 2023-11-06 : https://github.com/inventree/InvenTree/pull/5872
|
||||
- Allow "quantity" to be specified when installing an item into another item
|
||||
|
||||
v147 -> 2023-11-04: https://github.com/inventree/InvenTree/pull/5860
|
||||
- Adds "completed_lines" field to SalesOrder API endpoint
|
||||
- Adds "completed_lines" field to PurchaseOrder API endpoint
|
||||
|
||||
v146 -> 2023-11-02: https://github.com/inventree/InvenTree/pull/5822
|
||||
- Extended SSO Provider endpoint to contain if a provider is configured
|
||||
- Adds API endpoints for Email Address model
|
||||
|
||||
v145 -> 2023-10-30: https://github.com/inventree/InvenTree/pull/5786
|
||||
- Allow printing labels via POST including printing options in the body
|
||||
|
||||
v144 -> 2023-10-23: https://github.com/inventree/InvenTree/pull/5811
|
||||
- Adds version information API endpoint
|
||||
|
||||
v143 -> 2023-10-29: https://github.com/inventree/InvenTree/pull/5810
|
||||
- Extends the status endpoint to include information about system status and health
|
||||
|
||||
v142 -> 2023-10-20: https://github.com/inventree/InvenTree/pull/5759
|
||||
- Adds generic API endpoints for looking up status models
|
||||
|
||||
v141 -> 2023-10-23 : https://github.com/inventree/InvenTree/pull/5774
|
||||
- Changed 'part.responsible' from User to Owner
|
||||
|
||||
v140 -> 2023-10-20 : https://github.com/inventree/InvenTree/pull/5664
|
||||
- Expand API token functionality
|
||||
- Multiple API tokens can be generated per user
|
||||
|
||||
v139 -> 2023-10-11 : https://github.com/inventree/InvenTree/pull/5509
|
||||
- Add new BarcodePOReceive endpoint to receive line items by scanning supplier barcodes
|
||||
|
||||
v138 -> 2023-10-11 : https://github.com/inventree/InvenTree/pull/5679
|
||||
- Settings keys are no longer case sensitive
|
||||
- Include settings units in API serializer
|
||||
|
||||
v137 -> 2023-10-04 : https://github.com/inventree/InvenTree/pull/5588
|
||||
- Adds StockLocationType API endpoints
|
||||
- Adds custom_icon, location_type to StockLocation endpoint
|
||||
|
||||
v136 -> 2023-09-23 : https://github.com/inventree/InvenTree/pull/5595
|
||||
- Adds structural to StockLocation and PartCategory tree endpoints
|
||||
|
||||
v135 -> 2023-09-19 : https://github.com/inventree/InvenTree/pull/5569
|
||||
- Adds location path detail to StockLocation and StockItem API endpoints
|
||||
- Adds category path detail to PartCategory and Part API endpoints
|
||||
|
||||
v134 -> 2023-09-11 : https://github.com/inventree/InvenTree/pull/5525
|
||||
- Allow "Attachment" list endpoints to be searched by attachment, link and comment fields
|
||||
|
||||
v133 -> 2023-09-08 : https://github.com/inventree/InvenTree/pull/5518
|
||||
- Add extra optional fields which can be used for StockAdjustment endpoints
|
||||
|
||||
v132 -> 2023-09-07 : https://github.com/inventree/InvenTree/pull/5515
|
||||
- Add 'issued_by' filter to BuildOrder API list endpoint
|
||||
|
||||
v131 -> 2023-08-09 : https://github.com/inventree/InvenTree/pull/5415
|
||||
- Annotate 'available_variant_stock' to the SalesOrderLine serializer
|
||||
|
||||
v130 -> 2023-07-14 : https://github.com/inventree/InvenTree/pull/5251
|
||||
- Refactor label printing interface
|
||||
|
||||
v129 -> 2023-07-06 : https://github.com/inventree/InvenTree/pull/5189
|
||||
- Changes 'serial_lte' and 'serial_gte' stock filters to point to 'serial_int' field
|
||||
|
||||
v128 -> 2023-07-06 : https://github.com/inventree/InvenTree/pull/5186
|
||||
- Adds 'available' filter for BuildLine API endpoint
|
||||
|
||||
v127 -> 2023-06-24 : https://github.com/inventree/InvenTree/pull/5094
|
||||
- Enhancements for the PartParameter API endpoints
|
||||
|
||||
v126 -> 2023-06-19 : https://github.com/inventree/InvenTree/pull/5075
|
||||
- Adds API endpoint for setting the "category" for multiple parts simultaneously
|
||||
|
||||
v125 -> 2023-06-17 : https://github.com/inventree/InvenTree/pull/5064
|
||||
- Adds API endpoint for setting the "status" field for multiple stock items simultaneously
|
||||
|
||||
v124 -> 2023-06-17 : https://github.com/inventree/InvenTree/pull/5057
|
||||
- Add "created_before" and "created_after" filters to the Part API
|
||||
|
||||
v123 -> 2023-06-15 : https://github.com/inventree/InvenTree/pull/5019
|
||||
- Add Metadata to: Plugin Config
|
||||
|
||||
v122 -> 2023-06-14 : https://github.com/inventree/InvenTree/pull/5034
|
||||
- Adds new BuildLineLabel label type
|
||||
|
||||
v121 -> 2023-06-14 : https://github.com/inventree/InvenTree/pull/4808
|
||||
- Adds "ProjectCode" link to Build model
|
||||
|
||||
v120 -> 2023-06-07 : https://github.com/inventree/InvenTree/pull/4855
|
||||
- Major overhaul of the build order API
|
||||
- Adds new BuildLine model
|
||||
|
||||
v119 -> 2023-06-01 : https://github.com/inventree/InvenTree/pull/4898
|
||||
- Add Metadata to: Part test templates, Part parameters, Part category parameter templates, BOM item substitute, Related Parts, Stock item test result
|
||||
|
||||
v118 -> 2023-06-01 : https://github.com/inventree/InvenTree/pull/4935
|
||||
- Adds extra fields for the PartParameterTemplate model
|
||||
|
||||
v117 -> 2023-05-22 : https://github.com/inventree/InvenTree/pull/4854
|
||||
- Part.units model now supports physical units (e.g. "kg", "m", "mm", etc)
|
||||
- Replaces SupplierPart "pack_size" field with "pack_quantity"
|
||||
- New field supports physical units, and allows for conversion between compatible units
|
||||
|
||||
v116 -> 2023-05-18 : https://github.com/inventree/InvenTree/pull/4823
|
||||
- Updates to part parameter implementation, to use physical units
|
||||
|
||||
v115 -> 2023-05-18 : https://github.com/inventree/InvenTree/pull/4846
|
||||
- Adds ability to partially scrap a build output
|
||||
|
||||
v114 -> 2023-05-16 : https://github.com/inventree/InvenTree/pull/4825
|
||||
- Adds "delivery_date" to shipments
|
||||
|
||||
v113 -> 2023-05-13 : https://github.com/inventree/InvenTree/pull/4800
|
||||
- Adds API endpoints for scrapping a build output
|
||||
|
||||
v112 -> 2023-05-13: https://github.com/inventree/InvenTree/pull/4741
|
||||
- Adds flag use_pack_size to the stock addition API, which allows adding packs
|
||||
|
||||
v111 -> 2023-05-02 : https://github.com/inventree/InvenTree/pull/4367
|
||||
- Adds tags to the Part serializer
|
||||
- Adds tags to the SupplierPart serializer
|
||||
- Adds tags to the ManufacturerPart serializer
|
||||
- Adds tags to the StockItem serializer
|
||||
- Adds tags to the StockLocation serializer
|
||||
|
||||
v110 -> 2023-04-26 : https://github.com/inventree/InvenTree/pull/4698
|
||||
- Adds 'order_currency' field for PurchaseOrder / SalesOrder endpoints
|
||||
|
||||
v109 -> 2023-04-19 : https://github.com/inventree/InvenTree/pull/4636
|
||||
- Adds API endpoints for the "ProjectCode" model
|
||||
|
||||
v108 -> 2023-04-17 : https://github.com/inventree/InvenTree/pull/4615
|
||||
- Adds functionality to upload images for rendering in markdown notes
|
||||
|
||||
v107 -> 2023-04-04 : https://github.com/inventree/InvenTree/pull/4575
|
||||
- Adds barcode support for PurchaseOrder model
|
||||
- Adds barcode support for ReturnOrder model
|
||||
- Adds barcode support for SalesOrder model
|
||||
- Adds barcode support for BuildOrder model
|
||||
|
||||
v106 -> 2023-04-03 : https://github.com/inventree/InvenTree/pull/4566
|
||||
- Adds 'search_regex' parameter to all searchable API endpoints
|
||||
|
||||
v105 -> 2023-03-31 : https://github.com/inventree/InvenTree/pull/4543
|
||||
- Adds API endpoints for status label information on various models
|
||||
|
||||
v104 -> 2023-03-23 : https://github.com/inventree/InvenTree/pull/4488
|
||||
- Adds various endpoints for new "ReturnOrder" models
|
||||
- Adds various endpoints for new "ReturnOrderReport" templates
|
||||
- Exposes API endpoints for "Contact" model
|
||||
|
||||
v103 -> 2023-03-17 : https://github.com/inventree/InvenTree/pull/4410
|
||||
- Add metadata to several more models
|
||||
|
||||
v102 -> 2023-03-18 : https://github.com/inventree/InvenTree/pull/4505
|
||||
- Adds global search API endpoint for consolidated search results
|
||||
|
||||
v101 -> 2023-03-07 : https://github.com/inventree/InvenTree/pull/4462
|
||||
- Adds 'total_in_stock' to Part serializer, and supports API ordering
|
||||
|
||||
v100 -> 2023-03-04 : https://github.com/inventree/InvenTree/pull/4452
|
||||
- Adds bulk delete of PurchaseOrderLineItems to API
|
||||
|
||||
v99 -> 2023-03-03 : https://github.com/inventree/InvenTree/pull/4445
|
||||
- Adds sort by "responsible" to PurchaseOrderAPI
|
||||
|
||||
v98 -> 2023-02-24 : https://github.com/inventree/InvenTree/pull/4408
|
||||
- Adds "responsible" filter to Build API
|
||||
|
||||
v97 -> 2023-02-20 : https://github.com/inventree/InvenTree/pull/4377
|
||||
- Adds "external" attribute to StockLocation model
|
||||
|
||||
v96 -> 2023-02-16 : https://github.com/inventree/InvenTree/pull/4345
|
||||
- Adds stocktake report generation functionality
|
||||
|
||||
v95 -> 2023-02-16 : https://github.com/inventree/InvenTree/pull/4346
|
||||
- Adds "CompanyAttachment" model (and associated API endpoints)
|
||||
|
||||
v94 -> 2023-02-10 : https://github.com/inventree/InvenTree/pull/4327
|
||||
- Adds API endpoints for the "Group" auth model
|
||||
|
||||
v93 -> 2023-02-03 : https://github.com/inventree/InvenTree/pull/4300
|
||||
- Adds extra information to the currency exchange endpoint
|
||||
- Adds API endpoint for manually updating exchange rates
|
||||
|
||||
v92 -> 2023-02-02 : https://github.com/inventree/InvenTree/pull/4293
|
||||
- Adds API endpoint for currency exchange information
|
||||
|
||||
v91 -> 2023-01-31 : https://github.com/inventree/InvenTree/pull/4281
|
||||
- Improves the API endpoint for creating new Part instances
|
||||
|
||||
v90 -> 2023-01-25 : https://github.com/inventree/InvenTree/pull/4186/files
|
||||
- Adds a dedicated endpoint to activate a plugin
|
||||
|
||||
v89 -> 2023-01-25 : https://github.com/inventree/InvenTree/pull/4214
|
||||
- Adds updated field to SupplierPart API
|
||||
- Adds API date ordering for supplier part list
|
||||
|
||||
v88 -> 2023-01-17: https://github.com/inventree/InvenTree/pull/4225
|
||||
- Adds 'priority' field to Build model and api endpoints
|
||||
|
||||
v87 -> 2023-01-04 : https://github.com/inventree/InvenTree/pull/4067
|
||||
- Add API date filter for stock table on Expiry date
|
||||
|
||||
v86 -> 2022-12-22 : https://github.com/inventree/InvenTree/pull/4069
|
||||
- Adds API endpoints for part stocktake
|
||||
|
||||
v85 -> 2022-12-21 : https://github.com/inventree/InvenTree/pull/3858
|
||||
- Add endpoints serving ICS calendars for purchase and sales orders through API
|
||||
|
||||
v84 -> 2022-12-21: https://github.com/inventree/InvenTree/pull/4083
|
||||
- Add support for listing PO, BO, SO by their reference
|
||||
|
||||
v83 -> 2022-11-19 : https://github.com/inventree/InvenTree/pull/3949
|
||||
- Add support for structural Stock locations
|
||||
|
||||
v82 -> 2022-11-16 : https://github.com/inventree/InvenTree/pull/3931
|
||||
- Add support for structural Part categories
|
||||
|
||||
v81 -> 2022-11-08 : https://github.com/inventree/InvenTree/pull/3710
|
||||
- Adds cached pricing information to Part API
|
||||
- Adds cached pricing information to BomItem API
|
||||
- Allows Part and BomItem list endpoints to be filtered by 'has_pricing'
|
||||
- Remove calculated 'price_string' values from API endpoints
|
||||
- Allows PurchaseOrderLineItem API endpoint to be filtered by 'has_pricing'
|
||||
- Allows SalesOrderLineItem API endpoint to be filtered by 'has_pricing'
|
||||
- Allows SalesOrderLineItem API endpoint to be filtered by 'order_status'
|
||||
- Adds more information to SupplierPriceBreak serializer
|
||||
|
||||
v80 -> 2022-11-07 : https://github.com/inventree/InvenTree/pull/3906
|
||||
- Adds 'barcode_hash' to Part API serializer
|
||||
- Adds 'barcode_hash' to StockLocation API serializer
|
||||
- Adds 'barcode_hash' to SupplierPart API serializer
|
||||
|
||||
v79 -> 2022-11-03 : https://github.com/inventree/InvenTree/pull/3895
|
||||
- Add metadata to Company
|
||||
|
||||
v78 -> 2022-10-25 : https://github.com/inventree/InvenTree/pull/3854
|
||||
- Make PartCategory to be filtered by name and description
|
||||
|
||||
v77 -> 2022-10-12 : https://github.com/inventree/InvenTree/pull/3772
|
||||
- Adds model permission checks for barcode assignment actions
|
||||
|
||||
v76 -> 2022-09-10 : https://github.com/inventree/InvenTree/pull/3640
|
||||
- Refactor of barcode data on the API
|
||||
- StockItem.uid renamed to StockItem.barcode_hash
|
||||
|
||||
v75 -> 2022-09-05 : https://github.com/inventree/InvenTree/pull/3644
|
||||
- Adds "pack_size" attribute to SupplierPart API serializer
|
||||
|
||||
v74 -> 2022-08-28 : https://github.com/inventree/InvenTree/pull/3615
|
||||
- Add confirmation field for completing PurchaseOrder if the order has incomplete lines
|
||||
- Add confirmation field for completing SalesOrder if the order has incomplete lines
|
||||
|
||||
v73 -> 2022-08-24 : https://github.com/inventree/InvenTree/pull/3605
|
||||
- Add 'description' field to PartParameterTemplate model
|
||||
|
||||
v72 -> 2022-08-18 : https://github.com/inventree/InvenTree/pull/3567
|
||||
- Allow PurchaseOrder to be duplicated via the API
|
||||
|
||||
v71 -> 2022-08-18 : https://github.com/inventree/InvenTree/pull/3564
|
||||
- Updates to the "part scheduling" API endpoint
|
||||
|
||||
v70 -> 2022-08-02 : https://github.com/inventree/InvenTree/pull/3451
|
||||
- Adds a 'depth' parameter to the PartCategory list API
|
||||
- Adds a 'depth' parameter to the StockLocation list API
|
||||
|
||||
v69 -> 2022-08-01 : https://github.com/inventree/InvenTree/pull/3443
|
||||
- Updates the PartCategory list API:
|
||||
- Improve query efficiency: O(n) becomes O(1)
|
||||
- Rename 'parts' field to 'part_count'
|
||||
- Updates the StockLocation list API:
|
||||
- Improve query efficiency: O(n) becomes O(1)
|
||||
|
||||
v68 -> 2022-07-27 : https://github.com/inventree/InvenTree/pull/3417
|
||||
- Allows SupplierPart list to be filtered by SKU value
|
||||
- Allows SupplierPart list to be filtered by MPN value
|
||||
|
||||
v67 -> 2022-07-25 : https://github.com/inventree/InvenTree/pull/3395
|
||||
- Adds a 'requirements' endpoint for Part instance
|
||||
- Provides information on outstanding order requirements for a given part
|
||||
|
||||
v66 -> 2022-07-24 : https://github.com/inventree/InvenTree/pull/3393
|
||||
- Part images can now be downloaded from a remote URL via the API
|
||||
- Company images can now be downloaded from a remote URL via the API
|
||||
|
||||
v65 -> 2022-07-15 : https://github.com/inventree/InvenTree/pull/3335
|
||||
- Annotates 'in_stock' quantity to the SupplierPart API
|
||||
|
||||
v64 -> 2022-07-08 : https://github.com/inventree/InvenTree/pull/3310
|
||||
- Annotate 'on_order' quantity to BOM list API
|
||||
- Allow BOM List API endpoint to be filtered by "on_order" parameter
|
||||
|
||||
v63 -> 2022-07-06 : https://github.com/inventree/InvenTree/pull/3301
|
||||
- Allow BOM List API endpoint to be filtered by "available_stock" parameter
|
||||
|
||||
v62 -> 2022-07-05 : https://github.com/inventree/InvenTree/pull/3296
|
||||
- Allows search on BOM List API endpoint
|
||||
- Allows ordering on BOM List API endpoint
|
||||
|
||||
v61 -> 2022-06-12 : https://github.com/inventree/InvenTree/pull/3183
|
||||
- Migrate the "Convert Stock Item" form class to use the API
|
||||
- There is now an API endpoint for converting a stock item to a valid variant
|
||||
|
||||
v60 -> 2022-06-08 : https://github.com/inventree/InvenTree/pull/3148
|
||||
- Add availability data fields to the SupplierPart model
|
||||
|
||||
v59 -> 2022-06-07 : https://github.com/inventree/InvenTree/pull/3154
|
||||
- Adds further improvements to BulkDelete mixin class
|
||||
- Fixes multiple bugs in custom OPTIONS metadata implementation
|
||||
- Adds 'bulk delete' for Notifications
|
||||
|
||||
v58 -> 2022-06-06 : https://github.com/inventree/InvenTree/pull/3146
|
||||
- Adds a BulkDelete API mixin class for fast, safe deletion of multiple objects with a single API request
|
||||
|
||||
v57 -> 2022-06-05 : https://github.com/inventree/InvenTree/pull/3130
|
||||
- Transfer PartCategoryTemplateParameter actions to the API
|
||||
|
||||
v56 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3123
|
||||
- Expose the PartParameterTemplate model to use the API
|
||||
|
||||
v55 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3120
|
||||
- Converts the 'StockItemReturn' functionality to make use of the API
|
||||
|
||||
v54 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3117
|
||||
- Adds 'available_stock' annotation on the SalesOrderLineItem API
|
||||
- Adds (well, fixes) 'overdue' annotation on the SalesOrderLineItem API
|
||||
|
||||
v53 -> 2022-06-01 : https://github.com/inventree/InvenTree/pull/3110
|
||||
- Adds extra search fields to the BuildOrder list API endpoint
|
||||
|
||||
v52 -> 2022-05-31 : https://github.com/inventree/InvenTree/pull/3103
|
||||
- Allow part list API to be searched by supplier SKU
|
||||
|
||||
v51 -> 2022-05-24 : https://github.com/inventree/InvenTree/pull/3058
|
||||
- Adds new fields to the SalesOrderShipment model
|
||||
|
||||
v50 -> 2022-05-18 : https://github.com/inventree/InvenTree/pull/2912
|
||||
- Implement Attachments for manufacturer parts
|
||||
|
||||
v49 -> 2022-05-09 : https://github.com/inventree/InvenTree/pull/2957
|
||||
- Allows filtering of plugin list by 'active' status
|
||||
- Allows filtering of plugin list by 'mixin' support
|
||||
- Adds endpoint to "identify" or "locate" stock items and locations (using plugins)
|
||||
|
||||
v48 -> 2022-05-12 : https://github.com/inventree/InvenTree/pull/2977
|
||||
- Adds "export to file" functionality for PurchaseOrder API endpoint
|
||||
- Adds "export to file" functionality for SalesOrder API endpoint
|
||||
- Adds "export to file" functionality for BuildOrder API endpoint
|
||||
|
||||
v47 -> 2022-05-10 : https://github.com/inventree/InvenTree/pull/2964
|
||||
- Fixes barcode API error response when scanning a StockItem which does not exist
|
||||
- Fixes barcode API error response when scanning a StockLocation which does not exist
|
||||
|
||||
v46 -> 2022-05-09
|
||||
- Fixes read permissions on settings API
|
||||
- Allows non-staff users to read global settings via the API
|
||||
|
||||
v45 -> 2022-05-08 : https://github.com/inventree/InvenTree/pull/2944
|
||||
- Settings are now accessed via the API using their unique key, not their PK
|
||||
- This allows the settings to be accessed without prior knowledge of the PK
|
||||
|
||||
v44 -> 2022-05-04 : https://github.com/inventree/InvenTree/pull/2931
|
||||
- Converting more server-side rendered forms to the API
|
||||
- Exposes more core functionality to API endpoints
|
||||
|
||||
v43 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2875
|
||||
- Adds API detail endpoint for PartSalePrice model
|
||||
- Adds API detail endpoint for PartInternalPrice model
|
||||
|
||||
v42 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2833
|
||||
- Adds variant stock information to the Part and BomItem serializers
|
||||
|
||||
v41 -> 2022-04-26
|
||||
- Fixes 'variant_of' filter for Part list endpoint
|
||||
|
||||
v40 -> 2022-04-19
|
||||
- Adds ability to filter StockItem list by "tracked" parameter
|
||||
- This checks the serial number or batch code fields
|
||||
|
||||
v39 -> 2022-04-18
|
||||
- Adds ability to filter StockItem list by "has_batch" parameter
|
||||
|
||||
v38 -> 2022-04-14 : https://github.com/inventree/InvenTree/pull/2828
|
||||
- Adds the ability to include stock test results for "installed items"
|
||||
|
||||
v37 -> 2022-04-07 : https://github.com/inventree/InvenTree/pull/2806
|
||||
- Adds extra stock availability information to the BomItem serializer
|
||||
|
||||
v36 -> 2022-04-03
|
||||
- Adds ability to filter part list endpoint by unallocated_stock argument
|
||||
|
||||
v35 -> 2022-04-01 : https://github.com/inventree/InvenTree/pull/2797
|
||||
- Adds stock allocation information to the Part API
|
||||
- Adds calculated field for "unallocated_quantity"
|
||||
|
||||
v34 -> 2022-03-25
|
||||
- Change permissions for "plugin list" API endpoint (now allows any authenticated user)
|
||||
|
||||
v33 -> 2022-03-24
|
||||
- Adds "plugins_enabled" information to root API endpoint
|
||||
|
||||
v32 -> 2022-03-19
|
||||
- Adds "parameters" detail to Part API endpoint (use ¶meters=true)
|
||||
- Adds ability to filter PartParameterTemplate API by Part instance
|
||||
- Adds ability to filter PartParameterTemplate API by PartCategory instance
|
||||
|
||||
v31 -> 2022-03-14
|
||||
- Adds "updated" field to SupplierPriceBreakList and SupplierPriceBreakDetail API endpoints
|
||||
|
||||
v30 -> 2022-03-09
|
||||
- Adds "exclude_location" field to BuildAutoAllocation API endpoint
|
||||
- Allows BuildItem API endpoint to be filtered by BomItem relation
|
||||
|
||||
v29 -> 2022-03-08
|
||||
- Adds "scheduling" endpoint for predicted stock scheduling information
|
||||
|
||||
v28 -> 2022-03-04
|
||||
- Adds an API endpoint for auto allocation of stock items against a build order
|
||||
- Ref: https://github.com/inventree/InvenTree/pull/2713
|
||||
|
||||
v27 -> 2022-02-28
|
||||
- Adds target_date field to individual line items for purchase orders and sales orders
|
||||
|
||||
v26 -> 2022-02-17
|
||||
- Adds API endpoint for uploading a BOM file and extracting data
|
||||
|
||||
v25 -> 2022-02-17
|
||||
- Adds ability to filter "part" list endpoint by "in_bom_for" argument
|
||||
|
||||
v24 -> 2022-02-10
|
||||
- Adds API endpoint for deleting (cancelling) build order outputs
|
||||
|
||||
v23 -> 2022-02-02
|
||||
- Adds API endpoints for managing plugin classes
|
||||
- Adds API endpoints for managing plugin settings
|
||||
|
||||
v22 -> 2021-12-20
|
||||
- Adds API endpoint to "merge" multiple stock items
|
||||
|
||||
v21 -> 2021-12-04
|
||||
- Adds support for multiple "Shipments" against a SalesOrder
|
||||
- Refactors process for stock allocation against a SalesOrder
|
||||
|
||||
v20 -> 2021-12-03
|
||||
- Adds ability to filter POLineItem endpoint by "base_part"
|
||||
- Adds optional "order_detail" to POLineItem list endpoint
|
||||
|
||||
v19 -> 2021-12-02
|
||||
- Adds the ability to filter the StockItem API by "part_tree"
|
||||
- Returns only stock items which match a particular part.tree_id field
|
||||
|
||||
v18 -> 2021-11-15
|
||||
- Adds the ability to filter BomItem API by "uses" field
|
||||
- This returns a list of all BomItems which "use" the specified part
|
||||
- Includes inherited BomItem objects
|
||||
|
||||
v17 -> 2021-11-09
|
||||
- Adds API endpoints for GLOBAL and USER settings objects
|
||||
- Ref: https://github.com/inventree/InvenTree/pull/2275
|
||||
|
||||
v16 -> 2021-10-17
|
||||
- Adds API endpoint for completing build order outputs
|
||||
|
||||
v15 -> 2021-10-06
|
||||
- Adds detail endpoint for SalesOrderAllocation model
|
||||
- Allows use of the API forms interface for adjusting SalesOrderAllocation objects
|
||||
|
||||
v14 -> 2021-10-05
|
||||
- Stock adjustment actions API is improved, using native DRF serializer support
|
||||
- However adjustment actions now only support 'pk' as a lookup field
|
||||
|
||||
v13 -> 2021-10-05
|
||||
- Adds API endpoint to allocate stock items against a BuildOrder
|
||||
- Updates StockItem API with improved filtering against BomItem data
|
||||
|
||||
v12 -> 2021-09-07
|
||||
- Adds API endpoint to receive stock items against a PurchaseOrder
|
||||
|
||||
v11 -> 2021-08-26
|
||||
- Adds "units" field to PartBriefSerializer
|
||||
- This allows units to be introspected from the "part_detail" field in the StockItem serializer
|
||||
|
||||
v10 -> 2021-08-23
|
||||
- Adds "purchase_price_currency" to StockItem serializer
|
||||
- Adds "purchase_price_string" to StockItem serializer
|
||||
- Purchase price is now writable for StockItem serializer
|
||||
|
||||
v9 -> 2021-08-09
|
||||
- Adds "price_string" to part pricing serializers
|
||||
|
||||
v8 -> 2021-07-19
|
||||
- Refactors the API interface for SupplierPart and ManufacturerPart models
|
||||
- ManufacturerPart objects can no longer be created via the SupplierPart API endpoint
|
||||
|
||||
v7 -> 2021-07-03
|
||||
- Introduced the concept of "API forms" in https://github.com/inventree/InvenTree/pull/1716
|
||||
- API OPTIONS endpoints provide comprehensive field metadata
|
||||
- Multiple new API endpoints added for database models
|
||||
|
||||
v6 -> 2021-06-23
|
||||
- Part and Company images can now be directly uploaded via the REST API
|
||||
|
||||
v5 -> 2021-06-21
|
||||
- Adds API interface for manufacturer part parameters
|
||||
|
||||
v4 -> 2021-06-01
|
||||
- BOM items can now accept "variant stock" to be assigned against them
|
||||
- Many slight API tweaks were needed to get this to work properly!
|
||||
|
||||
v3 -> 2021-05-22:
|
||||
- The updated StockItem "history tracking" now uses a different interface
|
||||
|
||||
"""
|
||||
@@ -0,0 +1,368 @@
|
||||
"""AppConfig for InvenTree app."""
|
||||
|
||||
import logging
|
||||
from importlib import import_module
|
||||
from pathlib import Path
|
||||
|
||||
from django.apps import AppConfig, apps
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.exceptions import AppRegistryNotReady
|
||||
from django.db import transaction
|
||||
from django.db.utils import IntegrityError, OperationalError
|
||||
|
||||
import InvenTree.conversion
|
||||
import InvenTree.ready
|
||||
import InvenTree.tasks
|
||||
from InvenTree.config import get_setting
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeConfig(AppConfig):
    """AppConfig for the core InvenTree app.

    Runs system-wide initialization once the Django app registry is ready.
    """

    name = 'InvenTree'

    def ready(self):
        """Run system wide setup init steps.

        Like:
        - Checking if migrations should be run
        - Cleaning up tasks
        - Starting regular tasks
        - Updating exchange rates
        - Collecting notification methods
        - Collecting state transition methods
        - Adding users set in the current environment
        """
        # Skip loading if the plugin registry is not loaded,
        # or if we are running in a background thread
        if (
            not InvenTree.ready.isPluginRegistryLoaded()
            or not InvenTree.ready.isInMainThread()
        ):
            return

        # Skip if running migrations
        if InvenTree.ready.isRunningMigrations():
            return

        if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
            self.remove_obsolete_tasks()

            self.collect_tasks()
            self.start_background_tasks()

            if not InvenTree.ready.isInTestMode():  # pragma: no cover
                self.update_exchange_rates()
                # Let the background worker check for migrations
                InvenTree.tasks.offload_task(InvenTree.tasks.check_for_migrations)

        self.update_site_url()
        self.collect_notification_methods()
        self.collect_state_transition_methods()

        # Ensure the unit registry is loaded
        InvenTree.conversion.get_unit_registry()

        if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
            self.add_user_on_startup()
            self.add_user_from_file()

    def remove_obsolete_tasks(self):
        """Delete any obsolete scheduled tasks in the database."""
        # Tasks which have been removed from the codebase,
        # but may still be scheduled in the database
        obsolete = [
            'InvenTree.tasks.delete_expired_sessions',
            'stock.tasks.delete_old_stock_items',
        ]

        try:
            from django_q.models import Schedule
        except AppRegistryNotReady:  # pragma: no cover
            return

        # Remove any existing obsolete tasks
        try:
            Schedule.objects.filter(func__in=obsolete).delete()
        except Exception:
            logger.exception('Failed to remove obsolete tasks - database not ready')

    def start_background_tasks(self):
        """Start all background tasks for InvenTree.

        Synchronizes the database-backed schedule (django-q Schedule objects)
        with the set of tasks registered via the @scheduled_task decorator.
        """
        logger.info('Starting background tasks...')

        from django_q.models import Schedule

        # List of existing scheduled tasks (in the database), keyed by function reference
        existing_tasks = {}

        for existing_task in Schedule.objects.all():
            existing_tasks[existing_task.func] = existing_task

        tasks_to_create = []
        tasks_to_update = []

        # List of collected tasks found with the @scheduled_task decorator
        tasks = InvenTree.tasks.tasks.task_list

        for task in tasks:
            ref_name = f'{task.func.__module__}.{task.func.__name__}'

            if ref_name in existing_tasks:
                # This task already exists - update the details if required
                existing_task = existing_tasks[ref_name]

                if (
                    existing_task.schedule_type != task.interval
                    or existing_task.minutes != task.minutes
                ):
                    existing_task.schedule_type = task.interval
                    existing_task.minutes = task.minutes
                    tasks_to_update.append(existing_task)

            else:
                # This task does *not* already exist - create it
                tasks_to_create.append(
                    Schedule(
                        name=ref_name,
                        func=ref_name,
                        schedule_type=task.interval,
                        minutes=task.minutes,
                    )
                )

        if len(tasks_to_create) > 0:
            Schedule.objects.bulk_create(tasks_to_create)
            logger.info('Created %s new scheduled tasks', len(tasks_to_create))

        if len(tasks_to_update) > 0:
            Schedule.objects.bulk_update(tasks_to_update, ['schedule_type', 'minutes'])
            logger.info('Updated %s existing scheduled tasks', len(tasks_to_update))

        self.add_heartbeat()

        logger.info('Started %s scheduled background tasks...', len(tasks))

    def add_heartbeat(self):
        """Ensure there is at least one background task in the queue."""
        import django_q.models

        try:
            if django_q.models.OrmQ.objects.count() == 0:
                InvenTree.tasks.offload_task(
                    InvenTree.tasks.heartbeat, force_async=True
                )
        except Exception:
            # Best-effort only - the database may not be available yet
            pass

    def collect_tasks(self):
        """Collect all background tasks from the installed apps.

        Importing each app's 'tasks' module registers any @scheduled_task
        decorated functions as a side effect.
        """
        for app_name, app in apps.app_configs.items():
            if app_name == 'InvenTree':
                continue

            if Path(app.path).joinpath('tasks.py').exists():
                try:
                    import_module(f'{app.module.__package__}.tasks')
                except Exception as e:  # pragma: no cover
                    logger.exception('Error loading tasks for %s: %s', app_name, e)

    def update_exchange_rates(self):  # pragma: no cover
        """Update exchange rates each time the server is started.

        Only runs *if*:
        a) Have not been updated recently (one day or less)
        b) The base exchange rate has been altered
        """
        try:
            from djmoney.contrib.exchange.models import ExchangeBackend

            from common.settings import currency_code_default
            from InvenTree.tasks import update_exchange_rates
        except AppRegistryNotReady:  # pragma: no cover
            # Cannot proceed - the imported names above would be undefined.
            # (Previously this was 'pass', which led to a NameError below.)
            return

        base_currency = currency_code_default()

        update = False

        try:
            backend = ExchangeBackend.objects.filter(name='InvenTreeExchange')

            if backend.exists():
                backend = backend.first()

                last_update = backend.last_update

                if last_update is None:
                    # Never been updated
                    logger.info('Exchange backend has never been updated')
                    update = True

                # Backend currency has changed?
                if base_currency != backend.base_currency:
                    logger.info(
                        'Base currency changed from %s to %s',
                        backend.base_currency,
                        base_currency,
                    )
                    update = True

        except ExchangeBackend.DoesNotExist:
            logger.info('Exchange backend not found - updating')
            update = True

        except Exception:
            # Some other error - potentially the tables are not ready yet
            return

        if update:
            try:
                update_exchange_rates()
            except OperationalError:
                logger.warning('Could not update exchange rates - database not ready')
            except Exception as e:
                logger.exception('Error updating exchange rates: %s (%s)', e, type(e))

    def update_site_url(self):
        """Update the site URL setting.

        - If a fixed SITE_URL is specified (via configuration), it should override the INVENTREE_BASE_URL setting
        - If multi-site support is enabled, update the site URL for the current site
        """
        import common.models

        if not InvenTree.ready.canAppAccessDatabase():
            return

        if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
            return

        if settings.SITE_URL:
            try:
                if (
                    common.models.InvenTreeSetting.get_setting('INVENTREE_BASE_URL')
                    != settings.SITE_URL
                ):
                    common.models.InvenTreeSetting.set_setting(
                        'INVENTREE_BASE_URL', settings.SITE_URL
                    )
                    logger.info('Updated INVENTREE_SITE_URL to %s', settings.SITE_URL)
            except Exception:
                pass

            # If multi-site support is enabled, update the site URL for the current site
            # (guarded by 'if settings.SITE_URL' so we never write an empty domain)
            try:
                from django.contrib.sites.models import Site

                site = Site.objects.get_current()
                site.domain = settings.SITE_URL
                site.save()

                logger.info('Updated current site URL to %s', settings.SITE_URL)

            except Exception:
                pass

    def add_user_on_startup(self):
        """Add a superuser at startup, based on environment/config settings."""
        # Stop if this check has already run
        if hasattr(settings, 'USER_ADDED') and settings.USER_ADDED:
            return

        # Get the configured values
        add_user = get_setting('INVENTREE_ADMIN_USER', 'admin_user')
        add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
        add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
        add_password_file = get_setting(
            'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
        )

        # Count how many of the required values are present
        set_variables = 0

        for tested_var in [add_user, add_email, add_password]:
            if tested_var:
                set_variables += 1

        # No variable set -> do not try anything
        if set_variables == 0:
            settings.USER_ADDED = True
            return

        # Not all needed variables set
        if set_variables < 3:
            settings.USER_ADDED = True

            # If a password file is present, do not warn - will be handled later
            if add_password_file:
                return
            logger.warning(
                'Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD'
            )
            return

        # Good to go -> create user
        self._create_admin_user(add_user, add_email, add_password)

        # Do not try again
        settings.USER_ADDED = True

    def _create_admin_user(self, add_user, add_email, add_password):
        """Create a superuser account, unless the username already exists."""
        user = get_user_model()
        try:
            with transaction.atomic():
                if user.objects.filter(username=add_user).exists():
                    logger.info('User %s already exists - skipping creation', add_user)
                else:
                    new_user = user.objects.create_superuser(
                        add_user, add_email, add_password
                    )
                    logger.info('User %s was created!', str(new_user))
        except IntegrityError:
            logger.warning('The user "%s" could not be created', add_user)

    def add_user_from_file(self):
        """Add the superuser from a file."""
        # Stop if this check has already run
        if hasattr(settings, 'USER_ADDED_FILE') and settings.USER_ADDED_FILE:
            return

        # Get the configured password file (if any)
        add_password_file = get_setting(
            'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
        )

        # No variable set -> do not try anything
        if not add_password_file:
            settings.USER_ADDED_FILE = True
            return

        # Check if file exists
        add_password_file = Path(str(add_password_file))
        if not add_password_file.exists():
            logger.warning('The file "%s" does not exist', add_password_file)
            settings.USER_ADDED_FILE = True
            return

        # Good to go -> create user
        self._create_admin_user(
            get_setting('INVENTREE_ADMIN_USER', 'admin_user', 'admin'),
            get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email', ''),
            add_password_file.read_text(encoding='utf-8'),
        )

        # Do not try again
        settings.USER_ADDED_FILE = True

    def collect_notification_methods(self):
        """Collect all notification methods."""
        from common.notifications import storage

        storage.collect()

    def collect_state_transition_methods(self):
        """Collect all state transition methods."""
        from generic.states import storage

        storage.collect()
|
||||
@@ -0,0 +1,85 @@
|
||||
"""Custom backend implementations."""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
|
||||
|
||||
from maintenance_mode.backends import AbstractStateBackend
|
||||
|
||||
import common.models
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeMaintenanceModeBackend(AbstractStateBackend):
    """Custom backend for managing state of maintenance mode.

    Stores a timestamp in the database to determine when maintenance mode will elapse.
    """

    # Key used to persist the maintenance-mode timestamp in InvenTreeSetting
    SETTING_KEY = '_MAINTENANCE_MODE'

    def get_value(self) -> bool:
        """Get the current state of the maintenance mode.

        Returns:
            bool: True if maintenance mode is active, False otherwise.
        """
        try:
            setting = common.models.InvenTreeSetting.objects.get(key=self.SETTING_KEY)
            value = str(setting.value).strip()
        except common.models.InvenTreeSetting.DoesNotExist:
            # Database is accessible, but setting is not available - assume False
            return False
        except (IntegrityError, OperationalError, ProgrammingError):
            # Database is inaccessible - err on the side of caution and
            # report that maintenance mode *is* active
            logger.debug('Failed to read maintenance mode state - assuming True')
            return True

        # Extract timestamp from string
        try:
            # If the timestamp is in the past, we are now *out* of maintenance mode
            timestamp = datetime.datetime.fromisoformat(value)
            return timestamp > datetime.datetime.now()
        except ValueError:
            # If the value is not a valid timestamp, assume maintenance mode is not active
            # (this also covers the blank value written by set_value(False))
            return False

    def set_value(self, value: bool, retries: int = 5, minutes: int = 5):
        """Set the state of the maintenance mode.

        Instead of simply writing "true" or "false" to the setting,
        we write a timestamp to the setting, which is used to determine
        when maintenance mode will elapse.
        This ensures that we will always *exit* maintenance mode after a certain time period.

        Arguments:
            value: True to enter maintenance mode, False to exit it.
            retries: Number of write attempts before giving up.
            minutes: How long maintenance mode should remain active.
        """
        logger.debug('Setting maintenance mode state: %s', value)

        if value:
            # Save as isoformat
            timestamp = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
            timestamp = timestamp.isoformat()
        else:
            # Blank timestamp means maintenance mode is not active
            timestamp = ''

        while retries > 0:
            try:
                common.models.InvenTreeSetting.set_setting(self.SETTING_KEY, timestamp)

                # Read the value back to confirm
                if self.get_value() == value:
                    break
            except (IntegrityError, OperationalError, ProgrammingError):
                # If the database is locked/unavailable, wait briefly and try again
                logger.debug(
                    'Failed to set maintenance mode state (%s retries left)', retries
                )
                time.sleep(0.1)

            retries -= 1

            if retries == 0:
                logger.warning('Failed to set maintenance mode state')
|
||||
@@ -0,0 +1,71 @@
|
||||
"""Pull rendered copies of the templated.
|
||||
|
||||
Only used for testing the js files! - This file is omitted from coverage.
|
||||
"""
|
||||
|
||||
import os # pragma: no cover
|
||||
import pathlib # pragma: no cover
|
||||
|
||||
from InvenTree.unit_test import InvenTreeTestCase # pragma: no cover
|
||||
|
||||
|
||||
class RenderJavascriptFiles(InvenTreeTestCase):  # pragma: no cover
    """A unit test which "renders" javascript files to disk.

    The server renders templated javascript files;
    we need the fully-rendered files for linting and static tests.
    """

    def download_file(self, filename, prefix):
        """Fetch a single rendered javascript file and copy it into a temporary directory."""
        response = self.client.get(os.path.join(prefix, filename))

        # Output directory is '../../js_tmp' relative to this file
        base = os.path.abspath(os.path.dirname(__file__))
        target_dir = os.path.abspath(os.path.join(base, '..', '..', 'js_tmp'))

        if not os.path.exists(target_dir):
            os.mkdir(target_dir)

        with open(os.path.join(target_dir, filename), 'wb') as outfile:
            outfile.write(response.content)

    def download_files(self, subdir, prefix):
        """Render every *.js file found under the given template subdirectory.

        Returns:
            The number of files which were rendered.
        """
        base = os.path.abspath(os.path.dirname(__file__))
        source_dir = os.path.abspath(
            os.path.join(base, '..', 'templates', 'js', subdir)
        )

        count = 0

        for path in pathlib.Path(source_dir).rglob('*.js'):
            self.download_file(os.path.basename(path), prefix)
            count += 1

        return count

    def test_render_files(self):
        """Render all templated javascript files."""
        print('Rendering javascript files...')

        total = self.download_files('translated', '/js/i18n')
        total += self.download_files('dynamic', '/js/dynamic')

        print(f'Rendered {total} javascript files.')
|
||||
@@ -0,0 +1,453 @@
|
||||
"""Helper functions for loading InvenTree configuration options."""
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import string
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.files.base import ContentFile
|
||||
from django.core.files.storage import Storage
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
CONFIG_DATA = None
|
||||
CONFIG_LOOKUPS = {}
|
||||
|
||||
|
||||
def to_list(value, delimiter=','):
    """Take a configuration setting and make sure it is a list.

    For example, we might have a configuration setting taken from the .config file,
    which is already a list.

    However, the same setting may be specified via an environment variable,
    using a comma delimited string!

    Arguments:
        value: The raw configuration value (list, tuple, or delimited string).
        delimiter: Separator used when splitting a string value.

    Returns:
        The value unchanged if already a list/tuple, otherwise a list of
        whitespace-stripped substrings.
    """
    # Use isinstance (idiomatic, accepts subclasses) rather than comparing type()
    if isinstance(value, (list, tuple)):
        return value

    # Otherwise, force string value
    value = str(value)

    return [x.strip() for x in value.split(delimiter)]
|
||||
|
||||
|
||||
def to_dict(value):
    """Take a configuration setting and make sure it is a dict.

    For example, we might have a configuration setting taken from the .config file,
    which is already an object/dict.

    However, the same setting may be specified via an environment variable,
    using a valid JSON string!
    """
    # Already a dict - nothing to do
    if isinstance(value, dict):
        return value

    # Unset value maps to an empty dict
    if value is None:
        return {}

    try:
        return json.loads(value)
    except Exception as error:
        logger.exception(
            "Failed to parse value '%s' as JSON with error %s. Ensure value is a valid JSON string.",
            value,
            error,
        )

    return {}
|
||||
|
||||
|
||||
def is_true(x):
    """Shortcut function to determine if a value "looks" like a boolean.

    Accepts '1', 'y', 'yes', 't', 'true', 'on' (case-insensitive, whitespace ignored).
    """
    truthy_values = {'1', 'y', 'yes', 't', 'true', 'on'}
    return str(x).strip().lower() in truthy_values
|
||||
|
||||
|
||||
def get_base_dir() -> Path:
    """Returns the base (top-level) InvenTree directory."""
    # Two levels above this file, resolved to an absolute path
    here = Path(__file__).parent
    return here.parent.resolve()
|
||||
|
||||
|
||||
def ensure_dir(path: Path, storage=None) -> None:
    """Ensure that a directory exists, creating it when missing.

    If a Django Storage backend is supplied, a placeholder '.empty'
    file is saved instead (storage backends have no real directories).
    """
    if storage and isinstance(storage, Storage):
        # Storage backend: mark the "directory" with a stub file
        if not storage.exists(str(path)):
            storage.save(str(path / '.empty'), ContentFile(''))
        return

    # Local filesystem: create the directory (and parents) if absent
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def get_config_file(create=True) -> Path:
    """Returns the path of the InvenTree configuration file.

    Note: It will be created if it does not already exist!

    Arguments:
        create: If True, copy the template config file into place when no
            config file exists yet.
    """
    base_dir = get_base_dir()

    # An explicit config file location may be specified via environment variable
    cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')

    if cfg_filename:
        cfg_filename = Path(cfg_filename.strip()).resolve()
    else:
        # Config file is *not* specified - use the default
        cfg_filename = base_dir.joinpath('config.yaml').resolve()

    if not cfg_filename.exists() and create:
        print(
            "InvenTree configuration file 'config.yaml' not found - creating default file"
        )
        ensure_dir(cfg_filename.parent)

        # Seed the new config file from the bundled template
        cfg_template = base_dir.joinpath('config_template.yaml')
        shutil.copyfile(cfg_template, cfg_filename)
        print(f'Created config file {cfg_filename}')

    return cfg_filename
|
||||
|
||||
|
||||
def load_config_data(set_cache: bool = False) -> dict:
    """Load configuration data from the config file.

    Arguments:
        set_cache(bool): If True, the configuration data will be cached for future use after load.

    Returns:
        dict: Parsed YAML data from the configuration file.
        (Previous annotation '-> map' referred to the unrelated builtin iterator type.)
    """
    global CONFIG_DATA

    # Use the cached data if populated,
    # but skip the cache if we have been asked to (re)set it
    if CONFIG_DATA is not None and not set_cache:
        return CONFIG_DATA

    # Local import - yaml may not be required by every code path
    import yaml

    cfg_file = get_config_file()

    with open(cfg_file, 'r') as cfg:
        data = yaml.safe_load(cfg)

    # Set the cache if requested
    if set_cache:
        CONFIG_DATA = data

    return data
|
||||
|
||||
|
||||
def do_typecast(value, type, var_name=None):
    """Attempt to typecast a value.

    Arguments:
        value: Value to typecast
        type: Function to use for typecasting the value e.g. int, float, str, list, dict
        var_name: Name that should be logged e.g. 'INVENTREE_STATIC_ROOT'. Set if logging is required.

    Returns:
        Typecasted value or original value if typecasting failed.
    """
    # Force 'list' of strings
    if type is list:
        return to_list(value)

    # Valid JSON string is required
    if type is dict:
        return to_dict(value)

    if type is not None:
        # Try to typecast the value; fall back to the raw value on failure
        try:
            return type(value)
        except Exception as error:
            if var_name:
                logger.exception(
                    "Failed to typecast '%s' with value '%s' to type '%s' with error %s",
                    var_name,
                    value,
                    type,
                    error,
                )

    return value
|
||||
|
||||
|
||||
def get_setting(env_var=None, config_key=None, default_value=None, typecast=None):
    """Helper function for retrieving a configuration setting value.

    - First preference is to look for the environment variable
    - Second preference is to look for the value of the settings file
    - Third preference is the default value

    Arguments:
        env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
        config_key: Key to lookup in the configuration file
        default_value: Value to return if first two options are not provided
        typecast: Function to use for typecasting the value e.g. int, float, str, list, dict
    """

    def set_metadata(source: str):
        """Record where this setting's value came from (for debugging/introspection)."""
        key = env_var or config_key
        CONFIG_LOOKUPS[key] = {
            'env_var': env_var,
            'config_key': config_key,
            'source': source,
            'accessed': datetime.datetime.now(),
        }

    # First, try to load from the environment variables
    # Note: an empty string in the environment still counts as "set"
    if env_var is not None:
        val = os.getenv(env_var, None)

        if val is not None:
            set_metadata('env')
            return do_typecast(val, typecast, var_name=env_var)

    # Next, try to load from configuration file
    if config_key is not None:
        cfg_data = load_config_data()

        result = None

        # Hack to allow 'path traversal' in configuration file
        # e.g. config_key 'a.b.c' walks nested dicts cfg['a']['b']['c']
        for key in config_key.strip().split('.'):
            if type(cfg_data) is not dict or key not in cfg_data:
                # Missing key (or non-dict level) terminates the traversal
                result = None
                break

            result = cfg_data[key]
            cfg_data = cfg_data[key]

        if result is not None:
            set_metadata('yaml')
            return do_typecast(result, typecast, var_name=env_var)

    # Finally, return the default value
    set_metadata('default')
    return do_typecast(default_value, typecast, var_name=env_var)
|
||||
|
||||
|
||||
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
    """Helper function for retrieving a boolean configuration setting."""
    raw_value = get_setting(env_var, config_key, default_value)
    return is_true(raw_value)
|
||||
|
||||
|
||||
def _get_required_dir(env_var, config_key, create=True):
    """Resolve a required directory setting, optionally creating the directory.

    Shared implementation for the media/static/backup directory lookups,
    which previously triplicated this logic.

    Arguments:
        env_var: Environment variable which may specify the directory
        config_key: Configuration file key which may specify the directory
        create: If True, create the directory (and parents) if missing

    Returns:
        Path: Absolute path of the configured directory.

    Raises:
        FileNotFoundError: If the setting is not specified.
    """
    value = get_setting(env_var, config_key)

    if not value:
        raise FileNotFoundError(f'{env_var} not specified')

    path = Path(value).resolve()

    if create:
        path.mkdir(parents=True, exist_ok=True)

    return path


def get_media_dir(create=True):
    """Return the absolute path for the 'media' directory (where uploaded files are stored)."""
    return _get_required_dir('INVENTREE_MEDIA_ROOT', 'media_root', create=create)


def get_static_dir(create=True):
    """Return the absolute path for the 'static' directory (where static files are stored)."""
    return _get_required_dir('INVENTREE_STATIC_ROOT', 'static_root', create=create)


def get_backup_dir(create=True):
    """Return the absolute path for the backup directory."""
    return _get_required_dir('INVENTREE_BACKUP_DIR', 'backup_dir', create=create)
|
||||
|
||||
|
||||
def get_plugin_file():
    """Returns the path of the InvenTree plugins specification file.

    Note: It will be created if it does not already exist!

    Returns:
        Path: Location of the 'plugins.txt' file.
    """
    # Check if the plugin.txt file (specifying required plugins) is specified
    plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')

    if not plugin_file:
        # If not specified, look in the same directory as the configuration file
        config_dir = get_config_file().parent
        plugin_file = config_dir.joinpath('plugins.txt')
    else:
        # Make sure we are using a modern Path object
        plugin_file = Path(plugin_file)

    if not plugin_file.exists():
        logger.warning(
            'Plugin configuration file does not exist - creating default file'
        )
        logger.info("Creating plugin file at '%s'", plugin_file)
        ensure_dir(plugin_file.parent)

        # If opening the file fails (no write permission, for example), then this will throw an error
        plugin_file.write_text(
            '# InvenTree Plugins (uses PIP framework to install)\n\n'
        )

    return plugin_file
|
||||
|
||||
|
||||
def get_plugin_dir():
    """Returns the path of the custom plugins directory."""
    plugin_dir = get_setting('INVENTREE_PLUGIN_DIR', 'plugin_dir')
    return plugin_dir
|
||||
|
||||
|
||||
def get_secret_key():
    """Return the secret key value which will be used by django.

    Following options are tested, in descending order of preference:

    A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
    B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
    C) Look for default key file "secret_key.txt"
    D) Create "secret_key.txt" if it does not exist
    """
    # Look for environment variable
    if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
        logger.info('SECRET_KEY loaded by INVENTREE_SECRET_KEY')  # pragma: no cover
        return secret_key

    # Look for secret key file
    if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
        secret_key_file = Path(secret_key_file).resolve()
    else:
        # Default location for secret key file
        secret_key_file = get_base_dir().joinpath('secret_key.txt').resolve()

    if not secret_key_file.exists():
        logger.info("Generating random key file at '%s'", secret_key_file)
        ensure_dir(secret_key_file.parent)

        # Security fix: use the cryptographically-secure 'secrets' module
        # (rather than 'random') to generate the key material
        import secrets

        options = string.digits + string.ascii_letters + string.punctuation
        key = ''.join(secrets.choice(options) for _idx in range(100))
        secret_key_file.write_text(key)

    logger.debug("Loading SECRET_KEY from '%s'", secret_key_file)

    # Strip surrounding whitespace (e.g. trailing newline) from the key data
    key_data = secret_key_file.read_text().strip()

    return key_data
|
||||
|
||||
|
||||
def get_custom_file(
    env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False
):
    """Returns the checked path to a custom file.

    Set lookup_media to True to also search in the media folder.

    Arguments:
        env_ref: Environment variable which may specify the file
        conf_ref: Configuration file key which may specify the file
        log_ref: Human-readable name used in log output
        lookup_media: If True, also search the media storage for the file

    Returns:
        None if no value is configured; False if a value is configured but the
        file could not be found; otherwise the configured value itself.
    """
    # Local imports - Django storage may not be ready at module import time
    from django.contrib.staticfiles.storage import StaticFilesStorage
    from django.core.files.storage import default_storage

    value = get_setting(env_ref, conf_ref, None)

    if not value:
        return None

    static_storage = StaticFilesStorage()

    if static_storage.exists(value):
        logger.info('Loading %s from %s directory: %s', log_ref, 'static', value)
    elif lookup_media and default_storage.exists(value):
        logger.info('Loading %s from %s directory: %s', log_ref, 'media', value)
    else:
        add_dir_str = ' or media' if lookup_media else ''
        logger.warning(
            "The %s file '%s' could not be found in the static %s directories",
            log_ref,
            value,
            add_dir_str,
        )
        # NOTE(review): missing file returns False, while an unset value
        # returns None - callers must distinguish the two cases
        value = False

    return value
|
||||
|
||||
|
||||
def get_frontend_settings(debug=True):
    """Return a dictionary of settings for the frontend interface.

    Note that the new config settings use the 'FRONTEND' key,
    whereas the legacy key was 'PUI' (platform UI) which is now deprecated

    Arguments:
        debug: If True, configure the frontend for development mode.
    """
    # Legacy settings (deprecated 'PUI' key) - still honored, with a warning
    pui_settings = get_setting(
        'INVENTREE_PUI_SETTINGS', 'pui_settings', {}, typecast=dict
    )

    if len(pui_settings) > 0:
        warnings.warn(
            "The 'INVENTREE_PUI_SETTINGS' key is deprecated. Please use 'INVENTREE_FRONTEND_SETTINGS' instead",
            DeprecationWarning,
            stacklevel=2,
        )

    # New settings
    frontend_settings = get_setting(
        'INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict
    )

    # Merge settings - new 'FRONTEND' values take precedence over legacy 'PUI' values
    settings = {**pui_settings, **frontend_settings}

    # Set the base URL (only if not already provided in the merged settings)
    if 'base_url' not in settings:
        base_url = get_setting('INVENTREE_PUI_URL_BASE', 'pui_url_base', '')

        if base_url:
            warnings.warn(
                "The 'INVENTREE_PUI_URL_BASE' key is deprecated. Please use 'INVENTREE_FRONTEND_URL_BASE' instead",
                DeprecationWarning,
                stacklevel=2,
            )
        else:
            base_url = get_setting(
                'INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'platform'
            )

        settings['base_url'] = base_url

    # Set the server list
    settings['server_list'] = settings.get('server_list', [])

    # Set the debug flag
    settings['debug'] = debug

    if 'environment' not in settings:
        settings['environment'] = 'development' if debug else 'production'

    if debug and 'show_server_selector' not in settings:
        # In debug mode, show server selector by default
        settings['show_server_selector'] = True
    elif len(settings['server_list']) == 0:
        # If no servers are specified, show server selector
        # NOTE(review): this branch overrides any explicit 'show_server_selector'
        # value when the server list is empty - confirm this is intended
        settings['show_server_selector'] = True

    return settings
|
||||
@@ -0,0 +1,83 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Provides extra global data to all templates."""
|
||||
|
||||
import InvenTree.email
|
||||
import InvenTree.status
|
||||
from generic.states import StatusCode
|
||||
from InvenTree.helpers import inheritors
|
||||
from users.models import RuleSet, check_user_role
|
||||
|
||||
|
||||
def health_status(request):
    """Provide system health status information to the global context.

    - Not required for AJAX requests
    - Do not provide if it is already provided to the context

    Arguments:
        request: The current HttpRequest object

    Returns:
        A dict of health information, or an empty dict if not applicable.
    """
    if request.path.endswith('.js'):
        # Do not provide to script requests
        return {}  # pragma: no cover

    if hasattr(request, '_inventree_health_status'):
        # Do not duplicate efforts
        return {}

    # Mark the request so repeated context-processor invocations skip the work
    request._inventree_health_status = True

    # Imported explicitly: InvenTree.version is not among this module's
    # top-level imports, so do not rely on it being loaded transitively
    import InvenTree.version

    status = {
        'django_q_running': InvenTree.status.is_worker_running(),
        'email_configured': InvenTree.email.is_email_configured(),
    }

    # The following keys are required to denote system health
    health_keys = ['django_q_running']

    # Overall health requires every health key to be explicitly True
    status['system_healthy'] = all(status[k] is True for k in health_keys)

    status['up_to_date'] = InvenTree.version.isInvenTreeUpToDate()

    return status
|
||||
|
||||
|
||||
def status_codes(request):
    """Provide status code enumerations to the template context.

    Returns a mapping of StatusCode subclass name -> template context,
    or an empty dict if the data was already provided for this request.
    """
    if hasattr(request, '_inventree_status_codes'):
        # Already provided during this request cycle
        return {}

    # Flag the request so subsequent invocations short-circuit
    request._inventree_status_codes = True
    return {cls.__name__: cls.template_context() for cls in inheritors(StatusCode)}
|
||||
|
||||
|
||||
def user_roles(request):
    """Return a map of the current roles assigned to the user.

    Roles are denoted by their simple names, and then the permission type.

    Permissions can be access as follows:

    - roles.part.view
    - roles.build.delete

    Each value will return a boolean True / False
    """
    user = request.user

    # Superusers implicitly hold every permission; otherwise defer to the
    # per-role permission check
    roles = {
        role: {
            perm: user.is_superuser or check_user_role(user, role, perm)
            for perm in ['view', 'add', 'change', 'delete']
        }
        for role in RuleSet.get_ruleset_models().keys()
    }

    return {'roles': roles}
|
||||
@@ -0,0 +1,252 @@
|
||||
"""Helper functions for converting between units."""
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import pint
|
||||
|
||||
_unit_registry = None
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def get_unit_registry():
    """Return a custom instance of the Pint UnitRegistry.

    The registry is constructed lazily and cached for speedier access.
    """
    global _unit_registry

    # Fast path: return the cached registry if one has been built
    if _unit_registry is not None:
        return _unit_registry

    # No cached registry yet - build one from scratch
    return reload_unit_registry()
|
||||
|
||||
|
||||
def reload_unit_registry():
    """Reload the unit registry from the database.

    This function is called at startup, and whenever the database is updated.

    Returns:
        The freshly constructed pint UnitRegistry instance.
    """
    import time

    t_start = time.time()

    global _unit_registry

    # Invalidate the cache while the registry is being rebuilt
    _unit_registry = None

    registry = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)

    # Aliases for temperature units
    for alias_definition in (
        '@alias degC = Celsius',
        '@alias degF = Fahrenheit',
        '@alias degK = Kelvin',
    ):
        registry.define(alias_definition)

    # Define some "standard" additional units
    registry.define('piece = 1')
    registry.define('each = 1 = ea')
    registry.define('dozen = 12 = dz')
    registry.define('hundred = 100')
    registry.define('thousand = 1000')

    # Allow for custom units to be defined in the database
    try:
        from common.models import CustomUnit

        for custom_unit in CustomUnit.objects.all():
            try:
                registry.define(custom_unit.fmt_string())
            except Exception as exc:
                # A single bad definition must not abort the whole reload
                logger.exception(
                    'Failed to load custom unit: %s - %s',
                    custom_unit.fmt_string(),
                    exc,
                )

        # Only cache the registry once custom units have been loaded
        _unit_registry = registry

    except Exception:
        # Database is not ready, or CustomUnit model is not available
        pass

    logger.debug('Loaded unit registry in %.3f s', time.time() - t_start)

    return registry
|
||||
|
||||
|
||||
def from_engineering_notation(value):
    """Convert a provided value to 'natural' representation from 'engineering' notation.

    Ref: https://en.wikipedia.org/wiki/Engineering_notation

    In "engineering notation", the unit (or SI prefix) is often combined with the value,
    and replaces the decimal point.

    Examples:
    - 1K2 -> 1.2K
    - 3n05 -> 3.05n
    - 8R6 -> 8.6R

    And, we should also take into account any provided trailing strings:

    - 1K2 ohm -> 1.2K ohm
    - 10n005F -> 10.005nF

    Arguments:
        value: The input value (coerced to str)

    Returns:
        The converted string, or the original (stripped) string if it does
        not match the engineering-notation pattern.
    """
    value = str(value).strip()

    # Raw string literal: '\d' in a plain string is an invalid escape
    # sequence (SyntaxWarning in modern Python)
    pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'

    if match := re.match(pattern, value):
        # Re-assemble as "<int>.<frac><prefix><trailing text>"
        left, prefix, right, suffix = match.groups()
        return f'{left}.{right}{prefix}{suffix}'

    # Not in engineering notation - return unchanged
    return value
|
||||
|
||||
|
||||
def convert_value(value, unit):
    """Attempt to convert a value to a specified unit.

    Arguments:
        value: The value to convert
        unit: The target unit to convert to

    Returns:
        The converted value (ideally a pint.Quantity value)

    Raises:
        Exception if the value cannot be converted to the specified unit
    """
    registry = get_unit_registry()

    # Wrap the raw input as a pint Quantity
    quantity = registry.Quantity(value)

    if not unit:
        # No target unit requested - return the parsed quantity as-is
        return quantity

    if is_dimensionless(quantity):
        # Dimensionless values are re-interpreted directly in the target unit
        base_magnitude = quantity.to_base_units().magnitude
        return registry.Quantity(base_magnitude, unit)

    # Otherwise perform a true unit conversion (may raise)
    return quantity.to(unit)
|
||||
|
||||
|
||||
def convert_physical_value(value: str, unit: str = None, strip_units=True):
    """Validate that the provided value is a valid physical quantity.

    Arguments:
        value: Value to validate (str)
        unit: Optional unit to convert to, and validate against
        strip_units: If True, strip units from the returned value, and return only the dimension

    Raises:
        ValidationError: If the value is invalid or cannot be converted to the specified unit

    Returns:
        The converted quantity, in the specified units
    """
    ureg = get_unit_registry()

    # Check that the provided unit is available in the unit registry
    if unit:
        try:
            valid = unit in ureg
        except Exception as exc:
            # Membership test can itself raise for malformed unit strings
            valid = False

        if not valid:
            raise ValidationError(_(f'Invalid unit provided ({unit})'))

    # Preserve the raw input for error messages below
    original = str(value).strip()

    # Ensure that the value is a string
    value = str(value).strip() if value else ''
    unit = str(unit).strip() if unit else ''

    # Handle imperial length measurements (trailing ' = feet, trailing " = inches)
    if value.count("'") == 1 and value.endswith("'"):
        value = value.replace("'", ' feet')

    if value.count('"') == 1 and value.endswith('"'):
        value = value.replace('"', ' inches')

    # Error on blank values
    if not value:
        raise ValidationError(_('No value provided'))

    # Construct a list of values to "attempt" to convert
    attempts = [value]

    # Attempt to convert from engineering notation (e.g. '1K2' -> '1.2K')
    eng = from_engineering_notation(value)
    attempts.append(eng)

    # Append the unit, if provided
    # These are the "final" attempts to convert the value, and *must* appear after previous attempts
    if unit:
        attempts.append(f'{value}{unit}')
        attempts.append(f'{eng}{unit}')

    value = None

    # Run through the available "attempts", take the first successful result
    for attempt in attempts:
        try:
            value = convert_value(attempt, unit)
            break
        except Exception as exc:
            # Try the next candidate form
            value = None

    if value is None:
        if unit:
            raise ValidationError(_(f'Could not convert {original} to {unit}'))
        else:
            raise ValidationError(_('Invalid quantity supplied'))

    # Calculate the "magnitude" of the value, as a float
    # If the value is specified strangely (e.g. as a fraction or a dozen), this can cause issues
    # So, we ensure that it is converted to a floating point value
    # If we wish to return a "raw" value, some trickery is required
    try:
        if unit:
            magnitude = ureg.Quantity(value.to(ureg.Unit(unit))).magnitude
        else:
            magnitude = ureg.Quantity(value.to_base_units()).magnitude

        # Second pass collapses compound magnitudes (e.g. fractions) to a plain float
        magnitude = float(ureg.Quantity(magnitude).to_base_units().magnitude)
    except Exception as exc:
        raise ValidationError(_(f'Invalid quantity supplied ({exc})'))

    if strip_units:
        return magnitude
    elif unit or value.units:
        return ureg.Quantity(magnitude, unit or value.units)
    return ureg.Quantity(magnitude)
|
||||
|
||||
|
||||
def is_dimensionless(value):
    """Determine if the provided value is 'dimensionless'.

    A dimensionless value might look like:

    0.1
    1/2 dozen
    three thousand
    1.2 dozen
    (etc)
    """
    registry = get_unit_registry()

    # Normalise the input into a pint Quantity
    quantity = registry.Quantity(value)

    # Dimensionless either directly, or after reduction to base units
    return (
        quantity.units == registry.dimensionless
        or quantity.to_base_units().units == registry.dimensionless
    )
|
||||
@@ -0,0 +1,89 @@
|
||||
"""Code for managing email functionality in InvenTree."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.conf import settings
|
||||
from django.core import mail as django_mail
|
||||
|
||||
import InvenTree.ready
|
||||
import InvenTree.tasks
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def is_email_configured():
    """Check if email backend is configured.

    NOTE: This does not check if the configuration is valid!

    Returns:
        True if the required email settings appear to be configured, False otherwise.
    """
    configured = True
    testing = settings.TESTING

    if InvenTree.ready.isInTestMode():
        return False

    if InvenTree.ready.isImportingData():
        return False

    if not settings.EMAIL_HOST:
        configured = False

        # Display warning unless in test mode
        if not testing:  # pragma: no cover
            logger.debug('EMAIL_HOST is not configured')

    # Display warning unless in test mode
    if not settings.EMAIL_HOST_USER and not testing:  # pragma: no cover
        logger.debug('EMAIL_HOST_USER is not configured')

    # Display warning unless in test mode
    # Bug fix: this previously read 'and testing', which logged the warning
    # only *in* test mode - the inverse of the adjacent checks
    if not settings.EMAIL_HOST_PASSWORD and not testing:  # pragma: no cover
        logger.debug('EMAIL_HOST_PASSWORD is not configured')

    # Email sender must be configured
    if not settings.DEFAULT_FROM_EMAIL:
        configured = False

        if not testing:  # pragma: no cover
            logger.debug('DEFAULT_FROM_EMAIL is not configured')

    return configured
|
||||
|
||||
|
||||
def send_email(subject, body, recipients, from_email=None, html_message=None):
    """Send an email with the specified subject and body, to the specified recipients list.

    Arguments:
        subject: Email subject line
        body: Plain-text message body
        recipients: A single address (str) or a list of addresses
        from_email: Optional sender address (falls back to DEFAULT_FROM_EMAIL)
        html_message: Optional HTML version of the message body

    The actual delivery is offloaded to the background worker.
    """
    # Allow a single recipient to be passed as a plain string
    if isinstance(recipients, str):
        recipients = [recipients]

    # Uses the module-level 'InvenTree.ready' import; no local re-import needed
    if InvenTree.ready.isImportingData():
        # If we are importing data, don't send emails
        return

    # Call the sibling helper directly, rather than via the package attribute
    if not is_email_configured() and not settings.TESTING:
        # Email is not configured / enabled
        return

    # If a *from_email* is not specified, ensure that the default is set
    if not from_email:
        from_email = settings.DEFAULT_FROM_EMAIL

    # If we still don't have a valid from_email, then we can't send emails
    if not from_email:
        if settings.TESTING:
            from_email = 'from@test.com'
        else:
            logger.error('send_email failed: DEFAULT_FROM_EMAIL not specified')
            return

    # Offload the actual sending to the background worker
    InvenTree.tasks.offload_task(
        django_mail.send_mail,
        subject,
        body,
        from_email,
        recipients,
        fail_silently=False,
        html_message=html_message,
    )
|
||||
@@ -0,0 +1,129 @@
|
||||
"""Custom exception handling for the DRF API."""
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.db.utils import IntegrityError, OperationalError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import rest_framework.views as drfviews
|
||||
from error_report.models import Error
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import ValidationError as DRFValidationError
|
||||
from rest_framework.response import Response
|
||||
|
||||
import InvenTree.sentry
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def log_error(path, error_name=None, error_info=None, error_data=None):
    """Log an error to the database.

    - Uses python exception handling to extract error details

    Arguments:
        path: The 'path' (most likely a URL) associated with this error (optional)

    kwargs:
        error_name: The name of the error (optional, overrides 'kind')
        error_info: The error information (optional, overrides 'info')
        error_data: The error data (optional, overrides 'data')
    """
    # Extract details of the exception currently being handled
    # (all three are None if called outside an 'except' block)
    kind, info, data = sys.exc_info()

    # Check if the error is on the ignore list
    if kind in settings.IGNORED_ERRORS:
        return

    if error_name:
        kind = error_name
    else:
        # Fall back to the exception class name; 'kind' may be None
        kind = getattr(kind, '__name__', 'Unknown Error')

    if error_info:
        info = error_info

    if error_data:
        data = error_data
    else:
        # NOTE(review): 'kind' has already been replaced by a string here,
        # relying on the legacy format_exception signature ignoring it - confirm
        try:
            data = '\n'.join(traceback.format_exception(kind, info, data))
        except AttributeError:
            data = 'No traceback information available'

    # Log error to stderr
    logger.error(info)

    # Ensure the error information does not exceed field size limits
    path = path[:200]
    kind = kind[:128]

    try:
        Error.objects.create(kind=kind, info=info or '', data=data or '', path=path)
    except Exception:
        # Not much we can do if logging the error throws a db exception
        logger.exception('Failed to log exception to database')
|
||||
|
||||
|
||||
def exception_handler(exc, context):
    """Custom exception handler for DRF framework.

    Ref: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling
    Catches any errors not natively handled by DRF, and re-throws as an error DRF can handle.

    If sentry error reporting is enabled, we will also provide the original exception to sentry.io

    Arguments:
        exc: The exception instance being handled
        context: DRF handler context dict (includes the current 'request')

    Returns:
        A DRF Response object (possibly a generic 500 response).
    """
    response = None

    # Pass exception to sentry.io handler
    try:
        InvenTree.sentry.report_exception(exc)
    except Exception:
        # If sentry.io fails, we don't want to crash the server!
        pass

    # Catch any django validation error, and re-throw a DRF validation error
    if isinstance(exc, DjangoValidationError):
        exc = DRFValidationError(detail=serializers.as_serializer_error(exc))

    # Default to the built-in DRF exception handler
    response = drfviews.exception_handler(exc, context)

    if response is None:
        # DRF handler did not provide a default response for this exception

        if settings.TESTING:
            # If in TESTING mode, re-throw the exception for traceback
            raise exc
        elif settings.DEBUG:
            # If in DEBUG mode, provide error information in the response
            error_detail = str(exc)
        else:
            # In production, hide the details from API consumers
            error_detail = _('Error details can be found in the admin panel')

        response_data = {
            'error': type(exc).__name__,
            'error_class': str(type(exc)),
            'detail': error_detail,
            'path': context['request'].path,
            'status_code': 500,
        }

        response = Response(response_data, status=500)

        # Persist the unhandled error so it appears in the admin error log
        log_error(context['request'].path)

    if response is not None:
        # Convert errors returned under the label '__all__' to 'non_field_errors'
        if '__all__' in response.data:
            response.data['non_field_errors'] = response.data['__all__']
            del response.data['__all__']

    return response
|
||||
@@ -0,0 +1,116 @@
|
||||
"""Custom exchange backend which hooks into the InvenTree plugin system to fetch exchange rates from an external API."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.db.transaction import atomic
|
||||
|
||||
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
|
||||
from djmoney.contrib.exchange.models import ExchangeBackend, Rate
|
||||
|
||||
from common.settings import currency_code_default, currency_codes
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeExchange(SimpleExchangeBackend):
    """Backend for automatically updating currency exchange rates.

    Uses the plugin system to actually fetch the rates from an external API.
    """

    # Name recorded against the ExchangeBackend database entry
    name = 'InvenTreeExchange'

    def get_rates(self, **kwargs) -> dict:
        """Set the requested currency codes and get rates.

        Returns:
            A dict mapping currency code -> rate relative to the base currency,
            or an empty dict if no plugin is available or the update fails.
        """
        from common.models import InvenTreeSetting
        from plugin import registry

        base_currency = kwargs.get('base_currency', currency_code_default())
        symbols = kwargs.get('symbols', currency_codes())

        # Find the selected exchange rate plugin
        slug = InvenTreeSetting.get_setting('CURRENCY_UPDATE_PLUGIN', '', create=False)

        if slug:
            plugin = registry.get_plugin(slug)
        else:
            plugin = None

        if not plugin:
            # Find the first active currency exchange plugin
            plugins = registry.with_mixin('currencyexchange', active=True)

            if len(plugins) > 0:
                plugin = plugins[0]

        if not plugin:
            logger.warning(
                'No active currency exchange plugins found - skipping update'
            )
            return {}

        logger.info("Running exchange rate update using plugin '%s'", plugin.name)

        # Plugin found - run the update task
        try:
            rates = plugin.update_exchange_rates(base_currency, symbols)
        except Exception as exc:
            # Plugin code is third-party: never let it crash the update
            logger.exception('Exchange rate update failed: %s', exc)
            return {}

        if not rates:
            logger.warning(
                'Exchange rate update failed - no data returned from plugin %s', slug
            )
            return {}

        # Update exchange rates based on returned data
        if type(rates) is not dict:
            logger.warning(
                'Invalid exchange rate data returned from plugin %s (type %s)',
                slug,
                type(rates),
            )
            return {}

        # Ensure base currency is provided
        rates[base_currency] = 1.00

        return rates

    @atomic
    def update_rates(self, base_currency=None, **kwargs):
        """Call to update all exchange rates."""
        # NOTE(review): the backend record is created before the base_currency
        # default is applied, so it may briefly store None - confirm intended
        backend, _ = ExchangeBackend.objects.update_or_create(
            name=self.name, defaults={'base_currency': base_currency}
        )

        if base_currency is None:
            base_currency = currency_code_default()

        symbols = currency_codes()

        logger.info(
            'Updating exchange rates for %s (%s currencies)',
            base_currency,
            len(symbols),
        )

        # Fetch new rates from the backend
        # If the backend fails, the existing rates will not be updated
        rates = self.get_rates(base_currency=base_currency, symbols=symbols)

        if rates:
            # Clear out existing rates
            backend.clear_rates()

            Rate.objects.bulk_create([
                Rate(currency=currency, value=amount, backend=backend)
                for currency, amount in rates.items()
            ])
        else:
            logger.info(
                'No exchange rates returned from backend - currencies not updated'
            )

        logger.info('Updated exchange rates for %s', base_currency)
|
||||
@@ -0,0 +1,213 @@
|
||||
"""Custom fields used in InvenTree."""
|
||||
|
||||
import sys
|
||||
from decimal import Decimal
|
||||
|
||||
from django import forms
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from djmoney.forms.fields import MoneyField
|
||||
from djmoney.models.fields import MoneyField as ModelMoneyField
|
||||
from djmoney.models.validators import MinMoneyValidator
|
||||
from rest_framework.fields import URLField as RestURLField
|
||||
from rest_framework.fields import empty
|
||||
|
||||
import InvenTree.helpers
|
||||
|
||||
from .validators import AllowedURLValidator, allowable_url_schemes
|
||||
|
||||
|
||||
class InvenTreeRestURLField(RestURLField):
    """Custom field for DRF with custom scheme validators."""

    def __init__(self, **kwargs):
        """Initialise the field and install the allowed URL schemes."""
        # Enforce 'max length' parameter in form validation
        kwargs.setdefault('max_length', 200)

        super().__init__(**kwargs)

        # The URLValidator is the last validator installed by the parent class
        self.validators[-1].schemes = allowable_url_schemes()

    def run_validation(self, data=empty):
        """Override default validation behaviour for this field type.

        When 'strict' URL validation is disabled, a missing scheme is
        tolerated by prepending 'http://' before validation.
        """
        import common.models

        strict_urls = common.models.InvenTreeSetting.get_setting(
            'INVENTREE_STRICT_URLS', True, cache=False
        )

        if not strict_urls and data is not empty and '://' not in data:
            # Validate as if there were a schema provided
            data = 'http://' + data

        return super().run_validation(data=data)
|
||||
|
||||
|
||||
class InvenTreeURLField(models.URLField):
    """Custom URL field which has custom scheme validators."""

    # Restrict URLs to the schemes allowed by project configuration
    default_validators = [AllowedURLValidator()]

    def __init__(self, **kwargs):
        """Initialise the URL field with a fixed maximum length."""
        # Max length for InvenTreeURLField is pinned to 200 characters
        super().__init__(**{**kwargs, 'max_length': 200})
|
||||
|
||||
|
||||
def money_kwargs(**kwargs):
    """Returns the database settings for MoneyFields.

    Fills in project-wide defaults for any money-related kwargs the caller
    did not supply, and returns the completed kwargs dict.
    """
    from common.settings import currency_code_default, currency_code_mappings

    # Lazily-evaluated defaults: each factory runs only when the caller did
    # not supply the key, avoiding unnecessary settings lookups
    fallbacks = {
        'max_digits': lambda: 19,
        'decimal_places': lambda: 6,
        'currency_choices': currency_code_mappings,
        'default_currency': currency_code_default,
    }

    for key, produce in fallbacks.items():
        if key not in kwargs:
            kwargs[key] = produce()

    return kwargs
|
||||
|
||||
|
||||
class InvenTreeModelMoneyField(ModelMoneyField):
    """Custom MoneyField for clean migrations while using dynamic currency settings."""

    def __init__(self, **kwargs):
        """Overwrite default values and validators.

        - Strips currency information when generating migrations, so the
          migration files do not depend on run-time currency configuration
        - Adds a non-negative MinMoneyValidator unless the caller opts out
          via allow_negative=True or supplies explicit validators
        """
        # detect if creating migration
        if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
            # remove currency information for a clean migration
            kwargs['default_currency'] = ''
            kwargs['currency_choices'] = []

        kwargs = money_kwargs(**kwargs)

        # Set a minimum value validator
        validators = kwargs.get('validators', [])

        # 'allow_negative' is consumed here - it is not passed through to the parent field
        allow_negative = kwargs.pop('allow_negative', False)

        # If no validators are provided, add some "standard" ones
        if len(validators) == 0:
            if not allow_negative:
                validators.append(MinMoneyValidator(0))

        kwargs['validators'] = validators

        super().__init__(**kwargs)

    def formfield(self, **kwargs):
        """Override form class to use own function."""
        kwargs['form_class'] = InvenTreeMoneyField
        return super().formfield(**kwargs)

    def to_python(self, value):
        """Convert value to python type, rounding to the configured decimal places."""
        value = super().to_python(value)
        return round_decimal(value, self.decimal_places)

    def prepare_value(self, value):
        """Override the 'prepare_value' method, to remove trailing zeros when displaying.

        Why? It looks nice!
        """
        return round_decimal(value, self.decimal_places, normalize=True)
|
||||
|
||||
|
||||
class InvenTreeMoneyField(MoneyField):
    """Custom MoneyField for clean migrations while using dynamic currency settings."""

    def __init__(self, *args, **kwargs):
        """Override initial values with the real info from database."""
        # Populate currency defaults (choices, default currency, precision)
        super().__init__(*args, **money_kwargs(**kwargs))
|
||||
|
||||
|
||||
class DatePickerFormField(forms.DateField):
    """Custom date-picker field."""

    def __init__(self, **kwargs):
        """Set up custom values.

        Recognized kwargs (all optional):
            help_text: Help text for the field (defaults to 'Enter date')
            label: Field label
            required: Whether input is required (default False)
            initial: Initial value for the field
        """
        # Render as an HTML5 date-input, which provides a native date picker
        widget = forms.DateInput(attrs={'type': 'date'})

        # Use super() rather than calling the parent initializer directly
        super().__init__(
            required=kwargs.get('required', False),
            initial=kwargs.get('initial', None),
            help_text=kwargs.get('help_text', _('Enter date')),
            widget=widget,
            label=kwargs.get('label', None),
        )
|
||||
|
||||
|
||||
def round_decimal(value, places, normalize=False):
    """Round value to the specified number of places.

    Arguments:
        value: The value to round (non-numeric values are returned unchanged)
        places: Number of decimal places to round to
        normalize: If True, also strip trailing zeroes from the result

    Returns:
        The rounded value for Decimal / float inputs; any other value is
        passed through untouched.
    """
    # isinstance is the idiomatic type check (and also covers subclasses)
    if isinstance(value, (Decimal, float)):
        value = round(value, places)

        if normalize:
            # Remove any trailing zeroes
            value = InvenTree.helpers.normalize(value)

    return value
|
||||
|
||||
|
||||
class RoundingDecimalFormField(forms.DecimalField):
    """Custom FormField that automatically rounds inputs."""

    def to_python(self, value):
        """Convert the submitted value, rounding to the configured precision."""
        converted = super().to_python(value)
        return round_decimal(converted, self.decimal_places)

    def prepare_value(self, value):
        """Round for display, trimming trailing zeros.

        Why? It looks nice!
        """
        return round_decimal(value, self.decimal_places, normalize=True)
|
||||
|
||||
|
||||
class RoundingDecimalField(models.DecimalField):
    """Custom Field that automatically rounds inputs."""

    def to_python(self, value):
        """Convert the stored value, rounding to the configured precision."""
        converted = super().to_python(value)
        return round_decimal(converted, self.decimal_places)

    def formfield(self, **kwargs):
        """Return a Field instance for this field, using the rounding form field."""
        kwargs['form_class'] = RoundingDecimalFormField
        return super().formfield(**kwargs)
|
||||
|
||||
|
||||
class InvenTreeNotesField(models.TextField):
    """Custom implementation of a 'notes' field."""

    # Maximum character limit for the various 'notes' fields
    NOTES_MAX_LENGTH = 50000

    def __init__(self, **kwargs):
        """Configure default initial values for this field."""
        # Notes fields are always optional, with a fixed maximum length
        kwargs.update(
            max_length=self.NOTES_MAX_LENGTH,
            verbose_name=_('Notes'),
            blank=True,
            null=True,
        )

        super().__init__(**kwargs)
|
||||
@@ -0,0 +1,8 @@
|
||||
"""Helpers for file handling in InvenTree."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
TEMPLATES_DIR = Path(__file__).parent.parent
|
||||
MEDIA_STORAGE_DIR = settings.MEDIA_ROOT
|
||||
@@ -0,0 +1,168 @@
|
||||
"""General filters for InvenTree."""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.timezone import make_aware
|
||||
|
||||
from django_filters import rest_framework as rest_filters
|
||||
from rest_framework import filters
|
||||
|
||||
import InvenTree.helpers
|
||||
|
||||
|
||||
class InvenTreeDateFilter(rest_filters.DateFilter):
    """Custom DateFilter class which handles timezones correctly."""

    def filter(self, qs, value):
        """Promote the bare date to an aware datetime before filtering, when USE_TZ is set."""
        if value is not None and settings.USE_TZ:
            current_tz = timezone.get_current_timezone()
            # Midnight of the given date, localised to the active timezone
            aware_value = make_aware(
                datetime(value.year, value.month, value.day), current_tz, True
            )
            return super().filter(qs, aware_value)

        return super().filter(qs, value)
|
||||
|
||||
|
||||
class InvenTreeSearchFilter(filters.SearchFilter):
    """Custom search filter which allows adjusting of search terms dynamically."""

    def get_search_fields(self, view, request):
        """Return a set of search fields for the request, adjusted based on request params.

        The following query params are available to 'augment' the search (in decreasing order of priority)
        - search_regex: If True, search is performed on 'regex' comparison
        """
        use_regex = InvenTree.helpers.str2bool(
            request.query_params.get('search_regex', False)
        )

        base_fields = super().get_search_fields(view, request)

        if not base_fields:
            return []

        # A '$' prefix requests regex matching on that field
        return [('$' + field) if use_regex else field for field in base_fields]

    def get_search_terms(self, request):
        """Return the search terms for this search request.

        Depending on the request parameters, we may "augment" these somewhat
        """
        match_whole = InvenTree.helpers.str2bool(
            request.query_params.get('search_whole', False)
        )

        cleaned = []

        raw_terms = super().get_search_terms(request) or []

        for raw in raw_terms:
            term = raw.strip()

            if not term:
                # Ignore blank inputs
                continue

            if match_whole:
                # Wrap the search term to enable word-boundary matching
                term = r'\y' + term + r'\y'

            cleaned.append(term)

        return cleaned
|
||||
|
||||
|
||||
class InvenTreeOrderingFilter(filters.OrderingFilter):
    """Custom OrderingFilter class which allows aliased filtering of related fields.

    To use, simply specify this filter in the "filter_backends" section.

    filter_backends = [
        InvenTreeOrderingFilter,
    ]

    Then, specify a ordering_field_aliases attribute:

    ordering_field_aliases = {
        'name': 'part__part__name',
        'SKU': 'part__SKU',
    }
    """

    def get_ordering(self, request, queryset, view):
        """Override ordering for supporting aliases."""
        ordering = super().get_ordering(request, queryset, view)

        aliases = getattr(view, 'ordering_field_aliases', None)

        # Attempt to map ordering fields based on provided aliases
        if ordering is not None and aliases is not None:
            """Ordering fields should be mapped to separate fields."""

            ordering_initial = ordering
            ordering = []

            for field in ordering_initial:
                # A leading '-' requests descending order; strip it while aliasing
                reverse = field.startswith('-')

                if reverse:
                    field = field[1:]

                # Are aliases defined for this field?
                if field in aliases:
                    alias = aliases[field]
                else:
                    alias = field

                """
                Potentially, a single field could be "aliased" to multiple field,

                (For example to enforce a particular ordering sequence)

                e.g. to filter first by the integer value...

                ordering_field_aliases = {
                    "reference": ["integer_ref", "reference"]
                }

                """

                if type(alias) is str:
                    alias = [alias]
                elif type(alias) in [list, tuple]:
                    pass
                else:
                    # Unsupported alias type
                    continue

                for a in alias:
                    # Re-apply the descending prefix to each aliased field
                    if reverse:
                        a = '-' + a

                    ordering.append(a)

        return ordering
|
||||
|
||||
|
||||
# Filter backends for API endpoints which support filtering, search and ordering
SEARCH_ORDER_FILTER = [
    rest_filters.DjangoFilterBackend,
    InvenTreeSearchFilter,
    filters.OrderingFilter,
]

# As above, but uses InvenTreeOrderingFilter to support 'ordering_field_aliases'
SEARCH_ORDER_FILTER_ALIAS = [
    rest_filters.DjangoFilterBackend,
    InvenTreeSearchFilter,
    InvenTreeOrderingFilter,
]

# Filter backends for endpoints which support filtering and ordering (no search)
ORDER_FILTER = [rest_filters.DjangoFilterBackend, filters.OrderingFilter]
|
||||
@@ -0,0 +1,219 @@
|
||||
"""Custom string formatting functions and helpers."""
|
||||
|
||||
import re
|
||||
import string
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import translation
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from babel import Locale
|
||||
from babel.numbers import parse_pattern
|
||||
from djmoney.money import Money
|
||||
|
||||
|
||||
def parse_format_string(fmt_string: str) -> dict:
    """Extract formatting information from the provided format string.

    Returns a dict which maps each named format group to structured
    information about that group (its name and any literal prefix text).

    Raises:
        ValueError: The format string contains a duplicate named group
    """
    info = {}
    seen = set()

    for prefix, name, _spec, _conv in string.Formatter().parse(fmt_string):
        # Groups without a name (trailing literal text) are skipped
        if not name:
            continue

        # Each named group may only appear once in the pattern
        if name in seen:
            raise ValueError(f"Duplicate group '{name}'")

        seen.add(name)
        info[name] = {'format': name, 'prefix': prefix}

    return info
|
||||
|
||||
|
||||
def construct_format_regex(fmt_string: str) -> str:
    r"""Construct a regular expression based on a provided format string.

    This function turns a python format string into a regular expression,
    which can be used for two purposes:

    - Ensure that a particular string matches the specified format
    - Extract named variables from a matching string

    This function also provides support for wildcard characters:

    - '?' provides single character matching; is converted to a '.' (period) for regex
    - '#' provides single digit matching; is converted to '\d'

    Args:
        fmt_string: A typical format string e.g. "PO-???-{ref:04d}"

    Returns:
        str: A regular expression pattern e.g. ^PO\-...\-(?P<ref>\d+)$

    Raises:
        ValueError: Format string is invalid
    """
    # Characters which must be escaped before insertion into the regex.
    # NOTE(review): '*', '[', ']' and '\' are not escaped here - confirm acceptable
    special_chars = [
        '+', '-', '.', '{', '}', '(', ')', '^', '$', '~', '!', '@', ':', ';', '|',
        "'", '"',
    ]

    pattern = '^'

    for group in string.Formatter().parse(fmt_string):
        prefix = group[0]  # Prefix (literal text appearing before this group)
        name = group[1]  # Name of this format variable
        _fmt = group[2]  # Format specifier e.g :04d

        # Escape any special regex characters in the literal prefix
        for ch in special_chars:
            prefix = prefix.replace(ch, '\\' + ch)

        # Replace ? with single-character match
        prefix = prefix.replace('?', '.')

        # Replace # with single-digit match
        prefix = prefix.replace('#', r'\d')

        pattern += prefix

        # Add a named capture group for the format entry
        if name:
            # Integer format specifiers (e.g. :04d) only match digits.
            # Fix: use a raw string - '\d' was an invalid (deprecated) escape
            # sequence which raises a SyntaxWarning on modern Python.
            c = r'\d' if _fmt.endswith('d') else '.'

            # Specify width
            # TODO: Introspect required width
            w = '+'

            pattern += f'(?P<{name}>{c}{w})'

    pattern += '$'

    return pattern
|
||||
|
||||
|
||||
def validate_string(value: str, fmt_string: str) -> bool:
    """Validate that the provided string matches the specified format.

    Args:
        value: The string to be tested e.g. 'SO-1234-ABC',
        fmt_string: The required format e.g. 'SO-{ref}-???',

    Returns:
        bool: True if the value matches the required format, else False

    Raises:
        ValueError: The provided format string is invalid
    """
    # Fix: the return annotation previously said 'str', but this function
    # returns a boolean (as documented).
    # Note: construct_format_regex raises ValueError for an invalid format string
    pattern = construct_format_regex(fmt_string)

    return re.match(pattern, value) is not None
|
||||
|
||||
|
||||
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
    """Extract a named value from the provided string, given the provided format string.

    Args:
        name: Name of group to extract e.g. 'ref'
        value: Raw string e.g. 'PO-ABC-1234'
        fmt_string: Format pattern e.g. 'PO-???-{ref}

    Returns:
        str: String value of the named group

    Raises:
        ValueError: format string is incorrectly specified, or provided value does not match format string
        NameError: named value does not exist in the format string
        IndexError: named value could not be found in the provided entry
    """
    # Determine which named groups exist in the format pattern
    groups = parse_format_string(fmt_string)

    if name not in groups:
        raise NameError(_(f"Value '{name}' does not appear in pattern format"))

    # Build a matching regex for the pattern
    # Note: raises a ValueError if 'fmt_string' is incorrectly specified
    pattern = construct_format_regex(fmt_string)

    # Run the regex matcher against the raw string
    match = re.match(pattern, value)

    if not match:
        raise ValueError(
            _('Provided value does not match required pattern: ') + fmt_string
        )

    # Note: raises an IndexError if the named group was not matched
    return match.group(name)
|
||||
|
||||
|
||||
def format_money(
    money: Money,
    decimal_places: int = None,
    format: str = None,
    include_symbol: bool = True,
) -> str:
    """Format money object according to the currently set locale.

    Args:
        money (Money): The money object to format
        decimal_places (int): Number of decimal places to use (None = currency default)
        format (str): Format pattern according LDML / the babel format pattern syntax (https://babel.pocoo.org/en/latest/numbers.html)
        include_symbol (bool): If True, include the currency code in the output

    Returns:
        str: The formatted string

    Raises:
        ValueError: format string is incorrectly specified
    """
    # Fix: the original expression 'None and translation.get_language() or ...'
    # always short-circuited to settings.LANGUAGE_CODE - the active user
    # language was never consulted. Behavior is preserved here; confirm
    # whether a per-request locale was actually intended.
    language = settings.LANGUAGE_CODE
    locale = Locale.parse(translation.to_locale(language))

    if format:
        pattern = parse_pattern(format)
    else:
        pattern = locale.currency_formats['standard']
        if decimal_places is not None:
            # Force a fixed number of fractional digits
            pattern.frac_prec = (decimal_places, decimal_places)

    result = pattern.apply(
        money.amount,
        locale,
        currency=money.currency.code if include_symbol else '',
        currency_digits=decimal_places is None,
        decimal_quantization=decimal_places is not None,
    )

    return result
|
||||
@@ -0,0 +1,395 @@
|
||||
"""Helper forms which subclass Django forms to provide additional functionality."""
|
||||
|
||||
import logging
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import Group, User
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from allauth.account.adapter import DefaultAccountAdapter
|
||||
from allauth.account.forms import LoginForm, SignupForm, set_form_field_order
|
||||
from allauth.core.exceptions import ImmediateHttpResponse
|
||||
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
|
||||
from allauth_2fa.adapter import OTPAdapter
|
||||
from allauth_2fa.utils import user_has_valid_totp_device
|
||||
from crispy_forms.bootstrap import AppendedText, PrependedAppendedText, PrependedText
|
||||
from crispy_forms.helper import FormHelper
|
||||
from crispy_forms.layout import Field, Layout
|
||||
from dj_rest_auth.registration.serializers import RegisterSerializer
|
||||
from rest_framework import serializers
|
||||
|
||||
import InvenTree.helpers_model
|
||||
import InvenTree.sso
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.exceptions import log_error
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class HelperForm(forms.ModelForm):
    """Provides simple integration of crispy_forms extension.

    Subclasses may decorate individual fields with prefix / suffix icons
    (font-awesome 'fa-*' names, or raw text) and placeholder text via the
    class-level dicts below.
    """

    # Custom field decorations can be specified here, per form class.
    # Keys are field names; values are icon names (e.g. 'fa-user') or raw text.
    field_prefix = {}
    field_suffix = {}
    field_placeholder = {}

    def __init__(self, *args, **kwargs):
        """Setup layout."""
        # NOTE(review): super(forms.ModelForm, self) skips ModelForm in the
        # MRO and initializes the next base class directly - confirm intentional
        super(forms.ModelForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()

        # Do not render the surrounding <form> tag automatically
        self.helper.form_tag = False
        self.helper.form_show_errors = True

        """
        Create a default 'layout' for this form.
        Ref: https://django-crispy-forms.readthedocs.io/en/latest/layouts.html
        This is required to do fancy things later (like adding PrependedText, etc).

        Simply create a 'blank' layout for each available field.
        """

        self.rebuild_layout()

    def rebuild_layout(self):
        """Build crispy layout out of current fields.

        Wraps each field with PrependedText / AppendedText / PrependedAppendedText
        decorations where a prefix / suffix has been configured for it.
        """
        layouts = []

        for field in self.fields:
            prefix = self.field_prefix.get(field, None)
            suffix = self.field_suffix.get(field, None)
            placeholder = self.field_placeholder.get(field, '')

            # Look for font-awesome icons
            if prefix and prefix.startswith('fa-'):
                prefix = f"<i class='fas {prefix}'/>"

            if suffix and suffix.startswith('fa-'):
                suffix = f"<i class='fas {suffix}'/>"

            if prefix and suffix:
                layouts.append(
                    Field(
                        PrependedAppendedText(
                            field,
                            prepended_text=prefix,
                            appended_text=suffix,
                            placeholder=placeholder,
                        )
                    )
                )

            elif prefix:
                layouts.append(
                    Field(PrependedText(field, prefix, placeholder=placeholder))
                )

            elif suffix:
                layouts.append(
                    Field(AppendedText(field, suffix, placeholder=placeholder))
                )

            else:
                layouts.append(Field(field, placeholder=placeholder))

        self.helper.layout = Layout(*layouts)
|
||||
|
||||
|
||||
class EditUserForm(HelperForm):
    """Form for editing user information.

    Only the first / last name fields are exposed for editing.
    """

    class Meta:
        """Metaclass options."""

        model = User
        fields = ['first_name', 'last_name']
|
||||
|
||||
|
||||
class SetPasswordForm(HelperForm):
    """Form for setting user password.

    Presents a new password (entered twice) and an optional 'old password'
    field.
    """

    class Meta:
        """Metaclass options."""

        model = User
        fields = ['enter_password', 'confirm_password', 'old_password']

    # New password (minimum 8 characters)
    enter_password = forms.CharField(
        max_length=100,
        min_length=8,
        required=True,
        initial='',
        widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
        label=_('Enter password'),
        help_text=_('Enter new password'),
    )

    # Confirmation of the new password
    # NOTE(review): the match against 'enter_password' is not enforced in this
    # class - presumably validated by the view; confirm
    confirm_password = forms.CharField(
        max_length=100,
        min_length=8,
        required=True,
        initial='',
        widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
        label=_('Confirm password'),
        help_text=_('Confirm new password'),
    )

    # Current password - not required in all flows
    old_password = forms.CharField(
        label=_('Old password'),
        strip=False,
        required=False,
        widget=forms.PasswordInput(
            attrs={'autocomplete': 'current-password', 'autofocus': True}
        ),
    )
|
||||
|
||||
|
||||
# override allauth
|
||||
class CustomLoginForm(LoginForm):
    """Custom login form to override default allauth behaviour."""

    def login(self, request, redirect_url=None):
        """Perform the login action, first verifying that a valid user was supplied.

        If no user is available, redirect back to the login page instead of
        proceeding with the default allauth login flow.
        """
        if self.user:
            # Defer to the default allauth login behaviour
            return super().login(request, redirect_url)

        # No user supplied - redirect to the login page
        return HttpResponseRedirect(reverse('account_login'))
|
||||
|
||||
|
||||
class CustomSignupForm(SignupForm):
    """Override to use dynamic settings."""

    def __init__(self, *args, **kwargs):
        """Check settings to influence which fields are needed."""
        # Whether an email address is mandatory is controlled by a global setting
        kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')

        super().__init__(*args, **kwargs)

        # Optionally require the email address to be entered twice
        if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
            self.fields['email2'] = forms.EmailField(
                label=_('Email (again)'),
                widget=forms.TextInput(
                    attrs={
                        'type': 'email',
                        'placeholder': _('Email address confirmation'),
                    }
                ),
            )

        # Optionally drop the second password field
        if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
            self.fields.pop('password2')

        # Reorder fields into a consistent display order
        set_form_field_order(
            self, ['username', 'email', 'email2', 'password1', 'password2']
        )

    def clean(self):
        """Make sure the supplied emails match if enabled in settings."""
        cleaned_data = super().clean()

        # Only compare when the second email field is in use
        if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
            email = cleaned_data.get('email')
            email2 = cleaned_data.get('email2')
            if (email and email2) and email != email2:
                self.add_error('email2', _('You must type the same email each time.'))

        return cleaned_data
|
||||
|
||||
|
||||
def registration_enabled():
    """Determine whether user registration is enabled.

    Registration requires that either local registration or SSO registration
    is switched on, AND that an email host is configured.
    """
    enabled = (
        InvenTreeSetting.get_setting('LOGIN_ENABLE_REG')
        or InvenTree.sso.registration_enabled()
    )

    if not enabled:
        return False

    if settings.EMAIL_HOST:
        return True

    # Registration requested, but cannot work without a mail backend
    logger.error(
        'Registration cannot be enabled, because EMAIL_HOST is not configured.'
    )
    return False
|
||||
|
||||
|
||||
class RegistratonMixin:
    """Mixin to check if registration should be enabled."""

    def is_open_for_signup(self, request, *args, **kwargs):
        """Check if signup is enabled in settings.

        Configure the class variable `REGISTRATION_SETTING` to set which setting should be used, default: `LOGIN_ENABLE_REG`.
        """
        if registration_enabled():
            return super().is_open_for_signup(request, *args, **kwargs)
        return False

    def clean_email(self, email):
        """Check if the mail is valid to the pattern in LOGIN_SIGNUP_MAIL_RESTRICTION (if enabled in settings)."""
        mail_restriction = InvenTreeSetting.get_setting(
            'LOGIN_SIGNUP_MAIL_RESTRICTION', None
        )
        if not mail_restriction:
            return super().clean_email(email)

        split_email = email.split('@')
        if len(split_email) != 2:
            logger.error('The user %s has an invalid email address', email)
            raise forms.ValidationError(
                _('The provided primary email address is not valid.')
            )

        # Each restriction entry must be a domain of the form '@example.com'
        mailoptions = mail_restriction.split(',')
        for option in mailoptions:
            if not option.startswith('@'):
                log_error('LOGIN_SIGNUP_MAIL_RESTRICTION is not configured correctly')
                raise forms.ValidationError(
                    _('The provided primary email address is not valid.')
                )
            elif split_email[1] == option[1:]:
                # Domain is in the approved list
                return super().clean_email(email)

        logger.info('The provided email domain for %s is not approved', email)
        raise forms.ValidationError(_('The provided email domain is not approved.'))

    def save_user(self, request, user, form, commit=True):
        """Save the new user, adding them to the default signup group (if configured)."""
        # Create the user
        user = super().save_user(request, user, form)

        # Check if a default group is set in settings
        start_group = InvenTreeSetting.get_setting('SIGNUP_GROUP')
        if start_group:
            try:
                group = Group.objects.get(id=start_group)
                user.groups.add(group)
            except Group.DoesNotExist:
                # Fix: original call passed 'start_group' with no matching '%s'
                # placeholder in the message, which breaks log formatting
                logger.exception(
                    'The setting `SIGNUP_GROUP` contains a non-existent group: %s',
                    start_group,
                )
        user.save()
        return user
|
||||
|
||||
|
||||
class CustomUrlMixin:
    """Mixin to set urls."""

    def get_email_confirmation_url(self, request, emailconfirmation):
        """Custom email confirmation (activation) url.

        Resolves the relative confirmation URL and converts it into an
        absolute URL for use in outgoing emails.
        """
        relative_url = reverse('account_confirm_email', args=[emailconfirmation.key])

        return InvenTree.helpers_model.construct_absolute_url(relative_url)
|
||||
|
||||
|
||||
class CustomAccountAdapter(
    CustomUrlMixin, RegistratonMixin, OTPAdapter, DefaultAccountAdapter
):
    """Override of adapter to use dynamic settings."""

    def send_mail(self, template_prefix, email, context):
        """Only send mail if backend configured.

        Returns False (without raising) if no mail backend is configured, or
        if sending fails for any reason.
        """
        if not settings.EMAIL_HOST:
            # No mail backend configured - silently skip sending
            return False

        try:
            return super().send_mail(template_prefix, email, context)
        except Exception:
            # An exception occurred while attempting to send email
            # Log it (for admin users) and return silently
            log_error('account email')
            return False

    def get_email_confirmation_url(self, request, emailconfirmation):
        """Construct the email confirmation url."""
        from InvenTree.helpers_model import construct_absolute_url

        relative = super().get_email_confirmation_url(request, emailconfirmation)
        return construct_absolute_url(relative)
|
||||
|
||||
|
||||
class CustomSocialAccountAdapter(
    CustomUrlMixin, RegistratonMixin, DefaultSocialAccountAdapter
):
    """Override of adapter to use dynamic settings."""

    def is_auto_signup_allowed(self, request, sociallogin):
        """Check if auto signup is enabled in settings."""
        if InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO', True):
            return super().is_auto_signup_allowed(request, sociallogin)
        return False

    # from OTPAdapter
    def has_2fa_enabled(self, user):
        """Returns True if the user has 2FA configured."""
        return user_has_valid_totp_device(user)

    def login(self, request, user):
        """Ensure user is sent to 2FA before login if enabled."""
        # Require two-factor authentication if it has been configured.
        if self.has_2fa_enabled(user):
            # Cast to string for the case when this is not a JSON serializable
            # object, e.g. a UUID.
            request.session['allauth_2fa_user_id'] = str(user.id)

            redirect_url = reverse('two-factor-authenticate')
            # Add GET parameters to the URL if they exist.
            if request.GET:
                redirect_url += '?' + urlencode(request.GET)

            # Abort the normal login flow and redirect to the 2FA page instead
            raise ImmediateHttpResponse(response=HttpResponseRedirect(redirect_url))

        # Otherwise defer to the original allauth adapter.
        return super().login(request, user)

    def authentication_error(
        self, request, provider_id, error=None, exception=None, extra_context=None
    ):
        """Callback method for authentication errors.

        Records the error details in the error log so that an admin user can
        inspect what went wrong.
        """
        # Fall back to error details supplied via the query string
        if not error:
            error = request.GET.get('error', None)

        if not exception:
            exception = request.GET.get('error_description', None)

        path = request.path or 'sso'

        # Log the error to the database
        log_error(path, error_name=error, error_data=exception)
        logger.error("SSO error for provider '%s' - check admin error log", provider_id)
|
||||
|
||||
|
||||
# override dj-rest-auth
|
||||
class CustomRegisterSerializer(RegisterSerializer):
    """Override of serializer to use dynamic settings."""

    # Declare an explicit email field; required-ness is passed via kwargs below
    email = serializers.EmailField()

    def __init__(self, instance=None, data=..., **kwargs):
        """Check settings to influence which fields are needed.

        Note: the Ellipsis default for 'data' mirrors the upstream serializer signature.
        """
        kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')
        super().__init__(instance, data, **kwargs)

    def save(self, request):
        """Override to check if registration is open.

        Raises:
            ValidationError: If user registration is currently disabled
        """
        if registration_enabled():
            return super().save(request)
        raise forms.ValidationError(_('Registration is disabled.'))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,106 @@
|
||||
"""Provides helper mixins that are used throughout the InvenTree project."""
|
||||
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from plugin import registry as plg_registry
|
||||
|
||||
|
||||
class ClassValidationMixin:
    """Mixin to validate class attributes and overrides.

    Class attributes:
        required_attributes: List of class attributes that need to be defined
        required_overrides: List of functions that need an override; a nested list means at least one of them needs an override

    Example:
    ```py
    class Parent(ClassValidationMixin):
        NAME: str
        def test(self):
            pass

        required_attributes = ["NAME"]
        required_overrides = [test]

    class MyClass(Parent):
        pass

    myClass = MyClass()
    myClass.validate() # raises NotImplementedError
    ```
    """

    required_attributes = []
    required_overrides = []

    @classmethod
    def validate(cls):
        """Validate the class against the required attributes/overrides."""

        def is_attr_missing(key):
            # An attribute counts as missing if absent, or set to an empty string
            return not hasattr(cls, key) or getattr(cls, key) == ''

        def is_override_missing(base):
            # A nested list means "at least one of these must be overridden"
            if isinstance(base, list):
                return all(is_override_missing(item) for item in base)

            # Missing when the class still exposes the base implementation
            return getattr(cls, base.__name__, None) == base

        missing_attrs = [a for a in cls.required_attributes if is_attr_missing(a)]
        missing_funcs = [f for f in cls.required_overrides if is_override_missing(f)]

        problems = []

        if missing_attrs:
            problems.append(
                f"did not provide the following attributes: {', '.join(missing_attrs)}"
            )

        if missing_funcs:
            names = []
            for entry in missing_funcs:
                if isinstance(entry, list):
                    names.append(
                        'one of ' + ' or '.join(fn.__name__ for fn in entry)
                    )
                else:
                    names.append(entry.__name__)

            problems.append(
                f"did not override the required attributes: {', '.join(names)}"
            )

        if problems:
            raise NotImplementedError(f"'{cls}' " + ' and '.join(problems))
|
||||
|
||||
|
||||
class ClassProviderMixin:
    """Mixin to get metadata about a class itself, e.g. the plugin that provided that class."""

    @classmethod
    def get_provider_file(cls):
        """Return the path of the file that contains this class definition."""
        return inspect.getfile(cls)

    @classmethod
    def get_provider_plugin(cls):
        """Return the plugin that contains this class definition, otherwise None.

        NOTE(review): compares the plugin 'package_path' against this class's
        '__module__' string - confirm both are dotted module paths.
        """
        for plg in plg_registry.plugins.values():
            if plg.package_path == cls.__module__:
                return plg

    @classmethod
    def get_is_builtin(cls):
        """Return True if this class is built into the InvenTree source code."""
        try:
            Path(cls.get_provider_file()).relative_to(settings.BASE_DIR)
            return True
        except ValueError:
            # Path(...).relative_to throws an ValueError if its not relative to the InvenTree source base dir
            return False
|
||||
@@ -0,0 +1,364 @@
|
||||
"""Provides helper functions used throughout the InvenTree project that access the database."""
|
||||
|
||||
import io
|
||||
import logging
|
||||
from decimal import Decimal
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.validators import URLValidator
|
||||
from django.db.utils import OperationalError, ProgrammingError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import requests
|
||||
from djmoney.contrib.exchange.models import convert_money
|
||||
from djmoney.money import Money
|
||||
from PIL import Image
|
||||
|
||||
import common.models
|
||||
import InvenTree
|
||||
import InvenTree.helpers_model
|
||||
import InvenTree.version
|
||||
from common.notifications import (
|
||||
InvenTreeNotificationBodies,
|
||||
NotificationBody,
|
||||
trigger_notification,
|
||||
)
|
||||
from InvenTree.format import format_money
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def getSetting(key, backup_value=None):
    """Shortcut for reading a setting value from the database.

    Args:
        key: The name of the setting to read
        backup_value: Fallback value returned if the setting is not defined

    Note: the camelCase name is retained for backwards compatibility with existing callers.
    """
    return common.models.InvenTreeSetting.get_setting(key, backup_value=backup_value)
|
||||
|
||||
|
||||
def get_base_url(request=None):
    """Return the base URL for the InvenTree server.

    The base URL is determined in the following order of decreasing priority:

    1. If a request object is provided, use the request URL
    2. Multi-site is enabled, and the current site has a valid URL
    3. If settings.SITE_URL is set (e.g. in the Django settings), use that
    4. If the InvenTree setting INVENTREE_BASE_URL is set, use that

    Returns an empty string if no base URL could be determined.
    """
    # Check if a request is provided
    if request:
        return request.build_absolute_uri('/')

    # Check if multi-site is enabled
    try:
        from django.contrib.sites.models import Site

        # NOTE(review): this returns the bare site 'domain' (no scheme),
        # unlike the other branches which return full URLs - confirm intended
        return Site.objects.get_current().domain
    except (ImportError, RuntimeError):
        pass

    # Check if a global site URL is provided
    if site_url := getattr(settings, 'SITE_URL', None):
        return site_url

    # Check if a global InvenTree setting is provided
    # The database may not be ready (e.g. during migrations) - ignore such errors
    try:
        if site_url := common.models.InvenTreeSetting.get_setting(
            'INVENTREE_BASE_URL', create=False, cache=False
        ):
            return site_url
    except (ProgrammingError, OperationalError):
        pass

    # No base URL available
    return ''
|
||||
|
||||
|
||||
def construct_absolute_url(*arg, base_url=None, request=None):
    """Construct (or attempt to construct) an absolute URL from a relative URL.

    Args:
        *arg: The relative URL path components to join
        base_url: The base URL to use for the construction (if not provided, will attempt to determine from settings)
        request: The request object to use for the construction (optional)
    """
    # Join all positional parts into a single relative URL
    relative_url = '/'.join(arg)

    # Fall back to the configured site / server base URL when none is supplied
    effective_base = base_url or get_base_url(request=request)

    return urljoin(effective_base, relative_url)
|
||||
|
||||
|
||||
def download_image_from_url(remote_url, timeout=2.5):
    """Download an image file from a remote URL.

    This is a potentially dangerous operation, so we must perform some checks:
    - The remote URL is available
    - The Content-Length is provided, and is not too large
    - The file is a valid image file

    The maximum allowed size is read from the INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE
    setting (interpreted here as a value in MB).

    Arguments:
        remote_url: The remote URL to retrieve image
        timeout: Connection timeout in seconds (default = 2.5)

    Returns:
        An in-memory PIL image file, if the download was successful

    Raises:
        requests.exceptions.ConnectionError: Connection could not be established
        requests.exceptions.Timeout: Connection timed out
        requests.exceptions.HTTPError: Server responded with invalid response code
        ValueError: Server responded with invalid 'Content-Length' value
        TypeError: Response is not a valid image
    """
    # Check that the provided URL at least looks valid
    validator = URLValidator()
    validator(remote_url)

    # Calculate maximum allowable image size (in bytes)
    max_size = (
        int(
            common.models.InvenTreeSetting.get_setting(
                'INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE'
            )
        )
        * 1024
        * 1024
    )

    # Add user specified user-agent to request (if specified)
    user_agent = common.models.InvenTreeSetting.get_setting(
        'INVENTREE_DOWNLOAD_FROM_URL_USER_AGENT'
    )
    if user_agent:
        headers = {'User-Agent': user_agent}
    else:
        headers = None

    try:
        # Stream the response so the size check below happens before full download
        response = requests.get(
            remote_url,
            timeout=timeout,
            allow_redirects=True,
            stream=True,
            headers=headers,
        )
        # Throw an error if anything goes wrong
        response.raise_for_status()
    except requests.exceptions.ConnectionError as exc:
        raise Exception(_('Connection error') + f': {str(exc)}')
    except requests.exceptions.Timeout as exc:
        raise exc
    except requests.exceptions.HTTPError:
        raise requests.exceptions.HTTPError(
            _('Server responded with invalid status code') + f': {response.status_code}'
        )
    except Exception as exc:
        raise Exception(_('Exception occurred') + f': {str(exc)}')

    if response.status_code != 200:
        raise Exception(
            _('Server responded with invalid status code') + f': {response.status_code}'
        )

    # Reject responses which declare an over-sized Content-Length up front
    try:
        content_length = int(response.headers.get('Content-Length', 0))
    except ValueError:
        raise ValueError(_('Server responded with invalid Content-Length value'))

    if content_length > max_size:
        raise ValueError(_('Image size is too large'))

    # Download the file, ensuring we do not exceed the reported size
    file = io.BytesIO()

    dl_size = 0
    chunk_size = 64 * 1024

    for chunk in response.iter_content(chunk_size=chunk_size):
        dl_size += len(chunk)

        # Guard against servers which under-report Content-Length
        if dl_size > max_size:
            raise ValueError(_('Image download exceeded maximum size'))

        file.write(chunk)

    if dl_size == 0:
        raise ValueError(_('Remote server returned empty response'))

    # Now, attempt to convert the downloaded data to a valid image file
    # img.verify() will throw an exception if the image is not valid
    # NOTE(review): verify() is called on the result of convert(); PIL
    # recommends verifying a freshly-opened image - confirm this ordering
    try:
        img = Image.open(file).convert()
        img.verify()
    except Exception:
        raise TypeError(_('Supplied URL is not a valid image file'))

    return img
|
||||
|
||||
|
||||
def render_currency(
    money,
    decimal_places=None,
    currency=None,
    min_decimal_places=None,
    max_decimal_places=None,
    include_symbol=True,
):
    """Render a currency / Money object to a formatted string (e.g. for reports).

    Arguments:
        money: The Money instance to be rendered
        decimal_places: The number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES setting.
        currency: Optionally convert to the specified currency
        min_decimal_places: The minimum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES_MIN setting.
        max_decimal_places: The maximum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES setting.
        include_symbol: If True, include the currency symbol in the output

    Returns:
        A formatted currency string, or '-' if the input is empty / not a Money object.
    """
    if money in [None, '']:
        return '-'

    if type(money) is not Money:
        return '-'

    if currency is not None:
        # Attempt to convert to the provided currency
        # If cannot be done, leave the original
        try:
            money = convert_money(money, currency)
        except Exception:
            pass

    if decimal_places is None:
        decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES', 6
        )

    if min_decimal_places is None:
        min_decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES_MIN', 0
        )

    if max_decimal_places is None:
        max_decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES', 6
        )

    # Normalize to strip trailing zeros, then inspect the decimal part
    value = Decimal(str(money.amount)).normalize()
    value = str(value)

    if '.' in value:
        decimals = len(value.split('.')[-1])

        # Clamp the natural number of decimals between the configured bounds
        decimals = max(decimals, min_decimal_places)
        decimals = min(decimals, decimal_places)

        decimal_places = decimals
    else:
        # Integral value: render at least two decimal places
        decimal_places = max(decimal_places, 2)

    # Bug fix: max_decimal_places is an *upper* bound, so clamp downwards.
    # The previous max(...) forced at least max_decimal_places digits,
    # contradicting the documented meaning of the argument.
    decimal_places = min(decimal_places, max_decimal_places)

    return format_money(
        money, decimal_places=decimal_places, include_symbol=include_symbol
    )
|
||||
|
||||
|
||||
def getModelsWithMixin(mixin_class) -> list:
    """Return all registered database models which inherit from the given mixin.

    Args:
        mixin_class: The mixin class to search for

    Returns:
        List of model classes which subclass the given mixin
    """
    from django.contrib.contenttypes.models import ContentType

    try:
        candidates = [
            content_type.model_class()
            for content_type in ContentType.objects.all()
            if content_type is not None
        ]
    except (OperationalError, ProgrammingError):
        # Database is likely not yet ready
        candidates = []

    matching = []

    # model_class() may return None for stale content types - skip those
    for model in candidates:
        if model is not None and issubclass(model, mixin_class):
            matching.append(model)

    return matching
|
||||
|
||||
|
||||
def notify_responsible(
    instance,
    sender,
    content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
    exclude=None,
):
    """Send a notification about *instance* to its responsible party.

    Interpolates the supplied notification content with the instance and
    sender details, and forwards to notify_users() with the responsible
    user/group as the single target.

    Args:
        instance: The newly created instance
        sender: Sender model reference
        content (NotificationBody, optional): Notification templates. Defaults to InvenTreeNotificationBodies.NewOrder.
        exclude (User, optional): User instance that should be excluded. Defaults to None.
    """
    import InvenTree.ready

    # Suppress notifications while importing fixture data
    if InvenTree.ready.isImportingData():
        return

    # Suppress notifications while database migrations are running
    if InvenTree.ready.isRunningMigrations():
        return

    targets = [instance.responsible]

    notify_users(targets, instance, sender, content=content, exclude=exclude)
|
||||
|
||||
|
||||
def notify_users(
    users,
    instance,
    sender,
    content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
    exclude=None,
):
    """Notify the supplied users or groups about a change to an instance.

    The NotificationBody templates are interpolated with details of the
    instance and sender model, then a notification is triggered for every
    target (optionally skipping the excluded user).

    Args:
        users: List of users or groups to notify
        instance: The newly created instance
        sender: Sender model reference
        content (NotificationBody, optional): Notification templates. Defaults to InvenTreeNotificationBodies.NewOrder.
        exclude (User, optional): User instance that should be excluded. Defaults to None.
    """
    meta = sender._meta

    # Values available for interpolation into the notification templates
    fmt_args = {
        'instance': str(instance),
        'verbose_name': meta.verbose_name,
        'app_label': meta.app_label,
        'model_name': meta.model_name,
    }

    subject = content.name.format(**fmt_args)

    # Context handed through to the notification machinery
    context = {
        'instance': instance,
        'name': subject,
        'message': content.message.format(**fmt_args),
        'link': InvenTree.helpers_model.construct_absolute_url(
            instance.get_absolute_url()
        ),
        'template': {'subject': subject},
    }

    # Optional HTML email template
    if content.template:
        context['template']['html'] = content.template.format(**fmt_args)

    # Create notification
    trigger_notification(
        instance,
        content.slug.format(**fmt_args),
        targets=users,
        target_exclude=[exclude],
        context=context,
    )
|
||||
@@ -0,0 +1,51 @@
|
||||
"""Support translation locales for InvenTree.
|
||||
|
||||
If a new language translation is supported, it must be added here
|
||||
After adding a new language, run the following command:
|
||||
|
||||
python manage.py makemessages -l <language_code> -e html,js,py --no-wrap
|
||||
- where <language_code> is the code for the new language
|
||||
|
||||
Additionally, update the following files with the new locale code:
|
||||
|
||||
- /src/frontend/.linguirc file
|
||||
- /src/frontend/src/contexts/LanguageContext.tsx
|
||||
"""
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
# Supported translation locales, as (locale code, human-readable name) pairs.
# Kept sorted alphabetically by locale code; see the module docstring for the
# steps required when adding a new language.
LOCALES = [
    ('bg', _('Bulgarian')),
    ('cs', _('Czech')),
    ('da', _('Danish')),
    ('de', _('German')),
    ('el', _('Greek')),
    ('en', _('English')),
    ('es', _('Spanish')),
    ('es-mx', _('Spanish (Mexican)')),
    ('fa', _('Farsi / Persian')),
    ('fi', _('Finnish')),
    ('fr', _('French')),
    ('he', _('Hebrew')),
    ('hi', _('Hindi')),
    ('hu', _('Hungarian')),
    ('it', _('Italian')),
    ('ja', _('Japanese')),
    ('ko', _('Korean')),
    ('lv', _('Latvian')),
    ('nl', _('Dutch')),
    ('no', _('Norwegian')),
    ('pl', _('Polish')),
    ('pt', _('Portuguese')),
    ('pt-br', _('Portuguese (Brazilian)')),
    ('ru', _('Russian')),
    ('sk', _('Slovak')),
    ('sl', _('Slovenian')),
    ('sr', _('Serbian')),
    ('sv', _('Swedish')),
    ('th', _('Thai')),
    ('tr', _('Turkish')),
    ('vi', _('Vietnamese')),
    ('zh-hans', _('Chinese (Simplified)')),
    ('zh-hant', _('Chinese (Traditional)')),
]
|
||||
@@ -0,0 +1,75 @@
|
||||
"""Functions for magic login."""
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import sesame.utils
|
||||
from rest_framework import serializers
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.response import Response
|
||||
|
||||
import InvenTree.version
|
||||
|
||||
|
||||
def send_simple_login_email(user, link):
    """Send an email with the login link to this user.

    Args:
        user: The user to email (reads user.username / user.email)
        link: The magic login link to embed in the message body
    """
    site_name = InvenTree.version.inventreeInstanceName()

    context = {'username': user.username, 'site_name': site_name, 'link': link}
    email_plaintext_message = render_to_string(
        'InvenTree/user_simple_login.txt', context
    )

    # i18n fix: gettext cannot extract f-strings, so the subject was never
    # picked up for translation. Translate a constant string, then interpolate.
    subject = _('[{site_name}] Log in to the app').format(site_name=site_name)

    send_mail(
        subject,
        email_plaintext_message,
        settings.DEFAULT_FROM_EMAIL,
        [user.email],
    )
|
||||
|
||||
|
||||
class GetSimpleLoginSerializer(serializers.Serializer):
    """Serializer for the simple login view.

    Accepts a single 'email' field identifying the account which should
    receive a magic login link.
    """

    # Email address of the account to send the login link to
    email = serializers.CharField(label=_('Email'))
|
||||
|
||||
|
||||
class GetSimpleLoginView(GenericAPIView):
    """API endpoint which emails a magic login link to a user."""

    permission_classes = ()
    serializer_class = GetSimpleLoginSerializer

    def post(self, request, *args, **kwargs):
        """Validate the submitted email and trigger the login email."""
        submitted = self.serializer_class(data=request.data)
        submitted.is_valid(raise_exception=True)

        self.email_submitted(email=submitted.data['email'])

        return Response({'status': 'ok'})

    def email_submitted(self, email):
        """Look up the account for the given email and send it a login link."""
        account = self.get_user(email)

        if account is None:
            print('user not found:', email)
            return

        send_simple_login_email(account, self.create_link(account))

    def get_user(self, email):
        """Return the User matching this email address, or None if not found."""
        try:
            return User.objects.get(email=email)
        except User.DoesNotExist:
            return None

    def create_link(self, user):
        """Build an absolute 'sesame' login URL for this user."""
        base = self.request.build_absolute_uri(reverse('sesame-login'))

        return base + sesame.utils.get_query_string(user)
|
||||
@@ -0,0 +1,19 @@
|
||||
"""Check if there are any pending database migrations, and run them."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from InvenTree.tasks import check_for_migrations
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Check if there are any pending database migrations, and run them.

    Thin wrapper around InvenTree.tasks.check_for_migrations, intended to be
    invoked as `manage.py <command>` (e.g. from container start-up scripts).
    """

    def handle(self, *args, **kwargs):
        """Check for any pending database migrations."""
        logger.info('Checking for pending database migrations')
        # force=True runs outstanding migrations immediately;
        # reload_registry=False skips the plugin-registry reload afterwards
        check_for_migrations(force=True, reload_registry=False)
        logger.info('Database migrations complete')
|
||||
@@ -0,0 +1,38 @@
|
||||
"""Custom management command to cleanup old settings that are not defined anymore."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Cleanup old (undefined) settings in the database."""

    def handle(self, *args, **kwargs):
        """Cleanup old (undefined) settings in the database."""
        logger.info('Collecting settings')
        from common.models import InvenTreeSetting, InvenTreeUserSetting

        # general settings
        self._prune_undefined(InvenTreeSetting, 'setting')

        # user settings
        self._prune_undefined(InvenTreeUserSetting, 'user setting')

        logger.info('checked all settings')

    def _prune_undefined(self, model, label):
        """Delete every row of the given settings model whose key is no longer defined.

        Args:
            model: Settings model class (must expose a SETTINGS definition dict)
            label: Human-readable label used in the log output
        """
        valid_keys = model.SETTINGS

        # check if key exists and delete if not
        for setting in model.objects.all():
            if setting.key not in valid_keys:
                setting.delete()
                logger.info("deleted %s '%s'", label, setting.key)
|
||||
@@ -0,0 +1,66 @@
|
||||
"""Custom management command to prerender files."""
|
||||
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.http.request import HttpRequest
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.module_loading import import_string
|
||||
from django.utils.translation import override as lang_over
|
||||
|
||||
|
||||
def render_file(file_name, source, target, locales, ctx):
    """Render a template file into every provided locale.

    For each locale the template is rendered with that language active and
    written to '<target>/<locale>.<file_name>'. Locale names are normalised
    to lower-case, dash-separated form.
    """
    for raw_locale in locales:
        # Enforce lower-case, dash-separated locale names
        locale = raw_locale.lower().replace('_', '-')

        destination = os.path.join(target, locale + '.' + file_name)

        with open(destination, 'w') as out_file:
            with lang_over(locale):
                content = render_to_string(os.path.join(source, file_name), ctx)
                out_file.write(content)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Django command to prerender files.

    Renders every file in STATICFILES_I18_SRC once per installed locale
    (discovered from the first LOCALE_PATHS directory) into
    STATICFILES_I18_TRG, via render_file().
    """

    def handle(self, *args, **kwargs):
        """Django command to prerender files."""
        # static directories
        LC_DIR = settings.LOCALE_PATHS[0]
        SOURCE_DIR = settings.STATICFILES_I18_SRC
        TARGET_DIR = settings.STATICFILES_I18_TRG

        # ensure static directory exists
        if not os.path.exists(TARGET_DIR):
            os.makedirs(TARGET_DIR, exist_ok=True)

        # collect locales
        # NOTE(review): keys and values are identical; render_file() only
        # iterates the keys, so a plain list would presumably also work
        locales = {}
        for locale in os.listdir(LC_DIR):
            path = os.path.join(LC_DIR, locale)
            if os.path.exists(path) and os.path.isdir(path):
                locales[locale] = locale

        # render!
        # Build a template context by running the configured context processors
        # against a blank request.
        # NOTE(review): 'STATFILES_I18_PROCESSORS' looks misspelled but must
        # match the actual settings attribute - confirm against settings.py
        # before "fixing" the name here.
        request = HttpRequest()
        ctx = {}
        processors = tuple(
            import_string(path) for path in settings.STATFILES_I18_PROCESSORS
        )
        for processor in processors:
            ctx.update(processor(request))

        # Only a flat source directory is supported
        for file in os.listdir(SOURCE_DIR):
            path = os.path.join(SOURCE_DIR, file)
            if os.path.exists(path) and os.path.isfile(path):
                render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
            else:
                raise NotImplementedError(
                    'Using multi-level directories is not implemented at this point'
                )  # TODO multilevel dir if needed
        print(f'Rendered all files in {SOURCE_DIR}')
|
||||
@@ -0,0 +1,75 @@
|
||||
"""Custom management command to rebuild all MPTT models.
|
||||
|
||||
- This is crucial after importing any fixtures, etc
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from maintenance_mode.core import maintenance_mode_on, set_maintenance_mode
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Rebuild all database models which leverage the MPTT structure."""

    def handle(self, *args, **kwargs):
        """Rebuild all database models which leverage the MPTT structure."""
        # Rebuild while the site is in maintenance mode, then release it
        with maintenance_mode_on():
            self.rebuild_models()

        set_maintenance_mode(False)

    def rebuild_models(self):
        """Rebuild all MPTT models in the database.

        Each model is rebuilt independently: a failure for one model is
        logged but does not prevent the remaining models from rebuilding.
        """
        from django.apps import apps

        # (app_label, model_name) for every MPTT-backed model,
        # in the same order as the previous hand-written sequence
        mptt_models = [
            ('part', 'Part'),
            ('part', 'PartCategory'),
            ('stock', 'StockItem'),
            ('stock', 'StockLocation'),
            ('build', 'Build'),
        ]

        for app_label, model_name in mptt_models:
            try:
                logger.info('Rebuilding %s objects', model_name)

                model = apps.get_model(app_label, model_name)
                model.objects.rebuild()
            except Exception:
                # Swallow the error so the remaining models still rebuild
                logger.info('Error rebuilding %s objects', model_name)
|
||||
@@ -0,0 +1,69 @@
|
||||
"""Custom management command to rebuild thumbnail images.
|
||||
|
||||
- May be required after importing a new dataset, for example
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.utils import OperationalError, ProgrammingError
|
||||
|
||||
from PIL import UnidentifiedImageError
|
||||
|
||||
from company.models import Company
|
||||
from part.models import Part
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Rebuild all thumbnail images."""

    def rebuild_thumbnail(self, model):
        """Rebuild the thumbnail specified by the "image" field of the provided model.

        Skips the instance entirely if the image and all of its variations
        already exist on disk. Missing or unreadable source files are logged
        as warnings, not raised.
        """
        if not model.image:
            return

        img = model.image

        # Check for image paths
        img_paths = []

        for x in [model.image, model.image.thumbnail, model.image.preview]:
            if x and x.path:
                img_paths.append(x.path)

        if len(img_paths) > 0:
            if all(os.path.exists(path) for path in img_paths):
                # All images exist - skip further work
                return

        logger.info("Generating thumbnail image for '%s'", img)

        try:
            model.image.render_variations(replace=False)
        except FileNotFoundError:
            logger.warning("Warning: Image file '%s' is missing", img)
        except UnidentifiedImageError:
            logger.warning("Warning: Image file '%s' is not a valid image", img)

    def handle(self, *args, **kwargs):
        """Rebuild all thumbnail images."""
        self._rebuild_for_model('Part', Part)
        self._rebuild_for_model('Company', Company)

    def _rebuild_for_model(self, label, model_class):
        """Rebuild thumbnails for every instance of model_class with an image.

        Aborts (breaks) on a database read error, since further queries
        would also fail.
        """
        logger.info('Rebuilding %s thumbnails', label)

        for instance in model_class.objects.exclude(image=None):
            try:
                self.rebuild_thumbnail(instance)
            except (OperationalError, ProgrammingError):
                # Fix: the Company loop previously logged the garbled
                # message 'ERROR: abase read error.'
                logger.exception('ERROR: Database read error.')
                break
|
||||
@@ -0,0 +1,38 @@
|
||||
"""Custom management command to remove MFA for a user."""
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Remove MFA for a user."""

    def add_arguments(self, parser):
        """Register the required 'mail' positional argument."""
        parser.add_argument('mail', type=str)

    def handle(self, *args, **kwargs):
        """Remove MFA for the supplied user (by mail)."""
        # general settings
        mail = kwargs.get('mail')
        if not mail:
            raise KeyError('A mail is required')

        user_model = get_user_model()

        # Match on either the primary email or any associated email address,
        # de-duplicating the combined queryset
        combined = user_model.objects.filter(
            email=mail
        ) | user_model.objects.filter(emailaddress__email=mail)
        mfa_user = [*set(combined)]

        if len(mfa_user) == 0:
            print('No user with this mail associated')
        elif len(mfa_user) > 1:
            print('More than one user found with this mail')
        else:
            target = mfa_user[0]

            # and clean out all MFA methods
            # backup codes
            target.staticdevice_set.all().delete()
            # TOTP tokens
            target.totpdevice_set.all().delete()
            print(f'Removed all MFA methods for user {str(target)}')
|
||||
@@ -0,0 +1,19 @@
|
||||
"""Check if there are any pending database migrations, and run them."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from InvenTree.tasks import check_for_migrations
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Check if there are any pending database migrations, and run them.

    Thin wrapper around InvenTree.tasks.check_for_migrations, intended to be
    invoked as `manage.py <command>` (e.g. from container start-up scripts).
    """

    def handle(self, *args, **kwargs):
        """Check for any pending database migrations."""
        logger.info('Checking for pending database migrations')
        # force=True runs outstanding migrations immediately;
        # reload_registry=False skips the plugin-registry reload afterwards
        check_for_migrations(force=True, reload_registry=False)
        logger.info('Database migrations complete')
|
||||
@@ -0,0 +1,36 @@
|
||||
"""Custom management command, wait for the database to be ready!"""
|
||||
|
||||
import time
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
from django.db.utils import ImproperlyConfigured, OperationalError
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Django command to pause execution until the database is ready.

    Polls the default database connection every 2 seconds until
    ensure_connection() succeeds, then exits.
    """

    def handle(self, *args, **kwargs):
        """Wait till the database is ready."""
        self.stdout.write('Waiting for database...')

        connected = False

        while not connected:
            # Back off between connection attempts
            time.sleep(2)

            try:
                connection.ensure_connection()

                connected = True

            except OperationalError as e:
                self.stdout.write(f'Could not connect to database: {e}')
            except ImproperlyConfigured as e:
                self.stdout.write(f'Improperly configured: {e}')
            else:
                # NOTE(review): this only *warns* - connected is already True
                # at this point, so the loop still exits even when the
                # connection is reported unusable. Confirm this is intended.
                if not connection.is_usable():
                    self.stdout.write('Database configuration is not usable')

        if connected:
            self.stdout.write('Database connection successful!')
|
||||
@@ -0,0 +1,293 @@
|
||||
"""Custom metadata for DRF."""
|
||||
|
||||
import logging
|
||||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import empty
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework.utils import model_meta
|
||||
|
||||
import common.models
|
||||
import InvenTree.permissions
|
||||
import users.models
|
||||
from InvenTree.helpers import str2bool
|
||||
from InvenTree.serializers import DependentField
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeMetadata(SimpleMetadata):
    """Custom metadata class for the DRF API.

    This custom metadata class limits the available "actions",
    based on the user's role permissions.

    Thus when a client send an OPTIONS request to an API endpoint,
    it will only receive a list of actions which it is allowed to perform!

    Additionally, we include some extra information about database models,
    so we can perform lookup for ForeignKey related fields.
    """

    def determine_metadata(self, request, view):
        """Overwrite the metadata to adapt to the request user."""
        self.request = request
        self.view = view

        metadata = super().determine_metadata(request, view)

        # Custom context information to pass through to the OPTIONS endpoint,
        # if "context=True" is supplied to the OPTIONS request.
        # The serializer class can supply context data by defining a
        # get_context_data() method (no arguments).
        context = {}

        if str2bool(request.query_params.get('context', False)):
            if hasattr(self, 'serializer') and hasattr(
                self.serializer, 'get_context_data'
            ):
                context = self.serializer.get_context_data()

            metadata['context'] = context

        user = request.user

        if user is None:
            # No actions for you!
            metadata['actions'] = {}
            return metadata

        try:
            # Extract the model name associated with the view
            self.model = InvenTree.permissions.get_model_for_view(view)

            # Construct the 'table name' from the model
            app_label = self.model._meta.app_label
            tbl_label = self.model._meta.model_name

            metadata['model'] = tbl_label

            table = f'{app_label}_{tbl_label}'

            actions = metadata.get('actions', None)

            if actions is None:
                actions = {}

            check = users.models.RuleSet.check_table_permission

            # Map the request method to a permission type
            rolemap = {
                'POST': 'add',
                'PUT': 'change',
                'PATCH': 'change',
                'DELETE': 'delete',
            }

            # let the view define a custom rolemap
            if hasattr(view, 'rolemap'):
                rolemap.update(view.rolemap)

            # Remove any HTTP methods that the user does not have permission for
            for method, permission in rolemap.items():
                result = check(user, table, permission)

                if method in actions and not result:
                    del actions[method]

            # Add a 'DELETE' action if we are allowed to delete
            if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
                actions['DELETE'] = {}

            # Add a 'VIEW' action if we are allowed to view
            if 'GET' in view.allowed_methods and check(user, table, 'view'):
                actions['GET'] = {}

            metadata['actions'] = actions

        except AttributeError:
            # We will assume that if the serializer class does *not* have a Meta
            # then we don't need a permission
            pass

        return metadata

    def get_serializer_info(self, serializer):
        """Override get_serializer_info so that we can add 'default' values to any fields whose Meta.model specifies a default value."""
        self.serializer = serializer

        serializer_info = super().get_serializer_info(serializer)

        model_class = None

        # Attributes to copy extra attributes from the model to the field (if they don't exist)
        extra_attributes = ['help_text', 'max_length']

        try:
            model_class = serializer.Meta.model

            model_fields = model_meta.get_field_info(model_class)

            # The model may provide request-dependent defaults via api_defaults()
            model_default_func = getattr(model_class, 'api_defaults', None)

            if model_default_func:
                model_default_values = model_class.api_defaults(self.request)
            else:
                model_default_values = {}

            # Iterate through simple fields
            for name, field in model_fields.fields.items():
                if name in serializer_info.keys():
                    if field.has_default():
                        default = field.default

                        if callable(default):
                            try:
                                default = default()
                            except Exception:
                                # Skip defaults which cannot be evaluated here
                                continue

                        serializer_info[name]['default'] = default

                    elif name in model_default_values:
                        serializer_info[name]['default'] = model_default_values[name]

                    for attr in extra_attributes:
                        if attr not in serializer_info[name]:
                            if hasattr(field, attr):
                                serializer_info[name][attr] = getattr(field, attr)

            # Iterate through relations
            for name, relation in model_fields.relations.items():
                if name not in serializer_info.keys():
                    # Skip relation not defined in serializer
                    continue

                if relation.reverse:
                    # Ignore reverse relations
                    continue

                # Extract and provide the "limit_choices_to" filters
                # This is used to automatically filter AJAX requests
                serializer_info[name]['filters'] = (
                    relation.model_field.get_limit_choices_to()
                )

                for attr in extra_attributes:
                    if attr not in serializer_info[name] and hasattr(
                        relation.model_field, attr
                    ):
                        serializer_info[name][attr] = getattr(
                            relation.model_field, attr
                        )

                if name in model_default_values:
                    serializer_info[name]['default'] = model_default_values[name]

        except AttributeError:
            # Serializer has no Meta.model - nothing to augment
            pass

        # Try to extract 'instance' information
        instance = None

        # Extract extra information if an instance is available
        if hasattr(serializer, 'instance'):
            instance = serializer.instance

        if instance is None and model_class is not None:
            # Attempt to find the instance based on kwargs lookup
            kwargs = getattr(self.view, 'kwargs', None)

            if kwargs:
                pk = None

                for field in ['pk', 'id', 'PK', 'ID']:
                    if field in kwargs:
                        pk = kwargs[field]
                        break

                if issubclass(model_class, common.models.BaseInvenTreeSetting):
                    instance = model_class.get_setting_object(**kwargs, create=False)

                elif pk is not None:
                    try:
                        instance = model_class.objects.get(pk=pk)
                    except (ValueError, model_class.DoesNotExist):
                        pass

        if instance is not None:
            """If there is an instance associated with this API View, introspect that instance to find any specific API info."""

            if hasattr(instance, 'api_instance_filters'):
                instance_filters = instance.api_instance_filters()

                for field_name, field_filters in instance_filters.items():
                    if field_name not in serializer_info.keys():
                        # The field might be missing, but is added later on
                        # This function seems to get called multiple times?
                        continue

                    if 'instance_filters' not in serializer_info[field_name].keys():
                        serializer_info[field_name]['instance_filters'] = {}

                    for key, value in field_filters.items():
                        serializer_info[field_name]['instance_filters'][key] = value

        return serializer_info

    def get_field_info(self, field):
        """Given an instance of a serializer field, return a dictionary of metadata about it.

        We take the regular DRF metadata and add our own unique flavor
        """
        # Try to add the child property to the dependent field to be used by the super call
        if self.label_lookup[field] == 'dependent field':
            field.get_child(raise_exception=True)

        # Run super method first
        field_info = super().get_field_info(field)

        # If a default value is specified for the serializer field, add it!
        if 'default' not in field_info and field.default != empty:
            field_info['default'] = field.get_default()

        # Force non-nullable fields to read as "required"
        # (even if there is a default value!)
        if not field.allow_null and not (
            hasattr(field, 'allow_blank') and field.allow_blank
        ):
            field_info['required'] = True

        # Introspect writable related fields
        if field_info['type'] == 'field' and not field_info['read_only']:
            # If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
            if isinstance(field, serializers.PrimaryKeyRelatedField):
                model = field.queryset.model
            else:
                # Bug fix: logging args must be consumed by %-placeholders in
                # the format string - the previous call passed extra positional
                # args with no placeholders, which the logging module rejects.
                logger.debug(
                    'Could not extract model for: %s -> %s',
                    field_info.get('label'),
                    field,
                )
                model = None

            if model:
                # Mark this field as "related", and point to the URL where we can get the data!
                field_info['type'] = 'related field'
                field_info['model'] = model._meta.model_name

                # Special case for 'user' model
                if field_info['model'] == 'user':
                    field_info['api_url'] = '/api/user/'
                else:
                    field_info['api_url'] = model.get_api_url()

        # Add more metadata about dependent fields
        if field_info['type'] == 'dependent field':
            field_info['depends_on'] = field.depends_on

        return field_info


# Register the custom DependentField type with the metadata label lookup
InvenTreeMetadata.label_lookup[DependentField] = 'dependent field'
|
||||
@@ -0,0 +1,228 @@
|
||||
"""Middleware for InvenTree."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import redirect
|
||||
from django.urls import Resolver404, include, path, resolve, reverse_lazy
|
||||
|
||||
from allauth_2fa.middleware import AllauthTwoFactorMiddleware, BaseRequire2FAMiddleware
|
||||
from error_report.middleware import ExceptionProcessor
|
||||
|
||||
from InvenTree.urls import frontendpatterns
|
||||
from users.models import ApiToken
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def get_token_from_request(request):
    """Extract an API token from the headers of a request object.

    Looks for an 'Authorization' (or 'authorization') header of the form
    'Token <value>' or 'Bearer <value>' and returns the raw token value.

    Args:
        request: Incoming HTTP request object.

    Returns:
        str: The extracted token value, or None if no token was found.
    """
    auth_keys = ['Authorization', 'authorization']
    token_keys = ['token', 'bearer']

    for k in auth_keys:
        if auth_header := request.headers.get(k, None):
            # Split into scheme + value *before* lower-casing.
            # Bug fix: previously the entire header was lower-cased, which
            # corrupted case-sensitive token values; only the scheme
            # ('Token' / 'Bearer') is case-insensitive.
            parts = auth_header.strip().split()

            if len(parts) > 1:
                scheme = parts[0].strip().lower().replace(':', '')

                if scheme in token_keys:
                    return parts[1]

    return None
|
||||
|
||||
|
||||
class AuthRequiredMiddleware(object):
    """Middleware which enforces that the user is authenticated."""

    def __init__(self, get_response):
        """Store the downstream response handler."""
        self.get_response = get_response

    def check_token(self, request) -> bool:
        """Attempt token-based authentication for this request.

        Returns:
            bool: True if an active API token matched a valid user.
        """
        key = get_token_from_request(request)

        if not key:
            return False

        try:
            token = ApiToken.objects.get(key=key)
        except ApiToken.DoesNotExist:
            logger.warning('Access denied for unknown token %s', key)
            return False

        if token.active and token.user:
            # Attach the matched user to the request object
            request.user = token.user
            return True

        return False

    def __call__(self, request):
        """Check that the request is authenticated, redirecting to login if required.

        Runs for each request, before the view (and later middleware).
        """
        assert hasattr(request, 'user')

        # API requests are authenticated separately, by the DRF library
        if request.path_info.startswith('/api/'):
            return self.get_response(request)

        # Individual views may opt out of authentication entirely
        if getattr(resolve(request.path).func, 'auth_exempt', False) is True:
            return self.get_response(request)

        if not request.user.is_authenticated:
            # A normal web session authenticates via csrftoken; external
            # applications (e.g. the InvenTree app or Python library)
            # supply an API token which must be validated manually.

            front = settings.FRONTEND_URL_BASE

            authorized = (
                # Static files must remain reachable (e.g. for the login page)
                request.path_info.startswith('/static/')
                # Unauthorized users can always reach the account pages
                or request.path_info.startswith('/accounts/')
                # Frontend bundle and assets
                or request.path_info.startswith(f'/{front}/')
                or request.path_info.startswith('/assets/')
                or request.path_info == f'/{front}'
                # Finally, attempt token-based authentication
                or self.check_token(request)
            )

            if not authorized:
                path = request.path_info

                # URL endpoints we *do not* want to redirect to
                no_redirect_urls = [
                    reverse_lazy('account_login'),
                    reverse_lazy('account_logout'),
                    reverse_lazy('admin:login'),
                    reverse_lazy('admin:logout'),
                ]

                # Path prefixes which should never trigger a redirect
                no_redirect_prefixes = [
                    '/api/',
                    '/auth/',
                    '/js/',
                    settings.MEDIA_URL,
                    settings.STATIC_URL,
                ]

                redirectable = path not in no_redirect_urls and all(
                    not path.startswith(p) for p in no_redirect_prefixes
                )

                if redirectable:
                    # Pass the 'next' parameter through to the login view
                    return redirect(
                        f'{reverse_lazy("account_login")}?next={request.path}'
                    )

                # Otherwise, respond with 401 (Unauthorized)
                return HttpResponse('Unauthorized', status=401)

        return self.get_response(request)
|
||||
|
||||
|
||||
# Matcher over the frontend URL patterns - used below to determine whether a request targets a frontend page
url_matcher = path('', include(frontendpatterns))
|
||||
|
||||
|
||||
class Check2FAMiddleware(BaseRequire2FAMiddleware):
    """Middleware which determines whether the user must have MFA enabled."""

    def require_2fa(self, request):
        """Return True if the global setting enforces MFA for this frontend page."""
        from common.models import InvenTreeSetting

        try:
            match = url_matcher.resolve(request.path[1:])
        except Resolver404:
            return False

        if match:
            return InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA')

        return False
|
||||
|
||||
|
||||
class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
    """Restrict the MFA authentication cycle to frontend requests only."""

    def process_request(self, request):
        """Run the MFA check only when the requested URL is not a frontend page."""
        try:
            # NOTE: the super() call may itself raise Resolver404 internally,
            # which is deliberately swallowed here as well
            if not url_matcher.resolve(request.path[1:]):
                super().process_request(request)
        except Resolver404:
            pass
|
||||
|
||||
|
||||
class InvenTreeRemoteUserMiddleware(PersistentRemoteUserMiddleware):
    """Middleware enabling HTTP-header (proxy) based authentication when configured."""

    # Name of the HTTP header which carries the remote username
    header = settings.REMOTE_LOGIN_HEADER

    def process_request(self, request):
        """Delegate to the parent implementation only if proxy login is enabled."""
        if settings.REMOTE_LOGIN:
            return super().process_request(request)
|
||||
|
||||
|
||||
class InvenTreeExceptionProcessor(ExceptionProcessor):
    """Custom exception processor that respects blocked errors."""

    def process_exception(self, request, exception):
        """Record the exception in the database, unless its kind is ignored."""
        kind, info, data = sys.exc_info()

        # Skip exception types which are on the ignore list
        if kind in settings.IGNORED_ERRORS:
            return

        import traceback

        from django.views.debug import ExceptionReporter

        from error_report.models import Error
        from error_report.settings import ERROR_DETAIL_SETTINGS

        # Error reporting is disabled
        if not ERROR_DETAIL_SETTINGS.get('ERROR_DETAIL_ENABLE', True):
            return

        path = request.build_absolute_uri()

        # Truncate the path to fit the database field (limited to 200 characters)
        if len(path) > 200:
            path = path[:195] + '...'

        # Bug fix: Error.objects.create() already persists the record;
        # the previous additional error.save() call was a redundant second write
        Error.objects.create(
            kind=kind.__name__,
            html=ExceptionReporter(request, kind, info, data).get_traceback_html(),
            path=path,
            info=info,
            data='\n'.join(traceback.format_exception(kind, info, data)),
        )
|
||||
@@ -0,0 +1,12 @@
|
||||
# Generated by Django 3.2.15 on 2022-10-03 18:57
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Placeholder migration with no dependencies and no operations."""

    dependencies = []

    operations = []
|
||||
@@ -0,0 +1,188 @@
|
||||
"""Mixins for (API) views in the whole project."""
|
||||
|
||||
from django.core.exceptions import FieldDoesNotExist
|
||||
|
||||
from rest_framework import generics, mixins, status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from InvenTree.fields import InvenTreeNotesField
|
||||
from InvenTree.helpers import remove_non_printable_characters, strip_html_tags
|
||||
|
||||
|
||||
class CleanMixin:
    """Model mixin class which sanitizes inputs using the Mozilla bleach tools."""

    # Field names which are exempt from sanitization
    SAFE_FIELDS = []

    def create(self, request, *args, **kwargs):
        """Run the sanitized request data through the standard create flow."""
        serializer = self.get_serializer(data=self.clean_data(request.data))
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)

        return Response(
            serializer.data,
            status=status.HTTP_201_CREATED,
            headers=self.get_success_headers(serializer.data),
        )

    def update(self, request, *args, **kwargs):
        """Run the sanitized request data through the standard update flow."""
        partial = kwargs.pop('partial', False)
        instance = self.get_object()

        serializer = self.get_serializer(
            instance, data=self.clean_data(request.data), partial=partial
        )
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)

        if getattr(instance, '_prefetched_objects_cache', None):
            # Forcibly invalidate any stale prefetch cache on the instance
            instance._prefetched_objects_cache = {}

        return Response(serializer.data)

    def clean_string(self, field: str, data: str) -> str:
        """Sanitize a single input string.

        Note that orphaned <>& characters are *allowed* (they would normally
        be escaped by bleach); nominally only HTML tags are cleaned.

        Ref: https://github.com/mozilla/bleach/issues/192
        """
        cleaned = strip_html_tags(data, field_name=field)

        # Newline characters are stripped, unless the target model field
        # explicitly permits them (e.g. notes fields)
        keep_newlines = False

        try:
            if hasattr(self, 'serializer_class'):
                model = self.serializer_class.Meta.model
                model_field = model._meta.get_field(field)

                # Field types which are allowed to contain newline characters
                keep_newlines = any(
                    issubclass(type(model_field), t) for t in [InvenTreeNotesField]
                )
        except (AttributeError, FieldDoesNotExist):
            pass

        return remove_non_printable_characters(
            cleaned, remove_newline=not keep_newlines
        )

    def clean_data(self, data: dict) -> dict:
        """Sanitize a dict of input data.

        Uses mozilla's bleach under the hood to neutralize certain HTML tags
        by encoding them - this prevents XSS at the server level. Note that
        the result can be longer than the input, and some character
        combinations may render `ugly`.

        Args:
            data (dict): Data that should be sanitized.

        Returns:
            dict: Sanitized copy of the provided data, in the same order.
        """
        cleaned = {}

        for key, value in data.items():
            if key in self.SAFE_FIELDS:
                cleaned[key] = value
            elif isinstance(value, str):
                cleaned[key] = self.clean_string(key, value)
            elif isinstance(value, dict):
                # Recursively sanitize nested dictionaries
                cleaned[key] = self.clean_data(value)
            else:
                cleaned[key] = value

        return cleaned
|
||||
|
||||
|
||||
class ListAPI(generics.ListAPIView):
    """Generic API endpoint for listing objects."""
|
||||
|
||||
|
||||
class ListCreateAPI(CleanMixin, generics.ListCreateAPIView):
    """Generic API endpoint for listing and creating objects (with input sanitization)."""
|
||||
|
||||
|
||||
class CreateAPI(CleanMixin, generics.CreateAPIView):
    """Generic API endpoint for creating objects (with input sanitization)."""
|
||||
|
||||
|
||||
class RetrieveAPI(generics.RetrieveAPIView):
    """Generic API endpoint for retrieving a single object."""
|
||||
|
||||
|
||||
class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
    """Generic API endpoint for retrieving and updating a single object."""
|
||||
|
||||
|
||||
class CustomDestroyModelMixin:
    """Mixin which forwards kwargs from the API endpoint through to the model delete call."""

    def destroy(self, request, *args, **kwargs):
        """Delete the target object, forwarding any keyword arguments to the model."""
        self.perform_destroy(self.get_object(), **kwargs)
        return Response(status=status.HTTP_204_NO_CONTENT)

    def perform_destroy(self, instance, **kwargs):
        """Delete the given instance, forwarding any keyword arguments."""
        instance.delete(**kwargs)
|
||||
|
||||
|
||||
class CustomRetrieveUpdateDestroyAPIView(
    mixins.RetrieveModelMixin,
    mixins.UpdateModelMixin,
    CustomDestroyModelMixin,
    generics.GenericAPIView,
):
    """APIView which forwards kwargs from the API endpoint through to the models."""

    def get(self, request, *args, **kwargs):
        """Retrieve the object, forwarding kwargs."""
        return self.retrieve(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        """Fully update the object, forwarding kwargs."""
        return self.update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        """Partially update the object, forwarding kwargs."""
        return self.partial_update(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        """Destroy the object, forwarding kwargs."""
        return self.destroy(request, *args, **kwargs)
|
||||
|
||||
|
||||
class CustomRetrieveUpdateDestroyAPI(CleanMixin, CustomRetrieveUpdateDestroyAPIView):
    """Sanitizing variant of CustomRetrieveUpdateDestroyAPIView, forwarding kwargs to the models."""
|
||||
|
||||
|
||||
class RetrieveUpdateDestroyAPI(CleanMixin, generics.RetrieveUpdateDestroyAPIView):
    """Generic API endpoint for retrieving, updating and destroying a single object."""
|
||||
|
||||
|
||||
class UpdateAPI(CleanMixin, generics.UpdateAPIView):
    """Generic API endpoint for updating a single object."""
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,121 @@
|
||||
"""Permission set for InvenTree."""
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from rest_framework import permissions
|
||||
|
||||
import users.models
|
||||
|
||||
|
||||
def get_model_for_view(view):
    """Attempt to introspect the 'model' type for an API view.

    Checks, in order: an explicit get_permission_model() hook, a static
    serializer_class attribute, and finally the get_serializer_class() method.

    Raises:
        AttributeError: If no model could be determined for the view.
    """
    if hasattr(view, 'get_permission_model'):
        return view.get_permission_model()

    if hasattr(view, 'serializer_class'):
        return view.serializer_class.Meta.model

    if hasattr(view, 'get_serializer_class'):
        # Bug fix: previously called the misspelled 'get_serializr_class',
        # which raised AttributeError for views relying on this hook
        return view.get_serializer_class().Meta.model

    raise AttributeError(f'Serializer class not specified for {view.__class__}')
|
||||
|
||||
|
||||
class RolePermission(permissions.BasePermission):
    """Role mixin for API endpoints, mapping HTTP methods onto user "role" permissions.

    Each endpoint can respond to one or more of:
    - GET
    - POST
    - PUT
    - PATCH
    - DELETE

    The required "role" can be specified via the role_required attribute, e.g.

    role_required = "part"

    The user is then checked for the corresponding permission - for example,
    a DELETE action is rejected unless the user has the "part.remove" permission.
    """

    def has_permission(self, request, view):
        """Determine if the current user has the required role permission."""
        user = request.user

        # Superusers are granted everything
        if user.is_superuser:
            return True

        # Default mapping of HTTP method -> permission type
        rolemap = {
            'GET': 'view',
            'OPTIONS': 'view',
            'POST': 'add',
            'PUT': 'change',
            'PATCH': 'change',
            'DELETE': 'delete',
        }

        # A view may override (parts of) the default mapping
        rolemap.update(getattr(view, 'rolemap', {}))

        permission = rolemap[request.method]

        # The required role may be declared directly on the view class
        if role := getattr(view, 'role_required', None):
            # A "role.permission" value overrides the method-derived permission
            if '.' in role:
                role, permission = role.split('.')

            return users.models.check_user_role(user, role, permission)

        try:
            # Otherwise, derive the permission table from the view's model
            model = get_model_for_view(view)
            table = f'{model._meta.app_label}_{model._meta.model_name}'
        except AttributeError:
            # A serializer class without a Meta implies no permission is required
            return True

        return users.models.RuleSet.check_table_permission(user, table, permission)
|
||||
|
||||
|
||||
class IsSuperuser(permissions.IsAdminUser):
    """Allow access only to superusers."""

    def has_permission(self, request, view):
        """Return True only for an authenticated superuser."""
        user = request.user
        return bool(user and user.is_superuser)
|
||||
|
||||
|
||||
class IsStaffOrReadOnly(permissions.IsAdminUser):
    """Allow read-only access to any user; write access is restricted to staff users."""

    def has_permission(self, request, view):
        """Grant access for safe (read-only) methods, or for staff users."""
        if request.method in permissions.SAFE_METHODS:
            return True

        return bool(request.user and request.user.is_staff)
|
||||
|
||||
|
||||
def auth_exempt(view_func):
    """Decorator which marks a view function as exempt from authentication checks.

    The wrapped view carries an 'auth_exempt' attribute, inspected by
    the authentication middleware.
    """

    def _wrapped(*args, **kwargs):
        return view_func(*args, **kwargs)

    # Flag the wrapper *before* copying metadata from the original function
    _wrapped.auth_exempt = True
    return wraps(view_func)(_wrapped)
|
||||
@@ -0,0 +1,149 @@
|
||||
"""Functions to check if certain parts of InvenTree are ready."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def isInTestMode():
    """Return True if the application is currently running its test suite."""
    return any(arg == 'test' for arg in sys.argv)
|
||||
|
||||
|
||||
def isImportingData():
    """Return True while a data import/export command ('flush', 'loaddata', 'dumpdata') is running."""
    data_commands = ('flush', 'loaddata', 'dumpdata')
    return any(cmd in sys.argv for cmd in data_commands)
|
||||
|
||||
|
||||
def isRunningMigrations():
    """Return True if a database migration command is currently being executed."""
    migration_commands = (
        'migrate',
        'makemigrations',
        'showmigrations',
        'runmigrations',
    )
    return any(cmd in sys.argv for cmd in migration_commands)
|
||||
|
||||
|
||||
def isRebuildingData():
    """Return True if any of the data-rebuilding commands are being executed."""
    rebuild_commands = (
        'prerender',
        'rebuild_models',
        'rebuild_thumbnails',
        'rebuild',
    )
    return any(cmd in sys.argv for cmd in rebuild_commands)
|
||||
|
||||
|
||||
def isRunningBackup():
    """Return True if any of the backup / restore commands are being executed."""
    backup_commands = [
        'backup',
        'restore',
        'dbbackup',
        # Bug fix: this entry was misspelled 'dbresotore', so the 'dbrestore'
        # command was never actually detected
        'dbrestore',
        'mediabackup',
        'mediarestore',
    ]

    return any(cmd in sys.argv for cmd in backup_commands)
|
||||
|
||||
|
||||
def isInWorkerThread():
    """Return True if the current thread is a background worker (qcluster) thread."""
    return any(arg == 'qcluster' for arg in sys.argv)
|
||||
|
||||
|
||||
def isInServerThread():
    """Return True if the current thread belongs to the web server process."""
    # A background worker (qcluster) process is never a server thread
    if 'qcluster' in sys.argv:
        return False

    # Either the development server, or gunicorn in production
    return 'runserver' in sys.argv or 'gunicorn' in sys.argv[0]
|
||||
|
||||
|
||||
def isInMainThread():
    """Return True if this is the 'main' runserver process (not the autoreloader).

    Django runserver starts two processes: the actual dev server and a second
    one which reloads the application. RUN_MAIN is set only in the real server
    process; with --noreload there is a single process and RUN_MAIN is unset.
    """
    if 'runserver' in sys.argv and '--noreload' not in sys.argv:
        return os.environ.get('RUN_MAIN', None) == 'true'

    # Otherwise, any non-worker thread counts as the main thread
    return 'qcluster' not in sys.argv
|
||||
|
||||
|
||||
def canAppAccessDatabase(
    allow_test: bool = False, allow_plugins: bool = False, allow_shell: bool = False
):
    """Return True if the apps.py ready() functions may safely touch the database.

    There are some circumstances (backups, data import, data rebuilds,
    migrations, certain management commands) where the database must not
    be accessed on startup.

    Args:
        allow_test: Permit access while the test suite is running.
        allow_plugins: Permit access during migrations / collectstatic.
        allow_shell: Permit access from the 'shell' management command.
    """
    # Never touch the database while backing up, importing or rebuilding data
    if isRunningBackup() or isImportingData() or isRebuildingData():
        return False

    # Migrations block access, unless plugin loading is explicitly allowed
    if not allow_plugins and isRunningMigrations():
        return False

    # Management commands which must not trigger custom "on load" code
    excluded_commands = [
        'check',
        'createsuperuser',
        'wait_for_db',
        'makemessages',
        'compilemessages',
    ]

    if not allow_shell:
        excluded_commands.append('shell')

    if not allow_test:
        # Override for testing mode?
        excluded_commands.append('test')

    if not allow_plugins:
        excluded_commands.append('collectstatic')

    return not any(cmd in sys.argv for cmd in excluded_commands)
|
||||
|
||||
|
||||
def isPluginRegistryLoaded():
    """Return False until the plugin registry has fully loaded all plugins.

    When AppMixin plugins are present, the registry reloads all apps once
    after startup (so the discovered AppConfigs are added to Django), which
    causes each AppConfig.ready() to execute twice. Use this check to avoid
    running ready-code two times.

    Note: All apps using this check need to be registered after the plugins
    app in settings.py.
    """
    from plugin import registry

    return registry.plugins_loaded
|
||||
@@ -0,0 +1,218 @@
|
||||
"""Functions to sanitize user input files."""
|
||||
|
||||
from bleach import clean
|
||||
from bleach.css_sanitizer import CSSSanitizer
|
||||
|
||||
# Whitelist of SVG elements which survive sanitization
ALLOWED_ELEMENTS_SVG = [
    'a', 'animate', 'animateColor', 'animateMotion', 'animateTransform',
    'circle', 'defs', 'desc', 'ellipse',
    'font-face', 'font-face-name', 'font-face-src',
    'g', 'glyph', 'hkern', 'linearGradient', 'line', 'marker', 'metadata',
    'missing-glyph', 'mpath', 'path', 'polygon', 'polyline', 'radialGradient',
    'rect', 'set', 'stop', 'svg', 'switch', 'text', 'title', 'tspan', 'use',
]
|
||||
|
||||
# Whitelist of SVG attributes which survive sanitization
ALLOWED_ATTRIBUTES_SVG = [
    'accent-height', 'accumulate', 'additive', 'alphabetic', 'arabic-form',
    'ascent', 'attributeName', 'attributeType', 'baseProfile', 'bbox',
    'begin', 'by', 'calcMode', 'cap-height', 'class', 'color',
    'color-rendering', 'content', 'cx', 'cy', 'd', 'dx', 'dy', 'descent',
    'display', 'dur', 'end', 'fill', 'fill-opacity', 'fill-rule',
    'font-family', 'font-size', 'font-stretch', 'font-style', 'font-variant',
    'font-weight', 'from', 'fx', 'fy', 'g1', 'g2', 'glyph-name',
    'gradientUnits', 'hanging', 'height', 'horiz-adv-x', 'horiz-origin-x',
    'id', 'ideographic', 'k', 'keyPoints', 'keySplines', 'keyTimes', 'lang',
    'marker-end', 'marker-mid', 'marker-start', 'markerHeight', 'markerUnits',
    'markerWidth', 'mathematical', 'max', 'min', 'name', 'offset', 'opacity',
    'orient', 'origin', 'overline-position', 'overline-thickness', 'panose-1',
    'path', 'pathLength', 'points', 'preserveAspectRatio', 'r', 'refX',
    'refY', 'repeatCount', 'repeatDur', 'requiredExtensions',
    'requiredFeatures', 'restart', 'rotate', 'rx', 'ry', 'slope', 'stemh',
    'stemv', 'stop-color', 'stop-opacity', 'strikethrough-position',
    'strikethrough-thickness', 'stroke', 'stroke-dasharray',
    'stroke-dashoffset', 'stroke-linecap', 'stroke-linejoin',
    'stroke-miterlimit', 'stroke-opacity', 'stroke-width', 'systemLanguage',
    'target', 'text-anchor', 'to', 'transform', 'type', 'u1', 'u2',
    'underline-position', 'underline-thickness', 'unicode', 'unicode-range',
    'units-per-em', 'values', 'version', 'viewBox', 'visibility', 'width',
    'widths', 'x', 'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
    'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
    'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y', 'y1',
    'y2', 'zoomAndPan', 'style',
]
|
||||
|
||||
|
||||
def sanitize_svg(
    file_data,
    strip: bool = True,
    elements: list = ALLOWED_ELEMENTS_SVG,
    attributes: list = ALLOWED_ATTRIBUTES_SVG,
) -> str:
    """Sanitize an SVG file using the bleach library.

    Bug fix: the 'elements' and 'attributes' parameters were previously
    annotated as 'str' although they are lists of allowed names.

    Args:
        file_data (str | bytes): SVG content as a string (or UTF-8 encoded bytes).
        strip (bool, optional): Remove (rather than escape) invalid elements. Defaults to True.
        elements (list, optional): Allowed elements. Defaults to ALLOWED_ELEMENTS_SVG.
        attributes (list, optional): Allowed attributes. Defaults to ALLOWED_ATTRIBUTES_SVG.

    Returns:
        str: Sanitized SVG content.
    """
    # Handle byte-encoded data
    if isinstance(file_data, bytes):
        file_data = file_data.decode('utf-8')

    return clean(
        file_data,
        tags=elements,
        attributes=attributes,
        strip=strip,
        strip_comments=strip,
        css_sanitizer=CSSSanitizer(),
    )
|
||||
@@ -0,0 +1,73 @@
|
||||
"""Configuration for Sentry.io error reporting."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import Http404
|
||||
|
||||
import rest_framework.exceptions
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
import InvenTree.version
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def default_sentry_dsn():
    """Return the default sentry.io DSN used for InvenTree error reporting."""
    return 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600'
|
||||
|
||||
|
||||
def sentry_ignore_errors():
    """Return the list of exception types which are *not* reported to sentry.io."""
    return [
        Http404,
        ValidationError,
        rest_framework.exceptions.AuthenticationFailed,
        rest_framework.exceptions.NotAuthenticated,
        rest_framework.exceptions.PermissionDenied,
        rest_framework.exceptions.ValidationError,
    ]
|
||||
|
||||
|
||||
def init_sentry(dsn, sample_rate, tags):
    """Initialize the sentry.io error reporting integration.

    Args:
        dsn: Sentry DSN to report to.
        sample_rate: Trace sampling rate.
        tags (dict): Extra tags, stored with an 'inventree_' prefix.
    """
    logger.info('Initializing sentry.io integration')

    if InvenTree.version.isInvenTreeDevelopmentVersion():
        environment = 'development'
    else:
        environment = 'production'

    sentry_sdk.init(
        dsn=dsn,
        integrations=[DjangoIntegration()],
        traces_sample_rate=sample_rate,
        send_default_pii=True,
        ignore_errors=sentry_ignore_errors(),
        release=InvenTree.version.INVENTREE_SW_VERSION,
        environment=environment,
    )

    # Custom tags supplied by the caller
    for key, val in tags.items():
        sentry_sdk.set_tag(f'inventree_{key}', val)

    # Standard version / platform metadata tags
    sentry_sdk.set_tag('api', InvenTree.version.inventreeApiVersion())
    sentry_sdk.set_tag('platform', InvenTree.version.inventreePlatform())
    sentry_sdk.set_tag('git_branch', InvenTree.version.inventreeBranch())
    sentry_sdk.set_tag('git_commit', InvenTree.version.inventreeCommitHash())
    sentry_sdk.set_tag('git_date', InvenTree.version.inventreeCommitDate())
|
||||
|
||||
|
||||
def report_exception(exc):
    """Report an exception to sentry.io, unless reporting is disabled or the type is ignored."""
    if not (settings.SENTRY_ENABLED and settings.SENTRY_DSN):
        return

    if any(isinstance(exc, err_type) for err_type in sentry_ignore_errors()):
        return

    logger.info('Reporting exception to sentry.io: %s', exc)

    try:
        sentry_sdk.capture_exception(exc)
    except Exception:
        # Reporting failures must never propagate to the caller
        logger.warning('Failed to report exception to sentry.io')
|
||||
@@ -0,0 +1,887 @@
|
||||
"""Serializers used in various InvenTree apps."""
|
||||
|
||||
import os
|
||||
from collections import OrderedDict
|
||||
from copy import deepcopy
|
||||
from decimal import Decimal
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import tablib
|
||||
from djmoney.contrib.django_rest_framework.fields import MoneyField
|
||||
from djmoney.money import Money
|
||||
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import PermissionDenied, ValidationError
|
||||
from rest_framework.fields import empty
|
||||
from rest_framework.serializers import DecimalField
|
||||
from rest_framework.utils import model_meta
|
||||
from taggit.serializers import TaggitSerializer
|
||||
|
||||
import common.models as common_models
|
||||
from common.settings import currency_code_default, currency_code_mappings
|
||||
from InvenTree.fields import InvenTreeRestURLField, InvenTreeURLField
|
||||
|
||||
|
||||
class EmptySerializer(serializers.Serializer):
    """Serializer with no fields, for use in testing."""
|
||||
|
||||
|
||||
class InvenTreeMoneySerializer(MoneyField):
    """Custom serializer for 'MoneyField', which ensures that passed values are numerically valid.

    Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
    """

    def __init__(self, *args, **kwargs):
        """Apply InvenTree-specific defaults (19 digits, 6 decimal places, optional)."""
        kwargs.setdefault('max_digits', 19)
        kwargs.setdefault('decimal_places', 6)
        kwargs.setdefault('required', False)

        self.decimal_places = kwargs['decimal_places']

        super().__init__(*args, **kwargs)

    def get_value(self, data):
        """Validate that the supplied amount is a valid Decimal, wrapping it as Money where possible."""
        amount = super(DecimalField, self).get_value(data)

        # Treat an empty string as None
        if len(str(amount).strip()) == 0:
            amount = None

        try:
            if amount is not None and amount is not empty:
                # Round to the maximum number of allowed decimal places
                amount = round(Decimal(amount), self.decimal_places)
        except Exception:
            raise ValidationError({self.field_name: [_('Must be a valid number')]})

        currency = data.get(
            get_currency_field_name(self.field_name), self.default_currency
        )

        # Wrap a plain numeric amount together with its currency
        if (
            currency
            and amount is not None
            and amount is not empty
            and not isinstance(amount, MONEY_CLASSES)
        ):
            return Money(amount, currency)

        return amount
|
||||
|
||||
|
||||
class InvenTreeCurrencySerializer(serializers.ChoiceField):
    """Custom serializers for selecting currency option."""

    def __init__(self, *args, **kwargs):
        """Initialize the currency serializer.

        Builds the choice list from the configured currency codes, and
        injects a blank option when the field allows blank/null values.
        """
        options = currency_code_mappings()

        blank_allowed = kwargs.get('allow_blank', False) or kwargs.get(
            'allow_null', False
        )

        if blank_allowed:
            # Prepend an explicit "no selection" entry
            options = [('', '---------')] + options

        kwargs['choices'] = options

        # Only inject a default when the caller specified neither
        # 'default' nor 'required'
        if 'default' not in kwargs and 'required' not in kwargs:
            kwargs['default'] = '' if blank_allowed else currency_code_default

        kwargs.setdefault('label', _('Currency'))
        kwargs.setdefault('help_text', _('Select currency from available options'))

        super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class DependentField(serializers.Field):
    """A dependent field can be used to dynamically return child fields based on the value of other fields."""

    # The dynamically-resolved child field (populated by get_child)
    child = None

    def __init__(self, *args, depends_on, field_serializer, **kwargs):
        """A dependent field can be used to dynamically return child fields based on the value of other fields.

        Args:
            depends_on: List of sibling field names whose values determine the child field.
            field_serializer: Name of a method on the parent serializer which,
                given the current data, returns the child field instance (or None).

        Example:
            This example adds two fields. If the client selects integer, an integer field
            will be shown, but if they select char, a char field will be shown. For any
            other value, nothing will be shown.

            class TestSerializer(serializers.Serializer):
                select_type = serializers.ChoiceField(choices=[
                    ("integer", "Integer"),
                    ("char", "Char"),
                ])
                my_field = DependentField(depends_on=["select_type"], field_serializer="get_my_field")

                def get_my_field(self, fields):
                    if fields["select_type"] == "integer":
                        return serializers.IntegerField()
                    if fields["select_type"] == "char":
                        return serializers.CharField()
        """
        super().__init__(*args, **kwargs)

        self.depends_on = depends_on
        self.field_serializer = field_serializer

    def get_child(self, raise_exception=False):
        """This method tries to extract the child based on the provided data in the request by the client.

        Walks the serializer tree from the root down to this field, narrowing the
        request data to the sub-dict relevant for this field, then asks the parent
        serializer's ``field_serializer`` method for the concrete child field.
        Sets ``self.child`` (and binds it) on success; returns None otherwise.
        """
        # Copy so the narrowing below never mutates the actual request payload
        data = deepcopy(self.context['request'].data)

        def visit_parent(node):
            """Recursively extract the data for the parent field/serializer in reverse."""
            nonlocal data

            if node.parent:
                visit_parent(node.parent)

            # only do for composite fields and stop right before the current field
            if hasattr(node, 'child') and node is not self and isinstance(data, dict):
                data = data.get(node.field_name, None)

        visit_parent(self)

        # ensure that data is a dictionary and that a parent exists
        if not isinstance(data, dict) or self.parent is None:
            return

        # check if the request data contains the dependent fields, otherwise skip getting the child
        for f in self.depends_on:
            if data.get(f, None) is None:
                # Fall back to the dependency field's declared default, if any
                if (
                    self.parent
                    and (v := getattr(self.parent.fields[f], 'default', None))
                    is not None
                ):
                    data[f] = v
                else:
                    return

        # partially validate the data for options requests that set raise_exception while calling .get_child(...)
        if raise_exception:
            validation_data = {k: v for k, v in data.items() if k in self.depends_on}
            serializer = self.parent.__class__(
                context=self.context, data=validation_data, partial=True
            )
            serializer.is_valid(raise_exception=raise_exception)

        # try to get the field serializer
        field_serializer = getattr(self.parent, self.field_serializer)
        child = field_serializer(data)

        if not child:
            return

        self.child = child
        # Bind the child into the field tree so its context/parent resolve correctly
        self.child.bind(field_name='', parent=self)

    def to_internal_value(self, data):
        """This method tries to convert the data to an internal representation based on the defined to_internal_value method on the child.

        Returns None when no child field could be resolved for the current data.
        """
        self.get_child()
        if self.child:
            return self.child.to_internal_value(data)

        return None

    def to_representation(self, value):
        """This method tries to convert the data to representation based on the defined to_representation method on the child.

        Returns None when no child field could be resolved for the current data.
        """
        self.get_child()
        if self.child:
            return self.child.to_representation(value)

        return None
|
||||
|
||||
|
||||
class InvenTreeModelSerializer(serializers.ModelSerializer):
    """Inherits the standard Django ModelSerializer class, but also ensures that the underlying model class data are checked on validation."""

    # Switch out URLField mapping
    # Django URL fields are mapped to the custom InvenTree REST URL field,
    # so URL validation behaves consistently across the whole API.
    serializer_field_mapping = {
        **serializers.ModelSerializer.serializer_field_mapping,
        models.URLField: InvenTreeRestURLField,
        InvenTreeURLField: InvenTreeRestURLField,
    }

    def __init__(self, instance=None, data=empty, **kwargs):
        """Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
        # If instance is None, we are creating a new instance
        if instance is None and data is not empty:
            if data is None:
                data = OrderedDict()
            else:
                # Shallow-copy the incoming data so the caller's dict is not mutated
                new_data = OrderedDict()
                new_data.update(data)

                data = new_data

            # Add missing fields which have default values
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():
                """
                Update the field IF (and ONLY IF):

                - The field has a specified default value
                - The field does not already have a value set
                """
                if field.has_default() and field_name not in data:
                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            # Default callable failed - skip this field entirely
                            continue

                    data[field_name] = value

        super().__init__(instance, data, **kwargs)

    def get_initial(self):
        """Construct initial data for the serializer.

        Use the 'default' values specified by the django model definition
        """
        initials = super().get_initial().copy()

        # Are we creating a new instance?
        if self.instance is None:
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():
                # Only fill in model defaults which DRF did not already provide
                if field.has_default() and field_name not in initials:
                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            # Default callable failed - skip this field entirely
                            continue

                    initials[field_name] = value

        return initials

    def skip_create_fields(self):
        """Return a list of 'fields' which should be skipped for model creation.

        This is used to 'bypass' a shortcoming of the DRF framework,
        which does not allow us to have writeable serializer fields which do not exist on the model.

        Default implementation returns an empty list
        """
        return []

    def save(self, **kwargs):
        """Catch any django ValidationError thrown at the moment `save` is called, and re-throw as a DRF ValidationError."""
        try:
            super().save(**kwargs)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return self.instance

    def create(self, validated_data):
        """Custom create method which supports field adjustment.

        Strips out any fields named by skip_create_fields() before passing
        the data on to the model, since those fields do not exist on the model.
        """
        initial_data = validated_data.copy()

        # Remove any fields which do not exist on the model
        for field in self.skip_create_fields():
            initial_data.pop(field, None)

        return super().create(initial_data)

    def update(self, instance, validated_data):
        """Catch any django ValidationError, and re-throw as a DRF ValidationError."""
        try:
            instance = super().update(instance, validated_data)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return instance

    def run_validation(self, data=empty):
        """Perform serializer validation.

        In addition to running validators on the serializer fields,
        this class ensures that the underlying model is also validated.
        """
        # Run any native validation checks first (may raise a ValidationError)
        data = super().run_validation(data)

        if not hasattr(self, 'instance') or self.instance is None:
            # No instance exists (we are creating a new one)

            initial_data = data.copy()

            for field in self.skip_create_fields():
                # Remove any fields we do not wish to provide to the model
                initial_data.pop(field, None)

            # Create a (RAM only) instance for extra testing
            instance = self.Meta.model(**initial_data)
        else:
            # Instance already exists (we are updating!)
            instance = self.instance

            # Update instance fields
            for attr, value in data.items():
                try:
                    setattr(instance, attr, value)
                except (ValidationError, DjangoValidationError) as exc:
                    raise ValidationError(detail=serializers.as_serializer_error(exc))

        # Run a 'full_clean' on the model.
        # Note that by default, DRF does *not* perform full model validation!
        try:
            instance.full_clean()
        except (ValidationError, DjangoValidationError) as exc:
            # Normalize the various django error shapes into a DRF-style dict
            if hasattr(exc, 'message_dict'):
                data = exc.message_dict
            elif hasattr(exc, 'message'):
                data = {'non_field_errors': [str(exc.message)]}
            else:
                data = {'non_field_errors': [str(exc)]}

            # Change '__all__' key (django style) to 'non_field_errors' (DRF style)
            if '__all__' in data:
                data['non_field_errors'] = data['__all__']
                del data['__all__']

            raise ValidationError(data)

        return data
|
||||
|
||||
|
||||
class InvenTreeTaggitSerializer(TaggitSerializer):
    """Updated from https://github.com/glemmaPaul/django-taggit-serializer."""

    def update(self, instance, validated_data):
        """Overridden update method to re-add the tagmanager.

        After the base update, each tag manager attribute is refreshed from
        the database before the tags are saved back onto the object.
        """
        to_be_tagged, validated_data = self._pop_tags(validated_data)

        tag_object = super().update(instance, validated_data)

        for key in to_be_tagged:
            # re-add the tagmanager (reload the object fresh from the database)
            reloaded = tag_object.__class__.objects.get(id=tag_object.id)
            setattr(tag_object, key, getattr(reloaded, key))

        return self._save_tags(tag_object, to_be_tagged)
|
||||
|
||||
|
||||
class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
    """Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer.

    Provides a model serializer with tag support; all behavior comes from
    the two parent classes.
    """
|
||||
|
||||
|
||||
class UserSerializer(InvenTreeModelSerializer):
    """Serializer for a User."""

    class Meta:
        """Metaclass defines serializer fields."""

        model = User
        # Minimal public user information
        fields = ['pk', 'username', 'first_name', 'last_name', 'email']

        # The username cannot be changed via the API
        read_only_fields = ['username']
|
||||
|
||||
|
||||
class ExendedUserSerializer(UserSerializer):
    """Serializer for a User with a bit more info."""

    from users.serializers import GroupSerializer

    groups = GroupSerializer(read_only=True, many=True)

    class Meta(UserSerializer.Meta):
        """Metaclass defines serializer fields."""

        fields = UserSerializer.Meta.fields + [
            'groups',
            'is_staff',
            'is_superuser',
            'is_active',
        ]

        read_only_fields = UserSerializer.Meta.read_only_fields + ['groups']

    def validate(self, attrs):
        """Expanded validation for changing user role.

        Permission rules:
        - Superusers can change any role flag
        - Staff can change any flag except is_superuser
        - Anyone else is rejected if a role flag is present
        """
        requesting_user = self.context['request'].user
        touches_role = 'is_staff' in attrs or 'is_superuser' in attrs

        if touches_role and not requesting_user.is_superuser:
            # Staff may change roles, but never the superuser flag
            staff_allowed = requesting_user.is_staff and 'is_superuser' not in attrs

            if not staff_allowed:
                raise PermissionDenied(
                    _('You do not have permission to change this user role.')
                )

        return super().validate(attrs)
|
||||
|
||||
|
||||
class UserCreateSerializer(ExendedUserSerializer):
    """Serializer for creating a new User."""

    def validate(self, attrs):
        """Expanded valiadation for auth.

        Only superusers are permitted to create user accounts; a random
        password is injected (later replaced by an unusable one in create()).
        """
        # Check that the user trying to create a new user is a superuser
        if not self.context['request'].user.is_superuser:
            raise serializers.ValidationError(_('Only superusers can create new users'))

        # Generate a random password
        attrs.update({'password': User.objects.make_random_password(length=14)})

        return super().validate(attrs)

    def create(self, validated_data):
        """Send an e email to the user after creation."""
        from InvenTree.helpers_model import get_base_url

        site_url = get_base_url()

        user = super().create(validated_data)

        # Make sure the user cannot login until they have set a password
        user.set_unusable_password()

        body = (
            _('Your account has been created.')
            + '\n\n'
            + _('Please use the password reset function to login')
        )

        if site_url:
            body += f'\n\nURL: {site_url}'

        # Send the user an onboarding email (from current site)
        user.email_user(subject=_('Welcome to InvenTree'), message=body)

        return user
|
||||
|
||||
|
||||
class InvenTreeAttachmentSerializerField(serializers.FileField):
    """Override the DRF native FileField serializer, to remove the leading server path.

    For example, the FileField might supply something like:

    http://127.0.0.1:8000/media/foo/bar.jpg

    Whereas we wish to return:

    /media/foo/bar.jpg

    If the server process is serving the data at 127.0.0.1,
    but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
    then an attachment which prefixes the "address" of the internal server
    will not be accessible from the outside world.
    """

    def to_representation(self, value):
        """To json-serializable type.

        Returns a MEDIA_URL-relative path, or None for an empty value.
        """
        if not value:
            return None

        # Prefix with the (relative) media URL rather than an absolute host URL
        return os.path.join(str(settings.MEDIA_URL), str(value))
|
||||
|
||||
|
||||
class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
    """Special case of an InvenTreeModelSerializer, which handles an "attachment" model.

    The only real addition here is that we support "renaming" of the attachment file.
    """

    @staticmethod
    def attachment_fields(extra_fields=None):
        """Default set of fields for an attachment serializer.

        Args:
            extra_fields: Optional extra field names to append to the defaults.
        """
        base_fields = [
            'pk',
            'attachment',
            'filename',
            'link',
            'comment',
            'upload_date',
            'user',
            'user_detail',
        ]

        return base_fields + list(extra_fields) if extra_fields else base_fields

    user_detail = UserSerializer(source='user', read_only=True, many=False)

    attachment = InvenTreeAttachmentSerializerField(required=False, allow_null=False)

    # The 'filename' field must be present in the serializer
    filename = serializers.CharField(
        label=_('Filename'), required=False, source='basename', allow_blank=False
    )

    upload_date = serializers.DateField(read_only=True)
|
||||
|
||||
|
||||
class InvenTreeImageSerializerField(serializers.ImageField):
    """Custom image serializer.

    On upload, validate that the file is a valid image file
    """

    def to_representation(self, value):
        """To json-serializable type.

        Returns a MEDIA_URL-relative path, or None for an empty value.
        """
        if not value:
            return None

        # Prefix with the (relative) media URL rather than an absolute host URL
        return os.path.join(str(settings.MEDIA_URL), str(value))
|
||||
|
||||
|
||||
class InvenTreeDecimalField(serializers.FloatField):
    """Custom serializer for decimal fields.

    Solves the following issues:
    - The normal DRF DecimalField renders values with trailing zeros
    - Using a FloatField can result in rounding issues: https://code.djangoproject.com/ticket/30290
    """

    def to_internal_value(self, data):
        """Convert to python type.

        The incoming value is stringified first, then parsed as a Decimal,
        which avoids float rounding artifacts.
        """
        try:
            return Decimal(str(data))
        except Exception:
            raise serializers.ValidationError(_('Invalid value'))
|
||||
|
||||
|
||||
class DataFileUploadSerializer(serializers.Serializer):
    """Generic serializer for uploading a data file, and extracting a dataset.

    - Validates uploaded file
    - Extracts column names
    - Extracts data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = ['data_file']

    data_file = serializers.FileField(
        label=_('Data File'),
        help_text=_('Select data file for upload'),
        required=True,
        allow_empty_file=False,
    )

    def validate_data_file(self, data_file):
        """Perform validation checks on the uploaded data file.

        Side effects:
            Sets ``self.filename`` and ``self.dataset`` for later use by
            ``extract_data``.

        Raises:
            serializers.ValidationError: If the file type, size or content is invalid.
        """
        self.filename = data_file.name

        name, ext = os.path.splitext(data_file.name)

        # Remove the leading . from the extension
        ext = ext[1:]

        accepted_file_types = ['xls', 'xlsx', 'csv', 'tsv', 'xml']

        if ext not in accepted_file_types:
            raise serializers.ValidationError(_('Unsupported file type'))

        # Impose a 50MB limit on uploaded BOM files
        max_upload_file_size = 50 * 1024 * 1024

        if data_file.size > max_upload_file_size:
            raise serializers.ValidationError(_('File is too large'))

        # Read file data into memory (bytes object)
        try:
            data = data_file.read()
        except Exception as e:
            raise serializers.ValidationError(str(e))

        # Text-based formats must be decoded to str before tablib can parse them
        if ext in ['csv', 'tsv', 'xml']:
            try:
                data = data.decode()
            except Exception as e:
                raise serializers.ValidationError(str(e))

        # Convert to a tablib dataset (we expect headers)
        try:
            self.dataset = tablib.Dataset().load(data, ext, headers=True)
        except Exception as e:
            raise serializers.ValidationError(str(e))

        if len(self.dataset.headers) == 0:
            raise serializers.ValidationError(_('No columns found in file'))

        if len(self.dataset) == 0:
            raise serializers.ValidationError(_('No data rows found in file'))

        return data_file

    def match_column(self, column_name, field_names, exact=False):
        """Attempt to match a column name (from the file) to a field (defined in the model).

        Order of matching is:
        - Direct match
        - Case insensitive match
        - Fuzzy match

        Returns:
            The matching field name, or None if no match is found.
        """
        if not column_name:
            return None

        column_name = str(column_name).strip()

        column_name_lower = column_name.lower()

        if column_name in field_names:
            return column_name

        for field_name in field_names:
            if field_name.lower() == column_name_lower:
                return field_name

        if exact:
            # Finished available 'exact' matches
            return None

        # TODO: Fuzzy pattern matching for column names

        # No matches found
        return None

    def extract_data(self):
        """Returns dataset extracted from the file.

        Must be called after validate_data_file has populated self.dataset
        and self.filename.
        """
        # Provide a dict of available import fields for the model
        model_fields = {}

        # Keep track of columns we have already extracted
        matched_columns = set()

        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                # Best-effort: fall back to an empty field mapping
                pass

        # Extract a list of valid model field names
        model_field_names = list(model_fields.keys())

        # Provide a dict of available columns from the dataset
        file_columns = {}

        for header in self.dataset.headers:
            column = {}

            # Attempt to "match" file columns to model fields
            match = self.match_column(header, model_field_names, exact=True)

            # Each model field is matched at most once
            if match is not None and match not in matched_columns:
                matched_columns.add(match)
                column['value'] = match
            else:
                column['value'] = None

            file_columns[header] = column

        return {
            'file_fields': file_columns,
            'model_fields': model_fields,
            'rows': [row.values() for row in self.dataset.dict],
            'filename': self.filename,
        }

    def save(self):
        """Empty overwrite for save."""
        ...
|
||||
|
||||
|
||||
class DataFileExtractSerializer(serializers.Serializer):
    """Generic serializer for extracting data from an imported dataset.

    - User provides an array of matched headers
    - User provides an array of raw data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = ['columns', 'rows']

    # Mapping of columns
    columns = serializers.ListField(child=serializers.CharField(allow_blank=True))

    rows = serializers.ListField(
        child=serializers.ListField(
            child=serializers.CharField(allow_blank=True, allow_null=True)
        )
    )

    def _get_model_fields(self):
        """Return the import field mapping for TARGET_MODEL (empty dict on failure).

        Fix: previously 'model_fields' could be referenced without being
        assigned when TARGET_MODEL was None, raising a NameError.
        """
        if self.TARGET_MODEL:
            try:
                return self.TARGET_MODEL.get_import_fields()
            except Exception:
                pass

        return {}

    def validate(self, data):
        """Clean data.

        Side effects:
            Sets ``self.columns`` and ``self.rows`` from the validated data.

        Raises:
            serializers.ValidationError: If rows or columns are missing or invalid.
        """
        data = super().validate(data)

        self.columns = data.get('columns', [])
        self.rows = data.get('rows', [])

        if len(self.rows) == 0:
            raise serializers.ValidationError(_('No data rows provided'))

        if len(self.columns) == 0:
            raise serializers.ValidationError(_('No data columns supplied'))

        self.validate_extracted_columns()

        return data

    @property
    def data(self):
        """Returns current data.

        Each row is pre-processed via process_row() before being returned;
        rows for which process_row() returns None are dropped.
        """
        model_fields = self._get_model_fields()

        rows = []

        for row in self.rows:
            # Optionally pre-process each row, before sending back to the client
            processed_row = self.process_row(self.row_to_dict(row))

            if processed_row:
                rows.append({'original': row, 'data': processed_row})

        return {'fields': model_fields, 'columns': self.columns, 'rows': rows}

    def process_row(self, row):
        """Process a 'row' of data, which is a mapped column:value dict.

        Returns either a mapped column:value dict, or None.

        If the function returns None, the column is ignored!
        """
        # Default implementation simply returns the original row data
        return row

    def row_to_dict(self, row):
        """Convert a "row" to a named data dict.

        Values beyond the known columns, or under a blank column name,
        are silently dropped. An empty 'errors' dict is always included.
        """
        row_dict = {'errors': {}}

        for idx, value in enumerate(row):
            if idx < len(self.columns):
                col = self.columns[idx]

                if col:
                    row_dict[col] = value

        return row_dict

    def validate_extracted_columns(self):
        """Perform custom validation of header mapping.

        Raises:
            serializers.ValidationError: If a required column is missing,
                or a column name appears more than once.
        """
        model_fields = self._get_model_fields()

        cols_seen = set()

        # Check for missing required columns
        for name, field in model_fields.items():
            if field.get('required', False) and name not in self.columns:
                raise serializers.ValidationError(
                    _(f"Missing required column: '{name}'")
                )

        for col in self.columns:
            if not col:
                continue

            # Check for duplicated columns
            if col in cols_seen:
                raise serializers.ValidationError(_(f"Duplicate column: '{col}'"))

            cols_seen.add(col)

    def save(self):
        """No "save" action for this serializer."""
        pass
|
||||
|
||||
|
||||
class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
    """Mixin class which allows downloading an 'image' from a remote URL.

    Adds the optional, write-only `remote_image` field to the serializer
    """

    def skip_create_fields(self):
        """Ensure the 'remote_image' field is skipped when creating a new instance."""
        return ['remote_image']

    remote_image = serializers.URLField(
        required=False,
        allow_blank=False,
        write_only=True,
        label=_('Remote Image'),
        help_text=_('URL of remote image file'),
    )

    def validate_remote_image(self, url):
        """Perform custom validation for the remote image URL.

        - Attempt to download the image and store it against this object instance
        - Catches and re-throws any errors
        """
        from InvenTree.helpers_model import download_image_from_url

        # Nothing to do if no URL was provided
        if not url:
            return

        downloads_enabled = common_models.InvenTreeSetting.get_setting(
            'INVENTREE_DOWNLOAD_FROM_URL'
        )

        if not downloads_enabled:
            raise ValidationError(
                _('Downloading images from remote URL is not enabled')
            )

        try:
            # Stash the downloaded file for later use by the serializer
            self.remote_image_file = download_image_from_url(url)
        except Exception as exc:
            self.remote_image_file = None
            raise ValidationError(str(exc))

        return url
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,269 @@
|
||||
"""API endpoints for social authentication with allauth."""
|
||||
|
||||
import logging
|
||||
from importlib import import_module
|
||||
|
||||
from django.urls import NoReverseMatch, include, path, reverse
|
||||
|
||||
from allauth.account.models import EmailAddress
|
||||
from allauth.socialaccount import providers
|
||||
from allauth.socialaccount.providers.oauth2.views import OAuth2Adapter, OAuth2LoginView
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import NotFound
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
import InvenTree.sso
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.mixins import CreateAPI, ListAPI, ListCreateAPI
|
||||
from InvenTree.serializers import EmptySerializer, InvenTreeModelSerializer
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class GenericOAuth2ApiLoginView(OAuth2LoginView):
    """Api view to login a user with a social account."""

    def dispatch(self, request, *args, **kwargs):
        """Dispatch the regular login view directly.

        Bypasses the normal HTTP-method routing and goes straight to
        allauth's login flow.
        """
        return self.login(request, *args, **kwargs)
|
||||
|
||||
|
||||
class GenericOAuth2ApiConnectView(GenericOAuth2ApiLoginView):
    """Api view to connect a social account to the current user."""

    def dispatch(self, request, *args, **kwargs):
        """Dispatch the connect request directly.

        Forces allauth into 'connect' mode (link the social account to the
        already-authenticated user) instead of the default login flow.
        """
        # Override the request method be in connection mode
        # (GET is copied first because QueryDicts are immutable)
        request.GET = request.GET.copy()
        request.GET['process'] = 'connect'

        # Resume the dispatch
        return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
|
||||
def handle_oauth2(adapter: OAuth2Adapter):
    """Define urls for oauth2 endpoints.

    Returns the 'login/' and 'connect/' url patterns for the given adapter.

    NOTE(review): the url names are built from the module-level loop variable
    ``provider`` (set in the provider iteration below), not from ``adapter`` —
    so this function is only safe to call from inside that loop. Consider
    deriving the name from the adapter instead; TODO confirm.
    """
    return [
        path(
            'login/',
            GenericOAuth2ApiLoginView.adapter_view(adapter),
            name=f'{provider.id}_api_login',
        ),
        path(
            'connect/',
            GenericOAuth2ApiConnectView.adapter_view(adapter),
            name=f'{provider.id}_api_connect',
        ),
    ]
|
||||
|
||||
|
||||
# Providers whose non-oauth2 handlers have an oauth2 replacement;
# the legacy variant is rejected with a pointer to the replacement.
legacy = {
    'twitter': 'twitter_oauth2',
    'bitbucket': 'bitbucket_oauth2',
    'linkedin': 'linkedin_oauth2',
    'vimeo': 'vimeo_oauth2',
    'openid': 'openid_connect',
}  # legacy connectors


# Collect urls for all loaded providers
social_auth_urlpatterns = []

provider_urlpatterns = []

# Build login/connect url patterns for every registered allauth provider.
# Runs at import time; providers that cannot be handled are logged and skipped.
for name, provider in providers.registry.provider_map.items():
    try:
        prov_mod = import_module(provider.get_package() + '.views')
    except ImportError:
        logger.exception('Could not import authentication provider %s', name)
        continue

    # Try to extract the adapter class
    # (any OAuth2Adapter subclass defined in the provider's views module)
    adapters = [
        cls
        for cls in prov_mod.__dict__.values()
        if isinstance(cls, type)
        and cls != OAuth2Adapter
        and issubclass(cls, OAuth2Adapter)
    ]

    # Get urls
    # Only providers exposing exactly one oauth2 adapter are supported
    urls = []
    if len(adapters) == 1:
        urls = handle_oauth2(adapter=adapters[0])
    else:
        if provider.id in legacy:
            logger.warning(
                '`%s` is not supported on platform UI. Use `%s` instead.',
                provider.id,
                legacy[provider.id],
            )
            continue
        else:
            logger.error(
                'Found handler that is not yet ready for platform UI: `%s`. Open an feature request on GitHub if you need it implemented.',
                provider.id,
            )
            continue
    provider_urlpatterns += [path(f'{provider.id}/', include(urls))]


social_auth_urlpatterns += provider_urlpatterns
|
||||
|
||||
|
||||
class SocialProviderListResponseSerializer(serializers.Serializer):
    """Serializer for the SocialProviderListView."""

    class SocialProvider(serializers.Serializer):
        """Serializer for the SocialProviderListResponseSerializer."""

        id = serializers.CharField()
        name = serializers.CharField()
        configured = serializers.BooleanField()
        # Absolute login / connect endpoint urls (may be null if not routed)
        login = serializers.URLField()
        connect = serializers.URLField()
        display_name = serializers.CharField()

    sso_enabled = serializers.BooleanField()
    sso_registration = serializers.BooleanField()
    mfa_required = serializers.BooleanField()
    # One entry per registered social auth provider
    providers = SocialProvider(many=True)
    registration_enabled = serializers.BooleanField()
    password_forgotten_enabled = serializers.BooleanField()
|
||||
|
||||
|
||||
class SocialProviderListView(ListAPI):
|
||||
"""List of available social providers."""
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
serializer_class = EmptySerializer
|
||||
|
||||
@extend_schema(
|
||||
responses={200: OpenApiResponse(response=SocialProviderListResponseSerializer)}
|
||||
)
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Get the list of providers."""
|
||||
provider_list = []
|
||||
for provider in providers.registry.provider_map.values():
|
||||
provider_data = {
|
||||
'id': provider.id,
|
||||
'name': provider.name,
|
||||
'configured': False,
|
||||
}
|
||||
|
||||
try:
|
||||
provider_data['login'] = request.build_absolute_uri(
|
||||
reverse(f'{provider.id}_api_login')
|
||||
)
|
||||
except NoReverseMatch:
|
||||
provider_data['login'] = None
|
||||
|
||||
try:
|
||||
provider_data['connect'] = request.build_absolute_uri(
|
||||
reverse(f'{provider.id}_api_connect')
|
||||
)
|
||||
except NoReverseMatch:
|
||||
provider_data['connect'] = None
|
||||
|
||||
provider_data['configured'] = InvenTree.sso.check_provider(provider)
|
||||
provider_data['display_name'] = InvenTree.sso.provider_display_name(
|
||||
provider
|
||||
)
|
||||
|
||||
provider_list.append(provider_data)
|
||||
|
||||
data = {
|
||||
'sso_enabled': InvenTree.sso.login_enabled(),
|
||||
'sso_registration': InvenTree.sso.registration_enabled(),
|
||||
'mfa_required': InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA'),
|
||||
'providers': provider_list,
|
||||
'registration_enabled': InvenTreeSetting.get_setting('LOGIN_ENABLE_REG'),
|
||||
'password_forgotten_enabled': InvenTreeSetting.get_setting(
|
||||
'LOGIN_ENABLE_PWD_FORGOT'
|
||||
),
|
||||
}
|
||||
return Response(data)
|
||||
|
||||
|
||||
class EmailAddressSerializer(InvenTreeModelSerializer):
|
||||
"""Serializer for the EmailAddress model."""
|
||||
|
||||
class Meta:
|
||||
"""Meta options for EmailAddressSerializer."""
|
||||
|
||||
model = EmailAddress
|
||||
fields = '__all__'
|
||||
|
||||
|
||||
class EmptyEmailAddressSerializer(InvenTreeModelSerializer):
|
||||
"""Empty Serializer for the EmailAddress model."""
|
||||
|
||||
class Meta:
|
||||
"""Meta options for EmailAddressSerializer."""
|
||||
|
||||
model = EmailAddress
|
||||
fields = []
|
||||
|
||||
|
||||
class EmailListView(ListCreateAPI):
|
||||
"""List of registered email addresses for current users."""
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
serializer_class = EmailAddressSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
"""Only return data for current user."""
|
||||
return EmailAddress.objects.filter(user=self.request.user)
|
||||
|
||||
|
||||
class EmailActionMixin(CreateAPI):
|
||||
"""Mixin to modify email addresses for current users."""
|
||||
|
||||
serializer_class = EmptyEmailAddressSerializer
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
def get_queryset(self):
|
||||
"""Filter queryset for current user."""
|
||||
return EmailAddress.objects.filter(
|
||||
user=self.request.user, pk=self.kwargs['pk']
|
||||
).first()
|
||||
|
||||
@extend_schema(responses={200: OpenApiResponse(response=EmailAddressSerializer)})
|
||||
def post(self, request, *args, **kwargs):
|
||||
"""Filter item, run action and return data."""
|
||||
email = self.get_queryset()
|
||||
if not email:
|
||||
raise NotFound
|
||||
|
||||
self.special_action(email, request, *args, **kwargs)
|
||||
return Response(EmailAddressSerializer(email).data)
|
||||
|
||||
|
||||
class EmailVerifyView(EmailActionMixin):
|
||||
"""Re-verify an email for a currently logged in user."""
|
||||
|
||||
def special_action(self, email, request, *args, **kwargs):
|
||||
"""Send confirmation."""
|
||||
if email.verified:
|
||||
return
|
||||
email.send_confirmation(request)
|
||||
|
||||
|
||||
class EmailPrimaryView(EmailActionMixin):
|
||||
"""Make an email for a currently logged in user primary."""
|
||||
|
||||
def special_action(self, email, *args, **kwargs):
|
||||
"""Mark email as primary."""
|
||||
if email.primary:
|
||||
return
|
||||
email.set_as_primary()
|
||||
|
||||
|
||||
class EmailRemoveView(EmailActionMixin):
|
||||
"""Remove an email for a currently logged in user."""
|
||||
|
||||
def special_action(self, email, *args, **kwargs):
|
||||
"""Delete email."""
|
||||
email.delete()
|
||||
@@ -0,0 +1,77 @@
|
||||
"""Helper functions for Single Sign On functionality."""
|
||||
|
||||
import logging
|
||||
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.helpers import str2bool
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def get_provider_app(provider):
|
||||
"""Return the SocialApp object for the given provider."""
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
try:
|
||||
apps = SocialApp.objects.filter(provider__iexact=provider.id)
|
||||
except SocialApp.DoesNotExist:
|
||||
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
|
||||
return None
|
||||
|
||||
if apps.count() > 1:
|
||||
logger.warning("Multiple SocialApps found for provider '%s'", provider.id)
|
||||
|
||||
if apps.count() == 0:
|
||||
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
|
||||
|
||||
return apps.first()
|
||||
|
||||
|
||||
def check_provider(provider):
|
||||
"""Check if the given provider is correctly configured.
|
||||
|
||||
To be correctly configured, the following must be true:
|
||||
|
||||
- Provider must either have a registered SocialApp
|
||||
- Must have at least one site enabled
|
||||
"""
|
||||
import allauth.app_settings
|
||||
|
||||
# First, check that the provider is enabled
|
||||
app = get_provider_app(provider)
|
||||
|
||||
if not app:
|
||||
return False
|
||||
|
||||
if allauth.app_settings.SITES_ENABLED:
|
||||
# At least one matching site must be specified
|
||||
if not app.sites.exists():
|
||||
logger.error('SocialApp %s has no sites configured', app)
|
||||
return False
|
||||
|
||||
# At this point, we assume that the provider is correctly configured
|
||||
return True
|
||||
|
||||
|
||||
def provider_display_name(provider):
|
||||
"""Return the 'display name' for the given provider."""
|
||||
if app := get_provider_app(provider):
|
||||
return app.name
|
||||
|
||||
# Fallback value if app not found
|
||||
return provider.name
|
||||
|
||||
|
||||
def login_enabled() -> bool:
|
||||
"""Return True if SSO login is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO'))
|
||||
|
||||
|
||||
def registration_enabled() -> bool:
|
||||
"""Return True if SSO registration is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'))
|
||||
|
||||
|
||||
def auto_registration_enabled() -> bool:
|
||||
"""Return True if SSO auto-registration is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO'))
|
||||
@@ -0,0 +1,371 @@
|
||||
/**
|
||||
* @author zhixin wen <wenzhixin2010@gmail.com>
|
||||
* version: 1.18.3
|
||||
* https://github.com/wenzhixin/bootstrap-table/
|
||||
*/
|
||||
.bootstrap-table .fixed-table-toolbar::after {
|
||||
content: "";
|
||||
display: block;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .bs-bars,
|
||||
.bootstrap-table .fixed-table-toolbar .search,
|
||||
.bootstrap-table .fixed-table-toolbar .columns {
|
||||
position: relative;
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group {
|
||||
display: inline-block;
|
||||
margin-left: -1px !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group > .btn {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group:first-child > .btn {
|
||||
border-top-left-radius: 4px;
|
||||
border-bottom-left-radius: 4px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group:last-child > .btn {
|
||||
border-top-right-radius: 4px;
|
||||
border-bottom-right-radius: 4px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .dropdown-menu {
|
||||
text-align: left;
|
||||
max-height: 300px;
|
||||
overflow: auto;
|
||||
-ms-overflow-style: scrollbar;
|
||||
z-index: 1001;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns label {
|
||||
display: block;
|
||||
padding: 3px 20px;
|
||||
clear: both;
|
||||
font-weight: normal;
|
||||
line-height: 1.428571429;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns-left {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns-right {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .pull-right .dropdown-menu {
|
||||
right: 0;
|
||||
left: auto;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container {
|
||||
position: relative;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table {
|
||||
width: 100%;
|
||||
margin-bottom: 0 !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table th,
|
||||
.bootstrap-table .fixed-table-container .table td {
|
||||
vertical-align: middle;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th {
|
||||
vertical-align: bottom;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th:focus {
|
||||
outline: 0 solid transparent;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th.detail {
|
||||
width: 30px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .th-inner {
|
||||
padding: 0.75rem;
|
||||
vertical-align: bottom;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .sortable {
|
||||
cursor: pointer;
|
||||
background-position: right;
|
||||
background-repeat: no-repeat;
|
||||
padding-right: 30px !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .both {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAQAAADYWf5HAAAAkElEQVQoz7X QMQ5AQBCF4dWQSJxC5wwax1Cq1e7BAdxD5SL+Tq/QCM1oNiJidwox0355mXnG/DrEtIQ6azioNZQxI0ykPhTQIwhCR+BmBYtlK7kLJYwWCcJA9M4qdrZrd8pPjZWPtOqdRQy320YSV17OatFC4euts6z39GYMKRPCTKY9UnPQ6P+GtMRfGtPnBCiqhAeJPmkqAAAAAElFTkSuQmCC");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .asc {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAZ0lEQVQ4y2NgGLKgquEuFxBPAGI2ahhWCsS/gDibUoO0gPgxEP8H4ttArEyuQYxAPBdqEAxPBImTY5gjEL9DM+wTENuQahAvEO9DMwiGdwAxOymGJQLxTyD+jgWDxCMZRsEoGAVoAADeemwtPcZI2wAAAABJRU5ErkJggg==");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .desc {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAZUlEQVQ4y2NgGAWjYBSggaqGu5FA/BOIv2PBIPFEUgxjB+IdQPwfC94HxLykus4GiD+hGfQOiB3J8SojEE9EM2wuSJzcsFMG4ttQgx4DsRalkZENxL+AuJQaMcsGxBOAmGvopk8AVz1sLZgg0bsAAAAASUVORK5CYII= ");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr.selected td {
|
||||
background-color: rgba(0, 0, 0, 0.075);
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr.no-records-found td {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view .card-view-title {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
min-width: 30%;
|
||||
width: auto !important;
|
||||
text-align: left !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view .card-view-value {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label input[type="radio"],
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label input[type="checkbox"] {
|
||||
margin: 0 auto !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table.table-sm .th-inner {
|
||||
padding: 0.3rem;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height:not(.has-footer) {
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height.has-card-view {
|
||||
border-top: 1px solid #dee2e6;
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .fixed-table-border {
|
||||
border-left: 1px solid #dee2e6;
|
||||
border-right: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .table thead th {
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .table-dark thead th {
|
||||
border-bottom: 1px solid #32383e;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-header {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body {
|
||||
overflow-x: auto;
|
||||
overflow-y: auto;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading {
|
||||
align-items: center;
|
||||
background: #fff;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
z-index: 1000;
|
||||
transition: visibility 0s, opacity 0.15s ease-in-out;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.open {
|
||||
visibility: visible;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap {
|
||||
align-items: baseline;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .loading-text {
|
||||
margin-right: 6px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap {
|
||||
align-items: center;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-dot,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::after,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::before {
|
||||
content: "";
|
||||
animation-duration: 1.5s;
|
||||
animation-iteration-count: infinite;
|
||||
animation-name: LOADING;
|
||||
background: #212529;
|
||||
border-radius: 50%;
|
||||
display: block;
|
||||
height: 5px;
|
||||
margin: 0 4px;
|
||||
opacity: 0;
|
||||
width: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-dot {
|
||||
animation-delay: 0.3s;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::after {
|
||||
animation-delay: 0.6s;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark {
|
||||
background: #212529;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-dot,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-wrap::after,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-wrap::before {
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-footer {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination::after {
|
||||
content: "";
|
||||
display: block;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail,
|
||||
.bootstrap-table .fixed-table-pagination > .pagination {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .pagination-info {
|
||||
line-height: 34px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list .btn-group {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list .btn-group .dropdown-menu {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a {
|
||||
color: #c8c8c8;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a::before {
|
||||
content: '\2B05';
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a::after {
|
||||
content: '\27A1';
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.disabled a {
|
||||
pointer-events: none;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.bootstrap-table.fullscreen {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: 1050;
|
||||
width: 100% !important;
|
||||
background: #fff;
|
||||
height: calc(100vh);
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap4 .pagination-lg .page-link, .bootstrap-table.bootstrap5 .pagination-lg .page-link {
|
||||
padding: .5rem 1rem;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap5 .float-left {
|
||||
float: left;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap5 .float-right {
|
||||
float: right;
|
||||
}
|
||||
|
||||
/* calculate scrollbar width */
|
||||
div.fixed-table-scroll-inner {
|
||||
width: 100%;
|
||||
height: 200px;
|
||||
}
|
||||
|
||||
div.fixed-table-scroll-outer {
|
||||
top: 0;
|
||||
left: 0;
|
||||
visibility: hidden;
|
||||
width: 200px;
|
||||
height: 150px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
@keyframes LOADING {
|
||||
0% {
|
||||
opacity: 0;
|
||||
}
|
||||
50% {
|
||||
opacity: 1;
|
||||
}
|
||||
to {
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+10
File diff suppressed because one or more lines are too long
src/backend/InvenTree/InvenTree/static/bootstrap-table/extensions/addrbar/bootstrap-table-addrbar.js
Vendored
+1779
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+1211
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
Vendored
+2546
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+1237
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+1256
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+869
@@ -0,0 +1,869 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
|
||||
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
|
||||
}(this, (function ($) { 'use strict';
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
|
||||
|
||||
function _classCallCheck(instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
}
|
||||
|
||||
function _defineProperties(target, props) {
|
||||
for (var i = 0; i < props.length; i++) {
|
||||
var descriptor = props[i];
|
||||
descriptor.enumerable = descriptor.enumerable || false;
|
||||
descriptor.configurable = true;
|
||||
if ("value" in descriptor) descriptor.writable = true;
|
||||
Object.defineProperty(target, descriptor.key, descriptor);
|
||||
}
|
||||
}
|
||||
|
||||
function _createClass(Constructor, protoProps, staticProps) {
|
||||
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
|
||||
if (staticProps) _defineProperties(Constructor, staticProps);
|
||||
return Constructor;
|
||||
}
|
||||
|
||||
function _inherits(subClass, superClass) {
|
||||
if (typeof superClass !== "function" && superClass !== null) {
|
||||
throw new TypeError("Super expression must either be null or a function");
|
||||
}
|
||||
|
||||
subClass.prototype = Object.create(superClass && superClass.prototype, {
|
||||
constructor: {
|
||||
value: subClass,
|
||||
writable: true,
|
||||
configurable: true
|
||||
}
|
||||
});
|
||||
if (superClass) _setPrototypeOf(subClass, superClass);
|
||||
}
|
||||
|
||||
function _getPrototypeOf(o) {
|
||||
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
|
||||
return o.__proto__ || Object.getPrototypeOf(o);
|
||||
};
|
||||
return _getPrototypeOf(o);
|
||||
}
|
||||
|
||||
function _setPrototypeOf(o, p) {
|
||||
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
|
||||
o.__proto__ = p;
|
||||
return o;
|
||||
};
|
||||
|
||||
return _setPrototypeOf(o, p);
|
||||
}
|
||||
|
||||
function _isNativeReflectConstruct() {
|
||||
if (typeof Reflect === "undefined" || !Reflect.construct) return false;
|
||||
if (Reflect.construct.sham) return false;
|
||||
if (typeof Proxy === "function") return true;
|
||||
|
||||
try {
|
||||
Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function _assertThisInitialized(self) {
|
||||
if (self === void 0) {
|
||||
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
function _possibleConstructorReturn(self, call) {
|
||||
if (call && (typeof call === "object" || typeof call === "function")) {
|
||||
return call;
|
||||
}
|
||||
|
||||
return _assertThisInitialized(self);
|
||||
}
|
||||
|
||||
function _createSuper(Derived) {
|
||||
var hasNativeReflectConstruct = _isNativeReflectConstruct();
|
||||
|
||||
return function _createSuperInternal() {
|
||||
var Super = _getPrototypeOf(Derived),
|
||||
result;
|
||||
|
||||
if (hasNativeReflectConstruct) {
|
||||
var NewTarget = _getPrototypeOf(this).constructor;
|
||||
|
||||
result = Reflect.construct(Super, arguments, NewTarget);
|
||||
} else {
|
||||
result = Super.apply(this, arguments);
|
||||
}
|
||||
|
||||
return _possibleConstructorReturn(this, result);
|
||||
};
|
||||
}
|
||||
|
||||
function _superPropBase(object, property) {
|
||||
while (!Object.prototype.hasOwnProperty.call(object, property)) {
|
||||
object = _getPrototypeOf(object);
|
||||
if (object === null) break;
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
function _get(target, property, receiver) {
|
||||
if (typeof Reflect !== "undefined" && Reflect.get) {
|
||||
_get = Reflect.get;
|
||||
} else {
|
||||
_get = function _get(target, property, receiver) {
|
||||
var base = _superPropBase(target, property);
|
||||
|
||||
if (!base) return;
|
||||
var desc = Object.getOwnPropertyDescriptor(base, property);
|
||||
|
||||
if (desc.get) {
|
||||
return desc.get.call(receiver);
|
||||
}
|
||||
|
||||
return desc.value;
|
||||
};
|
||||
}
|
||||
|
||||
return _get(target, property, receiver || target);
|
||||
}
|
||||
|
||||
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
|
||||
|
||||
function createCommonjsModule(fn, module) {
|
||||
return module = { exports: {} }, fn(module, module.exports), module.exports;
|
||||
}
|
||||
|
||||
var check = function (it) {
|
||||
return it && it.Math == Math && it;
|
||||
};
|
||||
|
||||
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
|
||||
var global_1 =
|
||||
/* global globalThis -- safe */
|
||||
check(typeof globalThis == 'object' && globalThis) ||
|
||||
check(typeof window == 'object' && window) ||
|
||||
check(typeof self == 'object' && self) ||
|
||||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
|
||||
// eslint-disable-next-line no-new-func -- fallback
|
||||
(function () { return this; })() || Function('return this')();
|
||||
|
||||
var fails = function (exec) {
|
||||
try {
|
||||
return !!exec();
|
||||
} catch (error) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
// Detect IE8's incomplete defineProperty implementation
|
||||
var descriptors = !fails(function () {
|
||||
return Object.defineProperty({}, 1, { get: function () { return 7; } })[1] != 7;
|
||||
});
|
||||
|
||||
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
|
||||
var getOwnPropertyDescriptor$1 = Object.getOwnPropertyDescriptor;
|
||||
|
||||
// Nashorn ~ JDK8 bug
|
||||
var NASHORN_BUG = getOwnPropertyDescriptor$1 && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
|
||||
|
||||
// `Object.prototype.propertyIsEnumerable` method implementation
|
||||
// https://tc39.es/ecma262/#sec-object.prototype.propertyisenumerable
|
||||
var f$4 = NASHORN_BUG ? function propertyIsEnumerable(V) {
|
||||
var descriptor = getOwnPropertyDescriptor$1(this, V);
|
||||
return !!descriptor && descriptor.enumerable;
|
||||
} : nativePropertyIsEnumerable;
|
||||
|
||||
var objectPropertyIsEnumerable = {
|
||||
f: f$4
|
||||
};
|
||||
|
||||
var createPropertyDescriptor = function (bitmap, value) {
|
||||
return {
|
||||
enumerable: !(bitmap & 1),
|
||||
configurable: !(bitmap & 2),
|
||||
writable: !(bitmap & 4),
|
||||
value: value
|
||||
};
|
||||
};
|
||||
|
||||
var toString = {}.toString;
|
||||
|
||||
var classofRaw = function (it) {
|
||||
return toString.call(it).slice(8, -1);
|
||||
};
|
||||
|
||||
var split = ''.split;
|
||||
|
||||
// fallback for non-array-like ES3 and non-enumerable old V8 strings
|
||||
var indexedObject = fails(function () {
|
||||
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
|
||||
// eslint-disable-next-line no-prototype-builtins -- safe
|
||||
return !Object('z').propertyIsEnumerable(0);
|
||||
}) ? function (it) {
|
||||
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
|
||||
} : Object;
|
||||
|
||||
// `RequireObjectCoercible` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-requireobjectcoercible
|
||||
var requireObjectCoercible = function (it) {
|
||||
if (it == undefined) throw TypeError("Can't call method on " + it);
|
||||
return it;
|
||||
};
|
||||
|
||||
// toObject with fallback for non-array-like ES3 strings
|
||||
|
||||
|
||||
|
||||
var toIndexedObject = function (it) {
|
||||
return indexedObject(requireObjectCoercible(it));
|
||||
};
|
||||
|
||||
var isObject = function (it) {
|
||||
return typeof it === 'object' ? it !== null : typeof it === 'function';
|
||||
};
|
||||
|
||||
// `ToPrimitive` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-toprimitive
|
||||
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
|
||||
// and the second argument - flag - preferred type is a string
|
||||
var toPrimitive = function (input, PREFERRED_STRING) {
|
||||
if (!isObject(input)) return input;
|
||||
var fn, val;
|
||||
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
throw TypeError("Can't convert object to primitive value");
|
||||
};
|
||||
|
||||
var hasOwnProperty = {}.hasOwnProperty;
|
||||
|
||||
var has$1 = function (it, key) {
|
||||
return hasOwnProperty.call(it, key);
|
||||
};
|
||||
|
||||
var document = global_1.document;
|
||||
// typeof document.createElement is 'object' in old IE
|
||||
var EXISTS = isObject(document) && isObject(document.createElement);
|
||||
|
||||
var documentCreateElement = function (it) {
|
||||
return EXISTS ? document.createElement(it) : {};
|
||||
};
|
||||
|
||||
// Thank's IE8 for his funny defineProperty
|
||||
var ie8DomDefine = !descriptors && !fails(function () {
|
||||
return Object.defineProperty(documentCreateElement('div'), 'a', {
|
||||
get: function () { return 7; }
|
||||
}).a != 7;
|
||||
});
|
||||
|
||||
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
|
||||
|
||||
// `Object.getOwnPropertyDescriptor` method
|
||||
// https://tc39.es/ecma262/#sec-object.getownpropertydescriptor
|
||||
var f$3 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
|
||||
O = toIndexedObject(O);
|
||||
P = toPrimitive(P, true);
|
||||
if (ie8DomDefine) try {
|
||||
return nativeGetOwnPropertyDescriptor(O, P);
|
||||
} catch (error) { /* empty */ }
|
||||
if (has$1(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
|
||||
};
|
||||
|
||||
var objectGetOwnPropertyDescriptor = {
|
||||
f: f$3
|
||||
};
|
||||
|
||||
var anObject = function (it) {
|
||||
if (!isObject(it)) {
|
||||
throw TypeError(String(it) + ' is not an object');
|
||||
} return it;
|
||||
};
|
||||
|
||||
var nativeDefineProperty = Object.defineProperty;
|
||||
|
||||
// `Object.defineProperty` method
|
||||
// https://tc39.es/ecma262/#sec-object.defineproperty
|
||||
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
|
||||
anObject(O);
|
||||
P = toPrimitive(P, true);
|
||||
anObject(Attributes);
|
||||
if (ie8DomDefine) try {
|
||||
return nativeDefineProperty(O, P, Attributes);
|
||||
} catch (error) { /* empty */ }
|
||||
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
|
||||
if ('value' in Attributes) O[P] = Attributes.value;
|
||||
return O;
|
||||
};
|
||||
|
||||
var objectDefineProperty = {
|
||||
f: f$2
|
||||
};
|
||||
|
||||
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
|
||||
return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
|
||||
} : function (object, key, value) {
|
||||
object[key] = value;
|
||||
return object;
|
||||
};
|
||||
|
||||
var setGlobal = function (key, value) {
|
||||
try {
|
||||
createNonEnumerableProperty(global_1, key, value);
|
||||
} catch (error) {
|
||||
global_1[key] = value;
|
||||
} return value;
|
||||
};
|
||||
|
||||
var SHARED = '__core-js_shared__';
|
||||
var store$1 = global_1[SHARED] || setGlobal(SHARED, {});
|
||||
|
||||
var sharedStore = store$1;
|
||||
|
||||
var functionToString = Function.toString;
|
||||
|
||||
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
|
||||
if (typeof sharedStore.inspectSource != 'function') {
|
||||
sharedStore.inspectSource = function (it) {
|
||||
return functionToString.call(it);
|
||||
};
|
||||
}
|
||||
|
||||
var inspectSource = sharedStore.inspectSource;
|
||||
|
||||
var WeakMap$1 = global_1.WeakMap;
|
||||
|
||||
var nativeWeakMap = typeof WeakMap$1 === 'function' && /native code/.test(inspectSource(WeakMap$1));
|
||||
|
||||
var shared = createCommonjsModule(function (module) {
|
||||
(module.exports = function (key, value) {
|
||||
return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
|
||||
})('versions', []).push({
|
||||
version: '3.9.1',
|
||||
mode: 'global',
|
||||
copyright: '© 2021 Denis Pushkarev (zloirock.ru)'
|
||||
});
|
||||
});
|
||||
|
||||
var id = 0;
|
||||
var postfix = Math.random();
|
||||
|
||||
var uid = function (key) {
|
||||
return 'Symbol(' + String(key === undefined ? '' : key) + ')_' + (++id + postfix).toString(36);
|
||||
};
|
||||
|
||||
var keys = shared('keys');
|
||||
|
||||
var sharedKey = function (key) {
|
||||
return keys[key] || (keys[key] = uid(key));
|
||||
};
|
||||
|
||||
var hiddenKeys$1 = {};
|
||||
|
||||
var WeakMap = global_1.WeakMap;
|
||||
var set, get, has;
|
||||
|
||||
var enforce = function (it) {
|
||||
return has(it) ? get(it) : set(it, {});
|
||||
};
|
||||
|
||||
var getterFor = function (TYPE) {
|
||||
return function (it) {
|
||||
var state;
|
||||
if (!isObject(it) || (state = get(it)).type !== TYPE) {
|
||||
throw TypeError('Incompatible receiver, ' + TYPE + ' required');
|
||||
} return state;
|
||||
};
|
||||
};
|
||||
|
||||
if (nativeWeakMap) {
|
||||
var store = sharedStore.state || (sharedStore.state = new WeakMap());
|
||||
var wmget = store.get;
|
||||
var wmhas = store.has;
|
||||
var wmset = store.set;
|
||||
set = function (it, metadata) {
|
||||
metadata.facade = it;
|
||||
wmset.call(store, it, metadata);
|
||||
return metadata;
|
||||
};
|
||||
get = function (it) {
|
||||
return wmget.call(store, it) || {};
|
||||
};
|
||||
has = function (it) {
|
||||
return wmhas.call(store, it);
|
||||
};
|
||||
} else {
|
||||
var STATE = sharedKey('state');
|
||||
hiddenKeys$1[STATE] = true;
|
||||
set = function (it, metadata) {
|
||||
metadata.facade = it;
|
||||
createNonEnumerableProperty(it, STATE, metadata);
|
||||
return metadata;
|
||||
};
|
||||
get = function (it) {
|
||||
return has$1(it, STATE) ? it[STATE] : {};
|
||||
};
|
||||
has = function (it) {
|
||||
return has$1(it, STATE);
|
||||
};
|
||||
}
|
||||
|
||||
var internalState = {
|
||||
set: set,
|
||||
get: get,
|
||||
has: has,
|
||||
enforce: enforce,
|
||||
getterFor: getterFor
|
||||
};
|
||||
|
||||
var redefine = createCommonjsModule(function (module) {
|
||||
var getInternalState = internalState.get;
|
||||
var enforceInternalState = internalState.enforce;
|
||||
var TEMPLATE = String(String).split('String');
|
||||
|
||||
(module.exports = function (O, key, value, options) {
|
||||
var unsafe = options ? !!options.unsafe : false;
|
||||
var simple = options ? !!options.enumerable : false;
|
||||
var noTargetGet = options ? !!options.noTargetGet : false;
|
||||
var state;
|
||||
if (typeof value == 'function') {
|
||||
if (typeof key == 'string' && !has$1(value, 'name')) {
|
||||
createNonEnumerableProperty(value, 'name', key);
|
||||
}
|
||||
state = enforceInternalState(value);
|
||||
if (!state.source) {
|
||||
state.source = TEMPLATE.join(typeof key == 'string' ? key : '');
|
||||
}
|
||||
}
|
||||
if (O === global_1) {
|
||||
if (simple) O[key] = value;
|
||||
else setGlobal(key, value);
|
||||
return;
|
||||
} else if (!unsafe) {
|
||||
delete O[key];
|
||||
} else if (!noTargetGet && O[key]) {
|
||||
simple = true;
|
||||
}
|
||||
if (simple) O[key] = value;
|
||||
else createNonEnumerableProperty(O, key, value);
|
||||
// add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
|
||||
})(Function.prototype, 'toString', function toString() {
|
||||
return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
|
||||
});
|
||||
});
|
||||
|
||||
var path = global_1;
|
||||
|
||||
var aFunction = function (variable) {
|
||||
return typeof variable == 'function' ? variable : undefined;
|
||||
};
|
||||
|
||||
var getBuiltIn = function (namespace, method) {
|
||||
return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
|
||||
: path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
|
||||
};
|
||||
|
||||
var ceil = Math.ceil;
|
||||
var floor = Math.floor;
|
||||
|
||||
// `ToInteger` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-tointeger
|
||||
var toInteger = function (argument) {
|
||||
return isNaN(argument = +argument) ? 0 : (argument > 0 ? floor : ceil)(argument);
|
||||
};
|
||||
|
||||
var min$1 = Math.min;
|
||||
|
||||
// `ToLength` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-tolength
|
||||
var toLength = function (argument) {
|
||||
return argument > 0 ? min$1(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
|
||||
};
|
||||
|
||||
var max = Math.max;
|
||||
var min = Math.min;
|
||||
|
||||
// Helper for a popular repeating case of the spec:
|
||||
// Let integer be ? ToInteger(index).
|
||||
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
|
||||
var toAbsoluteIndex = function (index, length) {
|
||||
var integer = toInteger(index);
|
||||
return integer < 0 ? max(integer + length, 0) : min(integer, length);
|
||||
};
|
||||
|
||||
// `Array.prototype.{ indexOf, includes }` methods implementation
|
||||
var createMethod = function (IS_INCLUDES) {
|
||||
return function ($this, el, fromIndex) {
|
||||
var O = toIndexedObject($this);
|
||||
var length = toLength(O.length);
|
||||
var index = toAbsoluteIndex(fromIndex, length);
|
||||
var value;
|
||||
// Array#includes uses SameValueZero equality algorithm
|
||||
// eslint-disable-next-line no-self-compare -- NaN check
|
||||
if (IS_INCLUDES && el != el) while (length > index) {
|
||||
value = O[index++];
|
||||
// eslint-disable-next-line no-self-compare -- NaN check
|
||||
if (value != value) return true;
|
||||
// Array#indexOf ignores holes, Array#includes - not
|
||||
} else for (;length > index; index++) {
|
||||
if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
|
||||
} return !IS_INCLUDES && -1;
|
||||
};
|
||||
};
|
||||
|
||||
var arrayIncludes = {
|
||||
// `Array.prototype.includes` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.includes
|
||||
includes: createMethod(true),
|
||||
// `Array.prototype.indexOf` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.indexof
|
||||
indexOf: createMethod(false)
|
||||
};
|
||||
|
||||
var indexOf = arrayIncludes.indexOf;
|
||||
|
||||
|
||||
var objectKeysInternal = function (object, names) {
|
||||
var O = toIndexedObject(object);
|
||||
var i = 0;
|
||||
var result = [];
|
||||
var key;
|
||||
for (key in O) !has$1(hiddenKeys$1, key) && has$1(O, key) && result.push(key);
|
||||
// Don't enum bug & hidden keys
|
||||
while (names.length > i) if (has$1(O, key = names[i++])) {
|
||||
~indexOf(result, key) || result.push(key);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
// IE8- don't enum bug keys
|
||||
var enumBugKeys = [
|
||||
'constructor',
|
||||
'hasOwnProperty',
|
||||
'isPrototypeOf',
|
||||
'propertyIsEnumerable',
|
||||
'toLocaleString',
|
||||
'toString',
|
||||
'valueOf'
|
||||
];
|
||||
|
||||
var hiddenKeys = enumBugKeys.concat('length', 'prototype');
|
||||
|
||||
// `Object.getOwnPropertyNames` method
|
||||
// https://tc39.es/ecma262/#sec-object.getownpropertynames
|
||||
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
|
||||
return objectKeysInternal(O, hiddenKeys);
|
||||
};
|
||||
|
||||
var objectGetOwnPropertyNames = {
|
||||
f: f$1
|
||||
};
|
||||
|
||||
var f = Object.getOwnPropertySymbols;
|
||||
|
||||
var objectGetOwnPropertySymbols = {
|
||||
f: f
|
||||
};
|
||||
|
||||
// all object keys, includes non-enumerable and symbols
|
||||
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
|
||||
var keys = objectGetOwnPropertyNames.f(anObject(it));
|
||||
var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
|
||||
return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
|
||||
};
|
||||
|
||||
var copyConstructorProperties = function (target, source) {
|
||||
var keys = ownKeys(source);
|
||||
var defineProperty = objectDefineProperty.f;
|
||||
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
|
||||
for (var i = 0; i < keys.length; i++) {
|
||||
var key = keys[i];
|
||||
if (!has$1(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
|
||||
}
|
||||
};
|
||||
|
||||
var replacement = /#|\.prototype\./;
|
||||
|
||||
var isForced = function (feature, detection) {
|
||||
var value = data[normalize(feature)];
|
||||
return value == POLYFILL ? true
|
||||
: value == NATIVE ? false
|
||||
: typeof detection == 'function' ? fails(detection)
|
||||
: !!detection;
|
||||
};
|
||||
|
||||
var normalize = isForced.normalize = function (string) {
|
||||
return String(string).replace(replacement, '.').toLowerCase();
|
||||
};
|
||||
|
||||
var data = isForced.data = {};
|
||||
var NATIVE = isForced.NATIVE = 'N';
|
||||
var POLYFILL = isForced.POLYFILL = 'P';
|
||||
|
||||
var isForced_1 = isForced;
|
||||
|
||||
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/*
|
||||
options.target - name of the target object
|
||||
options.global - target is the global object
|
||||
options.stat - export as static methods of target
|
||||
options.proto - export as prototype methods of target
|
||||
options.real - real prototype method for the `pure` version
|
||||
options.forced - export even if the native feature is available
|
||||
options.bind - bind methods to the target, required for the `pure` version
|
||||
options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
|
||||
options.unsafe - use the simple assignment of property instead of delete + defineProperty
|
||||
options.sham - add a flag to not completely full polyfills
|
||||
options.enumerable - export as enumerable property
|
||||
options.noTargetGet - prevent calling a getter on target
|
||||
*/
|
||||
var _export = function (options, source) {
|
||||
var TARGET = options.target;
|
||||
var GLOBAL = options.global;
|
||||
var STATIC = options.stat;
|
||||
var FORCED, target, key, targetProperty, sourceProperty, descriptor;
|
||||
if (GLOBAL) {
|
||||
target = global_1;
|
||||
} else if (STATIC) {
|
||||
target = global_1[TARGET] || setGlobal(TARGET, {});
|
||||
} else {
|
||||
target = (global_1[TARGET] || {}).prototype;
|
||||
}
|
||||
if (target) for (key in source) {
|
||||
sourceProperty = source[key];
|
||||
if (options.noTargetGet) {
|
||||
descriptor = getOwnPropertyDescriptor(target, key);
|
||||
targetProperty = descriptor && descriptor.value;
|
||||
} else targetProperty = target[key];
|
||||
FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
|
||||
// contained in target
|
||||
if (!FORCED && targetProperty !== undefined) {
|
||||
if (typeof sourceProperty === typeof targetProperty) continue;
|
||||
copyConstructorProperties(sourceProperty, targetProperty);
|
||||
}
|
||||
// add a flag to not completely full polyfills
|
||||
if (options.sham || (targetProperty && targetProperty.sham)) {
|
||||
createNonEnumerableProperty(sourceProperty, 'sham', true);
|
||||
}
|
||||
// extend global
|
||||
redefine(target, key, sourceProperty, options);
|
||||
}
|
||||
};
|
||||
|
||||
// `IsArray` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-isarray
|
||||
var isArray = Array.isArray || function isArray(arg) {
|
||||
return classofRaw(arg) == 'Array';
|
||||
};
|
||||
|
||||
// `ToObject` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-toobject
|
||||
var toObject = function (argument) {
|
||||
return Object(requireObjectCoercible(argument));
|
||||
};
|
||||
|
||||
var createProperty = function (object, key, value) {
|
||||
var propertyKey = toPrimitive(key);
|
||||
if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
|
||||
else object[propertyKey] = value;
|
||||
};
|
||||
|
||||
var engineIsNode = classofRaw(global_1.process) == 'process';
|
||||
|
||||
var engineUserAgent = getBuiltIn('navigator', 'userAgent') || '';
|
||||
|
||||
var process = global_1.process;
|
||||
var versions = process && process.versions;
|
||||
var v8 = versions && versions.v8;
|
||||
var match, version;
|
||||
|
||||
if (v8) {
|
||||
match = v8.split('.');
|
||||
version = match[0] + match[1];
|
||||
} else if (engineUserAgent) {
|
||||
match = engineUserAgent.match(/Edge\/(\d+)/);
|
||||
if (!match || match[1] >= 74) {
|
||||
match = engineUserAgent.match(/Chrome\/(\d+)/);
|
||||
if (match) version = match[1];
|
||||
}
|
||||
}
|
||||
|
||||
var engineV8Version = version && +version;
|
||||
|
||||
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
|
||||
/* global Symbol -- required for testing */
|
||||
return !Symbol.sham &&
|
||||
// Chrome 38 Symbol has incorrect toString conversion
|
||||
// Chrome 38-40 symbols are not inherited from DOM collections prototypes to instances
|
||||
(engineIsNode ? engineV8Version === 38 : engineV8Version > 37 && engineV8Version < 41);
|
||||
});
|
||||
|
||||
var useSymbolAsUid = nativeSymbol
|
||||
/* global Symbol -- safe */
|
||||
&& !Symbol.sham
|
||||
&& typeof Symbol.iterator == 'symbol';
|
||||
|
||||
var WellKnownSymbolsStore = shared('wks');
|
||||
var Symbol$1 = global_1.Symbol;
|
||||
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : Symbol$1 && Symbol$1.withoutSetter || uid;
|
||||
|
||||
var wellKnownSymbol = function (name) {
|
||||
if (!has$1(WellKnownSymbolsStore, name) || !(nativeSymbol || typeof WellKnownSymbolsStore[name] == 'string')) {
|
||||
if (nativeSymbol && has$1(Symbol$1, name)) {
|
||||
WellKnownSymbolsStore[name] = Symbol$1[name];
|
||||
} else {
|
||||
WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
|
||||
}
|
||||
} return WellKnownSymbolsStore[name];
|
||||
};
|
||||
|
||||
var SPECIES$1 = wellKnownSymbol('species');
|
||||
|
||||
// `ArraySpeciesCreate` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-arrayspeciescreate
|
||||
var arraySpeciesCreate = function (originalArray, length) {
|
||||
var C;
|
||||
if (isArray(originalArray)) {
|
||||
C = originalArray.constructor;
|
||||
// cross-realm fallback
|
||||
if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
|
||||
else if (isObject(C)) {
|
||||
C = C[SPECIES$1];
|
||||
if (C === null) C = undefined;
|
||||
}
|
||||
} return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
|
||||
};
|
||||
|
||||
var SPECIES = wellKnownSymbol('species');
|
||||
|
||||
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
|
||||
// We can't use this feature detection in V8 since it causes
|
||||
// deoptimization and serious performance degradation
|
||||
// https://github.com/zloirock/core-js/issues/677
|
||||
return engineV8Version >= 51 || !fails(function () {
|
||||
var array = [];
|
||||
var constructor = array.constructor = {};
|
||||
constructor[SPECIES] = function () {
|
||||
return { foo: 1 };
|
||||
};
|
||||
return array[METHOD_NAME](Boolean).foo !== 1;
|
||||
});
|
||||
};
|
||||
|
||||
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
|
||||
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
|
||||
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
|
||||
|
||||
// We can't use this feature detection in V8 since it causes
|
||||
// deoptimization and serious performance degradation
|
||||
// https://github.com/zloirock/core-js/issues/679
|
||||
var IS_CONCAT_SPREADABLE_SUPPORT = engineV8Version >= 51 || !fails(function () {
|
||||
var array = [];
|
||||
array[IS_CONCAT_SPREADABLE] = false;
|
||||
return array.concat()[0] !== array;
|
||||
});
|
||||
|
||||
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
|
||||
|
||||
var isConcatSpreadable = function (O) {
|
||||
if (!isObject(O)) return false;
|
||||
var spreadable = O[IS_CONCAT_SPREADABLE];
|
||||
return spreadable !== undefined ? !!spreadable : isArray(O);
|
||||
};
|
||||
|
||||
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
|
||||
|
||||
// `Array.prototype.concat` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.concat
|
||||
// with adding support of @@isConcatSpreadable and @@species
|
||||
_export({ target: 'Array', proto: true, forced: FORCED }, {
|
||||
// eslint-disable-next-line no-unused-vars -- required for `.length`
|
||||
concat: function concat(arg) {
|
||||
var O = toObject(this);
|
||||
var A = arraySpeciesCreate(O, 0);
|
||||
var n = 0;
|
||||
var i, k, length, len, E;
|
||||
for (i = -1, length = arguments.length; i < length; i++) {
|
||||
E = i === -1 ? O : arguments[i];
|
||||
if (isConcatSpreadable(E)) {
|
||||
len = toLength(E.length);
|
||||
if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
|
||||
for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
|
||||
} else {
|
||||
if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
|
||||
createProperty(A, n++, E);
|
||||
}
|
||||
}
|
||||
A.length = n;
|
||||
return A;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* When using server-side processing, the default mode of operation for
|
||||
* bootstrap-table is to simply throw away any data that currently exists in the
|
||||
* table and make a request to the server to get the first page of data to
|
||||
* display. This is fine for an empty table, but if you already have the first
|
||||
* page of data displayed in the plain HTML, it is a waste of resources. As
|
||||
* such, you can use data-defer-url instead of data-url to allow you to instruct
|
||||
* bootstrap-table to not make that initial request, rather it will use the data
|
||||
* already on the page.
|
||||
*
|
||||
* @author: Ruben Suarez
|
||||
* @webSite: http://rubensa.eu.org
|
||||
* @update zhixin wen <wenzhixin2010@gmail.com>
|
||||
*/
|
||||
|
||||
$__default['default'].extend($__default['default'].fn.bootstrapTable.defaults, {
|
||||
deferUrl: undefined
|
||||
});
|
||||
|
||||
$__default['default'].BootstrapTable = /*#__PURE__*/function (_$$BootstrapTable) {
|
||||
_inherits(_class, _$$BootstrapTable);
|
||||
|
||||
var _super = _createSuper(_class);
|
||||
|
||||
function _class() {
|
||||
_classCallCheck(this, _class);
|
||||
|
||||
return _super.apply(this, arguments);
|
||||
}
|
||||
|
||||
_createClass(_class, [{
|
||||
key: "init",
|
||||
value: function init() {
|
||||
var _get2;
|
||||
|
||||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||
args[_key] = arguments[_key];
|
||||
}
|
||||
|
||||
(_get2 = _get(_getPrototypeOf(_class.prototype), "init", this)).call.apply(_get2, [this].concat(args));
|
||||
|
||||
if (this.options.deferUrl) {
|
||||
this.options.url = this.options.deferUrl;
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
return _class;
|
||||
}($__default['default'].BootstrapTable);
|
||||
|
||||
})));
|
||||
+10
File diff suppressed because one or more lines are too long
+1904
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
Vendored
+2232
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+13
@@ -0,0 +1,13 @@
|
||||
@charset "UTF-8";
|
||||
/**
|
||||
* @author: Dennis Hernández
|
||||
* @webSite: http://djhvscf.github.io/Blog
|
||||
* @version: v2.1.1
|
||||
*/
|
||||
.no-filter-control {
|
||||
height: 34px;
|
||||
}
|
||||
|
||||
.filter-control {
|
||||
margin: 0 2px 2px 2px;
|
||||
}
|
||||
+3140
File diff suppressed because it is too large
Load Diff
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
@charset "UTF-8";.no-filter-control{height:34px}.filter-control{margin:0 2px 2px 2px}
|
||||
+10
File diff suppressed because one or more lines are too long
+2434
File diff suppressed because it is too large
Load Diff
Vendored
+10
File diff suppressed because one or more lines are too long
+25
@@ -0,0 +1,25 @@
|
||||
.fixed-columns,
|
||||
.fixed-columns-right {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
background-color: #fff;
|
||||
box-sizing: border-box;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.fixed-columns {
|
||||
left: 0;
|
||||
}
|
||||
|
||||
.fixed-columns .fixed-table-body {
|
||||
overflow: hidden !important;
|
||||
}
|
||||
|
||||
.fixed-columns-right {
|
||||
right: 0;
|
||||
}
|
||||
|
||||
.fixed-columns-right .fixed-table-body {
|
||||
overflow-x: hidden !important;
|
||||
}
|
||||
+1610
File diff suppressed because it is too large
Load Diff
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
.fixed-columns,.fixed-columns-right{position:absolute;top:0;height:100%;background-color:#fff;box-sizing:border-box;z-index:1}.fixed-columns{left:0}.fixed-columns .fixed-table-body{overflow:hidden!important}.fixed-columns-right{right:0}.fixed-columns-right .fixed-table-body{overflow-x:hidden!important}
|
||||
+10
File diff suppressed because one or more lines are too long
+8
@@ -0,0 +1,8 @@
|
||||
.bootstrap-table .table > tbody > tr.groupBy.expanded,
|
||||
.bootstrap-table .table > tbody > tr.groupBy.collapsed {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.bootstrap-table .table > tbody > tr.hidden {
|
||||
display: none;
|
||||
}
|
||||
+1604
File diff suppressed because it is too large
Load Diff
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
.bootstrap-table .table>tbody>tr.groupBy.collapsed,.bootstrap-table .table>tbody>tr.groupBy.expanded{cursor:pointer}.bootstrap-table .table>tbody>tr.hidden{display:none}
|
||||
+10
File diff suppressed because one or more lines are too long
+159
@@ -0,0 +1,159 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
|
||||
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
|
||||
}(this, (function ($) { 'use strict';
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
|
||||
|
||||
function _classCallCheck(instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
}
|
||||
|
||||
function _defineProperties(target, props) {
|
||||
for (var i = 0; i < props.length; i++) {
|
||||
var descriptor = props[i];
|
||||
descriptor.enumerable = descriptor.enumerable || false;
|
||||
descriptor.configurable = true;
|
||||
if ("value" in descriptor) descriptor.writable = true;
|
||||
Object.defineProperty(target, descriptor.key, descriptor);
|
||||
}
|
||||
}
|
||||
|
||||
function _createClass(Constructor, protoProps, staticProps) {
|
||||
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
|
||||
if (staticProps) _defineProperties(Constructor, staticProps);
|
||||
return Constructor;
|
||||
}
|
||||
|
||||
function _inherits(subClass, superClass) {
|
||||
if (typeof superClass !== "function" && superClass !== null) {
|
||||
throw new TypeError("Super expression must either be null or a function");
|
||||
}
|
||||
|
||||
subClass.prototype = Object.create(superClass && superClass.prototype, {
|
||||
constructor: {
|
||||
value: subClass,
|
||||
writable: true,
|
||||
configurable: true
|
||||
}
|
||||
});
|
||||
if (superClass) _setPrototypeOf(subClass, superClass);
|
||||
}
|
||||
|
||||
function _getPrototypeOf(o) {
|
||||
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
|
||||
return o.__proto__ || Object.getPrototypeOf(o);
|
||||
};
|
||||
return _getPrototypeOf(o);
|
||||
}
|
||||
|
||||
function _setPrototypeOf(o, p) {
|
||||
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
|
||||
o.__proto__ = p;
|
||||
return o;
|
||||
};
|
||||
|
||||
return _setPrototypeOf(o, p);
|
||||
}
|
||||
|
||||
function _isNativeReflectConstruct() {
|
||||
if (typeof Reflect === "undefined" || !Reflect.construct) return false;
|
||||
if (Reflect.construct.sham) return false;
|
||||
if (typeof Proxy === "function") return true;
|
||||
|
||||
try {
|
||||
Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function _assertThisInitialized(self) {
|
||||
if (self === void 0) {
|
||||
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
function _possibleConstructorReturn(self, call) {
|
||||
if (call && (typeof call === "object" || typeof call === "function")) {
|
||||
return call;
|
||||
}
|
||||
|
||||
return _assertThisInitialized(self);
|
||||
}
|
||||
|
||||
function _createSuper(Derived) {
|
||||
var hasNativeReflectConstruct = _isNativeReflectConstruct();
|
||||
|
||||
return function _createSuperInternal() {
|
||||
var Super = _getPrototypeOf(Derived),
|
||||
result;
|
||||
|
||||
if (hasNativeReflectConstruct) {
|
||||
var NewTarget = _getPrototypeOf(this).constructor;
|
||||
|
||||
result = Reflect.construct(Super, arguments, NewTarget);
|
||||
} else {
|
||||
result = Super.apply(this, arguments);
|
||||
}
|
||||
|
||||
return _possibleConstructorReturn(this, result);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @author: Jewway
|
||||
* @update zhixin wen <wenzhixin2010@gmail.com>
|
||||
*/
|
||||
|
||||
$__default['default'].fn.bootstrapTable.methods.push('changeTitle');
|
||||
$__default['default'].fn.bootstrapTable.methods.push('changeLocale');
|
||||
|
||||
$__default['default'].BootstrapTable = /*#__PURE__*/function (_$$BootstrapTable) {
|
||||
_inherits(_class, _$$BootstrapTable);
|
||||
|
||||
var _super = _createSuper(_class);
|
||||
|
||||
function _class() {
|
||||
_classCallCheck(this, _class);
|
||||
|
||||
return _super.apply(this, arguments);
|
||||
}
|
||||
|
||||
_createClass(_class, [{
|
||||
key: "changeTitle",
|
||||
value: function changeTitle(locale) {
|
||||
$__default['default'].each(this.options.columns, function (idx, columnList) {
|
||||
$__default['default'].each(columnList, function (idx, column) {
|
||||
if (column.field) {
|
||||
column.title = locale[column.field];
|
||||
}
|
||||
});
|
||||
});
|
||||
this.initHeader();
|
||||
this.initBody();
|
||||
this.initToolbar();
|
||||
}
|
||||
}, {
|
||||
key: "changeLocale",
|
||||
value: function changeLocale(localeId) {
|
||||
this.options.locale = localeId;
|
||||
this.initLocale();
|
||||
this.initPagination();
|
||||
this.initBody();
|
||||
this.initToolbar();
|
||||
}
|
||||
}]);
|
||||
|
||||
return _class;
|
||||
}($__default['default'].BootstrapTable);
|
||||
|
||||
})));
|
||||
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(require("jquery")):"function"==typeof define&&define.amd?define(["jquery"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).jQuery)}(this,(function(t){"use strict";function e(t){return t&&"object"==typeof t&&"default"in t?t:{default:t}}var n=e(t);function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function r(t,e){for(var n=0;n<e.length;n++){var o=e[n];o.enumerable=o.enumerable||!1,o.configurable=!0,"value"in o&&(o.writable=!0),Object.defineProperty(t,o.key,o)}}function i(t){return(i=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function u(t,e){return(u=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function f(t,e){return!e||"object"!=typeof e&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function c(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,o=i(t);if(e){var r=i(this).constructor;n=Reflect.construct(o,arguments,r)}else n=o.apply(this,arguments);return f(this,n)}}n.default.fn.bootstrapTable.methods.push("changeTitle"),n.default.fn.bootstrapTable.methods.push("changeLocale"),n.default.BootstrapTable=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&u(t,e)}(l,t);var e,i,f,a=c(l);function l(){return o(this,l),a.apply(this,arguments)}return 
e=l,(i=[{key:"changeTitle",value:function(t){n.default.each(this.options.columns,(function(e,o){n.default.each(o,(function(e,n){n.field&&(n.title=t[n.field])}))})),this.initHeader(),this.initBody(),this.initToolbar()}},{key:"changeLocale",value:function(t){this.options.locale=t,this.initLocale(),this.initPagination(),this.initBody(),this.initToolbar()}}])&&r(e.prototype,i),f&&r(e,f),l}(n.default.BootstrapTable)}));
|
||||
+1472
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
Vendored
+1295
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+1792
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+11
@@ -0,0 +1,11 @@
|
||||
/* Vendored: bootstrap-table "page jump to" pagination extension stylesheet.
 * Bootstrap-3 theme: force the pagination list and the jump-to box inline so
 * they share one row; the input sizing below applies to every theme.
 * NOTE(review): the stray "|" / "||||" lines are diff-viewer capture
 * artifacts, not part of the upstream stylesheet — confirm against upstream
 * before shipping this file as-is. */
.bootstrap-table.bootstrap3 .fixed-table-pagination > .pagination ul.pagination,
|
||||
.bootstrap-table.bootstrap3 .fixed-table-pagination > .pagination .page-jump-to {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
/* Jump-to page number input: fixed width, centred text, floated against the
 * pagination controls. */
.bootstrap-table .fixed-table-pagination > .pagination .page-jump-to input {
|
||||
width: 70px;
|
||||
margin-left: 5px;
|
||||
text-align: center;
|
||||
float: left;
|
||||
}
|
||||
+1123
File diff suppressed because it is too large
Load Diff
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
/* Vendored minified build of the page-jump-to stylesheet — regenerate from
 * upstream bootstrap-table rather than hand-editing. */
.bootstrap-table.bootstrap3 .fixed-table-pagination>.pagination .page-jump-to,.bootstrap-table.bootstrap3 .fixed-table-pagination>.pagination ul.pagination{display:inline}.bootstrap-table .fixed-table-pagination>.pagination .page-jump-to input{width:70px;margin-left:5px;text-align:center;float:left}
|
||||
+10
File diff suppressed because one or more lines are too long
+1192
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
Vendored
+1646
File diff suppressed because it is too large
Load Diff
src/backend/InvenTree/InvenTree/static/bootstrap-table/extensions/print/bootstrap-table-print.min.js
Vendored
+10
File diff suppressed because one or more lines are too long
+1477
File diff suppressed because it is too large
Load Diff
+10
File diff suppressed because one or more lines are too long
+14
@@ -0,0 +1,14 @@
|
||||
/* Vendored: bootstrap-table "reorder rows" extension stylesheet.
 * Styles applied to a table row's cells while it is being dragged.
 * NOTE(review): each rule declares -webkit-box-shadow twice — the second
 * declaration wins, so the first is dead; `-box-shadow` is not a real CSS
 * property (likely a typo for the unprefixed `box-shadow` upstream). Vendored
 * file — fix upstream, not here. The stray "|" / "||||" lines are diff-viewer
 * capture artifacts, not part of the stylesheet. */
.reorder_rows_onDragClass td {
|
||||
background-color: #eee;
|
||||
-webkit-box-shadow: 11px 5px 12px 2px #333, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
-webkit-box-shadow: 6px 3px 5px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
-moz-box-shadow: 6px 4px 5px 1px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
-box-shadow: 6px 4px 5px 1px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
}
|
||||
|
||||
/* Last cell of the dragged row gets a distinct shadow (adds an inset left
 * edge in the -moz/-box variants). */
.reorder_rows_onDragClass td:last-child {
|
||||
-webkit-box-shadow: 8px 7px 12px 0 #333, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
-webkit-box-shadow: 1px 8px 6px -4px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset;
|
||||
-moz-box-shadow: 0 9px 4px -4px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset, -1px 0 0 #ccc inset;
|
||||
-box-shadow: 0 9px 4px -4px #555, 0 1px 0 #ccc inset, 0 -1px 0 #ccc inset, -1px 0 0 #ccc inset;
|
||||
}
|
||||
+1040
File diff suppressed because it is too large
Load Diff
+10
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
|
||||
*
|
||||
* @version v1.18.3
|
||||
* @homepage https://bootstrap-table.com
|
||||
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
/* Vendored minified build of the reorder-rows drag stylesheet — regenerate
 * from upstream bootstrap-table rather than hand-editing. */
.reorder_rows_onDragClass td{background-color:#eee;-webkit-box-shadow:11px 5px 12px 2px #333,0 1px 0 #ccc inset,0 -1px 0 #ccc inset;-webkit-box-shadow:6px 3px 5px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset;-moz-box-shadow:6px 4px 5px 1px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset;-box-shadow:6px 4px 5px 1px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset}.reorder_rows_onDragClass td:last-child{-webkit-box-shadow:8px 7px 12px 0 #333,0 1px 0 #ccc inset,0 -1px 0 #ccc inset;-webkit-box-shadow:1px 8px 6px -4px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset;-moz-box-shadow:0 9px 4px -4px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset,-1px 0 0 #ccc inset;-box-shadow:0 9px 4px -4px #555,0 1px 0 #ccc inset,0 -1px 0 #ccc inset,-1px 0 0 #ccc inset}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user