2
0
mirror of https://github.com/inventree/InvenTree.git synced 2026-04-07 20:10:58 +00:00

Migrate plugin tables (#11648)

* Prevent creation of PluginConfig during migrations

* Refactor data import process

- Split into multiple separate steps

* Load plugins during data load / dump

- Required, otherwise we cannot dump the data

* Refactor export_records

- Use temporary file
- Cleanup docstring

* Force apps check on second validation step

* Improve import sequencing

* Update CI script

* Update migration docs

* CI pipeline for running import/export test

* Fix workflow naming

* Fix env vars

* Add placeholder script

* Fix matrix env vars

* Fix missing env var

* Install required packages

* Fix typo

* Tweak tasks.py

* Install dummy plugin as part of the CI process

* Updated CI workflow

* Validate exported data

* Additional CI process

* Log mandatory plugins to INFO

* Force global setting

* Refactor CI pipeline

* Tweak file test

* Workflow updates

* Enable auto-update

* Test if import/export test should run

* Trigger if tasks.py changes
This commit is contained in:
Oliver
2026-04-02 21:26:34 +11:00
committed by GitHub
parent 9aa2308f52
commit 5c55f4f4c0
11 changed files with 417 additions and 116 deletions

212
tasks.py
View File

@@ -8,6 +8,7 @@ import re
import shutil
import subprocess
import sys
import tempfile
from functools import wraps
from pathlib import Path
from platform import python_version
@@ -1065,39 +1066,21 @@ def update(
'exclude_plugins': 'Exclude plugin data from the output file (default = False)',
'include_sso': 'Include SSO token data in the output file (default = False)',
'include_session': 'Include user session data in the output file (default = False)',
'retain_temp': 'Retain temporary files (containing permissions) at end of process (default = False)',
},
pre=[wait],
)
def export_records(
c,
filename='data.json',
overwrite=False,
include_email=False,
include_permissions=False,
include_tokens=False,
exclude_plugins=False,
include_sso=False,
include_session=False,
retain_temp=False,
overwrite: bool = False,
include_email: bool = False,
include_permissions: bool = False,
include_tokens: bool = False,
exclude_plugins: bool = False,
include_sso: bool = False,
include_session: bool = False,
):
"""Export all database records to a file.
Write data to the file defined by filename.
If --overwrite is not set, the user will be prompted about overwriting an existing files.
If --include-permissions is not set, the file defined by filename will have permissions specified for a user or group removed.
If --delete-temp is not set, the temporary file (which includes permissions) will not be deleted. This file is named filename.tmp
For historical reasons, calling this function without any arguments will thus result in two files:
- data.json: does not include permissions
- data.json.tmp: includes permissions
If you want the script to overwrite any existing files without asking, add argument -o / --overwrite.
If you only want one file, add argument - d / --delete-temp.
If you want only one file, with permissions, then additionally add argument -i / --include-permissions
"""
"""Export all database records to a file."""
# Get an absolute path to the file
target = Path(filename)
if not target.is_absolute():
@@ -1107,8 +1090,6 @@ def export_records(
check_file_existence(target, overwrite)
tmpfile = f'{target}.tmp'
excludes = content_excludes(
allow_email=include_email,
allow_tokens=include_tokens,
@@ -1117,16 +1098,19 @@ def export_records(
allow_sso=include_sso,
)
cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile}' {excludes}"
with tempfile.NamedTemporaryFile(
suffix='.json', encoding='utf-8', mode='w+t', delete=True
) as tmpfile:
cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile.name}' {excludes}"
# Dump data to temporary file
manage(c, cmd, pty=True)
# Dump data to temporary file
manage(c, cmd, pty=True)
info('Running data post-processing step...')
info('Running data post-processing step...')
# Post-process the file, to remove any "permissions" specified for a user or group
with open(tmpfile, encoding='utf-8') as f_in:
data = json.loads(f_in.read())
# Post-process the file, to remove any "permissions" specified for a user or group
tmpfile.seek(0)
data = json.loads(tmpfile.read())
data_out = [
{
@@ -1164,22 +1148,23 @@ def export_records(
with open(target, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(data_out, indent=2))
if not retain_temp:
info('Removing temporary files')
os.remove(tmpfile)
success('Data export completed')
def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
def validate_import_metadata(
c, metadata: dict, strict: bool = False, apps: bool = True, verbose: bool = False
) -> bool:
"""Validate the metadata associated with an import file.
Arguments:
c: The context or connection object
metadata (dict): The metadata to validate
apps (bool): If True, validate that all apps listed in the metadata are installed in the current environment.
strict (bool): If True, the import process will fail if any issues are detected.
verbose (bool): If True, print detailed information during validation.
"""
info('Validating import metadata...')
if verbose:
info('Validating import metadata...')
valid = True
@@ -1207,16 +1192,17 @@ def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
f"Source version '{source_version}' does not match the current InvenTree version '{get_inventree_version()}' - this may lead to issues with the import process"
)
local_apps = set(installed_apps(c))
source_apps = set(metadata.get('installed_apps', []))
if apps:
local_apps = set(installed_apps(c))
source_apps = set(metadata.get('installed_apps', []))
for app in source_apps:
if app not in local_apps:
metadata_issue(
f"Source app '{app}' is not installed in the current environment - this may break the import process"
)
for app in source_apps:
if app not in local_apps:
metadata_issue(
f"Source app '{app}' is not installed in the current environment - this may break the import process"
)
if valid:
if verbose and valid:
success('Metadata validation succeeded - no issues detected')
return valid
@@ -1226,10 +1212,11 @@ def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
help={
'filename': 'Input filename',
'clear': 'Clear existing data before import',
'force': 'Force deletion of existing data without confirmation (only applies if --clear is set)',
'strict': 'Strict mode - fail if any issues are detected with the metadata (default = False)',
'retain_temp': 'Retain temporary files at end of process (default = False)',
'ignore_nonexistent': 'Ignore non-existent database models (default = False)',
'exclude_plugins': 'Exclude plugin data from the import process (default = False)',
'skip_migrations': 'Skip the migration step after clearing data (default = False)',
},
pre=[wait],
post=[rebuild_models, rebuild_thumbnails],
@@ -1240,12 +1227,14 @@ def import_records(
clear: bool = False,
retain_temp: bool = False,
strict: bool = False,
force: bool = False,
exclude_plugins: bool = False,
ignore_nonexistent: bool = False,
skip_migrations: bool = False,
):
"""Import database records from a file."""
# Get an absolute path to the supplied filename
target = Path(filename)
if not target.is_absolute():
target = local_dir().joinpath(filename)
@@ -1254,17 +1243,13 @@ def import_records(
sys.exit(1)
if clear:
delete_data(c, force=force, migrate=True)
delete_data(c, force=True, migrate=True)
if not skip_migrations:
migrate(c)
info(f"Importing database records from '{target}'")
# We need to load 'auth' data (users / groups) *first*
# This is due to the users.owner model, which has a ContentType foreign key
authfile = f'{target}.auth.json'
# Pre-process the data, to remove any "permissions" specified for a user or group
datafile = f'{target}.data.json'
with open(target, encoding='utf-8') as f_in:
try:
data = json.loads(f_in.read())
@@ -1272,71 +1257,100 @@ def import_records(
error(f'ERROR: Failed to decode JSON file: {exc}')
sys.exit(1)
# Separate out the data into different categories, to ensure they are loaded in the correct order
auth_data = []
load_data = []
common_data = []
plugin_data = []
all_data = []
# A dict containing metadata associated with the data file
metadata = {}
def load_data(
title: str,
data: list[dict],
app: Optional[str] = None,
excludes: Optional[list[str]] = None,
) -> tempfile.NamedTemporaryFile:
"""Helper function to save data to a temporary file, and then load into the database."""
nonlocal ignore_nonexistent
nonlocal c
info(f'Loading {len(data)} {title} records...')
with tempfile.NamedTemporaryFile(
suffix='.json', mode='w', encoding='utf-8', delete=False
) as f_out:
f_out.write(json.dumps(data, indent=2))
cmd = f'loaddata {f_out.name} -v 0 --force-color'
if app:
cmd += f' --app {app}'
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
# A set of content types to exclude from the import process
if excludes:
cmd += f' -i {excludes}'
manage(c, cmd, pty=True)
# Iterate through each entry in the provided data file, and separate out into different categories based on the model type
for entry in data:
# Metadata needs to be extracted first
if entry.get('metadata', False):
metadata = entry
continue
if 'model' in entry:
if model := entry.get('model', None):
# Clear out any permissions specified for a group
if entry['model'] == 'auth.group':
if model == 'auth.group':
entry['fields']['permissions'] = []
# Clear out any permissions specified for a user
if entry['model'] == 'auth.user':
if model == 'auth.user':
entry['fields']['user_permissions'] = []
# Save auth data for later
if entry['model'].startswith('auth.'):
# Handle certain model types separately, to ensure they are loaded in the correct order
if model.startswith('auth.'):
auth_data.append(entry)
if model.startswith('users.'):
auth_data.append(entry)
elif model.startswith('common.'):
common_data.append(entry)
elif model.startswith('plugin.'):
plugin_data.append(entry)
else:
load_data.append(entry)
all_data.append(entry)
else:
warning('WARNING: Invalid entry in data file')
error(
f'{"ERROR" if strict else "WARNING"}: Invalid entry in data file - missing "model" key'
)
print(entry)
if strict:
sys.exit(1)
# Check the metadata associated with the imported data
validate_import_metadata(c, metadata, strict=strict)
# Do not validate the 'apps' list yet - as the plugins have not yet been loaded
validate_import_metadata(c, metadata, strict=strict, apps=False)
# Write the auth file data
with open(authfile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(auth_data, indent=2))
# Load the temporary files in order
load_data('auth', auth_data)
load_data('common', common_data, app='common')
# Write the processed data to the tmp file
with open(datafile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(load_data, indent=2))
if not exclude_plugins:
load_data('plugins', plugin_data, app='plugin')
# A set of content types to exclude from the import process
excludes = content_excludes(allow_auth=False)
# Now that the plugins have been loaded, run database migrations again to ensure any new plugins have their database schema up to date
if not skip_migrations:
migrate(c)
# Import auth models first
info('Importing user auth data...')
cmd = f"loaddata '{authfile}'"
# Run validation again - ensure that the plugin apps have been loaded correctly
validate_import_metadata(c, metadata, strict=strict, apps=True)
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
manage(c, cmd, pty=True)
# Import everything else next
info('Importing database records...')
cmd = f"loaddata '{datafile}' -i {excludes}"
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
manage(c, cmd, pty=True)
if not retain_temp:
info('Removing temporary files')
os.remove(datafile)
os.remove(authfile)
load_data('remaining', all_data, excludes=content_excludes(allow_auth=False))
success('Data import completed')
@@ -1664,9 +1678,7 @@ def setup_test(
# Load data
info('Loading database records ...')
import_records(
c, filename=template_dir.joinpath('inventree_data.json'), clear=True, force=True
)
import_records(c, filename=template_dir.joinpath('inventree_data.json'), clear=True)
# Copy media files
src = template_dir.joinpath('media')