2
0
mirror of https://github.com/inventree/InvenTree.git synced 2026-04-05 11:01:04 +00:00

Migrate plugin tables (#11648)

* Prevent creation of PluginConfig during migrations

* Refactor data import process

- Split into multiple separate steps

* Load plugins during data load / dump

- Required, otherwise we cannot dump the data

* Refactor export_records

- Use temporary file
- Cleanup docstring

* Force apps check on second validation step

* Improve import sequencing

* Update CI script

* Update migration docs

* CI pipeline for running import/export test

* Fix workflow naming

* Fix env vars

* Add placeholder script

* Fix matrix env vars

* Fix missing env var

* Install required packages

* Fix typo

* Tweak tasks.py

* Install dummy plugin as part of the

* Updated CI workflow

* Validate exported data

* Additional CI process

* Log mandatory plugins to INFO

* Force global setting

* Refactor CI pipeline

* Tweak file test

* Workflow updates

* Enable auto-update

* Test if import/export test should run

* Trigger if tasks.py changes
This commit is contained in:
Oliver
2026-04-02 21:26:34 +11:00
committed by GitHub
parent 9aa2308f52
commit 5c55f4f4c0
11 changed files with 417 additions and 116 deletions

View File

@@ -13,5 +13,5 @@ runs:
invoke export-records -f data.json
python3 ./src/backend/InvenTree/manage.py flush --noinput
invoke migrate
invoke import-records -c -f data.json --force --strict
invoke import-records -c -f data.json --force --strict
invoke import-records -c -f data.json --strict
invoke import-records -c -f data.json --strict

101
.github/scripts/check_exported_data.py vendored Normal file
View File

@@ -0,0 +1,101 @@
"""Script to check a data file exported using the 'export-records' command.

This script is intended to be used as part of the CI workflow,
in conjunction with the "workflows/import_export.yaml" workflow.

It reads the exported data file, to ensure that:

- The file can be read and parsed as JSON
- The file contains the expected metadata
- The file contains the expected plugin configuration
- The file contains the expected plugin database records
"""

import argparse
import json
import os
import sys

# Identifiers for the dummy plugin installed as part of the CI workflow
PLUGIN_KEY = 'dummy_app_plugin'
PLUGIN_SLUG = 'dummy-app-plugin'

# Apps which must appear in the metadata 'installed_apps' entry
EXPECTED_APPS = ['InvenTree', 'allauth', 'dbbackup', PLUGIN_KEY]

# Record keys which must be present in the exported plugin data
EXPECTED_RECORD_KEYS = ['alpha', 'beta', 'gamma', 'delta']


def validate_data(data: list) -> list:
    """Validate the structure and content of an exported dataset.

    Arguments:
        data: Parsed JSON content of the export file (a list of entries).

    Returns:
        A list of human-readable error messages (empty if the data is valid).
    """
    errors = []

    found_metadata = False
    found_plugin_config = False
    plugin_data_records = {}

    for entry in data:
        # The metadata entry is a special case - it carries no 'model' key
        if entry.get('metadata', False):
            print('Found metadata entry')
            found_metadata = True

            apps = entry.get('installed_apps', [])
            for app in EXPECTED_APPS:
                if app not in apps:
                    errors.append(
                        f'Expected app "{app}" not found in installed apps list'
                    )

        elif entry.get('model', None) == 'plugin.pluginconfig':
            if entry['fields']['key'] == PLUGIN_SLUG:
                print(f'Found plugin configuration for plugin "{PLUGIN_KEY}"')
                found_plugin_config = True

        elif entry.get('model', None) == f'{PLUGIN_KEY}.examplemodel':
            # Collect key/value pairs stored by the dummy plugin model
            plugin_data_records[entry['fields']['key']] = entry['fields']['value']

    if not found_metadata:
        errors.append('No metadata entry found in exported data')

    if not found_plugin_config:
        errors.append(f'No plugin configuration found for plugin "{PLUGIN_KEY}"')

    # Check the extracted plugin records
    for key in EXPECTED_RECORD_KEYS:
        if key not in plugin_data_records:
            errors.append(
                f'Expected plugin record with key "{key}" not found in exported data'
            )

    return errors


def main() -> None:
    """Entry point: parse arguments, load the data file, and run all checks.

    Exits with a non-zero status code if any check fails.
    """
    parser = argparse.ArgumentParser(description='Check exported data file')
    parser.add_argument('datafile', help='Path to the exported data file (JSON)')
    args = parser.parse_args()

    if not os.path.isfile(args.datafile):
        print(f'Error: File not found: {args.datafile}')
        sys.exit(1)

    with open(args.datafile, encoding='utf-8') as f:
        try:
            data = json.load(f)
        except json.JSONDecodeError as e:
            print(f'Error: Failed to parse JSON file: {e}')
            sys.exit(1)

    print(f'Successfully loaded data from {args.datafile}')
    print(f'Number of records: {len(data)}')

    errors = validate_data(data)

    if errors:
        # Report *all* detected issues (not just the first) before exiting
        for err in errors:
            print(f'Error: {err}')
        sys.exit(1)

    print('All checks passed successfully!')


if __name__ == '__main__':
    main()

118
.github/workflows/import_export.yaml vendored Normal file
View File

@@ -0,0 +1,118 @@
# Ensure that data import / export functionality works as expected.
# - Create a dataset in a Postgres database (including plugin data)
# - Export the dataset to an agnostic format (JSON)
# - Import the dataset into a Sqlite database

name: Import / Export

on:
  push:
    branches-ignore: ["l10*"]
  pull_request:
    branches-ignore: ["l10*"]

permissions:
  contents: read

env:
  python_version: 3.11
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  INVENTREE_DEBUG: false
  INVENTREE_LOG_LEVEL: WARNING
  INVENTREE_MEDIA_ROOT: /home/runner/work/InvenTree/test_inventree_media
  INVENTREE_STATIC_ROOT: /home/runner/work/InvenTree/test_inventree_static
  INVENTREE_BACKUP_DIR: /home/runner/work/InvenTree/test_inventree_backup
  INVENTREE_SITE_URL: http://localhost:8000
  INVENTREE_PLUGINS_ENABLED: true
  INVENTREE_AUTO_UPDATE: true
  INVENTREE_PLUGINS_MANDATORY: "dummy-app-plugin"
  INVENTREE_GLOBAL_SETTINGS: '{"ENABLE_PLUGINS_APP": true}'
  DATA_FILE: /home/runner/work/InvenTree/test_inventree_data.json
  # Postgres connection details - must match the 'postgres' service below
  INVENTREE_DB_ENGINE: postgresql
  INVENTREE_DB_NAME: inventree
  INVENTREE_DB_USER: inventree
  INVENTREE_DB_PASSWORD: password
  INVENTREE_DB_HOST: "127.0.0.1"
  INVENTREE_DB_PORT: 5432

jobs:
  # Gate the (expensive) test job: only run when backend code or tasks.py changed
  paths-filter:
    name: filter
    runs-on: ubuntu-latest
    outputs:
      server: ${{ steps.filter.outputs.server }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # pin@v6.0.2
        with:
          persist-credentials: false
      - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # pin@v4.0.1
        id: filter
        with:
          filters: |
            server:
              - 'src/backend/**'
              - 'tasks.py'

  test:
    runs-on: ubuntu-latest
    needs: paths-filter
    if: needs.paths-filter.outputs.server == 'true'
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: inventree
          POSTGRES_PASSWORD: password
        ports:
          - 5432:5432
    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # pin@v6.0.2
        with:
          fetch-depth: 0
          persist-credentials: false
      - name: Environment Setup
        uses: ./.github/actions/setup
        with:
          apt-dependency: gettext poppler-utils libpq-dev
          pip-dependency: psycopg
          update: true
          static: false
      - name: Setup Postgres Database
        run: |
          invoke migrate
          invoke dev.setup-test -i
      - name: Create Plugin Data
        # Install the dummy plugin, migrate its tables, and generate test records
        run: |
          pip install -U inventree-dummy-app-plugin
          invoke migrate
          cd src/backend/InvenTree && python manage.py create_dummy_data
      - name: Export Postgres Dataset
        # Export to JSON, validate the export, then wipe the Postgres data
        run: |
          invoke export-records -o -f ${{ env.DATA_FILE }}
          python .github/scripts/check_exported_data.py ${{ env.DATA_FILE }}
          invoke dev.delete-data --force
      - name: Update Environment Variables for Sqlite
        # Switch subsequent steps over to a Sqlite backend
        run: |
          echo "Updating environment variables for Sqlite"
          echo "INVENTREE_DB_ENGINE=sqlite" >> $GITHUB_ENV
          echo "INVENTREE_DB_NAME=/home/runner/work/InvenTree/test_inventree_db.sqlite3" >> $GITHUB_ENV
      - name: Setup Sqlite Database
        run: |
          invoke migrate
          test -f /home/runner/work/InvenTree/test_inventree_db.sqlite3 || (echo "Sqlite database not created" && exit 1)
      - name: Import Sqlite Dataset
        # Round-trip: import the Postgres export and verify the plugin data survived
        run: |
          invoke import-records -c -f ${{ env.DATA_FILE }}
          cd src/backend/InvenTree && python manage.py check_dummy_data
      - name: Export Sqlite Dataset
        run: |
          invoke export-records -o -f ${{ env.DATA_FILE }}
          python .github/scripts/check_exported_data.py ${{ env.DATA_FILE }}

View File

@@ -554,4 +554,4 @@ To override global settings, provide a "dictionary" of settings overrides in the
| Environment Variable | Configuration File | Description | Default |
| --- | --- | --- | --- |
| GLOBAL_SETTINGS_OVERRIDES | global_settings_overrides | JSON object containing global settings overrides | *Not specified* |
| INVENTREE_GLOBAL_SETTINGS | global_settings | JSON object containing global settings overrides | *Not specified* |

View File

@@ -201,3 +201,32 @@ This will load the database records from the backup file into the new database.
### Caveats
The process described here is a *suggested* procedure for migrating between incompatible database versions. However, due to the complexity of database software, there may be unforeseen complications that arise during the process.
## Migrating Plugin Data
Custom plugins may define their own database models, and thus have their own data records stored in the database. If a plugin is being migrated from one InvenTree installation to another, then the plugin data must also be migrated.
To account for this, the `export-records` and `import-records` commands have been designed to also export and import plugin data, in addition to the core InvenTree data.
### Exporting Plugin Data
When running the `export-records` command, any data records associated with plugins will also be exported, and included in the output JSON file.
### Importing Plugin Data
When running the `import-records` command, the import process will also attempt to import any plugin data records contained in the input JSON file. However, for the plugin data to be imported correctly, the following conditions must be met:
1. The plugin *code* must be present in the new InvenTree installation. Any plugins *not* installed will not have their tables created, and thus the import process will fail for those records.
2. The plugin *version* must be the same in both installations. If the plugin version is different, then the database schema may be different, and thus the import process may fail.
3. The InvenTree software version must be the same in both installations. If the InvenTree version is different, then the database schema may be different, and thus the import process may fail.
If all of the above conditions are met, then the plugin data *should* be imported correctly into the new database. To achieve this reliably, the following process steps are implemented in the `import-records` command:
1. The database is cleaned of all existing records (if the `-c` option is used).
2. The core InvenTree database migrations are run to ensure that the core database schema is correct.
3. User auth records are imported into the database.
4. Common configuration records (such as global settings) are imported into the database.
5. Plugin configuration records (defining which plugins are active) are imported into the database.
6. Database migrations are run once more, to ensure that any plugin database schemas are correctly initialized.
7. The database is checked to ensure that all required apps are present (i.e. all plugins are installed and correctly activated).
8. All remaining records (including plugin data) are imported into the database.

View File

@@ -125,6 +125,7 @@ def isGeneratingSchema():
'qcluster',
'check',
'shell',
'help',
]
if any(cmd in sys.argv for cmd in excluded_commands):
@@ -132,12 +133,14 @@ def isGeneratingSchema():
included_commands = [
'schema',
'spectactular',
# schema adjacent calls
'export_settings_definitions',
'export_tags',
'export_filters',
'export_report_context',
]
if any(cmd in sys.argv for cmd in included_commands):
return True
@@ -185,11 +188,38 @@ def isInMainThread():
return not isInWorkerThread()
def readOnlyCommands():
    """Return a list of read-only management commands which should not trigger database writes."""
    # Management commands which only inspect, and never mutate, the database.
    # NOTE(review): 'spectactular' looks like a typo of the drf-spectacular
    # 'spectacular' command - kept as-is for consistency with the rest of the file.
    commands = [
        'help',
        'check',
        'shell',
        'sqlflush',
        'list_apps',
        'wait_for_db',
        'spectactular',
        'makemessages',
        'collectstatic',
        'showmigrations',
        'compilemessages',
    ]

    return commands


def isReadOnlyCommand():
    """Return True if the current command is a read-only command, which should not trigger any database writes."""
    read_only = readOnlyCommands()

    # True if any command-line argument names a read-only command
    return any(arg in read_only for arg in sys.argv)
def canAppAccessDatabase(
allow_test: bool = False, allow_plugins: bool = False, allow_shell: bool = False
):
"""Returns True if the apps.py file can access database records.
Arguments:
allow_test: If True, override checks and allow database access during testing mode
allow_plugins: If True, override checks and allow database access during plugin loading
allow_shell: If True, override checks and allow database access during shell sessions
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
@@ -198,7 +228,7 @@ def canAppAccessDatabase(
return False
# Prevent database access if we are importing data
if isImportingData():
if not allow_plugins and isImportingData():
return False
# Prevent database access if we are rebuilding data
@@ -212,13 +242,13 @@ def canAppAccessDatabase(
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'check',
'createsuperuser',
'wait_for_db',
'makemessages',
'compilemessages',
'spectactular',
'createsuperuser',
'collectstatic',
'makemessages',
'spectactular',
'wait_for_db',
'check',
]
if not allow_shell:

View File

@@ -194,6 +194,9 @@ PLUGINS_MANDATORY = get_setting(
'INVENTREE_PLUGINS_MANDATORY', 'plugins_mandatory', typecast=list, default_value=[]
)
if PLUGINS_MANDATORY:
logger.info('Mandatory plugins: %s', PLUGINS_MANDATORY)
PLUGINS_INSTALL_DISABLED = get_boolean_setting(
'INVENTREE_PLUGIN_NOINSTALL', 'plugin_noinstall', False
)

View File

@@ -222,13 +222,18 @@ class PluginsRegistry:
import InvenTree.ready
from plugin.models import PluginConfig
if InvenTree.ready.isImportingData():
return None
# Under certain circumstances, we want to avoid creating new PluginConfig instances in the database
can_create = (
InvenTree.ready.canAppAccessDatabase(
allow_plugins=False, allow_shell=True, allow_test=True
)
and not InvenTree.ready.isReadOnlyCommand()
)
try:
cfg = PluginConfig.objects.filter(key=slug).first()
if not cfg:
if not cfg and can_create:
logger.debug(
"get_plugin_config: Creating new PluginConfig for '%s'", slug
)

View File

@@ -49,6 +49,9 @@ class ReportConfig(AppConfig):
if not InvenTree.ready.canAppAccessDatabase(allow_test=False):
return # pragma: no cover
if InvenTree.ready.isReadOnlyCommand():
return # pragma: no cover
with maintenance_mode_on():
try:
self.create_default_labels()

View File

@@ -24,7 +24,7 @@ from rest_framework.authtoken.models import Token as AuthToken
import InvenTree.helpers
import InvenTree.models
from common.settings import get_global_setting
from InvenTree.ready import isImportingData
from InvenTree.ready import isImportingData, isReadOnlyCommand
from .ruleset import RULESET_CHOICES, get_ruleset_models
@@ -463,7 +463,7 @@ class Owner(models.Model):
def create_owner(sender, instance, **kwargs):
"""Callback function to create a new owner instance after either a new group or user instance is saved."""
# Ignore during data import process to avoid data duplication
if not isImportingData():
if not isReadOnlyCommand() and not isImportingData():
Owner.create(obj=instance)
@@ -600,8 +600,8 @@ class UserProfile(InvenTree.models.MetadataMixin):
@receiver(post_save, sender=User)
def create_or_update_user_profile(sender, instance, created, **kwargs):
"""Create or update user profile when user is saved."""
# Disable profile creation if importing data from file
if isImportingData():
# Disable profile creation if importing data from file or running a read-only command
if isReadOnlyCommand() or isImportingData():
return
if created:

212
tasks.py
View File

@@ -8,6 +8,7 @@ import re
import shutil
import subprocess
import sys
import tempfile
from functools import wraps
from pathlib import Path
from platform import python_version
@@ -1065,39 +1066,21 @@ def update(
'exclude_plugins': 'Exclude plugin data from the output file (default = False)',
'include_sso': 'Include SSO token data in the output file (default = False)',
'include_session': 'Include user session data in the output file (default = False)',
'retain_temp': 'Retain temporary files (containing permissions) at end of process (default = False)',
},
pre=[wait],
)
def export_records(
c,
filename='data.json',
overwrite=False,
include_email=False,
include_permissions=False,
include_tokens=False,
exclude_plugins=False,
include_sso=False,
include_session=False,
retain_temp=False,
overwrite: bool = False,
include_email: bool = False,
include_permissions: bool = False,
include_tokens: bool = False,
exclude_plugins: bool = False,
include_sso: bool = False,
include_session: bool = False,
):
"""Export all database records to a file.
Write data to the file defined by filename.
If --overwrite is not set, the user will be prompted about overwriting an existing files.
If --include-permissions is not set, the file defined by filename will have permissions specified for a user or group removed.
If --delete-temp is not set, the temporary file (which includes permissions) will not be deleted. This file is named filename.tmp
For historical reasons, calling this function without any arguments will thus result in two files:
- data.json: does not include permissions
- data.json.tmp: includes permissions
If you want the script to overwrite any existing files without asking, add argument -o / --overwrite.
If you only want one file, add argument - d / --delete-temp.
If you want only one file, with permissions, then additionally add argument -i / --include-permissions
"""
"""Export all database records to a file."""
# Get an absolute path to the file
target = Path(filename)
if not target.is_absolute():
@@ -1107,8 +1090,6 @@ def export_records(
check_file_existence(target, overwrite)
tmpfile = f'{target}.tmp'
excludes = content_excludes(
allow_email=include_email,
allow_tokens=include_tokens,
@@ -1117,16 +1098,19 @@ def export_records(
allow_sso=include_sso,
)
cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile}' {excludes}"
with tempfile.NamedTemporaryFile(
suffix='.json', encoding='utf-8', mode='w+t', delete=True
) as tmpfile:
cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile.name}' {excludes}"
# Dump data to temporary file
manage(c, cmd, pty=True)
# Dump data to temporary file
manage(c, cmd, pty=True)
info('Running data post-processing step...')
info('Running data post-processing step...')
# Post-process the file, to remove any "permissions" specified for a user or group
with open(tmpfile, encoding='utf-8') as f_in:
data = json.loads(f_in.read())
# Post-process the file, to remove any "permissions" specified for a user or group
tmpfile.seek(0)
data = json.loads(tmpfile.read())
data_out = [
{
@@ -1164,22 +1148,23 @@ def export_records(
with open(target, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(data_out, indent=2))
if not retain_temp:
info('Removing temporary files')
os.remove(tmpfile)
success('Data export completed')
def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
def validate_import_metadata(
c, metadata: dict, strict: bool = False, apps: bool = True, verbose: bool = False
) -> bool:
"""Validate the metadata associated with an import file.
Arguments:
c: The context or connection object
metadata (dict): The metadata to validate
apps (bool): If True, validate that all apps listed in the metadata are installed in the current environment.
strict (bool): If True, the import process will fail if any issues are detected.
verbose (bool): If True, print detailed information during validation.
"""
info('Validating import metadata...')
if verbose:
info('Validating import metadata...')
valid = True
@@ -1207,16 +1192,17 @@ def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
f"Source version '{source_version}' does not match the current InvenTree version '{get_inventree_version()}' - this may lead to issues with the import process"
)
local_apps = set(installed_apps(c))
source_apps = set(metadata.get('installed_apps', []))
if apps:
local_apps = set(installed_apps(c))
source_apps = set(metadata.get('installed_apps', []))
for app in source_apps:
if app not in local_apps:
metadata_issue(
f"Source app '{app}' is not installed in the current environment - this may break the import process"
)
for app in source_apps:
if app not in local_apps:
metadata_issue(
f"Source app '{app}' is not installed in the current environment - this may break the import process"
)
if valid:
if verbose and valid:
success('Metadata validation succeeded - no issues detected')
return valid
@@ -1226,10 +1212,11 @@ def validate_import_metadata(c, metadata: dict, strict: bool = False) -> bool:
help={
'filename': 'Input filename',
'clear': 'Clear existing data before import',
'force': 'Force deletion of existing data without confirmation (only applies if --clear is set)',
'strict': 'Strict mode - fail if any issues are detected with the metadata (default = False)',
'retain_temp': 'Retain temporary files at end of process (default = False)',
'ignore_nonexistent': 'Ignore non-existent database models (default = False)',
'exclude_plugins': 'Exclude plugin data from the import process (default = False)',
'skip_migrations': 'Skip the migration step after clearing data (default = False)',
},
pre=[wait],
post=[rebuild_models, rebuild_thumbnails],
@@ -1240,12 +1227,14 @@ def import_records(
clear: bool = False,
retain_temp: bool = False,
strict: bool = False,
force: bool = False,
exclude_plugins: bool = False,
ignore_nonexistent: bool = False,
skip_migrations: bool = False,
):
"""Import database records from a file."""
# Get an absolute path to the supplied filename
target = Path(filename)
if not target.is_absolute():
target = local_dir().joinpath(filename)
@@ -1254,17 +1243,13 @@ def import_records(
sys.exit(1)
if clear:
delete_data(c, force=force, migrate=True)
delete_data(c, force=True, migrate=True)
if not skip_migrations:
migrate(c)
info(f"Importing database records from '{target}'")
# We need to load 'auth' data (users / groups) *first*
# This is due to the users.owner model, which has a ContentType foreign key
authfile = f'{target}.auth.json'
# Pre-process the data, to remove any "permissions" specified for a user or group
datafile = f'{target}.data.json'
with open(target, encoding='utf-8') as f_in:
try:
data = json.loads(f_in.read())
@@ -1272,71 +1257,100 @@ def import_records(
error(f'ERROR: Failed to decode JSON file: {exc}')
sys.exit(1)
# Separate out the data into different categories, to ensure they are loaded in the correct order
auth_data = []
load_data = []
common_data = []
plugin_data = []
all_data = []
# A dict containing metadata associated with the data file
metadata = {}
def load_data(
title: str,
data: list[dict],
app: Optional[str] = None,
excludes: Optional[list[str]] = None,
) -> tempfile.NamedTemporaryFile:
"""Helper function to save data to a temporary file, and then load into the database."""
nonlocal ignore_nonexistent
nonlocal c
info(f'Loading {len(data)} {title} records...')
with tempfile.NamedTemporaryFile(
suffix='.json', mode='w', encoding='utf-8', delete=False
) as f_out:
f_out.write(json.dumps(data, indent=2))
cmd = f'loaddata {f_out.name} -v 0 --force-color'
if app:
cmd += f' --app {app}'
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
# A set of content types to exclude from the import process
if excludes:
cmd += f' -i {excludes}'
manage(c, cmd, pty=True)
# Iterate through each entry in the provided data file, and separate out into different categories based on the model type
for entry in data:
# Metadata needs to be extracted first
if entry.get('metadata', False):
metadata = entry
continue
if 'model' in entry:
if model := entry.get('model', None):
# Clear out any permissions specified for a group
if entry['model'] == 'auth.group':
if model == 'auth.group':
entry['fields']['permissions'] = []
# Clear out any permissions specified for a user
if entry['model'] == 'auth.user':
if model == 'auth.user':
entry['fields']['user_permissions'] = []
# Save auth data for later
if entry['model'].startswith('auth.'):
# Handle certain model types separately, to ensure they are loaded in the correct order
if model.startswith('auth.'):
auth_data.append(entry)
if model.startswith('users.'):
auth_data.append(entry)
elif model.startswith('common.'):
common_data.append(entry)
elif model.startswith('plugin.'):
plugin_data.append(entry)
else:
load_data.append(entry)
all_data.append(entry)
else:
warning('WARNING: Invalid entry in data file')
error(
f'{"ERROR" if strict else "WARNING"}: Invalid entry in data file - missing "model" key'
)
print(entry)
if strict:
sys.exit(1)
# Check the metadata associated with the imported data
validate_import_metadata(c, metadata, strict=strict)
# Do not validate the 'apps' list yet - as the plugins have not yet been loaded
validate_import_metadata(c, metadata, strict=strict, apps=False)
# Write the auth file data
with open(authfile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(auth_data, indent=2))
# Load the temporary files in order
load_data('auth', auth_data)
load_data('common', common_data, app='common')
# Write the processed data to the tmp file
with open(datafile, 'w', encoding='utf-8') as f_out:
f_out.write(json.dumps(load_data, indent=2))
if not exclude_plugins:
load_data('plugins', plugin_data, app='plugin')
# A set of content types to exclude from the import process
excludes = content_excludes(allow_auth=False)
# Now that the plugins have been loaded, run database migrations again to ensure any new plugins have their database schema up to date
if not skip_migrations:
migrate(c)
# Import auth models first
info('Importing user auth data...')
cmd = f"loaddata '{authfile}'"
# Run validation again - ensure that the plugin apps have been loaded correctly
validate_import_metadata(c, metadata, strict=strict, apps=True)
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
manage(c, cmd, pty=True)
# Import everything else next
info('Importing database records...')
cmd = f"loaddata '{datafile}' -i {excludes}"
if ignore_nonexistent:
cmd += ' --ignorenonexistent'
manage(c, cmd, pty=True)
if not retain_temp:
info('Removing temporary files')
os.remove(datafile)
os.remove(authfile)
load_data('remaining', all_data, excludes=content_excludes(allow_auth=False))
success('Data import completed')
@@ -1664,9 +1678,7 @@ def setup_test(
# Load data
info('Loading database records ...')
import_records(
c, filename=template_dir.joinpath('inventree_data.json'), clear=True, force=True
)
import_records(c, filename=template_dir.joinpath('inventree_data.json'), clear=True)
# Copy media files
src = template_dir.joinpath('media')