2
0
mirror of https://github.com/inventree/InvenTree.git synced 2026-04-10 05:21:07 +00:00

Migrate plugin tables (#11648)

* Prevent creation of PluginConfig during migrations

* Refactor data import process

- Split into multiple separate steps

* Load plugins during data load / dump

- Required, otherwise we cannot dump the data

* Refactor export_records

- Use temporary file
- Cleanup docstring

* Force apps check on second validation step

* Improve import sequencing

* Update CI script

* Update migration docs

* CI pipeline for running import/export test

* Fix workflow naming

* Fix env vars

* Add placeholder script

* Fix matrix env vars

* Fix missing env var

* Install required packages

* Fix typo

* Tweak tasks.py

* Install dummy plugin as part of the

* Updated CI workflow

* Validate exported data

* Additional CI process

* Log mandatory plugins to INFO

* Force global setting

* Refactor CI pipeline

* Tweak file test

* Workflow updates

* Enable auto-update

* Test if import/export test should run

* Trigger if tasks.py changes
This commit is contained in:
Oliver
2026-04-02 21:26:34 +11:00
committed by GitHub
parent 9aa2308f52
commit 5c55f4f4c0
11 changed files with 417 additions and 116 deletions

View File

@@ -13,5 +13,5 @@ runs:
invoke export-records -f data.json
python3 ./src/backend/InvenTree/manage.py flush --noinput
invoke migrate
-          invoke import-records -c -f data.json --force --strict
+          invoke import-records -c -f data.json --strict

101
.github/scripts/check_exported_data.py vendored Normal file
View File

@@ -0,0 +1,101 @@
"""Script to check a data file exported using the 'export-records' command.

This script is intended to be used as part of the CI workflow,
in conjunction with the "workflows/import_export.yaml" workflow.

It reads the exported data file, to ensure that:

- The file can be read and parsed as JSON
- The file contains the expected metadata
- The file contains the expected plugin configuration
- The file contains the expected plugin database records
"""

import argparse
import json
import os
import sys

# Identifiers for the dummy plugin installed during the CI run:
# PLUGIN_KEY is the Django app label, PLUGIN_SLUG the PluginConfig key.
PLUGIN_KEY = 'dummy_app_plugin'
PLUGIN_SLUG = 'dummy-app-plugin'


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Check exported data file')
    parser.add_argument('datafile', help='Path to the exported data file (JSON)')
    args = parser.parse_args()

    if not os.path.isfile(args.datafile):
        print(f'Error: File not found: {args.datafile}')
        sys.exit(1)

    with open(args.datafile, encoding='utf-8') as f:
        try:
            data = json.load(f)
            print(f'Successfully loaded data from {args.datafile}')
            print(f'Number of records: {len(data)}')
        except json.JSONDecodeError as e:
            print(f'Error: Failed to parse JSON file: {e}')
            sys.exit(1)

    # Flags tracking which expected entries have been observed in the dump
    found_metadata = False
    found_installed_apps = False
    found_plugin_config = False

    # key -> value pairs extracted from the dummy plugin's database records
    plugin_data_records = {}

    # Inspect the data and check that it has the expected structure and content.
    for entry in data:
        # Check metadata entry for expected values
        if entry.get('metadata', False):
            print('Found metadata entry')
            found_metadata = True

            # The exported metadata must list these apps as installed
            expected_apps = ['InvenTree', 'allauth', 'dbbackup', PLUGIN_KEY]
            apps = entry.get('installed_apps', [])

            for app in expected_apps:
                if app not in apps:
                    print(f'- Expected app "{app}" not found in installed apps list')
                    sys.exit(1)

            found_installed_apps = True

        elif entry.get('model', None) == 'plugin.pluginconfig':
            key = entry['fields']['key']

            if key == PLUGIN_SLUG:
                print(f'Found plugin configuration for plugin "{PLUGIN_KEY}"')
                found_plugin_config = True

        elif entry.get('model', None) == f'{PLUGIN_KEY}.examplemodel':
            key = entry['fields']['key']
            value = entry['fields']['value']
            plugin_data_records[key] = value

    if not found_metadata:
        print('Error: No metadata entry found in exported data')
        sys.exit(1)

    # Defensive: unreachable unless the loop logic changes, since a missing
    # app already exits inside the loop and no metadata exits above.
    if not found_installed_apps:
        print(
            f'Error: Plugin "{PLUGIN_KEY}" not found in installed apps list in metadata'
        )
        sys.exit(1)

    if not found_plugin_config:
        print(f'Error: No plugin configuration found for plugin "{PLUGIN_KEY}"')
        sys.exit(1)

    # Check the extracted plugin records
    expected_keys = ['alpha', 'beta', 'gamma', 'delta']

    for key in expected_keys:
        if key not in plugin_data_records:
            print(
                f'Error: Expected plugin record with key "{key}" not found in exported data'
            )
            sys.exit(1)

    print('All checks passed successfully!')

118
.github/workflows/import_export.yaml vendored Normal file
View File

@@ -0,0 +1,118 @@
---
# Ensure that data import / export functionality works as expected.
# - Create a dataset in a Postgres database (including plugin data)
# - Export the dataset to an agnostic format (JSON)
# - Import the dataset into a Sqlite database

name: Import / Export

on:
  push:
    branches-ignore: ["l10*"]
  pull_request:
    branches-ignore: ["l10*"]

permissions:
  contents: read

env:
  # Quoted: a plain 3.11 would parse as the float 3.11
  python_version: "3.11"
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  INVENTREE_DEBUG: "false"
  INVENTREE_LOG_LEVEL: WARNING
  INVENTREE_MEDIA_ROOT: /home/runner/work/InvenTree/test_inventree_media
  INVENTREE_STATIC_ROOT: /home/runner/work/InvenTree/test_inventree_static
  INVENTREE_BACKUP_DIR: /home/runner/work/InvenTree/test_inventree_backup
  INVENTREE_SITE_URL: http://localhost:8000
  INVENTREE_PLUGINS_ENABLED: "true"
  INVENTREE_AUTO_UPDATE: "true"
  INVENTREE_PLUGINS_MANDATORY: "dummy-app-plugin"
  INVENTREE_GLOBAL_SETTINGS: '{"ENABLE_PLUGINS_APP": true}'
  DATA_FILE: /home/runner/work/InvenTree/test_inventree_data.json
  # Initial database connection: Postgres (switched to Sqlite mid-job)
  INVENTREE_DB_ENGINE: postgresql
  INVENTREE_DB_NAME: inventree
  INVENTREE_DB_USER: inventree
  INVENTREE_DB_PASSWORD: password
  INVENTREE_DB_HOST: "127.0.0.1"
  INVENTREE_DB_PORT: 5432

jobs:
  # Only run the (expensive) test job when backend code or tasks.py changed
  paths-filter:
    name: filter
    runs-on: ubuntu-latest
    outputs:
      server: ${{ steps.filter.outputs.server }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # pin@v6.0.2
        with:
          persist-credentials: false
      - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # pin@v4.0.1
        id: filter
        with:
          filters: |
            server:
              - 'src/backend/**'
              - 'tasks.py'

  test:
    runs-on: ubuntu-latest
    needs: paths-filter
    if: needs.paths-filter.outputs.server == 'true'
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: inventree
          POSTGRES_PASSWORD: password
        ports:
          # Quoted: digits-and-colons would otherwise hit YAML 1.1
          # sexagesimal integer parsing
          - "5432:5432"
    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # pin@v6.0.2
        with:
          fetch-depth: 0
          persist-credentials: false
      - name: Environment Setup
        uses: ./.github/actions/setup
        with:
          apt-dependency: gettext poppler-utils libpq-dev
          pip-dependency: psycopg
          update: true
          static: false
      - name: Setup Postgres Database
        run: |
          invoke migrate
          invoke dev.setup-test -i
      - name: Create Plugin Data
        run: |
          pip install -U inventree-dummy-app-plugin
          invoke migrate
          cd src/backend/InvenTree && python manage.py create_dummy_data
      - name: Export Postgres Dataset
        run: |
          invoke export-records -o -f ${{ env.DATA_FILE }}
          python .github/scripts/check_exported_data.py ${{ env.DATA_FILE }}
          invoke dev.delete-data --force
      - name: Update Environment Variables for Sqlite
        run: |
          echo "Updating environment variables for Sqlite"
          echo "INVENTREE_DB_ENGINE=sqlite" >> $GITHUB_ENV
          echo "INVENTREE_DB_NAME=/home/runner/work/InvenTree/test_inventree_db.sqlite3" >> $GITHUB_ENV
      - name: Setup Sqlite Database
        run: |
          invoke migrate
          test -f /home/runner/work/InvenTree/test_inventree_db.sqlite3 || (echo "Sqlite database not created" && exit 1)
      - name: Import Sqlite Dataset
        run: |
          invoke import-records -c -f ${{ env.DATA_FILE }}
          cd src/backend/InvenTree && python manage.py check_dummy_data
      - name: Export Sqlite Dataset
        run: |
          invoke export-records -o -f ${{ env.DATA_FILE }}
          python .github/scripts/check_exported_data.py ${{ env.DATA_FILE }}