Mirror of https://github.com/inventree/InvenTree.git
Backport changes to tasks.py (#6256)
- Fixes ongoing issues with import/export
Parent: 2b0ef2bc61
Commit: 1abdb1fd46
tasks.py (524 lines changed)
@@ -18,11 +18,10 @@ def checkPythonVersion():

     If the python version is not sufficient, exits with a non-zero exit code.
     """
     REQ_MAJOR = 3
     REQ_MINOR = 9

-    version = sys.version.split(" ")[0]
+    version = sys.version.split(' ')[0]

     valid = True
@@ -33,8 +32,8 @@ def checkPythonVersion():
         valid = False

     if not valid:
-        print(f"The installed python version ({version}) is not supported!")
-        print(f"InvenTree requires Python {REQ_MAJOR}.{REQ_MINOR} or above")
+        print(f'The installed python version ({version}) is not supported!')
+        print(f'InvenTree requires Python {REQ_MAJOR}.{REQ_MINOR} or above')
         sys.exit(1)
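For reference, the check above boils down to parsing the interpreter version and comparing it against the required minimum. A minimal standalone sketch (the 3.9 floor comes from REQ_MAJOR / REQ_MINOR in the hunk above; everything else is illustrative):

    import sys

    # sys.version looks like '3.11.4 (main, ...)', so the first token is the version string
    version = sys.version.split(' ')[0]
    major, minor = (int(v) for v in version.split('.')[:2])

    if (major, minor) < (3, 9):
        print(f'The installed python version ({version}) is not supported!')
        print('InvenTree requires Python 3.9 or above')
        sys.exit(1)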
@@ -59,29 +58,57 @@ def apps():
     ]


-def content_excludes():
-    """Returns a list of content types to exclude from import/export."""
+def content_excludes(
+    allow_auth: bool = True,
+    allow_tokens: bool = True,
+    allow_plugins: bool = True,
+    allow_sso: bool = True,
+):
+    """Returns a list of content types to exclude from import/export.
+
+    Arguments:
+        allow_auth (bool): Allow user/group information to be exported/imported
+        allow_tokens (bool): Allow tokens to be exported/imported
+        allow_plugins (bool): Allow plugin information to be exported/imported
+        allow_sso (bool): Allow SSO tokens to be exported/imported
+    """
     excludes = [
-        "contenttypes",
-        "auth.permission",
-        "users.apitoken",
-        "error_report.error",
-        "admin.logentry",
-        "django_q.schedule",
-        "django_q.task",
-        "django_q.ormq",
-        "users.owner",
-        "exchange.rate",
-        "exchange.exchangebackend",
-        "common.notificationentry",
-        "common.notificationmessage",
-        "user_sessions.session",
+        'contenttypes',
+        'auth.permission',
+        'error_report.error',
+        'admin.logentry',
+        'django_q.schedule',
+        'django_q.task',
+        'django_q.ormq',
+        'exchange.rate',
+        'exchange.exchangebackend',
+        'common.notificationentry',
+        'common.notificationmessage',
+        'user_sessions.session',
     ]

-    output = ""
+    # Optionally exclude user auth data
+    if not allow_auth:
+        excludes.append('auth.group')
+        excludes.append('auth.user')
+
+    # Optionally exclude user token information
+    if not allow_tokens:
+        excludes.append('users.apitoken')
+
+    # Optionally exclude plugin information
+    if not allow_plugins:
+        excludes.append('plugin.pluginconfig')
+        excludes.append('plugin.pluginsetting')
+
+    # Optionally exclude SSO application information
+    if not allow_sso:
+        excludes.append('socialaccount.socialapp')
+
+    output = ''

     for e in excludes:
-        output += f"--exclude {e} "
+        output += f'--exclude {e} '

     return output
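The new keyword arguments make the exclusion list configurable per caller. A hedged usage sketch (the dumpdata string mirrors the one used later in export_records; the output filename is illustrative and the call assumes the content_excludes function defined above):

    # Keep plugin data, but exclude API tokens and SSO applications
    excludes = content_excludes(allow_tokens=False, allow_sso=False)

    # Each entry becomes an '--exclude <app.model>' flag for Django's dumpdata command
    cmd = f"dumpdata --natural-foreign --indent 2 --output 'data.json.tmp' {excludes}"
    print(cmd)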
@@ -113,10 +140,10 @@ def manage(c, cmd, pty: bool = False):
         cmd: Django command to run.
         pty (bool, optional): Run an interactive session. Defaults to False.
     """
-    c.run('cd "{path}" && python3 manage.py {cmd}'.format(
-        path=managePyDir(),
-        cmd=cmd
-    ), pty=pty)
+    c.run(
+        'cd "{path}" && python3 manage.py {cmd}'.format(path=managePyDir(), cmd=cmd),
+        pty=pty,
+    )
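The reformatted manage() helper is unchanged in behaviour: it shells out to manage.py inside the directory returned by managePyDir(). A small illustration of the command string it builds (the path shown is hypothetical):

    # With managePyDir() == '/home/user/InvenTree/InvenTree' (hypothetical path) and
    # cmd == 'migrate --noinput', the helper runs:
    #   cd "/home/user/InvenTree/InvenTree" && python3 manage.py migrate --noinput
    shell_cmd = 'cd "{path}" && python3 manage.py {cmd}'.format(
        path='/home/user/InvenTree/InvenTree', cmd='migrate --noinput'
    )
    print(shell_cmd)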


 def yarn(c, cmd, pty: bool = False):
@@ -133,6 +160,7 @@ def yarn(c, cmd, pty: bool = False):

 def node_available(versions: bool = False, bypass_yarn: bool = False):
     """Checks if the frontend environment (ie node and yarn in bash) is available."""

     def ret(val, val0=None, val1=None):
         if versions:
             return val, val0, val1
@@ -140,7 +168,10 @@ def node_available(versions: bool = False, bypass_yarn: bool = False):

     def check(cmd):
         try:
-            return str(subprocess.check_output([cmd], stderr=subprocess.STDOUT, shell=True), encoding='utf-8').strip()
+            return str(
+                subprocess.check_output([cmd], stderr=subprocess.STDOUT, shell=True),
+                encoding='utf-8',
+            ).strip()
         except subprocess.CalledProcessError:
             return None
         except FileNotFoundError:
@@ -154,7 +185,9 @@ def node_available(versions: bool = False, bypass_yarn: bool = False):

     # Print a warning if node is available but yarn is not
     if node_version and not yarn_passes:
-        print('Node is available but yarn is not. Install yarn if you wish to build the frontend.')
+        print(
+            'Node is available but yarn is not. Install yarn if you wish to build the frontend.'
+        )

     # Return the result
     return ret(yarn_passes and node_version, node_version, yarn_version)
@@ -168,11 +201,13 @@ def check_file_existance(filename: str, overwrite: bool = False):
         overwrite (bool, optional): Overwrite the file without asking. Defaults to False.
     """
     if Path(filename).is_file() and overwrite is False:
-        response = input("Warning: file already exists. Do you want to overwrite? [y/N]: ")
+        response = input(
+            'Warning: file already exists. Do you want to overwrite? [y/N]: '
+        )
         response = str(response).strip().lower()

         if response not in ['y', 'yes']:
-            print("Cancelled export operation")
+            print('Cancelled export operation')
             sys.exit(1)
@@ -198,7 +233,9 @@ def install(c):
     # Install required Python packages with PIP
     c.run('pip3 install --upgrade pip')
    c.run('pip3 install --upgrade setuptools')
-    c.run('pip3 install --no-cache-dir --disable-pip-version-check -U -r requirements.txt')
+    c.run(
+        'pip3 install --no-cache-dir --disable-pip-version-check -U -r requirements.txt'
+    )


 @task(help={'tests': 'Set up test dataset at the end'})
@@ -210,12 +247,12 @@ def setup_dev(c, tests=False):
     c.run('pip3 install -U -r requirements-dev.txt')

     # Install pre-commit hook
-    print("Installing pre-commit for checks before git commits...")
+    print('Installing pre-commit for checks before git commits...')
     c.run('pre-commit install')

     # Update all the hooks
     c.run('pre-commit autoupdate')
-    print("pre-commit set up is done...")
+    print('pre-commit set up is done...')

     # Set up test-data if flag is set
     if tests:
@@ -232,19 +269,19 @@ def superuser(c):
 @task
 def rebuild_models(c):
     """Rebuild database models with MPTT structures."""
-    manage(c, "rebuild_models", pty=True)
+    manage(c, 'rebuild_models', pty=True)


 @task
 def rebuild_thumbnails(c):
     """Rebuild missing image thumbnails."""
-    manage(c, "rebuild_thumbnails", pty=True)
+    manage(c, 'rebuild_thumbnails', pty=True)


 @task
 def clean_settings(c):
     """Clean the setting tables of old settings."""
-    manage(c, "clean_settings")
+    manage(c, 'clean_settings')


 @task(help={'mail': "mail of the user who's MFA should be disabled"})
@@ -253,20 +290,16 @@ def remove_mfa(c, mail=''):
     if not mail:
         print('You must provide a users mail')

-    manage(c, f"remove_mfa {mail}")
+    manage(c, f'remove_mfa {mail}')


-@task(
-    help={
-        'frontend': 'Build the frontend',
-    }
-)
+@task(help={'frontend': 'Build the frontend'})
 def static(c, frontend=False):
     """Copies required static files to the STATIC_ROOT directory, as per Django requirements."""
-    manage(c, "prerender")
+    manage(c, 'prerender')
     if frontend and node_available():
         frontend_build(c)
-    manage(c, "collectstatic --no-input")
+    manage(c, 'collectstatic --no-input')


 @task
@@ -280,48 +313,49 @@ def translate_stats(c):
     try:
         manage(c, 'compilemessages', pty=True)
     except Exception:
-        print("WARNING: Translation files could not be compiled:")
+        print('WARNING: Translation files could not be compiled:')

     path = Path('InvenTree', 'script', 'translation_stats.py')
     c.run(f'python3 {path}')


 @task(post=[translate_stats])
-def translate(c):
+def translate(c, ignore_static=False, no_frontend=False):
     """Rebuild translation source files. Advanced use only!

     Note: This command should not be used on a local install,
     it is performed as part of the InvenTree translation toolchain.
     """
-    # Translate applicable .py / .html / .js / .tsx files
-    manage(c, "makemessages --all -e py,html,js --no-wrap")
-    manage(c, "compilemessages")
+    # Translate applicable .py / .html / .js files
+    manage(c, 'makemessages --all -e py,html,js --no-wrap')
+    manage(c, 'compilemessages')

-    if node_available():
+    if not no_frontend and node_available():
         frontend_install(c)
         frontend_trans(c)
         frontend_build(c)

     # Update static files
-    static(c)
+    if not ignore_static:
+        static(c)


 @task
 def backup(c):
     """Backup the database and media files."""
-    print("Backing up InvenTree database...")
-    manage(c, "dbbackup --noinput --clean --compress")
-    print("Backing up InvenTree media files...")
-    manage(c, "mediabackup --noinput --clean --compress")
+    print('Backing up InvenTree database...')
+    manage(c, 'dbbackup --noinput --clean --compress')
+    print('Backing up InvenTree media files...')
+    manage(c, 'mediabackup --noinput --clean --compress')


 @task
 def restore(c):
     """Restore the database and media files."""
-    print("Restoring InvenTree database...")
-    manage(c, "dbrestore --noinput --uncompress")
-    print("Restoring InvenTree media files...")
-    manage(c, "mediarestore --noinput --uncompress")
+    print('Restoring InvenTree database...')
+    manage(c, 'dbrestore --noinput --uncompress')
+    print('Restoring InvenTree media files...')
+    manage(c, 'mediarestore --noinput --uncompress')


 @task(post=[rebuild_models, rebuild_thumbnails])
@@ -330,16 +364,16 @@ def migrate(c):

     This is a critical step if the database schema have been altered!
     """
-    print("Running InvenTree database migrations...")
-    print("========================================")
+    print('Running InvenTree database migrations...')
+    print('========================================')

-    manage(c, "makemigrations")
-    manage(c, "migrate --noinput")
-    manage(c, "migrate --run-syncdb")
-    manage(c, "check")
+    manage(c, 'makemigrations')
+    manage(c, 'migrate --noinput')
+    manage(c, 'migrate --run-syncdb')
+    manage(c, 'check')

-    print("========================================")
-    print("InvenTree database migrations completed!")
+    print('========================================')
+    print('InvenTree database migrations completed!')

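The migration task therefore runs four manage.py commands in a fixed order. Expressed as the shell commands the manage() wrapper produces (a sketch; paths omitted):

    # Order matters: generate migrations, apply them, sync unmanaged models, then check
    for step in ('makemigrations', 'migrate --noinput', 'migrate --run-syncdb', 'check'):
        print(f'python3 manage.py {step}')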
 @task(
@@ -347,8 +381,8 @@ def migrate(c):
     help={
         'skip_backup': 'Skip database backup step (advanced users)',
         'frontend': 'Force frontend compilation/download step (ignores INVENTREE_DOCKER)',
-        'no_frontend': 'Skip frontend compilation/download step'
-    }
+        'no_frontend': 'Skip frontend compilation/download step',
+    },
 )
 def update(c, skip_backup=False, frontend: bool = False, no_frontend: bool = False):
     """Update InvenTree installation.
@@ -390,13 +424,27 @@ def update(c, skip_backup=False, frontend: bool = False, no_frontend: bool = False):


 # Data tasks
-@task(help={
-    'filename': "Output filename (default = 'data.json')",
-    'overwrite': "Overwrite existing files without asking first (default = off/False)",
-    'include_permissions': "Include user and group permissions in the output file (filename) (default = off/False)",
-    'delete_temp': "Delete temporary files (containing permissions) at end of run. Note that this will delete temporary files from previous runs as well. (default = off/False)"
-})
-def export_records(c, filename='data.json', overwrite=False, include_permissions=False, delete_temp=False):
+@task(
+    help={
+        'filename': "Output filename (default = 'data.json')",
+        'overwrite': 'Overwrite existing files without asking first (default = False)',
+        'include_permissions': 'Include user and group permissions in the output file (default = False)',
+        'include_tokens': 'Include API tokens in the output file (default = False)',
+        'exclude_plugins': 'Exclude plugin data from the output file (default = False)',
+        'include_sso': 'Include SSO token data in the output file (default = False)',
+        'retain_temp': 'Retain temporary files (containing permissions) at end of process (default = False)',
+    }
+)
+def export_records(
+    c,
+    filename='data.json',
+    overwrite=False,
+    include_permissions=False,
+    include_tokens=False,
+    exclude_plugins=False,
+    include_sso=False,
+    retain_temp=False,
+):
     """Export all database records to a file.

     Write data to the file defined by filename.
@@ -422,44 +470,58 @@ def export_records(c, filename='data.json', overwrite=False, include_permissions=False, delete_temp=False):

     check_file_existance(filename, overwrite)

-    tmpfile = f"{filename}.tmp"
+    tmpfile = f'{filename}.tmp'

-    cmd = f"dumpdata --indent 2 --output '{tmpfile}' {content_excludes()}"
+    excludes = content_excludes(
+        allow_tokens=include_tokens,
+        allow_plugins=not exclude_plugins,
+        allow_sso=include_sso,
+    )
+
+    cmd = f"dumpdata --natural-foreign --indent 2 --output '{tmpfile}' {excludes}"

     # Dump data to temporary file
     manage(c, cmd, pty=True)

-    print("Running data post-processing step...")
+    print('Running data post-processing step...')

     # Post-process the file, to remove any "permissions" specified for a user or group
-    with open(tmpfile, "r") as f_in:
+    with open(tmpfile, 'r') as f_in:
         data = json.loads(f_in.read())

     if include_permissions is False:
         for entry in data:
-            if "model" in entry:
+            if 'model' in entry:
                 # Clear out any permissions specified for a group
-                if entry["model"] == "auth.group":
-                    entry["fields"]["permissions"] = []
+                if entry['model'] == 'auth.group':
+                    entry['fields']['permissions'] = []

                 # Clear out any permissions specified for a user
-                if entry["model"] == "auth.user":
-                    entry["fields"]["user_permissions"] = []
+                if entry['model'] == 'auth.user':
+                    entry['fields']['user_permissions'] = []

     # Write the processed data to file
-    with open(filename, "w") as f_out:
+    with open(filename, 'w') as f_out:
         f_out.write(json.dumps(data, indent=2))

-    print("Data export completed")
+    print('Data export completed')

-    if delete_temp is True:
-        print("Removing temporary file")
+    if not retain_temp:
+        print('Removing temporary files')
         os.remove(tmpfile)
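With the new parameters, API tokens and SSO data are excluded from exports unless explicitly requested, and plugin data can be dropped via exclude_plugins. A hedged sketch of calling the task directly from Python (invoke task objects are callable with a Context as their first argument; the filename is illustrative):

    from invoke import Context

    # Hypothetical local call: export everything except API tokens and SSO data
    # (the new defaults), keeping the intermediate .tmp dump file for inspection
    export_records(
        Context(),
        filename='backup.json',
        include_permissions=True,
        retain_temp=True,
    )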


-@task(help={'filename': 'Input filename', 'clear': 'Clear existing data before import'}, post=[rebuild_models, rebuild_thumbnails])
-def import_records(c, filename='data.json', clear=False):
+@task(
+    help={
+        'filename': 'Input filename',
+        'clear': 'Clear existing data before import',
+        'retain_temp': 'Retain temporary files at end of process (default = False)',
+    },
+    post=[rebuild_models, rebuild_thumbnails],
+)
+def import_records(
+    c, filename='data.json', clear: bool = False, retain_temp: bool = False
+):
     """Import database records from a file."""
     # Get an absolute path to the supplied filename
     if not os.path.isabs(filename):
@@ -474,32 +536,69 @@ def import_records(c, filename='data.json', clear=False):

     print(f"Importing database records from '{filename}'")

-    # Pre-process the data, to remove any "permissions" specified for a user or group
-    tmpfile = f"{filename}.tmp.json"
+    # We need to load 'auth' data (users / groups) *first*
+    # This is due to the users.owner model, which has a ContentType foreign key
+    authfile = f'{filename}.auth.json'
+
+    # Pre-process the data, to remove any "permissions" specified for a user or group
+    datafile = f'{filename}.data.json'

-    with open(filename, "r") as f_in:
-        data = json.loads(f_in.read())
+    with open(filename, 'r') as f_in:
+        try:
+            data = json.loads(f_in.read())
+        except json.JSONDecodeError as exc:
+            print(f'Error: Failed to decode JSON file: {exc}')
+            sys.exit(1)
+
+    auth_data = []
+    load_data = []

     for entry in data:
-        if "model" in entry:
+        if 'model' in entry:
             # Clear out any permissions specified for a group
-            if entry["model"] == "auth.group":
-                entry["fields"]["permissions"] = []
+            if entry['model'] == 'auth.group':
+                entry['fields']['permissions'] = []

             # Clear out any permissions specified for a user
-            if entry["model"] == "auth.user":
-                entry["fields"]["user_permissions"] = []
+            if entry['model'] == 'auth.user':
+                entry['fields']['user_permissions'] = []
+
+            # Save auth data for later
+            if entry['model'].startswith('auth.'):
+                auth_data.append(entry)
+            else:
+                load_data.append(entry)
+        else:
+            print('Warning: Invalid entry in data file')
+            print(entry)
+
+    # Write the auth file data
+    with open(authfile, 'w') as f_out:
+        f_out.write(json.dumps(auth_data, indent=2))

     # Write the processed data to the tmp file
-    with open(tmpfile, "w") as f_out:
-        f_out.write(json.dumps(data, indent=2))
+    with open(datafile, 'w') as f_out:
+        f_out.write(json.dumps(load_data, indent=2))

-    cmd = f"loaddata '{tmpfile}' -i {content_excludes()}"
+    excludes = content_excludes(allow_auth=False)
+
+    # Import auth models first
+    print('Importing user auth data...')
+    cmd = f"loaddata '{authfile}'"
+    manage(c, cmd, pty=True)
+
+    # Import everything else next
+    print('Importing database records...')
+    cmd = f"loaddata '{datafile}' -i {excludes}"

     manage(c, cmd, pty=True)

-    print("Data import completed")
+    if not retain_temp:
+        print('Removing temporary files')
+        os.remove(datafile)
+        os.remove(authfile)
+
+    print('Data import completed')
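The import is now a two-pass operation: auth.* records are split into a separate file and loaded first, because the users.owner model carries a ContentType foreign key that must resolve against existing users and groups. A condensed sketch of the resulting loaddata calls (filenames follow the pattern used above for an input file named 'data.json'; content_excludes is the helper defined earlier):

    # For an input file named 'data.json', the two temporary files and load order are:
    authfile = 'data.json.auth.json'   # auth.group / auth.user entries, loaded first
    datafile = 'data.json.data.json'   # everything else, loaded second

    commands = [
        f"loaddata '{authfile}'",
        f"loaddata '{datafile}' -i {content_excludes(allow_auth=False)}",
    ]
    for cmd in commands:
        print(f'python3 manage.py {cmd}')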


 @task
@@ -508,7 +607,7 @@ def delete_data(c, force=False):

     Warning: This will REALLY delete all records in the database!!
     """
-    print("Deleting all data from InvenTree database...")
+    print('Deleting all data from InvenTree database...')

     if force:
         manage(c, 'flush --noinput')
@@ -530,32 +629,26 @@ def import_fixtures(c):
     fixtures = [
         # Build model
         'build',
-
         # Common models
         'settings',
-
         # Company model
         'company',
         'price_breaks',
         'supplier_part',
-
         # Order model
         'order',
-
         # Part model
         'bom',
         'category',
         'params',
         'part',
         'test_templates',
-
         # Stock model
         'location',
         'stock_tests',
         'stock',
-
         # Users
-        'users'
+        'users',
     ]

     command = 'loaddata ' + ' '.join(fixtures)
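Since the fixtures list is simply joined into a single loaddata invocation, the resulting command is easy to preview (a sketch; fixture names taken from the list above):

    fixtures = ['build', 'settings', 'company', 'price_breaks', 'supplier_part',
                'order', 'bom', 'category', 'params', 'part', 'test_templates',
                'location', 'stock_tests', 'stock', 'users']
    command = 'loaddata ' + ' '.join(fixtures)
    # -> 'loaddata build settings company price_breaks supplier_part order bom ...'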
@@ -567,16 +660,16 @@ def import_fixtures(c):
 @task
 def wait(c):
     """Wait until the database connection is ready."""
-    return manage(c, "wait_for_db")
+    return manage(c, 'wait_for_db')


 @task(pre=[wait], help={'address': 'Server address:port (default=127.0.0.1:8000)'})
-def server(c, address="127.0.0.1:8000"):
+def server(c, address='127.0.0.1:8000'):
     """Launch a (development) server using Django's in-built webserver.

     Note: This is *not* sufficient for a production installation.
     """
-    manage(c, "runserver {address}".format(address=address), pty=True)
+    manage(c, 'runserver {address}'.format(address=address), pty=True)


 @task(pre=[wait])
@@ -589,7 +682,7 @@ def worker(c):
 @task
 def render_js_files(c):
     """Render templated javascript files (used for static testing)."""
-    manage(c, "test InvenTree.ci_render_js")
+    manage(c, 'test InvenTree.ci_render_js')


 @task(post=[translate_stats, static, server])
@@ -607,40 +700,44 @@ def test_translations(c):
     django.setup()

     # Add language
-    print("Add dummy language...")
-    print("========================================")
-    manage(c, "makemessages -e py,html,js --no-wrap -l xx")
+    print('Add dummy language...')
+    print('========================================')
+    manage(c, 'makemessages -e py,html,js --no-wrap -l xx')

     # change translation
-    print("Fill in dummy translations...")
-    print("========================================")
+    print('Fill in dummy translations...')
+    print('========================================')

     file_path = pathlib.Path(settings.LOCALE_PATHS[0], 'xx', 'LC_MESSAGES', 'django.po')
     new_file_path = str(file_path) + '_new'

     # compile regex
     reg = re.compile(
-        r"[a-zA-Z0-9]{1}" +  # match any single letter and number  # noqa: W504
-        r"(?![^{\(\<]*[}\)\>])" +  # that is not inside curly brackets, brackets or a tag  # noqa: W504
-        r"(?<![^\%][^\(][)][a-z])" +  # that is not a specially formatted variable with singles  # noqa: W504
-        r"(?![^\\][\n])"  # that is not a newline
+        r'[a-zA-Z0-9]{1}' +  # match any single letter and number  # noqa: W504
+        r'(?![^{\(\<]*[}\)\>])' +  # that is not inside curly brackets, brackets or a tag  # noqa: W504
+        r'(?<![^\%][^\(][)][a-z])' +  # that is not a specially formatted variable with singles  # noqa: W504
+        r'(?![^\\][\n])'  # that is not a newline
     )
     last_string = ''

     # loop through input file lines
-    with open(file_path, "rt") as file_org:
-        with open(new_file_path, "wt") as file_new:
+    with open(file_path, 'rt') as file_org:
+        with open(new_file_path, 'wt') as file_new:
             for line in file_org:
                 if line.startswith('msgstr "'):
                     # write output -> replace regex matches with x in the read in (multi)string
                     file_new.write(f'msgstr "{reg.sub("x", last_string[7:-2])}"\n')
-                    last_string = ""  # reset (multi)string
+                    last_string = ''  # reset (multi)string
                 elif line.startswith('msgid "'):
-                    last_string = last_string + line  # a new translatable string starts -> start append
+                    last_string = (
+                        last_string + line
+                    )  # a new translatable string starts -> start append
                     file_new.write(line)
                 else:
                     if last_string:
-                        last_string = last_string + line  # a string is being read in -> continue appending
+                        last_string = (
+                            last_string + line
+                        )  # a string is being read in -> continue appending
                     file_new.write(line)

     # change out translation files
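The regex above replaces plain letters and digits with 'x' while leaving format placeholders, tags and escape sequences untouched, which is how the dummy 'xx' language file is generated. A small illustration (the example string and output are mine, not from the commit):

    import re

    reg = re.compile(
        r'[a-zA-Z0-9]{1}'            # any single letter or number...
        r'(?![^{\(\<]*[}\)\>])'      # ...not inside curly brackets, brackets or a tag
        r'(?<![^\%][^\(][)][a-z])'   # ...not part of a specially formatted variable
        r'(?![^\\][\n])'             # ...not a newline escape
    )

    print(reg.sub('x', 'Delete {name}?'))  # -> 'xxxxxx {name}?' (placeholder preserved)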
@@ -648,9 +745,9 @@ def test_translations(c):
     new_file_path.rename(file_path)

     # compile languages
-    print("Compile languages ...")
-    print("========================================")
-    manage(c, "compilemessages")
+    print('Compile languages ...')
+    print('========================================')
+    manage(c, 'compilemessages')

     # reset cwd
     os.chdir(base_path)
@@ -668,7 +765,9 @@ def test_translations(c):
         'coverage': 'Run code coverage analysis (requires coverage package)',
     }
 )
-def test(c, disable_pty=False, runtest='', migrations=False, report=False, coverage=False):
+def test(
+    c, disable_pty=False, runtest='', migrations=False, report=False, coverage=False
+):
     """Run unit-tests for InvenTree codebase.

     To run only certain test, use the argument --runtest.
@@ -713,7 +812,7 @@ def test(c, disable_pty=False, runtest='', migrations=False, report=False, coverage=False):


 @task(help={'dev': 'Set up development environment at the end'})
-def setup_test(c, ignore_update=False, dev=False, path="inventree-demo-dataset"):
+def setup_test(c, ignore_update=False, dev=False, path='inventree-demo-dataset'):
     """Setup a testing environment."""
     from InvenTree.InvenTree.config import get_media_dir
@@ -722,41 +821,43 @@ def setup_test(c, ignore_update=False, dev=False, path="inventree-demo-dataset"):

     # Remove old data directory
     if os.path.exists(path):
-        print("Removing old data ...")
+        print('Removing old data ...')
         c.run(f'rm {path} -r')

     # Get test data
-    print("Cloning demo dataset ...")
+    print('Cloning demo dataset ...')
     c.run(f'git clone https://github.com/inventree/demo-dataset {path} -v --depth=1')
-    print("========================================")
+    print('========================================')

     # Make sure migrations are done - might have just deleted sqlite database
     if not ignore_update:
         migrate(c)

     # Load data
-    print("Loading database records ...")
+    print('Loading database records ...')
     import_records(c, filename=f'{path}/inventree_data.json', clear=True)

     # Copy media files
-    print("Copying media files ...")
+    print('Copying media files ...')
     src = Path(path).joinpath('media').resolve()
     dst = get_media_dir()

     shutil.copytree(src, dst, dirs_exist_ok=True)

-    print("Done setting up test environment...")
-    print("========================================")
+    print('Done setting up test environment...')
+    print('========================================')

     # Set up development setup if flag is set
     if dev:
         setup_dev(c)


-@task(help={
-    'filename': "Output filename (default = 'schema.yml')",
-    'overwrite': "Overwrite existing files without asking first (default = off/False)",
-})
+@task(
+    help={
+        'filename': "Output filename (default = 'schema.yml')",
+        'overwrite': 'Overwrite existing files without asking first (default = off/False)',
+    }
+)
 def schema(c, filename='schema.yml', overwrite=False):
     """Export current API schema."""
     check_file_existance(filename, overwrite)
@@ -773,7 +874,8 @@ def version(c):
     # Gather frontend version information
     _, node, yarn = node_available(versions=True)

-    print(f"""
+    print(
+        f"""
 InvenTree - inventree.org
 The Open-Source Inventory Management System\n
@@ -792,13 +894,16 @@ Node {node if node else 'N/A'}
 Yarn {yarn if yarn else 'N/A'}

 Commit hash:{InvenTreeVersion.inventreeCommitHash()}
-Commit date:{InvenTreeVersion.inventreeCommitDate()}""")
+Commit date:{InvenTreeVersion.inventreeCommitDate()}"""
+    )
     if len(sys.argv) == 1 and sys.argv[0].startswith('/opt/inventree/env/lib/python'):
-        print("""
+        print(
+            """
 You are probably running the package installer / single-line installer. Please mentioned that in any bug reports!

 Use '--list' for a list of available commands
-Use '--help' for help on a specific command""")
+Use '--help' for help on a specific command"""
+        )


 @task()
@@ -826,8 +931,8 @@ def frontend_install(c):
     Args:
         c: Context variable
     """
-    print("Installing frontend dependencies")
-    yarn(c, "yarn install")
+    print('Installing frontend dependencies')
+    yarn(c, 'yarn install')


 @task
@@ -837,9 +942,9 @@ def frontend_trans(c):
     Args:
         c: Context variable
     """
-    print("Compiling frontend translations")
-    yarn(c, "yarn run extract")
-    yarn(c, "yarn run compile")
+    print('Compiling frontend translations')
+    yarn(c, 'yarn run extract')
+    yarn(c, 'yarn run compile')


 @task
@@ -849,8 +954,8 @@ def frontend_build(c):
     Args:
         c: Context variable
     """
-    print("Building frontend")
-    yarn(c, "yarn run build --emptyOutDir")
+    print('Building frontend')
+    yarn(c, 'yarn run build --emptyOutDir')


 @task
@@ -860,19 +965,29 @@ def frontend_dev(c):
     Args:
         c: Context variable
     """
-    print("Starting frontend development server")
-    yarn(c, "yarn run dev")
+    print('Starting frontend development server')
+    yarn(c, 'yarn run dev')


-@task(help={
-    'ref': "git ref, default: current git ref",
-    'tag': "git tag to look for release",
-    'file': "destination to frontend-build.zip file",
-    'repo': "GitHub repository, default: InvenTree/inventree",
-    'extract': "Also extract and place at the correct destination, default: True",
-    'clean': "Delete old files from InvenTree/web/static/web first, default: True",
-})
-def frontend_download(c, ref=None, tag=None, file=None, repo="InvenTree/inventree", extract=True, clean=True):
+@task(
+    help={
+        'ref': 'git ref, default: current git ref',
+        'tag': 'git tag to look for release',
+        'file': 'destination to frontend-build.zip file',
+        'repo': 'GitHub repository, default: InvenTree/inventree',
+        'extract': 'Also extract and place at the correct destination, default: True',
+        'clean': 'Delete old files from InvenTree/web/static/web first, default: True',
+    }
+)
+def frontend_download(
+    c,
+    ref=None,
+    tag=None,
+    file=None,
+    repo='InvenTree/inventree',
+    extract=True,
+    clean=True,
+):
     """Download a pre-build frontend from GitHub if you dont want to install nodejs on your machine.

     There are 3 possibilities to install the frontend:
@@ -894,7 +1009,7 @@ def frontend_download(c, ref=None, tag=None, file=None, repo="InvenTree/inventree", extract=True, clean=True):
     import requests

     # globals
-    default_headers = {"Accept": "application/vnd.github.v3+json"}
+    default_headers = {'Accept': 'application/vnd.github.v3+json'}

     # helper functions
     def find_resource(resource, key, value):
@@ -908,30 +1023,34 @@ def frontend_download(c, ref=None, tag=None, file=None, repo="InvenTree/inventree", extract=True, clean=True):
         if not extract:
             return

-        dest_path = Path(__file__).parent / "InvenTree/web/static/web"
+        dest_path = Path(__file__).parent / 'InvenTree/web/static/web'

         # if clean, delete static/web directory
         if clean:
             shutil.rmtree(dest_path, ignore_errors=True)
             os.makedirs(dest_path)
-            print(f"Cleaned directory: {dest_path}")
+            print(f'Cleaned directory: {dest_path}')

         # unzip build to static folder
-        with ZipFile(file, "r") as zip_ref:
+        with ZipFile(file, 'r') as zip_ref:
             zip_ref.extractall(dest_path)

-        print(f"Unzipped downloaded frontend build to: {dest_path}")
+        print(f'Unzipped downloaded frontend build to: {dest_path}')

     def handle_download(url):
         # download frontend-build.zip to temporary file
-        with requests.get(url, headers=default_headers, stream=True, allow_redirects=True) as response, NamedTemporaryFile(suffix=".zip") as dst:
+        with requests.get(
+            url, headers=default_headers, stream=True, allow_redirects=True
+        ) as response, NamedTemporaryFile(suffix='.zip') as dst:
             response.raise_for_status()

             # auto decode the gzipped raw data
-            response.raw.read = functools.partial(response.raw.read, decode_content=True)
-            with open(dst.name, "wb") as f:
+            response.raw.read = functools.partial(
+                response.raw.read, decode_content=True
+            )
+            with open(dst.name, 'wb') as f:
                 shutil.copyfileobj(response.raw, f)
-            print(f"Downloaded frontend build to temporary file: {dst.name}")
+            print(f'Downloaded frontend build to temporary file: {dst.name}')

             handle_extract(dst.name)
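One detail worth noting in the download helper: response.raw does not decode Content-Encoding (e.g. gzip) on its own, so decode_content=True is bound into raw.read before the body is streamed to disk. This is a general requests pattern rather than anything specific to this commit; a minimal sketch with an illustrative URL:

    import functools
    import shutil

    import requests

    # Illustrative URL; not taken from the commit
    url = 'https://example.com/frontend-build.zip'

    with requests.get(url, stream=True) as response:
        response.raise_for_status()
        # response.raw does not decompress gzip/deflate by itself, so bind it in
        response.raw.read = functools.partial(response.raw.read, decode_content=True)
        with open('frontend-build.zip', 'wb') as f:
            shutil.copyfileobj(response.raw, f)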
@@ -942,51 +1061,72 @@ def frontend_download(c, ref=None, tag=None, file=None, repo="InvenTree/inventree", extract=True, clean=True):

     # check arguments
     if ref is not None and tag is not None:
-        print("[ERROR] Do not set ref and tag.")
+        print('[ERROR] Do not set ref and tag.')
         return

     if ref is None and tag is None:
         try:
-            ref = subprocess.check_output(["git", "rev-parse", "HEAD"], encoding="utf-8").strip()
+            ref = subprocess.check_output(
+                ['git', 'rev-parse', 'HEAD'], encoding='utf-8'
+            ).strip()
         except Exception:
             print("[ERROR] Cannot get current ref via 'git rev-parse HEAD'")
             return

     if ref is None and tag is None:
-        print("[ERROR] Either ref or tag needs to be set.")
+        print('[ERROR] Either ref or tag needs to be set.')

     if tag:
-        tag = tag.lstrip("v")
+        tag = tag.lstrip('v')
         try:
-            handle_download(f"https://github.com/{repo}/releases/download/{tag}/frontend-build.zip")
+            handle_download(
+                f'https://github.com/{repo}/releases/download/{tag}/frontend-build.zip'
+            )
         except Exception as e:
             if not isinstance(e, requests.HTTPError):
                 raise e
-            print(f"""[ERROR] An Error occurred. Unable to download frontend build, release or build does not exist,
+            print(
+                f"""[ERROR] An Error occurred. Unable to download frontend build, release or build does not exist,
 try downloading the frontend-build.zip yourself via: https://github.com/{repo}/releases
-Then try continuing by running: invoke frontend-download --file <path-to-downloaded-zip-file>""")
+Then try continuing by running: invoke frontend-download --file <path-to-downloaded-zip-file>"""
+            )

         return

     if ref:
         # get workflow run from all workflow runs on that particular ref
-        workflow_runs = requests.get(f"https://api.github.com/repos/{repo}/actions/runs?head_sha={ref}", headers=default_headers).json()
+        workflow_runs = requests.get(
+            f'https://api.github.com/repos/{repo}/actions/runs?head_sha={ref}',
+            headers=default_headers,
+        ).json()

-        if not (qc_run := find_resource(workflow_runs["workflow_runs"], "name", "QC")):
-            print("[ERROR] Cannot find any workflow runs for current sha")
+        if not (qc_run := find_resource(workflow_runs['workflow_runs'], 'name', 'QC')):
+            print('[ERROR] Cannot find any workflow runs for current sha')
             return
-        print(f"Found workflow {qc_run['name']} (run {qc_run['run_number']}-{qc_run['run_attempt']})")
+        print(
+            f"Found workflow {qc_run['name']} (run {qc_run['run_number']}-{qc_run['run_attempt']})"
+        )

         # get frontend-build artifact from all artifacts available for this workflow run
-        artifacts = requests.get(qc_run["artifacts_url"], headers=default_headers).json()
-        if not (frontend_artifact := find_resource(artifacts["artifacts"], "name", "frontend-build")):
-            print("[ERROR] Cannot find frontend-build.zip attachment for current sha")
+        artifacts = requests.get(
+            qc_run['artifacts_url'], headers=default_headers
+        ).json()
+        if not (
+            frontend_artifact := find_resource(
+                artifacts['artifacts'], 'name', 'frontend-build'
+            )
+        ):
+            print('[ERROR] Cannot find frontend-build.zip attachment for current sha')
             return
-        print(f"Found artifact {frontend_artifact['name']} with id {frontend_artifact['id']} ({frontend_artifact['size_in_bytes']/1e6:.2f}MB).")
+        print(
+            f"Found artifact {frontend_artifact['name']} with id {frontend_artifact['id']} ({frontend_artifact['size_in_bytes']/1e6:.2f}MB)."
+        )

-        print(f"""
+        print(
+            f"""
 GitHub doesn't allow artifact downloads from anonymous users. Either download the following file
 via your signed in browser, or consider using a point release download via invoke frontend-download --tag <git-tag>

 Download: https://github.com/{repo}/suites/{qc_run['check_suite_id']}/artifacts/{frontend_artifact['id']} manually and
-continue by running: invoke frontend-download --file <path-to-downloaded-zip-file>""")
+continue by running: invoke frontend-download --file <path-to-downloaded-zip-file>"""
+        )
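For reference, the ref-based branch of the task walks the GitHub REST API: list workflow runs for the commit, pick the 'QC' run, then list its artifacts and pick 'frontend-build'. A hedged standalone sketch (response key names follow the GitHub v3 API as used in the diff; the sha value is a placeholder and next() stands in for the file's find_resource helper):

    import requests

    headers = {'Accept': 'application/vnd.github.v3+json'}
    repo = 'InvenTree/inventree'
    sha = '<sha>'  # placeholder for the git ref being queried

    runs = requests.get(
        f'https://api.github.com/repos/{repo}/actions/runs?head_sha={sha}',
        headers=headers,
    ).json()
    qc_run = next(
        (r for r in runs.get('workflow_runs', []) if r.get('name') == 'QC'), None
    )

    if qc_run:
        artifacts = requests.get(qc_run['artifacts_url'], headers=headers).json()
        build = next(
            (a for a in artifacts.get('artifacts', []) if a.get('name') == 'frontend-build'),
            None,
        )
        print(build and build['id'])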