pylint checks
.github/scripts/check_js_templates.py (vendored, 4 lines changed)
@@ -71,7 +71,7 @@ def check_prohibited_tags(data):
 for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
     print(f"Checking file 'translated/{os.path.basename(filename)}':")
 
-    with open(filename) as js_file:
+    with open(filename, encoding='utf-8') as js_file:
         data = js_file.readlines()
 
     errors += check_invalid_tag(data)
@@ -81,7 +81,7 @@ for filename in pathlib.Path(js_dynamic_dir).rglob('*.js'):
     print(f"Checking file 'dynamic/{os.path.basename(filename)}':")
 
     # Check that the 'dynamic' files do not contains any translated strings
-    with open(filename) as js_file:
+    with open(filename, encoding='utf-8') as js_file:
         data = js_file.readlines()
 
     invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']
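Note: nearly every hunk in this commit makes the same change, adding an explicit encoding='utf-8' to open(). Without it Python falls back to locale.getpreferredencoding(), so the same file can decode differently from machine to machine; pylint reports this as W1514 (unspecified-encoding), and the same check exists in ruff's PL family as PLW1514, though the diff itself does not show whether that specific code is enabled. A minimal sketch of the difference, using an illustrative file name:

    import locale

    # The implicit default is whatever the host locale reports - typically
    # utf-8 on Linux, but e.g. cp1252 on older Windows setups.
    print(locale.getpreferredencoding(False))

    # Implicit encoding (what the old code did): platform-dependent.
    with open('example.js') as f:
        data = f.read()

    # Explicit encoding (what the diff changes to): identical everywhere.
    with open('example.js', encoding='utf-8') as f:
        data = f.read()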
.github/scripts/version_check.py (vendored, 4 lines changed)
@@ -134,7 +134,7 @@ if __name__ == '__main__':
 
     version = None
 
-    with open(version_file) as f:
+    with open(version_file, encoding='utf-8') as f:
         text = f.read()
 
     # Extract the InvenTree software version
@@ -199,7 +199,7 @@ if __name__ == '__main__':
     target_repos = [REPO.lower(), f'ghcr.io/{REPO.lower()}']
 
     # Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
-    with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+    with open(os.getenv('GITHUB_ENV'), 'a', encoding='utf-8') as env_file:
         # Construct tag string
         tag_list = [[f'{r}:{t}' for t in docker_tags] for r in target_repos]
         tags = ','.join(itertools.chain(*tag_list))
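Note: the GITHUB_ENV hunk leans on a GitHub Actions convention: KEY=value lines appended to the file named by the GITHUB_ENV environment variable become environment variables for subsequent workflow steps. A hedged sketch of the mechanism (the tag value is illustrative, not taken from the diff):

    import os

    # Appending to $GITHUB_ENV exports variables to later workflow steps;
    # the encoding is now explicit to satisfy the unspecified-encoding check.
    with open(os.environ['GITHUB_ENV'], 'a', encoding='utf-8') as env_file:
        env_file.write('docker_tags=inventree/inventree:latest\n')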
@@ -10,7 +10,7 @@ tld = os.path.abspath(os.path.join(here, '..'))
 
 config_file = os.path.join(tld, 'mkdocs.yml')
 
-with open(config_file) as f:
+with open(config_file, encoding='utf-8') as f:
     data = yaml.load(f, yaml.BaseLoader)
 
 assert data['strict'] == 'true'
@@ -82,7 +82,7 @@ def fetch_rtd_versions():
     print('Discovered the following versions:')
     print(versions)
 
-    with open(output_filename, 'w') as file:
+    with open(output_filename, 'w', encoding='utf-8') as file:
         json.dump(versions, file, indent=2)
 
 
@@ -100,7 +100,7 @@ def get_release_data():
         # Release information has been cached to file
 
         print("Loading release information from 'releases.json'")
-        with open(json_file) as f:
+        with open(json_file, encoding='utf-8') as f:
            return json.loads(f.read())
 
     # Download release information via the GitHub API
@@ -127,7 +127,7 @@ def get_release_data():
         page += 1
 
     # Cache these results to file
-    with open(json_file, 'w') as f:
+    with open(json_file, 'w', encoding='utf-8') as f:
         print("Saving release information to 'releases.json'")
         f.write(json.dumps(releases))
 
@@ -73,7 +73,7 @@ def generate_schema_file(key: str) -> None:
 
     print('Writing schema file to:', output_file)
 
-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         f.write(output)
 
 
@@ -119,7 +119,7 @@ def generate_index_file(version: str):
 
     print('Writing index file to:', output_file)
 
-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         f.write(output)
 
 
@@ -171,7 +171,7 @@ def parse_api_file(filename: str):
 
     The intent is to make the API schema easier to peruse on the documentation.
     """
-    with open(filename) as f:
+    with open(filename, encoding='utf-8') as f:
         data = yaml.safe_load(f)
 
     paths = data['paths']
@@ -211,7 +211,7 @@ def parse_api_file(filename: str):
 
     output_file = os.path.abspath(output_file)
 
-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         yaml.dump(output, f)
 
     # Generate a markdown file for the schema
docs/main.py (12 lines changed)
@@ -16,7 +16,7 @@ global USER_SETTINGS
 here = os.path.dirname(__file__)
 settings_file = os.path.join(here, 'inventree_settings.json')
 
-with open(settings_file) as sf:
+with open(settings_file, encoding='utf-8') as sf:
     settings = json.load(sf)
 
 GLOBAL_SETTINGS = settings['global']
@@ -27,7 +27,7 @@ def get_repo_url(raw=False):
     """Return the repository URL for the current project."""
     mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')
 
-    with open(mkdocs_yml) as f:
+    with open(mkdocs_yml, encoding='utf-8') as f:
         mkdocs_config = yaml.safe_load(f)
         repo_name = mkdocs_config['repo_name']
 
@@ -47,7 +47,7 @@ def check_link(url) -> bool:
 
     # Keep a local cache file of URLs we have already checked
     if os.path.exists(CACHE_FILE):
-        with open(CACHE_FILE) as f:
+        with open(CACHE_FILE, encoding='utf-8') as f:
             cache = f.read().splitlines()
 
             if url in cache:
@@ -59,7 +59,7 @@ def check_link(url) -> bool:
     response = requests.head(url, timeout=5000)
     if response.status_code == 200:
         # Update the cache file
-        with open(CACHE_FILE, 'a') as f:
+        with open(CACHE_FILE, 'a', encoding='utf-8') as f:
             f.write(f'{url}\n')
 
         return True
@@ -177,7 +177,7 @@ def define_env(env):
 
         assert subprocess.call(command, shell=True) == 0
 
-        with open(output) as f:
+        with open(output, encoding='utf-8') as f:
             content = f.read()
 
         return content
@@ -214,7 +214,7 @@ def define_env(env):
         if not os.path.exists(path):
             raise FileNotFoundError(f'Required file {path} does not exist.')
 
-        with open(path) as f:
+        with open(path, encoding='utf-8') as f:
             content = f.read()
 
         data = f'??? abstract "{title}"\n\n'
@@ -20,13 +20,18 @@ src = ["src/backend/InvenTree"]
 "__init__.py" = ["D104"]
 
 [tool.ruff.lint]
-select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "RUF", "UP", "W"]
+select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "PLE", "PLW", "RUF", "UP", "W"]
 # Things that should be enabled in the future:
 # - LOG
 # - DJ # for Django stuff
 # - S # for security stuff (bandit)
 
 ignore = [
+    "PLE1205",
+    # - PLE1205 - Too many arguments for logging format string
+    "PLW2901",
+    # - PLW2901 - Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
+    "PLW0602","PLW0603","PLW0604", # global variable things
     "RUF015",
     # - RUF015 - Prefer next({iterable}) over single element slice
     "RUF012",
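Note: the pyproject.toml hunk is the heart of the commit: it adds the pylint-derived rule families PLE (errors) and PLW (warnings) to ruff's select list, and immediately ignores the codes the codebase is not yet ready for. A rough sketch of the patterns the ignored codes flag (examples are illustrative, not taken from InvenTree):

    import logging

    logger = logging.getLogger(__name__)

    # PLE1205 - too many arguments for the logging format string:
    # one '%s' placeholder, but two arguments supplied.
    logger.error('part %s', 'widget', 'surplus')

    # PLW2901 - loop variable overwritten inside the loop body:
    for line in ['a ', 'b ']:
        line = line.strip()  # rebinds 'line', hiding the original value

    # PLW0603 - rebinding module state via the 'global' statement:
    counter = 0

    def bump():
        global counter
        counter += 1

With this configuration, running ruff check over the source tree reports the remaining PLE/PLW violations; the per-line fixes and suppressions in the rest of the diff handle the rules that were left enabled.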
@@ -131,7 +131,7 @@ def load_config_data(set_cache: bool = False) -> map:
 
     cfg_file = get_config_file()
 
-    with open(cfg_file) as cfg:
+    with open(cfg_file, encoding='utf-8') as cfg:
         data = yaml.safe_load(cfg)
 
     # Set the cache if requested
@@ -47,7 +47,7 @@ class Command(BaseCommand):
 
         filename = kwargs.get('filename', 'inventree_settings.json')
 
-        with open(filename, 'w') as f:
+        with open(filename, 'w', encoding='utf-8') as f:
             json.dump(settings, f, indent=4)
 
         print(f"Exported InvenTree settings definitions to '{filename}'")
@@ -103,14 +103,14 @@ class Command(BaseCommand):
             })
 
         self.stdout.write(f'Writing icon map for {len(icons.keys())} icons')
-        with open(kwargs['output_file'], 'w') as f:
+        with open(kwargs['output_file'], 'w', encoding='utf-8') as f:
             json.dump(icons, f, indent=2)
 
         self.stdout.write(f'Icon map written to {kwargs["output_file"]}')
 
         # Import icon map file
         if kwargs['input_file']:
-            with open(kwargs['input_file']) as f:
+            with open(kwargs['input_file'], encoding='utf-8') as f:
                 icons = json.load(f)
 
             self.stdout.write(f'Loaded icon map for {len(icons.keys())} icons')
@@ -19,7 +19,9 @@ def render_file(file_name, source, target, locales, ctx):
 
         target_file = os.path.join(target, locale + '.' + file_name)
 
-        with open(target_file, 'w') as localised_file, lang_over(locale):
+        with open(target_file, 'w', encoding='utf-8') as localised_file, lang_over(
+            locale
+        ):
             rendered = render_to_string(os.path.join(source, file_name), ctx)
             localised_file.write(rendered)
 
@@ -70,7 +70,7 @@ class URLTest(TestCase):
 
         pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'
 
-        with open(input_file) as f:
+        with open(input_file, encoding='utf-8') as f:
             data = f.read()
 
         results = re.findall(pattern, data)
@@ -15,7 +15,7 @@ def reload_translation_stats():
     STATS_FILE = settings.BASE_DIR.joinpath('InvenTree/locale_stats.json').absolute()
 
     try:
-        with open(STATS_FILE) as f:
+        with open(STATS_FILE, encoding='utf-8') as f:
             _translation_stats = json.load(f)
     except Exception:
         _translation_stats = None
@@ -55,7 +55,7 @@ def get_icon_packs():
     tabler_icons_path = Path(__file__).parent.parent.joinpath(
         'InvenTree/static/tabler-icons/icons.json'
     )
-    with open(tabler_icons_path) as tabler_icons_file:
+    with open(tabler_icons_path, encoding='utf-8') as tabler_icons_file:
         tabler_icons = json.load(tabler_icons_file)
 
     icon_packs = [
@@ -2060,7 +2060,7 @@ class InvenTreeSetting(BaseInvenTreeSetting):
             'description': _(
                 'Check that all plugins are installed on startup - enable in container environments'
             ),
-            'default': str(os.getenv('INVENTREE_DOCKER', False)).lower()
+            'default': str(os.getenv('INVENTREE_DOCKER', 'False')).lower()
             in ['1', 'true'],
             'validator': bool,
             'requires_restart': True,
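Note: the INVENTREE_DOCKER hunk preserves behaviour while fixing a type problem: os.getenv expects a string default, and a rule along the lines of PLW1508 (invalid-envvar-default) is the likely trigger. str(False).lower() and str('False').lower() both give 'false', which is not in ['1', 'true'], so the computed default is unchanged. A quick check, assuming the variable is unset:

    import os

    os.environ.pop('INVENTREE_DOCKER', None)  # ensure the variable is unset

    old = str(os.getenv('INVENTREE_DOCKER', False)).lower() in ['1', 'true']
    new = str(os.getenv('INVENTREE_DOCKER', 'False')).lower() in ['1', 'true']
    assert old is False and new is False  # same result either way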
@@ -5,4 +5,4 @@ from django import template
 register = template.Library()
 from generic.states.tags import status_label
 
-__all__ = [status_label]
+__all__ = ['status_label']
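Note: __all__ must hold the exported names as strings, not the objects themselves; pylint calls this E0604 (invalid-all-object), which ruff picks up as PLE0604 now that the PLE family is selected. A minimal illustration:

    def status_label():
        """Stand-in for the imported template tag."""

    __all__ = [status_label]    # flagged: function object inside __all__
    __all__ = ['status_label']  # correct: export the name as a string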
@@ -19,7 +19,7 @@ class ImporterTest(InvenTreeTestCase):
 
         fn = os.path.join(os.path.dirname(__file__), 'test_data', 'companies.csv')
 
-        with open(fn) as input_file:
+        with open(fn, encoding='utf-8') as input_file:
             data = input_file.read()
 
         session = DataImportSession.objects.create(
@@ -1618,7 +1618,7 @@ class PartDetailTests(PartAPITestBase):
 
         # Try to upload a non-image file
         test_path = BASE_DIR / '_testfolder' / 'dummy_image'
-        with open(f'{test_path}.txt', 'w') as dummy_image:
+        with open(f'{test_path}.txt', 'w', encoding='utf-8') as dummy_image:
             dummy_image.write('hello world')
 
         with open(f'{test_path}.txt', 'rb') as dummy_image:
@@ -49,7 +49,7 @@ class BomExportTest(InvenTreeTestCase):
         with open(filename, 'wb') as f:
             f.write(response.getvalue())
 
-        with open(filename) as f:
+        with open(filename, encoding='utf-8') as f:
             reader = csv.reader(f, delimiter=',')
 
             for line in reader:
@@ -96,7 +96,7 @@ class BomExportTest(InvenTreeTestCase):
             f.write(response.getvalue())
 
         # Read the file
-        with open(filename) as f:
+        with open(filename, encoding='utf-8') as f:
             reader = csv.reader(f, delimiter=',')
 
             for line in reader:
@@ -7,7 +7,7 @@ import sys
 
 def calculate_coverage(filename):
     """Calculate translation coverage for a .po file."""
-    with open(filename) as f:
+    with open(filename, encoding='utf-8') as f:
         lines = f.readlines()
 
     lines_count = 0
@@ -72,7 +72,7 @@ if __name__ == '__main__':
     print('-' * 16)
 
     # write locale stats
-    with open(STAT_FILE, 'w') as target:
+    with open(STAT_FILE, 'w', encoding='utf-8') as target:
         json.dump(locales_perc, target)
 
     avg = int(sum(percentages) / len(percentages)) if len(percentages) > 0 else 0
@@ -45,7 +45,8 @@ if __name__ == '__main__':
 
     print('Generating icon list...')
     with open(
-        os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json')
+        os.path.join(TMP_FOLDER, 'node_modules', '@tabler', 'icons', 'icons.json'),
+        encoding='utf-8',
     ) as f:
         icons = json.load(f)
 
@@ -60,7 +61,7 @@ if __name__ == '__main__':
         },
     }
 
-    with open(os.path.join(STATIC_FOLDER, 'icons.json'), 'w') as f:
+    with open(os.path.join(STATIC_FOLDER, 'icons.json'), 'w', encoding='utf-8') as f:
         json.dump(res, f, separators=(',', ':'))
 
     print('Cleaning up...')
@@ -41,7 +41,7 @@ from InvenTree.fields import InvenTreeModelMoneyField, InvenTreeURLField
 from order.status_codes import SalesOrderStatusGroups
 from part import models as PartModels
 from plugin.events import trigger_event
-from stock import models as StockModels
+from stock import models as StockModels  # noqa: PLW0406
 from stock.generators import generate_batch_code
 from stock.status_codes import StockHistoryCode, StockStatus, StockStatusGroups
 from users.models import Owner
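Note: PLW0406 is pylint's import-self warning, raised when a module imports itself; the suppression suggests this hunk sits in a module inside the stock package that needs a reference to stock.models. The noqa documents that the self-import is deliberate rather than rewriting it. Shape of the pattern, as a hypothetical minimal reproduction:

    # file: stock/models.py (hypothetical location)
    from stock import models as StockModels  # noqa: PLW0406 - intentional self-import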
@@ -38,7 +38,7 @@ class TemplateTagTest(InvenTreeTestCase):
         manifest_file = Path(__file__).parent.joinpath('static/web/.vite/manifest.json')
         # Try with removed manifest file
         manifest_file.rename(manifest_file.with_suffix('.json.bak'))  # Rename
-        resp = resp = spa_helper.spa_bundle()
+        resp = spa_helper.spa_bundle()
         self.assertIsNone(resp)
         manifest_file.with_suffix('.json.bak').rename(
             manifest_file.with_suffix('.json')
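Note: the spa_helper hunk removes a doubled target in a chained assignment. resp = resp = spa_helper.spa_bundle() calls the function once and simply binds resp twice to the same result, so the fix is purely cosmetic; presumably the stricter lint profile is what surfaced the typo. For comparison:

    def spa_bundle():  # stand-in for spa_helper.spa_bundle
        return None

    resp = resp = spa_bundle()  # chained assignment: 'resp' bound twice
    resp = spa_bundle()         # equivalent, and what the diff settles on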
tasks.py (14 lines changed)
@@ -618,7 +618,7 @@ def export_records(
     print('Running data post-processing step...')
 
     # Post-process the file, to remove any "permissions" specified for a user or group
-    with open(tmpfile) as f_in:
+    with open(tmpfile, encoding='utf-8') as f_in:
         data = json.loads(f_in.read())
 
     data_out = []
@@ -641,7 +641,7 @@ def export_records(
             data_out.append(entry)
 
     # Write the processed data to file
-    with open(target, 'w') as f_out:
+    with open(target, 'w', encoding='utf-8') as f_out:
         f_out.write(json.dumps(data_out, indent=2))
 
     print('Data export completed')
@@ -684,7 +684,7 @@ def import_records(
     # Pre-process the data, to remove any "permissions" specified for a user or group
     datafile = f'{target}.data.json'
 
-    with open(target) as f_in:
+    with open(target, encoding='utf-8') as f_in:
         try:
             data = json.loads(f_in.read())
         except json.JSONDecodeError as exc:
@@ -714,11 +714,11 @@ def import_records(
             print(entry)
 
     # Write the auth file data
-    with open(authfile, 'w') as f_out:
+    with open(authfile, 'w', encoding='utf-8') as f_out:
         f_out.write(json.dumps(auth_data, indent=2))
 
     # Write the processed data to the tmp file
-    with open(datafile, 'w') as f_out:
+    with open(datafile, 'w', encoding='utf-8') as f_out:
         f_out.write(json.dumps(load_data, indent=2))
 
     excludes = content_excludes(allow_auth=False)
@@ -888,8 +888,8 @@ def test_translations(c):
     last_string = ''
 
     # loop through input file lines
-    with open(file_path) as file_org:
-        with open(new_file_path, 'w') as file_new:
+    with open(file_path, encoding='utf-8') as file_org:
+        with open(new_file_path, 'w', encoding='utf-8') as file_new:
             for line in file_org:
                 if line.startswith('msgstr "'):
                     # write output -> replace regex matches with x in the read in (multi)string