mirror of
https://github.com/inventree/InvenTree.git
synced 2025-05-01 04:56:45 +00:00
* Squashed commit of the following: commit 52d7ff0f650bbcfa2d93ac96562b44269d3812a7 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 23:03:20 2024 +0100 fixed lookup commit 0d076eaea89dce24f08af247479b3b4dff1b4df3 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 23:03:08 2024 +0100 switched to pathlib for lookup commit 473e75eda205793769946e923748356ffd7e5b4b Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 22:52:30 2024 +0100 fix wrong url response commit fd74f8d703399c19cb3616ea3b2656a50cd7a6e5 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 21:14:38 2024 +0100 switched to ruff for import sorting commit f83fedbbb8de261ff8c706e179519e58e7a91064 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 21:03:14 2024 +0100 switched to single quotes everywhere commit a92442e60e23be0ff5dcf42d222b0d95823ecb9b Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:58:23 2024 +0100 added autofixes commit cc66c93136fcae8a701810a4f4f38ef3b570be61 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:56:47 2024 +0100 enable autoformat commit 1f343606ec1f2a99acf8a37b9900d78a8fb37282 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:42:14 2024 +0100 Squashed commit of the following: commit f5cf7b2e7872fc19633321713965763d1890b495 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:36:57 2024 +0100 fixed reqs commit 9d845bee98befa4e53c2ac3c783bd704369e3ad2 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:32:35 2024 +0100 disable autofix/format commit aff5f271484c3500df7ddde043767c008ce4af21 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:28:50 2024 +0100 adjust checks commit 47271cf1efa848ec8374a0d83b5646d06fffa6e7 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:28:22 2024 +0100 reorder order of operations commit e1bf178b40b3f0d2d59ba92209156c43095959d2 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:01:09 2024 +0100 adapted ruff settings to better fit code base commit 
ad7d88a6f4f15c9552522131c4e207256fc2bbf6 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:59:45 2024 +0100 auto fixed docstring commit a2e54a760e17932dbbc2de0dec23906107f2cda9 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:46:35 2024 +0100 fix getattr useage commit cb80c73bc6c0be7f5d2ed3cc9b2ac03fdefd5c41 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:25:09 2024 +0100 fix requirements file commit b7780bbd21a32007f3b0ce495b519bf59bb19bf5 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:42:28 2024 +0100 fix removed sections commit 71f1681f55c15f62c16c1d7f30a745adc496db97 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:41:21 2024 +0100 fix djlint syntax commit a0bcf1bccef8a8ffd482f38e2063bc9066e1d759 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:35:28 2024 +0100 remove flake8 from code base commit 22475b31cc06919785be046e007915e43f356793 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:34:56 2024 +0100 remove flake8 from code base commit 0413350f14773ac6161473e0cfb069713c13c691 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:24:39 2024 +0100 moved ruff section commit d90c48a0bf98befdfacbbb093ee56cdb28afb40d Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:24:24 2024 +0100 move djlint config to pyproject commit c5ce55d5119bf2e35e429986f62f875c86178ae1 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:20:39 2024 +0100 added isort again commit 42a41d23afc280d4ee6f0e640148abc6f460f05a Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:19:02 2024 +0100 move config section commit 85692331816348cb1145570340d1f6488a8265cc Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:17:52 2024 +0100 fix codespell error commit 2897c6704d1311a800ce5aa47878d96d6980b377 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 17:29:21 2024 +0100 replaced flake8 with ruff mostly for speed improvements * enable docstring checks * fix docstrings * fixed D417 Missing 
argument description * Squashed commit of the following: commit d3b795824b5d6d1c0eda67150b45b5cd672b3f6b Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 22:56:17 2024 +0100 fixed source path commit 0bac0c19b88897a19d5c995e4ff50427718b827e Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 22:47:53 2024 +0100 fixed req commit 9f61f01d9cc01f1fb7123102f3658c890469b8ce Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 22:45:18 2024 +0100 added missing toml req commit 91b71ed24a6761b629768d0ad8829fec2819a966 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:49:50 2024 +0100 moved isort config commit 12460b04196b12d0272d40552402476d5492fea5 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:43:22 2024 +0100 remove flake8 section from setup.cfg commit f5cf7b2e7872fc19633321713965763d1890b495 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:36:57 2024 +0100 fixed reqs commit 9d845bee98befa4e53c2ac3c783bd704369e3ad2 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:32:35 2024 +0100 disable autofix/format commit aff5f271484c3500df7ddde043767c008ce4af21 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:28:50 2024 +0100 adjust checks commit 47271cf1efa848ec8374a0d83b5646d06fffa6e7 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:28:22 2024 +0100 reorder order of operations commit e1bf178b40b3f0d2d59ba92209156c43095959d2 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 20:01:09 2024 +0100 adapted ruff settings to better fit code base commit ad7d88a6f4f15c9552522131c4e207256fc2bbf6 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:59:45 2024 +0100 auto fixed docstring commit a2e54a760e17932dbbc2de0dec23906107f2cda9 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:46:35 2024 +0100 fix getattr useage commit cb80c73bc6c0be7f5d2ed3cc9b2ac03fdefd5c41 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 19:25:09 2024 +0100 fix requirements file commit 
b7780bbd21a32007f3b0ce495b519bf59bb19bf5 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:42:28 2024 +0100 fix removed sections commit 71f1681f55c15f62c16c1d7f30a745adc496db97 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:41:21 2024 +0100 fix djlint syntax commit a0bcf1bccef8a8ffd482f38e2063bc9066e1d759 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:35:28 2024 +0100 remove flake8 from code base commit 22475b31cc06919785be046e007915e43f356793 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:34:56 2024 +0100 remove flake8 from code base commit 0413350f14773ac6161473e0cfb069713c13c691 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:24:39 2024 +0100 moved ruff section commit d90c48a0bf98befdfacbbb093ee56cdb28afb40d Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:24:24 2024 +0100 move djlint config to pyproject commit c5ce55d5119bf2e35e429986f62f875c86178ae1 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:20:39 2024 +0100 added isort again commit 42a41d23afc280d4ee6f0e640148abc6f460f05a Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:19:02 2024 +0100 move config section commit 85692331816348cb1145570340d1f6488a8265cc Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 18:17:52 2024 +0100 fix codespell error commit 2897c6704d1311a800ce5aa47878d96d6980b377 Author: Matthias Mair <code@mjmair.com> Date: Sun Jan 7 17:29:21 2024 +0100 replaced flake8 with ruff mostly for speed improvements * fix pyproject * make docstrings more uniform * auto-format * fix order * revert url change
264 lines
7.8 KiB
Python
264 lines
7.8 KiB
Python
"""Unit testing for BOM upload / import functionality."""
|
|
|
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
|
from django.urls import reverse
|
|
|
|
import tablib
|
|
|
|
from InvenTree.unit_test import InvenTreeAPITestCase
|
|
from part.models import Part
|
|
|
|
|
|
class BomUploadTest(InvenTreeAPITestCase):
    """Test BOM file upload API endpoint."""

    # Permissions required to upload / extract BOM data
    roles = ['part.add', 'part.change']

    @classmethod
    def setUpTestData(cls):
        """Create BOM data as part of setup routine."""
        super().setUpTestData()

        # Rebuild the MPTT tree structure before creating new parts
        Part.objects.rebuild()

        # Parent assembly against which BOM data will be uploaded
        cls.part = Part.objects.create(
            name='Assembly',
            description='An assembled part',
            assembly=True,
            component=False,
        )

        # Create 10 component parts which can appear in a BOM.
        # MPTT fields (lft/rght/level/tree_id) are zeroed explicitly because
        # bulk_create bypasses the signals which normally maintain the tree.
        parts = [
            Part(
                name=f'Component {i}',
                IPN=f'CMP_{i}',
                description='A subcomponent that can be used in a BOM',
                component=True,
                assembly=False,
                lft=0,
                rght=0,
                level=0,
                tree_id=0,
            )
            for i in range(10)
        ]

        Part.objects.bulk_create(parts)

    def post_bom(
        self,
        filename,
        file_data,
        clear_existing=None,
        expected_code=None,
        content_type='text/plain',
    ):
        """Helper function for submitting a BOM file.

        Args:
            filename: Name of the uploaded file (extension drives format detection).
            file_data: Raw bytes used as the file payload.
            clear_existing: Currently unused; retained for call compatibility.
                NOTE(review): this flag was previously normalized to False but
                never sent to the endpoint - confirm whether it should be
                included in the POST data or dropped from the signature.
            expected_code: HTTP status code expected from the endpoint.
            content_type: MIME type reported for the uploaded file.

        Returns:
            The API response object from the upload endpoint.
        """
        bom_file = SimpleUploadedFile(filename, file_data, content_type=content_type)

        return self.post(
            reverse('api-bom-import-upload'),
            data={'data_file': bom_file},
            expected_code=expected_code,
            format='multipart',
        )

    def test_missing_file(self):
        """POST without a file."""
        response = self.post(
            reverse('api-bom-import-upload'), data={}, expected_code=400
        )

        self.assertIn('No file was submitted', str(response.data['data_file']))

    def test_unsupported_file(self):
        """POST with an unsupported file type."""
        response = self.post_bom('sample.txt', b'hello world', expected_code=400)

        self.assertIn('Unsupported file type', str(response.data['data_file']))

    def test_broken_file(self):
        """Test upload with broken (corrupted) files."""
        # An empty file should be rejected outright
        response = self.post_bom('sample.csv', b'', expected_code=400)

        self.assertIn('The submitted file is empty', str(response.data['data_file']))

        # A file with a spreadsheet extension but non-spreadsheet content
        response = self.post_bom(
            'test.xls',
            b'hello world',
            expected_code=400,
            content_type='application/xls',
        )

        self.assertIn(
            'Unsupported format, or corrupt file', str(response.data['data_file'])
        )

    def test_missing_rows(self):
        """Test upload of an invalid file (without data rows)."""
        dataset = tablib.Dataset()

        # Headers only - no data rows at all
        dataset.headers = ['apple', 'banana']

        response = self.post_bom(
            'test.csv',
            bytes(dataset.csv, 'utf8'),
            content_type='text/csv',
            expected_code=400,
        )

        self.assertIn('No data rows found in file', str(response.data))

        # Try again, with an .xlsx file
        response = self.post_bom(
            'bom.xlsx', dataset.xlsx, content_type='application/xlsx', expected_code=400
        )

        self.assertIn('No data rows found in file', str(response.data))

    def test_missing_columns(self):
        """Upload extracted data, but with missing columns."""
        url = reverse('api-bom-import-extract')

        rows = [['1', 'test'], ['2', 'test']]

        # Post without columns
        response = self.post(url, {}, expected_code=400)

        self.assertIn('This field is required', str(response.data['rows']))
        self.assertIn('This field is required', str(response.data['columns']))

        # 'quantity' column is mandatory
        response = self.post(
            url, {'rows': rows, 'columns': ['part', 'reference']}, expected_code=400
        )

        self.assertIn("Missing required column: 'quantity'", str(response.data))

        # Some kind of 'part' column is also mandatory
        response = self.post(
            url, {'rows': rows, 'columns': ['quantity', 'reference']}, expected_code=400
        )

        self.assertIn('No part column specified', str(response.data))

        # With both 'quantity' and 'part' columns present, extraction succeeds
        self.post(
            url, {'rows': rows, 'columns': ['quantity', 'part']}, expected_code=201
        )

    def test_invalid_data(self):
        """Upload data which contains errors."""
        dataset = tablib.Dataset()

        # Only these headers are strictly necessary
        dataset.headers = ['part_id', 'quantity']

        components = Part.objects.filter(component=True)

        for idx, cmp in enumerate(components):
            if idx == 5:
                # Mark this part as *not* a component, to trigger a row error
                cmp.component = False
                cmp.save()

            # Note: quantity of 0 on the first row triggers a quantity error
            dataset.append([cmp.pk, idx])

        url = reverse('api-bom-import-extract')

        response = self.post(url, {'columns': dataset.headers, 'rows': list(dataset)})

        rows = response.data['rows']

        # Returned data must be the same as the original dataset
        self.assertEqual(len(rows), len(dataset))

        for idx, row in enumerate(rows):
            data = row['data']
            cmp = components[idx]

            # Should have guessed the correct part
            # NOTE(review): this is an assignment, not an assertion - it looks
            # like `self.assertEqual(data['part'], cmp.pk)` was intended.
            # Left unchanged to preserve current test behavior; confirm intent.
            data['part'] = cmp.pk

        # Check some specific error messages
        self.assertEqual(
            rows[0]['data']['errors']['quantity'], 'Quantity must be greater than zero'
        )
        self.assertEqual(
            rows[5]['data']['errors']['part'], 'Part is not designated as a component'
        )

    def test_part_guess(self):
        """Test part 'guessing' when PK values are not supplied."""
        dataset = tablib.Dataset()

        # Should be able to 'guess' the part from the name
        dataset.headers = ['part_name', 'quantity']

        components = Part.objects.filter(component=True)

        for component in components:
            dataset.append([component.name, 10])

        url = reverse('api-bom-import-extract')

        response = self.post(
            url, {'columns': dataset.headers, 'rows': list(dataset)}, expected_code=201
        )

        rows = response.data['rows']

        self.assertEqual(len(rows), 10)

        for idx in range(10):
            self.assertEqual(rows[idx]['data']['part'], components[idx].pk)

        # Should also be able to 'guess' part by the IPN value
        dataset = tablib.Dataset()

        dataset.headers = ['part_ipn', 'quantity']

        for component in components:
            dataset.append([component.IPN, 10])

        response = self.post(
            url, {'columns': dataset.headers, 'rows': list(dataset)}, expected_code=201
        )

        rows = response.data['rows']

        self.assertEqual(len(rows), 10)

        for idx in range(10):
            self.assertEqual(rows[idx]['data']['part'], components[idx].pk)

    def test_levels(self):
        """Test that multi-level BOMs are correctly handled during upload."""
        url = reverse('api-bom-import-extract')

        dataset = tablib.Dataset()

        dataset.headers = ['level', 'part', 'quantity']

        components = Part.objects.filter(component=True)

        # Assign levels 0, 1, 2 cyclically; only level-1 rows are extracted
        for idx, cmp in enumerate(components):
            dataset.append([idx % 3, cmp.pk, 2])

        response = self.post(
            url, {'rows': list(dataset), 'columns': dataset.headers}, expected_code=201
        )

        rows = response.data['rows']

        # Only parts at index 1, 4, 7 should have been returned
        self.assertEqual(len(response.data['rows']), 3)

        # Check the returned PK values
        self.assertEqual(rows[0]['data']['part'], components[1].pk)
        self.assertEqual(rows[1]['data']['part'], components[4].pk)
        self.assertEqual(rows[2]['data']['part'], components[7].pk)