diff --git a/InvenTree/InvenTree/api_tester.py b/InvenTree/InvenTree/api_tester.py
index 3d8481f84e..fe2057b453 100644
--- a/InvenTree/InvenTree/api_tester.py
+++ b/InvenTree/InvenTree/api_tester.py
@@ -106,12 +106,12 @@ class InvenTreeAPITestCase(APITestCase):
 
         return response
 
-    def post(self, url, data, expected_code=None):
+    def post(self, url, data, expected_code=None, format='json'):
         """
         Issue a POST request
         """
 
-        response = self.client.post(url, data=data, format='json')
+        response = self.client.post(url, data=data, format=format)
 
         if expected_code is not None:
             self.assertEqual(response.status_code, expected_code)
@@ -130,12 +130,12 @@ class InvenTreeAPITestCase(APITestCase):
 
         return response
 
-    def patch(self, url, data, files=None, expected_code=None):
+    def patch(self, url, data, expected_code=None, format='json'):
         """
         Issue a PATCH request
         """
 
-        response = self.client.patch(url, data=data, files=files, format='json')
+        response = self.client.patch(url, data=data, format=format)
 
         if expected_code is not None:
             self.assertEqual(response.status_code, expected_code)
diff --git a/InvenTree/InvenTree/serializers.py b/InvenTree/InvenTree/serializers.py
index 59ba0295cb..ffc84a5f71 100644
--- a/InvenTree/InvenTree/serializers.py
+++ b/InvenTree/InvenTree/serializers.py
@@ -328,4 +328,7 @@ class InvenTreeDecimalField(serializers.FloatField):
 
     def to_internal_value(self, data):
         # Convert the value to a string, and then a decimal
-        return Decimal(str(data))
+        try:
+            return Decimal(str(data))
+        except:
+            raise serializers.ValidationError(_("Invalid value"))
diff --git a/InvenTree/part/serializers.py b/InvenTree/part/serializers.py
index 351348c6bc..c5f5216f38 100644
--- a/InvenTree/part/serializers.py
+++ b/InvenTree/part/serializers.py
@@ -818,13 +818,22 @@ class BomExtractSerializer(serializers.Serializer):
             raise serializers.ValidationError(_("File is too large"))
 
         # Read file data into memory (bytes object)
-        data = bom_file.read()
+        try:
+            data = bom_file.read()
+        except Exception as e:
+            raise serializers.ValidationError(str(e))
 
         if ext in ['csv', 'tsv', 'xml']:
-            data = data.decode()
+            try:
+                data = data.decode()
+            except Exception as e:
+                raise serializers.ValidationError(str(e))
 
         # Convert to a tablib dataset (we expect headers)
-        self.dataset = tablib.Dataset().load(data, ext, headers=True)
+        try:
+            self.dataset = tablib.Dataset().load(data, ext, headers=True)
+        except Exception as e:
+            raise serializers.ValidationError(str(e))
 
         for header in self.REQUIRED_COLUMNS:
 
@@ -848,6 +857,9 @@ class BomExtractSerializer(serializers.Serializer):
         if not part_match:
             raise serializers.ValidationError(_("No part column found"))
 
+        if len(self.dataset) == 0:
+            raise serializers.ValidationError(_("No data rows found"))
+
         return bom_file
 
     def extract_data(self):
         """
@@ -856,6 +868,9 @@ class BomExtractSerializer(serializers.Serializer):
         """
 
         rows = []
+        errors = []
+
+        found_parts = set()
 
         headers = self.dataset.headers
 
@@ -863,6 +878,8 @@ class BomExtractSerializer(serializers.Serializer):
 
         for row in self.dataset.dict:
 
+            row_error = {}
+
             """
            If the "level" column is specified, and this is not a top-level BOM item, ignore the row!
             """
@@ -907,25 +924,64 @@ class BomExtractSerializer(serializers.Serializer):
             if part is None:
 
-                if part_name is not None or part_ipn is not None:
+                if part_name or part_ipn:
 
                     queryset = Part.objects.all()
 
-                    if part_name is not None:
+                    if part_name:
                         queryset = queryset.filter(name=part_name)
 
-                    if part_ipn is not None:
+                    if part_ipn:
                         queryset = queryset.filter(IPN=part_ipn)
 
                     # Only if we have a single direct match
-                    if queryset.exists() and queryset.count() == 1:
-                        part = queryset.first()
+                    if queryset.exists():
+                        if queryset.count() == 1:
+                            part = queryset.first()
+                        else:
+                            # Multiple matches!
+                            row_error['part'] = _('Multiple matching parts found')
+
+            if part is None:
+                if 'part' not in row_error:
+                    row_error['part'] = _('No matching part found')
+            else:
+                if part.pk in found_parts:
+                    row_error['part'] = _("Duplicate part selected")
+
+                elif not part.component:
+                    row_error['part'] = _('Part is not designated as a component')
+
+                found_parts.add(part.pk)
 
             row['part'] = part.pk if part is not None else None
 
+            """
+            Read out the 'quantity' column - check that it is valid
+            """
+            quantity = self.find_matching_data(row, 'quantity', self.dataset.headers)
+
+            if quantity is None:
+                row_error['quantity'] = _('Quantity not provided')
+            else:
+                try:
+                    quantity = Decimal(quantity)
+
+                    if quantity <= 0:
+                        row_error['quantity'] = _('Quantity must be greater than zero')
+                except:
+                    row_error['quantity'] = _('Invalid quantity')
+
+            # For each "optional" column, ensure the column names are allocated correctly
+            for field_name in self.OPTIONAL_COLUMNS:
+                if field_name not in row:
+                    row[field_name] = self.find_matching_data(row, field_name, self.dataset.headers)
+
             rows.append(row)
+            errors.append(row_error)
 
         return {
             'rows': rows,
+            'errors': errors,
             'headers': headers,
             'filename': self.filename,
         }
diff --git a/InvenTree/part/templates/part/upload_bom.html b/InvenTree/part/templates/part/upload_bom.html
index 27f681acae..57c7014197 100644
--- a/InvenTree/part/templates/part/upload_bom.html
+++ b/InvenTree/part/templates/part/upload_bom.html
@@ -22,8 +22,11 @@
+
 {% endblock %}
@@ -43,7 +46,7 @@
-
 {% trans "Part" %}
diff --git a/InvenTree/part/test_bom_import.py b/InvenTree/part/test_bom_import.py
new file mode 100644
index 0000000000..ce622ed991
--- /dev/null
+++ b/InvenTree/part/test_bom_import.py
@@ -0,0 +1,298 @@
+"""
+Unit testing for BOM upload / import functionality
+"""
+
+import tablib
+
+from django.core.files.uploadedfile import SimpleUploadedFile
+from django.urls import reverse
+
+from InvenTree.api_tester import InvenTreeAPITestCase
+
+from part.models import Part
+
+
+class BomUploadTest(InvenTreeAPITestCase):
+    """
+    Test BOM file upload API endpoint
+    """
+
+    roles = [
+        'part.add',
+        'part.change',
+    ]
+
+    def setUp(self):
+        super().setUp()
+
+        self.part = Part.objects.create(
+            name='Assembly',
+            description='An assembled part',
+            assembly=True,
+            component=False,
+        )
+
+        for i in range(10):
+            Part.objects.create(
+                name=f"Component {i}",
+                IPN=f"CMP_{i}",
+                description="A subcomponent that can be used in a BOM",
+                component=True,
+                assembly=False,
+            )
+
+        self.url = reverse('api-bom-extract')
+
+    def post_bom(self, filename, file_data, part=None, clear_existing=None, expected_code=None, content_type='text/plain'):
+
+        bom_file = SimpleUploadedFile(
+            filename,
+            file_data,
+            content_type=content_type,
+        )
+
+        if part is None:
+            part = self.part.pk
+
+        if clear_existing is None:
+            clear_existing = False
+
+        response = self.post(
+            self.url,
+            data={
+                'bom_file': bom_file,
+                'part': part,
+                'clear_existing': clear_existing,
+            },
+            expected_code=expected_code,
+            format='multipart',
+        )
+
+        return response
+
+    def test_missing_file(self):
+        """
+        POST without a file
+        """
+
+        response = self.post(
+            self.url,
+            data={},
+            expected_code=400
+        )
+
+        self.assertIn('No file was submitted', str(response.data['bom_file']))
+        self.assertIn('This field is required', str(response.data['part']))
+        self.assertIn('This field is required', str(response.data['clear_existing']))
+
+    def test_unsupported_file(self):
+        """
+        POST with an unsupported file type
+        """
+
+        response = self.post_bom(
+            'sample.txt',
+            b'hello world',
+            expected_code=400,
+        )
+
+        self.assertIn('Unsupported file type', str(response.data['bom_file']))
+
+    def test_broken_file(self):
+        """
+        Test upload with broken (corrupted) files
+        """
+
+        response = self.post_bom(
+            'sample.csv',
+            b'',
+            expected_code=400,
+        )
+
+        self.assertIn('The submitted file is empty', str(response.data['bom_file']))
+
+        response = self.post_bom(
+            'test.xls',
+            b'hello world',
+            expected_code=400,
+            content_type='application/xls',
+        )
+
+        self.assertIn('Unsupported format, or corrupt file', str(response.data['bom_file']))
+
+    def test_invalid_upload(self):
+        """
+        Test upload of an invalid file
+        """
+
+        dataset = tablib.Dataset()
+
+        dataset.headers = [
+            'apple',
+            'banana',
+        ]
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            content_type='text/csv',
+            expected_code=400,
+        )
+
+        self.assertIn("Missing required column: 'quantity'", str(response.data))
+
+        # Try again, with an .xlsx file
+        response = self.post_bom(
+            'bom.xlsx',
+            dataset.xlsx,
+            content_type='application/xlsx',
+            expected_code=400,
+        )
+
+        self.assertIn("Missing required column: 'quantity'", str(response.data))
+
+        # Add the quantity field (or close enough)
+        dataset.headers.append('quAntiTy ')
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            content_type='text/csv',
+            expected_code=400,
+        )
+
+        self.assertIn('No part column found', str(response.data))
+
+        dataset.headers.append('part_id')
+        dataset.headers.append('part_name')
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            content_type='text/csv',
+            expected_code=400,
+        )
+
+        self.assertIn('No data rows found', str(response.data))
+
+    def test_invalid_data(self):
+        """
+        Upload data which contains errors
+        """
+
+        dataset = tablib.Dataset()
+
+        # Only these headers are strictly necessary
+        dataset.headers = ['part_id', 'quantity']
+
+        components = Part.objects.filter(component=True)
+
+        for idx, cmp in enumerate(components):
+
+            if idx == 5:
+                cmp.component = False
+                cmp.save()
+
+            dataset.append([cmp.pk, idx])
+
+        # Add a duplicate part too
+        dataset.append([components.first().pk, 'invalid'])
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            content_type='text/csv',
+            expected_code=201
+        )
+
+        errors = response.data['errors']
+
+        self.assertIn('Quantity must be greater than zero', str(errors[0]))
+        self.assertIn('Part is not designated as a component', str(errors[5]))
+        self.assertIn('Duplicate part selected', str(errors[-1]))
+        self.assertIn('Invalid quantity', str(errors[-1]))
+
+        for idx, row in enumerate(response.data['rows'][:-1]):
+            self.assertEqual(str(row['part']), str(components[idx].pk))
+
+    def test_part_guess(self):
+        """
+        Test part 'guessing' when PK values are not supplied
+        """
+
+        dataset = tablib.Dataset()
+
+        # Should be able to 'guess' the part from the name
+        dataset.headers = ['part_name', 'quantity']
+
+        components = Part.objects.filter(component=True)
+
+        for idx, cmp in enumerate(components):
+            dataset.append([
+                f"Component {idx}",
+                10,
+            ])
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            expected_code=201,
+        )
+
+        rows = response.data['rows']
+
+        self.assertEqual(len(rows), 10)
+
+        for idx in range(10):
+            self.assertEqual(rows[idx]['part'], components[idx].pk)
+
+        # Should also be able to 'guess' part by the IPN value
+        dataset = tablib.Dataset()
+
+        dataset.headers = ['part_ipn', 'quantity']
+
+        for idx, cmp in enumerate(components):
+            dataset.append([
+                f"CMP_{idx}",
+                10,
+            ])
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            expected_code=201,
+        )
+
+        rows = response.data['rows']
+
+        self.assertEqual(len(rows), 10)
+
+        for idx in range(10):
+            self.assertEqual(rows[idx]['part'], components[idx].pk)
+
+    def test_levels(self):
+        """
+        Test that multi-level BOMs are correctly handled during upload
+        """
+
+        dataset = tablib.Dataset()
+
+        dataset.headers = ['level', 'part', 'quantity']
+
+        components = Part.objects.filter(component=True)
+
+        for idx, cmp in enumerate(components):
+            dataset.append([
+                idx % 3,
+                cmp.pk,
+                2,
+            ])
+
+        response = self.post_bom(
+            'test.csv',
+            bytes(dataset.csv, 'utf8'),
+            expected_code=201,
+        )
+
+        # Only parts at index 1, 4, 7 should have been returned
+        self.assertEqual(len(response.data['rows']), 3)
diff --git a/InvenTree/templates/js/translated/bom.js b/InvenTree/templates/js/translated/bom.js
index fd23e70ad0..0c70bd3d86 100644
--- a/InvenTree/templates/js/translated/bom.js
+++ b/InvenTree/templates/js/translated/bom.js
@@ -40,6 +40,12 @@ function constructBomUploadTable(data, options={}) {
     function constructRow(row, idx, fields) {
         // Construct an individual row from the provided data
 
+        var errors = {};
+
+        if (data.errors && data.errors.length > idx) {
+            errors = data.errors[idx];
+        }
+
         var field_options = {
             hideLabels: true,
             hideClearButton: true,
@@ -72,7 +78,7 @@
 
         var buttons = `