2
0
mirror of https://github.com/inventree/InvenTree.git synced 2025-08-13 23:21:09 +00:00

Merge branch 'inventree:master' into matmair/issue2279

This commit is contained in:
Matthias Mair
2022-02-20 02:36:53 +01:00
committed by GitHub
25 changed files with 1198 additions and 756 deletions

View File

@@ -995,6 +995,23 @@ class PartList(generics.ListCreateAPIView):
except (ValueError, Part.DoesNotExist):
pass
# Filter only parts which are in the "BOM" for a given part
in_bom_for = params.get('in_bom_for', None)
if in_bom_for is not None:
try:
in_bom_for = Part.objects.get(pk=in_bom_for)
# Extract a list of parts within the BOM
bom_parts = in_bom_for.get_parts_in_bom()
print("bom_parts:", bom_parts)
print([p.pk for p in bom_parts])
queryset = queryset.filter(pk__in=[p.pk for p in bom_parts])
except (ValueError, Part.DoesNotExist):
pass
# Filter by whether the BOM has been validated (or not)
bom_valid = params.get('bom_valid', None)
@@ -1533,13 +1550,15 @@ class BomList(generics.ListCreateAPIView):
]
class BomExtract(generics.CreateAPIView):
class BomImportUpload(generics.CreateAPIView):
"""
API endpoint for extracting BOM data from a BOM file.
API endpoint for uploading a complete Bill of Materials.
It is assumed that the BOM has been extracted from a file using the BomExtract endpoint.
"""
queryset = Part.objects.none()
serializer_class = part_serializers.BomExtractSerializer
queryset = Part.objects.all()
serializer_class = part_serializers.BomImportUploadSerializer
def create(self, request, *args, **kwargs):
"""
@@ -1556,15 +1575,22 @@ class BomExtract(generics.CreateAPIView):
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class BomUpload(generics.CreateAPIView):
class BomImportExtract(generics.CreateAPIView):
"""
API endpoint for uploading a complete Bill of Materials.
It is assumed that the BOM has been extracted from a file using the BomExtract endpoint.
API endpoint for extracting BOM data from a BOM file.
"""
queryset = Part.objects.all()
serializer_class = part_serializers.BomUploadSerializer
queryset = Part.objects.none()
serializer_class = part_serializers.BomImportExtractSerializer
class BomImportSubmit(generics.CreateAPIView):
    """
    API endpoint for submitting BOM data from a BOM file

    Final step of the BOM import flow (after upload and extract).
    All handling is delegated to the serializer.
    """

    # No list/detail behaviour is exposed; an empty queryset satisfies DRF
    queryset = BomItem.objects.none()
    serializer_class = part_serializers.BomImportSubmitSerializer
class BomDetail(generics.RetrieveUpdateDestroyAPIView):
@@ -1719,9 +1745,10 @@ bom_api_urls = [
url(r'^.*$', BomDetail.as_view(), name='api-bom-item-detail'),
])),
url(r'^extract/', BomExtract.as_view(), name='api-bom-extract'),
url(r'^upload/', BomUpload.as_view(), name='api-bom-upload'),
# API endpoint URLs for importing BOM data
url(r'^import/upload/', BomImportUpload.as_view(), name='api-bom-import-upload'),
url(r'^import/extract/', BomImportExtract.as_view(), name='api-bom-import-extract'),
url(r'^import/submit/', BomImportSubmit.as_view(), name='api-bom-import-submit'),
# Catch-all
url(r'^.*$', BomList.as_view(), name='api-bom-list'),

View File

@@ -46,7 +46,7 @@ from common.models import InvenTreeSetting
from InvenTree import helpers
from InvenTree import validators
from InvenTree.models import InvenTreeTree, InvenTreeAttachment
from InvenTree.models import InvenTreeTree, InvenTreeAttachment, DataImportMixin
from InvenTree.fields import InvenTreeURLField
from InvenTree.helpers import decimal2string, normalize, decimal2money
import InvenTree.tasks
@@ -483,6 +483,36 @@ class Part(MPTTModel):
def __str__(self):
return f"{self.full_name} - {self.description}"
def get_parts_in_bom(self):
    """Return the set of all parts which appear in the BOM for this part.

    Takes into account substitutes, variant parts, and inherited BOM items
    (each BOM item contributes every part valid for allocation against it).
    """
    return {
        component
        for bom_item in self.get_bom_items()
        for component in bom_item.get_valid_parts_for_allocation()
    }
def check_if_part_in_bom(self, other_part):
    """
    Check if the other_part is in the BOM for this part.

    Note:
    - Accounts for substitute parts
    - Accounts for variant BOMs
    """
    # True as soon as any BOM item accepts other_part for allocation
    return any(
        other_part in item.get_valid_parts_for_allocation()
        for item in self.get_bom_items()
    )
def check_add_to_bom(self, parent, raise_error=False, recursive=True):
"""
Check if this Part can be added to the BOM of another part.
@@ -2550,7 +2580,7 @@ class PartCategoryParameterTemplate(models.Model):
help_text=_('Default Parameter Value'))
class BomItem(models.Model):
class BomItem(models.Model, DataImportMixin):
""" A BomItem links a part to its component items.
A part can have a BOM (bill of materials) which defines
which parts are required (and in what quantity) to make it.
@@ -2568,6 +2598,39 @@ class BomItem(models.Model):
allow_variants: Stock for part variants can be substituted for this BomItem
"""
# Fields available for bulk import
# Consumed by the DataImportMixin framework: keys are field names,
# values are per-field options (e.g. 'required', 'label', 'help_text').
IMPORT_FIELDS = {
    'quantity': {
        'required': True
    },
    'reference': {},
    'overage': {},
    'allow_variants': {},
    'inherited': {},
    'optional': {},
    'note': {},
    # The part may be identified by any one of the following columns
    'part': {
        'label': _('Part'),
        'help_text': _('Part ID or part name'),
    },
    'part_id': {
        'label': _('Part ID'),
        'help_text': _('Unique part ID value')
    },
    'part_name': {
        'label': _('Part Name'),
        'help_text': _('Part name'),
    },
    'part_ipn': {
        'label': _('Part IPN'),
        'help_text': _('Part IPN value'),
    },
    # Used to filter out sub-assembly rows in multi-level BOM files
    'level': {
        'label': _('Level'),
        'help_text': _('BOM level'),
    }
}
@staticmethod
def get_api_url():
    """Return the API list endpoint URL for the BomItem model."""
    return reverse('api-bom-list')

View File

@@ -4,8 +4,6 @@ JSON serializers for Part app
import imghdr
from decimal import Decimal
import os
import tablib
from django.urls import reverse_lazy
from django.db import models, transaction
@@ -17,7 +15,9 @@ from rest_framework import serializers
from sql_util.utils import SubqueryCount, SubquerySum
from djmoney.contrib.django_rest_framework import MoneyField
from InvenTree.serializers import (InvenTreeAttachmentSerializerField,
from InvenTree.serializers import (DataFileUploadSerializer,
DataFileExtractSerializer,
InvenTreeAttachmentSerializerField,
InvenTreeDecimalField,
InvenTreeImageSerializerField,
InvenTreeModelSerializer,
@@ -709,307 +709,129 @@ class PartCopyBOMSerializer(serializers.Serializer):
)
class BomExtractSerializer(serializers.Serializer):
class BomImportUploadSerializer(DataFileUploadSerializer):
"""
Serializer for uploading a file and extracting data from it.
Note: 2022-02-04 - This likely needs a *serious* refactor in the future
When parsing the file, the following things happen:
a) Check file format and validity
b) Look for "required" fields
c) Look for "part" fields - used to "infer" part
Once the file itself has been validated, we iterate through each data row:
- If the "level" column is provided, ignore anything below level 1
- Try to "guess" the part based on part_id / part_name / part_ipn
- Extract other fields as required
"""
TARGET_MODEL = BomItem
class Meta:
fields = [
'bom_file',
'data_file',
'part',
'clear_existing',
'clear_existing_bom',
]
# These columns must be present
REQUIRED_COLUMNS = [
'quantity',
]
# We need at least one column to specify a "part"
PART_COLUMNS = [
'part',
'part_id',
'part_name',
'part_ipn',
]
# These columns are "optional"
OPTIONAL_COLUMNS = [
'allow_variants',
'inherited',
'optional',
'overage',
'note',
'reference',
]
def find_matching_column(self, col_name, columns):
    """Return the entry in *columns* which matches col_name, or None.

    An exact match is preferred; otherwise the comparison ignores
    case and surrounding whitespace. The original column spelling
    is returned (not the normalized form).
    """
    if col_name in columns:
        # Exact match takes priority
        return col_name

    needle = col_name.lower().strip()
    candidates = [col for col in columns if col.lower().strip() == needle]

    # First fuzzy match wins; None if nothing matched
    return candidates[0] if candidates else None
def find_matching_data(self, row, col_name, columns):
    """Return the value in *row* for the column matching col_name.

    Column matching is delegated to find_matching_column; returns
    None when no column matches or the row has no such key.
    """
    matched = self.find_matching_column(col_name, columns)
    return row.get(matched)
bom_file = serializers.FileField(
label=_("BOM File"),
help_text=_("Select Bill of Materials file"),
part = serializers.PrimaryKeyRelatedField(
queryset=Part.objects.all(),
required=True,
allow_empty_file=False,
allow_null=False,
many=False,
)
def validate_bom_file(self, bom_file):
    """
    Perform validation checks on the uploaded BOM file.

    Checks, in order:
    - File extension is an accepted spreadsheet/text format
    - File size is within the upload limit (50MB)
    - Content parses into a tablib Dataset (headers expected)
    - Every REQUIRED_COLUMNS entry is present (fuzzy name match)
    - At least one PART_COLUMNS entry is present
    - At least one data row exists

    Side effects: stores bom_file.name on self.filename and the parsed
    dataset on self.dataset for later use.

    Raises:
        serializers.ValidationError: if any check fails
    """
    self.filename = bom_file.name

    name, ext = os.path.splitext(bom_file.name)

    # Remove the leading . from the extension
    ext = ext[1:]

    # Formats which tablib can parse
    accepted_file_types = [
        'xls', 'xlsx',
        'csv', 'tsv',
        'xml',
    ]

    if ext not in accepted_file_types:
        raise serializers.ValidationError(_("Unsupported file type"))

    # Impose a 50MB limit on uploaded BOM files
    max_upload_file_size = 50 * 1024 * 1024

    if bom_file.size > max_upload_file_size:
        raise serializers.ValidationError(_("File is too large"))

    # Read file data into memory (bytes object)
    try:
        data = bom_file.read()
    except Exception as e:
        raise serializers.ValidationError(str(e))

    # Text-based formats must be decoded to str before tablib can load them
    if ext in ['csv', 'tsv', 'xml']:
        try:
            data = data.decode()
        except Exception as e:
            raise serializers.ValidationError(str(e))

    # Convert to a tablib dataset (we expect headers)
    try:
        self.dataset = tablib.Dataset().load(data, ext, headers=True)
    except Exception as e:
        raise serializers.ValidationError(str(e))

    # Every "required" column must be matched (case/whitespace-insensitive)
    for header in self.REQUIRED_COLUMNS:

        match = self.find_matching_column(header, self.dataset.headers)

        if match is None:
            raise serializers.ValidationError(_("Missing required column") + f": '{header}'")

    # At least one column which can identify a "part" must be present
    part_column_matches = {}

    part_match = False

    for col in self.PART_COLUMNS:
        col_match = self.find_matching_column(col, self.dataset.headers)

        part_column_matches[col] = col_match

        if col_match is not None:
            part_match = True

    if not part_match:
        raise serializers.ValidationError(_("No part column found"))

    # Reject a headers-only file with no data rows
    if len(self.dataset) == 0:
        raise serializers.ValidationError(_("No data rows found"))

    return bom_file
def extract_data(self):
    """
    Read individual rows out of the BOM file.

    Assumes validate_bom_file() has already run (self.dataset and
    self.filename are populated). For each data row:
    - Rows below BOM level 1 are skipped (if a 'level' column exists)
    - A Part is "guessed" from part_id / part / part_name / part_ipn
    - The 'quantity' value is validated
    - Optional columns are normalized onto their canonical names

    Returns:
        dict with keys 'rows' (annotated row dicts), 'errors'
        (one error-dict per returned row, aligned by index),
        'headers' (original column headers) and 'filename'.
    """
    rows = []
    errors = []

    # Track parts already seen, to flag duplicate BOM lines
    found_parts = set()

    headers = self.dataset.headers

    level_column = self.find_matching_column('level', headers)

    for row in self.dataset.dict:

        row_error = {}

        """
        If the "level" column is specified, and this is not a top-level BOM item, ignore the row!
        """
        if level_column is not None:
            level = row.get('level', None)

            if level is not None:
                try:
                    level = int(level)
                    if level != 1:
                        continue
                # NOTE(review): bare except - an unparseable level keeps the row
                except:
                    pass

        """
        Next, we try to "guess" the part, based on the provided data.
        A) If the part_id is supplied, use that!
        B) If the part name and/or part_ipn are supplied, maybe we can use those?
        """
        part_id = self.find_matching_data(row, 'part_id', headers)
        part_name = self.find_matching_data(row, 'part_name', headers)
        part_ipn = self.find_matching_data(row, 'part_ipn', headers)

        part = None

        if part_id is not None:
            try:
                part = Part.objects.get(pk=part_id)
            except (ValueError, Part.DoesNotExist):
                pass

        # Optionally, specify using field "part"
        if part is None:
            pk = self.find_matching_data(row, 'part', headers)

            if pk is not None:
                try:
                    part = Part.objects.get(pk=pk)
                except (ValueError, Part.DoesNotExist):
                    pass

        if part is None:

            if part_name or part_ipn:
                queryset = Part.objects.all()

                if part_name:
                    queryset = queryset.filter(name=part_name)

                if part_ipn:
                    queryset = queryset.filter(IPN=part_ipn)

                # Only if we have a single direct match
                if queryset.exists():
                    if queryset.count() == 1:
                        part = queryset.first()
                    else:
                        # Multiple matches!
                        row_error['part'] = _('Multiple matching parts found')

        if part is None:
            # Don't clobber a more specific error (e.g. multiple matches)
            if 'part' not in row_error:
                row_error['part'] = _('No matching part found')
        else:
            if part.pk in found_parts:
                row_error['part'] = _("Duplicate part selected")

            elif not part.component:
                row_error['part'] = _('Part is not designated as a component')

            found_parts.add(part.pk)

        # Normalize the guessed part onto the 'part' key (pk or None)
        row['part'] = part.pk if part is not None else None

        """
        Read out the 'quantity' column - check that it is valid
        """
        quantity = self.find_matching_data(row, 'quantity', self.dataset.headers)

        # Ensure quantity field is provided
        row['quantity'] = quantity

        if quantity is None:
            row_error['quantity'] = _('Quantity not provided')
        else:
            try:
                quantity = Decimal(quantity)

                if quantity <= 0:
                    row_error['quantity'] = _('Quantity must be greater than zero')
            # NOTE(review): bare except - any Decimal parse failure is 'Invalid quantity'
            except:
                row_error['quantity'] = _('Invalid quantity')

        # For each "optional" column, ensure the column names are allocated correctly
        for field_name in self.OPTIONAL_COLUMNS:
            if field_name not in row:
                row[field_name] = self.find_matching_data(row, field_name, self.dataset.headers)

        rows.append(row)
        errors.append(row_error)

    return {
        'rows': rows,
        'errors': errors,
        'headers': headers,
        'filename': self.filename,
    }
part = serializers.PrimaryKeyRelatedField(queryset=Part.objects.filter(assembly=True), required=True)
clear_existing = serializers.BooleanField(
label=_("Clear Existing BOM"),
help_text=_("Delete existing BOM data first"),
clear_existing_bom = serializers.BooleanField(
label=_('Clear Existing BOM'),
help_text=_('Delete existing BOM items before uploading')
)
def save(self):
data = self.validated_data
master_part = data['part']
clear_existing = data['clear_existing']
if data.get('clear_existing_bom', False):
part = data['part']
if clear_existing:
# Remove all existing BOM items
master_part.bom_items.all().delete()
with transaction.atomic():
part.bom_items.all().delete()
class BomUploadSerializer(serializers.Serializer):
class BomImportExtractSerializer(DataFileExtractSerializer):
"""
"""
TARGET_MODEL = BomItem
def validate_extracted_columns(self):
    """Ensure the extracted columns include at least one part-identifying column."""
    super().validate_extracted_columns()

    # Any one of these columns is sufficient to identify a part
    part_columns = ('part', 'part_name', 'part_ipn', 'part_id')

    if all(col not in self.columns for col in part_columns):
        # At least one part column is required!
        raise serializers.ValidationError(_("No part column specified"))
def process_row(self, row):
    """
    Process a single extracted BOM row.

    - Skips rows below the top 'level' (multi-level BOM files)
    - Attempts to match a Part via part_id / part / part_name / part_ipn,
      recording any problem under row['errors']['part']
    - Validates the 'quantity' value, recording problems under
      row['errors']['quantity']

    Assumes row['errors'] dict is pre-populated by the framework
    (TODO confirm against DataFileExtractSerializer).

    Returns:
        The annotated row dict, or None if the row is to be skipped.
    """
    # Skip any rows which are at a lower "level"
    level = row.get('level', None)

    if level is not None:
        try:
            level = int(level)
            if level != 1:
                # Skip this row
                return None
        # Fix: was a bare 'except:' which swallowed even SystemExit /
        # KeyboardInterrupt. An unparseable level keeps the row.
        except (ValueError, TypeError):
            pass

    # Attempt to extract a valid part based on the provided data
    part_id = row.get('part_id', row.get('part', None))
    part_name = row.get('part_name', row.get('part', None))
    part_ipn = row.get('part_ipn', None)

    part = None

    if part_id is not None:
        try:
            part = Part.objects.get(pk=part_id)
        except (ValueError, Part.DoesNotExist):
            pass

    # No direct match, where else can we look?
    if part is None:
        if part_name or part_ipn:
            queryset = Part.objects.all()

            if part_name:
                queryset = queryset.filter(name=part_name)

            if part_ipn:
                queryset = queryset.filter(IPN=part_ipn)

            if queryset.exists():
                if queryset.count() == 1:
                    part = queryset.first()
                else:
                    row['errors']['part'] = _('Multiple matching parts found')

    if part is None:
        # Fix: don't overwrite a more specific error (e.g. multiple matches)
        # with the generic "no match" message
        if 'part' not in row['errors']:
            row['errors']['part'] = _('No matching part found')
    else:
        if not part.component:
            row['errors']['part'] = _('Part is not designated as a component')

    # Update the 'part' value in the row
    row['part'] = part.pk if part is not None else None

    # Check the provided 'quantity' value
    quantity = row.get('quantity', None)

    if quantity is None:
        row['errors']['quantity'] = _('Quantity not provided')
    else:
        try:
            quantity = Decimal(quantity)

            if quantity <= 0:
                row['errors']['quantity'] = _('Quantity must be greater than zero')
        # Fix: was a bare 'except:'. Decimal raises InvalidOperation
        # (a subclass of ArithmeticError) for unparseable values
        except (ValueError, TypeError, ArithmeticError):
            row['errors']['quantity'] = _('Invalid quantity')

    return row
class BomImportSubmitSerializer(serializers.Serializer):
"""
Serializer for uploading a BOM against a specified part.

View File

@@ -77,15 +77,15 @@ $('#bom-template-download').click(function() {
$('#bom-upload').click(function() {
constructForm('{% url "api-bom-extract" %}', {
constructForm('{% url "api-bom-import-upload" %}', {
method: 'POST',
fields: {
bom_file: {},
data_file: {},
part: {
value: {{ part.pk }},
hidden: true,
},
clear_existing: {},
clear_existing_bom: {},
},
title: '{% trans "Upload BOM File" %}',
onSuccess: function(response) {
@@ -93,16 +93,24 @@ $('#bom-upload').click(function() {
// Clear existing entries from the table
$('.bom-import-row').remove();
// Disable the "submit" button
$('#bom-submit').show();
selectImportFields(
'{% url "api-bom-import-extract" %}',
response,
{
success: function(response) {
constructBomUploadTable(response);
constructBomUploadTable(response);
// Show the "submit" button
$('#bom-submit').show();
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
});
});
$('#bom-submit').click(function() {
submitBomTable({{ part.pk }}, {
bom_data: response,
});
});
}
}
);
}
});

View File

@@ -41,8 +41,6 @@ class BomUploadTest(InvenTreeAPITestCase):
assembly=False,
)
self.url = reverse('api-bom-extract')
def post_bom(self, filename, file_data, part=None, clear_existing=None, expected_code=None, content_type='text/plain'):
bom_file = SimpleUploadedFile(
@@ -58,11 +56,9 @@ class BomUploadTest(InvenTreeAPITestCase):
clear_existing = False
response = self.post(
self.url,
reverse('api-bom-import-upload'),
data={
'bom_file': bom_file,
'part': part,
'clear_existing': clear_existing,
'data_file': bom_file,
},
expected_code=expected_code,
format='multipart',
@@ -76,14 +72,12 @@ class BomUploadTest(InvenTreeAPITestCase):
"""
response = self.post(
self.url,
reverse('api-bom-import-upload'),
data={},
expected_code=400
)
self.assertIn('No file was submitted', str(response.data['bom_file']))
self.assertIn('This field is required', str(response.data['part']))
self.assertIn('This field is required', str(response.data['clear_existing']))
self.assertIn('No file was submitted', str(response.data['data_file']))
def test_unsupported_file(self):
"""
@@ -96,7 +90,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('Unsupported file type', str(response.data['bom_file']))
self.assertIn('Unsupported file type', str(response.data['data_file']))
def test_broken_file(self):
"""
@@ -109,7 +103,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('The submitted file is empty', str(response.data['bom_file']))
self.assertIn('The submitted file is empty', str(response.data['data_file']))
response = self.post_bom(
'test.xls',
@@ -118,11 +112,11 @@ class BomUploadTest(InvenTreeAPITestCase):
content_type='application/xls',
)
self.assertIn('Unsupported format, or corrupt file', str(response.data['bom_file']))
self.assertIn('Unsupported format, or corrupt file', str(response.data['data_file']))
def test_invalid_upload(self):
def test_missing_rows(self):
"""
Test upload of an invalid file
Test upload of an invalid file (without data rows)
"""
dataset = tablib.Dataset()
@@ -139,7 +133,7 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn("Missing required column: 'quantity'", str(response.data))
self.assertIn('No data rows found in file', str(response.data))
# Try again, with an .xlsx file
response = self.post_bom(
@@ -149,32 +143,61 @@ class BomUploadTest(InvenTreeAPITestCase):
expected_code=400,
)
self.assertIn('No data rows found in file', str(response.data))
def test_missing_columns(self):
"""
Upload extracted data, but with missing columns
"""
url = reverse('api-bom-import-extract')
rows = [
['1', 'test'],
['2', 'test'],
]
# Post without columns
response = self.post(
url,
{},
expected_code=400,
)
self.assertIn('This field is required', str(response.data['rows']))
self.assertIn('This field is required', str(response.data['columns']))
response = self.post(
url,
{
'rows': rows,
'columns': ['part', 'reference'],
},
expected_code=400
)
self.assertIn("Missing required column: 'quantity'", str(response.data))
# Add the quantity field (or close enough)
dataset.headers.append('quAntiTy ')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
response = self.post(
url,
{
'rows': rows,
'columns': ['quantity', 'reference'],
},
expected_code=400,
)
self.assertIn('No part column found', str(response.data))
self.assertIn('No part column specified', str(response.data))
dataset.headers.append('part_id')
dataset.headers.append('part_name')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
expected_code=400,
response = self.post(
url,
{
'rows': rows,
'columns': ['quantity', 'part'],
},
expected_code=201,
)
self.assertIn('No data rows found', str(response.data))
def test_invalid_data(self):
"""
Upload data which contains errors
@@ -195,25 +218,31 @@ class BomUploadTest(InvenTreeAPITestCase):
dataset.append([cmp.pk, idx])
# Add a duplicate part too
dataset.append([components.first().pk, 'invalid'])
url = reverse('api-bom-import-extract')
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
content_type='text/csv',
expected_code=201
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
)
errors = response.data['errors']
rows = response.data['rows']
self.assertIn('Quantity must be greater than zero', str(errors[0]))
self.assertIn('Part is not designated as a component', str(errors[5]))
self.assertIn('Duplicate part selected', str(errors[-1]))
self.assertIn('Invalid quantity', str(errors[-1]))
# Returned data must be the same as the original dataset
self.assertEqual(len(rows), len(dataset))
for idx, row in enumerate(response.data['rows'][:-1]):
self.assertEqual(str(row['part']), str(components[idx].pk))
for idx, row in enumerate(rows):
data = row['data']
cmp = components[idx]
# Should have guessed the correct part
data['part'] = cmp.pk
# Check some specific error messages
self.assertEqual(rows[0]['data']['errors']['quantity'], 'Quantity must be greater than zero')
self.assertEqual(rows[5]['data']['errors']['part'], 'Part is not designated as a component')
def test_part_guess(self):
"""
@@ -233,9 +262,14 @@ class BomUploadTest(InvenTreeAPITestCase):
10,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
url = reverse('api-bom-import-extract')
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
expected_code=201,
)
@@ -244,7 +278,7 @@ class BomUploadTest(InvenTreeAPITestCase):
self.assertEqual(len(rows), 10)
for idx in range(10):
self.assertEqual(rows[idx]['part'], components[idx].pk)
self.assertEqual(rows[idx]['data']['part'], components[idx].pk)
# Should also be able to 'guess' part by the IPN value
dataset = tablib.Dataset()
@@ -257,9 +291,12 @@ class BomUploadTest(InvenTreeAPITestCase):
10,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
response = self.post(
url,
{
'columns': dataset.headers,
'rows': [row for row in dataset],
},
expected_code=201,
)
@@ -268,13 +305,15 @@ class BomUploadTest(InvenTreeAPITestCase):
self.assertEqual(len(rows), 10)
for idx in range(10):
self.assertEqual(rows[idx]['part'], components[idx].pk)
self.assertEqual(rows[idx]['data']['part'], components[idx].pk)
def test_levels(self):
"""
Test that multi-level BOMs are correctly handled during upload
"""
url = reverse('api-bom-import-extract')
dataset = tablib.Dataset()
dataset.headers = ['level', 'part', 'quantity']
@@ -288,11 +327,21 @@ class BomUploadTest(InvenTreeAPITestCase):
2,
])
response = self.post_bom(
'test.csv',
bytes(dataset.csv, 'utf8'),
response = self.post(
url,
{
'rows': [row for row in dataset],
'columns': dataset.headers,
},
expected_code=201,
)
rows = response.data['rows']
# Only parts at index 1, 4, 7 should have been returned
self.assertEqual(len(response.data['rows']), 3)
# Check the returned PK values
self.assertEqual(rows[0]['data']['part'], components[1].pk)
self.assertEqual(rows[1]['data']['part'], components[4].pk)
self.assertEqual(rows[2]['data']['part'], components[7].pk)