Mirror of https://github.com/inventree/InvenTree.git — synced 2026-04-02 09:31:02 +00:00

Update DataExport functionality (#11604)

* Update DataExport functionality

- Chunk queryset into memory

* Allow larger number of queries for chunked database fetching

* Handle possible exception in unit testing
This commit is contained in:
Oliver
2026-03-25 00:35:08 +11:00
committed by GitHub
parent 953b77bed9
commit 8ec61aca0a
3 changed files with 35 additions and 8 deletions

View File

@@ -1575,9 +1575,14 @@ class CurrencyAPITests(InvenTreeAPITestCase):
         # Updating via the external exchange may not work every time
         for _idx in range(5):
-            self.post(
-                reverse('api-currency-refresh'), expected_code=200, max_query_time=30
-            )
+            try:
+                self.post(
+                    reverse('api-currency-refresh'),
+                    expected_code=200,
+                    max_query_time=30,
+                )
+            except Exception:
+                continue

         # There should be some new exchange rate objects now
         if Rate.objects.all().exists():

View File

@@ -22,6 +22,9 @@ class DataExportMixin:
     ExportOptionsSerializer = None

+    # How many rows to fetch into memory at once when exporting data?
+    EXPORT_CHUNK_SIZE: int = 250
+
     class MixinMeta:
         """Meta options for this mixin."""
@@ -108,10 +111,29 @@ class DataExportMixin:
         Returns: The exported data (a list of dict objects)
         """
-        # The default implementation simply serializes the queryset
-        return serializer_class(
-            queryset, many=True, exporting=True, context=serializer_context or {}
-        ).data
+        output.refresh_from_db()
+        N = queryset.count()
+
+        rows = []
+        offset = 0
+
+        while offset < N:
+            chunk = queryset[offset : offset + self.EXPORT_CHUNK_SIZE]
+
+            chunk_rows = serializer_class(
+                chunk, many=True, exporting=True, context=serializer_context or {}
+            ).data
+
+            rows.extend(chunk_rows)
+            offset += self.EXPORT_CHUNK_SIZE
+
+            # Update the export progress
+            output.progress += len(chunk_rows)
+            output.save()
+
+        return rows

     def get_export_options_serializer(self, **kwargs) -> serializers.Serializer | None:
"""Return a serializer class with dynamic export options for this plugin. """Return a serializer class with dynamic export options for this plugin.

View File

@@ -1038,7 +1038,7 @@ class StockItemListTest(StockAPITestCase):
         # Note: While the export is quick on pgsql, it is still quite slow on sqlite3
         with self.export_data(
             self.list_url,
-            max_query_count=50,
+            max_query_count=100,
             max_query_time=12.0,  # Test time increased due to worker variability
         ) as data_file:
             data = self.process_csv(data_file)