From 76ed80107715da1b01d0dc181f8089594e8e2131 Mon Sep 17 00:00:00 2001 From: Philip Sargent Date: Tue, 31 Jan 2023 01:37:00 +0000 Subject: Minor bulk create tweak --- parsers/logbooks.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) (limited to 'parsers/logbooks.py') diff --git a/parsers/logbooks.py b/parsers/logbooks.py index 485f993..4d26448 100644 --- a/parsers/logbooks.py +++ b/parsers/logbooks.py @@ -213,6 +213,10 @@ def tidy_tid(tid, title): def store_entry_into_database(date, place, tripcave, title, text, trippersons, author, expedition, logtime_underground, tid): """saves a single logbook entry and related personlogentry items + + We could do a bulk update to save all the entries, but then we would need to do a query on + each one to get the primary key to assign to the PersonLogEntries. So overall probably not much + faster? """ nonLookupAttribs = { @@ -226,10 +230,12 @@ def store_entry_into_database(date, place, tripcave, title, text, trippersons, a lookupAttribs = {"date": date, "title": title} lbo = LogbookEntry.objects.create(**nonLookupAttribs, **lookupAttribs) + pt_list = [] for tripperson, time_underground in trippersons: - lookupAttribs = {"personexpedition": tripperson, "logbook_entry": lbo} + lookupAttribs = {"personexpedition": tripperson, "logbook_entry": lbo} # lbo is primary key nonLookupAttribs = {"time_underground": time_underground, "is_logbook_entry_author": (tripperson == author)} - pt = PersonLogEntry.objects.create(**nonLookupAttribs, **lookupAttribs) + pt_list.append(PersonLogEntry(**nonLookupAttribs, **lookupAttribs)) + PersonLogEntry.objects.bulk_create(pt_list) def parser_date(tripdate, year): """Interprets dates in the expo logbooks and returns a correct datetime.date object""" -- cgit v1.2.3