diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index 485f993..4d26448 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -213,6 +213,10 @@ def tidy_tid(tid, title):
 
 def store_entry_into_database(date, place, tripcave, title, text, trippersons, author, expedition, logtime_underground, tid):
     """saves a single logbook entry and related personlogentry items
+
+    We could do a bulk update to save all the entries, but then we would need to do a query on
+    each one to get the primary key to assign to the PersonLogEntries. So overall probably not much
+    faster?
     """
 
     nonLookupAttribs = {
@@ -226,10 +230,12 @@ def store_entry_into_database(date, place, tripcave, title, text, trippersons, a
     lookupAttribs = {"date": date, "title": title}
 
     lbo = LogbookEntry.objects.create(**nonLookupAttribs, **lookupAttribs)
+    pt_list = []
     for tripperson, time_underground in trippersons:
-        lookupAttribs = {"personexpedition": tripperson, "logbook_entry": lbo}
+        lookupAttribs = {"personexpedition": tripperson, "logbook_entry": lbo}  # lbo is primary key
         nonLookupAttribs = {"time_underground": time_underground, "is_logbook_entry_author": (tripperson == author)}
-        pt = PersonLogEntry.objects.create(**nonLookupAttribs, **lookupAttribs)
+        pt_list.append(PersonLogEntry(**nonLookupAttribs, **lookupAttribs))
+    PersonLogEntry.objects.bulk_create(pt_list)
 
 def parser_date(tripdate, year):
     """Interprets dates in the expo logbooks and returns a correct datetime.date object"""