refactored, faster deletion prior to loading

Philip Sargent 2023-01-27 17:41:10 +00:00
parent 2fee216e80
commit 6565b3f9c4
2 changed files with 8 additions and 4 deletions

View File

@@ -18,7 +18,7 @@ from troggle.core.models.caves import LogbookEntry, PersonTrip
 from troggle.core.models.survex import SurvexBlock, Wallet
 from troggle.core.models.troggle import Expedition, Person, PersonExpedition
 from troggle.core.utils import TROG
-from troggle.parsers.logbooks import LoadLogbookForExpedition
+from troggle.parsers.logbooks import parse_logbook_for_expedition
 from troggle.parsers.people import GetPersonExpeditionNameLookup
 from .auth import login_required_if_public

View File

@@ -480,6 +480,10 @@ def parser_blog(year, expedition, txt, sq=""):
         entrytuple = (tripdate, location, tripname, tripcontent, trippeople, expedition, tu, tid)
         logentries.append(entrytuple)
 
+def clean_all_logbooks():
+    DataIssue.objects.filter(parser="logbooks").delete()
+    LogbookEntry.objects.all().delete()
+
 def clean_logbook_for_expedition(expedition):
     def cleanerrors(year):
         dataissues = DataIssue.objects.filter(parser="logbooks")
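
Why this is faster: the new clean_all_logbooks() issues one bulk SQL DELETE per table, once, before a full reload, where the old path filtered and deleted per expedition on every pass. A minimal sketch of the contrast, assuming the Django models named in the diff (the DataIssue import path and the per-expedition filter field are guesses for illustration, not taken from the commit):

    from troggle.core.models.caves import LogbookEntry     # as in the diff's imports
    from troggle.core.models.troggle import DataIssue      # assumed path; not shown in the diff

    def clean_all_logbooks():
        # One bulk DELETE per table: a single query each, run once up front.
        DataIssue.objects.filter(parser="logbooks").delete()
        LogbookEntry.objects.all().delete()

    def clean_per_expedition(expos):
        # The older pattern: one filtered DELETE per expedition, i.e. one
        # round-trip per expo. Redundant when everything is reloaded anyway.
        for expo in expos:
            LogbookEntry.objects.filter(expedition=expo).delete()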
@@ -576,12 +580,12 @@ def parse_logbook_for_expedition(expedition):
 def LoadLogbook(year):
     """One off logbook for testing purposes"""
     global LOGBOOK_PARSER_SETTINGS
     nlbe = {}
     expo = Expedition.objects.get(year=year)
     year = expo.year  # some type funny
     clean_logbook_for_expedition(expo)
     nlbe[expo] = parse_logbook_for_expedition(expo)  # this actually loads the logbook for one expo
     if year in BLOG_PARSER_SETTINGS:
         print("BLOG parsing")
@@ -600,7 +604,7 @@ def LoadLogbooks():
     """
     global entries
-    DataIssue.objects.filter(parser="logbooks").delete()
+    clean_all_logbooks()
     expos = Expedition.objects.all()
     if len(expos) <= 1:
         message = " ! - No expeditions found. Load 'people' first"
@@ -648,7 +652,7 @@ def LoadLogbooks():
         orig = (DEFAULT_LOGBOOK_FILE, DEFAULT_LOGBOOK_PARSER)
         LOGBOOK_PARSER_SETTINGS[str(b)] = BLOG_PARSER_SETTINGS[str(b)]
         print(f" - BLOG: {b}")
-        nlbe[b] = parse_logbook_for_expedition(b, clean=False)  # this loads the blog logbook for one expo
+        nlbe[b] = parse_logbook_for_expedition(b)  # no clean. loads the blog logbook for one expo
         LOGBOOK_PARSER_SETTINGS[str(b)] = orig
     # tried to use map with concurrent threads - but sqlite database is not concurrent, so failed with database lock
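
The trailing comment records why the loop stays sequential: SQLite allows only one writer at a time, so parsing expeditions in parallel threads fails with a lock error. A sketch of roughly what the comment says was tried (not code from the commit; the executor usage is an assumption):

    from concurrent.futures import ThreadPoolExecutor

    def parallel_attempt(expos):
        # Each worker writes LogbookEntry rows; SQLite serialises writers,
        # so concurrent commits fail with "database is locked".
        with ThreadPoolExecutor() as pool:
            return dict(zip(expos, pool.map(parse_logbook_for_expedition, expos)))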