mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-22 07:11:52 +00:00

refactored, faster deletion prior to loading

Philip Sargent 2023-01-27 17:41:10 +00:00
parent 2fee216e80
commit 6565b3f9c4
2 changed files with 8 additions and 4 deletions
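In outline, the refactor pulls logbook deletion out of the per-expedition parse step: a new clean_all_logbooks() does one bulk delete before a full reload, and clean_logbook_for_expedition() scopes the delete when reloading a single year. A minimal sketch of the resulting call pattern, abbreviated from the diff below (the real functions carry more error handling and parser-settings juggling):

def LoadLogbooks():
    clean_all_logbooks()                     # one bulk queryset delete up front
    for expo in Expedition.objects.all():
        parse_logbook_for_expedition(expo)   # parse only; no per-expedition clean step

def LoadLogbook(year):
    expo = Expedition.objects.get(year=year)
    clean_logbook_for_expedition(expo)       # delete just this expedition's entries
    parse_logbook_for_expedition(expo)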


@@ -18,7 +18,7 @@ from troggle.core.models.caves import LogbookEntry, PersonTrip
from troggle.core.models.survex import SurvexBlock, Wallet
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
from troggle.core.utils import TROG
-from troggle.parsers.logbooks import LoadLogbookForExpedition
+from troggle.parsers.logbooks import parse_logbook_for_expedition
from troggle.parsers.people import GetPersonExpeditionNameLookup
from .auth import login_required_if_public


@@ -480,6 +480,10 @@ def parser_blog(year, expedition, txt, sq=""):
entrytuple = (tripdate, location, tripname, tripcontent, trippeople, expedition, tu, tid)
logentries.append(entrytuple)
+def clean_all_logbooks():
+DataIssue.objects.filter(parser="logbooks").delete()
+LogbookEntry.objects.all().delete()
+def clean_logbook_for_expedition(expedition):
def cleanerrors(year):
dataissues = DataIssue.objects.filter(parser="logbooks")
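The hunk cuts off before the body of clean_logbook_for_expedition(); the cleanerrors(year) lines above are pre-existing context. A plausible shape for the new helper, assuming LogbookEntry has an expedition foreign key and that DataIssue messages mention the year (neither is confirmed by this hunk):

def clean_logbook_for_expedition(expedition):
    # Hypothetical sketch, not the verbatim body from this commit.
    def cleanerrors(year):
        # Drop only the logbook DataIssues that mention this year.
        dataissues = DataIssue.objects.filter(parser="logbooks")
        for di in dataissues:
            if str(year) in di.message:   # assumption: the year appears in the message text
                di.delete()
    LogbookEntry.objects.filter(expedition=expedition).delete()   # assumption: expedition FK on LogbookEntry
    cleanerrors(expedition.year)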
@@ -576,12 +580,12 @@ def parse_logbook_for_expedition(expedition):
def LoadLogbook(year):
"""One off logbook for testing purposes"""
global LOGBOOK_PARSER_SETTINGS
nlbe = {}
expo = Expedition.objects.get(year=year)
year = expo.year # some type funny
+clean_logbook_for_expedition(expo)
nlbe[expo] = parse_logbook_for_expedition(expo) # this actually loads the logbook for one expo
if year in BLOG_PARSER_SETTINGS:
print("BLOG parsing")
@@ -600,7 +604,7 @@ def LoadLogbooks():
"""
global entries
-DataIssue.objects.filter(parser="logbooks").delete()
+clean_all_logbooks()
expos = Expedition.objects.all()
if len(expos) <= 1:
message = " ! - No expeditions found. Load 'people' first"
@@ -648,7 +652,7 @@ def LoadLogbooks():
orig = (DEFAULT_LOGBOOK_FILE, DEFAULT_LOGBOOK_PARSER)
LOGBOOK_PARSER_SETTINGS[str(b)] = BLOG_PARSER_SETTINGS[str(b)]
print(f" - BLOG: {b}")
-nlbe[b] = parse_logbook_for_expedition(b, clean=False) # this loads the blog logbook for one expo
+nlbe[b] = parse_logbook_for_expedition(b) # no clean. loads the blog logbook for one expo
LOGBOOK_PARSER_SETTINGS[str(b)] = orig
# tried to use map with concurrent threads - but sqlite database is not concurrent, so failed with database lock