import csv
import os
import re
import pickle
import shelve
import time
from random import randint
from datetime import datetime, date
from pathlib import Path

from django.conf import settings
from django.template.defaultfilters import slugify
from django.utils.timezone import get_current_timezone, make_aware

from troggle.core.models.troggle import DataIssue, Expedition
from troggle.core.utils import TROG, save_carefully
from troggle.core.models.caves import Cave, LogbookEntry, PersonTrip, GetCaveLookup
from parsers.people import GetPersonExpeditionNameLookup

'''
Parses and imports logbooks in all their wonderful confusion.

The Objectstore stuff is an initial attempt to see how we can migrate away from the
Django database. The idea no longer seems sensible, given that we rely on the database
to do the multiuser bit.

# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
# it can be checked up later from the hard-copy if necessary; or it's not possible to determine
# (name, trip place, etc.)
'''
todo='''
- Use the .shelve.db cache for all logbooks, not just individually

- refactor everything with some urgency, esp. LoadLogbookForExpedition()

- profile the code to find the bad repetitive things, of which there are many

- far too many uses of Django field dereferencing to get values, which is SLOW

- Logbooks 1987, 1988, 1989 all crash on MySql - but not sqlite - with a db constraint
  failure. Edit the logbooks to fix.

- import/parse/re-export-as-html the 'artisanal-format' old logbooks so that we keep
  only a modern HTML5 format. Then we can retire the old parsers and reduce the
  volume of code here substantially.

- rewrite to use generators rather than storing everything in intermediate lists,
  to reduce memory impact.

- We should ensure logbook.html is utf-8 and stop this crap:
      file_in = open(logbookfile,'rb')
      txt = file_in.read().decode("latin1")

- this is a slow and uncertain function:   cave = getCaveByReference(caveRef)

- the object store will need additional functions to replicate the persontrip
  calculation and storage. For the moment we leave all that to be done in the
  django db. Concurrent synchronisation would be nice..

- DB lock currently prevents multiple threads from loading logbooks. But asyncio might work..?

- Put the object store 'trips' and the 'logdataissues' into the TROG global object
'''

logentries = [] # the entire logbook for one year is a single object: a list of entries
noncaveplaces = [ "QMplaceholder", "Journey", "Loser Plateau", "UNKNOWN", 'plateau',
                  'base camp', 'basecamp', 'top camp', 'topcamp' ]
logdataissues = TROG['issues']['logdataissues']
trips = {}

entries = { "2022": 64, "2019": 44, "2018": 74, "2017": 60, "2016": 81, "2015": 79,
            "2014": 65, "2013": 51, "2012": 75, "2011": 68, "2010": 22, "2009": 52,
            "2008": 49, "2007": 111, "2006": 60, "2005": 55, "2004": 76, "2003": 40,
            "2002": 31, "2001": 48, "2000": 54, "1999": 79, "1998": 43, "1997": 53,
            "1996": 94, "1995": 41, "1994": 32, "1993": 41, "1992": 61, "1991": 38,
            "1990": 87, "1989": 1, "1988": 1, "1987": 1, "1985": 24, "1984": 32,
            "1983": 52, "1982": 42,}
# Logbooks log.htm exist for 1983, 84, 85, 87, 88, 89 but have no fully-working parser,
# or need hand-editing.
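# A minimal sketch (not called from anywhere) of how the shelve store written at the
# end of LoadLogbooks() could be read back for offline inspection. The filename matches
# the one used in LoadLogbooks(); the tuple layout mirrors what EnterLogIntoObjStore()
# puts in 'trips'. Illustrative only:
#
#   with shelve.open('logbktrips.shelve') as odb:
#       for tid in odb:
#           year, date, tripcave, triptitle, text, trippeople, tu = odb[tid]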
#
# the logbook loading section
#
def set_trip_id(year, seq):
    tid = f"{year}_s{seq:02d}"
    return tid

# <u>..</u> markup around a name in the trippeople list marks the entry's author
rx_tripperson = re.compile(r'(?i)<u>(.*?)</u>$')
rx_round_bracket = re.compile(r"[\(\[].*?[\)\]]")

def GetTripPersons(trippeople, expedition, logtime_underground, tid=None):
    res = [ ]
    author = None
    #print(f'# {tid}')
    for tripperson in re.split(r",|\+|&amp;|&(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
        # mul = re.match(r"(?i)<u>(.*?)</u>$", tripperson)
        mul = rx_tripperson.match(tripperson)
        if mul:
            tripperson = mul.group(1).strip()
        if tripperson and tripperson[0] != '*':
            tripperson = re.sub(rx_round_bracket, "", tripperson).strip()
            if tripperson == "Wiggy":
                tripperson = "Phil Wigglesworth"
            if tripperson == "Animal":
                tripperson = "Mike Richardson"
            if tripperson == "MikeTA":
                tripperson = "Mike Richardson"

            personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
            if not personyear:
                message = f" ! - {expedition.year} No name match for: '{tripperson}' in entry {tid=} for this expedition year."
                print(message)
                DataIssue.objects.create(parser='logbooks', message=message)
                logdataissues[tid]=message
            res.append((personyear, logtime_underground))
            if mul:
                author = personyear
    if not author:
        if not res:
            return None, None
        author = res[-1][0]
    return res, author

def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground, tid=None):
    """ Saves a logbook entry and related persontrips.
        Does NOT save the expeditionday_id - all NULLs. Why? Because we are deprecating expeditionday!

        troggle.log shows that we are creating lots of duplicates, which is not a problem
        with SQL as they just overwrite, but we are saving the same thing too many times..
        Also seen in the ObjStore mimic.
    """
    try:
        trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground, tid=tid)
    except:
        message = f" ! - {expedition.year} Skipping logentry: {title} - GetTripPersons FAIL"
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["title"]=message
        print(message)
        raise

    if not author:
        message = f" ! - {expedition.year} Warning: logentry: {title} - no author for entry '{tid}'"
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["title"]=message
        print(message)
        #return

    # This needs attention. The slug field is derived from 'title'.
    # Both GetCaveLookup() and GetTripCave() need to work together better. None of this data is *used* though?
    #tripCave = GetTripCave(place)
    lplace = place.lower()
    cave = None
    if lplace not in noncaveplaces:
        cave = GetCaveLookup().get(lplace)

    y = str(date)[:4]
    text = text.replace('src="', f'src="/years/{y}/' )
    text = text.replace("src='", f"src='/years/{y}/" )

    # Check for an existing copy of the current entry, and save
    expeditionday = expedition.get_expedition_day(date)
    lookupAttribs = {'date':date, 'title':title}
    # 'cave' is converted to a string doing this, which renders as the cave slug.
    # But it is a db query which we should try to avoid - rewrite this.

    # NEW slug for a logbook entry here! Unique id + slugified title fragment.
    # Working for all cache files 2019-2005; failed on 2004, but fine when parsing
    # the logbook and not reading the cache. Hmm.
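    # e.g. tid "1996_s05" + title "Kaninchenhöhle rigging" gives the slug
    # "1996_s05_kaninchenh" (illustrative values, not from a real logbook)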
    if tid is not None:
        slug = tid + "_" + slugify(title)[:10].replace('-','_')
    else:
        slug = str(randint(1000,10000)) + "_" + slugify(title)[:10].replace('-','_')
    nonLookupAttribs = {'place':place, 'text':text, 'expedition':expedition, 'cave_slug':str(cave), 'slug': slug}

    # This creates the lbo instance of LogbookEntry
    lbo, created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)

    for tripperson, time_underground in trippersons:
        lookupAttribs = {'personexpedition':tripperson, 'logbook_entry':lbo}
        nonLookupAttribs = {'time_underground':time_underground, 'is_logbook_entry_author':(tripperson == author)}
        # this creates the PersonTrip instance.
        save_carefully(PersonTrip, lookupAttribs, nonLookupAttribs) # PersonTrip also saved in SetDatesFromLogbookEntries

def ParseDate(tripdate, year):
    """ Interprets dates in the expo logbooks and returns a correct datetime.date object """
    dummydate = date(1970, 1, 1)
    month = 1
    day = 1
    # message = f" ! - Trying to parse date in logbook: {tripdate} - {year}"
    # print(message)
    try:
        mdatestandard = re.match(r"(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
        mdategoof = re.match(r"(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
        if mdatestandard:
            if not (mdatestandard.group(1) == year):
                message = f" ! - Bad date (year) in logbook: {tripdate} - {year}"
                DataIssue.objects.create(parser='logbooks', message=message)
                logdataissues["tripdate"]=message
                return dummydate
            else:
                year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
        elif mdategoof:
            if not (not mdategoof.group(3) or mdategoof.group(3) == year[:2]):
                message = " ! - Bad date mdategoof.group(3) in logbook: " + tripdate + " - " + mdategoof.group(3)
                DataIssue.objects.create(parser='logbooks', message=message)
                logdataissues["tripdate"]=message
                return dummydate
            else:
                yadd = int(year[:2]) * 100
                day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
        else:
            year = 1970
            message = f" ! - Bad date in logbook: {tripdate} - {year}"
            DataIssue.objects.create(parser='logbooks', message=message)
            logdataissues["tripdate"]=message
        return date(year, month, day)
    except:
        message = f" ! - Failed to parse date in logbook: {tripdate} - {year}"
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["tripdate"]=message
        return date(1970, 1, 1)

# (2006 - not any more), 2008 - 2009
def Parselogwikitxt(year, expedition, txt):
    global logentries
    global logdataissues
    logbook_entry_count = 0
    trippara = re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt)
    for triphead, triptext in trippara:
        logbook_entry_count += 1
        tid = set_trip_id(year, logbook_entry_count)

        tripheadp = triphead.split("|")
        if not (len(tripheadp) == 3):
            message = f" ! - Bad number of items in trip header in logbook: {triphead} - {tripheadp}"
            DataIssue.objects.create(parser='logbooks', message=message)
            logdataissues["tripdate"]=message

        tripdate, tripplace, trippeople = tripheadp
        tripsplace = tripplace.split(" - ")
        tripcave = tripsplace[0].strip()
        if len(tripsplace) == 1:
            tripsplace = tripsplace[0]
        else:
            tripsplace = tripsplace[1]
        #print(f"! LOGBOOK {year} {logbook_entry_count:2} {len(triptext):4} '{tripsplace}'")
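        # T/U ("time underground") appears in the entry body as e.g. "T/U: 3.5 hrs"
        # (illustrative); the regex below also accepts forms like "TU 2 hours" and the
        # literal word "unknown".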
        tul = re.findall(r"T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
        if tul:
            tu = tul[0][0]
        else:
            tu = ""
        ldate = ParseDate(tripdate.strip(), year)
        tripid = ""

        entrytuple = (ldate, tripcave, tripsplace, triptext,
                      trippeople, expedition, tu, "wiki", tripid)
        logentries.append(entrytuple)

        # EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople,
        #                   expedition=expedition, logtime_underground=0, tid=tid)
        # EnterLogIntoObjStore(year, ldate, tripcave, tripplace, triptext, trippeople,
        #                      tu, "wiki", tripid, logbook_entry_count, tid=tid)

def EnterLogIntoObjStore(year, date, tripcave, triptitle, text, trippeople, tu, tripid1, seq, tid=None):
    '''Called once for each logbook entry as the logbook is parsed
    '''
    # This will need additional functions to replicate the persontrip calculation and storage. For the
    # moment we leave all that to be done in the django db
    global trips # should be a singleton TROG eventually
    global logdataissues

    if tid in trips:
        tyear, tdate, *trest = trips[tid]
        msg = f" ! DUPLICATE tid: '{tid}' on date:{tdate} "
        print(msg)
        DataIssue.objects.create(parser='logbooks', message=msg)
        tid = set_trip_id(str(date), seq)
        #print(" - De-dup ", seq, tid)
        logdataissues[tid]=msg
    if not tid:
        tid = set_trip_id(str(date), seq)

    trips[tid] = (year, date, tripcave, triptitle, text, trippeople, tu)

    ## copy a lot of checking functionality here from EnterLogIntoDbase()
    # GetTripPersons is a db query, so this will need to be put in ObjStore before this will work..
    # or design a different way to do it.
    #trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground)
    # if not author:
    #     print(" ! - Skipping logentry: " + title + " - no RECOGNISED author for entry")
    #     message = " ! - Skipping logentry: %s - no author for entry in year '%s'" % (title, expedition.year)
    #     DataIssue.objects.create(parser='logbooks', message=message)
    #     logdataissues[tid+"author"]=message
    return

# 2002, 2004, 2005, 2007, 2010 - now
# 2006 wiki text is incomplete, but the html all there. So using this parser now.
def Parseloghtmltxt(year, expedition, txt):
    global logentries
    global logdataissues

    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    for trippara in tripparas:
        logbook_entry_count += 1
        tid = set_trip_id(year, logbook_entry_count)

        s = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)?  # second date
                        \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
                        \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
                        \s*<div\s+class="trippeople">\s*(.*?)</div>
                        \s*<div\s+class="triptitle">\s*(.*?)</div>
                        ([\s\S]*?)
                        \s*(?:<div\s+class="timeug">\s*(.*?)</div>)?
                        \s*$
                     ''', trippara)
        if not s:
            # allow title and people to be swapped in order
            msg = f" !- {year} Can't parse: {logbook_entry_count} '{trippara[:50]}'..."
            print(msg)
            DataIssue.objects.create(parser='logbooks', message=msg)
            logdataissues[tid]=msg

            s2 = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)?  # second date
                        \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
                        \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
                        \s*<div\s+class="triptitle">\s*(.*?)</div>
                        \s*<div\s+class="trippeople">\s*(.*?)</div>
                        ([\s\S]*?)
                        \s*(?:<div\s+class="timeug">\s*(.*?)</div>)?
                        \s*$
                     ''', trippara)
            if s2:
                tripid, tripid1, tripdate, triptitle, trippeople, triptext, tu = s2.groups()
            else:
                msg = f" !- Logbook. Can't parse entry on 2nd pass: {logbook_entry_count} '{trippara[:50]}'..."
                print(msg)
                DataIssue.objects.create(parser='logbooks', message=msg)
                logdataissues[tid]=msg
                continue
        else:
            tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()

        ldate = ParseDate(tripdate.strip(), year)
        triptitles = triptitle.split(" - ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()

        entrytuple = (ldate, tripcave, triptitle, ltriptext,
                      trippeople, expedition, tu, "html", tripid1)
        logentries.append(entrytuple)

        # EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext,
        #                   trippeople=trippeople, expedition=expedition, logtime_underground=0,
        #                   entry_type="html", tid=tid)
        # EnterLogIntoObjStore(year, ldate, tripcave, triptitle, ltriptext, trippeople, tu,
        #                      "html", tripid1, logbook_entry_count, tid=tid)

# main parser for 1991 - 2001. Simpler because the data has been hacked so much to fit it.
# Trying it out for years 1982 - 1990 too. Some logbook editing required by hand..
def Parseloghtml01(year, expedition, txt):
    global logentries
    global logdataissues
    errorcount = 0

    tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    for trippara in tripparas:
        logbook_entry_count += 1
        tid = set_trip_id(year, logbook_entry_count)
        try:
            s = re.match(r"(?s)\s*(?:<p>)?(.*?)</?p>(.*)$", trippara)
            if not s:
                message = f" ! - Skipping logentry {year} failure to parse header: {tid} {trippara[:300]}..."
                DataIssue.objects.create(parser='logbooks', message=message)
                logdataissues[tid]=message
                print(message)
                break
            tripheader, triptext = s.group(1), s.group(2)
            mtripid = re.search(r'<a id="(.*?)"', tripheader)
            tripid = mtripid and mtripid.group(1) or ""
            tripheader = re.sub(r"</?(?:[ab]|span)[^>]*>", " ", tripheader)

            tripdate, triptitle, trippeople = tripheader.split("|")
            ldate = ParseDate(tripdate.strip(), year)

            mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
            if mtu:
                tu = mtu.group(1)
                triptext = triptext[:mtu.start(0)] + triptext[mtu.end():]
            else:
                tu = ""

            triptitles = triptitle.split(" - ")
            tripcave = triptitles[0].strip()

            ltriptext = triptext

            mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&amp;|</?p>|\((?:same day|\d+)\))*$', ltriptext)
            if mtail:
                ltriptext = ltriptext[:mtail.start(0)]
            ltriptext = re.sub(r"</p>", "", ltriptext)
            ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
            ltriptext = re.sub(r"<p>|<br>", "\n\n", ltriptext).strip()
            ltriptext = re.sub(r"</?u>", "_", ltriptext)
            ltriptext = re.sub(r"</?i>", "''", ltriptext)
            ltriptext = re.sub(r"</?b>", "'''", ltriptext)
if ltriptext == "":
message = " ! - Zero content for logbook entry!: " + tid
DataIssue.objects.create(parser='logbooks', message=message)
logdataissues[tid]=message
print(message)
entrytuple = (ldate, tripcave, triptitle, ltriptext,
trippeople, expedition, tu, "html01", tid)
logentries.append(entrytuple)
            # try:
            #     EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
            #                       trippeople=trippeople, expedition=expedition, logtime_underground=0,
            #                       entry_type="html", tid=tid)
            # except:
            #     message = " ! - Enter log entry into database FAIL exception in: " + tid
            #     DataIssue.objects.create(parser='logbooks', message=message)
            #     logdataissues[tid]=message
            #     print(message)

            # try:
            #     EnterLogIntoObjStore(year, ldate, tripcave, triptitle, ltriptext, trippeople, tu,
            #                          "html01", tid, logbook_entry_count, tid=tid)
            # except:
            #     message = " ! - Enter log entry into ObjectStore FAIL exception in: " + tid
            #     DataIssue.objects.create(parser='logbooks', message=message)
            #     logdataissues[tid]=message
            #     print(message)
        except:
            message = f" ! - Skipping logentry {year} due to exception in: {tid}"
            DataIssue.objects.create(parser='logbooks', message=message)
            logdataissues[tid]=message
            print(message)
            errorcount += 1
            if errorcount > 5:
                message = f" !!- TOO MANY ERRORS - aborting at '{tid}' logbook: {year}"
                DataIssue.objects.create(parser='logbooks', message=message)
                logdataissues[tid]=message
                print(message)
                return
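# The 2003 logbook header is plain text separated by " -- ", roughly (illustrative,
# not a real entry):
#   2003-07-10 -- 204 Steinbruckenhoehle -- Earl, Martin -- T/U 6 hrs
# i.e. date -- title -- people, with an optional trailing T/U field.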
# parser for 2003
def Parseloghtml03(year, expedition, txt):
    global logentries
    global logdataissues

    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    for trippara in tripparas:
        logbook_entry_count += 1
        tid = set_trip_id(year, logbook_entry_count)

        s = re.match(r"(?s)\s*<p>(.*?)</p>(.*)$", trippara)
        if not s:
            message = f" ! - Skipping logentry {year} on failure to parse Parseloghtml03: {tid} {trippara[:300]}..."
            DataIssue.objects.create(parser='logbooks', message=message)
            logdataissues[tid]=message
            print(message)
            break
        tripheader, triptext = s.group(1), s.group(2)
        tripheader = re.sub(r"&nbsp;", " ", tripheader)
        tripheader = re.sub(r"\s+", " ", tripheader).strip()
        sheader = tripheader.split(" -- ")
        tu = ""
        if re.match("T/U|Time underwater", sheader[-1]):
            tu = sheader.pop()
        if len(sheader) != 3:
            print(" ! Header not three pieces", sheader)
        tripdate, triptitle, trippeople = sheader
        ldate = ParseDate(tripdate.strip(), year)
        triptitles = triptitle.split(" , ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        ltriptext = re.sub(r"[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)

        entrytuple = (ldate, tripcave, triptitle, ltriptext,
                      trippeople, expedition, tu, "html03", tid)
        logentries.append(entrytuple)

        # EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle,
        #                   text = ltriptext, trippeople=trippeople, expedition=expedition,
        #                   logtime_underground=0, entry_type="html", tid=tid)
        # EnterLogIntoObjStore(year, ldate, tripcave, triptitle, ltriptext, trippeople, tu,
        #                      "html03", tid, logbook_entry_count, tid=tid)


def SetDatesFromLogbookEntries(expedition):
    """ Sets the date_from and date_to field for an expedition based on persontrips.
        Then sets the expedition date_from and date_to based on the personexpeditions.
    """
    # Probably a faster way to do this. This uses a lot of db queries, but we have all this
    # in memory..
    for personexpedition in expedition.personexpedition_set.all():
        persontrips = personexpedition.persontrip_set.order_by('logbook_entry__date')
        # sequencing is difficult to do
        lprevpersontrip = None
        for persontrip in persontrips:
            persontrip.persontrip_prev = lprevpersontrip
            if lprevpersontrip:
                lprevpersontrip.persontrip_next = persontrip
                lprevpersontrip.save()
            persontrip.persontrip_next = None
            lprevpersontrip = persontrip
            #persontrip.save() # also saved in EnterLogIntoDbase. MERGE these to speed up import.


def LoadLogbookForExpedition(expedition):
    """ Parses all logbook entries for one expedition.
        If a cache is found it uses it. If not found, or if it fails sanity checks,
        it parses the source file.
    """
    # absolutely horrid. REFACTOR THIS (all my fault..)
    global logentries
    global logdataissues
    global entries

    logbook_parseable = False
    logbook_cached = False
    yearlinks = settings.LOGBOOK_PARSER_SETTINGS
    expologbase = os.path.join(settings.EXPOWEB, "years")
    logentries = []

    year = expedition.year
    expect = entries[year]
    # print(" - Logbook for: " + year)

    def validcache(year, n, lbsize):
        if year != expedition:
            print(" ! cache loading: year != expedition ", year, expedition)
            return False
        currentsize = logbookpath.stat().st_size
        if lbsize != currentsize:
            print(f" ! cache loading: Logbook size {lbsize} != {currentsize} ")
            return False
        if len(logentries) != n:
            print(" ! cache loading: len(logentries) != n ", len(logentries), n)
            return False
        if n != expect:
            print(" ! cache loading: n != expect ", n, expect)
            return False
        return True
    def cleanerrors(year):
        global logdataissues
        dataissues = DataIssue.objects.filter(parser='logbooks')
        for di in dataissues:
            ph = year
            if re.search(ph, di.message) is not None:
                #print(f' - CLEANING dataissue {di.message}')
                di.delete()

        #print(f' - CLEAN {year} {len(logdataissues)} {type(logdataissues)} data issues for this year')
        dellist = []
        for key, value in logdataissues.items():
            #print(f' - CLEANING logdataissues [{key}]: {value}')
            if key.startswith(year):
                #print(f' - CLEANING logdataissues [{key:12}]: {value} ')
                dellist.append(key)
        for i in dellist:
            del logdataissues[i]

    cleanerrors(year)

    if year in yearlinks:
        logbookpath = Path(expologbase) / year / yearlinks[year][0]
        expedition.logbookfile = yearlinks[year][0]
        parsefunc = yearlinks[year][1]
    else:
        logbookpath = Path(expologbase) / year / settings.DEFAULT_LOGBOOK_FILE
        expedition.logbookfile = settings.DEFAULT_LOGBOOK_FILE
        parsefunc = settings.DEFAULT_LOGBOOK_PARSER
    cache_filename = Path(str(logbookpath) + ".cache")
    if not cache_filename.is_file():
        print(" - Cache file does not exist \"" + str(cache_filename) + "\"")

    expedition.save()

    logbook_cached = False
    if True:  # enable cache system
        now = time.time()
        bad_cache = False # temporarily disable reading the cache - buggy
        try:
            cache_t = os.path.getmtime(cache_filename)
            if os.path.getmtime(logbookpath) - cache_t > 2: # at least 2 secs later
                print(" - ! Cache is older than the logbook file")
                bad_cache = True
            if now - cache_t > 30*24*60*60:
                print(" - ! Cache is > 30 days old")
                bad_cache = True
            if bad_cache:
                print(" - so cache is either stale or more than 30 days old. Deleting it.")
                os.remove(cache_filename)
                logentries = []
                print(" - Deleted stale or corrupt cache file")
                raise ValueError
            try:
                # print(" - Reading cache: " + str(cache_filename), end='')
                with open(cache_filename, "rb") as f:
                    year, lbsize, n, logentries = pickle.load(f)
                if validcache(year, n, lbsize):
                    print(f" -- {year} : Loaded {len(logentries)} log entries")
                    logbook_cached = True
                else:
                    print(f" !- {year} : Cache failed validity checks")
                    raise ValueError
            except:
                print(" ! Failed to load corrupt cache (or I was told to ignore it). Deleting it.")
                os.remove(cache_filename)
                logentries = []
                raise
        except:
            print(" - Cache old or de-pickle failure \"" + str(cache_filename) + "\"")
            try:
                file_in = open(logbookpath, 'rb')
                txt = file_in.read().decode("utf-8")
                file_in.close()
                logbook_parseable = True
            except IOError:
                logbook_parseable = False
                print(" ! Couldn't open logbook as UTF-8 " + str(logbookpath))

            if logbook_parseable:
                parser = globals()[parsefunc]
                print(f' - Using parser {parsefunc}')
                parser(year, expedition, txt) # this launches the right parser for this year

                print(" - Setting dates from logbook entries")
                SetDatesFromLogbookEntries(expedition)
                if len(logentries) > 0:
                    print(" - Caching ", len(logentries), " log entries")
                    lbsize = logbookpath.stat().st_size
                    with open(cache_filename, "wb") as fc: # we must check that permissions are g+w, or expo can't delete the cache
                        logbk = (expedition, lbsize, len(logentries), logentries)
                        pickle.dump(logbk, fc, protocol=4)
                else:
                    print(" ! NO TRIP entries found in logbook, check the syntax.")
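    # Each entrytuple carries (date, place, title, text, people, expedition,
    # time_underground, [entry_type,] tripid); the except clause below copes with
    # cached tuples that still contain the deprecated entry_type field.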
    i = 0
    for entrytuple in logentries:
        try:
            date, tripcave, triptitle, text, trippeople, expedition, logtime_underground, tripid1 = entrytuple
        except ValueError: # cope with removal of entry_type while it is still in cache files. Remove in Sept. 2022.
            date, tripcave, triptitle, text, trippeople, expedition, logtime_underground, entry_type, tripid1 = entrytuple
        EnterLogIntoDbase(date, tripcave, triptitle, text, trippeople, expedition, 0, tripid1)
        EnterLogIntoObjStore(year, date, tripcave, triptitle, text, trippeople, logtime_underground, tripid1, i)
        i += 1
    SetDatesFromLogbookEntries(expedition)

    if len(logentries) == expect:
        # print(f"OK  {year} {len(logentries):5d} is {expect}\n")
        pass
    else:
        print(f"BAD {year} {len(logentries):5d} is not {expect}\n")
    return len(logentries)


def LoadLogbooks():
    """ This is the master function for parsing all logbooks into the Troggle database.
        Parser settings appropriate for each year are set in settings.py LOGBOOK_PARSER_SETTINGS.
        This should be rewritten to use coroutines to load all logbooks from disc in parallel.
    """
    global logdataissues
    global entries

    logdataissues = {}
    DataIssue.objects.filter(parser='logbooks').delete()
    expos = Expedition.objects.all()
    if len(expos) <= 1:
        message = " ! - No expeditions found. Load 'people' first"
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["sqlfail 0000"] = message
        print(message)

    noexpo = ["1986", "2020", "2021",] # no expo
    lostlogbook = ["1976", "1977", "1978", "1979", "1980", "1981"]
    sqlfail = ["1987", "1988", "1989"] # breaks mysql with db constraint fail - debug locally first
    nologbook = noexpo + lostlogbook + sqlfail
    nlbe = {}
    expd = {}

    actuals = []
    for expo in expos: # pointless as we explicitly know the years in this code.
        year = expo.year
        TROG['pagecache']['expedition'][year] = None # clear cache
        if year in sqlfail:
            print(" - Logbook for: " + year + " NO parsing attempted - known sql failures")
            message = f" ! - Not even attempting to parse logbook for {year} until code fixed"
            DataIssue.objects.create(parser='logbooks', message=message)
            logdataissues[f"sqlfail {year}"] = message
            print(message)

        if year not in nologbook:
            if year in entries:
                actuals.append(expo)
            else:
                print(" - No Logbook yet for: " + year) # catch case when preparing for next expo

    for ex in actuals:
        nlbe[ex] = LoadLogbookForExpedition(ex) # this actually loads the logbook for one expo

    # tried to use map with concurrent threads - but the sqlite database is not
    # concurrent, so it failed with a database lock:
    # yt = 0
    # for r in map(LoadLogbookForExpedition, actuals):
    #     yt = r

    yt = 0
    for e in nlbe:
        yt += nlbe[e]
    print(f"total {yt:,} log entries parsed in all expeditions")
    if yt != len(trips):
        print(f"** total trips in ObjStore:{len(trips):,}")

    try:
        shelvfilenm = 'logbktrips.shelve' # ".db" is automatically appended after python 3.8
        with shelve.open(shelvfilenm, writeback=True) as odb:
            for lbe in trips:
                odb[lbe] = trips[lbe]
            odb.sync()
            odb.close()
    except:
        message = f" ! - Failed to store cached logbooks in '{shelvfilenm}.db' - Delete the old file and try again"
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["Shelve Fail"] = message
        print(message)

# dateRegex = re.compile(r'<span\s+class="date">(\d\d\d\d)-(\d\d)-(\d\d)</span>', re.S)
# expeditionYearRegex = re.compile(r'<span\s+class="expeditionyear">(.*?)</span>', re.S)
# titleRegex = re.compile(r'<H1>(.*?)</H1>', re.S)
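# Illustrative use from a Django shell (assumes a populated people/expedition table,
# since the person lookup in GetTripPersons() needs Expedition and PersonExpedition
# rows to already exist):
#
#   from parsers.logbooks import LoadLogbooks, LoadLogbookForExpedition
#   LoadLogbooks()                       # parse every year
#   e = Expedition.objects.get(year="1996")
#   LoadLogbookForExpedition(e)          # re-parse a single year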