# -*- coding: utf-8 -*-
"""Parsers for the expedition logbooks.

Each year's logbook (wikitext or HTML, in several historical flavours) is
parsed into LogbookEntry and PersonTrip objects.  Parsed entries are also
pickled to a per-year .cache file so subsequent imports can skip re-parsing.

NOTE(review): this module was recovered from a whitespace-mangled copy in
which HTML fragments inside regex literals had been stripped.  The regex
literals marked "reconstructed" below were rebuilt from the surrounding
logic and should be confirmed against a known-good logbook file.
"""
import csv
import datetime
import os
import pickle
import re
import time

from django.conf import settings
from django.template.defaultfilters import slugify
from django.utils.timezone import get_current_timezone, make_aware

from troggle.core.models import DataIssue, Expedition
from troggle.core.models_caves import Cave, OtherCaveName, getCaveByReference, LogbookEntry, PersonTrip
from parsers.people import GetPersonExpeditionNameLookup
from utils import save_carefully

#
# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
# it can be checked up later from the hard-copy if necessary; or it's not possible to determine
# (name, trip place, etc)
#

#
# the logbook loading section
#
def GetTripPersons(trippeople, expedition, logtime_underground):
    """Resolve a free-text list of trip participants to PersonExpeditions.

    Returns (res, author) where res is a list of (personexpedition,
    logtime_underground) tuples and author is the entry's author, or
    (None, None) when nobody could be resolved.  An unresolved name is
    recorded as a DataIssue and appended as (None, tu) so the caller can
    see the slot.  If no name is explicitly marked as the author, the last
    resolved person is used.
    """
    res = []
    author = None
    round_bracket_regex = re.compile(r"[\(\[].*?[\)\]]")
    # Participants are separated by commas, "+", "&" / "&amp;" or " and ".
    # The negative lookahead avoids splitting inside other HTML entities.
    for tripperson in re.split(r",|\+|&amp;|&(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
        # An underlined name (<u>...</u>) marks the author of the entry.
        # reconstructed: the "(?i)" flag must be at the start of the
        # pattern (an error at other positions from Python 3.11).
        mul = re.match(r"(?i)<u>(.*?)</u>$", tripperson)
        if mul:
            tripperson = mul.group(1).strip()
        if tripperson and tripperson[0] != '*':
            # Strip parenthesised/bracketed qualifiers before lookup.
            tripperson = re.sub(round_bracket_regex, "", tripperson).strip()
            personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
            if not personyear:
                print((" - No name match for: '%s'" % tripperson))
                message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
                DataIssue.objects.create(parser='logbooks', message=message)
            res.append((personyear, logtime_underground))
            if mul:
                author = personyear
    if not author:
        if not res:
            return None, None
        author = res[-1][0]
    return res, author


def GetTripCave(place):
    """Return the Cave matching *place*, interactively disambiguating.

    Matches by kataster number (if *place* is numeric), official name,
    then OtherCaveName.  Returns None when nothing matches.
    Need to be fuzzier about matching here.  Already a very slow function.
    """
    katastNumRes = []
    try:
        katastNumRes = list(Cave.objects.filter(kataster_number=int(place)))
    except ValueError:
        # place is not a number; only name matching applies
        pass
    officialNameRes = list(Cave.objects.filter(official_name=place))
    tripCaveRes = officialNameRes + katastNumRes
    if len(tripCaveRes) == 1:
        return tripCaveRes[0]
    elif OtherCaveName.objects.filter(name=place):
        tripCaveRes = OtherCaveName.objects.filter(name__icontains=place)[0].cave
        return tripCaveRes
    elif len(tripCaveRes) > 1:
        print(("Ambiguous place " + str(place) + " entered. Choose from " + str(tripCaveRes)))
        # was eval(input(...)): arbitrary-code-execution risk; an index
        # is all that is needed here.
        correctIndex = int(input("type list index of correct cave"))
        return tripCaveRes[correctIndex]
    else:
        print(("No cave found for place ", place))
        return


# lookup function modelled on GetPersonExpeditionNameLookup
Gcavelookup = None
def GetCaveLookup():
    """Build (once) and return a dict mapping names/numbers to Caves.

    Keys are lower-cased official names plus kataster and unofficial
    numbers, with a couple of hand-added aliases.
    """
    global Gcavelookup
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}
    for cave in Cave.objects.all():
        Gcavelookup[cave.official_name.lower()] = cave
        if cave.kataster_number:
            Gcavelookup[cave.kataster_number] = cave
        if cave.unofficial_number:
            Gcavelookup[cave.unofficial_number] = cave
    # Aliases; NOTE(review): these KeyError if caves 258/234 are absent.
    Gcavelookup["tunnocks"] = Gcavelookup["258"]
    Gcavelookup["hauchhole"] = Gcavelookup["234"]
    return Gcavelookup


logentries = []  # the entire logbook is a single object: a list of entries
noncaveplaces = ["Journey", "Loser Plateau"]


def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground, entry_type="wiki"):
    """Save a logbook entry and its related persontrips.

    Also appends the raw entry tuple to the module-level `logentries`
    list, which LoadLogbookForExpedition later pickles as the year cache.
    Entries with no resolvable author are cached but not saved.
    """
    global logentries
    entrytuple = (date, place, title, text, trippeople, expedition, logtime_underground, entry_type)
    logentries.append(entrytuple)

    trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground)
    if not author:
        print((" - Skipping logentry: " + title + " - no author for entry"))
        message = "Skipping logentry: %s - no author for entry in year '%s'" % (title, expedition.year)
        DataIssue.objects.create(parser='logbooks', message=message)
        return

    #tripCave = GetTripCave(place)
    lplace = place.lower()
    cave = None
    if lplace not in noncaveplaces:
        cave = GetCaveLookup().get(lplace)

    # Check for an existing copy of the current entry, and save
    expeditionday = expedition.get_expedition_day(date)
    lookupAttribs = {'date': date, 'title': title}
    nonLookupAttribs = {'place': place, 'text': text, 'expedition': expedition, 'cave': cave,
                        'slug': slugify(title)[:50], 'entry_type': entry_type}
    lbo, created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)

    for tripperson, time_underground in trippersons:
        lookupAttribs = {'personexpedition': tripperson, 'logbook_entry': lbo}
        nonLookupAttribs = {'time_underground': time_underground,
                            'is_logbook_entry_author': (tripperson == author)}
        save_carefully(PersonTrip, lookupAttribs, nonLookupAttribs)


def ParseDate(tripdate, year):
    """Interpret dates in the expo logbooks and return a datetime.date.

    Accepts ISO "yyyy-mm-dd" or the goofy "d/m/yy" (and variants) used in
    older logbooks; *year* (a string) supplies the century for 2-digit
    years and is asserted against explicit years.
    """
    mdatestandard = re.match(r"(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
    mdategoof = re.match(r"(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
    if mdatestandard:
        assert mdatestandard.group(1) == year, (tripdate, year)
        year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
    elif mdategoof:
        assert not mdategoof.group(3) or mdategoof.group(3) == year[:2], mdategoof.groups()
        yadd = int(year[:2]) * 100
        day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
    else:
        assert False, tripdate
    return datetime.date(year, month, day)


# 2006, 2008 - 2009
def Parselogwikitxt(year, expedition, txt):
    """Parse a wikitext-style logbook: ===date|place|people=== headers."""
    trippara = re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt)
    for triphead, triptext in trippara:
        tripheadp = triphead.split("|")
        assert len(tripheadp) == 3, (tripheadp, triptext)
        tripdate, tripplace, trippeople = tripheadp
        tripsplace = tripplace.split(" - ")
        tripcave = tripsplace[0].strip()

        # Time underground, e.g. "T/U: 3 hrs" or "TU unknown"
        tul = re.findall(r"T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
        if tul:
            tu = tul[0][0]
        else:
            tu = ""
        #assert tripcave == "Journey", (triphead, triptext)

        ldate = ParseDate(tripdate.strip(), year)
        EnterLogIntoDbase(date=ldate, place=tripcave, title=tripplace, text=triptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0)


# 2002, 2004, 2005, 2007, 2010 - 2018
def Parseloghtmltxt(year, expedition, txt):
    """Parse the modern HTML logbook format (<div class="tripdate"> etc.)."""
    #print(" - Starting log html parser")
    # reconstructed: trips are separated by <hr/> rules
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    for trippara in tripparas:
        logbook_entry_count += 1
        # reconstructed from the 7-name unpack below; verbose regex with
        # one group per field of the entry header
        s = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)?  # second date
                        \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
                        \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
                        \s*<div\s+class="trippeople">\s*(.*?)</div>
                        \s*<div\s+class="triptitle">\s*(.*?)</div>
                        ([\s\S]*?)
                        \s*(?:<div\s+class="timeug">\s*(.*?)</div>)?
                        \s*$
                     ''', trippara)
        if not s:
            if not re.search(r"Rigging Guide", trippara):
                print(("can't parse: ", trippara))  # this is 2007 which needs editing
            #assert s, trippara
            continue
        tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
        ldate = ParseDate(tripdate.strip(), year)
        #assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
        triptitles = triptitle.split(" - ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        # reconstructed: strip paragraph markup, fold line breaks,
        # turn paragraphs into blank-line separators
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
    if logbook_entry_count == 0:
        print(" - No trip entries found in logbook, check the syntax matches htmltxt format")


# main parser for 1991 - 2001.  simpler because the data has been hacked so much to fit it
def Parseloghtml01(year, expedition, txt):
    """Parse the older hand-hacked HTML logbook format (header|title|people)."""
    # reconstructed: trips separated by <hr> rules
    tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        # reconstructed; flags moved to the pattern start (Python >= 3.11)
        s = re.match(r"(?si)\s*(?:<p>)?(.*?)</?p>(.*)$", trippara)
        assert s, trippara[:300]
        tripheader, triptext = s.group(1), s.group(2)
        mtripid = re.search(r'<a id="(.*?)"', tripheader)
        # strip anchor/bold/span markup out of the header before splitting
        tripheader = re.sub(r"</?(?:[ab]|span)[^>]*>", "", tripheader)
        tripdate, triptitle, trippeople = tripheader.split("|")
        ldate = ParseDate(tripdate.strip(), year)

        # time-underground lives in a trailing paragraph of the text
        mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
        if mtu:
            tu = mtu.group(1)
            triptext = triptext[:mtu.start(0)] + triptext[mtu.end():]
        else:
            tu = ""

        triptitles = triptitle.split(" - ")
        tripcave = triptitles[0].strip()

        ltriptext = triptext
        # reconstructed: trim trailing link/whitespace/markup junk
        mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&amp;|</?p>|\((?:same day|\d+)\))*$', ltriptext)
        if mtail:
            ltriptext = ltriptext[:mtail.start(0)]
        ltriptext = re.sub(r"</p>", "", ltriptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>|<br>", "\n\n", ltriptext).strip()
        #ltriptext = re.sub("[^\s0-9a-zA-Z\-.,:;'!]", "NONASCII", ltriptext)
        # reconstructed: map residual inline markup to wiki-ish markers
        ltriptext = re.sub(r"</?u>", "_", ltriptext)
        ltriptext = re.sub(r"</?i>", "''", ltriptext)
        ltriptext = re.sub(r"</?b>", "'''", ltriptext)

        # could include the tripid (url link for cross referencing)
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")


# parser for 2003
def Parseloghtml03(year, expedition, txt):
    """Parse the 2003 HTML logbook format (date -- title -- people headers)."""
    # reconstructed: trips separated by <hr/> rules
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        # reconstructed: first <p> is the header, rest is the body
        s = re.match(r"(?s)\s*<p>(.*?)</p>(.*)$", trippara)
        assert s, trippara
        tripheader, triptext = s.group(1), s.group(2)
        # reconstructed: normalise &nbsp; and whitespace in the header
        tripheader = re.sub(r"&nbsp;", " ", tripheader)
        tripheader = re.sub(r"\s+", " ", tripheader).strip()
        sheader = tripheader.split(" -- ")
        tu = ""
        if re.match("T/U|Time underwater", sheader[-1]):
            tu = sheader.pop()
        if len(sheader) != 3:
            print(("header not three pieces", sheader))
        tripdate, triptitle, trippeople = sheader
        ldate = ParseDate(tripdate.strip(), year)
        triptitles = triptitle.split(" , ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        ltriptext = re.sub(r"[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")


def SetDatesFromLogbookEntries(expedition):
    """Link each person's trips into a prev/next chain ordered by date.

    (The docstring of the original spoke of setting expedition date_from/
    date_to, but the code only builds the persontrip_prev/persontrip_next
    sequencing.)
    """
    for personexpedition in expedition.personexpedition_set.all():
        persontrips = personexpedition.persontrip_set.order_by('logbook_entry__date')
        # sequencing is difficult to do
        lprevpersontrip = None
        for persontrip in persontrips:
            persontrip.persontrip_prev = lprevpersontrip
            if lprevpersontrip:
                lprevpersontrip.persontrip_next = persontrip
                lprevpersontrip.save()
            persontrip.persontrip_next = None
            lprevpersontrip = persontrip
            persontrip.save()


def LoadLogbookForExpedition(expedition):
    """Parse all logbook entries for one expedition.

    Uses a pickled .cache file beside the logbook when it is fresh
    (newer than the logbook and less than 30 days old); otherwise the
    year's configured parser is run and the cache rewritten.
    """
    global logentries
    expowebbase = os.path.join(settings.EXPOWEB, "years")
    yearlinks = settings.LOGBOOK_PARSER_SETTINGS

    logbook_parseable = False
    logbook_cached = False
    if expedition.year in yearlinks:
        year_settings = yearlinks[expedition.year]
        try:
            bad_cache = False
            cache_filename = os.path.join(expowebbase, year_settings[0]) + ".cache"
            now = time.time()
            cache_t = os.path.getmtime(cache_filename)
            file_t = os.path.getmtime(os.path.join(expowebbase, year_settings[0]))
            if file_t - cache_t > 2:  # logbook modified at least 2 secs after cache
                bad_cache = True
            if now - cache_t > 30 * 24 * 60 * 60:  # cache more than 30 days old
                bad_cache = True
            if bad_cache:
                print(" - Cache is either stale or more than 30 days old. Deleting it.")
                os.remove(cache_filename)
                logentries = []
                # was a bare `raise` with no active exception (RuntimeError
                # by accident); raise explicitly to reach the parse branch
                raise IOError("stale cache")
            print((" - Reading cache: " + cache_filename))
            try:
                with open(cache_filename, "rb") as f:
                    logentries = pickle.load(f)
                print(" - Loaded ", len(logentries), " objects")
                logbook_cached = True
            except Exception:
                print(" - Failed to load corrupt cache. Deleting it.\n")
                os.remove(cache_filename)
                logentries = []
                raise
        except Exception:
            # No usable cache: read and parse the logbook itself.
            print(" - Opening logbook: ")
            with open(os.path.join(expowebbase, year_settings[0]), 'rb') as file_in:
                txt = file_in.read().decode("latin1")
            parsefunc = year_settings[1]
            logbook_parseable = True
            print((" - Parsing logbook: " + year_settings[0] +
                   "\n - Using parser: " + year_settings[1]))

        if logbook_parseable:
            parser = globals()[parsefunc]
            parser(expedition.year, expedition, txt)
            SetDatesFromLogbookEntries(expedition)
            # the parser has also stored all the objects in logentries[]
            print(" - Storing ", len(logentries), " log entries")
            cache_filename = os.path.join(expowebbase, year_settings[0]) + ".cache"
            with open(cache_filename, "wb") as f:
                pickle.dump(logentries, f, 2)
            logentries = []  # flush for next year

        if logbook_cached:
            # was a manual index over range(len(...)); iterate directly
            for entrytuple in logentries:
                date, place, title, text, trippeople, expedition, \
                    logtime_underground, entry_type = entrytuple
                EnterLogIntoDbase(date, place, title, text, trippeople, expedition,
                                  logtime_underground, entry_type)
    else:
        try:
            with open(os.path.join(expowebbase, expedition.year,
                                   settings.DEFAULT_LOGBOOK_FILE), 'rb') as file_in:
                txt = file_in.read().decode("latin1")
            logbook_parseable = True
            print("No set parser found using default")
            parsefunc = settings.DEFAULT_LOGBOOK_PARSER
        except IOError:
            logbook_parseable = False
            print(("Couldn't open default logbook file and nothing in settings for expo " + expedition.year))

    #return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(PersonTrip.objects.filter(personexpedition__expedition=expedition).count())


def LoadLogbooks():
    """Master function for parsing all logbooks into the Troggle database."""
    # Clear the logbook data issues as we are reloading
    DataIssue.objects.filter(parser='logbooks').delete()
    # Fetch all expos
    expos = Expedition.objects.all()
    for expo in expos:
        print(("\nLoading Logbook for: " + expo.year))
        # Load logbook for expo
        LoadLogbookForExpedition(expo)


# Regexes for the machine-generated "auto" logbook entry files.
# reconstructed: the <span class="..."> wrappers were stripped from the paste;
# group structure (incl. nameAuthorRegex's (author, name) pair) preserved.
dateRegex = re.compile(r'<span\s+class="date">(\d\d\d\d)-(\d\d)-(\d\d)</span>', re.S)
expeditionYearRegex = re.compile(r'<span\s+class="expeditionyear">(.*?)</span>', re.S)
titleRegex = re.compile(r'<H1>(.*?)</H1>', re.S)
reportRegex = re.compile(r'<div\s+class="report">(.*)</div>\s*</body>', re.S)
personRegex = re.compile(r'<div\s+class="person">(.*?)</div>', re.S)
nameAuthorRegex = re.compile(r'<span\s+class="name(,author|)">(.*?)</span>', re.S)
TURegex = re.compile(r'<span\s+class="TU">([0-9]*\.?[0-9]+)</span>', re.S)
locationRegex = re.compile(r'<span\s+class="location">(.*?)</span>', re.S)
caveRegex = re.compile(r'<span\s+class="cave">(.*?)</span>', re.S)


def parseAutoLogBookEntry(filename):
    """Parse one machine-generated logbook entry file and save it.

    Returns a list of error strings on failure (nothing saved), or None
    after saving the LogbookEntry and its PersonTrips.
    """
    errors = []
    with open(filename, "r") as f:
        contents = f.read()

    dateMatch = dateRegex.search(contents)
    if dateMatch:
        year, month, day = [int(x) for x in dateMatch.groups()]
        date = datetime.date(year, month, day)
    else:
        errors.append("Date could not be found")

    expeditionYearMatch = expeditionYearRegex.search(contents)
    if expeditionYearMatch:
        try:
            expedition = Expedition.objects.get(year=expeditionYearMatch.groups()[0])
            personExpeditionNameLookup = GetPersonExpeditionNameLookup(expedition)
        except Expedition.DoesNotExist:
            errors.append("Expedition not in database")
    else:
        errors.append("Expedition Year could not be parsed")

    titleMatch = titleRegex.search(contents)
    if titleMatch:
        title, = titleMatch.groups()
        if len(title) > settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH:
            errors.append("Title too long")
    else:
        errors.append("Title could not be found")

    caveMatch = caveRegex.search(contents)
    if caveMatch:
        caveRef, = caveMatch.groups()
        try:
            cave = getCaveByReference(caveRef)
        except AssertionError:
            cave = None
            errors.append("Cave not found in database")
    else:
        cave = None

    locationMatch = locationRegex.search(contents)
    if locationMatch:
        location, = locationMatch.groups()
    else:
        location = None

    if cave is None and location is None:
        errors.append("Location nor cave could not be found")

    reportMatch = reportRegex.search(contents)
    if reportMatch:
        report, = reportMatch.groups()
    else:
        errors.append("Contents could not be found")
    if errors:
        # Easiest to bail out at this point as we need to make sure that we
        # know which expedition to look for people from.
        return errors

    people = []
    for personMatch in personRegex.findall(contents):
        # fixed: these two searches ran against the whole file (`contents`),
        # so every person picked up the first name/TU in the file; search
        # the per-person block instead.
        nameAuthorMatch = nameAuthorRegex.search(personMatch)
        if nameAuthorMatch:
            author, name = nameAuthorMatch.groups()
            if name.lower() in personExpeditionNameLookup:
                personExpo = personExpeditionNameLookup[name.lower()]
            else:
                errors.append("Person could not be found in database")
            author = bool(author)
        else:
            errors.append("Persons name could not be found")

        TUMatch = TURegex.search(personMatch)
        if TUMatch:
            TU, = TUMatch.groups()
        else:
            errors.append("TU could not be found")
        if not errors:
            people.append((name, author, TU))
    if errors:
        return errors  # Bail out before committing to the database

    logbookEntry = LogbookEntry(date=date, expedition=expedition, title=title,
                                cave=cave, place=location, text=report,
                                slug=slugify(title)[:50], filename=filename)
    logbookEntry.save()
    for name, author, TU in people:
        PersonTrip(personexpedition=personExpo, time_underground=TU,
                   logbook_entry=logbookEntry, is_logbook_entry_author=author).save()
    print(logbookEntry)