mirror of
https://expo.survex.com/repositories/troggle/.git
synced 2024-11-25 16:51:54 +00:00
487 lines
20 KiB
Python
487 lines
20 KiB
Python
# -*- coding: utf-8 -*-
|
|
import csv
|
|
import datetime
|
|
import os
|
|
import pickle
|
|
import re
|
|
import time
|
|
|
|
from django.conf import settings
|
|
from django.template.defaultfilters import slugify
|
|
from django.utils.timezone import get_current_timezone, make_aware
|
|
|
|
from troggle.core.models import DataIssue, Expedition
|
|
from troggle.core.models_caves import Cave, OtherCaveName, getCaveByReference, LogbookEntry, PersonTrip
|
|
from parsers.people import GetPersonExpeditionNameLookup
|
|
from utils import save_carefully
|
|
|
|
#
|
|
# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
|
|
# it can be checked up later from the hard-copy if necessary; or it's not possible to determine (name, trip place, etc)
|
|
#
|
|
|
|
#
|
|
# the logbook loading section
|
|
#
|
|
def GetTripPersons(trippeople, expedition, logtime_underground):
    """ Parse the people-list from a logbook trip header into
    (personexpedition, time_underground) pairs.

    Names are separated by ',', '+', '&' or ' and '. An underlined name
    (<u>...</u>) marks the author of the entry. A leading '*' marks a
    non-expedition guest, who is skipped.

    Returns (res, author): res is a list of (personexpedition,
    logtime_underground) tuples and author is the author's personexpedition
    (the underlined name, else the last person listed). Returns (None, None)
    when nobody could be parsed at all.
    """
    res = [ ]
    author = None
    # strips parenthesised/bracketed asides, e.g. "(briefly)" or "[observer]"
    round_bracket_regex = re.compile(r"[\(\[].*?[\)\]]")
    for tripperson in re.split(r",|\+|&|&(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
        # BUG FIX: inline flags must lead the pattern; a trailing "(?i)"
        # raises re.error from Python 3.11 onwards.
        mul = re.match(r"(?i)<u>(.*?)</u>$", tripperson)
        if mul:
            tripperson = mul.group(1).strip()
        if tripperson and tripperson[0] != '*':
            #assert tripperson in personyearmap, "'%s' << %s\n\n %s" % (tripperson, trippeople, personyearmap)
            tripperson = re.sub(round_bracket_regex, "", tripperson).strip()
            personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
            if not personyear:
                print((" - No name match for: '%s'" % tripperson))
                message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
                DataIssue.objects.create(parser='logbooks', message=message)
            res.append((personyear, logtime_underground))
            if mul:
                author = personyear
    if not author:
        if not res:
            return None, None
        # convention: when no name is underlined, the last person listed
        # is taken to be the author
        author = res[-1][0]
    return res, author
|
|
|
|
def GetTripCave(place):
    """ Resolve a logbook place-name to a single Cave object.

    Matches on kataster number (if the place is numeric) and official name;
    falls back to OtherCaveName aliases. When several caves match, the user
    is asked interactively to choose one. Returns None when nothing matches.
    """
    #need to be fuzzier about matching here. Already a very slow function...
    # print "Getting cave for " , place
    try:
        katastNumRes=[]
        katastNumRes=list(Cave.objects.filter(kataster_number=int(place)))
    except ValueError:
        # place is not a number, so it cannot be a kataster number
        pass
    officialNameRes=list(Cave.objects.filter(official_name=place))
    tripCaveRes=officialNameRes+katastNumRes

    if len(tripCaveRes)==1:
        # print "Place " , place , "entered as" , tripCaveRes[0]
        return tripCaveRes[0]

    elif OtherCaveName.objects.filter(name=place):
        tripCaveRes=OtherCaveName.objects.filter(name__icontains=place)[0].cave
        # print "Place " , place , "entered as" , tripCaveRes
        return tripCaveRes

    elif len(tripCaveRes)>1:
        print(("Ambiguous place " + str(place) + " entered. Choose from " + str(tripCaveRes)))
        # BUG FIX: was eval(input(...)) - arbitrary code execution on typed
        # input; an integer index is all that is needed here.
        correctIndex=int(input("type list index of correct cave"))
        return tripCaveRes[correctIndex]
    else:
        print(("No cave found for place " , place))
        return
|
|
|
|
# lookup function modelled on GetPersonExpeditionNameLookup
Gcavelookup = None  # module-level cache, built once per process by GetCaveLookup()

def GetCaveLookup():
    """ Return (building on first call) a dict mapping lowercased official
    names, kataster numbers and unofficial numbers to Cave objects,
    plus a few hand-entered nickname aliases. """
    global Gcavelookup
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER":None}
    for cave in Cave.objects.all():
        Gcavelookup[cave.official_name.lower()] = cave
        if cave.kataster_number:
            Gcavelookup[cave.kataster_number] = cave
        if cave.unofficial_number:
            Gcavelookup[cave.unofficial_number] = cave

    # nickname aliases; raises KeyError if caves 258/234 are missing from the db
    Gcavelookup["tunnocks"] = Gcavelookup["258"]
    Gcavelookup["hauchhole"] = Gcavelookup["234"]
    return Gcavelookup
|
|
|
|
|
|
logentries = [] # the entire logbook is a single object: a list of entries
noncaveplaces = [ "Journey", "Loser Plateau" ]  # trip "places" that are not caves, so no cave lookup is attempted
|
|
|
|
def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground, entry_type="wiki"):
    """ saves a logbook entry and related persontrips

    Also appends the raw entry tuple to the module-level logentries list so
    that the whole year's logbook can be pickled to a cache file later.
    Entries with no identifiable author are skipped (with a DataIssue).
    """
    global logentries

    entrytuple = (date, place, title, text,
                  trippeople, expedition, logtime_underground, entry_type)
    logentries.append(entrytuple)

    trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground)
    if not author:
        print(" * Skipping logentry: " + title + " - no author for entry")
        message = "Skipping logentry: %s - no author for entry in year '%s'" % (title, expedition.year)
        DataIssue.objects.create(parser='logbooks', message=message)
        return

    #tripCave = GetTripCave(place)

    # BUG FIX: cave was previously left unbound when the place was a
    # non-cave place (e.g. "Journey"), causing a NameError below.
    cave = None
    lplace = place.lower()
    if lplace not in noncaveplaces:
        cave = GetCaveLookup().get(lplace)

    #Check for an existing copy of the current entry, and save
    expeditionday = expedition.get_expedition_day(date)
    lookupAttribs = {'date':date, 'title':title}
    nonLookupAttribs = {'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50], 'entry_type':entry_type}
    lbo, created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)

    for tripperson, time_underground in trippersons:
        lookupAttribs = {'personexpedition':tripperson, 'logbook_entry':lbo}
        nonLookupAttribs = {'time_underground':time_underground, 'is_logbook_entry_author':(tripperson == author)}
        save_carefully(PersonTrip, lookupAttribs, nonLookupAttribs)
|
|
|
|
def ParseDate(tripdate, year):
    """ Interprets dates in the expo logbooks and returns a correct datetime.date object

    Accepts ISO "YYYY-MM-DD" and sloppy "D/M/YY" or "D/M/YYYY" forms.
    `year` is the expedition year as a 4-digit string; it is used to
    sanity-check the date and to expand 2-digit years.
    Raises AssertionError on unparseable or inconsistent input.
    """
    mdatestandard = re.match(r"(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
    # BUG FIX: the month part was 0?(\d), which could never match
    # October-December ("x/10/..", "x/11/..", "x/12/.."); it now accepts
    # one or two digits with an optional leading zero.
    mdategoof = re.match(r"(\d\d?)/0?(\d\d?)/(20|19)?(\d\d)", tripdate)
    if mdatestandard:
        assert mdatestandard.group(1) == year, (tripdate, year)
        year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
    elif mdategoof:
        # if a century was written, it must agree with the expedition year
        assert not mdategoof.group(3) or mdategoof.group(3) == year[:2], mdategoof.groups()
        yadd = int(year[:2]) * 100  # century taken from the expedition year
        day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
    else:
        assert False, tripdate
    return datetime.date(year, month, day)
|
|
|
|
# 2006, 2008 - 2009
def Parselogwikitxt(year, expedition, txt):
    """ Parse a wiki-style logbook: each entry starts with a
    ===date|place|people=== header followed by free text. """
    for triphead, triptext in re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt):
        header_fields = triphead.split("|")
        assert len(header_fields) == 3, (header_fields, triptext)
        tripdate, tripplace, trippeople = header_fields
        # title convention is "<cave> - <rest>"; the cave is the first part
        tripcave = tripplace.split(" - ")[0].strip()

        # look for a time-underground annotation, e.g. "T/U: 4 hrs"
        tu_matches = re.findall(r"T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
        tu = tu_matches[0][0] if tu_matches else ""

        ldate = ParseDate(tripdate.strip(), year)
        EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
|
|
|
|
# 2002, 2004, 2005, 2007, 2010 - now
def Parseloghtmltxt(year, expedition, txt):
    """ Parse the standard html logbook format: entries are separated by
    <hr/>, each with tripdate / trippeople / triptitle divs, body text,
    and an optional timeug (time underground) div. """
    #print(" - Starting log html parser")
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    for trippara in tripparas:
        #print(" - HR detected - maybe a trip?")
        logbook_entry_count += 1

        # verbose-mode regex: optional duplicate date div, optional anchor,
        # then the tripdate, trippeople and triptitle divs, the entry body,
        # and an optional time-underground div
        s = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)? # second date
                        \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
                        \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
                        \s*<div\s+class="trippeople">\s*(.*?)</div>
                        \s*<div\s+class="triptitle">\s*(.*?)</div>
                        ([\s\S]*?)
                        \s*(?:<div\s+class="timeug">\s*(.*?)</div>)?
                        \s*$
                     ''', trippara)
        if not s:
            if not re.search(r"Rigging Guide", trippara):
                print(("can't parse: ", trippara)) # this is 2007 which needs editing
            continue
        tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
        ldate = ParseDate(tripdate.strip(), year)
        # title convention is "<cave> - <rest of title>"
        triptitles = triptitle.split(" - ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        # strip paragraph markup and collapse newlines for storage
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "</br></br>", ltriptext).strip()
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
|
|
|
|
# main parser for 1991 - 2001. simpler because the data has been hacked so much to fit it
def Parseloghtml01(year, expedition, txt):
    """ Parse the 1991-2001 html logbooks: <hr>-separated entries whose first
    paragraph is a "date|title|people" header, followed by the trip text and
    an optional T/U paragraph. """
    tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        # BUG FIX: inline flags must lead the pattern; the trailing "(?i)"
        # raises re.error from Python 3.11 onwards.
        s = re.match(r"(?si)\s*(?:<p>)?(.*?)</?p>(.*)$", trippara)
        assert s, trippara[:300]
        tripheader, triptext = s.group(1), s.group(2)
        mtripid = re.search(r'<a id="(.*?)"', tripheader)
        tripid = mtripid and mtripid.group(1) or ""
        # drop anchor/bold/span markup from the header before splitting it
        tripheader = re.sub(r"</?(?:[ab]|span)[^>]*>", "", tripheader)

        tripdate, triptitle, trippeople = tripheader.split("|")
        ldate = ParseDate(tripdate.strip(), year)

        # extract the optional time-underground paragraph and cut it out
        mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
        if mtu:
            tu = mtu.group(1)
            triptext = triptext[:mtu.start(0)] + triptext[mtu.end():]
        else:
            tu = ""

        triptitles = triptitle.split(" - ")
        tripcave = triptitles[0].strip()

        ltriptext = triptext

        # trim trailing links/separators, then convert html markup to wiki-ish text
        mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
        if mtail:
            ltriptext = ltriptext[:mtail.start(0)]
        ltriptext = re.sub(r"</p>", "", ltriptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>|<br>", "\n\n", ltriptext).strip()
        ltriptext = re.sub(r"</?u>", "_", ltriptext)
        ltriptext = re.sub(r"</?i>", "''", ltriptext)
        ltriptext = re.sub(r"</?b>", "'''", ltriptext)

        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
|
|
|
|
# parser for 2003
def Parseloghtml03(year, expedition, txt):
    """ Parse the 2003-format html logbook: <hr/>-separated entries whose
    first <p> is a "date -- title -- people" header, optionally ending with
    a "T/U ..." field. """
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        s = re.match("(?s)\s*<p>(.*?)</p>(.*)$", trippara)
        assert s, trippara
        tripheader, triptext = s.group(1), s.group(2)
        # normalise header whitespace before splitting on " -- "
        # NOTE(review): the first pattern below looks like a mangled
        # "&nbsp;" (non-breaking space) replacement - confirm against the
        # original source encoding.
        tripheader = re.sub(r" ", " ", tripheader)
        tripheader = re.sub(r"\s+", " ", tripheader).strip()
        sheader = tripheader.split(" -- ")
        tu = ""
        # an optional trailing piece holds the time underground
        if re.match("T/U|Time underwater", sheader[-1]):
            tu = sheader.pop()
        if len(sheader) != 3:
            print(("header not three pieces", sheader))
        tripdate, triptitle, trippeople = sheader
        ldate = ParseDate(tripdate.strip(), year)
        triptitles = triptitle.split(" , ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        # strip <p> markup, collapse newlines, and replace any non-ascii chars
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        ltriptext = re.sub(r"[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle,
                          text = ltriptext, trippeople=trippeople, expedition=expedition,
                          logtime_underground=0, entry_type="html")
|
|
|
|
|
|
def SetDatesFromLogbookEntries(expedition):
    """
    Sets the date_from and date_to field for an expedition based on persontrips.
    Then sets the expedition date_from and date_to based on the personexpeditions.

    NOTE(review): the visible body only chains each persontrip to its
    chronological predecessor/successor (persontrip_prev / persontrip_next);
    no date_from/date_to fields are assigned here - confirm the docstring
    against the model code.
    """
    for personexpedition in expedition.personexpedition_set.all():
        persontrips = personexpedition.persontrip_set.order_by('logbook_entry__date')
        # sequencing is difficult to do
        lprevpersontrip = None
        for persontrip in persontrips:
            persontrip.persontrip_prev = lprevpersontrip
            if lprevpersontrip:
                # back-patch the previous trip now that its successor is known
                lprevpersontrip.persontrip_next = persontrip
                lprevpersontrip.save()
            persontrip.persontrip_next = None
            lprevpersontrip = persontrip
            persontrip.save()
|
|
|
|
|
|
def LoadLogbookForExpedition(expedition):
    """ Parses all logbook entries for one expedition

    Uses a pickle cache file next to the logbook: if a fresh cache exists the
    entries are replayed from it; otherwise the year's parser function
    (from settings) is run on the logbook file and a new cache is written.
    NB: the bare raise/except pairs are used as control flow to fall through
    to the "no cache" branch.
    """
    global logentries
    logbook_parseable = False
    logbook_cached = False
    yearlinks = settings.LOGBOOK_PARSER_SETTINGS
    expologbase = os.path.join(settings.EXPOWEB, "years")

    # per-year override of logbook filename and parser function, else defaults
    if expedition.year in yearlinks:
        logbookfile = os.path.join(expologbase, yearlinks[expedition.year][0])
        parsefunc = yearlinks[expedition.year][1]
    else:
        logbookfile = os.path.join(expologbase, expedition.year, settings.DEFAULT_LOGBOOK_FILE)
        parsefunc = settings.DEFAULT_LOGBOOK_PARSER
    cache_filename = logbookfile + ".cache"

    try:
        # a cache is bad if older than the logbook (2s slack) or over 30 days old
        bad_cache = False
        now = time.time()
        cache_t = os.path.getmtime(cache_filename)
        if os.path.getmtime(logbookfile) - cache_t > 2: # at least 2 secs later
            bad_cache = True
        if now - cache_t > 30*24*60*60:
            bad_cache = True
        if bad_cache:
            print(" - ! Cache is either stale or more than 30 days old. Deleting it.")
            os.remove(cache_filename)
            logentries = []
            print(" ! Removed stale or corrupt cache file")
            raise  # jump to the outer except: re-parse from the logbook file
        print(" - Reading cache: " + cache_filename, end='')
        try:
            with open(cache_filename, "rb") as f:
                logentries = pickle.load(f)
            print(" -- Loaded ", len(logentries), " log entries")
            logbook_cached = True
        except:
            print("\n ! Failed to load corrupt cache. Deleting it.\n")
            os.remove(cache_filename)
            logentries = []
            raise  # fall through to re-parsing below
    except : # no cache found
        #print(" - No cache \"" + cache_filename +"\"")
        try:
            # logbooks are legacy latin1-encoded html/wiki text
            file_in = open(logbookfile,'rb')
            txt = file_in.read().decode("latin1")
            file_in.close()
            logbook_parseable = True
            print((" - Using: " + parsefunc + " to parse " + logbookfile))
        except (IOError):
            logbook_parseable = False
            print((" ! Couldn't open logbook " + logbookfile))

    if logbook_parseable:
        # look up the parser function (e.g. Parseloghtmltxt) by name
        parser = globals()[parsefunc]
        parser(expedition.year, expedition, txt)
        SetDatesFromLogbookEntries(expedition)
        # and this has also stored all the log entries in logentries[]
        if len(logentries) >0:
            print(" - Cacheing " , len(logentries), " log entries")
            with open(cache_filename, "wb") as fc:
                pickle.dump(logentries, fc, 2)
        else:
            print(" ! NO TRIP entries found in logbook, check the syntax.")

        logentries=[] # flush for next year

    if logbook_cached:
        # replay the cached tuples through EnterLogIntoDbase
        i=0
        for entrytuple in range(len(logentries)):
            date, place, title, text, trippeople, expedition, logtime_underground, \
                entry_type = logentries[i]
            EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground,\
                entry_type)
            i +=1
|
|
|
|
|
|
def LoadLogbooks():
    """ This is the master function for parsing all logbooks into the Troggle database.

    Clears previous logbook DataIssues, then loads the logbook for every
    expedition year that is known to have one.
    """
    DataIssue.objects.filter(parser='logbooks').delete()
    expos = Expedition.objects.all()
    # years with no (parseable) logbook. FIX: was a list with "1980"
    # duplicated; a set removes the duplicate and gives O(1) membership tests.
    nologbook = {"1976", "1977", "1978", "1979", "1980", "1981", "1983", "1984",
                 "1985", "1986", "1987", "1988", "1989", "1990"}
    for expo in expos:
        if expo.year not in nologbook:
            print((" - Logbook for: " + expo.year))
            LoadLogbookForExpedition(expo)
|
|
|
|
|
|
# Regexes for the machine-generated "autologbook" html files parsed by
# parseAutoLogBookEntry() below; each extracts one tagged span/div.
dateRegex = re.compile(r'<span\s+class="date">(\d\d\d\d)-(\d\d)-(\d\d)</span>', re.S)
expeditionYearRegex = re.compile(r'<span\s+class="expeditionyear">(.*?)</span>', re.S)
titleRegex = re.compile(r'<H1>(.*?)</H1>', re.S)
reportRegex = re.compile(r'<div\s+class="report">(.*)</div>\s*</body>', re.S)
personRegex = re.compile(r'<div\s+class="person">(.*?)</div>', re.S)
# group 1 is ",author" when the person authored the entry, else empty
nameAuthorRegex = re.compile(r'<span\s+class="name(,author|)">(.*?)</span>', re.S)
TURegex = re.compile(r'<span\s+class="TU">([0-9]*\.?[0-9]+)</span>', re.S)
locationRegex = re.compile(r'<span\s+class="location">(.*?)</span>', re.S)
caveRegex = re.compile(r'<span\s+class="cave">(.*?)</span>', re.S)
|
|
|
|
def parseAutoLogBookEntry(filename):
    """ Parse one machine-generated "autologbook" html file and store it as a
    LogbookEntry with one PersonTrip per person div.

    Returns a list of error strings if anything could not be parsed or
    matched; nothing is written to the database unless the file parses
    cleanly.
    """
    errors = []
    # FIX: use a context manager so the file handle is closed on any error
    with open(filename, "r") as f:
        contents = f.read()

    dateMatch = dateRegex.search(contents)
    if dateMatch:
        year, month, day = [int(x) for x in dateMatch.groups()]
        date = datetime.date(year, month, day)
    else:
        errors.append(" - Date could not be found")

    expeditionYearMatch = expeditionYearRegex.search(contents)
    if expeditionYearMatch:
        try:
            expedition = Expedition.objects.get(year = expeditionYearMatch.groups()[0])
            personExpeditionNameLookup = GetPersonExpeditionNameLookup(expedition)
        except Expedition.DoesNotExist:
            errors.append(" - Expedition not in database")
    else:
        errors.append(" - Expedition Year could not be parsed")

    titleMatch = titleRegex.search(contents)
    if titleMatch:
        title, = titleMatch.groups()
        if len(title) > settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH:
            errors.append(" - Title too long")
    else:
        errors.append(" - Title could not be found")

    caveMatch = caveRegex.search(contents)
    if caveMatch:
        caveRef, = caveMatch.groups()
        try:
            cave = getCaveByReference(caveRef)
        except AssertionError:
            cave = None
            errors.append(" - Cave not found in database")
    else:
        cave = None

    locationMatch = locationRegex.search(contents)
    if locationMatch:
        location, = locationMatch.groups()
    else:
        location = None

    # at least one of cave / free-text location is required
    if cave is None and location is None:
        errors.append(" - Neither a location nor a cave could be found")

    reportMatch = reportRegex.search(contents)
    if reportMatch:
        report, = reportMatch.groups()
    else:
        errors.append(" - Contents could not be found")
    if errors:
        return errors # Easiest to bail out at this point as we need to make sure that we know which expedition to look for people from.
    people = []
    for personMatch in personRegex.findall(contents):
        # BUG FIX: previously searched the whole file ("contents"), so every
        # person div yielded the FIRST person's name and T/U; search the
        # person div's own text instead.
        nameAuthorMatch = nameAuthorRegex.search(personMatch)
        if nameAuthorMatch:
            author, name = nameAuthorMatch.groups()
            if name.lower() in personExpeditionNameLookup:
                personExpo = personExpeditionNameLookup[name.lower()]
            else:
                errors.append(" - Person could not be found in database")
            author = bool(author)  # ",author" marker present -> True
        else:
            errors.append(" - Persons name could not be found")

        TUMatch = TURegex.search(personMatch)
        if TUMatch:
            TU, = TUMatch.groups()
        else:
            errors.append(" - TU could not be found")
        if not errors:
            # BUG FIX: store each person's own personexpedition; previously
            # the save loop below reused whatever personExpo was bound last.
            people.append((personExpo, author, TU))
    if errors:
        return errors # Bail out before committing to the database
    logbookEntry = LogbookEntry(date = date,
                                expedition = expedition,
                                title = title, cave = cave, place = location,
                                text = report, slug = slugify(title)[:50],
                                filename = filename)
    logbookEntry.save()
    for personExpo, author, TU in people:
        PersonTrip(personexpedition = personExpo,
                   time_underground = TU,
                   logbook_entry = logbookEntry,
                   is_logbook_entry_author = author).save()
    print(logbookEntry)