import troggle.settings as settings
import troggle.core.models as models
from subprocess import call, Popen, PIPE
from troggle.parsers.people import GetPersonExpeditionNameLookup
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware
import re
import os
import time
from datetime import datetime, timedelta
import sys

"""A 'survex block' is a *begin...*end set of cave data.
A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet".
"""

# Matches a bare numeric survey-leg field (digits, sign, dot) to the end of string.
line_leg_regex = re.compile(r"[\d\-+.]+$")


def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    """Parse one data line of a survex block as a survey leg and save it.

    survexblock -- the SurvexBlock model instance this leg belongs to
    stardata    -- column-index mapping produced by the *data command
    sline       -- the survex line with any trailing comment stripped
    comment     -- the stripped comment text (unused here)
    cave        -- Cave model instance to attach to the leg, or None
    """
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()
    ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    if stardata["type"] == "normal":
        try:
            survexleg.tape = float(ls[stardata["tape"]])
        except ValueError:
            print("! Tape misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.tape = 1000  # sentinel: obviously-wrong length flags the bad leg
        try:
            lclino = ls[stardata["clino"]]
        except (KeyError, IndexError):
            print("! Clino misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            # Original code did `lclino = error` which is a NameError ('error' is
            # undefined). Fall back to a parseable value so the leg is still recorded;
            # the DataIssue above preserves the evidence of the bad line.
            lclino = "0.0"
        try:
            lcompass = ls[stardata["compass"]]
        except (KeyError, IndexError):
            print("! Compass misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            # Same NameError fix as for lclino above.
            lcompass = "0.0"

        if lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print("! Compass misread in", survexblock.survexfile.path)
                print("  Stardata:", stardata)
                print("  Line:", ls)
                message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.compass = 1000  # sentinel, as for tape above
            # NOTE(review): -90.0 for a "level" leg looks wrong (0.0 expected) but is
            # preserved from the original -- confirm intended behaviour before changing.
            survexleg.clino = -90.0
        else:
            assert line_leg_regex.match(lcompass), ls
            assert line_leg_regex.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave

    # only save proper legs
    survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
        except ValueError:
            print("! Length not added")
    survexblock.save()


def LoadSurvexEquate(survexblock, sline):
    """Register every station named on a *equate line with this block."""
    stations = sline.split()
    assert len(stations) > 1
    for station in stations:
        survexblock.MakeSurvexStation(station)


def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    """Passage (cross-section) data lines are recognised but not imported."""
    pass


stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(X)?\s*(\d+)')
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
# years from 1960 to 2039
# NB global inline flags like a trailing "(?i)" are an error from Python 3.11;
# the flags are now passed as re.IGNORECASE arguments instead.
regex_starref = re.compile(r'^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$', re.IGNORECASE)
# regex_starref = re.compile("""?x  # VERBOSE mode - can't get this to work
#     ^\s*\*ref        # look for *ref at start of line
#     [\s.:]*          # some spaces, stops or colons
#     ((?:19[6789]\d)|(?:20[0123]\d))   # a date from 1960 to 2039 - captured as one field
#     \s*#             # spaces then hash separator
#     ?\s*(X)          # optional X - captured
#     ?\s*(.*?\d+.*?)  # maybe a space, then at least one digit in the string - captured
#     $(?i)""", re.X)  # the end (do the whole thing case insensitively)
regex_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$", re.IGNORECASE)
regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$", re.IGNORECASE)
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')

# Indentation prefix for progress printing; grows by "> " per *include/*begin level.
insp = ""


def RecursiveLoad(survexblock, survexfile, fin, textlines):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads in the survex blocks, other data and the wallet references
    (survexscansfolder) as it goes.

    This part of the data import process is where the maximum memory is used and
    where it crashes on memory-constrained machines.
    """
    iblankbegins = 0
    stardata = stardatadefault
    teammembers = [ ]
    global insp

    # uncomment to print out all files during parsing
    print(insp+" - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave

    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        lineno += 1

        # break the line at the comment
        sline, comment = regex_comment.match(svxline.strip()).groups()

        # detect ref line pointing to the scans directory
        mref = comment and regex_ref.match(comment)
        if mref:
            yr, letterx, wallet = mref.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet )
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
                survexblock.save()
            else:
                message = ' ! Wallet ; ref - %s - NOT found in survexscansfolders %s-%s-%s' % (refscan,yr,letterx,wallet)
                print(insp+message)
                models.DataIssue.objects.create(parser='survex', message=message)

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match regex_qm
        # ;Serial number  grade(A/B/C/D/X)  nearest-station  resolution-station  description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and regex_qm.match(comment)
        if qmline:
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)
            # If the QM isn't resolved (has a resolving station) then load it.
            # NOTE(review): the original used identity tests (`is not '-'`) and this
            # condition is a tautology, so every QM is loaded regardless of its
            # resolution state.  The `is` misuse is fixed but the always-true
            # behaviour is preserved deliberately; tighten once the intended rule
            # is confirmed.
            if not qm_resolve_section or qm_resolve_section != '-' or qm_resolve_section != 'None':
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunck, named)
                if len(from_section) > 0:
                    from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        qm = models.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                # QM found but resolved
                pass

        if not sline:
            continue

        # detect the star ref command
        mstar = regex_starref.match(sline)
        if mstar:
            yr,letterx,wallet = mstar.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr
            assert (int(wallet)<100), "Wallet number more than 100: %s" % wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet)
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet *REF - %s - NOT found in survexscansfolders %s-%s-%s' % (refscan,yr,letterx,wallet)
                print(insp+message)
                models.DataIssue.objects.create(parser='survex', message=message)
            continue

        # detect the star command
        mstar = regex_star.match(sline)
        if not mstar:
            # Not a star command, so must be a data line for the current *data format.
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            # Missing "station" in stardata.
            continue

        cmd, line = mstar.groups()
        cmd = cmd.lower()
        # cmd is already lower-cased, so plain equality replaces the original
        # re.match("...$(?i)", cmd) calls (trailing inline flags break on 3.11+).
        if cmd == "include":
            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
            print(insp+' - Include path found including - ' + includepath)
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                cave = models.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print(insp+' - No match in DB (i) for %s, so loading..' % includepath)
            includesurvexfile = models.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                insp += "> "
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)
                insp = insp[2:]
        elif cmd == "begin":
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    cave = models.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print(insp+' - No match (b) for %s' % newsvxpath)
                name = line.lower()
                print(insp+' - Begin found for: ' + name)
                survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(),
                                                    parent=survexblock,
                                                    survexpath=survexblock.survexpath+"."+name,
                                                    cave=survexfile.cave, survexfile=survexfile,
                                                    totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                textlinesdown = [ ]
                insp += "> "
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
                insp = insp[2:]
            else:
                # anonymous *begin: just track nesting so the matching *end is ignored
                iblankbegins += 1
        elif cmd == "end":
            if iblankbegins:
                iblankbegins -= 1
            else:
                survexblock.text = "".join(textlines)
                survexblock.save()
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                # print(insp+' - Time to process: ' + str(timetaken))
                return
        elif cmd == "date":
            if len(line) == 10:
                # only full YYYY.MM.DD / YYYY-MM-DD dates are handled
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()
        elif cmd == "team":
            mteammember = regex_team.match(line)
            if mteammember:
                for tm in regex_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models.SurvexPersonRole(survexblock=survexblock,
                                                                 nrole=mteammember.group(1).lower(),
                                                                 personexpedition=personexpedition,
                                                                 personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person=personexpedition.person
                            personrole.save()
        elif cmd == "title":
            survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()
        elif cmd == "require":
            # should we check survex version available for processing?
            pass
        elif cmd == "data":
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            # i - 1 because ls[0] is the style name, so field columns start at 0
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line
        elif cmd == "equate":
            LoadSurvexEquate(survexblock, line)
        elif cmd == "fix":
            survexblock.MakeSurvexStation(line.split()[0])
        else:
            # any unrecognised command that is not in the known-but-ignored list
            if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
                           "calibrate", "set", "infer", "alias", "cs", "declination", "case"]:
                message = "! Bad svx command in line:%s %s %s %s" % (cmd, line, survexblock, survexblock.survexfile.path)
                print(insp+message)
                models.DataIssue.objects.create(parser='survex', message=message)

    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(insp+' - Time to process: ' + str(timetaken))


def LoadAllSurvexBlocks():
    """Flush all survex-derived tables and re-import everything from the top
    survex file (settings.SURVEX_TOPNAME), logging stdout to loadsurvexblks.log."""
    print(' - Flushing All Survex Blocks...')
    models.SurvexBlock.objects.all().delete()
    models.SurvexFile.objects.all().delete()
    models.SurvexDirectory.objects.all().delete()
    models.SurvexEquate.objects.all().delete()
    models.SurvexLeg.objects.all().delete()
    models.SurvexTitle.objects.all().delete()
    models.SurvexPersonRole.objects.all().delete()
    models.SurvexStation.objects.all().delete()
    print(" - Data flushed")
    # Clear the data issues as we are reloading
    models.DataIssue.objects.filter(parser='survex').delete()

    print(' - Loading All Survex Blocks...')
    print(' - redirecting stdout to loadsurvexblks.log ...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('loadsurvexblks.log', 'w')
    try:
        survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
        survexfile.save()
        survexfile.SetDirectory()

        # Load all
        survexblockroot = models.SurvexBlock(name="root", survexpath="", begin_char=0,
                                             cave=None, survexfile=survexfile, totalleglength=0.0)
        survexblockroot.save()
        fin = survexfile.OpenFile()
        textlines = [ ]
        # The real work starts here
        RecursiveLoad(survexblockroot, survexfile, fin, textlines)
        fin.close()
        survexblockroot.text = "".join(textlines)
        survexblockroot.save()
    finally:
        # Close the log file and restore sys.stdout even if the import crashed,
        # so subsequent output is not swallowed.
        sys.stdout.close()
        sys.stdout = stdout_orig
    print(' - Loaded All Survex Blocks.')


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")


def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the
    database and if we do, then save the x/y/z coordinates. If we don't have it in the database,
    print an error message and discard it.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print(' - Generating a list of Pos from %s.svx and then loading...' % (topdata))

    # Be careful with the cache file.
    # If LoadPos has been run before,
    # but without cave import being run before,
    # then *everything* may be in the fresh 'not found' cache file.
    cachefile = settings.SURVEX_DATA + "posnotfound.cache"
    notfoundbefore = {}
    if os.path.isfile(cachefile):
        # this is not a good test. 1623.svx may never change but *included files may have done.
        # When the *include is unrolled, we will be able to get a proper timestamp to use
        # and can increase the timeout from 3 days to 30 days.
        updtsvx = os.path.getmtime(topdata + ".svx")
        updtcache = os.path.getmtime(cachefile)
        age = updtcache - updtsvx
        print(' svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) )))

        now = time.time()
        if now - updtcache > 3*24*60*60:
            print(" cache is more than 3 days old. Deleting.")
            os.remove(cachefile)
        elif age < 0 :
            print(" cache is stale. Deleting.")
            os.remove(cachefile)
        else:
            print(" cache is fresh. Reading...")
            try:
                with open(cachefile, "r") as f:
                    for line in f:
                        l = line.rstrip()
                        if l in notfoundbefore:
                            notfoundbefore[l] +=1 # should not be duplicates
                            print(" DUPLICATE ", line, notfoundbefore[l])
                        else:
                            notfoundbefore[l] =1
            except Exception:
                print(" FAILURE READ opening cache file %s" % (cachefile))
                raise

    notfoundnow =[]
    found = 0
    skip = {}
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
    print(" - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")

    # `with` guarantees the .pos file is closed (the original leaked the handle)
    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline() # Drop header
        for line in posfile.readlines():
            r = poslineregex.match(line)
            if r:
                x, y, z, name = r.groups() # easting, northing, altitude
                if name in notfoundbefore:
                    skip[name] = 1
                else:
                    try:
                        ss = models.SurvexStation.objects.lookup(name)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except Exception:
                        # station name in the .pos file but not in the DB
                        notfoundnow.append(name)
    print(" - %s stations not found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))

    if found > 10: # i.e. a previous cave import has been done
        try:
            with open(cachefile, "w") as f:
                c = len(notfoundnow)+len(skip)
                for i in notfoundnow:
                    f.write("%s\n" % i)
                for j in skip:
                    f.write("%s\n" % j) # NB skip not notfoundbefore
                print('  Not-found cache file written: %s entries' % c)
        except Exception:
            print(" FAILURE WRITE opening cache file %s" % (cachefile))
            raise