import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.parsers.logbooks import GetCaveLookup
from troggle.core.views_caves import MapLocations

survexblockroot = None
ROOTBLOCK = "rootblock"


class SurvexLeg():
    """No longer a models.Model subclass, so no longer a database table
    """
    tape = 0.0
    compass = 0.0
    clino = 0.0


class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can
    *include other survex files.
    A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
    """
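    # Illustrative sketch only (not a real survey, names and numbers invented) of the survex
    # source this class parses, showing the commands handled below: *begin/*end, *team, *date,
    # *ref, *data, leg data lines, and a ;QM comment:
    #
    #   *begin 204
    #   *team Insts Fred Bloggs
    #   *date 1999.08.04
    #   *ref 1999#02
    #   *data normal from to length gradient bearing ignore ignore ignore ignore
    #   1  2  5.57  -2  232   ; QM1 A 204.1 - dig at end
    #   *end 204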
    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
    stardatadefault = {"type": "normal", "t": "leg", "from": 0, "to": 1, "tape": 2, "compass": 3, "clino": 4}
    stardataparamconvert = {"length": "tape", "bearing": "compass", "gradient": "clino"}

    rx_linelen = re.compile(r"[\d\-+.]+$")
    rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
    # remember there is also QM_PATTERN used in views_other and set in settings.py
    rx_cave = re.compile(r'(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)')
    rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')
    rx_comminc = re.compile(r'(?i)^\*include[\s]*([-\w/]*).*$')  # inserted by linear collate ;*include
    rx_commcni = re.compile(r'(?i)^\*edulcni[\s]*([-\w/]*).*$')  # inserted by linear collate ;*edulcni
    rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$')
    rx_ref = re.compile(r'(?i)^\s*ref[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_star = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
    rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
    rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')

    survexlegsalllength = 0.0
    survexlegsnumber = 0
    depthbegin = 0
    depthinclude = 0
    stackbegin = []
    stackinclude = []
    stacksvxfiles = []
    svxfileslist = []
    svxdirs = {}
    lineno = 0
    insp = ""
    callcount = 0
    stardata = {}
    ignoreprefix = ["surface", "kataster", "fixedpts", "gpx"]
    ignorenoncave = ["caves-1623", "caves-1623/2007-neu"]
    includedfilename = ""
    currentsurvexblock = None
    currentsurvexfile = None
    currentcave = None

    def __init__(self):
        self.caveslist = GetCaveLookup()

    def LoadSurvexIgnore(self, survexblock, line, cmd):
        if cmd == "require":
            pass  # should we check survex version available for processing?
        elif cmd in ["equate", "fix", "alias", "calibrate", "cs", "entrance", "export", "case",
                     "declination", "infer", "instrument", "sd", "units"]:
            pass  # we ignore all these, which is fine.
        else:
            if cmd in ["include", "data", "flags", "title", "set", "ref"]:
                message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
                print((self.insp + message))
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
                print((self.insp + message))
                models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexTeam(self, survexblock, line):
        teammembers = []
        mteammember = self.rx_team.match(line)
        if mteammember:
            for tm in self.rx_person.split(mteammember.group(2)):
                if tm:
                    personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                    if (personexpedition, tm) not in teammembers:
                        teammembers.append((personexpedition, tm))
                        personrole = models_survex.SurvexPersonRole(survexblock=survexblock,
                                                                    nrole=mteammember.group(1).lower(),
                                                                    personexpedition=personexpedition,
                                                                    personname=tm)
                        personrole.expeditionday = survexblock.expeditionday
                        if personexpedition:
                            personrole.person = personexpedition.person
                        personrole.save()

    def LoadSurvexDate(self, survexblock, line):
        # we should make this a date range for everything
        if len(line) == 10:
            survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
            expeditions = models.Expedition.objects.filter(year=line[:4])
            if expeditions:
                assert len(expeditions) == 1
                survexblock.expedition = expeditions[0]
                survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                survexblock.save()

    def LoadSurvexLineLeg(self, survexblock, svxline, sline, comment):
        """This reads compass, clino and tape data but only keeps the tape lengths;
        the rest is discarded after error-checking.
        """
        # Check first to see if we are in a splay and abort if so.
        # TO DO splay abort
        stardata = self.stardata
        survexleg = SurvexLeg()

        ls = sline.lower().split()

        # this next fails for two surface survey svx files which use / for decimal point
        # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
        if stardata["type"] == "normal":  # should use current flags setting for this
            # print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls, stardata))
            tape = ls[stardata["tape"]]
            tape = tape.replace("(", "")
            tape = tape.replace(")", "")
            tape = tape.replace("/", ".")
            try:
                survexleg.tape = float(tape)
                self.survexlegsnumber += 1
            except ValueError:
                print(("! Tape misread in", survexblock.survexfile.path))
                print((" Stardata:", stardata))
                print((" Line:", ls))
                message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.tape = 0
            try:
                survexblock.totalleglength += survexleg.tape
                self.survexlegsalllength += survexleg.tape
            except ValueError:
                message = ' ! Value Error: Tape length not added %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)

            try:
                lclino = ls[stardata["clino"]]
            except:
                print(("! Clino misread in", survexblock.survexfile.path))
                print((" Stardata:", stardata))
                print((" Line:", ls))
                message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                lclino = "error"  # sentinel string; the original bare name 'error' raised NameError here
            try:
                lcompass = ls[stardata["compass"]]
            except:
                print(("! Compass misread in", survexblock.survexfile.path))
                print((" Stardata:", stardata))
                print((" Line:", ls))
                message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                lcompass = "error"  # sentinel string, as for the clino above

            if lclino == "up":
                survexleg.compass = 0.0
                survexleg.clino = 90.0
            elif lclino == "down":
                survexleg.compass = 0.0
                survexleg.clino = -90.0
            elif lclino == "-" or lclino == "level":
                try:
                    survexleg.compass = float(lcompass)
                except ValueError:
                    print(("! Compass misread in", survexblock.survexfile.path))
                    print((" Stardata:", stardata))
                    print((" Line:", ls))
                    message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass, ls, survexblock.survexfile.path)
                    models.DataIssue.objects.create(parser='survex', message=message)
                    survexleg.compass = 1000
                survexleg.clino = -90.0
            else:
                assert self.rx_linelen.match(lcompass), ls
                assert self.rx_linelen.match(lclino) and lclino != "-", ls
                survexleg.compass = float(lcompass)
                survexleg.clino = float(lclino)

        # delete the object so that django autosaving doesn't save it.
        survexleg = None

    def LoadSurvexRef(self, survexblock, args):
        # *REF but also ; Ref    years from 1960 to 2039
        if len(args) < 4:
            message = " ! Empty or BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp + message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        argsgps = self.rx_argsref.match(args)
        if argsgps:
            yr, letterx, wallet = argsgps.groups()
        else:
            message = " ! BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp + message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        if not letterx:
            letterx = ""
        else:
            letterx = "X"
        if len(wallet) < 2:
            wallet = "0" + wallet
        assert (int(yr) > 1960 and int(yr) < 2039), "Wallet year out of bounds: %s" % yr
        refscan = "%s#%s%s" % (yr, letterx, wallet)
        try:
            if int(wallet) > 100:
                message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
                print((self.insp + message))
                models.DataIssue.objects.create(parser='survex', message=message)
        except:
            message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp + message))
            models.DataIssue.objects.create(parser='survex', message=message)

        manyscansfolders = models_survex.ScansFolder.objects.filter(walletname=refscan)
        if manyscansfolders:
            survexblock.scansfolder = manyscansfolders[0]
            survexblock.save()
            if len(manyscansfolders) > 1:
                message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manyscansfolders), survexblock.survexfile.path)
                print((self.insp + message))
                models.DataIssue.objects.create(parser='survex', message=message)
        else:
            message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp + message))
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexQM(self, survexblock, qmline):
        insp = self.insp
        qm_no = qmline.group(1)
        qm_grade = qmline.group(2)
        if qmline.group(3):  # usual closest survey station
            qm_nearest = qmline.group(3)
            if qmline.group(4):
                qm_nearest = qm_nearest + "." + qmline.group(4)
        if qmline.group(6) and qmline.group(6) != '-':
            qm_resolve_station = qmline.group(6)
            if qmline.group(7):
                qm_resolve_station = qm_resolve_station + "." + qmline.group(7)
        else:
            qm_resolve_station = ""
        qm_notes = qmline.group(8)

        # Spec of QM in SVX files:
        # ;Serial number  grade(A/B/C/D/X)  nearest-station  resolution-station  description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage

        # NB none of the SurveyStations are in the DB now, so if we want to link to a SurvexStation
        # we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
        try:
            qm = models_caves.QM.objects.create(number=qm_no,
                                                # nearest_station=a_survex_station_object,  # can be null
                                                nearest_station_description=qm_resolve_station,
                                                nearest_station_name=qm_nearest,
                                                grade=qm_grade.upper(),
                                                location_description=qm_notes)
            qm.save()  # objects.create() has already saved; the original referenced qm.save without calling it
            # message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest, survexblock.survexfile.path)
            # print(insp+message)
            # models.DataIssue.objects.create(parser='survex', message=message)
        except:
            message = " ! QM{} FAIL to create {} in '{}'".format(qm_no, qm_nearest, survexblock.survexfile.path)
            print(insp + message)
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexDataCmd(self, survexblock, args):
        ls = args.lower().split()
        stardata = {"type": ls[0]}
        for i in range(0, len(ls)):
            stardata[self.stardataparamconvert.get(ls[i], ls[i])] = i - 1
        self.stardata = stardata
        if ls[0] in ["normal", "cartesian", "nosurvey"]:
            assert (("from" in stardata and "to" in stardata) or "station" in stardata), args
        elif ls[0] == "default":
            stardata = self.stardatadefault
        else:
            assert ls[0] == "passage", args

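    # Worked example (for illustration only) of the index table built above from
    #   *data normal from to length gradient bearing ignore ignore ignore ignore
    # After the loop, field names map to word positions in each leg line (the command word
    # itself also lands in the dict with index -1, which is harmless):
    #   {"type": "normal", "normal": -1, "from": 0, "to": 1, "tape": 2, "clino": 3, "compass": 4, "ignore": 8}
    # so LoadSurvexLineLeg() picks the tape reading from ls[2], the clino from ls[3], and so on.
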
    def LoadSurvexFlags(self, line, cmd):
        # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
        # but this data is only used for sense-checking, not to actually calculate anything important
        pass

    def IdentifyCave(self, cavepath):
        if cavepath.lower() in self.caveslist:
            return self.caveslist[cavepath.lower()]
        # TO DO - some of this is already done in generating self.caveslist so simplify this,
        # esp. as it is in a loop.
        path_match = self.rx_cave.search(cavepath)
        if path_match:
            sluggy = '{}-{}'.format(path_match.group(1), path_match.group(2))
            guesses = [sluggy.lower(), path_match.group(2).lower()]
            for g in guesses:
                if g in self.caveslist:
                    self.caveslist[cavepath] = self.caveslist[g]
                    return self.caveslist[g]
            print(' ! Failed to find cave for {}'.format(cavepath.lower()))
        else:
            print(' ! No regex cave match for %s' % cavepath.lower())
        return None

    def GetSurvexDirectory(self, headpath):
        if not headpath:
            return self.svxdirs[""]
        if headpath.lower() not in self.svxdirs:
            self.svxdirs[headpath.lower()] = models_survex.SurvexDirectory(path=headpath, primarysurvexfile=self.currentsurvexfile)
        return self.svxdirs[headpath.lower()]

    def ReportNonCaveIncludes(self, headpath, includelabel):
        """Ignore surface, kataster and gps *include survex files
        """
        if headpath in self.ignorenoncave:
            return
        for i in self.ignoreprefix:
            if headpath.startswith(i):
                return
        message = " ! {} is not a cave. (while creating {} sfile & sdirectory)".format(headpath, includelabel)
        print(message)
        print(message, file=sys.stderr)
        models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexFile(self, includelabel):
        """Creates SurvexFile in the database, and SurvexDirectory if needed,
        with links to 'cave'.
        Creates a new current survexblock with valid .survexfile and valid .survexdirectory.
        The survexblock passed-in is not necessarily the parent. FIX THIS.
        """
        depth = " " * self.depthbegin
        print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, includelabel))

        newfile = models_survex.SurvexFile(path=includelabel)
        headpath, tail = os.path.split(includelabel)

        newdirectory = self.GetSurvexDirectory(headpath)
        if not newdirectory:
            message = " ! 'None' SurvexDirectory returned from GetSurvexDirectory({})".format(headpath)
            print(message)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)

        newfile.survexdirectory = newdirectory
        cave = self.IdentifyCave(headpath)
        if cave:
            newdirectory.cave = cave
            newfile.cave = cave
        else:
            self.ReportNonCaveIncludes(headpath, includelabel)

        if not newfile.survexdirectory:
            message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(includelabel)
            print(message)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)

        self.currentsurvexfile.save()  # django insists on this although it is already saved !?
        try:
            newdirectory.save()
        except:
            print(newdirectory, file=sys.stderr)
            print(newdirectory.primarysurvexfile, file=sys.stderr)
            raise
        self.currentsurvexfile = newfile

    def ProcessIncludeLine(self, included):
        svxid = included.groups()[0]
        # depth = " " * self.depthbegin
        # print("{:2}{} - Include survexfile:'{}'".format(self.depthbegin, depth, svxid))
        self.LoadSurvexFile(svxid)
        self.stacksvxfiles.append(self.currentsurvexfile)

    def ProcessEdulcniLine(self, edulcni):
        """Saves the current survexfile in the db
        """
        svxid = edulcni.groups()[0]
        # depth = " " * self.depthbegin
        # print("{:2}{} - Edulcni survexfile:'{}'".format(self.depthbegin, depth, svxid))
        self.currentsurvexfile.save()
        self.currentsurvexfile = self.stacksvxfiles.pop()

    def LoadSurvexComment(self, survexblock, comment):
        # ignore all comments except ;ref and ;QM and ;*include (for collated survex file)
        refline = self.rx_ref.match(comment)
        if refline:
            comment = re.sub(r'(?i)\s*ref[.;]?', "", comment.strip())
            self.LoadSurvexRef(survexblock, comment)

        qmline = self.rx_qm.match(comment)
        if qmline:
            self.LoadSurvexQM(survexblock, qmline)

        included = self.rx_comminc.match(comment)
        # ;*include means we have been included; not 'proceed to include', which is what *include means
        if included:
            self.ProcessIncludeLine(included)

        edulcni = self.rx_commcni.match(comment)
        # ;*edulcni means we are returning from an included file
        if edulcni:
            self.ProcessEdulcniLine(edulcni)

    def LoadSurvexSetup(self, survexblock, survexfile):
        self.depthbegin = 0
        self.stardata = self.stardatadefault
        blocklegs = self.survexlegsnumber
        print(self.insp + " - MEM:{:.3f} Reading. parent:{} <> {} ".format(models.get_process_memory(), survexblock.survexfile.path, survexfile.path))
        self.lineno = 0
        sys.stderr.flush()
        self.callcount += 1
        if self.callcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if self.callcount % 500 == 0:
            print("\n", file=sys.stderr, end='')
        # Try to find the cave in the DB; if not, use the string as before
        path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
        if path_match:
            pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
            cave = models_caves.getCaveByReference(pos_cave)
            if cave:
                survexfile.cave = cave

    def LinearRecursiveLoad(self, survexblock, path, svxlines):
        """Loads a single survex file. Usually used to import all the survex files which have
        been collated into a single file. Loads the begin/end blocks recursively.
        """
        self.relativefilename = path
        cave = self.IdentifyCave(path)  # this will produce null for survex files which are geographic collections

        self.currentsurvexfile = survexblock.survexfile
        self.currentsurvexfile.save()  # django insists on this although it is already saved !?
        blockcount = 0

        def tickle():
            nonlocal blockcount
            blockcount += 1
            if blockcount % 10 == 0:
                print(".", file=sys.stderr, end='')
            if blockcount % 500 == 0:
                print("\n", file=sys.stderr, end='')
            sys.stderr.flush()

        for svxline in svxlines:
            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            if comment:
                self.LoadSurvexComment(survexblock, comment)  # this catches the ;*include and ;*edulcni lines too
            if not sline:
                continue  # skip blank lines

            # detect a star command
            mstar = self.rx_star.match(sline)
            if mstar:  # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                # ------------------------ BEGIN
                if re.match("(?i)begin$", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    blockid = args.lower()
                    self.stackbegin.append(blockid)
                    previousnlegs = self.survexlegsnumber
                    print("{:2}{} - Begin for :'{}'".format(self.depthbegin, depth, blockid))
                    pathlist = ""
                    for id in self.stackbegin:
                        if len(id) > 0:
                            pathlist += "." + id
                    newsurvexblock = models_survex.SurvexBlock(name=blockid, parent=survexblock,
                                                               survexpath=pathlist,
                                                               cave=self.currentcave,
                                                               survexfile=self.currentsurvexfile,
                                                               legsall=0, legssplay=0, legssurfc=0,
                                                               totalleglength=0.0)
                    survexblock = newsurvexblock
                    survexblock.survexfile.save()  # django insists on this although it is already saved !?
                    survexblock.save()  # django insists on this, but we want to save at the end !
                    tickle()

                # ------------------------ END
                elif re.match("(?i)end$", cmd):
                    depth = " " * self.depthbegin
                    self.currentsurvexblock = survexblock.parent
                    print("{:2}{} - End from:'{}'".format(self.depthbegin, depth, args))
                    legsinblock = self.survexlegsnumber - previousnlegs
                    print("{:2}{} - LEGS: {} (previous: {}, now:{})".format(self.depthbegin, depth, legsinblock, previousnlegs, self.survexlegsnumber))
                    survexblock.legsall = legsinblock
                    try:
                        survexblock.parent.save()  # django insists on this although it is already saved !?
                    except:
                        print(survexblock.parent, file=sys.stderr)
                        raise
                    try:
                        survexblock.save()  # save to db at end of block
                    except:
                        print(survexblock, file=sys.stderr)
                        raise
                    blockid = self.stackbegin.pop()
                    self.depthbegin -= 1

                # -----------------------------
                elif re.match("(?i)title$", cmd):
                    survexblock.title = args  # only apply to current survexblock
                elif re.match("(?i)ref$", cmd):
                    self.LoadSurvexRef(survexblock, args)
                elif re.match("(?i)flags$", cmd):
                    self.LoadSurvexFlags(args, cmd)
                elif re.match("(?i)data$", cmd):
                    self.LoadSurvexDataCmd(survexblock, args)
                elif re.match("(?i)date$", cmd):
                    self.LoadSurvexDate(survexblock, args)
                elif re.match("(?i)team$", cmd):
                    self.LoadSurvexTeam(survexblock, args)
                elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
                    pass
                elif re.match("(?i)include$", cmd):
                    message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
                    print(message)
                    print(message, file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
                else:
                    self.LoadSurvexIgnore(survexblock, args, cmd)
            else:  # not a *cmd so we are reading data OR rx_comment failed
                if "from" in self.stardata:  # only interested in survey legs
                    self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
                else:
                    pass  # ignore all other sorts of data

    def RecursiveScan(self, survexblock, survexfile, fin, flinear, fcollate):
        """Follows the *include links in all the survex files from the root file 1623.svx
        and reads only the *include and *begin and *end statements. It produces a linearised
        list of the include tree.
        """
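        # Sketch (illustrative, hypothetical paths) of the collated output written below: each
        # *include line is replaced by the file's contents, bracketed by ;*include / ;*edulcni
        # comment markers so that LinearRecursiveLoad() can tell which SurvexFile a block came from:
        #   ;*include caves-1623/204/204
        #   *begin 204
        #     ...
        #   *end 204
        #   ;*edulcni caves-1623/204/204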
        indent = " " * self.depthinclude
        sys.stderr.flush()
        self.callcount += 1
        if self.callcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if self.callcount % 500 == 0:
            print("\n", file=sys.stderr, end='')

        if survexfile in self.svxfileslist:
            message = " * Warning. Survex file already seen: {}".format(survexfile.path)
            print(message)
            print(message, file=flinear)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            if self.svxfileslist.count(survexfile) > 20:
                message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop, so fix your *include statements that include this. Aborting. {}".format(survexfile.path)
                print(message)
                print(message, file=flinear)
                print(message, file=sys.stderr)
                models.DataIssue.objects.create(parser='survex', message=message)
                return
        self.svxfileslist.append(survexfile)

        svxlines = fin.read().splitlines()
        for svxline in svxlines:
            self.lineno += 1
            includestmt = self.rx_include.match(svxline)
            if not includestmt:
                fcollate.write("{}\n".format(svxline))

            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            mstar = self.rx_star.match(sline)
            if mstar:  # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                if re.match("(?i)include$", cmd):
                    includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
                    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
                    includesurvexfile = models_survex.SurvexFile(path=includepath)

                    if includesurvexfile.exists():
                        # do not create SurvexFile in DB here by doing includesurvexfile.save(). Do it when reading data.
                        # --------------------------------------------------------
                        self.depthinclude += 1
                        fininclude = includesurvexfile.OpenFile()
                        fcollate.write(";*include {}\n".format(includesurvexfile.path))
                        flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                        push = includesurvexfile.path.lower()
                        self.stackinclude.append(push)
                        # -----------------
                        self.RecursiveScan(survexblock, includesurvexfile, fininclude, flinear, fcollate)
                        # -----------------
                        pop = self.stackinclude.pop()
                        if pop != push:
                            message = "!!!!!!! ERROR pop != push {} != {} {}".format(pop, push, self.stackinclude)
                            print(message)
                            print(message, file=flinear)
                            print(message, file=sys.stderr)
                            models.DataIssue.objects.create(parser='survex', message=message)
                        includesurvexfile = None
                        flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
                        fcollate.write(";*edulcni {}\n".format(pop))
                        fininclude.close()
                        self.depthinclude -= 1
                        # --------------------------------------------------------
                    else:
                        message = " ! ERROR *include file not found for {}".format(includesurvexfile)
                        print(message)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                elif re.match("(?i)begin$", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    if args:
                        pushargs = args
                    else:
                        pushargs = " "
                    self.stackbegin.append(pushargs.lower())
                    flinear.write(" {:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                elif re.match("(?i)end$", cmd):
                    depth = " " * self.depthbegin
                    flinear.write(" {:2} {} *end {}\n".format(self.depthbegin, depth, args))
                    if not args:
                        args = " "
                    popargs = self.stackbegin.pop()
                    if popargs != args.lower():
                        message = "!!!!!!! ERROR BEGIN/END pop != push {} != {}\n{}".format(popargs, args, self.stackbegin)
                        print(message)
                        print(message, file=flinear)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    self.depthbegin -= 1
                elif re.match("(?i)title$", cmd):
                    depth = " " * self.depthbegin
                    flinear.write(" {:2} {} *title {}\n".format(self.depthbegin, depth, args))


def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find files
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')

    print(' - SCANNING All Survex Blocks...', file=sys.stderr)
    survexfileroot = survexblockroot.survexfile
    collatefilename = "_" + survexfileroot.path + ".svx"

    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    indent = ""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0), file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0, survexfileroot.path))

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    # ----------------------------------------------------------------
    svx_scan.RecursiveScan(survexblockroot, survexfileroot, finroot, flinear, fcollate)
    # ----------------------------------------------------------------
    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))
    mem1 = models.get_process_memory()
    flinear.write("\n - MEM:{:.2f} MB STOP {}\n".format(mem1, survexfileroot.path))
    flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1 - mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None
    print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)), file=sys.stderr)
    mem1 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB END ".format(mem0), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1 - mem0), file=sys.stderr)
    svxfileslist = []  # free memory

    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before?
    # Look in MapLocations() for how we find the entrances.

    print('\n - Loading All Survex Blocks (LinearRecursive)', file=sys.stderr)
    svx_load = LoadingSurvex()
    svx_load.svxdirs[""] = survexfileroot.survexdirectory

    with open(collatefilename, "r") as fcollate:
        svxlines = fcollate.read().splitlines()
    # ----------------------------------------------------------------
    svx_load.LinearRecursiveLoad(survexblockroot, survexfileroot.path, svxlines)
    # ----------------------------------------------------------------

    print("\n - MEM:{:7.2f} MB STOP".format(mem1), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1 - mem0), file=sys.stderr)
    survexlegsnumber = svx_load.survexlegsnumber
    survexlegsalllength = svx_load.survexlegsalllength
    mem1 = models.get_process_memory()
    svx_load = None

    # Close the logging file, restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig

    return (survexlegsnumber, survexlegsalllength)


def MakeSurvexFileRoot():
    survexfileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfileroot.save()
    survexdirectoryroot = models_survex.SurvexDirectory(path=settings.EXPOWEB, cave=None, primarysurvexfile=survexfileroot)
    survexdirectoryroot.save()
    survexfileroot.survexdirectory = survexdirectoryroot
    survexfileroot.save()  # mutually dependent objects need a double-save like this
    return survexfileroot


def LoadSurvexBlocks():
    print(' - Flushing All Survex Blocks...')
    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()
    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = MakeSurvexFileRoot()

    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None,
                                                survexfile=survexfileroot,
                                                legsall=0, legssplay=0, legssurfc=0,
                                                totalleglength=0.0)
    # this is the first, so id=1
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = models.get_process_memory()
    survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart, memend, memend - memstart))

    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")


def LoadPos():
    """First load the survex stations for entrances and fixed points (about 600) into the database.
    Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then look up each position by name to see if we have it in the
    database and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
    If we don't have it in the database, print an error message and discard it.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))
    found = 0
    skip = {}
    print("\n")  # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd=settings.SURVEX_DATA)

    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    posfile = open("%s.pos" % (topdata))
    posfile.readline()  # Drop header

    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise

    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, id = r.groups()
            for sid in mappoints:
                if id.endswith(sid):
                    blockpath = "." + id[:-len(sid)].strip(".")
                    # But why are we doing this? Why do we need the survexblock id for each of these?
                    # ..because mostly they don't actually appear in any SVX file. We should match them up
                    # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
                    if False:
                        try:
                            sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                            if len(sbqs) == 1:
                                sb = sbqs[0]
                            if len(sbqs) > 1:
                                message = " ! MULTIPLE SurvexBlocks {:3} matching Entrance point {} {} '{}'".format(len(sbqs), blockpath, sid, id)
                                print(message)
                                models.DataIssue.objects.create(parser='survex', message=message)
                                sb = sbqs[0]
                            elif len(sbqs) <= 0:
                                message = " ! ZERO SurvexBlocks matching Entrance point {} {} '{}'".format(blockpath, sid, id)
                                print(message)
                                models.DataIssue.objects.create(parser='survex', message=message)
                                sb = survexblockroot
                        except:
                            message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                    try:
                        ss = models_survex.SurvexStation(name=id, block=survexblockroot)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise

    print(" - {} SurvexStation entrances found.".format(found))
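
# Minimal usage sketch (an assumption, not part of this module): with Django configured for
# troggle and the survex data checked out, the two loaders above are run in this order, e.g.
# from a `python manage.py shell` session or from the site's database-reset import code.
# The import path assumes this module lives at troggle.parsers.survex, as its sibling imports suggest.
#
#     from troggle.parsers.survex import LoadSurvexBlocks, LoadPos
#
#     LoadSurvexBlocks()   # flush and re-parse all *begin/*end blocks from the collated file
#     LoadPos()            # run cavern/3dtopos and attach x/y/z to entrance SurvexStations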