"""Parser for survex (cave survey) files for the troggle Django app.

Reads the tree of *include-d .svx files, creates SurvexBlock/SurvexFile/
SurvexDirectory database rows, accumulates leg lengths, and links survey
wallets (scan folders) and QMs found in comments.
"""
import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.parsers.logbooks import GetCaveLookup
from troggle.core.views_caves import MapLocations

# module-level handle on the root block; the name given to the root SurvexBlock row
survexblockroot = None
ROOTBLOCK = "rootblock"


class SurvexLeg():
    """No longer a models.Model subclass, so no longer a database table
    """
    # tape = measured length; compass = bearing; clino = inclination.
    # Only 'tape' is actually kept by the parser; the others are error-checked then discarded.
    tape = 0.0
    compass = 0.0
    clino = 0.0


class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can *include
    other survex files.
    A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
    """
    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4} stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"} rx_linelen = re.compile(r"[\d\-+.]+$") rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$") rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$") rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$') # remember there is also QM_PATTERN used in views_other and set in settings.py rx_cave = re.compile(r'caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/') rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$') rx_comminc = re.compile(r'(?i)^\*include[\s]*([-\w/]*).*$') # inserted by linear collate ;*include rx_commcni = re.compile(r'(?i)^\*edulcni[\s]*([-\w/]*).*$') # inserted by linear collate ;*edulcni rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$') rx_ref = re.compile(r'(?i)^\s*ref[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)') rx_star = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$') rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$') rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$') survexlegsalllength = 0.0 survexlegsnumber = 0 depthbegin = 0 depthinclude = 0 stackbegin =[] stackinclude = [] svxfileslist = [] svxdirs = {} svxcaves = {} svxfiletitle = {} lineno = 0 insp = "" callcount = 0 stardata ={} includedfilename ="" currenttitle ="" currentsurvexblock = None currentsurvexfile = None currentcave = None def __init__(self): pass def LoadSurvexIgnore(self, survexblock, line, cmd): if cmd == "title": pass # unused in troggle today - but will become text list on SurvexBlock elif cmd == "require": pass # should we check survex version available for processing? 
elif cmd in ["equate", "fix", "alias", "calibrate", "cs","entrance", "export", "case", "declination", "infer","instrument", "sd", "units"]: pass # we ignore all these, which is fine. else: if cmd in ["include", "data", "flags", "title", "set", "ref"]: message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) else: message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) def LoadSurvexTeam(self, survexblock, line): teammembers = [ ] mteammember = self.rx_team.match(line) if mteammember: for tm in self.rx_person.split(mteammember.group(2)): if tm: personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower()) if (personexpedition, tm) not in teammembers: teammembers.append((personexpedition, tm)) personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm) personrole.expeditionday = survexblock.expeditionday if personexpedition: personrole.person=personexpedition.person personrole.save() def LoadSurvexDate(self, survexblock, line): # we should make this a date range for everything if len(line) == 10: survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone()) expeditions = models.Expedition.objects.filter(year=line[:4]) if expeditions: assert len(expeditions) == 1 survexblock.expedition = expeditions[0] survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date) survexblock.save() def LoadSurvexLineLeg(self, survexblock, svxline, sline, comment): """This reads compass, clino and tape data but only keeps the tape lengths, the rest is discarded after error-checking. 
""" # Check first to see if we are in a splay and abort if so. # TO DO splay abort stardata = self.stardata survexleg = SurvexLeg() ls = sline.lower().split() # this next fails for two surface survey svx files which use / for decimal point # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05) if stardata["type"] == "normal": # should use current flags setting for this # print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls,stardata)) tape = ls[stardata["tape"]] tape = tape.replace("(","") tape = tape.replace(")","") tape = tape.replace("/",".") try: survexleg.tape = float(tape) self.survexlegsnumber += 1 except ValueError: print(("! Tape misread in", survexblock.survexfile.path)) print((" Stardata:", stardata)) print((" Line:", ls)) message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path) models.DataIssue.objects.create(parser='survex', message=message) survexleg.tape = 0 try: survexblock.totalleglength += survexleg.tape self.survexlegsalllength += survexleg.tape except ValueError: message = ' ! Value Error: Tape length not added %s in %s' % (ls, survexblock.survexfile.path) models.DataIssue.objects.create(parser='survex', message=message) try: lclino = ls[stardata["clino"]] except: print(("! Clino misread in", survexblock.survexfile.path)) print((" Stardata:", stardata)) print((" Line:", ls)) message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path) models.DataIssue.objects.create(parser='survex', message=message) lclino = error try: lcompass = ls[stardata["compass"]] except: print(("! Compass misread in", survexblock.survexfile.path)) print((" Stardata:", stardata)) print((" Line:", ls)) message = ' ! 
Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path) models.DataIssue.objects.create(parser='survex', message=message) lcompass = error if lclino == "up": survexleg.compass = 0.0 survexleg.clino = 90.0 elif lclino == "down": survexleg.compass = 0.0 survexleg.clino = -90.0 elif lclino == "-" or lclino == "level": try: survexleg.compass = float(lcompass) except ValueError: print(("! Compass misread in", survexblock.survexfile.path)) print((" Stardata:", stardata)) print((" Line:", ls)) message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass, ls, survexblock.survexfile.path) models.DataIssue.objects.create(parser='survex', message=message) survexleg.compass = 1000 survexleg.clino = -90.0 else: assert self.rx_linelen.match(lcompass), ls assert self.rx_linelen.match(lclino) and lclino != "-", ls survexleg.compass = float(lcompass) survexleg.clino = float(lclino) # delete the object so that django autosaving doesn't save it. survexleg = None def LoadSurvexRef(self, survexblock, args): # *REF but also ; Ref years from 1960 to 2039 if len(args)< 4: message = " ! Empty or BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) return argsgps = self.rx_argsref.match(args) if argsgps: yr, letterx, wallet = argsgps.groups() else: message = " ! BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) return if not letterx: letterx = "" else: letterx = "X" if len(wallet)<2: wallet = "0" + wallet assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr refscan = "%s#%s%s" % (yr, letterx, wallet) try: if int(wallet)>100: message = " ! 
Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) except: message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) manyscansfolders = models_survex.ScansFolder.objects.filter(walletname=refscan) if manyscansfolders: survexblock.scansfolder = manyscansfolders[0] survexblock.save() if len(manyscansfolders) > 1: message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manyscansfolders), survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) else: message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path) print((self.insp+message)) models.DataIssue.objects.create(parser='survex', message=message) def LoadSurvexQM(self, survexblock, qmline): insp = self.insp qm_no = qmline.group(1) qm_grade = qmline.group(2) if qmline.group(3): # usual closest survey station qm_nearest = qmline.group(3) if qmline.group(4): qm_nearest = qm_nearest +"."+ qmline.group(4) if qmline.group(6) and qmline.group(6) != '-': qm_resolve_station = qmline.group(6) if qmline.group(7): qm_resolve_station = qm_resolve_station +"."+ qmline.group(7) else: qm_resolve_station = "" qm_notes = qmline.group(8) # Spec of QM in SVX files: # ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage # NB none of the SurveyStations are in the DB now, so if we want to link to aSurvexStation # we would have to create one. 
But that is not obligatory and no QMs loaded from CSVs have one try: qm = models_caves.QM.objects.create(number=qm_no, # nearest_station=a_survex_station_object, # can be null nearest_station_description=qm_resolve_station, nearest_station_name=qm_nearest, grade=qm_grade.upper(), location_description=qm_notes) qm.save # message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path) # print(insp+message) # models.DataIssue.objects.create(parser='survex', message=message) except: message = " ! QM{} FAIL to create {} in'{}'".format(qm_no, qm_nearest,survexblock.survexfile.path) print(insp+message) models.DataIssue.objects.create(parser='survex', message=message) def LoadSurvexDataCmd(self,survexblock,args): ls = args.lower().split() stardata = { "type":ls[0] } for i in range(0, len(ls)): stardata[self.stardataparamconvert.get(ls[i], ls[i])] = i - 1 self.stardata = stardata if ls[0] in ["normal", "cartesian", "nosurvey"]: assert (("from" in stardata and "to" in stardata) or "station" in stardata), args elif ls[0] == "default": stardata = self.stardatadefault else: assert ls[0] == "passage", args def LoadSurvexFlags(self, line, cmd): # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate' # but this data is only used for sense-checking not to actually calculate anything important pass def IdentifyCave(self, cavepath): if cavepath in self.svxcaves: print(' - Cave FAST matched for %s' % cavepath) return self.svxcaves[cavepath] path_match = self.rx_cave.search(cavepath) #print(' - Attempting cave match for %s' % cavepath) if path_match: sluggy = '%s-%s'.format(path_match.group(1), path_match.group(2)) cave = GetCaveLookup().get(sluggy) # Below is how it has been done for years: very fuzzy & slow searches # ..and wrong! #cave = models_caves.getCaveByReference(sluggy) if cave: self.currentcave = cave self.svxcaves[cavepath] = cave print(' - Cave matched for %s' % cavepath) return cave else: print(' ! 
No cave match for %s' % cavepath) return None def LoadSurvexFileBlock(self, survexblock, includelabel): """Creates SurvexDirectory and SurvexFile in the database with links to 'cave' Creates a new current survexblock with valid .survexfile and valid .survexdirectory """ depth = " " * self.depthbegin print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, includelabel)) headpath, tail = os.path.split(includelabel) if headpath not in self.svxdirs: self.svxdirs[headpath] = models_survex.SurvexDirectory(path=headpath, primarysurvexfile=survexblock.survexfile) newsurvexdirectory = self.svxdirs[headpath] newsurvexfile = models_survex.SurvexFile(path=includelabel) newsurvexfile.survexdirectory = newsurvexdirectory # Do not create a survexblock. Yes, there is a virtual block before the *begin statement but # only the *title is usually in that, so just inherit the *title into the blocks. # name = includelabel # newsurvexblock = models_survex.SurvexBlock(name=name, parent=survexblock, # survexpath=survexblock.survexpath+"."+name, # survexfile=newsurvexfile, # legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0) cave = self.IdentifyCave(headpath) if cave: newsurvexdirectory.cave = cave newsurvexfile.cave = cave #newsurvexblock.cave = cave newsurvexdirectory.save() newsurvexfile.save() #newsurvexblock.save self.currentsurvexfile = newsurvexfile #self.currentsurvexblock = newsurvexblock def ProcessIncludeLine(self, survexblock, included): # should do some push stuff here svxid = included.groups()[0] #depth = " " * self.depthbegin #print("{:2}{} - Include survexfile:'{}'".format(self.depthbegin, depth, svxid)) self.LoadSurvexFileBlock(survexblock, svxid) def ProcessEdulcniLine(self, survexblock, edulcni): # should do some pop stuff here svxid = edulcni.groups()[0] depth = " " * self.depthbegin print("{:2}{} - Edulcni survexfile:'{}'".format(self.depthbegin, depth, svxid)) self.currentsurvexblock = survexblock.parent self.currentsurvexfile = 
survexblock.parent.survexfile def LoadSurvexComment(self, survexblock, comment): # ignore all comments except ;ref and ;QM and ;*include (for collated survex file) refline = self.rx_ref.match(comment) if refline: comment = re.sub('(?i)\s*ref[.;]?',"",comment.strip()) self.LoadSurvexRef(survexblock, comment) qmline = self.rx_qm.match(comment) if qmline: self.LoadSurvexQM(survexblock, qmline) included = self.rx_comminc.match(comment) # ;*include means we have been included; not 'proceed to include' which *include means if included: self.ProcessIncludeLine(survexblock,included) edulcni = self.rx_commcni.match(comment) # ;*edulcni means we are returning from an included file if edulcni: self.ProcessEdulcniLine(survexblock,edulcni) def LoadSurvexSetup(self,survexblock, survexfile): self.depthbegin = 0 self.stardata = self.stardatadefault blocklegs = self.survexlegsnumber print(self.insp+" - MEM:{:.3f} Reading. parent:{} <> {} ".format(models.get_process_memory(),survexblock.survexfile.path, survexfile.path)) self.lineno = 0 sys.stderr.flush(); self.callcount +=1 if self.callcount % 10 ==0 : print(".", file=sys.stderr,end='') if self.callcount % 500 ==0 : print("\n", file=sys.stderr,end='') # Try to find the cave in the DB if not use the string as before path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path) if path_match: pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2)) cave = models_caves.getCaveByReference(pos_cave) if cave: survexfile.cave = cave def RecursiveRecursiveLoad(self, survexblock, survexfile, fin): """Follows the *include links in all the survex files from the root file 1623.svx and reads in the survex blocks, other data and the wallet references (scansfolder) as it goes. This part of the data include process is where the maximum memory is used and where it crashes on memory-constrained machines. Begin-end blocks may also be nested. 
""" self.LoadSurvexSetup(survexblock, survexfile) insp =self.insp previousnlegs = 0 svxlines = fin.read().splitlines() # cannot close file now as may be recursively called with the same fin if nested *begin-end for svxline in svxlines: self.lineno += 1 sline, comment = self.rx_comment.match(svxline.strip()).groups() if comment: self.LoadSurvexComment(survexblock, comment) if not sline: continue # skip blank lines # detect the star command mstar = self.rx_star.match(sline) if mstar: # yes we are reading a *cmd cmd, args = mstar.groups() cmd = cmd.lower() if re.match("include$(?i)", cmd): cave = self.IdentifyCave(args) if cave: survexfile.cave = cave includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args))) print((insp+' - INCLUDE-go path found, including - ' + args)) includesurvexfile = models_survex.SurvexFile(path=includepath) includesurvexfile.save() includesurvexfile.SetDirectory() if includesurvexfile.exists(): survexblock.save() self.insp += "> " #-------------------------------------------------------- fininclude = includesurvexfile.OpenFile() self.RecursiveRecursiveLoad(survexblock, includesurvexfile, fininclude) fininclude.close() #-------------------------------------------------------- self.insp = self.insp[2:] insp = self.insp print((insp+' - INCLUDE-return from include - ' + includepath)) else: print((insp+' ! ERROR *include file not found for %s' % includesurvexfile)) elif re.match("begin$(?i)", cmd): # On a *begin statement we start a new survexblock. # There should not be any *include inside a begin-end block, so this is a simple # load not a recursive fileload. But there may be many blocks nested to any depth in one file. 
if args: newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)) # Try to find the cave in the DB if not use the string as before path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath) if path_match: pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2)) # print(insp+pos_cave) cave = models_caves.getCaveByReference(pos_cave) if cave: survexfile.cave = cave else: print((insp+' - No match (b) for %s' % newsvxpath)) previousnlegs = self.survexlegsnumber name = args.lower() print(insp+' - Begin found for:{}, creating new SurvexBlock '.format(name)) # the recursive call re-reads the entire file. This is wasteful. We should pass in only # the un-parsed part of the file. survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0) survexblockdown.save() survexblock.save() survexblock = survexblockdown print(insp+" - BLOCK-enter nested *begin/*end block: '{}'".format(name)) self.insp += "> " #-------------------------------------------------------- self.RecursiveRecursiveLoad(survexblockdown, survexfile, fin) #-------------------------------------------------------- # do not close the file as there may be more blocks in this one # and it is re-read afresh with every nested begin-end block. 
                        self.insp = self.insp[2:]
                        insp = self.insp
                    else:
                        # anonymous *begin: just track the depth, no new block
                        self.depthbegin += 1
                # NOTE(review): re.match("end$(?i)", ...) uses a trailing inline flag,
                # which is a DeprecationWarning and an error from Python 3.11 - confirm
                # target Python version (cmd is already lower-cased, so "end" == cmd works).
                elif re.match("end$(?i)", cmd):
                    if self.depthbegin:
                        print(insp+" - BLOCK-return from nested *begin/*end block: '{}'".format(args))
                        self.depthbegin -= 1
                    else:
                        # end of the *begin that started this invocation: record leg count and unwind
                        legsinblock = self.survexlegsnumber - previousnlegs
                        print(insp+" - LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,self.survexlegsnumber))
                        survexblock.legsall = legsinblock
                        survexblock.save()
                        return
                elif cmd == "ref":
                    self.LoadSurvexRef(survexblock, args)
                elif cmd == "flags":
                    self.LoadSurvexFlags(args, cmd)
                elif cmd == "data":
                    self.LoadSurvexDataCmd(survexblock, args)
                elif cmd == "set" and re.match("names(?i)", args):
                    pass
                elif re.match("date$(?i)", cmd):
                    self.LoadSurvexDate(survexblock, args)
                elif re.match("team$(?i)", cmd):
                    self.LoadSurvexTeam(survexblock, args)
                else:
                    self.LoadSurvexIgnore(survexblock, args, cmd)
            else: # not a *cmd so we are reading data OR rx_comment failed
                if "from" in self.stardata: # only interested in survey legs
                    self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
                else:
                    pass # ignore all other sorts of data

    def LinearRecursiveLoad(self, survexblock, path, svxlines):
        """Loads a single survex file. Usually used to import all the survex files which have
        been collated into a single file. Loads the begin/end blocks recursively.
        """
        self.relativefilename = path
        cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections

        blockcount = 0
        for svxline in svxlines:
            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            if comment:
                self.LoadSurvexComment(survexblock, comment) # this catches the ;*include and ;*edulcni lines too
            if not sline:
                continue # skip blank lines

            # detect a star command
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                # ------------------------BEGIN
                if re.match("begin$(?i)", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    self.stackbegin.append(args.lower())
                    previousnlegs = self.survexlegsnumber
                    name = args.lower()
                    print("{:2}{} - Begin for :'{}'".format(self.depthbegin,depth, name))
                    survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock,
                                                                survexpath=survexblock.survexpath+"."+name,
                                                                cave=self.currentcave, survexfile=self.currentsurvexfile,
                                                                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                    survexblockdown.save()
                    survexblock.save()
                    # descend: the new block becomes the current one until its *end
                    survexblock = survexblockdown
                    blockcount +=1
                    if blockcount % 10 ==0 :
                        print(".", file=sys.stderr,end='')
                    if blockcount % 500 ==0 :
                        print("\n", file=sys.stderr,end='')
                        sys.stderr.flush();
                # ---------------------------END
                elif re.match("end$(?i)", cmd):
                    depth = " " * self.depthbegin
                    self.currentsurvexblock = survexblock.parent
                    self.currentsurvexfile = survexblock.parent.survexfile
                    print("{:2}{} - End   from:'{}'".format(self.depthbegin,depth,args))
                    # NOTE(review): previousnlegs is only assigned in the BEGIN branch,
                    # so a malformed file with *end before any *begin would raise
                    # NameError here; also with nested blocks it holds the innermost
                    # begin's count - confirm intended leg accounting.
                    legsinblock = self.survexlegsnumber - previousnlegs
                    print("{:2}{} - LEGS: {} (previous: {}, now:{})".format(self.depthbegin, depth,legsinblock,previousnlegs,self.survexlegsnumber))
                    survexblock.legsall = legsinblock
                    survexblock.save()
                    # ascend back to the parent block
                    survexblock = survexblock.parent
                    self.depthbegin -= 1
                # -----------------------------
                elif re.match("(?i)title$", cmd):
                    self.currenttitle = args
                elif re.match("(?i)ref$", cmd):
                    self.LoadSurvexRef(survexblock, args)
                elif re.match("(?i)flags$", cmd):
                    self.LoadSurvexFlags(args, cmd)
                elif re.match("(?i)data$", cmd):
                    self.LoadSurvexDataCmd(survexblock, args)
                elif re.match("(?i)date$", cmd):
                    self.LoadSurvexDate(survexblock, args)
                elif re.match("(?i)team$", cmd):
                    self.LoadSurvexTeam(survexblock, args)
                elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
                    pass
                elif re.match("(?i)include$", cmd):
                    # raw *include must not appear in an already-collated file
                    message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
                    print(message)
                    print(message,file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
                else:
                    self.LoadSurvexIgnore(survexblock, args, cmd)
            else: # not a *cmd so we are reading data OR rx_comment failed
                if "from" in self.stardata: # only interested in survey legs
                    self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
                else:
                    pass # ignore all other sorts of data

    def RecursiveScan(self, survexblock, survexfile, fin, flinear, fcollate):
        """Follows the *include links in all the survex files from the root file 1623.svx
        and reads only the *include and *begin and *end statements. It produces a linearised
        list of the include tree
        """
        indent = " " * self.depthinclude
        sys.stderr.flush();
        self.callcount +=1
        # progress dots on stderr (stdout is redirected to a log file)
        if self.callcount % 10 ==0 :
            print(".", file=sys.stderr,end='')
        if self.callcount % 500 ==0 :
            print("\n", file=sys.stderr,end='')

        if survexfile in self.svxfileslist:
            message = " * Warning. Survex file already seen: {}".format(survexfile.path)
            print(message)
            print(message,file=flinear)
            print(message,file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            if self.svxfileslist.count(survexfile) > 20:
                # almost certainly a circular *include chain - bail out of this branch
                message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(survexfile.path)
                print(message)
                print(message,file=flinear)
                print(message,file=sys.stderr)
                models.DataIssue.objects.create(parser='survex', message=message)
                return
        self.svxfileslist.append(survexfile)

        svxlines = fin.read().splitlines()
        for svxline in svxlines:
            self.lineno += 1
            includestmt =self.rx_include.match(svxline)
            if not includestmt:
                # copy everything except *include lines into the collated output;
                # the include boundaries are written as ;*include/;*edulcni markers below
                fcollate.write("{}\n".format(svxline))

            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                # NOTE(review): trailing inline flag "...$(?i)" is an error from
                # Python 3.11 - confirm target Python version.
                if re.match("include$(?i)", cmd):
                    includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
                    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
                    includesurvexfile = models_survex.SurvexFile(path=includepath)
                    if includesurvexfile.exists():
                        # do not create SurvexFile in DB here by doing includesurvexfile.save(). Do it when reading data.
                        #--------------------------------------------------------
                        self.depthinclude += 1
                        fininclude = includesurvexfile.OpenFile()
                        fcollate.write(";*include {}\n".format(includesurvexfile.path))
                        flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                        push = includesurvexfile.path.lower()
                        self.stackinclude.append(push)
                        self.RecursiveScan(survexblock, includesurvexfile, fininclude, flinear, fcollate)
                        pop = self.stackinclude.pop()
                        if pop != push:
                            # stack mismatch indicates a bug in the recursion, not in the data
                            message = "!!!!!!! ERROR pop != push {} != {} {}".format(pop, push, self.stackinclude)
                            print(message)
                            print(message,file=flinear)
                            print(message,file=sys.stderr)
                            models.DataIssue.objects.create(parser='survex', message=message)
                        flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                        fcollate.write(";*edulcni {}\n".format(includesurvexfile.path))
                        fininclude.close()
                        self.depthinclude -= 1
                        #--------------------------------------------------------
                    else:
                        message = " ! ERROR *include file not found for {}".format(includesurvexfile)
                        print(message)
                        print(message,file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                elif re.match("begin$(?i)", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    if args:
                        pushargs = args
                    else:
                        pushargs = " "
                    self.stackbegin.append(pushargs.lower())
                    flinear.write(" {:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                    pass
                elif re.match("end$(?i)", cmd):
                    depth = " " * self.depthbegin
                    flinear.write(" {:2} {} *end {}\n".format(self.depthbegin, depth, args))
                    if not args:
                        args = " "
                    popargs = self.stackbegin.pop()
                    if popargs != args.lower():
                        # mismatched *begin/*end names in the survex data
                        message = "!!!!!!! ERROR BEGIN/END pop != push {} != {}\n{}".format(popargs, args, self.stackbegin)
                        print(message)
                        print(message,file=flinear)
                        print(message,file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    self.depthbegin -= 1
                    pass

def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find files
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')

    # Pass 1: scan the include tree, writing a collated single file and a linearised log
    print(' - SCANNING All Survex Blocks...',file=sys.stderr)
    survexfileroot = survexblockroot.survexfile
    collatefilename = "_" + survexfileroot.path + ".svx"

    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    indent=""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0),file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0,survexfileroot.path))

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    #----------------------------------------------------------------
    svx_scan.RecursiveScan(survexblockroot, survexfileroot, finroot, flinear, fcollate)
#---------------------------------------------------------------- flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path)) fcollate.write(";*edulcni {}\n".format(survexfileroot.path)) mem1 = models.get_process_memory() flinear.write(" - MEM:{:.2f} MB STOP {}\n".format(mem1,survexfileroot.path)) flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1-mem0)) svxfileslist = svx_scan.svxfileslist flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist))) flinear.close() fcollate.close() svx_scan = None print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)),file=sys.stderr) mem1 = models.get_process_memory() print(" - MEM:{:7.2f} MB END ".format(mem0),file=sys.stderr) print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr) svxfileslist = [] # free memory # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the # entrance locations currently loaded after this by LoadPos(), but could better be done before ? 
    # look in MapLocations() for how we find the entrances

    # Pass 2: parse the collated single file produced by the scan pass
    print('\n - Loading All Survex Blocks (LinearRecursive)',file=sys.stderr)
    svx_load = LoadingSurvex()

    with open(collatefilename, "r") as fcollate:
        svxlines = fcollate.read().splitlines()
    #----------------------------------------------------------------
    svx_load.LinearRecursiveLoad(survexblockroot,survexfileroot.path, svxlines)
    #----------------------------------------------------------------

    # NOTE(review): mem1 here is still the value measured BEFORE
    # LinearRecursiveLoad ran (it is only re-measured two lines below), so this
    # "STOP" figure does not include the load pass - confirm intent.
    print(" - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)
    survexlegsnumber = svx_load.survexlegsnumber
    survexlegsalllength = svx_load.survexlegsalllength
    mem1 = models.get_process_memory()
    svx_load = None   # release parser caches

    print('\n - Loading All Survex Blocks (RecursiveRecursive)',file=sys.stderr)
    # The older one-pass recursive loader, kept for reference:
    # svxlrl = LoadingSurvex()
    # finroot = survexfileroot.OpenFile()
    # svxlrl.RecursiveRecursiveLoad(survexblockroot, survexfileroot, finroot)
    # finroot.close()
    # survexlegsnumber = svxlrl.survexlegsnumber
    # survexlegsalllength = svxlrl.survexlegsalllength
    # svxlrl = None

    # Close the logging file, Restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush();
    sys.stdout = stdout_orig

    return (survexlegsnumber, survexlegsalllength)

def LoadSurvexBlocks():
    """Wipe all survex-derived rows from the database and re-import everything
    from the root survex file named in settings.SURVEX_TOPNAME."""
    print(' - Flushing All Survex Blocks...')
    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()
    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfileroot.save()
    survexfileroot.SetDirectory()
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
                                                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
    # this is the first so id=1
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = models.get_process_memory()
    survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))

    # store the grand totals on the root block
    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')

# matches one line of a survex .pos file: "( x, y, z ) station.name"
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates. If we don't have it in the database,
    print an error message and discard it.
    This is ONLY ever used for entrance and fixedpts locations for the prospecting map:
    about 600 points out of 32,000.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    skip = {}   # NOTE(review): unused - candidate for removal
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)

    # only stations listed by MapLocations (entrances and fixed points) are wanted
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid]=True

    posfile = open("%s.pos" % (topdata))
    posfile.readline() #Drop header
    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except:
        # fall back to the first-created block if the named root is missing
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            # NOTE(review): 'id' shadows the builtin - rename when convenient
            x, y, z, id = r.groups()
            for sid in mappoints:
                if id.endswith(sid):
                    blockpath = "." + id[:-len(sid)].strip(".")
                    try:
                        sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                        if len(sbqs)==1:
                            sb = sbqs[0]
                        if len(sbqs)>1:
                            message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = sbqs[0]
                        elif len(sbqs)<=0:
                            message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = survexblockroot
                    except:
                        # NOTE(review): if this except fires, 'sb' may be unbound (or
                        # stale from a previous iteration) when used below - confirm.
                        message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    try:
                        ss = models_survex.SurvexStation(name=id, block=sb)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise
    print(" - {} SurvexStation entrances found.".format(found))