2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-29 21:31:54 +00:00
troggle/parsers/survex.py

961 lines
47 KiB
Python
Raw Normal View History

import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware
import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
2020-06-28 01:50:34 +01:00
from troggle.parsers.logbooks import GetCaveLookup
from troggle.core.views_caves import MapLocations
2020-06-15 03:28:51 +01:00
# Root SurvexBlock of the whole *include tree; populated by the loader at import time.
survexblockroot = None
# Name given to the synthetic top-level block that parents everything else.
ROOTBLOCK = "rootblock"
2020-06-24 14:10:13 +01:00
class SurvexLeg():
    """One tape/compass/clino reading from a survex data line.

    No longer a models.Model subclass, so no longer a database table.
    Only .tape is ultimately kept by the parser; compass and clino are
    used for error-checking and then discarded.
    """
    def __init__(self):
        # These were class attributes, which are shared across all
        # instances and easy to clobber accidentally; per-instance
        # attributes are safer and behave identically for all callers,
        # who only ever set/read them on an instance.
        self.tape = 0.0
        self.compass = 0.0
        self.clino = 0.0
2020-06-27 18:00:24 +01:00
class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can *include
    other survex files.
    A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
    """
    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
    stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
    # Maps survex field names to the canonical names used internally.
    stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
    # Matches a plain numeric field (digits, sign, decimal point).
    rx_linelen = re.compile(r"[\d\-+.]+$")
    # *team lines: role keyword followed by the person name(s).
    rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
    # Separators used between multiple people on one *team line.
    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
    # ;QM comments: number, grade, nearest station, resolution station (or '-'), description.
    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
    # remember there is also QM_PATTERN used in views_other and set in settings.py
    rx_cave = re.compile(r'(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)')
    # Splits a survex line into (code, comment) at the first ';'.
    rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')
    rx_comminc = re.compile(r'(?i)^\*include[\s]*([-\w/]*).*$') # inserted by linear collate ;*include
    rx_commcni = re.compile(r'(?i)^\*edulcni[\s]*([-\w/]*).*$') # inserted by linear collate ;*edulcni
    rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$')
    # ';ref' comment form of a wallet reference.
    rx_ref = re.compile(r'(?i)^\s*ref[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    # Generic '*command args' line (comment tail stripped).
    rx_star = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
    # '*ref' wallet references: year 1960-2039, optional X, wallet number.
    rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
    rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')

    # Running totals across the whole import.
    survexlegsalllength = 0.0
    survexlegsnumber = 0
    # Nesting depth counters for *begin/*end and *include processing.
    depthbegin = 0
    depthinclude = 0
    # NOTE(review): these containers are *class* attributes, shared by every
    # LoadingSurvex instance. This assumes the loader is effectively used as a
    # singleton per import run — confirm; see __init__ for instance state.
    stackbegin =[]
    stackinclude = []
    stacksvxfiles = []
    svxfileslist = []
    svxdirs = {}
    lineno = 0
    # Current log-message indent prefix; grows with recursion depth.
    insp = ""
    callcount = 0
    # The active *data interpretation, as set by LoadSurvexDataCmd.
    stardata ={}
    includedfilename =""
    currentsurvexblock = None
    currentsurvexfile = None
    currentcave = None
2020-06-24 14:10:13 +01:00
def __init__(self):
    """Initialise per-instance parser state.

    The cave lookup table is fetched once here; the mutable containers
    are (re)created per instance so that two LoadingSurvex instances
    cannot corrupt each other's stacks (they are otherwise shared
    class attributes). This shadows the class attributes of the same
    names and is backward compatible: all visible code accesses them
    via self/instances.
    """
    self.caveslist = GetCaveLookup()
    # Per-instance copies of the mutable class-level containers.
    self.stackbegin = []
    self.stackinclude = []
    self.stacksvxfiles = []
    self.svxfileslist = []
    self.svxdirs = {}
    self.stardata = {}
2020-06-24 19:07:11 +01:00
def LoadSurvexIgnore(self, survexblock, line, cmd):
2020-06-28 14:42:26 +01:00
if cmd == "require":
2020-06-24 19:07:11 +01:00
pass # should we check survex version available for processing?
elif cmd in ["equate", "fix", "alias", "calibrate", "cs","entrance", "export", "case",
"declination", "infer","instrument", "sd", "units"]:
pass # we ignore all these, which is fine.
else:
if cmd in ["include", "data", "flags", "title", "set", "ref"]:
message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
print((self.insp+message))
models.DataIssue.objects.create(parser='survex', message=message)
else:
message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
print((self.insp+message))
models.DataIssue.objects.create(parser='survex', message=message)
def LoadSurvexTeam(self, survexblock, line):
teammembers = [ ]
mteammember = self.rx_team.match(line)
if mteammember:
for tm in self.rx_person.split(mteammember.group(2)):
if tm:
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
if (personexpedition, tm) not in teammembers:
teammembers.append((personexpedition, tm))
personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
personrole.expeditionday = survexblock.expeditionday
if personexpedition:
personrole.person=personexpedition.person
personrole.save()
2020-06-24 17:55:42 +01:00
def LoadSurvexDate(self, survexblock, line):
# we should make this a date range for everything
if len(line) == 10:
survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
expeditions = models.Expedition.objects.filter(year=line[:4])
if expeditions:
assert len(expeditions) == 1
survexblock.expedition = expeditions[0]
survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
survexblock.save()
2020-06-24 22:46:18 +01:00
def LoadSurvexLineLeg(self, survexblock, svxline, sline, comment):
    """This reads compass, clino and tape data but only keeps the tape lengths,
    the rest is discarded after error-checking.

    BUGFIX: the misread-field handlers previously did 'lclino = error' /
    'lcompass = error' where 'error' is an undefined name, so any leg with
    a missing clino/compass field raised NameError and aborted the import.
    They now use a None sentinel and the angle interpretation is skipped.
    """
    # Check first to see if we are in a splay and abort if so.
    # TO DO splay abort
    stardata = self.stardata
    survexleg = SurvexLeg()

    ls = sline.lower().split()
    # this next fails for two surface survey svx files which use / for decimal point
    # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
    if stardata["type"] == "normal": # should use current flags setting for this
        # print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls,stardata))
        tape = ls[stardata["tape"]]
        # Clean up the two known odd notations: brackets and '/' as decimal point.
        tape = tape.replace("(","")
        tape = tape.replace(")","")
        tape = tape.replace("/",".")
        try:
            survexleg.tape = float(tape)
            self.survexlegsnumber += 1
        except ValueError:
            print(("! Tape misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.tape = 0
        try:
            survexblock.totalleglength += survexleg.tape
            self.survexlegsalllength += survexleg.tape
        except ValueError:
            message = ' ! Value Error: Tape length not added %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)

        try:
            lclino = ls[stardata["clino"]]
        except (IndexError, KeyError):  # narrowed from bare 'except:'
            print(("! Clino misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lclino = None  # BUGFIX: was 'lclino = error' (undefined name)

        try:
            lcompass = ls[stardata["compass"]]
        except (IndexError, KeyError):  # narrowed from bare 'except:'
            print(("! Compass misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lcompass = None  # BUGFIX: was 'lcompass = error' (undefined name)

        if lclino is None or lcompass is None:
            pass  # already logged above; the angles cannot be interpreted
        elif lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print(("! Compass misread in", survexblock.survexfile.path))
                print((" Stardata:", stardata))
                print((" Line:", ls))
                message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass,
                                ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.compass = 1000
                survexleg.clino = -90.0
        else:
            assert self.rx_linelen.match(lcompass), ls
            assert self.rx_linelen.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    # delete the object so that django autosaving doesn't save it.
    survexleg = None
2020-05-13 19:57:07 +01:00
2020-06-24 22:46:18 +01:00
def LoadSurvexRef(self, survexblock, args):
    """Link a survexblock to its survey-scans wallet from a *ref / ;ref value.

    Parses 'YYYY#NN' (optionally with an X marker), normalises the wallet
    number to two digits, and attaches the matching ScansFolder if one
    exists in the DB. Every malformed or unmatched reference is logged
    as a DataIssue.
    """
    # *REF but also ; Ref years from 1960 to 2039
    if len(args) < 4:
        message = " ! Empty or BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
        print((self.insp+message))
        models.DataIssue.objects.create(parser='survex', message=message)
        return

    argsgps = self.rx_argsref.match(args)
    if argsgps:
        yr, letterx, wallet = argsgps.groups()
    else:
        message = " ! BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
        print((self.insp+message))
        models.DataIssue.objects.create(parser='survex', message=message)
        return

    if not letterx:
        letterx = ""
    else:
        letterx = "X"
    if len(wallet) < 2:
        wallet = "0" + wallet
    # BUGFIX: rx_argsref accepts years 1960..2039 inclusive, so the sanity
    # check must too (it previously used strict > 1960 and < 2039, which
    # rejected both endpoint years that the regex allows).
    assert (int(yr) >= 1960 and int(yr) <= 2039), "Wallet year out of bounds: %s" % yr
    refscan = "%s#%s%s" % (yr, letterx, wallet)
    try:
        if int(wallet) > 100:
            message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
    except ValueError:  # BUGFIX: narrowed from bare 'except:'; only int() can fail here
        message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
        print((self.insp+message))
        models.DataIssue.objects.create(parser='survex', message=message)

    manyscansfolders = models_survex.ScansFolder.objects.filter(walletname=refscan)
    if manyscansfolders:
        survexblock.scansfolder = manyscansfolders[0]
        survexblock.save()
        if len(manyscansfolders) > 1:
            message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manyscansfolders), survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
    else:
        message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path)
        print((self.insp+message))
        models.DataIssue.objects.create(parser='survex', message=message)
2020-06-24 22:46:18 +01:00
def LoadSurvexQM(self, survexblock, qmline):
    """Create a QM (question mark: lead still to be explored) from a ;QM comment.

    Spec of QM in SVX files:
    ;Serial number   grade(A/B/C/D/X)   nearest-station   resolution-station   description
    ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
    ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
    """
    insp = self.insp
    qm_no = qmline.group(1)
    qm_grade = qmline.group(2)
    # BUGFIX: initialise up-front so it is always bound even when the
    # resolution-station field is just '-' (group(6) is then None).
    qm_resolve_station = ""
    if qmline.group(3): # usual closest survey station
        qm_nearest = qmline.group(3)
        if qmline.group(4):
            qm_nearest = qm_nearest +"."+ qmline.group(4)
    if qmline.group(6) and qmline.group(6) != '-':
        qm_resolve_station = qmline.group(6)
        if qmline.group(7):
            qm_resolve_station = qm_resolve_station +"."+ qmline.group(7)
    qm_notes = qmline.group(8)

    # NB none of the SurveyStations are in the DB now, so if we want to link to aSurvexStation
    # we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
    try:
        qm = models_caves.QM.objects.create(number=qm_no,
                            # nearest_station=a_survex_station_object, # can be null
                            nearest_station_description=qm_resolve_station,
                            nearest_station_name=qm_nearest,
                            grade=qm_grade.upper(),
                            location_description=qm_notes)
        # BUGFIX: removed the no-op 'qm.save' (missing parentheses) —
        # objects.create() has already persisted the row.
        # message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
        # print(insp+message)
        # models.DataIssue.objects.create(parser='survex', message=message)
    except:
        message = " ! QM{} FAIL to create {} in'{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
        print(insp+message)
        models.DataIssue.objects.create(parser='survex', message=message)
2020-06-25 03:17:56 +01:00
2020-06-27 19:00:26 +01:00
def LoadSurvexDataCmd(self,survexblock,args):
2020-06-27 17:55:59 +01:00
ls = args.lower().split()
stardata = { "type":ls[0] }
for i in range(0, len(ls)):
stardata[self.stardataparamconvert.get(ls[i], ls[i])] = i - 1
self.stardata = stardata
if ls[0] in ["normal", "cartesian", "nosurvey"]:
assert (("from" in stardata and "to" in stardata) or "station" in stardata), args
elif ls[0] == "default":
stardata = self.stardatadefault
else:
assert ls[0] == "passage", args
def LoadSurvexFlags(self, line, cmd):
    """Deliberate no-op handler for *flags commands."""
    # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
    # but this data is only used for sense-checking not to actually calculate anything important
    pass
def IdentifyCave(self, cavepath):
2020-06-29 21:16:13 +01:00
if cavepath.lower() in self.caveslist:
return self.caveslist[cavepath.lower()]
2020-06-28 01:50:34 +01:00
path_match = self.rx_cave.search(cavepath)
2020-06-27 17:55:59 +01:00
if path_match:
2020-06-28 14:42:26 +01:00
sluggy = '{}-{}'.format(path_match.group(1), path_match.group(2))
2020-06-29 21:16:13 +01:00
guesses = [sluggy.lower(), path_match.group(2).lower()]
for g in guesses:
if g in self.caveslist:
self.caveslist[cavepath] = self.caveslist[g]
return self.caveslist[g]
print(' ! Failed to find cave for {}'.format(cavepath.lower()))
2020-06-27 17:55:59 +01:00
else:
2020-06-29 21:16:13 +01:00
print(' ! No regex cave match for %s' % cavepath.lower())
2020-06-27 17:55:59 +01:00
return None
2020-06-29 21:16:13 +01:00
def GetSurvexDirectory(self, headpath):
if not headpath:
return self.svxdirs[""]
if headpath.lower() not in self.svxdirs:
self.svxdirs[headpath.lower()] = models_survex.SurvexDirectory(path=headpath, primarysurvexfile=self.currentsurvexfile)
return self.svxdirs[headpath.lower()]
2020-06-28 14:42:26 +01:00
def LoadSurvexFile(self, includelabel):
    """Creates SurvexFile in the database, and SurvexDirectory if needed
    with links to 'cave'
    Creates a new current survexblock with valid .survexfile and valid .survexdirectory
    The survexblock passed-in is not necessarily the parent. FIX THIS.
    """
    depth = " " * self.depthbegin
    print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, includelabel))

    newfile = models_survex.SurvexFile(path=includelabel)
    headpath, tail = os.path.split(includelabel)
    newdirectory = self.GetSurvexDirectory(headpath)
    newfile.survexdirectory = newdirectory

    # Identify the cave from the directory part of the path, if possible,
    # and link both the directory and the file to it.
    cave = self.IdentifyCave(headpath)
    if cave:
        newdirectory.cave = cave
        newfile.cave = cave

    self.currentsurvexfile.save() # django insists on this although it is already saved !?
    try:
        newdirectory.save()
    except:
        print(newdirectory, file=sys.stderr)
        print(newdirectory.primarysurvexfile, file=sys.stderr)
        raise
    self.currentsurvexfile = newfile
2020-06-28 01:50:34 +01:00
2020-06-28 14:42:26 +01:00
def ProcessIncludeLine(self, included):
    """Enter an included file: make it the current survexfile and push it
    onto the stack so ProcessEdulcniLine can restore the previous one.
    """
    svxid = included.groups()[0]
    self.LoadSurvexFile(svxid)
    self.stacksvxfiles.append(self.currentsurvexfile)
def ProcessEdulcniLine(self, edulcni):
"""Saves the current survexfile in the db
"""
2020-06-28 01:50:34 +01:00
svxid = edulcni.groups()[0]
2020-06-28 14:42:26 +01:00
#depth = " " * self.depthbegin
#print("{:2}{} - Edulcni survexfile:'{}'".format(self.depthbegin, depth, svxid))
self.currentsurvexfile.save()
self.currentsurvexfile = self.stacksvxfiles.pop()
2020-06-24 22:46:18 +01:00
def LoadSurvexComment(self, survexblock, comment):
2020-06-27 17:55:59 +01:00
# ignore all comments except ;ref and ;QM and ;*include (for collated survex file)
2020-06-24 22:46:18 +01:00
refline = self.rx_ref.match(comment)
if refline:
2020-06-27 19:00:26 +01:00
comment = re.sub('(?i)\s*ref[.;]?',"",comment.strip())
2020-06-24 22:46:18 +01:00
self.LoadSurvexRef(survexblock, comment)
2020-06-24 14:10:13 +01:00
2020-06-24 22:46:18 +01:00
qmline = self.rx_qm.match(comment)
if qmline:
self.LoadSurvexQM(survexblock, qmline)
2020-06-27 17:55:59 +01:00
included = self.rx_comminc.match(comment)
# ;*include means we have been included; not 'proceed to include' which *include means
if included:
2020-06-28 14:42:26 +01:00
self.ProcessIncludeLine(included)
2020-06-27 17:55:59 +01:00
edulcni = self.rx_commcni.match(comment)
2020-06-28 01:50:34 +01:00
# ;*edulcni means we are returning from an included file
2020-06-27 17:55:59 +01:00
if edulcni:
2020-06-28 14:42:26 +01:00
self.ProcessEdulcniLine(edulcni)
2020-06-24 22:46:18 +01:00
def LoadSurvexSetup(self, survexblock, survexfile):
    """Reset per-file parser state, report progress, and try to attach a
    cave to the survexfile from its path.
    """
    self.depthbegin = 0
    self.stardata = self.stardatadefault
    blocklegs = self.survexlegsnumber  # kept from original; value not used here
    print(self.insp+" - MEM:{:.3f} Reading. parent:{} <> {} ".format(models.get_process_memory(),survexblock.survexfile.path, survexfile.path))
    self.lineno = 0

    # Progress ticker on stderr: a dot every 10 calls, a newline every 500.
    sys.stderr.flush()
    self.callcount += 1
    if self.callcount % 10 == 0:
        print(".", file=sys.stderr, end='')
    if self.callcount % 500 == 0:
        print("\n", file=sys.stderr, end='')

    # Try to find the cave in the DB if not use the string as before
    m = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if m:
        pos_cave = '%s-%s' % (m.group(1), m.group(2))
        cave = models_caves.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
2020-06-24 22:46:18 +01:00
2020-06-27 17:55:59 +01:00
def RecursiveRecursiveLoad(self, survexblock, survexfile, fin):
2020-06-24 22:46:18 +01:00
"""Follows the *include links in all the survex files from the root file 1623.svx
and reads in the survex blocks, other data and the wallet references (scansfolder) as it
2020-06-27 17:55:59 +01:00
goes. This part of the data include process is where the maximum memory is used and where it
2020-06-24 22:46:18 +01:00
crashes on memory-constrained machines. Begin-end blocks may also be nested.
"""
2020-06-28 14:42:26 +01:00
# self.LoadSurvexSetup(survexblock, survexfile)
# insp =self.insp
# previousnlegs = 0
2020-06-24 22:46:18 +01:00
2020-06-28 14:42:26 +01:00
# svxlines = fin.read().splitlines()
# # cannot close file now as may be recursively called with the same fin if nested *begin-end
2020-06-24 22:46:18 +01:00
2020-06-28 14:42:26 +01:00
# for svxline in svxlines:
# self.lineno += 1
# sline, comment = self.rx_comment.match(svxline.strip()).groups()
# if comment:
# self.LoadSurvexComment(survexblock, comment)
# if not sline:
# continue # skip blank lines
# # detect the star command
# mstar = self.rx_star.match(sline)
# if mstar: # yes we are reading a *cmd
# cmd, args = mstar.groups()
# cmd = cmd.lower()
# if re.match("include$(?i)", cmd):
# cave = self.IdentifyCave(args)
# if cave:
# survexfile.cave = cave
# includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
# print((insp+' - INCLUDE-go path found, including - ' + args))
# includesurvexfile = models_survex.SurvexFile(path=includepath)
# includesurvexfile.save()
# includesurvexfile.SetDirectory()
# if includesurvexfile.exists():
# survexblock.save()
# self.insp += "> "
# #--------------------------------------------------------
# fininclude = includesurvexfile.OpenFile()
# self.RecursiveRecursiveLoad(survexblock, includesurvexfile, fininclude)
# fininclude.close()
# #--------------------------------------------------------
# self.insp = self.insp[2:]
# insp = self.insp
# print((insp+' - INCLUDE-return from include - ' + includepath))
# else:
# print((insp+' ! ERROR *include file not found for %s' % includesurvexfile))
# elif re.match("begin$(?i)", cmd):
# # On a *begin statement we start a new survexblock.
# # There should not be any *include inside a begin-end block, so this is a simple
# # load not a recursive fileload. But there may be many blocks nested to any depth in one file.
# if args:
# newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args))
# # Try to find the cave in the DB if not use the string as before
# path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
# if path_match:
# pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
# # print(insp+pos_cave)
# cave = models_caves.getCaveByReference(pos_cave)
# if cave:
# survexfile.cave = cave
# else:
# print((insp+' - No match (b) for %s' % newsvxpath))
# previousnlegs = self.survexlegsnumber
# name = args.lower()
# print(insp+' - Begin found for:{}, creating new SurvexBlock '.format(name))
# # the recursive call re-reads the entire file. This is wasteful. We should pass in only
# # the un-parsed part of the file.
# survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock,
# survexpath=survexblock.survexpath+"."+name,
# cave=survexfile.cave, survexfile=survexfile,
# legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
# survexblockdown.save()
# survexblock.save()
# survexblock = survexblockdown
# print(insp+" - BLOCK-enter nested *begin/*end block: '{}'".format(name))
# self.insp += "> "
# #--------------------------------------------------------
# self.RecursiveRecursiveLoad(survexblockdown, survexfile, fin)
# #--------------------------------------------------------
# # do not close the file as there may be more blocks in this one
# # and it is re-read afresh with every nested begin-end block.
# self.insp = self.insp[2:]
# insp = self.insp
# else:
# self.depthbegin += 1
# elif re.match("end$(?i)", cmd):
# if self.depthbegin:
# print(insp+" - BLOCK-return from nested *begin/*end block: '{}'".format(args))
# self.depthbegin -= 1
# else:
# legsinblock = self.survexlegsnumber - previousnlegs
# print(insp+" - LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,self.survexlegsnumber))
# survexblock.legsall = legsinblock
# survexblock.save()
# return
# elif cmd == "ref":
# self.LoadSurvexRef(survexblock, args)
# elif cmd == "flags":
# self.LoadSurvexFlags(args, cmd)
# elif cmd == "data":
# self.LoadSurvexDataCmd(survexblock, args)
# elif cmd == "set" and re.match("names(?i)", args):
# pass
# elif re.match("date$(?i)", cmd):
# self.LoadSurvexDate(survexblock, args)
# elif re.match("team$(?i)", cmd):
# self.LoadSurvexTeam(survexblock, args)
# else:
# self.LoadSurvexIgnore(survexblock, args, cmd)
# else: # not a *cmd so we are reading data OR rx_comment failed
# if "from" in self.stardata: # only interested in survey legs
# self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
# else:
# pass # ignore all other sorts of data
pass
2020-06-28 01:50:34 +01:00
def LinearRecursiveLoad(self, survexblock, path, svxlines):
2020-06-27 17:55:59 +01:00
"""Loads a single survex file. Usually used to import all the survex files which have been collated
into a single file. Loads the begin/end blocks recursively.
"""
self.relativefilename = path
cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections
2020-06-28 14:42:26 +01:00
self.currentsurvexfile = survexblock.survexfile
self.currentsurvexfile.save() # django insists on this although it is already saved !?
2020-06-28 01:50:34 +01:00
blockcount = 0
2020-06-28 14:42:26 +01:00
def tickle():
nonlocal blockcount
blockcount +=1
if blockcount % 10 ==0 :
print(".", file=sys.stderr,end='')
if blockcount % 500 ==0 :
print("\n", file=sys.stderr,end='')
sys.stderr.flush();
2020-06-27 17:55:59 +01:00
for svxline in svxlines:
sline, comment = self.rx_comment.match(svxline.strip()).groups()
if comment:
2020-06-28 01:50:34 +01:00
self.LoadSurvexComment(survexblock, comment) # this catches the ;*include and ;*edulcni lines too
2020-06-27 17:55:59 +01:00
if not sline:
continue # skip blank lines
# detect a star command
mstar = self.rx_star.match(sline)
if mstar: # yes we are reading a *cmd
cmd, args = mstar.groups()
cmd = cmd.lower()
2020-06-28 01:50:34 +01:00
# ------------------------BEGIN
2020-06-27 17:55:59 +01:00
if re.match("begin$(?i)", cmd):
self.depthbegin += 1
2020-06-28 01:50:34 +01:00
depth = " " * self.depthbegin
2020-06-28 14:42:26 +01:00
blockid = args.lower()
self.stackbegin.append(blockid)
2020-06-27 17:55:59 +01:00
2020-06-28 01:50:34 +01:00
previousnlegs = self.survexlegsnumber
2020-06-28 14:42:26 +01:00
print("{:2}{} - Begin for :'{}'".format(self.depthbegin,depth, blockid))
pathlist = ""
for id in self.stackbegin:
if len(id) > 0:
pathlist += "." + id
newsurvexblock = models_survex.SurvexBlock(name=blockid, parent=survexblock,
survexpath=pathlist,
2020-06-28 01:50:34 +01:00
cave=self.currentcave, survexfile=self.currentsurvexfile,
legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
2020-06-28 14:42:26 +01:00
survexblock = newsurvexblock
survexblock.survexfile.save() # django insists on this although it is already saved !?
survexblock.save() # django insists on this , but we want to save at the end !
tickle()
2020-06-28 01:50:34 +01:00
# ---------------------------END
2020-06-27 17:55:59 +01:00
elif re.match("end$(?i)", cmd):
2020-06-28 01:50:34 +01:00
depth = " " * self.depthbegin
self.currentsurvexblock = survexblock.parent
print("{:2}{} - End from:'{}'".format(self.depthbegin,depth,args))
legsinblock = self.survexlegsnumber - previousnlegs
print("{:2}{} - LEGS: {} (previous: {}, now:{})".format(self.depthbegin,
depth,legsinblock,previousnlegs,self.survexlegsnumber))
survexblock.legsall = legsinblock
2020-06-28 14:42:26 +01:00
try:
survexblock.parent.save() # django insists on this although it is already saved !?
except:
print(survexblock.parent, file=sys.stderr)
raise
try:
survexblock.save() # save to db at end of block
except:
print(survexblock, file=sys.stderr)
raise
blockid = self.stackbegin.pop()
2020-06-28 01:50:34 +01:00
self.depthbegin -= 1
2020-06-27 17:55:59 +01:00
2020-06-28 01:50:34 +01:00
# -----------------------------
elif re.match("(?i)title$", cmd):
2020-06-28 14:42:26 +01:00
survexblock.title = args # only apply to current survexblock
2020-06-28 01:50:34 +01:00
elif re.match("(?i)ref$", cmd):
2020-06-27 17:55:59 +01:00
self.LoadSurvexRef(survexblock, args)
2020-06-28 01:50:34 +01:00
elif re.match("(?i)flags$", cmd):
2020-06-27 17:55:59 +01:00
self.LoadSurvexFlags(args, cmd)
2020-06-28 01:50:34 +01:00
elif re.match("(?i)data$", cmd):
2020-06-27 17:55:59 +01:00
self.LoadSurvexDataCmd(survexblock, args)
2020-06-28 01:50:34 +01:00
elif re.match("(?i)date$", cmd):
2020-06-27 17:55:59 +01:00
self.LoadSurvexDate(survexblock, args)
2020-06-28 01:50:34 +01:00
elif re.match("(?i)team$", cmd):
2020-06-27 17:55:59 +01:00
self.LoadSurvexTeam(survexblock, args)
2020-06-28 01:50:34 +01:00
elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
2020-06-27 17:55:59 +01:00
pass
2020-06-28 01:50:34 +01:00
elif re.match("(?i)include$", cmd):
2020-06-27 17:55:59 +01:00
message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
print(message)
print(message,file=sys.stderr)
models.DataIssue.objects.create(parser='survex', message=message)
else:
self.LoadSurvexIgnore(survexblock, args, cmd)
else: # not a *cmd so we are reading data OR rx_comment failed
if "from" in self.stardata: # only interested in survey legs
self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
else:
pass # ignore all other sorts of data
def RecursiveScan(self, survexblock, survexfile, fin, flinear, fcollate):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads only the *include and *begin and *end statements. It produces a linearised
    list of the include tree

    Side effects: appends every visited file to self.svxfileslist, writes a
    human-readable tree to flinear, and writes a single collated survex file
    to fcollate in which each file's contents are bracketed by ;*include and
    ;*edulcni comment markers for the later linear loading pass.
    """
    indent = " " * self.depthinclude
    # Progress ticker on stderr: a dot every 10 calls, a newline every 500.
    sys.stderr.flush();
    self.callcount +=1
    if self.callcount % 10 ==0 :
        print(".", file=sys.stderr,end='')
    if self.callcount % 500 ==0 :
        print("\n", file=sys.stderr,end='')

    # Guard against include cycles: warn on a repeat visit, abort after 20.
    if survexfile in self.svxfileslist:
        message = " * Warning. Survex file already seen: {}".format(survexfile.path)
        print(message)
        print(message,file=flinear)
        print(message,file=sys.stderr)
        models.DataIssue.objects.create(parser='survex', message=message)
        if self.svxfileslist.count(survexfile) > 20:
            message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(survexfile.path)
            print(message)
            print(message,file=flinear)
            print(message,file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            return
    self.svxfileslist.append(survexfile)

    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        self.lineno += 1
        # *include lines are NOT copied into the collated file; the included
        # file's contents (bracketed by markers) are written in their place.
        includestmt =self.rx_include.match(svxline)
        if not includestmt:
            fcollate.write("{}\n".format(svxline))

        sline, comment = self.rx_comment.match(svxline.strip()).groups()
        mstar = self.rx_star.match(sline)
        if mstar: # yes we are reading a *cmd
            cmd, args = mstar.groups()
            cmd = cmd.lower()
            if re.match("(?i)include$", cmd):
                includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
                includesurvexfile = models_survex.SurvexFile(path=includepath)
                if includesurvexfile.exists():
                    # do not create SurvexFile in DB here by doing includesurvexfile.save(). Do it when reading data.
                    #--------------------------------------------------------
                    self.depthinclude += 1
                    fininclude = includesurvexfile.OpenFile()
                    # Bracket the included file's contents with markers so the
                    # linear pass can reconstruct the file boundaries later.
                    fcollate.write(";*include {}\n".format(includesurvexfile.path))
                    flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                    push = includesurvexfile.path.lower()
                    self.stackinclude.append(push)
                    self.RecursiveScan(survexblock, includesurvexfile, fininclude, flinear, fcollate)
                    pop = self.stackinclude.pop()
                    # Sanity check: the stack must unwind in strict LIFO order.
                    if pop != push:
                        message = "!!!!!!! ERROR pop != push {} != {} {}".format(pop, push, self.stackinclude)
                        print(message)
                        print(message,file=flinear)
                        print(message,file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    # 'edulcni' is 'include' reversed: marks the return from the file.
                    flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                    fcollate.write(";*edulcni {}\n".format(includesurvexfile.path))
                    fininclude.close()
                    self.depthinclude -= 1
                    #--------------------------------------------------------
                else:
                    message = " ! ERROR *include file not found for {}".format(includesurvexfile)
                    print(message)
                    print(message,file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
            elif re.match("(?i)begin$", cmd):
                # Track *begin/*end nesting only for the linearised report;
                # the blocks themselves are created in the later linear pass.
                self.depthbegin += 1
                depth = " " * self.depthbegin
                if args:
                    pushargs = args
                else:
                    pushargs = " "
                self.stackbegin.append(pushargs.lower())
                flinear.write(" {:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                pass
            elif re.match("(?i)end$", cmd):
                depth = " " * self.depthbegin
                flinear.write(" {:2} {} *end {}\n".format(self.depthbegin, depth, args))
                if not args:
                    args = " "
                popargs = self.stackbegin.pop()
                # Sanity check: *end name must match the matching *begin name.
                if popargs != args.lower():
                    message = "!!!!!!! ERROR BEGIN/END pop != push {} != {}\n{}".format(popargs, args, self. stackbegin)
                    print(message)
                    print(message,file=flinear)
                    print(message,file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
                self.depthbegin -= 1
                pass
            elif re.match("(?i)title$", cmd):
                depth = " " * self.depthbegin
                flinear.write(" {:2} {} *title {}\n".format(self.depthbegin, depth, args))
                pass
2020-06-27 17:55:59 +01:00
2020-06-27 18:00:24 +01:00
def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find all survex files,
    collating them into a single large .svx file, then loads all the survey
    data from that collated file in one linear pass.

    Returns a tuple (survexlegsnumber, survexlegsalllength).
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file so the very noisy scan output is captured
    sys.stdout = open('svxblks.log', 'w')
    print(' - SCANNING All Survex Blocks...',file=sys.stderr)

    survexfileroot = survexblockroot.survexfile
    collatefilename = "_" + survexfileroot.path + ".svx"

    # Phase 1: recursively follow *include directives, appending every survex
    # file into one collated file and writing a linear log of the include tree.
    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    indent=""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0),file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0,survexfileroot.path))

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    #----------------------------------------------------------------
    svx_scan.RecursiveScan(survexblockroot, survexfileroot, finroot, flinear, fcollate)
    #----------------------------------------------------------------
    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))

    mem1 = models.get_process_memory()
    flinear.write("\n - MEM:{:.2f} MB STOP {}\n".format(mem1,survexfileroot.path))
    flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1-mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None  # release the scanner and its caches
    print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)),file=sys.stderr)

    mem1 = models.get_process_memory()
    # BUGFIX: previously printed mem0 (the START figure) against the END label.
    print(" - MEM:{:7.2f} MB END ".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)
    svxfileslist = [] # free memory
    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
    # look in MapLocations() for how we find the entrances

    # Phase 2: parse the collated file in a single linear pass.
    print('\n - Loading All Survex Blocks (LinearRecursive)',file=sys.stderr)
    svx_load = LoadingSurvex()
    svx_load.svxdirs[""] = survexfileroot.survexdirectory
    with open(collatefilename, "r") as fcollate:
        svxlines = fcollate.read().splitlines()
        #----------------------------------------------------------------
        svx_load.LinearRecursiveLoad(survexblockroot,survexfileroot.path, svxlines)
        #----------------------------------------------------------------

    # BUGFIX: measure memory *after* the load phase, so that the STOP/USED
    # figures report this phase rather than stale values from the scan phase.
    mem1 = models.get_process_memory()
    print("\n - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)
    survexlegsnumber = svx_load.survexlegsnumber
    survexlegsalllength = svx_load.survexlegsalllength
    svx_load = None  # release the loader

    # Close the logging file, restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig

    return (survexlegsnumber, survexlegsalllength)
2020-06-29 21:16:13 +01:00
def MakeSurvexFileRoot():
    """Create, save and return the root SurvexFile for the whole dataset,
    together with its root SurvexDirectory."""
    fileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    fileroot.save()
    dirroot = models_survex.SurvexDirectory(path=settings.EXPOWEB, cave=None, primarysurvexfile=fileroot)
    dirroot.save()
    # mutually dependent objects need a double-save like this
    fileroot.survexdirectory = dirroot
    fileroot.save()
    return fileroot
2020-06-27 18:00:24 +01:00
def LoadSurvexBlocks():
    """Flush every survex-derived table and its DataIssues, rebuild the root
    file/block objects, then re-parse all the survex data and store the
    leg totals on the root block."""
    print(' - Flushing All Survex Blocks...')
    for survexmodel in (models_survex.SurvexBlock,
                        models_survex.SurvexFile,
                        models_survex.SurvexDirectory,
                        models_survex.SurvexPersonRole,
                        models_survex.SurvexStation):
        survexmodel.objects.all().delete()

    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = MakeSurvexFileRoot()
    # this is the first so id=1
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="",
                                                cave=None, survexfile=survexfileroot,
                                                legsall=0, legssplay=0, legssurfc=0,
                                                totalleglength=0.0)
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = models.get_process_memory()
    survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))

    # Record the overall totals on the root block
    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')
# Matches one line of a survex .pos file: "( x, y, z ) station.name"
# with three signed decimal coordinates captured, plus the station id.
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
def LoadPos():
    """First load the survex stations for entrances and fixed points (about 600) into the database.
    Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
    If we don't have it in the database, print an error message and discard it.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)

    # Only station ids matching an entrance/fixed-point location are of interest
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid]=True

    # Find the root block before opening the .pos file, so a lookup failure
    # cannot leak the file handle.
    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except Exception:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except Exception:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise

    # BUGFIX: context manager guarantees the .pos file is closed.
    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline() #Drop header
        for line in posfile.readlines():
            r = poslineregex.match(line)
            if not r:
                continue
            x, y, z, stnid = r.groups() # renamed from 'id': don't shadow the builtin
            for sid in mappoints:
                if stnid.endswith(sid):
                    blockpath = "." + stnid[:-len(sid)].strip(".")
                    # Matching stations to a SurvexBlock via 'blockpath' was abandoned
                    # (the dead 'if False:' code removed here): mostly these stations
                    # don't actually appear in any SVX file, so every station is
                    # attached to the root block and matched up via the cave data.
                    try:
                        ss = models_survex.SurvexStation(name=stnid, block=survexblockroot)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except Exception:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise
    print(" - {} SurvexStation entrances found.".format(found))