# forked from expo/troggle
import os
import re
from datetime import datetime
from subprocess import call, Popen, PIPE

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.core.models as models
import troggle.settings as settings
from troggle.parsers.people import GetPersonExpeditionNameLookup

# A numeric survey-leg field: digits, sign and decimal point only (e.g. "123.45", "-3").
line_leg_regex = re.compile(r"[\d\-+.]+$")
def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    """Parse one survex data line as a survey leg and save it to the DB.

    sline is a whitespace-separated data line; stardata maps field names
    ("from", "to", "tape", "compass", "clino") to column indices as set up
    by the most recent *data command.  Misread numeric fields are recorded
    with sentinel values (tape=1000, compass=1000) rather than aborting the
    whole import.  Also adds the tape length to survexblock.totalleglength.
    """
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()
    ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    if stardata["type"] == "normal":
        try:
            survexleg.tape = float(ls[stardata["tape"]])
        except ValueError:
            print("Tape misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            survexleg.tape = 1000  # sentinel: obviously-wrong length
        try:
            lclino = ls[stardata["clino"]]
        except (IndexError, KeyError):
            # BUGFIX: the original did "lclino = error" where 'error' is an
            # undefined name, so this path raised NameError.  Use a sentinel
            # string instead, handled explicitly below.
            print("Clino misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            lclino = "error"
        try:
            lcompass = ls[stardata["compass"]]
        except (IndexError, KeyError):
            # BUGFIX: same undefined-name bug as the clino field above.
            print("Compass misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            lcompass = "error"
        if lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print("Compass misread in", survexblock.survexfile.path)
                print("Stardata:", stardata)
                print("Line:", ls)
                survexleg.compass = 1000  # sentinel: obviously-wrong bearing
            survexleg.clino = -90.0
        elif lclino == "error" or lcompass == "error":
            # A field was missing from the line: record sentinels rather than
            # letting the asserts below abort the import.
            survexleg.compass = 1000
            survexleg.clino = -90.0
        else:
            assert line_leg_regex.match(lcompass), ls
            assert line_leg_regex.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave

    # only save proper legs
    survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
        except ValueError:
            print("Length not added")
    survexblock.save()
|
def LoadSurvexEquate(survexblock, sline):
    """Register every station named on a *equate line with this block.

    A survex *equate needs at least two station names to make sense, so an
    AssertionError is raised on a malformed line.
    """
    station_names = sline.split()
    assert len(station_names) > 1
    for station_name in station_names:
        survexblock.MakeSurvexStation(station_name)
|
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    # Passage (cross-section) data lines are recognised but not yet imported.
    pass
|
# Default *data layout: a normal centreline leg with the standard column order.
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
# Alternative survex field names mapped onto the canonical ones used above.
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

# Split a survex line into (code, comment-after-semicolon).
regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
# A ";ref" comment pointing at a scans wallet, e.g. "ref 2009#12".
regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
# A star command: "*cmd args" (trailing comment stripped).
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
# BUGFIX: these two patterns had a trailing "(?i)" global flag, which is only
# valid at the start of a pattern and raises re.error from Python 3.11;
# pass re.IGNORECASE explicitly instead (same behaviour).
regex_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$", re.IGNORECASE)
regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$", re.IGNORECASE)
# Question-mark comment: ";QM<n> <grade> <from-station> <resolution-station|-> <description>".
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
|
def RecursiveLoad(survexblock, survexfile, fin, textlines):
    """Parse a survex file into SurvexBlock/SurvexLeg/QM/... database objects.

    Recurses into *include'd files (with a fresh file handle) and into named
    *begin blocks (sharing the same handle; the matching *end returns).

    survexblock -- the block currently being filled in
    survexfile  -- the SurvexFile model for the file being read
    fin         -- open file handle positioned at the next line to read
    textlines   -- accumulator for the raw text of the block
    """
    # Depth of anonymous *begin blocks: they create no SurvexBlock, so their
    # matching *end must not pop out of this call.
    iblankbegins = 0
    stardata = stardatadefault
    teammembers = [ ]

    print(" - Reading file: " + survexblock.survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Try to find the cave in the DB; if not, keep using the path string as before.
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave

    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        lineno += 1

        # break the line at the comment
        sline, comment = regex_comment.match(svxline.strip()).groups()

        # detect ref line pointing to the scans directory
        mref = comment and regex_ref.match(comment)
        if mref:
            refscan = "%s#%s" % (mref.group(1), mref.group(2))
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            print(' - Wallet *ref - %s' % refscan)
            continue

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match regex_qm
        # ;Serial number  grade(A/B/C/D/X)  nearest-station  resolution-station  description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and regex_qm.match(comment)
        if qmline:
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            # If the QM isn't resolved (no resolving station yet) then load it.
            # BUGFIX: this used "qm_resolve_section is not '-'" -- an identity
            # test against a literal -- which made the condition always true,
            # so resolved QMs were imported too, contradicting the else branch.
            if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunk, named)
                if len(from_section) > 0:
                    from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        qm = models.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                # QM found but already resolved: nothing to import
                pass

        if not sline:
            continue

        # detect the star command
        mstar = regex_star.match(sline)
        if not mstar:
            # Not a star command, so it is a data line for the current *data format.
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            # Missing "station" in stardata.
            continue

        # handle the star command; cmd is lower-cased, so plain equality tests
        # replace the original re.match("...$(?i)", cmd) calls (whose trailing
        # global flag is a re.error from Python 3.11).
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if cmd == "include":
            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
            print(' - Include file found including - ' + includepath)
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                cave = models.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print(' - No match (i) for %s' % includepath)
            includesurvexfile = models.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)

        elif cmd == "begin":
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    cave = models.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print(' - No match (b) for %s' % newsvxpath)

                name = line.lower()
                print(' - Begin found for: ' + name)
                survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                textlinesdown = [ ]
                # The recursive call consumes lines up to the matching *end.
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
            else:
                iblankbegins += 1

        elif cmd == "end":
            if iblankbegins:
                iblankbegins -= 1
            else:
                survexblock.text = "".join(textlines)
                survexblock.save()
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                return

        elif cmd == "date":
            if len(line) == 10:
                # e.g. "2018.07.24" or "2018-07-24" -> timezone-aware datetime
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()

        elif cmd == "team":
            mteammember = regex_team.match(line)
            if mteammember:
                for tm in regex_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person=personexpedition.person
                            personrole.save()

        elif cmd == "title":
            survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()

        elif cmd == "require":
            # should we check survex version available for processing?
            pass

        elif cmd == "data":
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            # Column i-1 of each subsequent data line holds the field named
            # ls[i] (ls[0] is the data style itself).
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line

        elif cmd == "equate":
            LoadSurvexEquate(survexblock, line)

        elif cmd == "fix":
            survexblock.MakeSurvexStation(line.split()[0])

        else:
            if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
                           "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
                print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
    endstamp = datetime.now()
    timetaken = endstamp - stamp
|
def LoadAllSurvexBlocks():
    """Flush every survex-derived object from the database, then re-import
    the whole dataset starting from the top-level file named in settings."""
    print(' - Flushing All Survex Blocks...')

    # Same deletion order as before, just expressed as one pass over the models.
    for model in (models.SurvexBlock, models.SurvexFile, models.SurvexDirectory,
                  models.SurvexEquate, models.SurvexLeg, models.SurvexTitle,
                  models.SurvexPersonRole, models.SurvexStation):
        model.objects.all().delete()

    print(" - Data flushed")
    print(' - Loading All Survex Blocks...')

    survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfile.save()
    survexfile.SetDirectory()

    # Load everything beneath a synthetic "root" block.
    survexblockroot = models.SurvexBlock(name="root", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()
    print(' - Loaded All Survex Blocks.')
|
# Matches one line of a survex .pos file: "( x, y, z ) station.name".
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
|
def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates.
    If we don't have it in the database, print an error message and discard it.

    Station names not found on a previous run are cached in a 'posnotfound'
    file and skipped while that cache is newer than the top-level .svx file.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print(' - Generating a list of Pos from %s.svx and then loading...' % (topdata))

    # Be careful with the cache file.
    # If LoadPos has been run before,
    # but without cave import being run before,
    # then *everything* may be in the fresh 'not found' cache file.
    cachefile = settings.SURVEX_DATA + "posnotfound"
    notfoundbefore = {}
    if os.path.isfile(cachefile):
        updtsvx = os.path.getmtime(topdata + ".svx")
        updtcache = os.path.getmtime(cachefile)
        age = updtcache - updtsvx
        print('   svx: %s  cache: %s  cache age: %s' % (updtsvx, updtcache, age))
        # NB: the original used Python-2 print statements in this function,
        # which are syntax errors under Python 3; converted to print().
        if age < 0:
            print("   cache is stale.")
            os.remove(cachefile)
        else:
            print("   cache is fresh.")
            try:
                # BUGFIX: the original kept the trailing newline in the key
                # (so lookups by station name below never matched) and did
                # notfoundbefore[line] += 1, which raised KeyError on every
                # first occurrence.  Strip and count with get() instead.
                with open(cachefile, "r") as f:
                    for line in f:
                        name = line.strip()
                        if name:
                            notfoundbefore[name] = notfoundbefore.get(name, 0) + 1  # should not be duplicates
            except IOError:
                print("   FAILURE READ opening cache file %s" % (cachefile))

    notfoundnow = []
    found = 0
    skip = {}
    print("\n")  # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd=settings.SURVEX_DATA)
    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline()  # Drop header
        for line in posfile:
            r = poslineregex.match(line)
            if not r:
                continue
            x, y, z, name = r.groups()  # easting, northing, altitude, station name
            if name in notfoundbefore:
                # BUGFIX: was "skip[name] += 1", a KeyError on first hit.
                skip[name] = skip.get(name, 0) + 1
            else:
                try:
                    ss = models.SurvexStation.objects.lookup(name)
                    ss.x = float(x)
                    ss.y = float(y)
                    ss.z = float(z)
                    ss.save()
                    found += 1
                except Exception:
                    # Station not in the DB: remember it so the cache can
                    # skip it next time.
                    notfoundnow.append(name)
    print(" - %s stations NOT found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow), found, len(skip)))

    if found > 10:  # i.e. a previous cave import has been done
        try:
            with open(cachefile, "w") as f:
                print("   cache file opened")
                for i in notfoundnow:
                    f.write("%s\n" % i)
                for j in skip:
                    f.write("%s\n" % j)  # NB skip not notfoundbefore
        except IOError:
            print("   FAILURE WRITE opening cache file %s" % (cachefile))