2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-24 16:21:53 +00:00
troggle/parsers/survex.py

621 lines
28 KiB
Python
Raw Normal View History

import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware
import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.core.views_caves import MapLocations
"""A 'survex block' is a *begin...*end set of cave data.
A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet".
"""

# Strips enclosing parentheses, e.g. a tape reading written as "(06.05)".
rx_braskets= re.compile(r"[()]")
# A token that looks like a signed decimal number; used to sanity-check
# compass/clino fields before float() conversion.
rx_line_length = re.compile(r"[\d\-+.]+$")

# Running totals accumulated while parsing: sum of tape lengths and leg count.
survexlegsalllength = 0.0
survexlegsnumber = 0

# Module-level placeholder for the root block. NOTE(review): the functions
# below bind their own local names; this global is never reassigned here.
survexblockroot = None
def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    """Parse one survey data line (a 'leg') according to the current *data
    format and accumulate its tape length into the global running totals.

    survexblock - SurvexBlock the leg belongs to
    stardata    - mapping of field name -> token index from the last *data command
    sline       - the survex line with any trailing comment already stripped
    comment     - the comment text of the line (unused here)
    cave        - Cave to attach to the leg, or None

    Misread fields are reported as DataIssue records rather than raised.
    The SurvexLeg object itself is deliberately discarded (not saved).
    """
    global survexlegsalllength
    global survexlegsnumber
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()

    #ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    #ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    # survexleg = models_survex.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    survexleg = models_survex.SurvexLeg()

    # this next fails for two surface survey svx files which use / for decimal point
    # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
    if stardata["type"] == "normal":
        tape = rx_braskets.sub("", ls[stardata["tape"]])
        tape = tape.replace("/", ".")
        try:
            survexleg.tape = float(tape)
            survexlegsnumber += 1
        except ValueError:
            print(("! Tape misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.tape = 0
        try:
            lclino = ls[stardata["clino"]]
        except (IndexError, KeyError):
            # line too short, or this *data format has no clino field
            print(("! Clino misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lclino = "error"  # was the bare name 'error', which raised NameError
        try:
            lcompass = ls[stardata["compass"]]
        except (IndexError, KeyError):
            print(("! Compass misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lcompass = "error"  # was the bare name 'error', which raised NameError
        if lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print(("! Compass misread in", survexblock.survexfile.path))
                print((" Stardata:", stardata))
                print((" Line:", ls))
                message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.compass = 1000
            # NOTE(review): -90 looks odd for a level leg, but survexleg is
            # discarded below so the value is never persisted
            survexleg.clino = -90.0
        else:
            assert rx_line_length.match(lcompass), ls
            assert rx_line_length.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)
    if cave:
        survexleg.cave = cave

    # only save proper legs
    # No need to save as we are measuring lengths only on parsing now.
    # delete the object so that django autosaving doesn't save it.
    survexleg = None
    #survexleg.save()

    # accumulate lengths regardless of the *data format, if a tape field exists
    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
            survexlegsalllength += float(ls[itape])
        except ValueError:
            print("! Length not added")
    # No need to save as we are measuring lengths only on parsing now.
    #survexblock.save()
def LoadSurvexEquate(survexblock, sline):
    """Handle a survex *equate line: register each named station on the
    current block so that every equated point exists in the database."""
    station_names = sline.split()
    # an equate must join at least two stations
    assert len(station_names) > 1
    for station_name in station_names:
        survexblock.MakeSurvexStation(station_name)
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    """Ignore *data passage lines: LRUD wall data carries no
    tape/compass/clino measurements, so there is nothing to load."""
    return None
# This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
# survex parameter names mapped onto the internal field names used in stardata
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

# split a raw svx line into (data, comment) at the first semicolon
rx_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
# a ";ref ..." comment pointing at a scans wallet: year # optional X # number
rx_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(X)?\s*(\d+)')
# a star command: "*cmd args", with any trailing comment stripped
rx_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')

# years from 1960 to 2039
# NB case-insensitivity is supplied via re.IGNORECASE: the original trailing
# inline "(?i)" is deprecated since Python 3.6 and a re.error from 3.11 on.
rx_starref = re.compile(r'^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$', re.IGNORECASE)
# rx_starref = re.compile("""?x # VERBOSE mode - can't get this to work
# ^\s*\*ref # look for *ref at start of line
# [\s.:]* # some spaces, stops or colons
# ((?:19[6789]\d)|(?:20[0123]\d)) # a date from 1960 to 2039 - captured as one field
# \s*# # spaces then hash separator
# ?\s*(X) # optional X - captured
# ?\s*(.*?\d+.*?) # maybe a space, then at least one digit in the string - captured
# $(?i)""", re.X) # the end (do the whole thing case insensitively)

# a "*team <role> <names>" line: role word, then the rest of the line
rx_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$", re.IGNORECASE)
# separators between team member names
rx_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$", re.IGNORECASE)
# a QM comment: serial, grade(A/B/C/D/X), nearest-station, resolution-station (or -), description
rx_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')

# indent prefix for log messages; grows by "> " per *include recursion level
insp = ""

# counter used to emit a progress dot to stderr every 10 files read
callcount = 0
def RecursiveLoad(survexblock, survexfile, fin, textlines):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads in the survex blocks, other data and the wallet references (survexscansfolder) as it
    goes. This part of the data import process is where the maximum memory is used and where it
    crashes on memory-constrained machines.

    survexblock - SurvexBlock being filled from the lines read
    survexfile  - SurvexFile currently being read
    fin         - open file handle for survexfile
    textlines   - accumulator for block text (currently unused)
    """
    iblankbegins = 0        # depth of anonymous *begin..*end pairs handled without recursing
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]
    # legs total at the most recent named *begin in this frame.
    # NOTE(review): this was previously unset before the first *begin, which
    # would NameError on a frame's terminating *end; initialised to 0 here.
    previousnlegs = 0
    global insp
    global callcount
    global survexlegsnumber

    print(insp+" - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path)
    stamp = datetime.now()
    lineno = 0

    sys.stderr.flush()
    callcount += 1
    if callcount >= 10:
        # progress dot on the real stderr every 10 files (stdout is redirected to a log)
        callcount = 0
        print(".", file=sys.stderr, end='')

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models_caves.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
    svxlines = fin.read().splitlines()

    for svxline in svxlines:
        lineno += 1
        # break the line at the comment
        sline, comment = rx_comment.match(svxline.strip()).groups()

        # detect ref line pointing to the scans directory
        mref = comment and rx_ref.match(comment)
        if mref:
            yr, letterx, wallet = mref.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet) < 2:
                wallet = "0" + wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet)
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet ; ref {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match rx_qm
        # ;Serial number  grade(A/B/C/D/X)  nearest-station  resolution-station  description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and rx_qm.match(comment)
        if qmline:
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)
            # Load the QM only if it is unresolved (no resolving station given).
            # NB the original test chained '!=' comparisons with 'or' and so was
            # always true; this now matches the intent stated in the comments.
            if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
                from_section = models_survex.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunck, named)
                if len(from_section) > 0:
                    from_station = models_survex.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        qm = models_caves.QM.objects.create(number=qm_no,
                                                            nearest_station=from_station[0],
                                                            grade=qm_grade.upper(),
                                                            location_description=qm_notes)
            else:
                # print(insp+' - QM found but resolved')
                pass

        if not sline:
            continue

        # detect the star ref command
        mstar = rx_starref.match(sline)
        if mstar:
            yr, letterx, wallet = mstar.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet) < 2:
                wallet = "0" + wallet
            assert (int(yr) > 1960 and int(yr) < 2039), "Wallet year out of bounds: %s" % yr
            assert (int(wallet) < 100), "Wallet number more than 100: %s" % wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet)
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet *REF {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            continue

        # detect the star command
        mstar = rx_star.match(sline)
        if not mstar:
            # a plain data line: interpret according to the current *data format
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            #Missing "station" in stardata.
            continue

        # dispatch on the star command. cmd is lowercased, so plain re.match
        # suffices; the original trailing inline "(?i)" flags are a re.error
        # from Python 3.11 onwards.
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if re.match("include$", cmd):
            includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line)))
            print((insp+' - Include path found including - ' + includepath))
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(insp+pos_cave)
                cave = models_caves.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print((insp+' - No match in DB (i) for %s, so loading..' % includepath))
            includesurvexfile = models_survex.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                insp += "> "
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)
                insp = insp[2:]
        elif re.match("begin$", cmd):
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    # print(insp+pos_cave)
                    cave = models_caves.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print((insp+' - No match (b) for %s' % newsvxpath))

                previousnlegs = survexlegsnumber
                name = line.lower()
                print((insp+' - Begin found for: ' + name))
                # survexblockdown = models_survex.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock, survexpath=survexblock.survexpath+"."+name,
                                                            cave=survexfile.cave, survexfile=survexfile, legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                textlinesdown = [ ]
                insp += "> "
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
                insp = insp[2:]
            else:
                # an anonymous *begin: just track the depth, no new block
                iblankbegins += 1
        elif re.match("end$", cmd):
            if iblankbegins:
                iblankbegins -= 1
            else:
                # .text not used, using it for number of legs per block
                legsinblock = survexlegsnumber - previousnlegs
                print(insp+"LEGS: {} (previous: {}, now:{})".format(legsinblock, previousnlegs, survexlegsnumber))
                survexblock.legsall = legsinblock
                survexblock.save()
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                return
        elif re.match("date$", cmd):
            if len(line) == 10:
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()
        elif re.match("team$", cmd):
            # print(insp+' - Team found: ')
            mteammember = rx_team.match(line)
            if mteammember:
                for tm in rx_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person = personexpedition.person
                            personrole.save()
        elif cmd == "title":
            survextitle = models_survex.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()
        elif cmd == "require":
            # should we check survex version available for processing?
            pass
        elif cmd == "data":
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line
        elif cmd == "equate":
            LoadSurvexEquate(survexblock, line)
        elif cmd == "set" and re.match("names", line, re.IGNORECASE):
            pass
        elif cmd == "flags":
            # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
            # but this data is only used for sense-checking not to actually calculate anything important
            pass
        elif cmd == "fix":
            survexblock.MakeSurvexStation(line.split()[0])
        elif cmd in ["alias", "calibrate", "cs", "entrance", "export", "case",
                     "declination", "infer", "instrument", "sd", "units"]:
            # we ignore all these, which is fine.
            pass
        else:
            if cmd not in ["include", "data", "flags", "title", "set", "ref"]:
                message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(insp+' - Time to process: ' + str(timetaken))
def LoadAllSurvexBlocks():
    """Flush all survex-derived objects from the database, then reload
    everything by recursively parsing from the top-level survex file.
    Verbose parsing output goes to loadsurvexblks.log via stdout redirection;
    stdout is restored even if parsing raises."""
    global survexlegsalllength
    global survexlegsnumber

    print(' - Flushing All Survex Blocks...')
    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexEquate.objects.all().delete()
    #models_survex.SurvexLeg.objects.all().delete()
    models_survex.SurvexTitle.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()
    print(" - Data flushed")
    # Clear the data issues as we are reloading
    models.DataIssue.objects.filter(parser='survex').delete()

    print(' - Loading All Survex Blocks...')
    print(' - redirecting stdout to loadsurvexblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('loadsurvexblks.log', 'w')
    try:
        survexfile = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
        survexfile.save()
        survexfile.SetDirectory()
        #Load all
        # this is the first so id=1
        survexblockroot = models_survex.SurvexBlock(name="rootblock", survexpath="", cave=None, survexfile=survexfile,
                                                    legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
        survexblockroot.save()
        fin = survexfile.OpenFile()
        textlines = [ ]
        # The real work starts here
        RecursiveLoad(survexblockroot, survexfile, fin, textlines)
        fin.close()
        survexblockroot.totalleglength = survexlegsalllength
        survexblockroot.legsall = survexlegsnumber
        #survexblockroot.text = "".join(textlines) these are all blank
        survexblockroot.save()
    finally:
        # Restore sys.stdout even if parsing raised, so later output is visible
        sys.stdout.close()
        print("+", file=sys.stderr)
        sys.stderr.flush()
        sys.stdout = stdout_orig

    # NB the leg count previously carried a spurious "m" (metres) suffix
    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')
# Matches one line of survex 3dtopos .pos output: "( x, y, z ) station.name"
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates.
    If we don't have it in the database, print an error message and discard it.

    This is ONLY ever used for entrance and fixedpts locations for the prospecting map:
    about 600 points out of 32,000.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    # TO DO - remove the cache file apparatus. Not needed. Only laser points and entrances loaded now.
    # Be careful with the cache file.
    # If LoadPos has been run before,
    # but without cave import being run before,
    # then *everything* may be in the fresh 'not found' cache file.
    # cachefile = settings.SURVEX_DATA + "posnotfound.cache"
    # notfoundbefore = {}
    # if os.path.isfile(cachefile):
    #     # this is not a good test. 1623.svx may never change but *included files may have done.
    #     # When the *include is unrolled, we will be able to get a proper timestamp to use
    #     # and can increase the timeout from 3 days to 30 days.
    #     updtsvx = os.path.getmtime(topdata + ".svx")
    #     updtcache = os.path.getmtime(cachefile)
    #     age = updtcache - updtsvx
    #     print((' svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) ))))
    #     now = time.time()
    #     if now - updtcache > 3*24*60*60:
    #         print("  cache is more than 3 days old. Deleting.")
    #         os.remove(cachefile)
    #     elif age < 0 :
    #         print("  cache is stale.  Deleting.")
    #         os.remove(cachefile)
    #     else:
    #         print("  cache is fresh. Reading...")
    #         try:
    #             with open(cachefile, "r") as f:
    #                 for line in f:
    #                     l = line.rstrip()
    #                     if l in notfoundbefore:
    #                         notfoundbefore[l] +=1 # should not be duplicates
    #                         print("  DUPLICATE ", line, notfoundbefore[l])
    #                     else:
    #                         notfoundbefore[l] =1
    #         except:
    #             print("  FAILURE READ opening cache file %s" % (cachefile))
    #             raise

    # notfoundnow =[]
    found = 0
    skip = {}
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
    #print(" - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")

    # the laser-point / entrance station ids we actually want coordinates for
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline()  # Drop header
        for line in posfile.readlines():
            r = poslineregex.match(line)
            if r:
                x, y, z, id = r.groups()
                # if id in notfoundbefore:
                #     skip[id] = 1
                # else:
                for sid in mappoints:
                    if id.endswith(sid):
                        # notfoundnow.append(id)
                        # Now that we don't import any stations, we create it rather than look it up
                        # ss = models_survex.SurvexStation.objects.lookup(id)
                        # need to set block_id which means doing a search on all the survex blocks..
                        # remove dot at end and add one at beginning
                        blockpath = "." + id[:-len(sid)].strip(".")
                        try:
                            sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                            if len(sbqs) == 1:
                                sb = sbqs[0]
                            elif len(sbqs) > 1:
                                message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                                print(message)
                                models.DataIssue.objects.create(parser='survex', message=message)
                                sb = sbqs[0]
                            else:
                                message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                                print(message)
                                models.DataIssue.objects.create(parser='survex', message=message)
                                sb = survexblockroot
                        except Exception:
                            message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            # fall back to the root block; previously sb was left
                            # unbound here, causing a NameError (or a stale value)
                            sb = survexblockroot
                        try:
                            ss = models_survex.SurvexStation(name=id, block=sb)
                            ss.x = float(x)
                            ss.y = float(y)
                            ss.z = float(z)
                            ss.save()
                            found += 1
                        except Exception:
                            message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            raise

    #print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
    print(" - {} SurvexStation entrances found.".format(found))
    # if found > 10: # i.e. a previous cave import has been done
    #     try:
    #         with open(cachefile, "w") as f:
    #             c = len(notfoundnow)+len(skip)
    #             for i in notfoundnow:
    #                 pass #f.write("%s\n" % i)
    #             for j in skip:
    #                 pass #f.write("%s\n" % j) # NB skip not notfoundbefore
    #             print(('  Not-found cache file written: %s entries' % c))
    #     except:
    #         print("  FAILURE WRITE opening cache file %s" % (cachefile))
    #         raise