# troggle-unchained/parsers/survex.py
# (web-scrape artifact header removed: "1423 lines / 65 KiB / Python / Raw Normal View History")

import sys
import os
import re
import time
2020-07-03 14:53:36 +01:00
import copy
import subprocess
from pathlib import Path
from datetime import datetime, timedelta
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware
import troggle.settings as settings
from troggle.core.models.caves import Entrance, QM
2021-04-12 23:58:48 +01:00
from troggle.core.utils import get_process_memory, chaosmonkey
from troggle.parsers.people import GetPersonExpeditionNameLookup
2020-06-28 01:50:34 +01:00
from troggle.parsers.logbooks import GetCaveLookup
2021-04-13 01:13:08 +01:00
from troggle.core.models.troggle import DataIssue, Expedition
2021-04-26 18:18:16 +01:00
from troggle.core.models.survex import SurvexPersonRole, Wallet, SurvexDirectory, SurvexFile, SurvexBlock, SurvexStation
2021-04-13 01:37:42 +01:00
'''Imports the tree of survex files following from a defined root .svx file.
It also does NOT scan the Loser repo for all the svx files - though it should!
'''
todo = '''Also walk the entire tree in the :loser: repo looking for unconnected survex files
- add them to the system so that they can be reported-on
- produce a parser report and create a troggle report page (some are OK, e.g. futility series replaced by ARGE survey in 115)
- If you look at e.g. http://expo.survex.com/survexfile/161#T_caves-1623/161/lhr/alllhr
you will see that half of the team members are recognised by this parser, but not recognised by the
wider troggle system (the name is not a hyperlink) - apparently randomly.
GetPersonExpeditionNameLookup() needs to be fixed.
'''
# Module-level state for the survex import.
# (scrape-artifact commit-timestamp lines removed: they were not valid Python)
survexblockroot = None        # set to the root SurvexBlock when parsing starts
ROOTBLOCK = "rootblock"
METRESINFEET = 3.28084        # feet per metre: used to convert '*units feet' tape lengths

debugprint = False            # Turns on debug printout for just one *include file
debugprinttrigger = "!"
# debugprinttrigger = "caves-1623/40/old/EisSVH"
class MapLocations(object):
    """Fixed surface-survey reference points, plus (after points() has run)
    one entry per cave Entrance that has a best station.
    """
    # (station, short label, type, description) tuples for fixed reference points
    p = [
        ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
        ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
        ("vd1","VD1","Reference", "VD1 survey point"),
        ("laser.kt114_96","HSK","Reference", "Hinterer Schwarzmooskogel trig point"),
        ("2000","Nipple","Reference", "Nipple (Weiße Warze)"),
        ("3000","VSK","Reference", "Vorderer Schwarzmooskogel summit"),
        ("topcamp", "OTC", "Reference", "Old Top Camp"),
        ("laser.0", "LSR0", "Reference", "Laser Point 0"),
        ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
        ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
        ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
        ("225-96", "BAlm", "Reference", "Bräuning Alm trig point")
    ]

    def points(self):
        """Append an entry for every Entrance with a best_station() and
        return the combined list. Raises (after logging a DataIssue) if an
        Entrance has no linked Cave or the Cave has no Area."""
        for ent in Entrance.objects.all():
            if ent.best_station():
                try:
                    k = ent.caveandentrance_set.all()[0].cave
                except:
                    message = " ! Failed to get Cave linked to Entrance:{} from:{} best:{}".format(ent.name, ent.filename, ent.best_station())
                    DataIssue.objects.create(parser='entrances', message=message)
                    print(message)
                    raise
                try:
                    areaName = k.getArea().short_name
                except:
                    # BUGFIX: previously formatted the undefined name 'cave' (NameError
                    # inside this error path); use the cave object 'k' found above.
                    message = " ! Failed to get Area on cave '{}' linked to Entrance:{} from:{} best:{}".format(k, ent.name, ent.filename, ent.best_station())
                    DataIssue.objects.create(parser='entrances', message=message)
                    print(message)
                    raise
                self.p.append((ent.best_station(), "%s-%s" % (areaName, str(ent)[5:]), ent.needs_surface_work(), str(ent)))
        message = f" - {len(self.p)} entrances linked to caves."
        print(message)
        return self.p

    def __str__(self):
        return "{} map locations".format(len(self.p))
class SurvexLeg():
    """A single survey leg (tape/compass/clino values).
    No longer a models.Model subclass, so no longer a database table.
    """
    # class-level defaults; LoadSurvexLeg assigns per-instance values over these
    tape = 0.0
    compass = 0.0
    clino = 0.0
class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can *include
    other survex files.
    A 'scanswallet' is what we today call a "survey scans folder" or a "wallet".
    """
    # --- compiled patterns for the survex commands and comment directives we parse ---
    rx_begin = re.compile(r'(?i)begin')
    rx_end = re.compile(r'(?i)end$')
    rx_title = re.compile(r'(?i)title$')
    rx_ref = re.compile(r'(?i)ref$')
    rx_data = re.compile(r'(?i)data$')
    rx_flags = re.compile(r'(?i)flags$')
    rx_alias = re.compile(r'(?i)alias$')
    rx_entrance = re.compile(r'(?i)entrance$')
    rx_date = re.compile(r'(?i)date$')
    rx_units = re.compile(r'(?i)units$')
    rx_team = re.compile(r'(?i)team$')
    rx_set = re.compile(r'(?i)set$')
    rx_names = re.compile(r'(?i)names')
    rx_flagsnot= re.compile(r"not\s")
    rx_linelen = re.compile(r"[\d\-+.]+$")
    # roles/instruments that may precede or follow a name on a *team line
    instruments = "(waiting_patiently|slacker|Useless|nagging|unknown|Inst|instrument|rig|rigger|rigging|helper|something| compass|comp|clino|Notes|sketch|book|Tape|Dog|Pics|photo|drawing|Helper|GPS|Disto|Distox|Distox2|topodroid|point|Consultant|nail|polish|varnish|bitch|monkey)"
    rx_teammem = re.compile(r"(?i)"+instruments+"?(?:es|s)?\s+(.*)"+instruments+"?(?:es|s)?$")
    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
    # remember there is also QM_PATTERN used in views.other and set in settings.py
    rx_tapelng = re.compile(r'(?i).*(tape|length).*$')
    rx_cave = re.compile(r'(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)')
    rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')
    rx_comminc = re.compile(r'(?i)^\*include[\s]*([-\w/]*).*$') # inserted by linear collate ;*include
    rx_commcni = re.compile(r'(?i)^\*edulcni[\s]*([-\w/]*).*$') # inserted by linear collate ;*edulcni
    rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$')
    rx_commref = re.compile(r'(?i)^\s*ref(?:erence)?[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_wallet = re.compile(r'(?i)^\s*wallet[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_implicit= re.compile(r'(?i)^[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_ref_text= re.compile(r'(?i)^\s*\"[^"]*\"\s*$')
    rx_star = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
    rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
    rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')

    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
    datastardefault = {"type":"normal", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
    flagsdefault = {"duplicate":False, "surface":False, "splay":False, "skiplegs":False, "splayalias":False}

    # --- parser state; NOTE(review): these mutable class-level containers are shared
    # across instances -- presumably only one LoadingSurvex is ever live at a time.
    datastar ={}
    flagsstar = {}
    units = "metres"
    unitsfactor = None
    slength = 0.0          # cumulative surveyed length
    legsnumber = 0
    depthbegin = 0
    depthinclude = 0
    unitsstack = []
    legsnumberstack = []
    slengthstack = []
    personexpedstack = []
    stackbegin =[]
    flagsstack =[]
    datastack =[]
    includestack = []
    stacksvxfiles = []
    svxfileslist = []
    svxdirs = {}
    expos = {}
    survexdict = {} # each key is a directory, and its value is a list of files
    lineno = 0
    insp = ""              # indentation prefix used in progress printing
    callcount = 0
    ignoreprefix = ["surface", "kataster", "fixedpts", "gpx"]
    ignorenoncave = ["caves-1623", "caves-1623/2007-neu"]
    includedfilename =""
    currentsurvexblock = None
    currentsurvexfile = None
    currentcave = None
    caverndate = None
    currentpersonexped = []

    def __init__(self):
        self.caveslist = GetCaveLookup()
        pass
def LoadSurvexFallThrough(self, survexblock, line, cmd):
2020-06-28 14:42:26 +01:00
if cmd == "require":
2020-06-24 19:07:11 +01:00
pass # should we check survex version available for processing?
2020-07-04 13:31:46 +01:00
elif cmd in ["equate", "fix", "calibrate", "cs", "export", "case",
2020-07-07 01:35:58 +01:00
"declination", "infer","instrument", "sd"]:
2020-06-24 19:07:11 +01:00
pass # we ignore all these, which is fine.
else:
2020-07-04 13:31:46 +01:00
if cmd in ["include", "data", "flags", "title", "entrance","set", "units", "alias", "ref"]:
2020-06-24 19:07:11 +01:00
message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 19:07:11 +01:00
else:
message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 19:07:11 +01:00
def LoadSurvexTeam(self, survexblock, line):
2020-07-09 18:06:03 +01:00
"""Interpeting the *team fields means interpreting older style survex as well as current survex standard,
*team Insts Anthony Day - this is how most of our files specify the team member
*team "Anthony Day" notes pictures tape - this is how the survex documentation says it should be done
We have a huge variety of abbreviations and mispellings. The most laconic being
*team gb, bl
If you look at e.g. http://expo.survex.com/survexfile/161#T_caves-1623/161/lhr/alllhr
you will see than have the team members are recognised by this parser, but not recognised by the
wider troggle system (the name is not a hyperlink) - apparently randomly.
GetPersonExpeditionNameLookup() needs to be fixed.
"""
2020-06-24 19:07:11 +01:00
teammembers = [ ]
2020-07-07 01:35:58 +01:00
mteammember = self.rx_teammem.match(line)
2020-06-24 19:07:11 +01:00
if mteammember:
for tm in self.rx_person.split(mteammember.group(2)):
if tm:
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
if (personexpedition, tm) not in teammembers:
teammembers.append((personexpedition, tm))
personrole = SurvexPersonRole(survexblock=survexblock, personexpedition=personexpedition, personname=tm)
# personrole = SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
personrole.save()
2020-06-24 19:07:11 +01:00
personrole.expeditionday = survexblock.expeditionday
if personexpedition:
personrole.person=personexpedition.person
self.currentpersonexped.append(personexpedition)
2020-06-24 19:07:11 +01:00
personrole.save()
2020-07-04 13:31:46 +01:00
def LoadSurvexEntrance(self, survexblock, line):
# Not using this yet
pass
def LoadSurvexAlias(self, survexblock, line):
# *alias station - ..
splayalias = re.match("(?i)station\s*\-\s*\.\.\s*$",line)
if splayalias:
self.flagsstar["splayalias"] = True
else:
message = "! Bad *ALIAS: '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-07-04 13:31:46 +01:00
2020-07-07 01:35:58 +01:00
def LoadSurvexUnits(self, survexblock, line):
# all for 4 survex files with measurements in feet. bugger.
tapeunits = self.rx_tapelng.match(line) # tape|length
if not tapeunits:
return
2020-07-08 00:00:56 +01:00
convert = re.match("(?i)(\w*)\s*([\.\d]+)\s*(\w*)",line)
2020-07-07 02:46:18 +01:00
if convert:
2020-07-08 00:00:56 +01:00
factor = convert.groups()[1]
self.unitsfactor = float(factor)
if debugprint:
message = "! *UNITS NUMERICAL conversion [{}x] '{}' ({}) {}".format(factor, line, survexblock, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexunits', message=message)
2020-07-07 02:46:18 +01:00
2020-07-08 00:00:56 +01:00
feet = re.match("(?i).*feet$",line)
metres = re.match("(?i).*(METRIC|METRES|METERS)$",line)
2020-07-07 01:35:58 +01:00
if feet:
self.units = "feet"
elif metres:
self.units = "metres"
else:
2020-07-08 00:00:56 +01:00
message = "! *UNITS in YARDS!? - not converted '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
2020-07-07 01:35:58 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexunits', message=message)
2020-07-07 01:35:58 +01:00
2020-06-24 17:55:42 +01:00
def LoadSurvexDate(self, survexblock, line):
2020-07-08 00:00:56 +01:00
# we should make this a date RANGE for everything
def setdate(year):
2020-07-04 13:31:46 +01:00
# cacheing to save DB query on every block and to prepare for django-less troggle in future
if year in self.expos:
expo = self.expos[year]
else:
2021-04-13 01:13:08 +01:00
expeditions = Expedition.objects.filter(year=year)
2021-04-13 22:27:01 +01:00
if len(expeditions) != 1 :
message = f"! More than one expedition in year {year} '{line}' ({survexblock}) {survexblock.survexfile.path}"
print((self.insp+message))
DataIssue.objects.create(parser='survexunits', message=message)
2020-07-04 13:31:46 +01:00
expo= expeditions[0]
self.expos[year]= expo
2020-06-24 17:55:42 +01:00
2020-07-04 13:31:46 +01:00
survexblock.expedition = expo
survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
survexblock.save()
2020-07-08 00:00:56 +01:00
if len(line) > 10:
if line[10] == "-":
line = line[0:10]
if len(line) == 10:
year = line[:4]
# TO DO set to correct Austrian timezone Europe/Vienna
# %m and %d need leading zeros. Source svx files require them.
survexblock.date = datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d')
setdate(year)
elif len(line) == 7:
year = line[:4]
survexblock.date = datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m') # sets to first of month
setdate(year)
elif len(line) == 4:
year = line[:4]
survexblock.date = datetime.strptime(line, '%Y') # sets to January 1st
setdate(year)
else:
message = "! DATE unrecognised '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-07-08 00:00:56 +01:00
2020-07-04 13:31:46 +01:00
def LoadSurvexLeg(self, survexblock, sline, comment):
2020-06-24 14:10:13 +01:00
"""This reads compass, clino and tape data but only keeps the tape lengths,
the rest is discarded after error-checking.
2020-07-07 01:35:58 +01:00
Now skipping the error checking - returns as soon as the leg is not one we count.
2020-06-24 14:10:13 +01:00
"""
2020-07-04 01:10:17 +01:00
invalid_clino = 180.0
invalid_compass = 720.0
invalid_tape = 0.0
2020-07-04 13:31:46 +01:00
if self.flagsstar["skiplegs"]:
2020-07-06 21:46:19 +01:00
if debugprint:
print("skip in ", self.flagsstar, survexblock.survexfile.path)
2020-07-04 13:31:46 +01:00
return
2020-07-04 01:10:17 +01:00
2020-07-06 21:46:19 +01:00
if debugprint:
print("! LEG datastar type:{}++{}\n{} ".format(self.datastar["type"].upper(), survexblock.survexfile.path, sline))
2020-07-03 17:22:15 +01:00
if self.datastar["type"] == "passage":
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self.datastar["type"] == "cartesian":
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self.datastar["type"] == "nosurvey":
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self.datastar["type"] == "diving":
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self.datastar["type"] == "cylpolar":
2020-07-03 14:53:36 +01:00
return
2020-07-07 01:35:58 +01:00
if debugprint:
print(" !! LEG data lineno:{}\n !! sline:'{}'\n !! datastar['tape']: {}".format(self.lineno, sline, self.datastar["tape"]))
2020-07-04 01:10:17 +01:00
2020-07-03 17:22:15 +01:00
if self.datastar["type"] != "normal":
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
datastar = self.datastar # shallow copy: alias but the things inside are the same things
2020-06-24 14:10:13 +01:00
survexleg = SurvexLeg()
2020-07-04 01:10:17 +01:00
2020-06-24 22:46:18 +01:00
ls = sline.lower().split()
2020-07-04 13:31:46 +01:00
# skip all splay legs
if ls[datastar["from"]] == ".." or ls[datastar["from"]] == ".":
2020-07-06 21:46:19 +01:00
if debugprint:
print("Splay in ", survexblock.survexfile.path)
2020-07-04 13:31:46 +01:00
return
if ls[datastar["to"]] == ".." or ls[datastar["to"]] == ".":
2020-07-06 21:46:19 +01:00
if debugprint:
print("Splay in ", survexblock.survexfile.path)
2020-07-04 13:31:46 +01:00
return
if self.flagsstar["splayalias"]:
if ls[datastar["from"]] == "-":
2020-07-06 21:46:19 +01:00
if debugprint:
print("Aliased splay in ", survexblock.survexfile.path)
2020-07-04 13:31:46 +01:00
return
if ls[datastar["to"]] == "-":
2020-07-06 21:46:19 +01:00
if debugprint:
print("Aliased splay in ", survexblock.survexfile.path)
2020-07-04 13:31:46 +01:00
return
2020-07-03 14:53:36 +01:00
try:
2020-07-03 17:22:15 +01:00
tape = ls[datastar["tape"]]
2020-07-03 14:53:36 +01:00
except:
2020-07-03 17:22:15 +01:00
message = ' ! datastar parsing incorrect in line %s in %s' % (ls, survexblock.survexfile.path)
2020-07-08 00:00:56 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
survexleg.tape = invalid_tape
return
2020-07-04 13:31:46 +01:00
# e.g. '29/09' or '(06.05)' in the tape measurement
2020-07-04 01:10:17 +01:00
# tape = tape.replace("(","") # edited original file (only one) instead
# tape = tape.replace(")","") # edited original file (only one) instead
# tape = tape.replace("/",".") # edited original file (only one) instead.
2020-07-03 14:53:36 +01:00
try:
2020-07-08 00:00:56 +01:00
if self.unitsfactor:
tape = float(tape) * self.unitsfactor
if debugprint:
message = " ! Units: Length scaled {}m '{}' in ({}) units:{} factor:{}x".format(tape, ls, survexblock.survexfile.path, self.units, self.unitsfactor)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-07 01:35:58 +01:00
if self.units =="feet":
2020-07-08 00:00:56 +01:00
tape = float(tape) / METRESINFEET
if debugprint:
message = " ! Units: converted to {:.3f}m from {} '{}' in ({})".format(tape, self.units, ls, survexblock.survexfile.path)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-08 00:00:56 +01:00
survexleg.tape = float(tape)
2020-07-04 01:10:17 +01:00
self.legsnumber += 1
2020-07-03 14:53:36 +01:00
except ValueError:
2020-07-08 00:00:56 +01:00
message = " ! Value Error: Tape misread in line'{}' in {} units:{}".format(ls, survexblock.survexfile.path, self.units)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
survexleg.tape = invalid_tape
try:
2020-07-04 13:31:46 +01:00
survexblock.legslength += survexleg.tape
2020-07-04 01:10:17 +01:00
self.slength += survexleg.tape
2020-07-03 14:53:36 +01:00
except ValueError:
2020-07-08 00:00:56 +01:00
message = " ! Value Error: Tape length not added '{}' in {} units:{}".format(ls, survexblock.survexfile.path, self.units)
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
try:
2020-07-03 17:22:15 +01:00
lcompass = ls[datastar["compass"]]
2020-07-03 14:53:36 +01:00
except:
message = ' ! Value Error: Compass not found in line %s in %s' % (ls, survexblock.survexfile.path)
2020-07-08 00:00:56 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
lcompass = invalid_compass
try:
2020-07-03 17:22:15 +01:00
lclino = ls[datastar["clino"]]
2020-07-03 14:53:36 +01:00
except:
print(("! Clino misread in", survexblock.survexfile.path))
2020-07-03 17:22:15 +01:00
print((" datastar:", datastar))
2020-07-03 14:53:36 +01:00
print((" Line:", ls))
message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
lclino = invalid_clino
if lclino == "up":
survexleg.clino = 90.0
lcompass = invalid_compass
elif lclino == "down":
survexleg.clino = -90.0
lcompass = invalid_compass
elif lclino == "-" or lclino == "level":
survexleg.clino = -90.0
try:
survexleg.compass = float(lcompass)
except ValueError:
print(("! Compass misread in", survexblock.survexfile.path))
2020-07-03 17:22:15 +01:00
print((" datastar:", datastar))
2020-07-03 14:53:36 +01:00
print((" Line:", ls))
message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass,
ls, survexblock.survexfile.path)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survexleg', message=message)
2020-07-03 14:53:36 +01:00
survexleg.compass = invalid_compass
2020-07-04 01:10:17 +01:00
# delete the object to save memory
2020-07-03 14:53:36 +01:00
survexleg = None
2020-05-13 19:57:07 +01:00
2020-06-24 22:46:18 +01:00
def LoadSurvexRef(self, survexblock, args):
#print(self.insp+ "*REF ---- '"+ args +"'")
2020-07-04 01:10:17 +01:00
2020-06-24 22:46:18 +01:00
# *REF but also ; Ref years from 1960 to 2039
2020-07-04 01:10:17 +01:00
refline = self.rx_ref_text.match(args)
if refline:
# a textual reference such as "1996-1999 Not-KH survey book pp 92-95"
2020-07-06 21:46:19 +01:00
# print(self.insp+ "*REF quoted text so ignored:"+ args)
2020-07-04 01:10:17 +01:00
return
2020-06-24 22:46:18 +01:00
if len(args)< 4:
2020-07-04 01:10:17 +01:00
message = " ! Empty or BAD *REF statement '{}' in '{}'".format(args, survexblock.survexfile.path)
2020-06-24 22:46:18 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 22:46:18 +01:00
return
2020-06-24 14:10:13 +01:00
2020-06-24 22:46:18 +01:00
argsgps = self.rx_argsref.match(args)
if argsgps:
yr, letterx, wallet = argsgps.groups()
else:
2020-07-04 01:10:17 +01:00
message = " ! BAD *REF statement '{}' in '{}'".format(args, survexblock.survexfile.path)
print(self.insp+message)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 22:46:18 +01:00
return
2020-06-24 14:10:13 +01:00
if not letterx:
letterx = ""
else:
letterx = "X"
if len(wallet)<2:
wallet = "0" + wallet
2021-04-13 22:27:01 +01:00
if not (int(yr)>1960 and int(yr)<2039):
message = " ! Wallet year out of bounds {yr} '{refscan}' {survexblock.survexfile.path}"
print((self.insp+message))
DataIssue.objects.create(parser='survex', message=message)
refscan = "%s#%s%s" % (yr, letterx, wallet)
2020-06-24 22:46:18 +01:00
try:
if int(wallet)>100:
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
2020-06-24 22:46:18 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 22:46:18 +01:00
except:
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
2020-06-24 22:46:18 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2021-04-26 19:22:29 +01:00
manywallets = Wallet.objects.filter(walletname=refscan)
if manywallets:
2021-04-26 19:50:03 +01:00
survexblock.scanswallet = manywallets[0] # this is a ForeignKey field
2021-04-26 19:22:29 +01:00
print(manywallets[0])
survexblock.save()
2021-04-26 19:22:29 +01:00
if len(manywallets) > 1:
message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manywallets), survexblock.survexfile.path)
2020-06-24 17:55:42 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
else:
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path)
2020-06-24 17:55:42 +01:00
print((self.insp+message))
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-24 22:46:18 +01:00
def LoadSurvexQM(self, survexblock, qmline):
insp = self.insp
qm_no = qmline.group(1)
qm_grade = qmline.group(2)
2020-06-25 03:17:56 +01:00
if qmline.group(3): # usual closest survey station
qm_nearest = qmline.group(3)
if qmline.group(4):
qm_nearest = qm_nearest +"."+ qmline.group(4)
if qmline.group(6) and qmline.group(6) != '-':
qm_resolve_station = qmline.group(6)
if qmline.group(7):
qm_resolve_station = qm_resolve_station +"."+ qmline.group(7)
else:
qm_resolve_station = ""
qm_notes = qmline.group(8)
2020-06-25 03:17:56 +01:00
# Spec of QM in SVX files:
# ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
# ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
# ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
2020-06-25 03:17:56 +01:00
# NB none of the SurveyStations are in the DB now, so if we want to link to aSurvexStation
# we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
try:
qm = QM.objects.create(number=qm_no,
2020-06-25 03:17:56 +01:00
# nearest_station=a_survex_station_object, # can be null
nearest_station_description=qm_resolve_station,
nearest_station_name=qm_nearest,
grade=qm_grade.upper(),
location_description=qm_notes)
qm.save
# message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
# print(insp+message)
2021-04-13 01:13:08 +01:00
# DataIssue.objects.create(parser='survex', message=message)
2020-06-25 03:17:56 +01:00
except:
message = " ! QM{} FAIL to create {} in'{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
2020-06-24 17:55:42 +01:00
print(insp+message)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-25 03:17:56 +01:00
2020-07-08 00:00:56 +01:00
def LoadSurvexDataNormal(self,survexblock,args):
"""Sets the order for data elements in this and following blocks, e.g.
*data normal from to compass clino tape
*data normal from to tape compass clino
We are only collecting length data so we are disinterested in from, to, LRUD etc.
"""
2020-07-03 17:22:15 +01:00
# datastardefault = { # included here as reference to help understand the code
# "type":"normal",
# "t":"leg",
# "from":0,
# "to":1,
# "tape":2,
# "compass":3,
# "clino":4}
2020-07-03 17:22:15 +01:00
datastar = copy.deepcopy(self.datastardefault)
if args == "":
# naked '*data' which is relevant only for passages. Ignore. Continue with previous settings.
return
2020-07-03 14:53:36 +01:00
# DEFAULT | NORMAL | CARTESIAN| NOSURVEY |PASSAGE | TOPOFIL | CYLPOLAR | DIVING
ls = args.lower().split()
2020-07-03 14:53:36 +01:00
if ls[0] == "default":
2020-07-03 17:22:15 +01:00
self.datastar = copy.deepcopy(self.datastardefault)
2020-07-03 14:53:36 +01:00
elif ls[0] == "normal" or ls[0] == "topofil":
2020-07-03 17:22:15 +01:00
if not ("from" in datastar and "to" in datastar):
message = " ! - Unrecognised *data normal statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
print(message)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
return
else:
2020-07-03 17:22:15 +01:00
datastar = self.datastardefault
# ls = ["normal", "from", "to", "tape", "compass", "clino" ]
for i in range(1, len(ls)): # len[0] is "normal"
if ls[i] in ["bearing","compass"]:
2020-07-03 17:22:15 +01:00
datastar["compass"] = i-1
if ls[i] in ["clino","gradient"]:
2020-07-03 17:22:15 +01:00
datastar["clino"] = i-1
if ls[i] in ["tape","length"]:
2020-07-03 17:22:15 +01:00
datastar["tape"] = i-1
self.datastar = copy.deepcopy(datastar)
return
2020-07-03 14:53:36 +01:00
elif ls[0] == "cartesian" or ls[0] == "nosurvey" or ls[0] == "diving" or ls[0] == "cylpolar" or ls[0] == "passage":
2020-07-04 01:10:17 +01:00
# message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
# print(message)
# print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
# DataIssue.objects.create(parser='survex', message=message)
2020-07-03 17:22:15 +01:00
self.datastar["type"] = ls[0]
2020-06-27 17:55:59 +01:00
else:
2020-07-03 14:53:36 +01:00
message = " ! - Unrecognised *data statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
print(message)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-27 17:55:59 +01:00
2020-07-03 14:53:36 +01:00
def LoadSurvexFlags(self, args):
# Valid flags are DUPLICATE, SPLAY, and SURFACE, and a flag may be preceded with NOT to turn it off.
# Default values are NOT any of them
2020-07-03 17:22:15 +01:00
self.flagsstar = copy.deepcopy(self.flagsdefault)
2020-07-03 14:53:36 +01:00
flags = []
args = self.rx_flagsnot.sub("not",args)
2020-07-03 14:53:36 +01:00
argslist = args.split()
for s in argslist:
flags.append(s)
2020-07-06 21:46:19 +01:00
if debugprint:
print(" ^ flagslist:{}".format(flags),)
2020-07-03 14:53:36 +01:00
if "duplicate" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["duplicate"] = True
2020-07-03 14:53:36 +01:00
if "surface" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["surface"] = True
2020-07-03 14:53:36 +01:00
if "splay" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["splay"] = True
2020-07-03 14:53:36 +01:00
if "notduplicate" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["duplicate"] = False
2020-07-03 14:53:36 +01:00
if "notsurface" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["surface"] = False
2020-07-03 14:53:36 +01:00
if "notsplay" in flags:
2020-07-03 17:22:15 +01:00
self.flagsstar["splay"] = False
2020-07-03 14:53:36 +01:00
2020-07-03 17:22:15 +01:00
# if self.flagsstar["duplicate"] == True or self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
2020-07-03 14:53:36 +01:00
# actually we do want to count duplicates as this is for "effort expended in surveying underground"
2020-07-03 17:22:15 +01:00
if self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
2020-07-04 01:10:17 +01:00
self.flagsstar["skiplegs"] = True
2020-07-06 21:46:19 +01:00
if debugprint:
print(" $ flagslist:{}".format(flags),)
2020-06-27 17:55:59 +01:00
def IdentifyCave(self, cavepath):
2020-06-29 21:16:13 +01:00
if cavepath.lower() in self.caveslist:
return self.caveslist[cavepath.lower()]
# TO DO - some of this is already done in generating self.caveslist so simplify this
# esp. as it is in a loop.
# TO DO recognise cave if different name, e.g. gruenstein == 281
2020-06-28 01:50:34 +01:00
path_match = self.rx_cave.search(cavepath)
2020-06-27 17:55:59 +01:00
if path_match:
2020-06-28 14:42:26 +01:00
sluggy = '{}-{}'.format(path_match.group(1), path_match.group(2))
2020-06-29 21:16:13 +01:00
guesses = [sluggy.lower(), path_match.group(2).lower()]
for g in guesses:
if g in self.caveslist:
self.caveslist[cavepath] = self.caveslist[g]
return self.caveslist[g]
print(' ! Failed to find cave for {}'.format(cavepath.lower()))
2020-06-27 17:55:59 +01:00
else:
2020-07-01 22:49:38 +01:00
# not a cave, but that is fine.
# print(' ! No regex(standard identifier) cave match for %s' % cavepath.lower())
2020-06-27 17:55:59 +01:00
return None
2020-06-29 21:16:13 +01:00
def GetSurvexDirectory(self, headpath):
"""This creates a SurvexDirectory if it has not been seen before, and on creation
it sets the primarysurvexfile. This is correct as it should be set on the first file
in the directory, where first is defined by the *include ordering. Which is what we
are doing.
"""
2020-06-29 21:16:13 +01:00
if not headpath:
return self.svxdirs[""]
if headpath.lower() not in self.svxdirs:
2021-04-13 01:13:08 +01:00
self.svxdirs[headpath.lower()] = SurvexDirectory(path=headpath, primarysurvexfile=self.currentsurvexfile)
self.svxdirs[headpath.lower()].save()
2020-07-03 17:22:15 +01:00
self.survexdict[self.svxdirs[headpath.lower()]] = [] # list of the files in the directory
2020-06-29 21:16:13 +01:00
return self.svxdirs[headpath.lower()]
def ReportNonCaveIncludes(self, headpath, includelabel):
"""Ignore surface, kataser and gps *include survex files
"""
if headpath in self.ignorenoncave:
#message = f" - {headpath} is <ignorenoncave> (while creating '{includelabel}' sfile & sdirectory)"
#print("\n"+message)
#print("\n"+message,file=sys.stderr)
return
for i in self.ignoreprefix:
if headpath.startswith(i):
#message = f" - {headpath} starts with <ignoreprefix> (while creating '{includelabel}' sfile & sdirectory)"
#print("\n"+message)
#print("\n"+message,file=sys.stderr)
return
message = f" ! {headpath} is not a fully-registered cave. (while creating '{includelabel}' sfile & sdirectory in survex parsing)"
print("\n"+message)
print("\n"+message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-07-04 01:10:17 +01:00
2020-07-01 22:49:38 +01:00
    def LoadSurvexFile(self, svxid):
        """Creates SurvexFile in the database, and SurvexDirectory if needed
        with links to 'cave'
        Creates a new current survexfile and valid .survexdirectory
        The survexblock passed-in is not necessarily the parent. FIX THIS.

        svxid: the repo-relative survex file path (without .svx extension).
        Side effects: sets self.currentsurvexfile, may create a SurvexDirectory,
        and registers the new file in self.survexdict.
        """
        if debugprint:
            print(" # datastack in LoadSurvexFile:{} 'type':".format(svxid), end="")
            for dict in self.datastack:
                print("'{}' ".format(dict["type"].upper()), end="")
            print("")

        depth = " " * self.depthbegin
        # print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, svxid))
        headpath = os.path.dirname(svxid)

        newfile = SurvexFile(path=svxid)
        newfile.save() # until we do this there is no internal id so no foreign key works
        self.currentsurvexfile = newfile
        newdirectory = self.GetSurvexDirectory(headpath)
        newdirectory.save()
        newfile.survexdirectory = newdirectory
        self.survexdict[newdirectory].append(newfile)
        cave = self.IdentifyCave(headpath) # cave already exists in db
        if not newdirectory:
            message = " ! 'None' SurvexDirectory returned from GetSurvexDirectory({})".format(headpath)
            print(message)
            print(message,file=sys.stderr)
            DataIssue.objects.create(parser='survex', message=message)

        if cave:
            # link both the directory and the file to the identified cave
            newdirectory.cave = cave
            newfile.cave = cave
            #print("\n"+str(newdirectory.cave),file=sys.stderr)
        else:
            # not an error for surface/gps/kataster files; logged otherwise
            self.ReportNonCaveIncludes(headpath, svxid)
        if not newfile.survexdirectory:
            message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(svxid)
            print(message)
            print(message,file=sys.stderr)
            DataIssue.objects.create(parser='survex', message=message)
        self.currentsurvexfile.save() # django insists on this although it is already saved !?
        try:
            newdirectory.save()
        except:
            # dump enough context to diagnose the failed save before re-raising
            print(newdirectory, file=sys.stderr)
            print(newdirectory.primarysurvexfile, file=sys.stderr)
            raise

        if debugprint:
            print(" # datastack end LoadSurvexFile:{} 'type':".format(svxid), end="")
            for dict in self.datastack:
                print("'{}' ".format(dict["type"].upper()), end="")
            print("")
2020-06-28 01:50:34 +01:00
2020-06-28 14:42:26 +01:00
def ProcessIncludeLine(self, included):
2020-07-06 21:46:19 +01:00
global debugprint
2020-06-28 01:50:34 +01:00
svxid = included.groups()[0]
2020-07-06 21:46:19 +01:00
if svxid.lower() == debugprinttrigger.lower():
debugprint = True
2020-06-28 14:42:26 +01:00
self.LoadSurvexFile(svxid)
self.stacksvxfiles.append(self.currentsurvexfile)
def ProcessEdulcniLine(self, edulcni):
"""Saves the current survexfile in the db
"""
2020-07-06 21:46:19 +01:00
global debugprint
2020-06-28 01:50:34 +01:00
svxid = edulcni.groups()[0]
2020-07-06 21:46:19 +01:00
if debugprint:
depth = " " * self.depthbegin
print("{:2}{} - Edulcni survexfile:'{}'".format(self.depthbegin, depth, svxid))
if svxid.lower() == debugprinttrigger.lower():
debugprint = False
2020-06-28 14:42:26 +01:00
self.currentsurvexfile.save()
self.currentsurvexfile = self.stacksvxfiles.pop()
2020-06-24 22:46:18 +01:00
def LoadSurvexComment(self, survexblock, comment):
# ignore all comments except ;ref, ; wallet and ;QM and ;*include (for collated survex file)
refline = self.rx_commref.match(comment)
2020-06-24 22:46:18 +01:00
if refline:
2020-06-27 19:00:26 +01:00
comment = re.sub('(?i)\s*ref[.;]?',"",comment.strip())
2020-06-24 22:46:18 +01:00
self.LoadSurvexRef(survexblock, comment)
walletline = self.rx_wallet.match(comment)
if walletline:
comment = re.sub('(?i)\s*wallet[.;]?',"",comment.strip())
self.LoadSurvexRef(survexblock, comment)
implicitline = self.rx_implicit.match(comment)
if implicitline:
self.LoadSurvexRef(survexblock, comment)
2020-06-24 14:10:13 +01:00
2020-06-24 22:46:18 +01:00
qmline = self.rx_qm.match(comment)
if qmline:
self.LoadSurvexQM(survexblock, qmline)
2020-06-27 17:55:59 +01:00
included = self.rx_comminc.match(comment)
2020-07-01 22:49:38 +01:00
# ;*include means 'we have been included'; whereas *include means 'proceed to include'
2020-06-27 17:55:59 +01:00
if included:
2020-06-28 14:42:26 +01:00
self.ProcessIncludeLine(included)
2020-06-27 17:55:59 +01:00
edulcni = self.rx_commcni.match(comment)
2020-06-28 01:50:34 +01:00
# ;*edulcni means we are returning from an included file
2020-06-27 17:55:59 +01:00
if edulcni:
2020-06-28 14:42:26 +01:00
self.ProcessEdulcniLine(edulcni)
2020-06-24 22:46:18 +01:00
def LoadSurvexSetup(self,survexblock, survexfile):
self.depthbegin = 0
2020-07-03 17:22:15 +01:00
self.datastar = self.datastardefault
2020-07-04 01:10:17 +01:00
blocklegs = self.legsnumber
2021-04-12 23:58:48 +01:00
print(self.insp+" - MEM:{:.3f} Reading. parent:{} <> {} ".format(get_process_memory(),survexblock.survexfile.path, survexfile.path))
2020-06-24 22:46:18 +01:00
self.lineno = 0
2020-06-24 14:10:13 +01:00
sys.stderr.flush();
self.callcount +=1
if self.callcount % 10 ==0 :
2020-06-24 14:10:13 +01:00
print(".", file=sys.stderr,end='')
if self.callcount % 500 ==0 :
print("\n", file=sys.stderr,end='')
2020-06-24 14:10:13 +01:00
# Try to find the cave in the DB if not use the string as before
path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
if path_match:
pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
cave = getCaveByReference(pos_cave)
2020-06-24 14:10:13 +01:00
if cave:
survexfile.cave = cave
2020-06-24 22:46:18 +01:00
def LinearLoad(self, survexblock, path, svxlines):
2020-06-27 17:55:59 +01:00
"""Loads a single survex file. Usually used to import all the survex files which have been collated
2020-07-01 22:49:38 +01:00
into a single file. Loads the begin/end blocks using a stack for labels.
2020-06-27 17:55:59 +01:00
"""
2020-07-04 01:10:17 +01:00
blkid = None
pathlist = None
args = None
oldflags = None
blockcount = 0
self.lineno = 0
slengthtotal = 0.0
nlegstotal = 0
2020-06-27 17:55:59 +01:00
self.relativefilename = path
cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections
2020-06-28 14:42:26 +01:00
self.currentsurvexfile = survexblock.survexfile
self.currentsurvexfile.save() # django insists on this although it is already saved !?
2020-07-03 14:53:36 +01:00
2020-07-03 17:22:15 +01:00
self.datastar = copy.deepcopy(self.datastardefault)
self.flagsstar = copy.deepcopy(self.flagsdefault)
2020-06-28 14:42:26 +01:00
def tickle():
nonlocal blockcount
2020-06-28 14:42:26 +01:00
blockcount +=1
if blockcount % 10 ==0 :
print(".", file=sys.stderr,end='')
2020-07-01 22:49:38 +01:00
if blockcount % 200 ==0 :
2020-06-28 14:42:26 +01:00
print("\n", file=sys.stderr,end='')
2021-04-12 23:58:48 +01:00
print(" - MEM:{:7.3f} MB in use".format(get_process_memory()),file=sys.stderr)
2020-07-03 14:53:36 +01:00
print(" ", file=sys.stderr,end='')
2020-07-01 22:49:38 +01:00
sys.stderr.flush()
2020-06-28 14:42:26 +01:00
def printbegin():
nonlocal blkid
nonlocal pathlist
depth = " " * self.depthbegin
2020-07-04 01:10:17 +01:00
self.insp = depth
2020-07-07 01:35:58 +01:00
if debugprint:
print("{:2}{} - Begin for :'{}'".format(self.depthbegin,depth, blkid))
pathlist = ""
for id in self.stackbegin:
if len(id) > 0:
pathlist += "." + id
def printend():
nonlocal args
depth = " " * self.depthbegin
2020-07-07 01:35:58 +01:00
if debugprint:
print("{:2}{} - End from:'{}'".format(self.depthbegin,depth,args))
print("{:2}{} - LEGS: {} (n: {}, length:{} units:{})".format(self.depthbegin,
depth, self.slength, self.slength, self.legsnumber, self.units))
def pushblock():
nonlocal blkid
2020-07-06 21:46:19 +01:00
if debugprint:
print(" # datastack at 1 *begin {} 'type':".format(blkid), end="")
for dict in self.datastack:
print("'{}' ".format(dict["type"].upper()), end="")
print("")
print("'{}' self.datastar ".format(self.datastar["type"].upper()))
# ------------ * DATA
self.datastack.append(copy.deepcopy(self.datastar))
# ------------ * DATA
2020-07-06 21:46:19 +01:00
if debugprint:
print(" # datastack at 2 *begin {} 'type':".format(blkid), end="")
for dict in self.datastack:
print("'{}' ".format(dict["type"].upper()), end="")
print("")
print("'{}' self.datastar ".format(self.datastar["type"].upper()))
# ------------ * FLAGS
self.flagsstack.append(copy.deepcopy(self.flagsstar))
# ------------ * FLAGS
2020-07-04 13:31:46 +01:00
pass
def popblock():
nonlocal blkid
nonlocal oldflags
2020-07-06 21:46:19 +01:00
if debugprint:
print(" # datastack at *end '{} 'type':".format(blkid), end="")
for dict in self.datastack:
print("'{}' ".format(dict["type"].upper()), end="")
print("")
print("'{}' self.datastar ".format(self.datastar["type"].upper()))
# ------------ * DATA
self.datastar = copy.deepcopy(self.datastack.pop())
# ------------ * DATA
2020-07-06 21:46:19 +01:00
if debugprint:
print(" # datastack after *end '{} 'type':".format(blkid), end="")
for dict in self.datastack:
print("'{}' ".format(dict["type"].upper()), end="")
print("")
print("'{}' self.datastar ".format(self.datastar["type"].upper()))
# ------------ * FLAGS
self.flagsstar = copy.deepcopy(self.flagsstack.pop())
# ------------ * FLAGS
2020-07-06 21:46:19 +01:00
if debugprint:
if oldflags["skiplegs"] != self.flagsstar["skiplegs"]:
print(" # POP 'any' flag now:'{}' was:{} ".format(self.flagsstar["skiplegs"], oldflags["skiplegs"]))
2020-07-04 13:31:46 +01:00
def starstatement(star):
nonlocal survexblock
nonlocal blkid
nonlocal pathlist
nonlocal args
nonlocal oldflags
2020-07-04 01:10:17 +01:00
nonlocal slengthtotal
nonlocal nlegstotal
2020-07-04 13:31:46 +01:00
cmd, args = star.groups()
cmd = cmd.lower()
# ------------------------BEGIN
if self.rx_begin.match(cmd):
blkid = args.lower()
# PUSH state ++++++++++++++
self.stackbegin.append(blkid)
2020-07-08 00:00:56 +01:00
self.unitsstack.append((self.units, self.unitsfactor))
2020-07-04 01:10:17 +01:00
self.legsnumberstack.append(self.legsnumber)
self.slengthstack.append(self.slength)
self.personexpedstack.append(self.currentpersonexped)
pushblock()
# PUSH state ++++++++++++++
2020-07-04 01:10:17 +01:00
self.legsnumber = 0
self.slength = 0.0
2020-07-07 01:35:58 +01:00
self.units = "metres"
self.currentpersonexped = []
printbegin()
2021-04-13 01:13:08 +01:00
newsurvexblock = SurvexBlock(name=blkid, parent=survexblock,
survexpath=pathlist,
cave=self.currentcave, survexfile=self.currentsurvexfile,
2020-07-04 13:31:46 +01:00
legsall=0, legslength=0.0)
newsurvexblock.save()
newsurvexblock.title = "("+survexblock.title+")" # copy parent inititally, overwrite if it has its own
survexblock = newsurvexblock
survexblock.save() # django insists on this , but we want to save at the end !
tickle()
# ---------------------------END
elif self.rx_end.match(cmd):
2020-07-04 01:10:17 +01:00
survexblock.legsall = self.legsnumber
2020-07-04 13:31:46 +01:00
survexblock.legslength = self.slength
printend()
2020-07-04 01:10:17 +01:00
slengthtotal += self.slength
nlegstotal += self.legsnumber
try:
survexblock.parent.save() # django insists on this although it is already saved !?
except:
print(survexblock.parent, file=sys.stderr)
raise
try:
survexblock.save() # save to db at end of block
except:
print(survexblock, file=sys.stderr)
raise
# POP state ++++++++++++++
popblock()
self.currentpersonexped = self.personexpedstack.pop()
2020-07-04 01:10:17 +01:00
self.legsnumber = self.legsnumberstack.pop()
2020-07-08 00:00:56 +01:00
self.units, self.unitsfactor = self.unitsstack.pop()
2020-07-04 01:10:17 +01:00
self.slength = self.slengthstack.pop()
blkid = self.stackbegin.pop()
self.currentsurvexblock = survexblock.parent
survexblock = survexblock.parent
oldflags = self.flagsstar
self.depthbegin -= 1
2020-07-04 01:10:17 +01:00
# POP state ++++++++++++++
# -----------------------------
elif self.rx_title.match(cmd):
quotedtitle = re.match("(?i)^\"(.*)\"$",args)
if quotedtitle:
survexblock.title = quotedtitle.groups()[0]
else:
survexblock.title = args
elif self.rx_ref.match(cmd):
self.LoadSurvexRef(survexblock, args)
elif self.rx_flags.match(cmd):
oldflags = self.flagsstar
self.LoadSurvexFlags(args)
2020-07-06 21:46:19 +01:00
if debugprint:
if oldflags["skiplegs"] != self.flagsstar["skiplegs"]:
print(" # CHANGE 'any' flag now:'{}' was:{} ".format(self.flagsstar["skiplegs"], oldflags["skiplegs"]))
elif self.rx_data.match(cmd):
2020-07-08 00:00:56 +01:00
self.LoadSurvexDataNormal(survexblock, args)
2020-07-07 01:35:58 +01:00
elif self.rx_alias.match(cmd):
2020-07-04 13:31:46 +01:00
self.LoadSurvexAlias(survexblock, args)
2020-07-07 01:35:58 +01:00
elif self.rx_entrance.match(cmd):
2020-07-04 13:31:46 +01:00
self.LoadSurvexEntrance(survexblock, args)
2020-07-07 01:35:58 +01:00
elif self.rx_date.match(cmd):
self.LoadSurvexDate(survexblock, args)
2020-07-07 01:35:58 +01:00
elif self.rx_units.match(cmd):
self.LoadSurvexUnits(survexblock, args)
elif self.rx_team.match(cmd):
self.LoadSurvexTeam(survexblock, args)
2020-07-07 01:35:58 +01:00
elif self.rx_set.match(cmd) and self.rx_names.match(cmd):
pass
2020-07-07 01:35:58 +01:00
elif self.rx_include.match(cmd):
message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
print(message)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
else:
self.LoadSurvexFallThrough(survexblock, args, cmd)
2020-06-27 17:55:59 +01:00
for svxline in svxlines:
2020-07-03 14:53:36 +01:00
self.lineno += 1
2020-07-01 22:49:38 +01:00
sline, comment = self.rx_comment.match(svxline).groups()
2020-06-27 17:55:59 +01:00
if comment:
2020-07-03 14:53:36 +01:00
# this catches the ;*include NEWFILE and ;*edulcni ENDOFFILE lines too
self.LoadSurvexComment(survexblock, comment)
2020-07-01 22:49:38 +01:00
2020-06-27 17:55:59 +01:00
if not sline:
continue # skip blank lines
# detect a star command
2020-07-04 13:31:46 +01:00
star = self.rx_star.match(sline)
if star:
2020-07-04 01:10:17 +01:00
# yes we are reading a *command
2020-07-04 13:31:46 +01:00
starstatement(star)
2020-07-04 01:10:17 +01:00
else: # not a *cmd so we are reading data OR a ";" rx_comment failed
2020-07-04 13:31:46 +01:00
self.LoadSurvexLeg(survexblock, sline, comment)
2020-07-04 01:10:17 +01:00
self.legsnumber = slengthtotal
self.slength = nlegstotal
2020-06-27 17:55:59 +01:00
2021-04-13 23:52:56 +01:00
def PushdownStackScan(self, survexblock, path, fin, flinear, fcollate):
2020-06-27 12:08:02 +01:00
"""Follows the *include links in all the survex files from the root file 1623.svx
2020-06-27 17:55:59 +01:00
and reads only the *include and *begin and *end statements. It produces a linearised
list of the include tree and detects blocks included more than once.
2020-06-27 12:08:02 +01:00
"""
2020-06-27 17:55:59 +01:00
indent = " " * self.depthinclude
2020-06-27 12:08:02 +01:00
sys.stderr.flush();
self.callcount +=1
if self.callcount % 10 ==0 :
print(".", file=sys.stderr,end='')
if self.callcount % 500 ==0 :
2020-07-03 14:53:36 +01:00
print("\n ", file=sys.stderr,end='')
2020-06-27 12:08:02 +01:00
2020-07-01 22:49:38 +01:00
if path in self.svxfileslist:
2020-07-04 01:10:17 +01:00
message = " * Warning. Duplicate detected in *include list at callcount:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
2020-06-27 17:55:59 +01:00
print(message)
print(message,file=flinear)
2020-07-01 22:49:38 +01:00
print("\n"+message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-07-01 22:49:38 +01:00
if self.svxfileslist.count(path) > 20:
message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
2020-06-27 17:55:59 +01:00
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-27 17:55:59 +01:00
return
2020-07-01 22:49:38 +01:00
self.svxfileslist.append(path)
2020-06-27 12:08:02 +01:00
svxlines = fin.read().splitlines()
for svxline in svxlines:
self.lineno += 1
2020-06-27 17:55:59 +01:00
includestmt =self.rx_include.match(svxline)
if not includestmt:
2020-07-01 22:49:38 +01:00
fcollate.write("{}\n".format(svxline.strip()))
2020-06-27 17:55:59 +01:00
2020-06-27 12:08:02 +01:00
sline, comment = self.rx_comment.match(svxline.strip()).groups()
2020-07-04 13:31:46 +01:00
star = self.rx_star.match(sline)
if star: # yes we are reading a *cmd
cmd, args = star.groups()
2020-06-27 12:08:02 +01:00
cmd = cmd.lower()
2020-06-28 14:42:26 +01:00
if re.match("(?i)include$", cmd):
2020-07-01 22:49:38 +01:00
includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))
2020-06-27 12:08:02 +01:00
2020-07-01 22:49:38 +01:00
fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
self.RunSurvexIfNeeded(os.path.join(settings.SURVEX_DATA, includepath))
2020-07-01 22:49:38 +01:00
if os.path.isfile(fullpath):
2020-06-27 12:08:02 +01:00
#--------------------------------------------------------
2020-06-27 17:55:59 +01:00
self.depthinclude += 1
2020-07-01 22:49:38 +01:00
fininclude = open(fullpath,'r')
fcollate.write(";*include {}\n".format(includepath))
flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includepath))
push = includepath.lower()
2020-07-07 01:35:58 +01:00
self.includestack.append(push)
#-----------------
2021-04-13 23:52:56 +01:00
self.PushdownStackScan(survexblock, includepath, fininclude, flinear, fcollate)
#-----------------
2020-07-07 01:35:58 +01:00
pop = self.includestack.pop()
2020-06-27 12:08:02 +01:00
if pop != push:
2020-07-07 01:35:58 +01:00
message = "!! ERROR mismatch *include pop!=push {}".format(pop, push, self.includestack)
2020-06-27 17:55:59 +01:00
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
fcollate.write(";*edulcni {}\n".format(pop))
2020-06-27 12:08:02 +01:00
fininclude.close()
2020-06-27 17:55:59 +01:00
self.depthinclude -= 1
2020-06-27 12:08:02 +01:00
#--------------------------------------------------------
else:
2020-07-01 22:49:38 +01:00
message = " ! ERROR *include file not found for:'{}'".format(includepath)
2020-06-27 17:55:59 +01:00
print(message)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-28 14:42:26 +01:00
elif re.match("(?i)begin$", cmd):
2020-06-27 12:08:02 +01:00
self.depthbegin += 1
depth = " " * self.depthbegin
if args:
pushargs = args
else:
pushargs = " "
self.stackbegin.append(pushargs.lower())
flinear.write(" {:2} {} *begin {}\n".format(self.depthbegin, depth, args))
pass
2020-06-28 14:42:26 +01:00
elif re.match("(?i)end$", cmd):
2020-06-27 12:08:02 +01:00
depth = " " * self.depthbegin
flinear.write(" {:2} {} *end {}\n".format(self.depthbegin, depth, args))
if not args:
args = " "
popargs = self.stackbegin.pop()
if popargs != args.lower():
2020-07-01 22:49:38 +01:00
message = "!! ERROR mismatch in BEGIN/END labels pop!=push '{}'!='{}'\n{}".format(popargs, args, self. stackbegin)
2020-06-27 17:55:59 +01:00
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
2021-04-13 01:13:08 +01:00
DataIssue.objects.create(parser='survex', message=message)
2020-06-27 12:08:02 +01:00
self.depthbegin -= 1
pass
2020-06-28 14:42:26 +01:00
elif re.match("(?i)title$", cmd):
depth = " " * self.depthbegin
flinear.write(" {:2} {} *title {}\n".format(self.depthbegin, depth, args))
pass
def RunSurvexIfNeeded(self,fullpath):
now = time.time()
2020-07-03 14:53:36 +01:00
cav_t = now - 365*24*3600
log_t = now - 365*24*3600
svx_t = now - 365*24*3600
def runcavern():
2021-04-07 21:53:43 +01:00
# print(" - Regenerating stale (or chaos-monkeyed) cavern .log and .3d for '{}'\n days svx old: {:.1f} cav:{:.1f} log old: {:.1f}".
# format(fullpath, (svx_t - log_t)/(24*3600), (cav_t - log_t)/(24*3600), (now - log_t)/(24*3600)))
subprocess.call([settings.CAVERN, "--log", "--output={}".format(settings.THREEDCACHEDIR), "{}.svx".format(fullpath)])
svxpath = fullpath + ".svx"
logpath = fullpath + ".log"
if not os.path.isfile(logpath):
runcavern()
return
if not self.caverndate:
completed_process = subprocess.run(["which", "{}".format(settings.CAVERN)],
capture_output=True, check=True, text=True)
self.caverndate = os.path.getmtime(completed_process.stdout.strip())
cav_t = self.caverndate
log_t = os.path.getmtime(logpath)
svx_t = os.path.getmtime(svxpath)
now = time.time()
if svx_t - log_t > 0: # stale, older than svx file
runcavern()
return
if now - log_t > 60 *24*60*60: # >60 days, re-run anyway
runcavern()
return
if cav_t - log_t > 0: # new version of cavern
runcavern()
return
2021-04-12 23:58:48 +01:00
if chaosmonkey(200):
runcavern()
2020-06-27 17:55:59 +01:00
2020-06-27 18:00:24 +01:00
def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links successively to find files in the whole include tree.

    Phase 1 (PushdownStackScan) collates every included file into one big
    temporary .svx file; phase 2 (LinearLoad) parses that collated file and
    loads the blocks into the database. Returns the total number of legs.

    Fixes applied in this revision: the 'MB END' report printed mem0 (the
    START figure) instead of mem1, and the post-load 'MB STOP' report used a
    memory figure measured before LinearLoad ran.
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')

    print(' - Scanning Survex Blocks tree from {}.svx ...'.format(settings.SURVEX_TOPNAME),file=sys.stderr)
    survexfileroot = survexblockroot.survexfile # i.e. SURVEX_TOPNAME only
    collatefilename = "_" + survexfileroot.path + ".svx"

    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    fullpathtotop = os.path.join(survexfileroot.survexdirectory.path, survexfileroot.path)

    print(" - RunSurvexIfNeeded cavern on '{}'".format(fullpathtotop), file=sys.stderr)
    svx_scan.RunSurvexIfNeeded(fullpathtotop)
    indent=""

    fcollate = open(collatefilename, 'w')

    mem0 = get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0),file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0,survexfileroot.path))
    print(" ", file=sys.stderr,end='')

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))

    #----------------------------------------------------------------
    svx_scan.PushdownStackScan(survexblockroot, survexfileroot.path, finroot, flinear, fcollate)
    #----------------------------------------------------------------

    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))
    mem1 = get_process_memory()
    flinear.write("\n - MEM:{:.2f} MB STOP {}\n".format(mem1,survexfileroot.path))
    flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1-mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None # Hmm. Does this actually delete all the instance variables if they are lists, dicts etc.?
    print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)),file=sys.stderr)

    mem1 = get_process_memory()
    # FIX: was printing mem0 (the START figure) against the END label
    print(" - MEM:{:7.2f} MB END ".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)
    svxfileslist = [] # free memory

    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
    # look in MapLocations() for how we find the entrances
    print('\n - Loading All Survex Blocks  (LinearLoad)',file=sys.stderr)
    svx_load = LoadingSurvex()

    svx_load.survexdict[survexfileroot.survexdirectory] = []
    svx_load.survexdict[survexfileroot.survexdirectory].append(survexfileroot)
    svx_load.svxdirs[""] = survexfileroot.survexdirectory

    # This next should be rewritten to use a generator so that only one
    # line is held in memory at a time:
    with open(collatefilename, "r") as fcollate:
        svxlines = fcollate.read().splitlines()
        #----------------------------------------------------------------
        svx_load.LinearLoad(survexblockroot,survexfileroot.path, svxlines)
        #----------------------------------------------------------------

    # FIX: re-measure memory after the load so STOP reflects the load phase
    mem1 = get_process_memory()
    print("\n - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)

    legsnumber = svx_load.legsnumber
    print(" - Number of SurvexDirectories: {}".format(len(svx_load.survexdict)))
    tf=0
    for d in svx_load.survexdict:
        tf += len(svx_load.survexdict[d])
    print(" - Number of SurvexFiles: {}".format(tf))
    svx_load = None

    # Close the logging file, Restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig

    return legsnumber
2020-06-29 21:16:13 +01:00
def MakeSurvexFileRoot():
    """Returns a file_object.path = SURVEX_TOPNAME associated with directory_object.path = SURVEX_DATA
    """
    root_file = SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    root_file.save()
    root_dir = SurvexDirectory(path=settings.SURVEX_DATA, cave=None, primarysurvexfile=root_file)
    root_dir.save()
    # i.e. SURVEX_DATA/SURVEX_TOPNAME; mutually dependent objects need a
    # double-save like this
    root_file.survexdirectory = root_dir
    root_file.save()
    return root_file
2020-06-27 18:00:24 +01:00
def LoadSurvexBlocks():
    """Top-level entry point: wipe all survex-derived database objects,
    rebuild the root file/block, then parse and load the whole survex tree."""
    print(' - Flushing All Survex Blocks...')
    for model in (SurvexBlock, SurvexFile, SurvexDirectory, SurvexPersonRole, SurvexStation):
        model.objects.all().delete()

    print(" - survex Data Issues flushed")
    for parsername in ('survex', 'survexleg', 'survexunits'):
        DataIssue.objects.filter(parser=parsername).delete()

    survexfileroot = MakeSurvexFileRoot()
    # this next makes a block_object assciated with a file_object.path = SURVEX_TOPNAME
    survexblockroot = SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
                                  legsall=0, legslength=0.0)
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = get_process_memory()
    #----------------------------------------------------------------
    legsnumber = FindAndLoadSurvex(survexblockroot)
    #----------------------------------------------------------------
    memend = get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))

    survexblockroot.save()

    print(" - total number of survex legs: {}".format(legsnumber))
    print(' - Loaded All Survex Blocks.')
# Matches one line of survex .pos output: "( x, y, z ) station.name",
# capturing the three signed decimal coordinates and the station identifier.
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
2020-07-04 01:10:17 +01:00
def LoadPositions():
    """First load the survex stations for entrances and fixed points (about 600) into the database.
    Run cavern to produce a complete .3d file, then run survexport to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
    If we don't have it in the database, print an error message and discard it.
    """
    svx_t = 0
    d3d_t = 0

    def runcavern3d():
        """Regenerate the cavern .log/.3d and the .pos file for the top-level dataset.
        Reads 'topdata' from the enclosing scope (always assigned before first call)."""
        # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
        #    format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
        subprocess.call([settings.CAVERN, "--log", "--output={}".format(topdata), "{}.svx".format(topdata)])

        # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
        subprocess.call([settings.SURVEXPORT, '--pos', '{}.3d'.format(topdata)], cwd=settings.SURVEX_DATA)

    topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    print("\n")  # extra line because cavern overwrites the text buffer somehow

    # cavern defaults to using same cwd as supplied input file.
    # Locate the cavern binary so its mtime can be compared with the .3d file
    # (a newer binary means the .3d should be regenerated).
    completed_process = subprocess.run(["which", "{}".format(settings.CAVERN)],
                                       capture_output=True, check=True, text=True)
    cav_t = os.path.getmtime(completed_process.stdout.strip())

    svxpath = topdata + ".svx"
    d3dpath = topdata + ".3d"
    pospath = topdata + ".pos"

    svx_t = os.path.getmtime(svxpath)

    if os.path.isfile(d3dpath):
        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
        d3d_t = os.path.getmtime(d3dpath)

    now = time.time()
    if not os.path.isfile(pospath):
        runcavern3d()
    if not os.path.isfile(d3dpath):
        runcavern3d()
    elif svx_t - d3d_t > 0:  # stale, 3d older than svx file
        # BUGFIX: was 'd3d_t - svx_t > 0', which re-ran cavern when the .3d was
        # *newer* than the .svx (i.e. fresh) and never when it was actually stale.
        runcavern3d()
    elif now - d3d_t > 60 * 24 * 60 * 60:  # >60 days old, re-run anyway
        runcavern3d()
    elif cav_t - d3d_t > 0:  # new version of cavern binary installed since last run
        runcavern3d()

    # Set of survex station ids (suffixes) that correspond to entrances/fixed points.
    # Kept as a dict to preserve insertion order when scanning for suffix matches.
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    # Find the root block first, so we do not open the .pos file at all if this fails.
    try:
        survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
    except Exception:
        try:
            survexblockroot = SurvexBlock.objects.get(id=1)
        except Exception:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            DataIssue.objects.create(parser='survex', message=message)
            raise

    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline()  # Drop header
        for line in posfile:
            r = poslineregex.match(line)
            if not r:
                continue
            x, y, z, stnid = r.groups()  # 'stnid' (not 'id') to avoid shadowing the builtin
            for sid in mappoints:
                if not stnid.endswith(sid):
                    continue
                blockpath = "." + stnid[:-len(sid)].strip(".")
                # But why are we doing this? Why do we need the survexblock id for each of these ?
                # ..because mostly they don't actually appear in any SVX file. We should match them up
                # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
                # (A previous attempt to match blockpath against SurvexBlock.survexpath was disabled
                # behind 'if False:' and has been removed; every station is attached to the root block.)
                try:
                    ss = SurvexStation(name=stnid, block=survexblockroot)
                    ss.x = float(x)
                    ss.y = float(y)
                    ss.z = float(z)
                    ss.save()
                    found += 1
                except Exception:
                    message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                    print(message)
                    DataIssue.objects.create(parser='survex', message=message)
                    raise
    print(" - {} SurvexStation entrances found.".format(found))
2020-06-16 19:27:32 +01:00