forked from expo/troggle
rebuild all .3d files and progress on compass/clino/tape
@@ -4,7 +4,7 @@ import re
 import time
 
 from datetime import datetime, timedelta
-from subprocess import call, Popen, PIPE
+from subprocess import call, run
 
 from django.utils.timezone import get_current_timezone
 from django.utils.timezone import make_aware
@@ -13,6 +13,7 @@ import troggle.settings as settings
 import troggle.core.models as models
 import troggle.core.models_caves as models_caves
 import troggle.core.models_survex as models_survex
+from troggle.utils import ChaosMonkey
 from troggle.parsers.people import GetPersonExpeditionNameLookup
 from troggle.parsers.logbooks import GetCaveLookup
 from troggle.core.views_caves import MapLocations
@@ -34,11 +35,6 @@ class LoadingSurvex():
     A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
     """
 
-    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
-    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
-    stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
-    stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
-
     rx_linelen = re.compile(r"[\d\-+.]+$")
     rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
     rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
@@ -55,6 +51,11 @@ class LoadingSurvex():
     rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
     rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
 
+    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
+    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
+    stardatadefault = {"type":"normal", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
+
+    stardata ={}
     survexlegsalllength = 0.0
     survexlegsnumber = 0
     depthbegin = 0
@@ -68,13 +69,13 @@ class LoadingSurvex():
     lineno = 0
     insp = ""
     callcount = 0
-    stardata ={}
     ignoreprefix = ["surface", "kataster", "fixedpts", "gpx"]
     ignorenoncave = ["caves-1623", "caves-1623/2007-neu"]
     includedfilename =""
     currentsurvexblock = None
     currentsurvexfile = None
     currentcave = None
+    caverndate = None
 
     def __init__(self):
         self.caveslist = GetCaveLookup()
@@ -129,16 +130,27 @@ class LoadingSurvex():
         """
         # Check first to see if we are in a splay and abort if so.
         # TO DO splay abort
+        invalid_clino = 180.0
+        invalid_compass = 720.0
+        invalid_tape = 0.0
         stardata = self.stardata
         survexleg = SurvexLeg()
 
         ls = sline.lower().split()
         # this next fails for two surface survey svx files which use / for decimal point
         # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
-        if stardata["type"] == "normal": # should use current flags setting for this
-            # print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls,stardata))
-            tape = ls[stardata["tape"]]
+        if stardata["type"] == "normal": # should use current flags setting for this. May not be default order!
+            #print("! stardata {}++{}\n{} ".format(stardata, survexblock.survexfile.path, sline), file=sys.stderr)
+            try:
+                tape = ls[stardata["tape"]]
+            except:
+                print(("! stardata parsing incorrect", survexblock.survexfile.path))
+                print((" Stardata:", stardata))
+                print((" Line:", ls))
+                message = ' ! stardata parsing incorrect in line %s in %s' % (ls, survexblock.survexfile.path)
+                models.DataIssue.objects.create(parser='survexleg', message=message)
+                survexleg.tape = invalid_tape
+                return
             tape = tape.replace("(","")
             tape = tape.replace(")","")
             tape = tape.replace("/",".")
@@ -150,14 +162,24 @@ class LoadingSurvex():
                 print((" Stardata:", stardata))
                 print((" Line:", ls))
                 message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
-                models.DataIssue.objects.create(parser='survex', message=message)
-                survexleg.tape = 0
+                models.DataIssue.objects.create(parser='survexleg', message=message)
+                survexleg.tape = invalid_tape
             try:
                 survexblock.totalleglength += survexleg.tape
                 self.survexlegsalllength += survexleg.tape
             except ValueError:
                 message = ' ! Value Error: Tape length not added %s in %s' % (ls, survexblock.survexfile.path)
-                models.DataIssue.objects.create(parser='survex', message=message)
+                models.DataIssue.objects.create(parser='survexleg', message=message)
 
+            try:
+                lcompass = ls[stardata["compass"]]
+            except:
+                print(("! Compass not found in", survexblock.survexfile.path))
+                print((" Stardata:", stardata))
+                print((" Line:", ls))
+                message = ' ! Value Error: Compass not found in line %s in %s' % (ls, survexblock.survexfile.path)
+                models.DataIssue.objects.create(parser='survexleg', message=message)
+                lcompass = invalid_compass
+
             try:
                 lclino = ls[stardata["clino"]]
@@ -166,43 +188,31 @@ class LoadingSurvex():
                 print((" Stardata:", stardata))
                 print((" Line:", ls))
                 message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
-                models.DataIssue.objects.create(parser='survex', message=message)
-                lclino = error
+                models.DataIssue.objects.create(parser='survexleg', message=message)
+                lclino = invalid_clino
 
+            if lclino == "up":
+                survexleg.clino = 90.0
+                lcompass = invalid_compass
+            elif lclino == "down":
+                survexleg.clino = -90.0
+                lcompass = invalid_compass
+            elif lclino == "-" or lclino == "level":
+                survexleg.clino = -90.0
+
             try:
-                lcompass = ls[stardata["compass"]]
-            except:
+                survexleg.compass = float(lcompass)
+            except ValueError:
                 print(("! Compass misread in", survexblock.survexfile.path))
                 print((" Stardata:", stardata))
                 print((" Line:", ls))
-                message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
-                models.DataIssue.objects.create(parser='survex', message=message)
-                lcompass = error
+                message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass,
+                                ls, survexblock.survexfile.path)
+                models.DataIssue.objects.create(parser='survexleg', message=message)
+                survexleg.compass = invalid_compass
 
-            if lclino == "up":
-                survexleg.compass = 0.0
-                survexleg.clino = 90.0
-            elif lclino == "down":
-                survexleg.compass = 0.0
-                survexleg.clino = -90.0
-            elif lclino == "-" or lclino == "level":
-                try:
-                    survexleg.compass = float(lcompass)
-                except ValueError:
-                    print(("! Compass misread in", survexblock.survexfile.path))
-                    print((" Stardata:", stardata))
-                    print((" Line:", ls))
-                    message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass,
-                                    ls, survexblock.survexfile.path)
-                    models.DataIssue.objects.create(parser='survex', message=message)
-                    survexleg.compass = 1000
-                survexleg.clino = -90.0
-            else:
-                assert self.rx_linelen.match(lcompass), ls
-                assert self.rx_linelen.match(lclino) and lclino != "-", ls
-                survexleg.compass = float(lcompass)
-                survexleg.clino = float(lclino)
-        # delete the object so that django autosaving doesn't save it.
+        #print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls,stardata))
+        # delete the object to save memory
         survexleg = None
 
     def LoadSurvexRef(self, survexblock, args):
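A side note on the clino branch added above: unreadable or plumbed readings are replaced by sentinel values (invalid_clino = 180.0, invalid_compass = 720.0) before the float conversion is attempted. A minimal standalone sketch of that normalisation, with the sentinel as a default argument; this is illustration only, not part of the commit:

    # Sketch only: the up/down/level handling from the block above, as a pure function.
    def normalise_leg(lclino, lcompass, invalid_compass=720.0):
        """Return (clino, compass) as floats, treating plumbed legs specially."""
        if lclino == "up":
            return 90.0, invalid_compass        # plumb up: compass reading is meaningless
        elif lclino == "down":
            return -90.0, invalid_compass       # plumb down: compass reading is meaningless
        elif lclino in ("-", "level"):
            return -90.0, float(lcompass)       # handled like this in the block above
        else:
            return float(lclino), float(lcompass)

    print(normalise_leg("up", "034"))           # (90.0, 720.0)
    print(normalise_leg("-05", "127.5"))        # (-5.0, 127.5)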
@@ -292,17 +302,59 @@ class LoadingSurvex():
         models.DataIssue.objects.create(parser='survex', message=message)
 
     def LoadSurvexDataCmd(self,survexblock,args):
-        ls = args.lower().split()
-        stardata = { "type":ls[0] }
-        for i in range(0, len(ls)):
-            stardata[self.stardataparamconvert.get(ls[i], ls[i])] = i - 1
-        self.stardata = stardata
-        if ls[0] in ["normal", "cartesian", "nosurvey"]:
-            assert (("from" in stardata and "to" in stardata) or "station" in stardata), args
+        """Sets the order for data elements in this and following blocks, e.g.
+        *data normal from to compass clino tape
+        *data normal from to tape compass clino
+        We are only collecting length data so we are disinterested in from, to, LRUD etc.
+        """
+        # stardatadefault = { # included here as reference to help understand the code
+        # "type":"normal",
+        # "t":"leg",
+        # "from":0,
+        # "to":1,
+        # "tape":2,
+        # "compass":3,
+        # "clino":4}
+        stardata = self.stardatadefault
+        if args == "":
+            # naked '*data' which is relevant only for passages. Ignore. Continue with previous settings.
+            return
+
+        ls = args.lower().split()
+        if ls[0] == "normal":
+            if not (("from" in stardata and "to" in stardata) or "station" in stardata):
+                message = " ! - Unrecognised *data normal statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
+                print(message)
+                print(message,file=sys.stderr)
+                models.DataIssue.objects.create(parser='survex', message=message)
+                return
+            else:
+                stardata = self.stardatadefault
+                # ls = ["normal", "from", "to", "tape", "compass", "clino" ]
+                for i in range(1, len(ls)): # len[0] is "normal"
+                    if ls[i] in ["bearing","compass"]:
+                        stardata["compass"] = i-1
+                    if ls[i] in ["clino","gradient"]:
+                        stardata["clino"] = i-1
+                    if ls[i] in ["tape","length"]:
+                        stardata["tape"] = i-1
+                self.stardata = stardata
+                return
         elif ls[0] == "default":
-            stardata = self.stardatadefault
+            self.stardata = self.stardatadefault
+        elif ls[0] == "passage" or ls[0] == "nosurvey":
+            # we ignore everything else, such as '*data passage'
+            pass
+        elif ls[0] == "cartesian" or ls[0] == "nosurvey":
+            message = " ! - *data cartesian survey blocks are ignored. Length not calculated. '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
+            print(message)
+            print(message,file=sys.stderr)
+            models.DataIssue.objects.create(parser='survex', message=message)
         else:
-            assert ls[0] == "passage", args
+            message = " ! - Unrecognised *data statement '{}'".format(args)
+            print(message)
+            print(message,file=sys.stderr)
+            models.DataIssue.objects.create(parser='survex', message=message)
 
     def LoadSurvexFlags(self, line, cmd):
         # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
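For reference, the rewritten LoadSurvexDataCmd only records where tape, compass and clino sit in each leg line; that index map is then used by the leg parser shown earlier. A small hedged illustration with a made-up *data ordering and leg line, not part of the commit:

    # Sketch only: how a stardata-style index map picks fields out of a split leg line.
    # After "*data normal from to tape compass clino", keyword ls[i] maps to column i-1
    # of a leg line, because the word "normal" itself occupies ls[0] of the *data arguments.
    stardata = {"type": "normal", "from": 0, "to": 1, "tape": 2, "compass": 3, "clino": 4}

    line = "12 13 5.57 034.5 -06"    # hypothetical leg: from to tape compass clino
    ls = line.lower().split()
    tape = float(ls[stardata["tape"]])        # 5.57
    compass = float(ls[stardata["compass"]])  # 34.5
    clino = float(ls[stardata["clino"]])      # -6.0
    print(tape, compass, clino)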
@@ -360,6 +412,8 @@ class LoadingSurvex():
         Creates a new current survexfile and valid .survexdirectory
         The survexblock passed-in is not necessarily the parent. FIX THIS.
         """
+        self.stardata = self.stardatadefault
+
         depth = " " * self.depthbegin
         print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, svxid))
         headpath = os.path.dirname(svxid)
@@ -401,8 +455,6 @@ class LoadingSurvex():
 
     def ProcessIncludeLine(self, included):
         svxid = included.groups()[0]
-        #depth = " " * self.depthbegin
-        #print("{:2}{} - Include survexfile:'{}' {}".format(self.depthbegin, depth, svxid, included))
         self.LoadSurvexFile(svxid)
         self.stacksvxfiles.append(self.currentsurvexfile)
 
@@ -429,8 +481,6 @@ class LoadingSurvex():
         included = self.rx_comminc.match(comment)
         # ;*include means 'we have been included'; whereas *include means 'proceed to include'
         if included:
-            #depth = " " * self.depthbegin
-            #print("{:2}{} - Include comment:'{}' {}".format(self.depthbegin, depth, comment, included))
             self.ProcessIncludeLine(included)
 
         edulcni = self.rx_commcni.match(comment)
@@ -484,8 +534,6 @@ class LoadingSurvex():
             lineno += 1
             sline, comment = self.rx_comment.match(svxline).groups()
             if comment:
-                depth = " " * self.depthbegin
-                print("{:4} {:2}{} - Include comment:'{}' {}".format(lineno, self.depthbegin, depth, comment, sline))
                 self.LoadSurvexComment(survexblock, comment) # this catches the ;*include and ;*edulcni lines too
 
             if not sline:
@@ -517,7 +565,6 @@ class LoadingSurvex():
                 newsurvexblock.save()
                 newsurvexblock.title = "("+survexblock.title+")" # copy parent inititally
                 survexblock = newsurvexblock
-                # survexblock.survexfile.save()
                 survexblock.save() # django insists on this , but we want to save at the end !
                 tickle()
 
@@ -586,6 +633,8 @@ class LoadingSurvex():
         if self.callcount % 500 ==0 :
             print("\n", file=sys.stderr,end='')
 
+
+
         if path in self.svxfileslist:
             message = " * Warning. Duplicate in *include list at:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
             print(message)
@@ -600,7 +649,7 @@ class LoadingSurvex():
             models.DataIssue.objects.create(parser='survex', message=message)
             return
         self.svxfileslist.append(path)
 
         svxlines = fin.read().splitlines()
         for svxline in svxlines:
             self.lineno += 1
@@ -615,9 +664,9 @@ class LoadingSurvex():
            cmd = cmd.lower()
            if re.match("(?i)include$", cmd):
                includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))
-               #path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
 
                fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
+               self.RunSurvexIfNeeded(os.path.join(settings.SURVEX_DATA, includepath))
                if os.path.isfile(fullpath):
                    #--------------------------------------------------------
                    self.depthinclude += 1
@@ -676,6 +725,44 @@ class LoadingSurvex():
                 flinear.write(" {:2} {} *title {}\n".format(self.depthbegin, depth, args))
                 pass
 
+    def RunSurvexIfNeeded(self,fullpath):
+        cav_t = 0
+        log_t = 0
+        svx_t = 0
+        now = time.time()
+
+        def runcavern():
+            print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
+                format(fullpath, (svx_t - log_t)/(24*3600), (cav_t - log_t)/(24*3600), (now - log_t)/(24*3600)))
+            call([settings.CAVERN, "--log", "--output={}".format(fullpath), "{}.svx".format(fullpath)])
+
+        svxpath = fullpath + ".svx"
+        logpath = fullpath + ".log"
+
+        if not os.path.isfile(logpath):
+            runcavern()
+            return
+
+        if not self.caverndate:
+            completed_process = run(["which", "{}".format(settings.CAVERN)],
+                                    capture_output=True, check=True, text=True)
+            self.caverndate = os.path.getmtime(completed_process.stdout.strip())
+        cav_t = self.caverndate
+        log_t = os.path.getmtime(logpath)
+        svx_t = os.path.getmtime(svxpath)
+        now = time.time()
+
+        if svx_t - log_t > 0: # stale, older than svx file
+            runcavern()
+            return
+        if now - log_t > 60 *24*60*60: # >60 days, re-run anyway
+            runcavern()
+            return
+        if cav_t - log_t > 0: # new version of cavern
+            runcavern()
+            return
+        if ChaosMonkey(30):
+            runcavern()
+
 def FindAndLoadSurvex(survexblockroot):
     """Follows the *include links recursively to find files
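The new RunSurvexIfNeeded boils down to a set of mtime comparisons: regenerate when the .log is missing, older than its .svx source, older than the cavern binary, or simply older than 60 days, plus an occasional random re-run via ChaosMonkey. A reduced sketch of that decision as a pure function, with hypothetical paths and not part of the commit:

    # Sketch only: the staleness test behind RunSurvexIfNeeded.
    import os
    import time

    def log_is_stale(svxpath, logpath, cavernpath, max_age_days=60):
        """True if cavern should be re-run for this survex file."""
        if not os.path.isfile(logpath):
            return True                              # never run yet
        log_t = os.path.getmtime(logpath)
        if os.path.getmtime(svxpath) > log_t:        # source edited since last run
            return True
        if os.path.getmtime(cavernpath) > log_t:     # cavern upgraded since last run
            return True
        return (time.time() - log_t) > max_age_days * 24 * 3600   # periodic refresh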
@@ -685,14 +772,16 @@ def FindAndLoadSurvex(survexblockroot):
     # Redirect sys.stdout to the file
     sys.stdout = open('svxblks.log', 'w')
 
     print(' - SCANNING All Survex Blocks...',file=sys.stderr)
-    survexfileroot = survexblockroot.survexfile
+    survexfileroot = survexblockroot.survexfile # i.e. SURVEX_TOPNAME only
 
     collatefilename = "_" + survexfileroot.path + ".svx"
 
     svx_scan = LoadingSurvex()
     svx_scan.callcount = 0
     svx_scan.depthinclude = 0
+    fullpathtotop = os.path.join(survexfileroot.survexdirectory.path, survexfileroot.path)
+    print(" - RunSurvexIfNeeded cavern on '{}'".format(fullpathtotop), file=sys.stderr)
+    svx_scan.RunSurvexIfNeeded(fullpathtotop)
     indent=""
     fcollate = open(collatefilename, 'w')
 
@@ -728,7 +817,7 @@ def FindAndLoadSurvex(survexblockroot):
     # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
     # look in MapLocations() for how we find the entrances
 
     print('\n - Loading All Survex Blocks (LinearLoad)',file=sys.stderr)
     svx_load = LoadingSurvex()
 
     svx_load.svxdirs[""] = survexfileroot.survexdirectory
@@ -745,11 +834,11 @@ def FindAndLoadSurvex(survexblockroot):
     survexlegsalllength = svx_load.survexlegsalllength
     mem1 = models.get_process_memory()
 
     print(" - Number of SurvexDirectories: {}".format(len(svx_load.survexdict)))
     tf=0
     for d in svx_load.survexdict:
         tf += len(svx_load.survexdict[d])
     print(" - Number of SurvexFiles: {}".format(tf))
     svx_load = None
 
     # Close the logging file, Restore sys.stdout to our old saved file handle
@@ -760,14 +849,15 @@ def FindAndLoadSurvex(survexblockroot):
     return (survexlegsnumber, survexlegsalllength)
 
 def MakeSurvexFileRoot():
-    survexfileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
-    survexfileroot.save()
-    survexdirectoryroot = models_survex.SurvexDirectory(path=settings.EXPOWEB, cave=None, primarysurvexfile=survexfileroot)
-    survexdirectoryroot.save()
-    survexfileroot.survexdirectory = survexdirectoryroot
-    survexfileroot.save() # mutually dependent objects need a double-save like this
-    return survexfileroot
+    """Returns a file_object.path = SURVEX_TOPNAME associated with directory_object.path = SURVEX_DATA
+    """
+    fileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
+    fileroot.save()
+    directoryroot = models_survex.SurvexDirectory(path=settings.SURVEX_DATA, cave=None, primarysurvexfile=fileroot)
+    directoryroot.save()
+    fileroot.survexdirectory = directoryroot # i.e. SURVEX_DATA/SURVEX_TOPNAME
+    fileroot.save() # mutually dependent objects need a double-save like this
+    return fileroot
 
 def LoadSurvexBlocks():
 
@@ -781,6 +871,7 @@ def LoadSurvexBlocks():
     models.DataIssue.objects.filter(parser='survex').delete()
 
     survexfileroot = MakeSurvexFileRoot()
+    # this next makes a block_object assciated with a file_object.path = SURVEX_TOPNAME
     survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
                                                 legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
     # this is the first so id=1
@@ -788,7 +879,9 @@ def LoadSurvexBlocks():
 
     print(' - Loading Survex Blocks...')
     memstart = models.get_process_memory()
+    #----------------------------------------------------------------
     survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
+    #----------------------------------------------------------------
     memend = models.get_process_memory()
     print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))
 
@@ -800,7 +893,6 @@ def LoadSurvexBlocks():
     print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
     print(' - Loaded All Survex Blocks.')
 
-
 poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
 
 def LoadPos():
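poslineregex, declared above, is what LoadPos uses to split each line of the generated .pos file into x/y/z coordinates and a station name. A quick demonstration on a made-up line, purely illustrative:

    # Sketch only: exercising poslineregex on a hypothetical .pos line
    # of the form "( easting, northing, altitude ) station.name".
    import re

    poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

    sample = "(   36670.37,  83317.43,  1903.97 ) 1623.264.technical_t2.5"
    m = poslineregex.match(sample)
    if m:
        x, y, z, name = m.groups()
        print(float(x), float(y), float(z), name)   # 36670.37 83317.43 1903.97 1623.264.technical_t2.5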
@@ -810,6 +902,14 @@ def LoadPos():
     and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
     If we don't have it in the database, print an error message and discard it.
     """
+    svx_t = 0
+    d3d_t = 0
+    def runcavern3d():
+        print(" - Regenerating stale (or chaos-monkeyed) cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
+            format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
+        call([settings.CAVERN, "--log", "--output={}".format(topdata), "{}.svx".format(topdata)])
+        call([settings.THREEDTOPOS, '{}.3d'.format(topdata)], cwd = settings.SURVEX_DATA)
+
     topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
     print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))
 
@@ -817,8 +917,28 @@ def LoadPos():
     skip = {}
     print("\n") # extra line because cavern overwrites the text buffer somehow
     # cavern defaults to using same cwd as supplied input file
-    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
-    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
+    completed_process = run(["which", "{}".format(settings.CAVERN)],
+                            capture_output=True, check=True, text=True)
+    cav_t = os.path.getmtime(completed_process.stdout.strip())
+
+    svxpath = topdata + ".svx"
+    d3dpath = topdata + ".3d"
+
+    svx_t = os.path.getmtime(svxpath)
+
+    if os.path.isfile(d3dpath):
+        d3d_t = os.path.getmtime(d3dpath)
+
+    now = time.time()
+    if not os.path.isfile(d3dpath):
+        runcavern3d()
+    elif svx_t - d3d_t > 0: # stale, 3d older than svx file
+        runcavern3d()
+    elif now - d3d_t > 60 *24*60*60: # >60 days old, re-run anyway
+        runcavern3d()
+    elif cav_t - d3d_t > 0: # new version of cavern
+        runcavern3d()
 
     mappoints = {}
     for pt in MapLocations().points():
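Both regeneration paths now locate the cavern binary with subprocess.run(["which", ...]) and compare its modification time against the generated outputs; the capture_output= and text= arguments require Python 3.7 or later. A minimal standalone sketch of that lookup; the binary name and a Unix-style which are assumptions, not part of the commit:

    # Sketch only: modification time of an executable found on PATH.
    import os
    from subprocess import run

    def binary_mtime(name="cavern"):
        # raises CalledProcessError if the binary is not on PATH (check=True)
        completed_process = run(["which", name],
                                capture_output=True, check=True, text=True)
        return os.path.getmtime(completed_process.stdout.strip())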