import copy
import os
import re
import sys
import time
from datetime import datetime, timedelta
from subprocess import call, run

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.core.views_caves import MapLocations
from troggle.parsers.logbooks import GetCaveLookup
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.utils import ChaosMonkey
survexblockroot = None
2020-06-19 00:26:15 +01:00
ROOTBLOCK = " rootblock "
2020-06-24 14:10:13 +01:00
class SurvexLeg ( ) :
""" No longer a models.Model subclass, so no longer a database table
2020-06-23 23:34:08 +01:00
"""
2020-06-24 14:10:13 +01:00
tape = 0.0
compass = 0.0
clino = 0.0
2020-06-27 18:00:24 +01:00
class LoadingSurvex ( ) :
2020-06-24 14:10:13 +01:00
""" A ' survex block ' is a *begin...*end set of cave data.
A survex file can contain many begin - end blocks , which can be nested , and which can * include
other survex files .
A ' scansfolder ' is what we today call a " survey scans folder " or a " wallet " .
"""
2020-07-06 01:24:43 +01:00
rx_flagsnot = re . compile ( r " not \ s " )
2020-06-24 14:10:13 +01:00
rx_linelen = re . compile ( r " [ \ d \ -+.]+$ " )
rx_team = re . compile ( r " (?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant) \ s+(.*)$ " )
rx_person = re . compile ( r " (?i) and | / |, | & | \ + |^both$|^none$ " )
2020-06-27 17:55:59 +01:00
rx_qm = re . compile ( r ' (?i)^ \ s*QM( \ d) \ s+?([a-dA-DxX]) \ s+([ \ w \ -]+) \ .( \ d+) \ s+(([ \ w \ -]+) \ .( \ d+)| \ -) \ s+(.+)$ ' )
2020-06-24 14:10:13 +01:00
# remember there is also QM_PATTERN used in views_other and set in settings.py
2020-07-05 17:22:26 +01:00
rx_begin = re . compile ( r ' (?i)begin ' )
rx_end = re . compile ( r ' (?i)end$ ' )
rx_title = re . compile ( r ' (?i)title$ ' )
rx_ref = re . compile ( r ' (?i)ref$ ' )
rx_data = re . compile ( r ' (?i)data$ ' )
rx_flags = re . compile ( r ' (?i)flags$ ' )
2020-06-29 21:16:13 +01:00
rx_cave = re . compile ( r ' (?i)caves-( \ d \ d \ d \ d)/([- \ d \ w]+| \ d \ d \ d \ d-? \ w+- \ d+) ' )
2020-06-27 17:55:59 +01:00
rx_comment = re . compile ( r ' ([^;]*?) \ s*(?:; \ s*(.*))? \ n?$ ' )
2020-06-28 01:50:34 +01:00
rx_comminc = re . compile ( r ' (?i)^ \ *include[ \ s]*([- \ w/]*).*$ ' ) # inserted by linear collate ;*include
rx_commcni = re . compile ( r ' (?i)^ \ *edulcni[ \ s]*([- \ w/]*).*$ ' ) # inserted by linear collate ;*edulcni
2020-06-27 17:55:59 +01:00
rx_include = re . compile ( r ' (?i)^ \ s*( \ *include[ \ s].*)$ ' )
2020-07-05 17:22:26 +01:00
rx_commref = re . compile ( r ' (?i)^ \ s*ref(?:erence)?[ \ s.:]*( \ d+) \ s*# \ s*(X)? \ s*( \ d+) ' )
rx_wallet = re . compile ( r ' (?i)^ \ s*wallet[ \ s.:]*( \ d+) \ s*# \ s*(X)? \ s*( \ d+) ' )
rx_implicit = re . compile ( r ' (?i)^[ \ s.:]*( \ d+) \ s*# \ s*(X)? \ s*( \ d+) ' )
2020-07-04 01:10:17 +01:00
rx_ref_text = re . compile ( r ' (?i)^ \ s* \ " [^ " ]* \ " \ s*$ ' )
2020-06-27 17:55:59 +01:00
rx_star = re . compile ( r ' (?i) \ s* \ *[ \ s,]*( \ w+) \ s*(.*?) \ s*(?:;.*)?$ ' )
2020-06-24 14:10:13 +01:00
rx_starref = re . compile ( r ' (?i)^ \ s* \ *ref[ \ s.:]*((?:19[6789] \ d)|(?:20[0123] \ d)) \ s*#? \ s*(X)? \ s*(.*? \ d+.*?)$ ' )
2020-06-24 22:46:18 +01:00
rx_argsref = re . compile ( r ' (?i)^[ \ s.:]*((?:19[6789] \ d)|(?:20[0123] \ d)) \ s*#? \ s*(X)? \ s*(.*? \ d+.*?)$ ' )
2020-06-24 14:10:13 +01:00
2020-07-02 16:25:51 +01:00
# This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore
2020-07-03 17:22:15 +01:00
datastardefault = { " type " : " normal " , " from " : 0 , " to " : 1 , " tape " : 2 , " compass " : 3 , " clino " : 4 }
2020-07-04 13:31:46 +01:00
flagsdefault = { " duplicate " : False , " surface " : False , " splay " : False , " skiplegs " : False , " splayalias " : False }
2020-07-02 16:25:51 +01:00
2020-07-03 17:22:15 +01:00
datastar = { }
flagsstar = { }
2020-07-04 01:10:17 +01:00
slength = 0.0
legsnumber = 0
2020-06-24 22:46:18 +01:00
depthbegin = 0
2020-06-27 17:55:59 +01:00
depthinclude = 0
2020-07-04 01:10:17 +01:00
legsnumberstack = [ ]
slengthstack = [ ]
2020-07-06 01:24:43 +01:00
personexpedstack = [ ]
2020-06-27 00:50:40 +01:00
stackbegin = [ ]
2020-07-03 17:22:15 +01:00
flagsstack = [ ]
datastack = [ ]
2020-06-27 17:55:59 +01:00
stackinclude = [ ]
2020-06-28 14:42:26 +01:00
stacksvxfiles = [ ]
2020-06-28 01:50:34 +01:00
svxfileslist = [ ]
svxdirs = { }
2020-07-04 13:31:46 +01:00
expos = { }
2020-07-01 17:41:09 +01:00
survexdict = { } # each key is a directory, and its value is a list of files
2020-06-24 22:46:18 +01:00
lineno = 0
2020-06-24 14:10:13 +01:00
insp = " "
callcount = 0
2020-06-30 15:39:24 +01:00
ignoreprefix = [ " surface " , " kataster " , " fixedpts " , " gpx " ]
ignorenoncave = [ " caves-1623 " , " caves-1623/2007-neu " ]
2020-06-27 17:55:59 +01:00
includedfilename = " "
currentsurvexblock = None
currentsurvexfile = None
currentcave = None
2020-07-02 16:25:51 +01:00
caverndate = None
2020-07-06 01:24:43 +01:00
currentpersonexped = [ ]
2020-06-24 14:10:13 +01:00
def __init__ ( self ) :
2020-06-29 21:16:13 +01:00
self . caveslist = GetCaveLookup ( )
2020-06-24 14:10:13 +01:00
pass
2020-07-05 17:22:26 +01:00
def LoadSurvexFallThrough ( self , survexblock , line , cmd ) :
2020-06-28 14:42:26 +01:00
if cmd == " require " :
2020-06-24 19:07:11 +01:00
pass # should we check survex version available for processing?
2020-07-04 13:31:46 +01:00
elif cmd in [ " equate " , " fix " , " calibrate " , " cs " , " export " , " case " ,
2020-06-24 19:07:11 +01:00
" declination " , " infer " , " instrument " , " sd " , " units " ] :
pass # we ignore all these, which is fine.
else :
2020-07-04 13:31:46 +01:00
if cmd in [ " include " , " data " , " flags " , " title " , " entrance " , " set " , " units " , " alias " , " ref " ] :
2020-06-24 19:07:11 +01:00
message = " ! Unparsed [* {} ]: ' {} ' {} " . format ( cmd , line , survexblock . survexfile . path )
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
else :
message = " ! Bad svx command: [* {} ] {} ( {} ) {} " . format ( cmd , line , survexblock , survexblock . survexfile . path )
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
def LoadSurvexTeam ( self , survexblock , line ) :
teammembers = [ ]
mteammember = self . rx_team . match ( line )
if mteammember :
for tm in self . rx_person . split ( mteammember . group ( 2 ) ) :
if tm :
personexpedition = survexblock . expedition and GetPersonExpeditionNameLookup ( survexblock . expedition ) . get ( tm . lower ( ) )
if ( personexpedition , tm ) not in teammembers :
teammembers . append ( ( personexpedition , tm ) )
personrole = models_survex . SurvexPersonRole ( survexblock = survexblock , nrole = mteammember . group ( 1 ) . lower ( ) , personexpedition = personexpedition , personname = tm )
2020-07-01 17:41:09 +01:00
personrole . save ( )
2020-06-24 19:07:11 +01:00
personrole . expeditionday = survexblock . expeditionday
if personexpedition :
personrole . person = personexpedition . person
2020-07-06 01:24:43 +01:00
self . currentpersonexped . append ( personexpedition )
2020-06-24 19:07:11 +01:00
personrole . save ( )
2020-07-04 13:31:46 +01:00
def LoadSurvexEntrance ( self , survexblock , line ) :
# Not using this yet
pass
def LoadSurvexAlias ( self , survexblock , line ) :
# *alias station - ..
splayalias = re . match ( " (?i)station \ s* \ - \ s* \ . \ . \ s*$ " , line )
if splayalias :
self . flagsstar [ " splayalias " ] = True
else :
message = " ! Bad *ALIAS: ' {} ' ( {} ) {} " . format ( line , survexblock , survexblock . survexfile . path )
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-24 17:55:42 +01:00
def LoadSurvexDate ( self , survexblock , line ) :
# we should make this a date range for everything
if len ( line ) == 10 :
2020-07-04 13:31:46 +01:00
year = line [ : 4 ]
# make_aware is a django function, and may not be correct to use it like this anyway! We want Austrian time.
2020-06-24 17:55:42 +01:00
survexblock . date = make_aware ( datetime . strptime ( re . sub ( r " \ . " , " - " , line ) , ' % Y- % m- %d ' ) , get_current_timezone ( ) )
2020-07-04 13:31:46 +01:00
# cacheing to save DB query on every block and to prepare for django-less troggle in future
if year in self . expos :
expo = self . expos [ year ]
else :
expeditions = models . Expedition . objects . filter ( year = year )
2020-06-24 17:55:42 +01:00
assert len ( expeditions ) == 1
2020-07-04 13:31:46 +01:00
expo = expeditions [ 0 ]
self . expos [ year ] = expo
2020-06-24 17:55:42 +01:00
2020-07-04 13:31:46 +01:00
survexblock . expedition = expo
survexblock . expeditionday = survexblock . expedition . get_expedition_day ( survexblock . date )
survexblock . save ( )
def LoadSurvexLeg ( self , survexblock , sline , comment ) :
2020-06-24 14:10:13 +01:00
""" This reads compass, clino and tape data but only keeps the tape lengths,
the rest is discarded after error - checking .
"""
2020-07-04 01:10:17 +01:00
invalid_clino = 180.0
invalid_compass = 720.0
invalid_tape = 0.0
2020-07-04 13:31:46 +01:00
if self . flagsstar [ " skiplegs " ] :
#print("skip in ", self.flagsstar, survexblock.survexfile.path)
return
2020-07-04 01:10:17 +01:00
2020-07-03 17:22:15 +01:00
#print("! LEG datastar type:{}++{}\n{} ".format(self.datastar["type"].upper(), survexblock.survexfile.path, sline))
2020-07-03 14:53:36 +01:00
# SKIP PASSAGES *data passage
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] == " passage " :
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] == " cartesian " :
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] == " nosurvey " :
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] == " diving " :
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] == " cylpolar " :
2020-07-03 14:53:36 +01:00
return
2020-07-04 01:10:17 +01:00
# print(" !! LEG data lineno:{}\n !! sline:'{}'\n !! datastar['tape']: {}".format(self.lineno, sline, self.datastar["tape"]))
2020-07-03 17:22:15 +01:00
if self . datastar [ " type " ] != " normal " :
2020-07-03 14:53:36 +01:00
return
2020-07-03 17:22:15 +01:00
datastar = self . datastar # shallow copy: alias but the things inside are the same things
2020-06-24 14:10:13 +01:00
survexleg = SurvexLeg ( )
2020-07-04 01:10:17 +01:00
2020-06-24 22:46:18 +01:00
ls = sline . lower ( ) . split ( )
2020-07-04 13:31:46 +01:00
# skip all splay legs
2020-07-06 01:24:43 +01:00
if ls [ datastar [ " from " ] ] == " .. " or ls [ datastar [ " from " ] ] == " . " :
2020-07-04 13:31:46 +01:00
#print("Splay in ", survexblock.survexfile.path)
return
2020-07-06 01:24:43 +01:00
if ls [ datastar [ " to " ] ] == " .. " or ls [ datastar [ " to " ] ] == " . " :
2020-07-04 13:31:46 +01:00
#print("Splay in ", survexblock.survexfile.path)
return
if self . flagsstar [ " splayalias " ] :
if ls [ datastar [ " from " ] ] == " - " :
#print("Aliased splay in ", survexblock.survexfile.path)
return
if ls [ datastar [ " to " ] ] == " - " :
#print("Aliased splay in ", survexblock.survexfile.path)
return
2020-07-03 14:53:36 +01:00
try :
2020-07-03 17:22:15 +01:00
tape = ls [ datastar [ " tape " ] ]
2020-07-03 14:53:36 +01:00
except :
2020-07-03 17:22:15 +01:00
print ( ( " ! datastar parsing incorrect " , survexblock . survexfile . path ) )
print ( ( " datastar: " , datastar ) )
2020-07-03 14:53:36 +01:00
print ( ( " Line: " , ls ) )
2020-07-03 17:22:15 +01:00
message = ' ! datastar parsing incorrect in line %s in %s ' % ( ls , survexblock . survexfile . path )
2020-07-03 14:53:36 +01:00
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
survexleg . tape = invalid_tape
return
2020-07-04 13:31:46 +01:00
# e.g. '29/09' or '(06.05)' in the tape measurement
2020-07-04 01:10:17 +01:00
# tape = tape.replace("(","") # edited original file (only one) instead
# tape = tape.replace(")","") # edited original file (only one) instead
# tape = tape.replace("/",".") # edited original file (only one) instead.
2020-07-03 14:53:36 +01:00
try :
survexleg . tape = float ( tape )
2020-07-04 01:10:17 +01:00
self . legsnumber + = 1
2020-07-03 14:53:36 +01:00
except ValueError :
print ( ( " ! Tape misread in " , survexblock . survexfile . path ) )
2020-07-03 17:22:15 +01:00
print ( ( " datastar: " , datastar ) )
2020-07-03 14:53:36 +01:00
print ( ( " Line: " , ls ) )
message = ' ! Value Error: Tape misread in line %s in %s ' % ( ls , survexblock . survexfile . path )
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
survexleg . tape = invalid_tape
try :
2020-07-04 13:31:46 +01:00
survexblock . legslength + = survexleg . tape
2020-07-04 01:10:17 +01:00
self . slength + = survexleg . tape
2020-07-03 14:53:36 +01:00
except ValueError :
message = ' ! Value Error: Tape length not added %s in %s ' % ( ls , survexblock . survexfile . path )
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
try :
2020-07-03 17:22:15 +01:00
lcompass = ls [ datastar [ " compass " ] ]
2020-07-03 14:53:36 +01:00
except :
print ( ( " ! Compass not found in " , survexblock . survexfile . path ) )
2020-07-03 17:22:15 +01:00
print ( ( " datastar: " , datastar ) )
2020-07-03 14:53:36 +01:00
print ( ( " Line: " , ls ) )
message = ' ! Value Error: Compass not found in line %s in %s ' % ( ls , survexblock . survexfile . path )
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
lcompass = invalid_compass
try :
2020-07-03 17:22:15 +01:00
lclino = ls [ datastar [ " clino " ] ]
2020-07-03 14:53:36 +01:00
except :
print ( ( " ! Clino misread in " , survexblock . survexfile . path ) )
2020-07-03 17:22:15 +01:00
print ( ( " datastar: " , datastar ) )
2020-07-03 14:53:36 +01:00
print ( ( " Line: " , ls ) )
message = ' ! Value Error: Clino misread in line %s in %s ' % ( ls , survexblock . survexfile . path )
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
lclino = invalid_clino
if lclino == " up " :
survexleg . clino = 90.0
lcompass = invalid_compass
elif lclino == " down " :
survexleg . clino = - 90.0
lcompass = invalid_compass
elif lclino == " - " or lclino == " level " :
survexleg . clino = - 90.0
try :
survexleg . compass = float ( lcompass )
except ValueError :
print ( ( " ! Compass misread in " , survexblock . survexfile . path ) )
2020-07-03 17:22:15 +01:00
print ( ( " datastar: " , datastar ) )
2020-07-03 14:53:36 +01:00
print ( ( " Line: " , ls ) )
message = " ! Value Error: lcompass: ' {} ' line {} in ' {} ' " . format ( lcompass ,
ls , survexblock . survexfile . path )
models . DataIssue . objects . create ( parser = ' survexleg ' , message = message )
survexleg . compass = invalid_compass
2020-07-04 01:10:17 +01:00
# delete the object to save memory
2020-07-03 14:53:36 +01:00
survexleg = None
2020-05-13 19:57:07 +01:00
2020-06-24 22:46:18 +01:00
def LoadSurvexRef ( self , survexblock , args ) :
2020-07-05 17:22:26 +01:00
#print(self.insp+ "*REF ---- '"+ args +"'")
2020-07-04 01:10:17 +01:00
2020-06-24 22:46:18 +01:00
# *REF but also ; Ref years from 1960 to 2039
2020-07-04 01:10:17 +01:00
refline = self . rx_ref_text . match ( args )
if refline :
# a textual reference such as "1996-1999 Not-KH survey book pp 92-95"
print ( self . insp + " *REF quoted text so ignored: " + args )
return
2020-06-24 22:46:18 +01:00
if len ( args ) < 4 :
2020-07-04 01:10:17 +01:00
message = " ! Empty or BAD *REF statement ' {} ' in ' {} ' " . format ( args , survexblock . survexfile . path )
2020-06-24 22:46:18 +01:00
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
return
2020-06-24 14:10:13 +01:00
2020-06-24 22:46:18 +01:00
argsgps = self . rx_argsref . match ( args )
if argsgps :
yr , letterx , wallet = argsgps . groups ( )
else :
2020-07-04 01:10:17 +01:00
message = " ! BAD *REF statement ' {} ' in ' {} ' " . format ( args , survexblock . survexfile . path )
print ( self . insp + message )
2020-06-24 22:46:18 +01:00
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
return
2020-06-24 14:10:13 +01:00
2020-06-24 14:49:39 +01:00
if not letterx :
letterx = " "
else :
letterx = " X "
if len ( wallet ) < 2 :
wallet = " 0 " + wallet
assert ( int ( yr ) > 1960 and int ( yr ) < 2039 ) , " Wallet year out of bounds: %s " % yr
refscan = " %s # %s %s " % ( yr , letterx , wallet )
2020-06-24 22:46:18 +01:00
try :
if int ( wallet ) > 100 :
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF {} - too big in ' {} ' " . format ( refscan , survexblock . survexfile . path )
2020-06-24 22:46:18 +01:00
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
except :
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF {} - not numeric in ' {} ' " . format ( refscan , survexblock . survexfile . path )
2020-06-24 22:46:18 +01:00
print ( ( self . insp + message ) )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-24 14:49:39 +01:00
manyscansfolders = models_survex . ScansFolder . objects . filter ( walletname = refscan )
if manyscansfolders :
survexblock . scansfolder = manyscansfolders [ 0 ]
survexblock . save ( )
if len ( manyscansfolders ) > 1 :
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF {} - {} scan folders from DB search in {} " . format ( refscan , len ( manyscansfolders ) , survexblock . survexfile . path )
2020-06-24 17:55:42 +01:00
print ( ( self . insp + message ) )
2020-06-24 14:49:39 +01:00
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
else :
2020-06-25 02:10:20 +01:00
message = " ! Wallet *REF ' {} ' - NOT found in DB search ' {} ' " . format ( refscan , survexblock . survexfile . path )
2020-06-24 17:55:42 +01:00
print ( ( self . insp + message ) )
2020-06-24 14:49:39 +01:00
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-24 22:46:18 +01:00
def LoadSurvexQM ( self , survexblock , qmline ) :
insp = self . insp
2020-06-24 14:49:39 +01:00
qm_no = qmline . group ( 1 )
qm_grade = qmline . group ( 2 )
2020-06-25 03:17:56 +01:00
if qmline . group ( 3 ) : # usual closest survey station
qm_nearest = qmline . group ( 3 )
if qmline . group ( 4 ) :
qm_nearest = qm_nearest + " . " + qmline . group ( 4 )
if qmline . group ( 6 ) and qmline . group ( 6 ) != ' - ' :
qm_resolve_station = qmline . group ( 6 )
if qmline . group ( 7 ) :
qm_resolve_station = qm_resolve_station + " . " + qmline . group ( 7 )
else :
qm_resolve_station = " "
2020-06-24 14:49:39 +01:00
qm_notes = qmline . group ( 8 )
2020-06-25 03:17:56 +01:00
# Spec of QM in SVX files:
2020-06-24 14:49:39 +01:00
# ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
# ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
# ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
2020-06-25 03:17:56 +01:00
# NB none of the SurveyStations are in the DB now, so if we want to link to aSurvexStation
# we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
try :
qm = models_caves . QM . objects . create ( number = qm_no ,
# nearest_station=a_survex_station_object, # can be null
nearest_station_description = qm_resolve_station ,
nearest_station_name = qm_nearest ,
grade = qm_grade . upper ( ) ,
location_description = qm_notes )
qm . save
# message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
# print(insp+message)
# models.DataIssue.objects.create(parser='survex', message=message)
except :
message = " ! QM {} FAIL to create {} in ' {} ' " . format ( qm_no , qm_nearest , survexblock . survexfile . path )
2020-06-24 17:55:42 +01:00
print ( insp + message )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-25 03:17:56 +01:00
2020-06-27 19:00:26 +01:00
def LoadSurvexDataCmd ( self , survexblock , args ) :
2020-07-02 16:25:51 +01:00
""" Sets the order for data elements in this and following blocks, e.g.
* data normal from to compass clino tape
* data normal from to tape compass clino
We are only collecting length data so we are disinterested in from , to , LRUD etc .
"""
2020-07-03 17:22:15 +01:00
# datastardefault = { # included here as reference to help understand the code
2020-07-02 16:25:51 +01:00
# "type":"normal",
# "t":"leg",
# "from":0,
# "to":1,
# "tape":2,
# "compass":3,
# "clino":4}
2020-07-03 17:22:15 +01:00
datastar = copy . deepcopy ( self . datastardefault )
2020-07-02 16:25:51 +01:00
if args == " " :
# naked '*data' which is relevant only for passages. Ignore. Continue with previous settings.
return
2020-07-03 14:53:36 +01:00
# DEFAULT | NORMAL | CARTESIAN| NOSURVEY |PASSAGE | TOPOFIL | CYLPOLAR | DIVING
2020-07-02 16:25:51 +01:00
ls = args . lower ( ) . split ( )
2020-07-03 14:53:36 +01:00
if ls [ 0 ] == " default " :
2020-07-03 17:22:15 +01:00
self . datastar = copy . deepcopy ( self . datastardefault )
2020-07-03 14:53:36 +01:00
elif ls [ 0 ] == " normal " or ls [ 0 ] == " topofil " :
2020-07-03 17:22:15 +01:00
if not ( " from " in datastar and " to " in datastar ) :
2020-07-02 16:25:51 +01:00
message = " ! - Unrecognised *data normal statement ' {} ' {} | {} " . format ( args , survexblock . name , survexblock . survexpath )
print ( message )
print ( message , file = sys . stderr )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
return
else :
2020-07-03 17:22:15 +01:00
datastar = self . datastardefault
2020-07-02 16:25:51 +01:00
# ls = ["normal", "from", "to", "tape", "compass", "clino" ]
for i in range ( 1 , len ( ls ) ) : # len[0] is "normal"
if ls [ i ] in [ " bearing " , " compass " ] :
2020-07-03 17:22:15 +01:00
datastar [ " compass " ] = i - 1
2020-07-02 16:25:51 +01:00
if ls [ i ] in [ " clino " , " gradient " ] :
2020-07-03 17:22:15 +01:00
datastar [ " clino " ] = i - 1
2020-07-02 16:25:51 +01:00
if ls [ i ] in [ " tape " , " length " ] :
2020-07-03 17:22:15 +01:00
datastar [ " tape " ] = i - 1
self . datastar = copy . deepcopy ( datastar )
2020-07-02 16:25:51 +01:00
return
2020-07-03 14:53:36 +01:00
elif ls [ 0 ] == " cartesian " or ls [ 0 ] == " nosurvey " or ls [ 0 ] == " diving " or ls [ 0 ] == " cylpolar " or ls [ 0 ] == " passage " :
2020-07-04 01:10:17 +01:00
# message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
# print(message)
# print(message,file=sys.stderr)
# models.DataIssue.objects.create(parser='survex', message=message)
2020-07-03 17:22:15 +01:00
self . datastar [ " type " ] = ls [ 0 ]
2020-06-27 17:55:59 +01:00
else :
2020-07-03 14:53:36 +01:00
message = " ! - Unrecognised *data statement ' {} ' {} | {} " . format ( args , survexblock . name , survexblock . survexpath )
2020-07-02 16:25:51 +01:00
print ( message )
print ( message , file = sys . stderr )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-27 17:55:59 +01:00
2020-07-03 14:53:36 +01:00
def LoadSurvexFlags ( self , args ) :
# Valid flags are DUPLICATE, SPLAY, and SURFACE, and a flag may be preceded with NOT to turn it off.
# Default values are NOT any of them
2020-07-03 17:22:15 +01:00
self . flagsstar = copy . deepcopy ( self . flagsdefault )
2020-07-03 14:53:36 +01:00
flags = [ ]
2020-07-06 01:24:43 +01:00
args = self . rx_flagsnot . sub ( " not " , args )
2020-07-03 14:53:36 +01:00
argslist = args . split ( )
for s in argslist :
flags . append ( s )
2020-07-06 01:24:43 +01:00
print ( " # flagslist: {} " . format ( flags ) , )
2020-07-03 14:53:36 +01:00
if " duplicate " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " duplicate " ] = True
2020-07-03 14:53:36 +01:00
if " surface " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " surface " ] = True
2020-07-03 14:53:36 +01:00
if " splay " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " splay " ] = True
2020-07-03 14:53:36 +01:00
if " notduplicate " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " duplicate " ] = False
2020-07-03 14:53:36 +01:00
if " notsurface " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " surface " ] = False
2020-07-03 14:53:36 +01:00
if " notsplay " in flags :
2020-07-03 17:22:15 +01:00
self . flagsstar [ " splay " ] = False
2020-07-03 14:53:36 +01:00
2020-07-03 17:22:15 +01:00
# if self.flagsstar["duplicate"] == True or self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
2020-07-03 14:53:36 +01:00
# actually we do want to count duplicates as this is for "effort expended in surveying underground"
2020-07-03 17:22:15 +01:00
if self . flagsstar [ " surface " ] == True or self . flagsstar [ " splay " ] == True :
2020-07-04 01:10:17 +01:00
self . flagsstar [ " skiplegs " ] = True
2020-06-27 17:55:59 +01:00
def IdentifyCave ( self , cavepath ) :
2020-06-29 21:16:13 +01:00
if cavepath . lower ( ) in self . caveslist :
return self . caveslist [ cavepath . lower ( ) ]
2020-06-30 15:39:24 +01:00
# TO DO - some of this is already done in generating self.caveslist so simplify this
# esp. as it is in a loop.
2020-06-28 01:50:34 +01:00
path_match = self . rx_cave . search ( cavepath )
2020-06-27 17:55:59 +01:00
if path_match :
2020-06-28 14:42:26 +01:00
sluggy = ' {} - {} ' . format ( path_match . group ( 1 ) , path_match . group ( 2 ) )
2020-06-29 21:16:13 +01:00
guesses = [ sluggy . lower ( ) , path_match . group ( 2 ) . lower ( ) ]
for g in guesses :
if g in self . caveslist :
self . caveslist [ cavepath ] = self . caveslist [ g ]
return self . caveslist [ g ]
print ( ' ! Failed to find cave for {} ' . format ( cavepath . lower ( ) ) )
2020-06-27 17:55:59 +01:00
else :
2020-07-01 22:49:38 +01:00
# not a cave, but that is fine.
# print(' ! No regex(standard identifier) cave match for %s' % cavepath.lower())
2020-06-27 17:55:59 +01:00
return None
2020-06-29 21:16:13 +01:00
def GetSurvexDirectory ( self , headpath ) :
2020-07-01 17:41:09 +01:00
""" This creates a SurvexDirectory if it has not been seen before, and on creation
it sets the primarysurvexfile . This is correct as it should be set on the first file
in the directory , where first is defined by the * include ordering . Which is what we
are doing .
"""
2020-06-29 21:16:13 +01:00
if not headpath :
return self . svxdirs [ " " ]
if headpath . lower ( ) not in self . svxdirs :
2020-07-01 17:41:09 +01:00
self . svxdirs [ headpath . lower ( ) ] = models_survex . SurvexDirectory ( path = headpath , primarysurvexfile = self . currentsurvexfile )
self . svxdirs [ headpath . lower ( ) ] . save ( )
2020-07-03 17:22:15 +01:00
self . survexdict [ self . svxdirs [ headpath . lower ( ) ] ] = [ ] # list of the files in the directory
2020-06-29 21:16:13 +01:00
return self . svxdirs [ headpath . lower ( ) ]
2020-06-30 15:39:24 +01:00
def ReportNonCaveIncludes ( self , headpath , includelabel ) :
""" Ignore surface, kataser and gps *include survex files
"""
if headpath in self . ignorenoncave :
return
for i in self . ignoreprefix :
if headpath . startswith ( i ) :
return
2020-07-01 17:41:09 +01:00
message = " ! {} is not a cave. (while creating ' {} ' sfile & sdirectory) " . format ( headpath , includelabel )
print ( " \n " + message )
print ( " \n " + message , file = sys . stderr )
2020-06-30 15:39:24 +01:00
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-07-04 01:10:17 +01:00
2020-07-01 22:49:38 +01:00
def LoadSurvexFile ( self , svxid ) :
2020-06-28 14:42:26 +01:00
""" Creates SurvexFile in the database, and SurvexDirectory if needed
2020-06-27 17:55:59 +01:00
with links to ' cave '
2020-07-01 22:49:38 +01:00
Creates a new current survexfile and valid . survexdirectory
2020-06-28 14:42:26 +01:00
The survexblock passed - in is not necessarily the parent . FIX THIS .
2020-06-27 17:55:59 +01:00
"""
2020-07-04 01:10:17 +01:00
# print(" # datastack in LoadSurvexFile:{} 'type':".format(svxid), end="")
# for dict in self.datastack:
# print("'{}' ".format(dict["type"].upper()), end="")
# print("")
2020-07-03 17:22:15 +01:00
2020-07-02 16:25:51 +01:00
2020-06-28 01:50:34 +01:00
depth = " " * self . depthbegin
2020-07-05 17:22:26 +01:00
# print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, svxid))
2020-07-01 22:49:38 +01:00
headpath = os . path . dirname ( svxid )
2020-06-27 17:55:59 +01:00
2020-07-01 22:49:38 +01:00
newfile = models_survex . SurvexFile ( path = svxid )
2020-07-01 17:41:09 +01:00
newfile . save ( ) # until we do this there is no internal id so no foreign key works
self . currentsurvexfile = newfile
2020-06-30 15:39:24 +01:00
newdirectory = self . GetSurvexDirectory ( headpath )
2020-07-01 17:41:09 +01:00
newdirectory . save ( )
newfile . survexdirectory = newdirectory
2020-07-03 17:22:15 +01:00
self . survexdict [ newdirectory ] . append ( newfile )
2020-07-01 17:41:09 +01:00
cave = self . IdentifyCave ( headpath ) # cave already exists in db
2020-06-30 15:39:24 +01:00
if not newdirectory :
message = " ! ' None ' SurvexDirectory returned from GetSurvexDirectory( {} ) " . format ( headpath )
print ( message )
print ( message , file = sys . stderr )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-29 21:16:13 +01:00
2020-06-28 01:50:34 +01:00
if cave :
2020-06-30 15:39:24 +01:00
newdirectory . cave = cave
newfile . cave = cave
2020-07-01 17:41:09 +01:00
#print("\n"+str(newdirectory.cave),file=sys.stderr)
2020-06-30 15:39:24 +01:00
else :
2020-07-01 22:49:38 +01:00
self . ReportNonCaveIncludes ( headpath , svxid )
2020-06-30 15:39:24 +01:00
if not newfile . survexdirectory :
2020-07-01 22:49:38 +01:00
message = " ! SurvexDirectory NOT SET in new SurvexFile {} " . format ( svxid )
2020-06-30 15:39:24 +01:00
print ( message )
print ( message , file = sys . stderr )
models . DataIssue . objects . create ( parser = ' survex ' , message = message )
2020-06-28 14:42:26 +01:00
self . currentsurvexfile . save ( ) # django insists on this although it is already saved !?
try :
2020-06-30 15:39:24 +01:00
newdirectory . save ( )
2020-06-28 14:42:26 +01:00
except :
2020-06-30 15:39:24 +01:00
print ( newdirectory , file = sys . stderr )
print ( newdirectory . primarysurvexfile , file = sys . stderr )
2020-06-28 14:42:26 +01:00
raise
2020-07-03 17:22:15 +01:00
2020-07-04 01:10:17 +01:00
# print(" # datastack end LoadSurvexFile:{} 'type':".format(svxid), end="")
# for dict in self.datastack:
# print("'{}' ".format(dict["type"].upper()), end="")
# print("")
pass
2020-06-28 01:50:34 +01:00
2020-06-28 14:42:26 +01:00
def ProcessIncludeLine ( self , included ) :
2020-06-28 01:50:34 +01:00
svxid = included . groups ( ) [ 0 ]
2020-06-28 14:42:26 +01:00
self . LoadSurvexFile ( svxid )
self . stacksvxfiles . append ( self . currentsurvexfile )
def ProcessEdulcniLine(self, edulcni):
    """Saves the current survexfile in the db.

    Handles a ';*edulcni' marker: we are returning from an included file,
    so pop the stacked parent file back as the current one.
    """
    svxid = edulcni.group(1)
    # depth = " " * self.depthbegin
    # print("{:2}{} - Edulcni survexfile:'{}'".format(self.depthbegin, depth, svxid))
    self.currentsurvexfile.save()
    self.currentsurvexfile = self.stacksvxfiles.pop()
2020-06-24 14:49:39 +01:00
2020-06-24 22:46:18 +01:00
def LoadSurvexComment(self, survexblock, comment):
    """Dispatch a single survex comment.

    Ignores all comments except ;ref, ;wallet, ;QM and the synthetic
    ;*include / ;*edulcni markers inserted into the collated survex file.
    Note that a matched ;ref / ;wallet prefix is stripped off 'comment'
    before the later matches run, so the checks deliberately cascade.
    """
    m = self.rx_commref.match(comment)
    if m:
        comment = re.sub('(?i)\s*ref[.;]?', "", comment.strip())
        self.LoadSurvexRef(survexblock, comment)

    m = self.rx_wallet.match(comment)
    if m:
        comment = re.sub('(?i)\s*wallet[.;]?', "", comment.strip())
        self.LoadSurvexRef(survexblock, comment)

    if self.rx_implicit.match(comment):
        self.LoadSurvexRef(survexblock, comment)

    m = self.rx_qm.match(comment)
    if m:
        self.LoadSurvexQM(survexblock, m)

    # ;*include means 'we have been included'; whereas *include means 'proceed to include'
    m = self.rx_comminc.match(comment)
    if m:
        self.ProcessIncludeLine(m)

    # ;*edulcni means we are returning from an included file
    m = self.rx_commcni.match(comment)
    if m:
        self.ProcessEdulcniLine(m)
2020-06-24 22:46:18 +01:00
def LoadSurvexSetup(self, survexblock, survexfile):
    """Prepare parser state before reading a survex file.

    Resets the *begin depth and line counter, restores the default *data
    interpretation, emits progress ticks to stderr, and attempts to attach
    a Cave to the file based on a 'caves-YYYY/...' path component.

    Fixes: removed the unused local 'blocklegs' and a stray trailing
    semicolon; no behaviour change.
    """
    self.depthbegin = 0
    # NOTE(review): this assigns a *reference* to the default, whereas
    # LinearLoad() takes a copy.deepcopy of it -- confirm that datastar
    # is never mutated in place on this code path.
    self.datastar = self.datastardefault
    print(self.insp + " - MEM:{:.3f} Reading. parent:{} <> {} ".format(models.get_process_memory(), survexblock.survexfile.path, survexfile.path))
    self.lineno = 0
    sys.stderr.flush()
    self.callcount += 1
    # Progress ticker: a dot every 10 calls, newline every 500.
    if self.callcount % 10 == 0:
        print(".", file=sys.stderr, end='')
    if self.callcount % 500 == 0:
        print("\n", file=sys.stderr, end='')
    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models_caves.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
2020-06-24 22:46:18 +01:00
2020-07-01 17:41:09 +01:00
def LinearLoad(self, survexblock, path, svxlines):
    """Loads a single survex file. Usually used to import all the survex files
    which have been collated into a single file.

    Loads the *begin/*end blocks using a stack for labels; *data and *flags
    interpretation state is pushed/popped in parallel stacks so nested blocks
    inherit and then restore their parent's settings.

    BUGFIX: the final two assignments used to be swapped, storing the total
    surveyed length (metres, float) in self.legsnumber and the leg count in
    self.slength.
    """
    blkid = None
    pathlist = None
    args = None
    oldflags = None
    blockcount = 0
    self.lineno = 0
    slengthtotal = 0.0   # total surveyed length over all blocks in this file
    nlegstotal = 0       # total number of legs over all blocks in this file

    self.relativefilename = path
    cave = self.IdentifyCave(path)  # this will produce null for survex files which are geographic collections

    self.currentsurvexfile = survexblock.survexfile
    self.currentsurvexfile.save()  # django insists on this although it is already saved !?

    self.datastar = copy.deepcopy(self.datastardefault)
    self.flagsstar = copy.deepcopy(self.flagsdefault)

    def tickle():
        # Progress ticker: a dot every 10 blocks, a memory report every 200.
        nonlocal blockcount
        blockcount += 1
        if blockcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if blockcount % 200 == 0:
            print("\n", file=sys.stderr, end='')
            print("  - MEM: {:7.3f} MB in use".format(models.get_process_memory()), file=sys.stderr)
            print("  ", file=sys.stderr, end='')
            sys.stderr.flush()

    def addpersonlengths():
        # Credit this block's surveyed length to everyone on the survey team.
        for personexpedition in self.currentpersonexped:
            personexpedition.legslength += self.slength

    def printbegin():
        # Rebuild the dotted survex path for the block we are entering.
        nonlocal blkid
        nonlocal pathlist
        depth = " " * self.depthbegin
        self.insp = depth
        # print("{:2}{} - Begin for :'{}'".format(self.depthbegin, depth, blkid))
        pathlist = ""
        for id in self.stackbegin:
            if len(id) > 0:
                pathlist += "." + id

    def printend():
        nonlocal args
        depth = " " * self.depthbegin
        # print("{:2}{} - End   from:'{}'".format(self.depthbegin, depth, args))
        # print("{:2}{} - LEGS: {} (n: {}, length:{})".format(self.depthbegin,
        #       depth, self.slength, self.slength, self.legsnumber))

    def pushblock():
        # Save the current *data and *flags interpretation so *end can restore it.
        nonlocal blkid
        # ------------ * DATA
        self.datastack.append(copy.deepcopy(self.datastar))
        # ------------ * DATA
        # ------------ * FLAGS
        self.flagsstack.append(copy.deepcopy(self.flagsstar))
        # ------------ * FLAGS

    def popblock():
        # Restore the *data and *flags interpretation saved by the matching *begin.
        nonlocal blkid
        nonlocal oldflags
        # ------------ * DATA
        self.datastar = copy.deepcopy(self.datastack.pop())
        # ------------ * DATA
        # ------------ * FLAGS
        self.flagsstar = copy.deepcopy(self.flagsstack.pop())
        # ------------ * FLAGS
        if oldflags["skiplegs"] != self.flagsstar["skiplegs"]:
            print("   # POP 'any' flag now:'{}' was:{} ".format(self.flagsstar["skiplegs"], oldflags["skiplegs"]))

    def starstatement(star):
        # Handle one *command line (already matched by rx_star).
        nonlocal survexblock
        nonlocal blkid
        nonlocal pathlist
        nonlocal args
        nonlocal oldflags
        nonlocal slengthtotal
        nonlocal nlegstotal

        cmd, args = star.groups()
        cmd = cmd.lower()
        # ------------------------BEGIN
        if self.rx_begin.match(cmd):
            blkid = args.lower()
            # PUSH state ++++++++++++++
            self.stackbegin.append(blkid)
            self.legsnumberstack.append(self.legsnumber)
            self.slengthstack.append(self.slength)
            self.personexpedstack.append(self.currentpersonexped)
            pushblock()
            # PUSH state ++++++++++++++
            self.legsnumber = 0
            self.slength = 0.0
            self.currentpersonexped = []
            # NOTE(review): self.depthbegin is decremented at *end but not
            # incremented here -- confirm it is maintained elsewhere.

            printbegin()
            newsurvexblock = models_survex.SurvexBlock(name=blkid, parent=survexblock,
                                                       survexpath=pathlist,
                                                       cave=self.currentcave, survexfile=self.currentsurvexfile,
                                                       legsall=0, legslength=0.0)
            newsurvexblock.save()
            newsurvexblock.title = "(" + survexblock.title + ")"  # copy parent inititally, overwrite if it has its own
            survexblock = newsurvexblock
            survexblock.save()  # django insists on this , but we want to save at the end !
            tickle()
        # ---------------------------END
        elif self.rx_end.match(cmd):
            survexblock.legsall = self.legsnumber
            survexblock.legslength = self.slength
            addpersonlengths()
            printend()
            slengthtotal += self.slength
            nlegstotal += self.legsnumber

            try:
                survexblock.parent.save()  # django insists on this although it is already saved !?
            except:
                print(survexblock.parent, file=sys.stderr)
                raise
            try:
                survexblock.save()  # save to db at end of block
            except:
                print(survexblock, file=sys.stderr)
                raise
            # POP state ++++++++++++++
            popblock()
            self.currentpersonexped = self.personexpedstack.pop()
            self.legsnumber = self.legsnumberstack.pop()
            self.slength = self.slengthstack.pop()
            blkid = self.stackbegin.pop()
            self.currentsurvexblock = survexblock.parent
            survexblock = survexblock.parent
            oldflags = self.flagsstar
            self.depthbegin -= 1
            # POP state ++++++++++++++
        # -----------------------------
        elif self.rx_title.match(cmd):
            quotedtitle = re.match("(?i)^\"(.*)\"$", args)
            if quotedtitle:
                survexblock.title = quotedtitle.groups()[0]
            else:
                survexblock.title = args
        elif self.rx_ref.match(cmd):
            self.LoadSurvexRef(survexblock, args)
        elif self.rx_flags.match(cmd):
            oldflags = self.flagsstar
            self.LoadSurvexFlags(args)
            # if oldflags["skiplegs"] != self.flagsstar["skiplegs"]:
            #     print("  # CHANGE 'any' flag now:'{}' was:{} ".format(self.flagsstar["skiplegs"], oldflags["skiplegs"]))
        elif self.rx_data.match(cmd):
            self.LoadSurvexDataCmd(survexblock, args)
        elif re.match("(?i)alias$", cmd):
            self.LoadSurvexAlias(survexblock, args)
        elif re.match("(?i)entrance$", cmd):
            self.LoadSurvexEntrance(survexblock, args)
        elif re.match("(?i)date$", cmd):
            self.LoadSurvexDate(survexblock, args)
        elif re.match("(?i)team$", cmd):
            self.LoadSurvexTeam(survexblock, args)
        elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
            pass
        elif re.match("(?i)include$", cmd):
            message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
            print(message)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
        else:
            self.LoadSurvexFallThrough(survexblock, args, cmd)

    for svxline in svxlines:
        self.lineno += 1
        sline, comment = self.rx_comment.match(svxline).groups()
        if comment:
            # this catches the ;*include NEWFILE and ;*edulcni ENDOFFILE lines too
            self.LoadSurvexComment(survexblock, comment)
        if not sline:
            continue  # skip blank lines
        # detect a star command
        star = self.rx_star.match(sline)
        if star:
            # yes we are reading a *command
            starstatement(star)
        else:  # not a *cmd so we are reading data OR a ";" rx_comment failed
            self.LoadSurvexLeg(survexblock, sline, comment)

    # BUGFIX: these two assignments were previously swapped.
    self.legsnumber = nlegstotal
    self.slength = slengthtotal
2020-06-27 17:55:59 +01:00
2020-07-01 22:49:38 +01:00
def RecursiveScan(self, survexblock, path, fin, flinear, fcollate):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads only the *include and *begin and *end statements. It produces a linearised
    list of the include tree (flinear), a single collated file (fcollate), and detects
    blocks included more than once.

    Fixes: the pop!=push error message format string had one '{}' placeholder
    but three arguments, silently dropping the push value and the stack;
    the included file is now opened with a context manager.
    """
    indent = " " * self.depthinclude
    sys.stderr.flush()
    self.callcount += 1
    # Progress ticker: a dot every 10 calls, newline every 500.
    if self.callcount % 10 == 0:
        print(".", file=sys.stderr, end='')
    if self.callcount % 500 == 0:
        print("\n", file=sys.stderr, end='')

    if path in self.svxfileslist:
        message = " * Warning. Duplicate detected in *include list at callcount:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
        print(message)
        print(message, file=flinear)
        print("\n" + message, file=sys.stderr)
        models.DataIssue.objects.create(parser='survex', message=message)
        if self.svxfileslist.count(path) > 20:
            message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
            print(message)
            print(message, file=flinear)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            return
    self.svxfileslist.append(path)

    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        self.lineno += 1
        includestmt = self.rx_include.match(svxline)
        if not includestmt:
            # *include lines are not copied verbatim; they are replaced by
            # the ;*include / ;*edulcni marker pairs written below.
            fcollate.write("{}\n".format(svxline.strip()))

        sline, comment = self.rx_comment.match(svxline.strip()).groups()
        star = self.rx_star.match(sline)
        if star:  # yes we are reading a *cmd
            cmd, args = star.groups()
            cmd = cmd.lower()
            if re.match("(?i)include$", cmd):
                includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))
                fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
                self.RunSurvexIfNeeded(os.path.join(settings.SURVEX_DATA, includepath))
                if os.path.isfile(fullpath):
                    #--------------------------------------------------------
                    self.depthinclude += 1
                    with open(fullpath, 'r') as fininclude:
                        fcollate.write(";*include {}\n".format(includepath))
                        flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includepath))
                        push = includepath.lower()
                        self.stackinclude.append(push)
                        #-----------------
                        self.RecursiveScan(survexblock, includepath, fininclude, flinear, fcollate)
                        #-----------------
                        pop = self.stackinclude.pop()
                        if pop != push:
                            # BUGFIX: format string now consumes all three arguments.
                            message = "!! ERROR mismatch *include pop!=push '{}' != '{}'\n{}".format(pop, push, self.stackinclude)
                            print(message)
                            print(message, file=flinear)
                            print(message, file=sys.stderr)
                            models.DataIssue.objects.create(parser='survex', message=message)
                        flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
                        fcollate.write(";*edulcni {}\n".format(pop))
                    self.depthinclude -= 1
                    #--------------------------------------------------------
                else:
                    message = " ! ERROR *include file not found for:'{}'".format(includepath)
                    print(message)
                    print(message, file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
            elif re.match("(?i)begin$", cmd):
                self.depthbegin += 1
                depth = " " * self.depthbegin
                if args:
                    pushargs = args
                else:
                    pushargs = " "
                self.stackbegin.append(pushargs.lower())
                flinear.write("{:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                pass
            elif re.match("(?i)end$", cmd):
                depth = " " * self.depthbegin
                flinear.write("{:2} {} *end {}\n".format(self.depthbegin, depth, args))
                if not args:
                    args = " "
                popargs = self.stackbegin.pop()
                if popargs != args.lower():
                    message = "!! ERROR mismatch in BEGIN/END labels pop!=push '{}'!='{}'\n{}".format(popargs, args, self.stackbegin)
                    print(message)
                    print(message, file=flinear)
                    print(message, file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
                self.depthbegin -= 1
                pass
            elif re.match("(?i)title$", cmd):
                depth = " " * self.depthbegin
                flinear.write("{:2} {} *title {}\n".format(self.depthbegin, depth, args))
                pass
2020-06-23 23:34:08 +01:00
2020-07-02 16:25:51 +01:00
def RunSurvexIfNeeded(self, fullpath):
    """Re-run cavern on fullpath.svx when the .log output is missing or stale:
    older than the .svx file, older than 60 days, older than the cavern
    binary itself, or when the ChaosMonkey randomly decides to (1 in 200)."""
    now = time.time()
    # Defaults pretend everything is a year old until real mtimes are read;
    # the nested closure below reads these at call time.
    one_year_ago = now - 365 * 24 * 3600
    cav_t = one_year_ago
    log_t = one_year_ago
    svx_t = one_year_ago

    def runcavern():
        print(" - Regenerating stale (or chaos-monkeyed) cavern .log and .3d for '{}'\n days svx old: {:.1f} cav:{:.1f} log old: {:.1f}".
            format(fullpath, (svx_t - log_t) / (24 * 3600), (cav_t - log_t) / (24 * 3600), (now - log_t) / (24 * 3600)))
        call([settings.CAVERN, "--log", "--output={}".format(fullpath), "{}.svx".format(fullpath)])

    svxpath = fullpath + ".svx"
    logpath = fullpath + ".log"

    if not os.path.isfile(logpath):
        runcavern()
        return

    if not self.caverndate:
        # Cache the mtime of the cavern binary itself (found via 'which').
        completed_process = run(["which", "{}".format(settings.CAVERN)],
                                capture_output=True, check=True, text=True)
        self.caverndate = os.path.getmtime(completed_process.stdout.strip())
    cav_t = self.caverndate
    log_t = os.path.getmtime(logpath)
    svx_t = os.path.getmtime(svxpath)
    now = time.time()

    stale = (svx_t - log_t > 0                  # log older than the svx file
             or now - log_t > 60 * 24 * 60 * 60  # log more than 60 days old
             or cav_t - log_t > 0)               # new version of cavern
    if stale:
        runcavern()
        return

    if ChaosMonkey(200):
        runcavern()
2020-06-27 17:55:59 +01:00
2020-06-27 18:00:24 +01:00
def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find all survex files,
    collates them into a single file, then loads that collated file
    linearly with LinearLoad().

    Returns (legsnumber, slength): total number of legs and total
    surveyed length loaded.

    Fixes: the 'MB END' report previously printed mem0 (the START figure),
    and the post-LinearLoad 'MB STOP'/'MB USED' reports used a stale mem1
    measured before the load phase.
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')
    print(' - Scanning Survex Blocks tree from {}.svx ...'.format(settings.SURVEX_TOPNAME), file=sys.stderr)
    survexfileroot = survexblockroot.survexfile  # i.e. SURVEX_TOPNAME only
    collatefilename = "_" + survexfileroot.path + ".svx"

    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    fullpathtotop = os.path.join(survexfileroot.survexdirectory.path, survexfileroot.path)
    print(" - RunSurvexIfNeeded cavern on '{}'".format(fullpathtotop), file=sys.stderr)
    svx_scan.RunSurvexIfNeeded(fullpathtotop)

    indent = ""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0), file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0, survexfileroot.path))
    print(" ", file=sys.stderr, end='')

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    #----------------------------------------------------------------
    svx_scan.RecursiveScan(survexblockroot, survexfileroot.path, finroot, flinear, fcollate)
    #----------------------------------------------------------------
    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))
    mem1 = models.get_process_memory()
    flinear.write("\n - MEM:{:.2f} MB STOP {}\n".format(mem1, survexfileroot.path))
    flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1 - mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None  # Hmm. Does this actually delete all the instance variables if they are lists, dicts etc.?
    print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)), file=sys.stderr)

    mem1 = models.get_process_memory()
    # BUGFIX: this used to print mem0 (the START figure) labelled as END.
    print(" - MEM:{:7.2f} MB END".format(mem1), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1 - mem0), file=sys.stderr)
    svxfileslist = []  # free memory
    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
    # look in MapLocations() for how we find the entrances

    print('\n - Loading All Survex Blocks  (LinearLoad)', file=sys.stderr)
    svx_load = LoadingSurvex()
    svx_load.survexdict[survexfileroot.survexdirectory] = []
    svx_load.survexdict[survexfileroot.survexdirectory].append(survexfileroot)
    svx_load.svxdirs[""] = survexfileroot.survexdirectory
    with open(collatefilename, "r") as fcollate:
        svxlines = fcollate.read().splitlines()
        #----------------------------------------------------------------
        svx_load.LinearLoad(survexblockroot, survexfileroot.path, svxlines)
        #----------------------------------------------------------------
    # BUGFIX: re-measure memory *before* the STOP/USED report so the figures
    # include the LinearLoad phase (previously a stale mem1 was printed).
    mem1 = models.get_process_memory()
    print("\n - MEM:{:7.2f} MB STOP".format(mem1), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1 - mem0), file=sys.stderr)
    legsnumber = svx_load.legsnumber
    slength = svx_load.slength

    print(" - Number of SurvexDirectories: {}".format(len(svx_load.survexdict)))
    # Total survex files = sum of files over all directories.
    tf = 0
    for d in svx_load.survexdict:
        tf += len(svx_load.survexdict[d])
    print(" - Number of SurvexFiles: {}".format(tf))
    svx_load = None
    # Close the logging file, Restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig
    return (legsnumber, slength)
2020-06-23 23:34:08 +01:00
2020-06-29 21:16:13 +01:00
def MakeSurvexFileRoot():
    """Returns a file_object.path = SURVEX_TOPNAME associated with
    directory_object.path = SURVEX_DATA."""
    svxfile = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    svxfile.save()
    svxdir = models_survex.SurvexDirectory(path=settings.SURVEX_DATA, cave=None, primarysurvexfile=svxfile)
    svxdir.save()
    # i.e. SURVEX_DATA/SURVEX_TOPNAME; mutually dependent objects need a
    # double-save like this.
    svxfile.survexdirectory = svxdir
    svxfile.save()
    return svxfile
2020-06-23 23:34:08 +01:00
2020-06-27 18:00:24 +01:00
def LoadSurvexBlocks():
    """Flush all survex objects from the database, rebuild the root
    file/block pair, and reload everything from the survex source tree."""
    print(' - Flushing All Survex Blocks...')
    for modelclass in (models_survex.SurvexBlock,
                       models_survex.SurvexFile,
                       models_survex.SurvexDirectory,
                       models_survex.SurvexPersonRole,
                       models_survex.SurvexStation):
        modelclass.objects.all().delete()
    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = MakeSurvexFileRoot()
    # this next makes a block_object assciated with a file_object.path = SURVEX_TOPNAME
    # NOTE(review): this rebinds a local name only; the module-level
    # 'survexblockroot' global is not updated here -- confirm intended.
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
                                                legsall=0, legslength=0.0)
    # this is the first so id=1
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = models.get_process_memory()
    #----------------------------------------------------------------
    legsnumber, slength = FindAndLoadSurvex(survexblockroot)
    #----------------------------------------------------------------
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart, memend, memend - memstart))

    # Don't do this, it double-counts everything:
    #survexblockroot.legslength = slength
    #survexblockroot.legsall = legsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(legsnumber))
    print(" - total leg lengths loaded: {}m".format(slength))
    print(' - Loaded All Survex Blocks.')
2011-07-11 00:01:12 +01:00
2019-02-24 13:03:34 +00:00
# Matches one line of a survex .pos file: "( x, y, z ) station.name",
# capturing the three signed decimal coordinates and the station name.
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
2020-07-04 01:10:17 +01:00
def LoadPositions():
    """Load the survex station positions for entrances and fixed points (about 600)
    into the database.

    Runs cavern to produce a complete .3d file (when the existing one is missing or
    stale), then 3dtopos to produce a .pos table of all survey point positions.
    Each position is then looked up by name against the MapLocations entrance
    points; matches are saved as SurvexStation x/y/z coordinates, attached to the
    root SurvexBlock. Failures are printed and recorded as DataIssues.

    Returns None; side effects only (subprocess calls, file reads, DB writes).
    """
    svx_t = 0
    d3d_t = 0

    def runcavern3d():
        # Regenerate the .3d (and .log) then the .pos file.
        # NOTE(review): d3d_t may still be 0 here if the .3d file did not exist,
        # which makes the "days old" figures below meaningless in that case.
        print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
              format(topdata, (svx_t - d3d_t) / (24 * 3600), (cav_t - d3d_t) / (24 * 3600), (now - d3d_t) / (24 * 3600)))
        call([settings.CAVERN, "--log", "--output={}".format(topdata), "{}.svx".format(topdata)])
        call([settings.THREEDTOPOS, '{}.3d'.format(topdata)], cwd=settings.SURVEX_DATA)

    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    print("\n")  # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file

    # mtime of the cavern binary itself: a newer cavern invalidates the .3d file
    completed_process = run(["which", "{}".format(settings.CAVERN)],
                            capture_output=True, check=True, text=True)
    cav_t = os.path.getmtime(completed_process.stdout.strip())

    svxpath = topdata + ".svx"
    d3dpath = topdata + ".3d"
    pospath = topdata + ".pos"

    svx_t = os.path.getmtime(svxpath)

    if os.path.isfile(d3dpath):
        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
        d3d_t = os.path.getmtime(d3dpath)

    now = time.time()
    if not os.path.isfile(pospath):
        runcavern3d()
    if not os.path.isfile(d3dpath):
        runcavern3d()
    elif svx_t - d3d_t > 0:  # stale: the .3d file is older than the .svx source
        # BUGFIX: was 'd3d_t - svx_t > 0', which regenerated only when the .3d
        # file was NEWER than the .svx - the opposite of the stated intent.
        runcavern3d()
    elif now - d3d_t > 60 * 24 * 60 * 60:  # >60 days old, re-run anyway
        runcavern3d()
    elif cav_t - d3d_t > 0:  # cavern binary is newer than the .3d it produced
        runcavern3d()

    # Entrance/fixed-point survex ids we want coordinates for.
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    # All stations are attached to the root block. Mostly these stations do not
    # actually appear in any SVX file, so matching them to a real SurvexBlock by
    # a syntactic path match almost never works - they should be matched up via
    # the cave data instead. (A disabled path-matching attempt has been removed.)
    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except Exception:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except Exception:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise

    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline()  # Drop header
        for line in posfile:
            r = poslineregex.match(line)
            if not r:
                continue
            # 'posid' is the full dotted station id from the .pos file
            # (renamed from 'id', which shadowed the builtin).
            x, y, z, posid = r.groups()
            for sid in mappoints:
                if posid.endswith(sid):
                    blockpath = "." + posid[:-len(sid)].strip(".")
                    try:
                        ss = models_survex.SurvexStation(name=posid, block=survexblockroot)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except Exception:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise
    print(" - {} SurvexStation entrances found.".format(found))