import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.core.views_caves import MapLocations

survexblockroot = None
ROOTBLOCK = "rootblock"

class SurvexLeg():
    """No longer a models.Model subclass, so no longer a database table
    """
    tape = 0.0
    compass = 0.0
    clino = 0.0

class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can *include
    other survex files.
    A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
    """
    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
    stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
    stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
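    # For example, the *DATA line in the comment above would be parsed by LoadSurvexDataCmd() below into
    #    {"type":"normal", "normal":-1, "from":0, "to":1, "tape":2, "clino":3, "compass":4, "ignore":8}
    # i.e. "length", "gradient" and "bearing" are renamed to "tape", "clino" and "compass", and each
    # field name maps to its column index in subsequent data lines (ls[stardata["tape"]] etc.).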
    rx_linelen = re.compile(r"[\d\-+.]+$")
    rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
    # remember there is also QM_PATTERN used in views_other and set in settings.py
    rx_cave = re.compile(r'caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/')
    rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')
    rx_comminc = re.compile(r'(?i)^\s*;\*include[\s](.*)$') # inserted by linear collate ;*include
    rx_commcni = re.compile(r'(?i)^\s*;\*edulcni[\s](.*)$') # inserted by linear collate ;*edulcni
    rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$')
    rx_ref = re.compile(r'(?i)^\s*ref[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_star = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
    rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
    rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
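    # Illustrative example (hypothetical wallet number): the comment "; ref 2019#X12" matches rx_ref
    # with groups ('2019', 'X', '12'), and the argument of "*ref 2019#X12" matches rx_argsref with the
    # same groups; LoadSurvexRef() below reassembles these into the wallet name "2019#X12".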

    survexlegsalllength = 0.0
    survexlegsnumber = 0
    depthbegin = 0
    depthinclude = 0
    stackbegin = []
    stackinclude = []
    svxfileslist = []
    lineno = 0
    insp = ""
    callcount = 0
    stardata = {}
    includedfilename = ""
    currenttitle = ""
    currentsurvexblock = None
    currentsurvexfile = None
    currentcave = None

    def __init__(self):
        pass

    def LoadSurvexIgnore(self, survexblock, line, cmd):
        if cmd == "title":
            pass # unused in troggle today - but will become text list on SurvexBlock
        elif cmd == "require":
            pass # should we check survex version available for processing?
        elif cmd in ["equate", "fix", "alias", "calibrate", "cs", "entrance", "export", "case",
                     "declination", "infer", "instrument", "sd", "units"]:
            pass # we ignore all these, which is fine.
        else:
            if cmd in ["include", "data", "flags", "title", "set", "ref"]:
                message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexTeam(self, survexblock, line):
        teammembers = []
        mteammember = self.rx_team.match(line)
        if mteammember:
            for tm in self.rx_person.split(mteammember.group(2)):
                if tm:
                    personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                    if (personexpedition, tm) not in teammembers:
                        teammembers.append((personexpedition, tm))
                        personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                        personrole.expeditionday = survexblock.expeditionday
                        if personexpedition:
                            personrole.person = personexpedition.person
                        personrole.save()
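        # Illustrative example for LoadSurvexTeam() above (hypothetical names): the line
        #     *team Insts Fred Bloggs and Joe Soap
        # arrives here as "Insts Fred Bloggs and Joe Soap"; rx_team captures ("Insts", "Fred Bloggs and Joe Soap")
        # and rx_person splits the second group on " and ", "/", ",", "&", "+" into the individual names.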

    def LoadSurvexDate(self, survexblock, line):
        # we should make this a date range for everything
        if len(line) == 10:
            survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
            expeditions = models.Expedition.objects.filter(year=line[:4])
            if expeditions:
                assert len(expeditions) == 1
                survexblock.expedition = expeditions[0]
                survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                survexblock.save()
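        # Illustrative example for LoadSurvexDate() above: "*date 2019.07.20" arrives here as the
        # 10-character string "2019.07.20"; the dots are rewritten to give "2019-07-20", parsed with
        # '%Y-%m-%d', and the block is attached to the Expedition whose year is "2019" if one exists.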

    def LoadSurvexLineLeg(self, survexblock, svxline, sline, comment):
        """This reads compass, clino and tape data but only keeps the tape lengths,
        the rest is discarded after error-checking.
        """
        stardata = self.stardata
        survexleg = SurvexLeg()
        ls = sline.lower().split()
        # this next fails for two surface survey svx files which use / for decimal point
        # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
        if stardata["type"] == "normal": # should use current flags setting for this
            # print(" !! lineno '{}'\n !! svxline '{}'\n !! sline '{}'\n !! ls '{}'\n !! stardata {}".format(self.lineno, svxline, sline, ls, stardata))
            tape = ls[stardata["tape"]]
            tape = tape.replace("(", "")
            tape = tape.replace(")", "")
            tape = tape.replace("/", ".")
            try:
                survexleg.tape = float(tape)
                self.survexlegsnumber += 1
            except ValueError:
                print(("! Tape misread in", survexblock.survexfile.path))
                print(("  Stardata:", stardata))
                print(("  Line:", ls))
                message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.tape = 0
            try:
                survexblock.totalleglength += survexleg.tape
                self.survexlegsalllength += survexleg.tape
            except ValueError:
                message = ' ! Value Error: Tape length not added %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)

            try:
                lclino = ls[stardata["clino"]]
            except:
                print(("! Clino misread in", survexblock.survexfile.path))
                print(("  Stardata:", stardata))
                print(("  Line:", ls))
                message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                lclino = "error" # sentinel: could not read the clino field

            try:
                lcompass = ls[stardata["compass"]]
            except:
                print(("! Compass misread in", survexblock.survexfile.path))
                print(("  Stardata:", stardata))
                print(("  Line:", ls))
                message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                lcompass = "error" # sentinel: could not read the compass field

            if lclino == "up":
                survexleg.compass = 0.0
                survexleg.clino = 90.0
            elif lclino == "down":
                survexleg.compass = 0.0
                survexleg.clino = -90.0
            elif lclino == "-" or lclino == "level":
                try:
                    survexleg.compass = float(lcompass)
                except ValueError:
                    print(("! Compass misread in", survexblock.survexfile.path))
                    print(("  Stardata:", stardata))
                    print(("  Line:", ls))
                    message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
                    models.DataIssue.objects.create(parser='survex', message=message)
                    survexleg.compass = 1000
                    survexleg.clino = -90.0
            elif lclino == "error" or lcompass == "error":
                pass # unreadable clino/compass already reported above; leave the leg values at their defaults
            else:
                assert self.rx_linelen.match(lcompass), ls
                assert self.rx_linelen.match(lclino) and lclino != "-", ls
                survexleg.compass = float(lcompass)
                survexleg.clino = float(lclino)
        # delete the object so that django autosaving doesn't save it.
        survexleg = None

    def LoadSurvexRef(self, survexblock, args):
        # *REF but also ; Ref  years from 1960 to 2039
        if len(args) < 4:
            message = " ! Empty or BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        argsgps = self.rx_argsref.match(args)
        if argsgps:
            yr, letterx, wallet = argsgps.groups()
        else:
            message = " ! BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        if not letterx:
            letterx = ""
        else:
            letterx = "X"
        if len(wallet) < 2:
            wallet = "0" + wallet
        assert (int(yr) > 1960 and int(yr) < 2039), "Wallet year out of bounds: %s" % yr
        refscan = "%s#%s%s" % (yr, letterx, wallet)
        try:
            if int(wallet) > 100:
                message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
        except:
            message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)

        manyscansfolders = models_survex.ScansFolder.objects.filter(walletname=refscan)
        if manyscansfolders:
            survexblock.scansfolder = manyscansfolders[0]
            survexblock.save()
            if len(manyscansfolders) > 1:
                message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manyscansfolders), survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
        else:
            message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexQM(self, survexblock, qmline):
        insp = self.insp
        qm_no = qmline.group(1)
        qm_grade = qmline.group(2)
        if qmline.group(3): # usual closest survey station
            qm_nearest = qmline.group(3)
            if qmline.group(4):
                qm_nearest = qm_nearest + "." + qmline.group(4)

        if qmline.group(6) and qmline.group(6) != '-':
            qm_resolve_station = qmline.group(6)
            if qmline.group(7):
                qm_resolve_station = qm_resolve_station + "." + qmline.group(7)
        else:
            qm_resolve_station = ""

        qm_notes = qmline.group(8)

        # Spec of QM in SVX files:
        # ;Serial number  grade(A/B/C/D/X)  nearest-station  resolution-station  description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
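        # Illustrative parse of the second sample line above: rx_qm gives group(1)='1', group(2)='a',
        # group(3)='hobnob_hallway_2', group(4)='42', group(5)='-' and group(8)='junction of keyhole passage',
        # so qm_nearest becomes 'hobnob_hallway_2.42' and qm_resolve_station stays ''.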
        # NB none of the SurveyStations are in the DB now, so if we want to link to a SurvexStation
        # we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
        try:
            qm = models_caves.QM.objects.create(number=qm_no,
                    # nearest_station=a_survex_station_object, # can be null
                    nearest_station_description=qm_resolve_station,
                    nearest_station_name=qm_nearest,
                    grade=qm_grade.upper(),
                    location_description=qm_notes)
            qm.save()
            # message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest, survexblock.survexfile.path)
            # print(insp+message)
            # models.DataIssue.objects.create(parser='survex', message=message)
        except:
            message = " ! QM{} FAIL to create {} in '{}'".format(qm_no, qm_nearest, survexblock.survexfile.path)
            print(insp+message)
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexDataCmd(self, survexblock, args):
        ls = args.lower().split()
        stardata = {"type":ls[0]}
        for i in range(0, len(ls)):
            stardata[self.stardataparamconvert.get(ls[i], ls[i])] = i - 1
        self.stardata = stardata
        if ls[0] in ["normal", "cartesian", "nosurvey"]:
            assert (("from" in stardata and "to" in stardata) or "station" in stardata), args
        elif ls[0] == "default":
            self.stardata = self.stardatadefault
        else:
            assert ls[0] == "passage", args

    def LoadSurvexFlags(self, line, cmd):
        # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
        # but this data is only used for sense-checking not to actually calculate anything important
        pass

    def IdentifyCave(self, cavepath):
        path = os.path.join(os.path.split(cavepath)[0], re.sub(r"\.svx$", "", cavepath))
        path_match = self.rx_cave.search(path)
        print(' - Attempting cave match for %s' % path)
        if path_match:
            pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
            # print(insp+pos_cave)
            cave = models_caves.getCaveByReference(pos_cave)
            if cave:
                self.currentcave = cave # remember the matched cave for subsequent block creation
                print(' - Cave matched for %s' % path)
                return cave
        else:
            print(' ! No cave match for %s' % path)
        return None

    def LoadSurvexFileBlock(self, survexblock, includelabel):
        """Creates SurvexDirectory and SurvexFile in the database
        with links to 'cave'.
        Creates a new current survexblock with valid .survexfile and .survexdirectory
        """
        cave = self.IdentifyCave(includelabel)
        dirpath = os.path.split(includelabel)[0] # assumed: the directory part of the included file's path
        newsurvexfile = models_survex.SurvexFile(path=includelabel)
        survexdirectory = models_survex.SurvexDirectory(path=dirpath, cave=cave, primarysurvexfile=newsurvexfile)
        survexdirectory.save()
        newsurvexfile.survexdirectory = survexdirectory
        newsurvexfile.save()
        name = includelabel
        newsurvexblock = models_survex.SurvexBlock(name=name, parent=survexblock,
                survexpath=survexblock.survexpath+"."+name,
                cave=cave, survexfile=newsurvexfile,
                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
        newsurvexblock.save()
        self.currentsurvexfile = newsurvexfile
        self.currentsurvexblock = newsurvexblock

    def LoadSurvexComment(self, survexblock, comment):
        # ignore all comments except ;ref and ;QM and ;*include (for collated survex file)
        refline = self.rx_ref.match(comment)
        if refline:
            comment = comment.replace("ref", "").strip()
            self.LoadSurvexRef(survexblock, comment)

        qmline = self.rx_qm.match(comment)
        if qmline:
            self.LoadSurvexQM(survexblock, qmline)

        included = self.rx_comminc.match(comment)
        # ;*include means we have been included; not 'proceed to include' which *include means
        if included:
            self.LoadSurvexFileBlock(survexblock, included.groups()[0])

        edulcni = self.rx_commcni.match(comment)
        # ;*edulcni means we are returning from an included file, so pop back to the parent block and its file
        if edulcni:
            self.currentsurvexblock = self.currentsurvexblock.parent
            self.currentsurvexfile = self.currentsurvexblock.survexfile

    def LoadSurvexSetup(self, survexblock, survexfile):
        self.depthbegin = 0
        self.stardata = self.stardatadefault
        blocklegs = self.survexlegsnumber
        print(self.insp+" - MEM:{:.3f} Reading. parent: {} <> {}".format(models.get_process_memory(), survexblock.survexfile.path, survexfile.path))
        self.lineno = 0
        sys.stderr.flush()
        self.callcount += 1
        if self.callcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if self.callcount % 500 == 0:
            print("\n", file=sys.stderr, end='')
        # Try to find the cave in the DB if not use the string as before
        path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
        if path_match:
            pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
            cave = models_caves.getCaveByReference(pos_cave)
            if cave:
                survexfile.cave = cave

    def RecursiveRecursiveLoad(self, survexblock, survexfile, fin):
        """Follows the *include links in all the survex files from the root file 1623.svx
        and reads in the survex blocks, other data and the wallet references (scansfolder) as it
        goes. This part of the data include process is where the maximum memory is used and where it
        crashes on memory-constrained machines. Begin-end blocks may also be nested.
        """
        self.LoadSurvexSetup(survexblock, survexfile)
        insp = self.insp
        previousnlegs = 0
        svxlines = fin.read().splitlines()
        # cannot close file now as may be recursively called with the same fin if nested *begin-end
        for svxline in svxlines:
            self.lineno += 1
            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            if comment:
                self.LoadSurvexComment(survexblock, comment)
            if not sline:
                continue # skip blank lines
            # detect the star command
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                if re.match("include$(?i)", cmd):
                    includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
                    print((insp+' - INCLUDE-go path found, including - ' + includepath))
                    # Try to find the cave in the DB. if not, use the string as before
                    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
                    if path_match:
                        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                        print((insp+' - Match in DB (i) for cave {}.'.format(pos_cave)))
                        cave = models_caves.getCaveByReference(pos_cave)
                        if cave:
                            survexfile.cave = cave
                    else:
                        print((insp+' - NO Match in DB (i) for a cave for {}'.format(includepath)))

                    includesurvexfile = models_survex.SurvexFile(path=includepath)
                    includesurvexfile.save()
                    includesurvexfile.SetDirectory()
                    if includesurvexfile.exists():
                        survexblock.save()
                        self.insp += "> "
                        #--------------------------------------------------------
                        fininclude = includesurvexfile.OpenFile()
                        self.RecursiveRecursiveLoad(survexblock, includesurvexfile, fininclude)
                        fininclude.close()
                        #--------------------------------------------------------
                        self.insp = self.insp[2:]
                        insp = self.insp
                        print((insp+' - INCLUDE-return from include - ' + includepath))
                    else:
                        print((insp+' ! ERROR *include file not found for %s' % includesurvexfile))
                elif re.match("begin$(?i)", cmd):
                    # On a *begin statement we start a new survexblock.
                    # There should not be any *include inside a begin-end block, so this is a simple
                    # load not a recursive fileload. But there may be many blocks nested to any depth in one file.
                    if args:
                        newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args))
                        # Try to find the cave in the DB if not use the string as before
                        path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                        if path_match:
                            pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                            # print(insp+pos_cave)
                            cave = models_caves.getCaveByReference(pos_cave)
                            if cave:
                                survexfile.cave = cave
                        else:
                            print((insp+' - No match (b) for %s' % newsvxpath))

                        previousnlegs = self.survexlegsnumber
                        name = args.lower()
                        print(insp+' - Begin found for: {}, creating new SurvexBlock '.format(name))
                        # the recursive call re-reads the entire file. This is wasteful. We should pass in only
                        # the un-parsed part of the file.
                        survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock,
                                survexpath=survexblock.survexpath+"."+name,
                                cave=survexfile.cave, survexfile=survexfile,
                                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                        survexblockdown.save()
                        survexblock.save()
                        survexblock = survexblockdown
                        print(insp+" - BLOCK-enter nested *begin/*end block: '{}'".format(name))
                        self.insp += "> "
                        #--------------------------------------------------------
                        self.RecursiveRecursiveLoad(survexblockdown, survexfile, fin)
                        #--------------------------------------------------------
                        # do not close the file as there may be more blocks in this one
                        # and it is re-read afresh with every nested begin-end block.
                        self.insp = self.insp[2:]
                        insp = self.insp
                    else:
                        self.depthbegin += 1
                elif re.match("end$(?i)", cmd):
                    if self.depthbegin:
                        print(insp+" - BLOCK-return from nested *begin/*end block: '{}'".format(args))
                        self.depthbegin -= 1
                    else:
                        legsinblock = self.survexlegsnumber - previousnlegs
                        print(insp+" - LEGS: {} (previous: {}, now: {})".format(legsinblock, previousnlegs, self.survexlegsnumber))
                        survexblock.legsall = legsinblock
                        survexblock.save()
                        return
                elif cmd == "ref":
                    self.LoadSurvexRef(survexblock, args)
                elif cmd == "flags":
                    self.LoadSurvexFlags(args, cmd)
                elif cmd == "data":
                    self.LoadSurvexDataCmd(survexblock, args)
                elif cmd == "set" and re.match("names(?i)", args):
                    pass
                elif re.match("date$(?i)", cmd):
                    self.LoadSurvexDate(survexblock, args)
                elif re.match("team$(?i)", cmd):
                    self.LoadSurvexTeam(survexblock, args)
                else:
                    self.LoadSurvexIgnore(survexblock, args, cmd)
            else: # not a *cmd so we are reading data OR rx_comment failed
                if "from" in self.stardata: # only interested in survey legs
                    self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
                else:
                    pass # ignore all other sorts of data

    def LinearRecursiveLoad(self, survexblock, path, fin, skipto):
        """Loads a single survex file. Usually used to import all the survex files which have been collated
        into a single file. Loads the begin/end blocks recursively.
        """
        self.relativefilename = path
        cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections

        svxlines = fin.read().splitlines()
        for svxline in svxlines:
            self.lineno += 1
            if self.lineno < skipto:
                continue # skip through the file to the place we got up to
            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            if comment:
                self.LoadSurvexComment(survexblock, comment)
            if not sline:
                continue # skip blank lines
            # detect the star command
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                if re.match("begin$(?i)", cmd):
                    self.depthbegin += 1
                    if args:
                        depth = " " * self.depthbegin
                        self.stackbegin.append(args.lower())
                        previousnlegs = self.survexlegsnumber
                        name = args.lower()
                        print(' - Begin found for: {}, creating new SurvexBlock '.format(name))
                        survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock,
                                survexpath=survexblock.survexpath+"."+name,
                                cave=self.currentcave, survexfile=self.currentsurvexfile,
                                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                        survexblockdown.save()
                        survexblock.save()
                        survexblock = survexblockdown
                    else:
                        self.depthbegin += 1
                elif re.match("end$(?i)", cmd):
                    # haven't really thought this through..
                    if survexblock:
                        self.currentsurvexblock = survexblock.parent
                        self.currentsurvexfile = survexblock.parent.survexfile
                    if self.depthbegin:
                        print(" - End -return from nested *begin/*end block: '{}'".format(args))
                        self.depthbegin -= 1
                    else:
                        legsinblock = self.survexlegsnumber - previousnlegs
                        print(" - LEGS: {} (previous: {}, now: {})".format(legsinblock, previousnlegs, self.survexlegsnumber))
                        survexblock.legsall = legsinblock
                        survexblock.save()
                        return
                elif re.match("title$(?i)", cmd):
                    self.currenttitle = args
                elif cmd == "ref":
                    self.LoadSurvexRef(survexblock, args)
                elif cmd == "flags":
                    self.LoadSurvexFlags(args, cmd)
                elif cmd == "data":
                    self.LoadSurvexDataCmd(survexblock, args)
                elif re.match("date$(?i)", cmd):
                    self.LoadSurvexDate(survexblock, args)
                elif re.match("team$(?i)", cmd):
                    self.LoadSurvexTeam(survexblock, args)
                elif cmd == "set" and re.match("names(?i)", args):
                    pass
                elif re.match("include$(?i)", cmd):
                    message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
                    print(message)
                    print(message, file=sys.stderr)
                    models.DataIssue.objects.create(parser='survex', message=message)
                else:
                    self.LoadSurvexIgnore(survexblock, args, cmd)
            else: # not a *cmd so we are reading data OR rx_comment failed
                if "from" in self.stardata: # only interested in survey legs
                    self.LoadSurvexLineLeg(survexblock, svxline, sline, comment)
                else:
                    pass # ignore all other sorts of data

    def RecursiveScan(self, survexblock, survexfile, fin, flinear, fcollate):
        """Follows the *include links in all the survex files from the root file 1623.svx
        and reads only the *include and *begin and *end statements. It produces a linearised
        list of the include tree.
        """
        indent = " " * self.depthinclude
        sys.stderr.flush()
        self.callcount += 1
        if self.callcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if self.callcount % 500 == 0:
            print("\n", file=sys.stderr, end='')

        if survexfile in self.svxfileslist:
            message = " * Warning. Survex file already seen: {}".format(survexfile.path)
            print(message)
            print(message, file=flinear)
            print(message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            if self.svxfileslist.count(survexfile) > 20:
                message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(survexfile.path)
                print(message)
                print(message, file=flinear)
                print(message, file=sys.stderr)
                models.DataIssue.objects.create(parser='survex', message=message)
                return
        self.svxfileslist.append(survexfile)

        svxlines = fin.read().splitlines()
        for svxline in svxlines:
            self.lineno += 1
            includestmt = self.rx_include.match(svxline)
            if not includestmt:
                fcollate.write("{}\n".format(svxline))
            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                if re.match("include$(?i)", cmd):
                    includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
                    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
                    includesurvexfile = models_survex.SurvexFile(path=includepath)
                    if includesurvexfile.exists():
                        # do not create SurvexFile in DB here by doing includesurvexfile.save(). Do it when reading data.
                        #--------------------------------------------------------
                        self.depthinclude += 1
                        fininclude = includesurvexfile.OpenFile()
                        fcollate.write(";*include {}\n".format(includesurvexfile.path))
                        flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                        push = includesurvexfile.path.lower()
                        self.stackinclude.append(push)
                        self.RecursiveScan(survexblock, includesurvexfile, fininclude, flinear, fcollate)
                        pop = self.stackinclude.pop()
                        if pop != push:
                            message = "!!!!!!! ERROR pop != push {} != {} {}".format(pop, push, self.stackinclude)
                            print(message)
                            print(message, file=flinear)
                            print(message, file=sys.stderr)
                            models.DataIssue.objects.create(parser='survex', message=message)
                        flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, includesurvexfile.path))
                        fcollate.write(";*edulcni {}\n".format(includesurvexfile.path))
                        fininclude.close()
                        self.depthinclude -= 1
                        #--------------------------------------------------------
                    else:
                        message = " ! ERROR *include file not found for {}".format(includesurvexfile)
                        print(message)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                elif re.match("begin$(?i)", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    if args:
                        pushargs = args
                    else:
                        pushargs = " "
                    self.stackbegin.append(pushargs.lower())
                    flinear.write("{:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                elif re.match("end$(?i)", cmd):
                    depth = " " * self.depthbegin
                    flinear.write("{:2} {} *end {}\n".format(self.depthbegin, depth, args))
                    if not args:
                        args = " "
                    popargs = self.stackbegin.pop()
                    if popargs != args.lower():
                        message = "!!!!!!! ERROR BEGIN/END pop != push {} != {}\n{}".format(popargs, args, self.stackbegin)
                        print(message)
                        print(message, file=flinear)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    self.depthbegin -= 1


def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find all the survex files,
    collates them, and then loads the survex blocks from them.
    """
    print(' - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')

    print(' - SCANNING All Survex Blocks...', file=sys.stderr)
    survexfileroot = survexblockroot.survexfile

    collatefilename = "_" + survexfileroot.path + ".svx"

    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    indent = ""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0), file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write(" - MEM:{:7.2f} MB START {}\n".format(mem0, survexfileroot.path))

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    svx_scan.RecursiveScan(survexblockroot, survexfileroot, finroot, flinear, fcollate)
    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))

    mem1 = models.get_process_memory()
    flinear.write(" - MEM:{:.2f} MB STOP {}\n".format(mem1, survexfileroot.path))
    flinear.write(" - MEM:{:.3f} MB USED\n".format(mem1-mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write(" - {:,} survex files in linear include list\n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None
    print("\n - {:,} survex files in linear include list\n".format(len(svxfileslist)), file=sys.stderr)

    mem1 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB END".format(mem0), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0), file=sys.stderr)
    svxfileslist = [] # free memory

    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
    # look in MapLocations() for how we find the entrances

    print('\n - Loading All Survex Blocks...', file=sys.stderr)
    svx_load = LoadingSurvex()
    with open(collatefilename, "r") as fcollate:
        #svx_load.LinearRecursiveLoad(survexblockroot, survexfileroot.path, fcollate, 0)
        pass
    print(" - MEM:{:7.2f} MB STOP".format(mem1), file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0), file=sys.stderr)
    survexlegsnumber = svx_load.survexlegsnumber
    survexlegsalllength = svx_load.survexlegsalllength
    mem1 = models.get_process_memory()
    svx_load = None

    print('\n - Loading All Survex Blocks...', file=sys.stderr)
    svxlrl = LoadingSurvex()
    finroot = survexfileroot.OpenFile()
    svxlrl.RecursiveRecursiveLoad(survexblockroot, survexfileroot, finroot)
    finroot.close()
    survexlegsnumber = svxlrl.survexlegsnumber
    survexlegsalllength = svxlrl.survexlegsalllength
    svxlrl = None

    # Close the logging file, Restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig

    return (survexlegsnumber, survexlegsalllength)


def LoadSurvexBlocks():
    print(' - Flushing All Survex Blocks...')
    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()
    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfileroot.save()
    survexfileroot.SetDirectory()
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
            legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
    # this is the first so id=1
    survexblockroot.save()

    print(' - Loading All Survex Blocks...')
    memstart = models.get_process_memory()
    survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart, memend, memend-memstart))

    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
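# Illustrative example (hypothetical station name): a line of the .pos file such as
#     (   12345.67,   81012.34,  1623.00 ) caves-1623.somecave.entrance
# matches poslineregex with groups ('12345.67', '81012.34', '1623.00', 'caves-1623.somecave.entrance'),
# i.e. the x, y, z coordinates followed by the full survey station name.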


def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then look up each position by name to see if we have it in the database,
    and if we do, save the x/y/z coordinates.
    If we don't have it in the database, print an error message and discard it.

    This is ONLY ever used for entrance and fixedpts locations for the prospecting map:
    about 600 points out of 32,000.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    skip = {}
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd=settings.SURVEX_DATA)

    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    posfile = open("%s.pos" % (topdata))
    posfile.readline() # Drop header
    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, id = r.groups()
            for sid in mappoints:
                if id.endswith(sid):
                    blockpath = "." + id[:-len(sid)].strip(".")
                    try:
                        sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                        if len(sbqs) == 1:
                            sb = sbqs[0]
                        if len(sbqs) > 1:
                            message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = sbqs[0]
                        elif len(sbqs) <= 0:
                            message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = survexblockroot
                    except:
                        message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    try:
                        ss = models_survex.SurvexStation(name=id, block=sb)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise
    print(" - {} SurvexStation entrances found.".format(found))