Stop storing all SurvexStations
parent 30e560d808
commit 453af2851b
@@ -149,7 +149,7 @@ class SurvexBlock(models.Model):
             return ssl[0]
         #print name
         ss = SurvexStation(name=name, block=self)
-        ss.save()
+        #ss.save()
         return ss
 
     def DayIndex(self):
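The effect of the change above is that this SurvexBlock helper now returns an in-memory SurvexStation without persisting it; only callers that actually need the station stored have to save it themselves. A minimal, framework-free sketch of that pattern (my own illustration with made-up names, not troggle code):

from dataclasses import dataclass

# Stand-in for a Django model: save() here just flags the object as persisted.
@dataclass
class Station:
    name: str
    block: str
    saved: bool = False

    def save(self):
        self.saved = True          # stands in for a database write

def make_station(name, block):
    ss = Station(name=name, block=block)
    # ss.save()                    # deliberately no longer saved here
    return ss

ss = make_station("entrance.p1", "demo.block")   # hypothetical names
print(ss.saved)                    # False until a caller chooses to persist it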
@@ -28,7 +28,7 @@ class MapLocations(object):
         ("laser.kt114_96","HSK","Reference", "Hinterer Schwarzmooskogel trig point"),
         ("2000","Nipple","Reference", "Nipple (Weiße Warze)"),
         ("3000","VSK","Reference", "Vorderer Schwarzmooskogel summit"),
-        ("oldtopcamp", "OTC", "Reference", "Old Top Camp"),
+        ("topcamp", "OTC", "Reference", "Old Top Camp"),
         ("laser.0", "LSR0", "Reference", "Laser Point 0"),
         ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
         ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
@@ -179,6 +179,7 @@ def import_survexblks():
     troggle.parsers.survex.LoadAllSurvexBlocks()
 
 def import_survexpos():
     import troggle.parsers.survex
+    print("Importing Survex x/y/z Positions")
     troggle.parsers.survex.LoadPos()
 
@@ -432,7 +433,7 @@ def usage():
             QMs - read in the QM csv files (older caves only)
             scans - the survey scans in all the wallets (must run before survex)
             survex - read in the survex files - all the survex blocks but not the x/y/z positions
-            survexpos - just the x/y/z Pos out of the survex files (not needed) -- Never used.
+            survexpos - set the x/y/z positions for entrances and fixed points
 
             tunnel - read in the Tunnel files - which scans the survey scans too
 
@@ -493,7 +494,7 @@ if __name__ == "__main__":
         jq.enq("QMs",import_QMs)
         jq.enq("tunnel",import_tunnelfiles)
         jq.enq("survexblks",import_survexblks)
-        #jq.enq("survexpos",import_survexpos)
+        jq.enq("survexpos",import_survexpos)
     elif "scans" in sys.argv:
         jq.enq("scans",import_surveyscans)
     elif "survex" in sys.argv:
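For orientation, jq is the import job queue used by this script; the change above simply re-enables the survexpos job after the survex blocks have been loaded. A minimal stand-in, assuming jobs are just named callables run in the order they are enqueued (the real class, not shown in this diff, presumably also handles timing and error reporting):

class JobQueue:
    def __init__(self):
        self.queue = []                        # (name, callable) pairs

    def enq(self, name, func):
        self.queue.append((name, func))

    def run(self):
        for name, func in self.queue:
            print(" -- importing %s" % name)
            func()

def import_survexpos():
    print("Importing Survex x/y/z Positions")  # placeholder body for this sketch

jq = JobQueue()
jq.enq("survexpos", import_survexpos)
jq.run()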
@@ -24,6 +24,7 @@ rx_braskets= re.compile(r"[()]")
 rx_line_length = re.compile(r"[\d\-+.]+$")
 survexlegsalllength = 0.0
 survexlegsnumber = 0
+survexblockroot = None
 
 def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
     global survexlegsalllength
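As an aside, rx_line_length in the context above matches a purely numeric token (digits, sign, dot) anchored at the end of a string, e.g. a tape length at the end of a survex data line. A quick check of that reading, with invented sample lines:

import re

rx_line_length = re.compile(r"[\d\-+.]+$")
print(bool(rx_line_length.search("1 2 11.50")))    # True:  line ends with a number
print(bool(rx_line_length.search("1 2 11.50m")))   # False: trailing non-numeric text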
@@ -550,6 +551,8 @@ def LoadPos():
 
     posfile = open("%s.pos" % (topdata))
     posfile.readline() #Drop header
+
+    survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
     for line in posfile.readlines():
         r = poslineregex.match(line)
         if r:
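LoadPos() walks the survex-generated .pos file and matches each line with poslineregex, which is defined elsewhere in the parser and not shown in this diff. A sketch of that parsing step, using my own approximation of the usual "( x, y, z ) station.name" layout of a .pos line and a made-up sample:

import re

# Approximation of a .pos line matcher; not the repo's poslineregex.
posline = re.compile(r"^\(\s*([+-]?[\d.]+),\s*([+-]?[\d.]+),\s*([+-]?[\d.]+)\s*\)\s*(\S+)$")

sample = "(   36670.37,   83317.43,  1903.97 ) 1623.example.entrance"   # invented
r = posline.match(sample)
if r:
    x, y, z, name = r.groups()
    print(name, float(x), float(y), float(z))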
@@ -559,25 +562,54 @@ def LoadPos():
         else:
             for sid in mappoints:
                 if id.endswith(sid):
-                    notfoundnow.append(id)
-                    ss = models_survex.SurvexStation.objects.lookup(id)
+                    # Now that we don't import any stations, we create it rather than look it up
+                    # ss = models_survex.SurvexStation.objects.lookup(id)
 
+                    # need to set block_id which means doing a search on all the survex blocks..
+                    # remove dot at end and add one at beginning
+                    blockpath = "." + id[:-len(sid)].strip(".")
+                    try:
+                        sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
+                        if len(sbqs)==1:
+                            sb = sbqs[0]
+                        if len(sbqs)>1:
+                            message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
+                            print(message)
+                            models.DataIssue.objects.create(parser='survex', message=message)
+                            sb = sbqs[0]
+                        elif len(sbqs)<=0:
+                            message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
+                            print(message)
+                            models.DataIssue.objects.create(parser='survex', message=message)
+                            sb = survexblockroot
+                    except:
+                        message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
+                        print(message)
+                        models.DataIssue.objects.create(parser='survex', message=message)
+                    try:
+                        ss = models_survex.SurvexStation(name=id, block=sb)
                         ss.x = float(x)
                         ss.y = float(y)
                         ss.z = float(z)
                         ss.save()
                         found += 1
+                    except:
+                        notfoundnow.append(id)
+                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
+                        print(message)
+                        models.DataIssue.objects.create(parser='survex', message=message)
+                        raise
 
-    print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
+    #print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
 
     if found > 10: # i.e. a previous cave import has been done
         try:
             with open(cachefile, "w") as f:
                 c = len(notfoundnow)+len(skip)
                 for i in notfoundnow:
-                    f.write("%s\n" % i)
+                    pass #f.write("%s\n" % i)
                 for j in skip:
-                    f.write("%s\n" % j) # NB skip not notfoundbefore
+                    pass #f.write("%s\n" % j) # NB skip not notfoundbefore
                 print((' Not-found cache file written: %s entries' % c))
         except:
             print(" FAILURE WRITE opening cache file %s" % (cachefile))
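The blockpath computation in the hunk above strips the mappoints suffix from the full station identifier to recover the survex block path that the new SurvexStation should be attached to. A quick runnable illustration of just that string handling, with a hypothetical station id:

sid = "laser.0_1"                             # suffix listed in mappoints
id = "1623.lasers.laser.0_1"                  # hypothetical full id from the .pos file
blockpath = "." + id[:-len(sid)].strip(".")   # remove dot at end, add one at beginning
print(blockpath)                              # -> ".1623.lasers"

With that path, the code filters SurvexBlock on survexpath, records a DataIssue when it finds multiple or zero matches, and falls back to survexblockroot before creating and saving the station.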