import troggle.settings as settings
import troggle.core.models as models

from subprocess import call, Popen, PIPE

from troggle.parsers.people import GetPersonExpeditionNameLookup
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import re
import os
from datetime import datetime

line_leg_regex = re.compile(r"[\d\-+.]+$")
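# Used below to sanity-check that compass/clino fields look like plain numeric
# strings (e.g. "034.5", "-90", "+3.5") before they are converted with float().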


def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
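    """Parse one tabular survey-leg line according to the column layout in stardata
    and store it as a models.SurvexLeg attached to survexblock."""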
    # The try/except blocks here need replacing as they are relatively expensive
    ls = sline.lower().split()
    ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    if stardata["type"] == "normal":
        try:
            survexleg.tape = float(ls[stardata["tape"]])
        except ValueError:
            print("Tape misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            survexleg.tape = 1000
        try:
            lclino = ls[stardata["clino"]]
        except (KeyError, IndexError):
            print("Clino misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            lclino = "error"
        try:
            lcompass = ls[stardata["compass"]]
        except (KeyError, IndexError):
            print("Compass misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
            lcompass = "error"
        if lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print("Compass misread in", survexblock.survexfile.path)
                print("Stardata:", stardata)
                print("Line:", ls)
                survexleg.compass = 1000
            survexleg.clino = -90.0
        else:
            assert line_leg_regex.match(lcompass), ls
            assert line_leg_regex.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave

    # only save proper legs
    survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
        except ValueError:
            print("Length not added")
    survexblock.save()


def LoadSurvexEquate(survexblock, sline):
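    """Register every station named on a *equate line against this survex block."""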
    #print sline #
    stations = sline.split()
    assert len(stations) > 1
    for station in stations:
        survexblock.MakeSurvexStation(station)


def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
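    """Passage (cross-section) data lines are currently ignored."""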
    pass


stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
regex_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
regex_team_member = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
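# stardatadefault mirrors the usual "*data normal from to tape compass clino" column order.
# regex_comment splits a line into "data ; comment", and regex_star picks out star commands,
# e.g. regex_star.match("*data normal from to tape compass clino").groups()
#      == ("data", "normal from to tape compass clino")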


def RecursiveLoad(survexblock, survexfile, fin, textlines):
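    """Read a survex file line by line, creating a SurvexBlock for each *begin/*end
    section and handling legs, *team, *date, *data, *include and related commands.
    Recurses into *include files and nested *begin blocks."""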
    iblankbegins = 0
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]

    # uncomment to print out all files during parsing
    print(" - Reading file: " + survexblock.survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Try to find the cave in the DB; if not found, use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
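    # e.g. a path containing "caves-1623/204/" gives the cave reference "1623-204"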
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        # print('Match')
        # print(pos_cave)
        cave = models.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave

    svxlines = fin.read().splitlines()
    # print('Cave - preloop ' + str(survexfile.cave))
    # print(survexblock)
    for svxline in svxlines:

        # print(survexblock)
        # print(svxline)
        # if not svxline:
        #     print(' - Not survex')
        #     return
        # textlines.append(svxline)

        lineno += 1
        # print(' - Line: %d' % lineno)

        # break the line at the comment
        sline, comment = regex_comment.match(svxline.strip()).groups()
        # detect ref line pointing to the scans directory
        mref = comment and regex_ref.match(comment)
        if mref:
            refscan = "%s#%s" % (mref.group(1), mref.group(2))
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
            #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
            survexblock.save()
            continue

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out; needs to be added to survex
        # needs to match regex_qm
        # ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and regex_qm.match(comment)
        if qmline:
            print(qmline.groups())
            #(u'1', u'B', u'miraclemaze', u'1.17', u'-', None, u'\tcontinuation of rift')
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            print('Cave - %s' % survexfile.cave)
            print('QM no %d' % int(qm_no))
            print('QM grade %s' % qm_grade)
            print('QM section %s' % qm_from_section)
            print('QM station %s' % qm_from_station)
            print('QM res section %s' % qm_resolve_section)
            print('QM res station %s' % qm_resolve_station)
            print('QM notes %s' % qm_notes)

            # If the QM isn't resolved (a resolved QM has a resolving station) then load it
            if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunk, named)
                if len(from_section) > 0:
                    print(from_section[0])
                    from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        print(from_station[0])
                        qm = models.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                print('QM found but resolved')

        #print('Cave -sline ' + str(cave))
        if not sline:
            continue

        # detect the star command
        mstar = regex_star.match(sline)
        if not mstar:
            if "from" in stardata:
                # print('Cave ' + str(survexfile.cave))
                # print(survexblock)
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
                # print(' - From: ')
                #print(stardata)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
                # print(' - Passage: ')
            #Missing "station" in stardata.
            continue

        # process the star command
        cmd, line = mstar.groups()
        cmd = cmd.lower()
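        # Dispatch on the star command: the branches below handle *include, *begin,
        # *end, *date, *team, *title, *require, *data, *equate and *fix.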
        if re.match("(?i)include$", cmd):
            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
            print(' - Include file found, including - ' + includepath)
            # Try to find the cave in the DB; if not found, use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(pos_cave)
                cave = models.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print('No match for %s' % includepath)
            includesurvexfile = models.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)

        elif re.match("(?i)begin$", cmd):
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB; if not found, use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    print(pos_cave)
                    cave = models.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print('No match for %s' % newsvxpath)

                name = line.lower()
                print(' - Begin found for: ' + name)
                # print('Block cave: ' + str(survexfile.cave))
                survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                # print(survexblockdown)
                textlinesdown = [ ]
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
            else:
                iblankbegins += 1

        elif re.match("(?i)end$", cmd):
            if iblankbegins:
                iblankbegins -= 1
            else:
                survexblock.text = "".join(textlines)
                survexblock.save()
                # print(' - End found: ')
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                # print(' - Time to process: ' + str(timetaken))
                return

        elif re.match("(?i)date$", cmd):
            if len(line) == 10:
                #print(' - Date found: ' + line)
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()

        elif re.match("(?i)team$", cmd):
            # print(' - Team found: ')
            mteammember = regex_team.match(line)
            if mteammember:
                for tm in regex_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person = personexpedition.person
                            personrole.save()

        elif cmd == "title":
            #print(' - Title found: ')
            survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()

        elif cmd == "require":
            # should we check the survex version available for processing?
            pass

        elif cmd == "data":
            #print(' - Data found: ')
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            #print(' - Star data: ', stardata)
            #print(ls)
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
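            # ls[0] is the data style itself, so "i - 1" maps each field name to its column
            # index on subsequent leg lines; "*data normal from to tape compass clino"
            # reproduces stardatadefault.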
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line

        elif cmd == "equate":
            #print(' - Equate found: ')
            LoadSurvexEquate(survexblock, line)

        elif cmd == "fix":
            #print(' - Fix found: ')
            survexblock.MakeSurvexStation(line.split()[0])

        else:
            #print(' - Stuff')
            if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
                           "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
                print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)

    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(' - Time to process: ' + str(timetaken))


def LoadAllSurvexBlocks():
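    """Flush all survex-derived objects from the database, then re-parse the top-level
    survex file (settings.SURVEX_TOPNAME) and everything it includes."""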

    print('Loading All Survex Blocks...')

    models.SurvexBlock.objects.all().delete()
    models.SurvexFile.objects.all().delete()
    models.SurvexDirectory.objects.all().delete()
    models.SurvexEquate.objects.all().delete()
    models.SurvexLeg.objects.all().delete()
    models.SurvexTitle.objects.all().delete()
    models.SurvexPersonRole.objects.all().delete()
    models.SurvexStation.objects.all().delete()

    print(" - Data flushed")

    survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfile.save()
    survexfile.SetDirectory()

    #Load all
    survexblockroot = models.SurvexBlock(name="root", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
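# Matches one line of the .pos file read in LoadPos below, e.g.
# "(  123.45,  678.90,   10.11 ) 1623.204.somestation"; groups are x, y, z and the station name.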


def LoadPos():
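    """Run cavern and 3dtopos on the top-level survex file, then read the resulting
    .pos file and store x/y/z coordinates on the matching SurvexStation objects."""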

    print('Loading Pos....')

    call([settings.CAVERN, "--output=%s%s.3d" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME), "%s%s.svx" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)])
    call([settings.THREEDTOPOS, '%s%s.3d' % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)], cwd=settings.SURVEX_DATA)
    posfile = open("%s%s.pos" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME))
    posfile.readline()  # Drop header
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, name = r.groups()
            try:
                ss = models.SurvexStation.objects.lookup(name)
                ss.x = float(x)
                ss.y = float(y)
                ss.z = float(z)
                ss.save()
            except:
                print("%s not parsed in survex" % name)