forked from expo/troggle

commit efc43b0863
parent ff3cdb10dc

*units in feet and metres, subcaves
@@ -33,7 +33,8 @@ from django.contrib.auth.models import User
 from troggle.core.models import get_process_memory
 from troggle.core.models_caves import Cave, Entrance
 from troggle.parsers.imports import import_caves, import_people, import_surveyscans, \
-    import_logbooks, import_QMs, import_survex, import_loadpos, import_drawingsfiles
+    import_logbooks, import_QMs, import_survex, import_loadpos, import_drawingsfiles, \
+    import_subcaves
 import troggle.logbooksdump

 if os.geteuid() == 0:
@@ -318,7 +319,8 @@ if __name__ == "__main__":
         jq.enq("reinit",reinit_db)
     elif "test" in sys.argv:
         jq.enq("caves",import_caves)
-        jq.enq("people",import_people)
+        import_subcaves()
+        #jq.enq("people",import_people)
         #jq.run_now_django_tests(2)
     elif "caves" in sys.argv:
         jq.enq("caves",import_caves)
@@ -14,6 +14,7 @@ import troggle.parsers.people
 import troggle.parsers.surveys
 import troggle.parsers.logbooks
 import troggle.parsers.QMs
+import troggle.parsers.subcaves

 def import_caves():
     print("-- Importing Caves to ",end="")
@@ -56,3 +57,7 @@ def import_drawingsfiles():
     print("-- Importing Drawings files")
     troggle.parsers.surveys.LoadDrawingFiles()

+def import_subcaves():
+    print("-- Interpreting SubCaves from CaveDescriptions")
+    troggle.parsers.subcaves.importAllSubcaves()
+
@@ -137,7 +137,7 @@ def ParseDate(tripdate, year):
             day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
         else:
             assert False, tripdate
-            message" ! - Bad date in logbook: " + tripdate + " - " + year
+            message = " ! - Bad date in logbook: " + tripdate + " - " + year
             DataIssue.objects.create(parser='logbooks', message=message)
             logdataissues[tid+"author"]=message

@@ -8,7 +8,7 @@ import logging

 from django.conf import settings

-from troggle.core.models import Subcave, Cave
+from troggle.core.models_caves import Cave, NewSubCave
 from utils import save_carefully

 def getLinksInCaveDescription(cave):
@@ -29,12 +29,14 @@ def importSubcaves(cave):
                 settings.EXPOWEB,
                 os.path.dirname(cave.description_file),
                 link[0])
-            subcaveFile=open(subcaveFilePath,'r')
-            description=subcaveFile.read().decode('iso-8859-1').encode('utf-8')
+            subcaveFile=open(subcaveFilePath,'rb')
+            description=subcaveFile.read() # decode('iso-8859-1').encode('utf-8')

-            lookupAttribs={'title':link[1], 'cave':cave}
-            nonLookupAttribs={'description':description}
-            newSubcave=save_carefully(Subcave,lookupAttribs=lookupAttribs,nonLookupAttribs=nonLookupAttribs)
+            #lookupAttribs={'title':link[1], 'cave':cave}
+            #nonLookupAttribs={'description':description}
+            lookupAttribs={}
+            nonLookupAttribs={}
+            newSubcave=save_carefully(NewSubCave,lookupAttribs=lookupAttribs,nonLookupAttribs=nonLookupAttribs)

             logging.info("Added " + str(newSubcave) + " to " + str(cave))
         except IOError:
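
Note: the replaced pair of lines is the old Python 2 idiom; under Python 3, open(path, 'r') already returns decoded str, so calling .decode('iso-8859-1') on it fails. The commit side-steps this by reading raw bytes and deferring any decoding. A minimal sketch of the two options, assuming Python 3 and a hypothetical file name:

    # Option taken by the commit: read undecoded bytes.
    with open("subcave-description.html", "rb") as f:      # hypothetical path
        description = f.read()                              # bytes

    # Alternative sketch: let Python 3 decode on read with an explicit encoding.
    with open("subcave-description.html", "r", encoding="iso-8859-1") as f:
        description = f.read()                              # str, already decoded
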
@@ -49,7 +51,7 @@ def getLinksInSubcaveDescription(subcave):

 def getLinksInAllSubcaves():
     bigList=[]
-    for subcave in Subcave.objects.all():
+    for subcave in NewSubCave.objects.all():
         bigList+=getLinksInSubcaveDescription(subcave)
     return bigList

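
Note: save_carefully(Model, lookupAttribs, nonLookupAttribs) follows the familiar get-or-create pattern: the lookup attributes identify an existing row, the non-lookup attributes are the values to store. A rough equivalent using only stock Django, shown with the title/cave/description fields that the old Subcave call passed (the fields of NewSubCave are not visible in this diff, and the new call passes empty dicts):

    # Illustrative only - not troggle's helper. Stock Django get_or_create
    # makes the same lookup / non-lookup split via the defaults argument.
    subcave, created = Subcave.objects.get_or_create(
        title=link[1], cave=cave,                 # lookup attributes
        defaults={"description": description},    # only applied when creating
    )
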
@@ -38,20 +38,27 @@ class LoadingSurvex():
     other survex files.
     A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
     """
-
-    rx_flagsnot= re.compile(r"not\s")
-    rx_linelen = re.compile(r"[\d\-+.]+$")
-    rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
-    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
-    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
-    # remember there is also QM_PATTERN used in views_other and set in settings.py
-
     rx_begin = re.compile(r'(?i)begin')
     rx_end = re.compile(r'(?i)end$')
     rx_title = re.compile(r'(?i)title$')
     rx_ref = re.compile(r'(?i)ref$')
     rx_data = re.compile(r'(?i)data$')
     rx_flags = re.compile(r'(?i)flags$')
+    rx_alias = re.compile(r'(?i)alias$')
+    rx_entrance = re.compile(r'(?i)entrance$')
+    rx_date = re.compile(r'(?i)date$')
+    rx_units = re.compile(r'(?i)units$')
+    rx_team = re.compile(r'(?i)team$')
+    rx_set = re.compile(r'(?i)set$')
+    rx_names = re.compile(r'(?i)names$')
+
+    rx_flagsnot= re.compile(r"not\s")
+    rx_linelen = re.compile(r"[\d\-+.]+$")
+    rx_teammem = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
+    rx_person = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
+    rx_qm = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
+    # remember there is also QM_PATTERN used in views_other and set in settings.py
+    rx_tapelng = re.compile(r'(?i)(tape|length)$')

     rx_cave = re.compile(r'(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)')
     rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')
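
Note: the new class-level patterns exist so that the command dispatcher further down (see the `elif self.rx_date.match(cmd):` hunk near the end of this diff) can reuse one compiled object per command instead of calling re.match("(?i)date$", cmd) on every line. Python's re module does cache recently-used patterns, so the gain is mostly tidiness plus skipping the cache lookup on a hot path. The idea in isolation:

    import re

    rx_date = re.compile(r'(?i)date$')     # compiled once, e.g. at class level

    for cmd in ("date", "DATE", "data"):
        if rx_date.match(cmd):             # reuse the compiled pattern for each line
            print(cmd, "-> *date command")
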
@@ -71,19 +78,23 @@ class LoadingSurvex():
     datastardefault = {"type":"normal", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
     flagsdefault = {"duplicate":False, "surface":False, "splay":False, "skiplegs":False, "splayalias":False}

+    METRESINFEET = 3.28084
+
     datastar ={}
     flagsstar = {}
+    units = "metres"
     slength = 0.0
     legsnumber = 0
     depthbegin = 0
     depthinclude = 0
+    unitsstack = []
     legsnumberstack = []
     slengthstack = []
     personexpedstack = []
     stackbegin =[]
     flagsstack =[]
     datastack =[]
-    stackinclude = []
+    includestack = []
     stacksvxfiles = []
     svxfileslist = []
     svxdirs = {}
@@ -109,7 +120,7 @@ class LoadingSurvex():
         if cmd == "require":
             pass # should we check survex version available for processing?
         elif cmd in ["equate", "fix", "calibrate", "cs", "export", "case",
-                     "declination", "infer","instrument", "sd", "units"]:
+                     "declination", "infer","instrument", "sd"]:
             pass # we ignore all these, which is fine.
         else:
             if cmd in ["include", "data", "flags", "title", "entrance","set", "units", "alias", "ref"]:
@@ -123,7 +134,7 @@ class LoadingSurvex():

     def LoadSurvexTeam(self, survexblock, line):
         teammembers = [ ]
-        mteammember = self.rx_team.match(line)
+        mteammember = self.rx_teammem.match(line)
         if mteammember:
             for tm in self.rx_person.split(mteammember.group(2)):
                 if tm:
@@ -152,6 +163,22 @@ class LoadingSurvex():
             print((self.insp+message))
             models.DataIssue.objects.create(parser='survex', message=message)

+    def LoadSurvexUnits(self, survexblock, line):
+        # all for 4 survex files with measurements in feet. bugger.
+        tapeunits = self.rx_tapelng.match(line) # tape|length
+        if not tapeunits:
+            return
+        feet = re.match("(?i)feet$",line)
+        metres = re.match("(?i)(METRIC|METRES|METERS)",line)
+        if feet:
+            self.units = "feet"
+        elif metres:
+            self.units = "metres"
+        else:
+            message = "! *UNITS in YARDS (!?) - not converted '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
+            print((self.insp+message))
+            models.DataIssue.objects.create(parser='survex', message=message)
+
     def LoadSurvexDate(self, survexblock, line):
         # we should make this a date range for everything
         if len(line) == 10:
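
Note: LoadSurvexUnits() above only cares whether the tape/length unit is feet or metres; anything else (the yards case) is recorded as a DataIssue rather than converted. A stripped-down sketch of the same classification, with made-up argument strings (the exact *units argument forms accepted by survex are not restated here):

    import re

    def classify_units(args):
        # Illustrative only: decide which length unit a *units argument selects.
        if re.search(r"(?i)\bfeet\b", args):
            return "feet"
        if re.search(r"(?i)\b(metric|metres|meters)\b", args):
            return "metres"
        return None   # e.g. yards - left unconverted and flagged upstream

    print(classify_units("tape feet"))     # -> feet
    print(classify_units("tape metres"))   # -> metres
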
@@ -174,6 +201,7 @@ class LoadingSurvex():
     def LoadSurvexLeg(self, survexblock, sline, comment):
         """This reads compass, clino and tape data but only keeps the tape lengths,
         the rest is discarded after error-checking.
+        Now skipping the error checking - returns as soon as the leg is not one we count.
         """
         invalid_clino = 180.0
         invalid_compass = 720.0
@@ -186,7 +214,6 @@ class LoadingSurvex():

         if debugprint:
             print("! LEG datastar type:{}++{}\n{} ".format(self.datastar["type"].upper(), survexblock.survexfile.path, sline))
-        # SKIP PASSAGES *data passage
         if self.datastar["type"] == "passage":
             return
         if self.datastar["type"] == "cartesian":
@@ -197,7 +224,8 @@ class LoadingSurvex():
             return
         if self.datastar["type"] == "cylpolar":
             return
-        # print(" !! LEG data lineno:{}\n !! sline:'{}'\n !! datastar['tape']: {}".format(self.lineno, sline, self.datastar["tape"]))
+        if debugprint:
+            print(" !! LEG data lineno:{}\n !! sline:'{}'\n !! datastar['tape']: {}".format(self.lineno, sline, self.datastar["tape"]))

         if self.datastar["type"] != "normal":
             return
@@ -242,6 +270,8 @@ class LoadingSurvex():
         # tape = tape.replace("/",".") # edited original file (only one) instead.
         try:
             survexleg.tape = float(tape)
+            if self.units =="feet":
+                survexleg.tape = float(tape) / METRESINFEET
             self.legsnumber += 1
         except ValueError:
             print(("! Tape misread in", survexblock.survexfile.path))
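
Note: METRESINFEET = 3.28084 is feet-per-metre, so a tape reading taken in feet is divided by it to store metres: 32.8084 ft / 3.28084 = 10 m. A standalone sketch of the normalisation (note that in the class above the constant is a class attribute, so inside a method it would normally be reached as self.METRESINFEET):

    METRESINFEET = 3.28084            # feet per metre

    def tape_in_metres(tape, units):
        # Illustrative only: normalise one tape reading to metres.
        value = float(tape)
        if units == "feet":
            value = value / METRESINFEET
        return value

    print(tape_in_metres("32.8084", "feet"))    # ~10.0
    print(tape_in_metres("10.0", "metres"))     # 10.0
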
@@ -698,7 +728,8 @@ class LoadingSurvex():

             depth = " " * self.depthbegin
             self.insp = depth
-            #print("{:2}{} - Begin for :'{}'".format(self.depthbegin,depth, blkid))
+            if debugprint:
+                print("{:2}{} - Begin for :'{}'".format(self.depthbegin,depth, blkid))
             pathlist = ""
             for id in self.stackbegin:
                 if len(id) > 0:
@@ -708,9 +739,10 @@ class LoadingSurvex():
             nonlocal args

             depth = " " * self.depthbegin
-            #print("{:2}{} - End from:'{}'".format(self.depthbegin,depth,args))
-            #print("{:2}{} - LEGS: {} (n: {}, length:{})".format(self.depthbegin,
-            #    depth, self.slength, self.slength, self.legsnumber))
+            if debugprint:
+                print("{:2}{} - End from:'{}'".format(self.depthbegin,depth,args))
+                print("{:2}{} - LEGS: {} (n: {}, length:{} units:{})".format(self.depthbegin,
+                    depth, self.slength, self.slength, self.legsnumber, self.units))

         def pushblock():
             nonlocal blkid
@@ -778,6 +810,7 @@ class LoadingSurvex():
                 blkid = args.lower()
                 # PUSH state ++++++++++++++
                 self.stackbegin.append(blkid)
+                self.unitsstack.append(self.units)
                 self.legsnumberstack.append(self.legsnumber)
                 self.slengthstack.append(self.slength)
                 self.personexpedstack.append(self.currentpersonexped)
@@ -785,6 +818,7 @@ class LoadingSurvex():
                 # PUSH state ++++++++++++++
                 self.legsnumber = 0
                 self.slength = 0.0
+                self.units = "metres"
                 self.currentpersonexped = []
                 printbegin()
                 newsurvexblock = models_survex.SurvexBlock(name=blkid, parent=survexblock,
@@ -819,6 +853,7 @@ class LoadingSurvex():
                 popblock()
                 self.currentpersonexped = self.personexpedstack.pop()
                 self.legsnumber = self.legsnumberstack.pop()
+                self.units = self.unitsstack.pop()
                 self.slength = self.slengthstack.pop()
                 blkid = self.stackbegin.pop()
                 self.currentsurvexblock = survexblock.parent
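
Note: together with the class attributes added earlier (units, unitsstack) these three hunks make the current unit part of the per-block state that is pushed at *begin and popped at *end, so a *units feet inside a nested block cannot leak into its parent. The save/restore pattern reduced to its essentials (illustrative, not the troggle classes):

    class BlockState:
        # Illustrative only: save/restore per-block parser state around *begin/*end.
        def __init__(self):
            self.units = "metres"
            self.unitsstack = []

        def begin(self):
            self.unitsstack.append(self.units)   # save the parent block's units
            self.units = "metres"                # nested block starts from the default

        def end(self):
            self.units = self.unitsstack.pop()   # restore the parent block's units

    state = BlockState()
    state.begin()
    state.units = "feet"        # e.g. a "*units tape feet" seen inside the nested block
    state.end()
    print(state.units)          # -> metres: the parent block is unaffected
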
@@ -845,17 +880,19 @@ class LoadingSurvex():

             elif self.rx_data.match(cmd):
                 self.LoadSurvexDataCmd(survexblock, args)
-            elif re.match("(?i)alias$", cmd):
+            elif self.rx_alias.match(cmd):
                 self.LoadSurvexAlias(survexblock, args)
-            elif re.match("(?i)entrance$", cmd):
+            elif self.rx_entrance.match(cmd):
                 self.LoadSurvexEntrance(survexblock, args)
-            elif re.match("(?i)date$", cmd):
+            elif self.rx_date.match(cmd):
                 self.LoadSurvexDate(survexblock, args)
-            elif re.match("(?i)team$", cmd):
+            elif self.rx_units.match(cmd):
+                self.LoadSurvexUnits(survexblock, args)
+            elif self.rx_team.match(cmd):
                 self.LoadSurvexTeam(survexblock, args)
-            elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
+            elif self.rx_set.match(cmd) and self.rx_names.match(cmd):
                 pass
-            elif re.match("(?i)include$", cmd):
+            elif self.rx_include.match(cmd):
                 message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
                 print(message)
                 print(message,file=sys.stderr)
@@ -937,13 +974,13 @@ class LoadingSurvex():
                     fcollate.write(";*include {}\n".format(includepath))
                     flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includepath))
                     push = includepath.lower()
-                    self.stackinclude.append(push)
+                    self.includestack.append(push)
                     #-----------------
                     self.RecursiveScan(survexblock, includepath, fininclude, flinear, fcollate)
                     #-----------------
-                    pop = self.stackinclude.pop()
+                    pop = self.includestack.pop()
                     if pop != push:
-                        message = "!! ERROR mismatch *include pop!=push {}".format(pop, push, self.stackinclude)
+                        message = "!! ERROR mismatch *include pop!=push {}".format(pop, push, self.includestack)
                         print(message)
                         print(message,file=flinear)
                         print(message,file=sys.stderr)
@@ -1097,7 +1134,6 @@ def FindAndLoadSurvex(survexblockroot):
     print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)

     legsnumber = svx_load.legsnumber
-    slength = svx_load.slength
     mem1 = models.get_process_memory()

     print(" - Number of SurvexDirectories: {}".format(len(svx_load.survexdict)))
@@ -1112,7 +1148,7 @@ def FindAndLoadSurvex(survexblockroot):
     print("+", file=sys.stderr)
     sys.stderr.flush();
     sys.stdout = stdout_orig
-    return (legsnumber, slength)
+    return legsnumber

 def MakeSurvexFileRoot():
     """Returns a file_object.path = SURVEX_TOPNAME associated with directory_object.path = SURVEX_DATA
@@ -1135,29 +1171,25 @@ def LoadSurvexBlocks():
     models_survex.SurvexStation.objects.all().delete()
     print(" - survex Data Issues flushed")
     models.DataIssue.objects.filter(parser='survex').delete()
     models.DataIssue.objects.filter(parser='survexleg').delete()

     survexfileroot = MakeSurvexFileRoot()
     # this next makes a block_object assciated with a file_object.path = SURVEX_TOPNAME
     survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
                                                 legsall=0, legslength=0.0)
     # this is the first so id=1
     survexblockroot.save()

     print(' - Loading Survex Blocks...')
     memstart = models.get_process_memory()
     #----------------------------------------------------------------
-    legsnumber, slength = FindAndLoadSurvex(survexblockroot)
+    legsnumber = FindAndLoadSurvex(survexblockroot)
     #----------------------------------------------------------------
     memend = models.get_process_memory()
     print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))

     # Don't do this, it double-counts everything:
     #survexblockroot.legslength = slength
     #survexblockroot.legsall = legsnumber
     survexblockroot.save()

     print(" - total number of survex legs: {}".format(legsnumber))
-    print(" - total leg lengths loaded: {}m".format(slength))
     print(' - Loaded All Survex Blocks.')

 poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")