Forked from expo/troggle

Commit d6cc32ee9a ("Detect more survex errors")
Parent: 3ac617431f
@@ -6,7 +6,7 @@ import copy
 import subprocess
 
 from pathlib import Path
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 
 from django.utils.timezone import get_current_timezone
 from django.utils.timezone import make_aware
@@ -32,7 +32,7 @@ todo = '''Also walk the entire tree in the :loser: repo looking for unconnected
 wider troggle system (the name is not a hyperlink) - apparently randomly.
 GetPersonExpeditionNameLookup() needs to be fixed.
 
-- fix THREEDCACHEDIR and put .3d files in same folder as .svx and fix CaveView
+-#BUG, if *date comes after *team, the person's date is not set at all. It needs re-setting at the endof the block.
 
 - LoadSurvexFile() Creates a new current survexfile and valid .survexdirectory
 The survexblock passed-in is not necessarily the parent. FIX THIS.
@@ -219,7 +219,7 @@ class LoadingSurvex():
             teammembers.append((personexpedition, tm))
             personrole = SurvexPersonRole(survexblock=survexblock, personexpedition=personexpedition, personname=tm)
             personrole.save()
-            personrole.expeditionday = survexblock.expeditionday
+            personrole.expeditionday = survexblock.expeditionday #BUG, if *date comes after *team, this is NOT SET.
             if personexpedition:
                 personrole.person=personexpedition.person
                 self.currentpersonexped.append(personexpedition)
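The #BUG comments in this hunk flag an ordering problem: when the *date line arrives after the *team lines, survexblock.expeditionday is still unset at the moment the SurvexPersonRole rows are created. A minimal sketch of one possible remedy, back-filling the roles once *date has been parsed; the helper name fix_team_dates is an assumption, not part of troggle:

def fix_team_dates(survexblock):
    """Hypothetical sketch: call after a *date line has set
    survexblock.expeditionday, so roles created earlier by *team
    are back-filled instead of being left with expeditionday=None."""
    for personrole in SurvexPersonRole.objects.filter(survexblock=survexblock):
        if personrole.expeditionday is None:
            personrole.expeditionday = survexblock.expeditionday
            personrole.save()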
@@ -681,10 +681,14 @@ class LoadingSurvex():
            #print("\n"+message)
            #print("\n"+message,file=sys.stderr)
            return
-        message = f" ! {headpath} is not a fully-registered cave. (while creating '{includelabel}' sfile & sdirectory in survex parsing)"
+        message = f" ! Error: '{headpath}' not a cave or in the ignore list of surface surveys. (while creating '{includelabel}')"
         print("\n"+message)
         print("\n"+message,file=sys.stderr)
         DataIssue.objects.create(parser='survex', message=message)
+        print(f' # datastack in LoadSurvexFile:{includelabel} type:', end="",file=sys.stderr)
+        for dict in self.datastack:
+            print(f'{dict["type"].upper()} ', end="",file=sys.stderr)
+
 
     def LoadSurvexFile(self, svxid):
         """Creates SurvexFile in the database, and SurvexDirectory if needed
@@ -722,9 +726,9 @@ class LoadingSurvex():
                newdirectory.cave = cave
                newfile.cave = cave
                #print("\n"+str(newdirectory.cave),file=sys.stderr)
-            else:
+            else: # probably a surface survey
                self.ReportNonCaveIncludes(headpath, svxid)
 
        if not newfile.survexdirectory:
            message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(svxid)
            print(message)
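The reworded error message above refers to an "ignore list of surface surveys": includes that are deliberately not caves and should not trigger ReportNonCaveIncludes(). A minimal sketch of what such a check could look like; the list contents, the variable name ignorenoncave and the helper name are assumptions for illustration only:

# Hypothetical sketch: include paths that are known surface surveys or
# fixed-point files, so failing to match them to a cave is not an error.
ignorenoncave = ["surface", "fixedpts", "gpx"]

def is_ignored_surface_survey(headpath):
    """Return True if this include path is on the ignore list,
    so no DataIssue needs to be created for it."""
    return any(headpath.startswith(prefix) for prefix in ignorenoncave)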
@@ -1200,36 +1204,47 @@ class LoadingSurvex():
         svx_t = now - 365*24*3600
 
         def runcavern():
-            '''This assumes all survex files have unique names and they are taken from many folders but the output is all put
-            into the same folder. A serious potential bug. Import parser checks uniqueness but much better not to do it like this.
-            # see design docum in troggle/templates/cave.html
-            # and views/caves.py rendercave()
+            '''regenerates the .3d file from the .svx if it is older than the svx file, or older than the software,
+            or randomly using chaosmonkey() just to keep things ticking over.
             '''
             print(f" - Regenerating stale (or chaos-monkeyed) cavern .log and .3d for '{fullpath}'\n at '{logpath}'\n")
             print(f"days svx old: {(svx_t - log_t)/(24*3600):.1f} cav:{(cav_t - log_t)/(24*3600):.1f} log old: { (now - log_t)/(24*3600):.1f}")
 
             outputdir = Path(str(f'{fullpath}.svx')).parent
-            sp = subprocess.run([settings.CAVERN, "--log", f'--output={outputdir}', f'{fullpath}.svx'])
+            sp = subprocess.run([settings.CAVERN, "--log", f'--output={outputdir}', f'{fullpath}.svx'],
+                                capture_output=True, check=False, text=True)
             if sp.returncode != 0:
-                print(f'fullpath: {fullpath}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
+                message = f' ! Error running {settings.CAVERN}: {fullpath}'
+                DataIssue.objects.create(parser='entrances', message=message)
+                print(message)
+                print(f'stderr:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
             self.caverncount += 1
 
         # should also collect all the .err files too and create a DataIssue for each one which
         # - is nonzero in size
         # - has Error greater than 5% anywhere, or some other more serious error
 
-        svxpath = fullpath + ".svx"
-        logpath = fullpath + ".log"
+        svxpath = Path(fullpath + ".svx")
+        logpath = Path(fullpath + ".log")
         outputdir = Path(svxpath).parent
 
-        if not os.path.isfile(logpath):
+        if not logpath.is_file(): # always run if logfile not there
             runcavern()
             return
 
+        self.caverndate = now - 2*365*24*3600
+
         if not self.caverndate:
-            completed_process = subprocess.run(["which", "{}".format(settings.CAVERN)],
-                                capture_output=True, check=True, text=True)
-            self.caverndate = os.path.getmtime(completed_process.stdout.strip())
+            sp = subprocess.run(["which", "{}".format(settings.CAVERN)],
+                                capture_output=True, check=False, text=True)
+            if sp.returncode != 0:
+                message = f' ! Error running "which" on {settings.CAVERN}'
+                DataIssue.objects.create(parser='entrances', message=message)
+                print(message)
+                print(f'stderr:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
+            self.caverndate = os.path.getmtime(sp.stdout.strip())
+        else:
+            self.caverndate = now - 2*365*24*3600
         cav_t = self.caverndate
         log_t = os.path.getmtime(logpath)
         svx_t = os.path.getmtime(svxpath)
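A recurring pattern in this commit: subprocess.run() is now called with check=False so a non-zero exit status no longer raises CalledProcessError; instead the returncode is inspected and the failure is recorded as a DataIssue and printed. A minimal, self-contained sketch of that pattern, assuming only the standard library; the run_and_report helper name is illustrative and not part of troggle:

import subprocess

def run_and_report(cmd, parser='survex'):
    """Run an external tool; on failure report the problem instead of raising.
    check=False means a non-zero exit code does not raise CalledProcessError."""
    sp = subprocess.run(cmd, capture_output=True, check=False, text=True)
    if sp.returncode != 0:
        message = f" ! Error running {cmd[0]}: return code {sp.returncode}\n{sp.stderr}"
        print(message)
        # in troggle this would also do:
        # DataIssue.objects.create(parser=parser, message=message)
    return sp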
@@ -1244,7 +1259,7 @@ class LoadingSurvex():
         if cav_t - log_t > 0: # new version of cavern
             runcavern()
             return
-        if chaosmonkey(400): # one in every 400 runs
+        if chaosmonkey(350): # one in every 350 runs
             runcavern()
 
 def FindAndLoadSurvex(survexblockroot):
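chaosmonkey(350) makes roughly one run in every 350 regenerate the .3d file even when the timestamps say nothing is stale, so stale-detection mistakes eventually self-heal. A sketch of what such a helper could look like; troggle's actual chaosmonkey() lives elsewhere in the codebase, so this random-based version is an assumption:

import random

def chaosmonkey(n):
    """Return True about once in every n calls, forcing an occasional
    rebuild even when the timestamp comparison finds nothing stale."""
    return random.randrange(n) == 0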
@@ -1332,6 +1347,7 @@ def FindAndLoadSurvex(survexblockroot):
         svxlines = fcollate.read().splitlines()
     #pr2 = cProfile.Profile()
     #pr2.enable()
+    print(" ", file=sys.stderr,end='')
     #----------------------------------------------------------------
     svx_load.LinearLoad(survexblockroot,survexfileroot.path, svxlines)
     #----------------------------------------------------------------
@@ -1385,6 +1401,7 @@ def LoadSurvexBlocks():
     DataIssue.objects.filter(parser='survex').delete()
     DataIssue.objects.filter(parser='survexleg').delete()
     DataIssue.objects.filter(parser='survexunits').delete()
+    DataIssue.objects.filter(parser='entrances').delete()
 
     survexfileroot = MakeSurvexFileRoot()
     # this next makes a block_object assciated with a file_object.path = SURVEX_TOPNAME
@@ -1421,21 +1438,48 @@ def LoadPositions():
 
        # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
        #    format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
-        # subprocess.call([settings.CAVERN, "--log", f"--output={topdata}", f"{topdata}.svx"])
+        file3d = Path(f'{topdata}.3d')
         try:
             sp = subprocess.run([settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
-                                capture_output=True, check=True, text=True)
+                                capture_output=True, check=False, text=True) #check=False means exception not raised
             if sp.returncode != 0:
-                print(f'topdata: {topdata}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
-            # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
-            sp = subprocess.run([settings.SURVEXPORT, '--pos', f'{topdata}.3d'], cwd = settings.SURVEX_DATA,
-                                capture_output=True, check=True, text=True)
-            if sp.returncode != 0:
-                print(f'topdata: {topdata}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
-        except:
-            print(f'topdata: {topdata}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
+                message = f' ! Error: cavern: creating {file3d} in runcavern3()'
+                DataIssue.objects.create(parser='survex', message=message)
+                print(message)
+
+            # find the errors in the 1623.log file
+            sp = subprocess.run(["grep", "error:", f"{topdata}.log"],
+                                capture_output=True, check=False, text=True) #check=False means exception not raised
+            message = f' ! Error: cavern: {sp.stdout}'
+            DataIssue.objects.create(parser='survex', message=message)
+            print(message)
+
+        except:
+            message = " ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
+            DataIssue.objects.create(parser='survex', message=message)
+            print(message)
+
+            if file3d.is_file():
+                message = " ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
+                DataIssue.objects.create(parser='survex', message=message)
+                print(message)
+
+        if file3d.is_file(): # might be an old one though
+            try:
+                # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
+                sp = subprocess.run([settings.SURVEXPORT, '--pos', f'{file3d}'], cwd = settings.SURVEX_DATA,
+                                capture_output=True, check=False, text=True)
+                if sp.returncode != 0:
+                    print(f' ! Error: survexport creating {topdata}.pos in runcavern3().\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
+            except:
+                message = " ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
+                DataIssue.objects.create(parser='entrances', message=message)
+                print(message)
+        else:
+            message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
+            DataIssue.objects.create(parser='entrances', message=message)
+            print(message)
 
     topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
     print(' - Generating a list of Pos from %s.svx and then loading...' % (topdata))
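The hunk above shells out to grep to pull "error:" lines out of the cavern log and turns the output into a DataIssue. The same scan can be done in pure Python, which avoids depending on grep being installed on the server; a hedged sketch under that assumption (the scan_cavern_log name is illustrative, not troggle's API):

from pathlib import Path

def scan_cavern_log(logpath):
    """Collect the 'error:' lines that cavern wrote to its .log file,
    so each one can be reported rather than silently ignored."""
    errors = []
    logfile = Path(logpath)
    if logfile.is_file():
        for line in logfile.read_text(errors="replace").splitlines():
            if "error:" in line:
                errors.append(line.strip())
    return errors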
@@ -1476,7 +1520,13 @@ def LoadPositions():
         svxid, number, point_type, label = pt
         mappoints[svxid]=True
 
-    posfile = open("%s.pos" % (topdata))
+    if not Path(pospath).is_file():
+        message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
+        DataIssue.objects.create(parser='entrances', message=message)
+        print(message)
+        return
+
+    posfile = open(pospath)
     posfile.readline() #Drop header
     try:
         survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
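LoadPositions() now checks that the .pos file exists and records a DataIssue instead of crashing on open(). For context, a .pos file produced by survexport is a plain-text table: a header line, then one "( x, y, z ) station.name" line per station, which is why the code drops the first line before parsing. A minimal parsing sketch under that assumption; the regex and the read_pos name are illustrative, not the parser troggle actually uses:

import re

# Hypothetical sketch: skip the header line of a survexport .pos file,
# then pull "( x, y, z ) station" tuples out of each remaining line.
posline = re.compile(r"^\(\s*([\d\-+.]+),\s*([\d\-+.]+),\s*([\d\-+.]+)\s*\)\s+(\S+)")

def read_pos(pospath):
    stations = {}
    with open(pospath) as posfile:
        posfile.readline()  # drop the "( Easting, Northing, Altitude )" header
        for line in posfile:
            match = posline.match(line)
            if match:
                x, y, z, name = match.groups()
                stations[name] = (float(x), float(y), float(z))
    return stations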