troggle-unchained/parsers/locations.py

import copy
import os
import re
import subprocess
import sys
import time
from datetime import datetime, timezone
from pathlib import Path
import troggle.settings as settings
from troggle.core.models.caves import Cave, Entrance
from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexBlock, SurvexDirectory, SurvexFile, SurvexPersonRole, SurvexStation
from troggle.core.models.wallets import Wallet
from troggle.core.models.troggle import DataIssue, Expedition
from troggle.core.utils import chaosmonkey, get_process_memory
from troggle.parsers.logbooks import GetCaveLookup
from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner
from troggle.parsers.survex import stash_data_issue, store_data_issues, ROOTBLOCK
"""Uses the imported data to find the locations of the survey stations labelled as
entrances
"""
todo = """
- Pending a complete revision of how we handle GPS coordinates of entrances.
"""
class MapLocations(object):
"""Class used only for identifying the entrance locations"""
    p = [
        ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
        ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
        ("vd1", "VD1", "Reference", "VD1 survey point"),
        ("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
        ("2000", "Nipple", "Reference", "Nipple (Weiße Warze)"),
        ("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
        ("topcamp", "OTC", "Reference", "Old Top Camp"),
        ("laser.0", "LSR0", "Reference", "Laser Point 0"),
        ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
        ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
        ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
        ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
    ]

    def points(self):
        for ent in Entrance.objects.all():
            if ent.best_station():
                # print(f"{ent.filename}", end=", ")
                try:
                    k = ent.caveandentrance_set.all()[0].cave
                except:
                    message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
                    stash_data_issue(parser="positions", message=message)
                    print(message)
                    continue  # skip this entrance
                try:
                    areaName = k.getArea().short_name
                except:
                    message = f" ! Failed to get Area on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
                    stash_data_issue(parser="positions", message=message)
                    print(message)
                    store_data_issues()
                    raise
                self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
        message = f" - {len(self.p)} entrances linked to caves."
        print(message)
        return self.p

    def __str__(self):
        return f"{len(self.p)} map locations"
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
def LoadPositions():
"""First load the survex stations for entrances and fixed points (about 600) into the database.
Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
all survey point positions. Then lookup each position by name to see if we have it in the database
and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
If we don't have it in the database, print an error message and discard it.
"""
    svx_t = 0
    d3d_t = 0

    DataIssue.objects.filter(parser="positions").delete()

    def runcavern3d():
        outputdir = Path(str(f"{topdata}.svx")).parent
        # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
        #     format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))

        file3d = Path(f"{topdata}.3d")
        try:
            sp = subprocess.run(
                [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
                capture_output=True,
                check=False,
                text=True,
            )  # check=False means exception not raised
            if sp.returncode != 0:
                message = f" ! Error: cavern: creating {file3d} in runcavern3d()"
                stash_data_issue(parser="positions", message=message)
                print(message)

                # find the errors in the 1623.log file
                sp = subprocess.run(
                    ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
                )  # check=False means exception not raised
                message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
                stash_data_issue(parser="positions", message=message)
                print(message)
        except:
            message = f" ! CalledProcessError 'cavern' in runcavern3d() at {topdata}."
            stash_data_issue(parser="positions", message=message)
            print(message)

            if file3d.is_file():
                message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
                stash_data_issue(parser="positions", message=message)
                print(message)

        if file3d.is_file():  # might be an old one though
            try:
                # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
                sp = subprocess.run(
                    [settings.SURVEXPORT, "--pos", f"{file3d}"],
                    cwd=settings.SURVEX_DATA,
                    capture_output=True,
                    check=False,
                    text=True,
                )
                if sp.returncode != 0:
                    print(
                        f" ! Error: survexport creating {topdata}.pos in runcavern3d().\n\n"
                        + str(sp.stdout)
                        + "\n\nreturn code: "
                        + str(sp.returncode)
                    )
            except:
                message = f" ! CalledProcessError 'survexport' in runcavern3d() at {file3d}."
                stash_data_issue(parser="positions", message=message)
                print(message)
        else:
            message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
            stash_data_issue(parser="positions", message=message)
            print(message)

    topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
    print(f" - Generating a list of Pos from {topdata}.svx and then loading...")

    found = 0
    print("\n")  # extra line because cavern overwrites the text buffer somehow

    # cavern defaults to using same cwd as supplied input file
    completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
    cav_t = os.path.getmtime(completed_process.stdout.strip())

    svxpath = topdata + ".svx"
    d3dpath = topdata + ".3d"
    pospath = topdata + ".pos"

    svx_t = os.path.getmtime(svxpath)

    if os.path.isfile(d3dpath):
        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
        d3d_t = os.path.getmtime(d3dpath)

    now = time.time()
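
    # Decide whether the .3d/.pos files need regenerating: a missing .pos or .3d, a .3d older
    # than its .svx source, a .3d more than 60 days old, or a cavern binary newer than the .3d.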
    if not os.path.isfile(pospath):
        runcavern3d()
    if not os.path.isfile(d3dpath):
        runcavern3d()
    elif svx_t - d3d_t > 0:  # stale, .3d older than the .svx file
        runcavern3d()
    elif now - d3d_t > 60 * 24 * 60 * 60:  # >60 days old, re-run anyway
        runcavern3d()
    elif cav_t - d3d_t > 0:  # new version of cavern
        runcavern3d()

    mappoints = {}
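    # mappoints is used as a set: its keys are the survex station ids we want coordinates for.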
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True
        if svxid == "1":
            print(f"BOGUS {pt}")  # this is now checked for when importing the entrance tags in parsers/caves.py

    if not Path(pospath).is_file():
        message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
        # DataIssue.objects.create(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
        stash_data_issue(parser="positions", message=message)
        print(message)
        return

    posfile = open(pospath)
    posfile.readline()  # Drop header
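    # (the first line of a .pos file is a header, typically "( Easting, Northing, Altitude )")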
    # not used survexblock on a SurvexStation since we stopped storing all of them in 2020:
    # try:
    #     survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
    # except:
    #     try:
    #         survexblockroot = SurvexBlock.objects.get(id=1)
    #     except:
    #         message = " ! FAILED to find root SurvexBlock"
    #         print(message)
    #         stash_data_issue(parser="positions", message=message)
    #         raise

    sbdict = {}
    dups = 0
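    # sbdict maps each station id seen in the .pos file to its line number, so duplicates can be reported.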
    lineno = 1  # we dropped the header
    for line in posfile.readlines():
        lineno += 1
        r = poslineregex.match(line)
        if r:
            x, y, z, sbid = r.groups()  # renamed id to sbid so as not to confuse it with the Django internal .id
            if sbid in sbdict:
                dups += 1
                message = f" ! DUPLICATE SurvexBlock identifier in .pos file '{sbid}'\n{sbdict[sbid]}\n{lineno} / {line}"
                print(message)
                stash_data_issue(parser="positions", message=message)
            else:
                sbdict[sbid] = lineno

                for sid in mappoints:
                    if sbid.endswith(sid):
                        blockpath = "." + sbid[: -len(sid)].strip(".")  # only the most recent one that is mappoints
                        # print(f"# match {sid} {sbid} {blockpath}")

                        # But why are we doing this? Why do we want the survexblock id for each of these?
                        # ..because mostly they don't actually appear in any SVX file. We should match them up
                        # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
                        # We are reading the .pos file so we only know the SurvexFile not the SurvexBlock.
                        # if False:
                        #     try:
                        #         sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
                        #         if len(sbqs) == 1:
                        #             sbqs[0]
                        #         if len(sbqs) > 1:
                        #             message = f" ! MULTIPLE {len(sbqs):3} SurvexBlocks '{blockpath}' from survex files mention Entrance point '{sbid}' (line {lineno})"
                        #             print(message)
                        #             stash_data_issue(parser="positions", message=message)
                        #             for b in sbqs:
                        #                 print(f" - {b}")
                        #             sbqs[0]
                        #     except:
                        #         message = f" ! {lineno} FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
                        #         print(message)
                        #         stash_data_issue(parser="positions", message=message)

                        try:
                            ss = SurvexStation(name=sbid)
                            ss.x = float(x)
                            ss.y = float(y)
                            ss.z = float(z)
                            ss.save()
                            found += 1
                        except:
                            message = f" ! {lineno} FAIL to create SurvexStation Entrance point {blockpath} {sid}"
                            print(message)
                            stash_data_issue(parser="positions", message=message)
                            store_data_issues()
                            raise
print(f" - {found} SurvexStation entrances found.")
print(f" - {dups} Duplicated SurvexStation entrances found")
store_data_issues()