2023-01-29 17:03:50 +00:00
|
|
|
import copy
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
from datetime import datetime, timezone
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
|
|
|
|
import troggle.settings as settings
|
|
|
|
from troggle.core.models.caves import Cave, Entrance
|
|
|
|
from troggle.core.models.logbooks import QM
|
2023-09-06 20:58:14 +01:00
|
|
|
from troggle.core.models.survex import SurvexBlock, SurvexFile, SurvexPersonRole, SurvexStation
|
2023-01-30 23:04:11 +00:00
|
|
|
from troggle.core.models.wallets import Wallet
|
2023-01-29 17:03:50 +00:00
|
|
|
from troggle.core.models.troggle import DataIssue, Expedition
|
|
|
|
from troggle.core.utils import chaosmonkey, get_process_memory
|
|
|
|
from troggle.parsers.logbooks import GetCaveLookup
|
|
|
|
from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner
|
|
|
|
from troggle.parsers.survex import stash_data_issue, store_data_issues, ROOTBLOCK
|
|
|
|
|
|
|
|
"""Uses the imported data to find the locations of the survey stations labelled as
|
|
|
|
entrances
|
|
|
|
"""
|
|
|
|
|
|
|
|
todo = """
|
2023-11-04 16:28:14 +00:00
|
|
|
-
|
2023-01-29 17:03:50 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
class MapLocations(object):
|
2023-10-07 00:26:52 +01:00
|
|
|
"""Class used only for identifying the entrance locations
|
2023-10-07 17:41:25 +01:00
|
|
|
Formerly used to put locations on a prospecting map
|
|
|
|
|
|
|
|
We don't need these map locations any more ?!
|
|
|
|
They would only be used in addition to entrances going onto a map display"""
|
2023-01-29 17:03:50 +00:00
|
|
|
|
2023-10-10 23:03:28 +01:00
|
|
|
fp = [
|
2023-01-29 17:03:50 +00:00
|
|
|
("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
|
|
|
|
("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
|
|
|
|
("vd1", "VD1", "Reference", "VD1 survey point"),
|
|
|
|
("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
|
|
|
|
("2000", "Nipple", "Reference", "Nipple (Weiße Warze)"),
|
|
|
|
("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
|
|
|
|
("topcamp", "OTC", "Reference", "Old Top Camp"),
|
|
|
|
("laser.0", "LSR0", "Reference", "Laser Point 0"),
|
|
|
|
("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
|
|
|
|
("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
|
|
|
|
("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
|
|
|
|
("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
|
2023-10-07 00:26:52 +01:00
|
|
|
] # 12 fixed points
|
2023-10-10 23:03:28 +01:00
|
|
|
|
2023-11-05 00:24:37 +00:00
|
|
|
|
2023-10-10 23:03:28 +01:00
|
|
|
p = []
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
def points(self):
|
2023-11-05 20:59:23 +00:00
|
|
|
# Where we have a lot of survey stations which have not been assigned to proper Entrances and Caves yet, but we
|
|
|
|
# want to see them in the /stations report page:
|
|
|
|
|
|
|
|
# they already appear in any survex-generated stuff, e.g.
|
|
|
|
# $ survexport --gpx --fixes --survey=1623 --survey=1626 troggle_import_root.3d trogfix.gpx
|
|
|
|
# but not in troggle reports unless we include them here
|
|
|
|
|
|
|
|
nullent = Entrance.objects.all()[0]
|
2023-11-23 18:04:04 +00:00
|
|
|
# These pending entrnces have been added to the fixedpts but no Cave Description page has been created yet,
|
|
|
|
# nor are they on the pendingcaves.txt list - yet - as they really need a proper name,
|
|
|
|
# /walletedit/2013:02
|
|
|
|
# so probably 2013-BL-02,03 etc.
|
2023-11-05 00:24:37 +00:00
|
|
|
pending = [
|
|
|
|
("1623.p2013-cucc-pit", "no ent", False, nullent),
|
2023-11-10 13:34:13 +00:00
|
|
|
("1623.p2013-cucc-01", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-01-DUP", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-01B", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-slope", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-draft4pit ", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-draft4pit-DUP ", "no ent", False, nullent),
|
|
|
|
("1623.p2013-cucc-shelter1", "no ent", False, nullent),
|
2023-11-05 00:24:37 +00:00
|
|
|
|
|
|
|
("1626.p2013-cucc-draftyholes", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-pitarea", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-goodpit", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-goodpit-DUP", "no ent", False, nullent),
|
|
|
|
("1626.p2023-cucc-22mpit", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-lineofpits", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-12mpit", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-20mpit", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-2s-drop", "no ent", False, nullent),
|
2023-11-05 20:59:23 +00:00
|
|
|
("1626.p2013-cucc-01cp", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-rift", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-snowplug2", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-DraftHole", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-setofsnowholes", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-stotp20", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-snowplug", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-draft3pit", "no ent", False, nullent),
|
|
|
|
("1626.p2013-cucc-draft2pit", "no ent", False, nullent),
|
2023-11-05 00:24:37 +00:00
|
|
|
("1626.p2014-ms-01", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-02", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-03", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-04", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-05", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-06", "no ent", False, nullent),
|
|
|
|
("1626.p2014-ms-07", "no ent", False, nullent),
|
|
|
|
|
|
|
|
]
|
|
|
|
self.p = pending
|
2023-01-29 17:03:50 +00:00
|
|
|
for ent in Entrance.objects.all():
|
2023-10-11 22:58:20 +01:00
|
|
|
for st, ent_type in {ent.other_station: "other", ent.tag_station: "tag"}.items():
|
2023-10-07 00:26:52 +01:00
|
|
|
if st != "":
|
2023-10-10 23:03:28 +01:00
|
|
|
self.p.append((st, str(ent), ent.needs_surface_work(), ent))
|
2023-10-07 00:26:52 +01:00
|
|
|
store_data_issues()
|
2023-10-17 22:19:17 +01:00
|
|
|
message = f" - {len(self.p)} Survey stations found on Entrance objects - not yet validated against survex .pos file."
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
return self.p
|
|
|
|
|
|
|
|
def __str__(self):
|
2023-10-17 22:19:17 +01:00
|
|
|
return f"{len(self.p)} ent locations"
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
|
2023-10-07 00:26:52 +01:00
|
|
|
def validate_entrance_stations(ent=None):
|
2023-10-17 22:19:17 +01:00
|
|
|
"""Now that we have the located positions, we can check if the Entrances had correct stations
|
2023-10-07 00:26:52 +01:00
|
|
|
"""
|
|
|
|
bads = 0
|
|
|
|
good = 0
|
2023-10-10 23:03:28 +01:00
|
|
|
url="/caves" # fallback
|
2023-10-07 00:26:52 +01:00
|
|
|
|
2023-10-17 22:19:17 +01:00
|
|
|
def station_lower_case(station):
|
2023-10-10 23:03:28 +01:00
|
|
|
nonlocal url
|
2023-11-05 00:24:37 +00:00
|
|
|
if not station:
|
|
|
|
return
|
2023-10-07 00:26:52 +01:00
|
|
|
so = SurvexStation.objects.filter(name=station.lower())
|
|
|
|
if so.count() == 1:
|
|
|
|
message = f"X - Entrance {ent} station '{station}' should be '{station.lower()}'"
|
|
|
|
stash_data_issue(parser="positions", message=message, url=url)
|
|
|
|
print(message)
|
|
|
|
|
|
|
|
def validate_ent(ent):
|
2023-10-12 14:13:28 +01:00
|
|
|
"""For each of the two station strings in an Entrance object,
|
2023-10-07 00:26:52 +01:00
|
|
|
validate each string as referring to a valid SurvexStation object.
|
|
|
|
But our list of created SurvexStation objects is created by taking a list of strings and using them
|
|
|
|
to select from lines in a .pos file - so this is unnecessarily indirect.
|
|
|
|
"""
|
|
|
|
nonlocal bads
|
|
|
|
nonlocal good
|
2023-11-07 21:23:15 +00:00
|
|
|
# {% url "editentrance" cave.slug ent.entrance.slug %}
|
2023-10-07 00:26:52 +01:00
|
|
|
# e.g. url = f"/1623/101/1623-101:{ent}_entrance_edit"
|
|
|
|
cavelist = ent.cavelist()
|
|
|
|
if len(cavelist) == 1:
|
|
|
|
cave = cavelist[0]
|
|
|
|
url = f"/{cave.url}"
|
|
|
|
elif len(cavelist) > 1:
|
2023-10-12 14:13:28 +01:00
|
|
|
cave = cavelist[-1] # set to last in list
|
2023-10-07 00:26:52 +01:00
|
|
|
url = f"/{cave.url}"
|
|
|
|
else:
|
2023-10-12 14:13:28 +01:00
|
|
|
print(f"BUGGER bad cave '{cavelist}' on Entrance object {ent} ")
|
2023-10-07 00:26:52 +01:00
|
|
|
url="/caves"
|
2023-10-11 22:58:20 +01:00
|
|
|
for st, ent_type in {ent.other_station: "other", ent.tag_station: "tag"}.items():
|
2023-10-07 00:26:52 +01:00
|
|
|
if st == "":
|
|
|
|
continue
|
|
|
|
try:
|
|
|
|
so = SurvexStation.objects.filter(name=st)
|
|
|
|
except:
|
|
|
|
message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'. EXCEPTION."
|
|
|
|
stash_data_issue(parser="positions", message=message, url=url)
|
|
|
|
print(message)
|
|
|
|
bads +=1
|
|
|
|
continue
|
2023-10-10 23:03:28 +01:00
|
|
|
|
|
|
|
if so.count() == 1:
|
|
|
|
good +=1
|
|
|
|
# print(f"OK - Entrance {ent} '{ent_type}' station '{st}'")
|
|
|
|
continue
|
|
|
|
if so.count() != 0:
|
|
|
|
message =f"{so.count()} found for Entrance {ent} '{ent_type}' station '{st}' {so}"
|
|
|
|
else:
|
2023-11-05 00:24:37 +00:00
|
|
|
# not found
|
2023-10-10 23:03:28 +01:00
|
|
|
message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'"
|
|
|
|
if st == ent.best_station():
|
|
|
|
message = message + " - AND THIS IS THE 'BEST' ONE"
|
|
|
|
else:
|
|
|
|
message = message + " - not the 'best'"
|
|
|
|
stash_data_issue(parser="positions", message=message, url=url)
|
|
|
|
print(message)
|
|
|
|
bads +=1
|
2023-10-17 22:19:17 +01:00
|
|
|
station_lower_case(st)
|
2023-10-10 23:03:28 +01:00
|
|
|
continue
|
|
|
|
|
2023-10-07 00:26:52 +01:00
|
|
|
|
|
|
|
if ent:
|
|
|
|
return validate_ent(ent)
|
|
|
|
|
|
|
|
|
|
|
|
for ent in Entrance.objects.all():
|
|
|
|
validate_ent(ent)
|
|
|
|
|
2023-10-15 16:43:15 +01:00
|
|
|
print(f" - {good} valid SurvexStations of all types found on Entrances.")
|
|
|
|
print(f" - {bads} bad SurvexStations of all types found on Entrances.")
|
2023-10-07 00:26:52 +01:00
|
|
|
return True # not necessarily.. but unused return value
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
def LoadPositions():
|
|
|
|
"""First load the survex stations for entrances and fixed points (about 600) into the database.
|
2023-09-30 18:35:40 +01:00
|
|
|
Run 'cavern' to produce a complete .3d file, then run 'survexport -pos' to produce a table of
|
|
|
|
all survey point positions in UTM cooridnates. Then lookup each of the 600 positions by name to
|
|
|
|
see if we have it in the database and if we do, then save the UTM x/y/z coordinates.
|
|
|
|
This gives us coordinates of the entrances.
|
2023-01-29 17:03:50 +00:00
|
|
|
If we don't have it in the database, print an error message and discard it.
|
|
|
|
"""
|
|
|
|
svx_t = 0
|
|
|
|
d3d_t = 0
|
2023-07-25 22:14:46 +01:00
|
|
|
|
|
|
|
DataIssue.objects.filter(parser="positions").delete()
|
2023-10-07 00:26:52 +01:00
|
|
|
SurvexStation.objects.all().delete()
|
2023-07-25 22:14:46 +01:00
|
|
|
|
2023-01-29 17:03:50 +00:00
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
def runcavern3d(msg=None):
|
|
|
|
if msg:
|
|
|
|
print(" - ", msg)
|
2023-01-29 17:03:50 +00:00
|
|
|
outputdir = Path(str(f"{topdata}.svx")).parent
|
|
|
|
file3d = Path(f"{topdata}.3d")
|
|
|
|
try:
|
|
|
|
sp = subprocess.run(
|
|
|
|
[settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
|
|
|
|
capture_output=True,
|
|
|
|
check=False,
|
|
|
|
text=True,
|
|
|
|
) # check=False means exception not raised
|
|
|
|
if sp.returncode != 0:
|
|
|
|
message = f" ! Error: cavern: creating {file3d} in runcavern3()"
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
# find the errors in the .log file
|
2023-01-29 17:03:50 +00:00
|
|
|
sp = subprocess.run(
|
|
|
|
["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
|
|
|
|
) # check=False means exception not raised
|
|
|
|
message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
|
|
|
|
except:
|
|
|
|
message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
|
|
|
|
if file3d.is_file():
|
|
|
|
message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
if file3d.is_file(): # might be an old one though, if previous step failed
|
2023-01-29 17:03:50 +00:00
|
|
|
try:
|
|
|
|
sp = subprocess.run(
|
|
|
|
[settings.SURVEXPORT, "--pos", f"{file3d}"],
|
|
|
|
cwd=settings.SURVEX_DATA,
|
|
|
|
capture_output=True,
|
|
|
|
check=False,
|
|
|
|
text=True,
|
|
|
|
)
|
|
|
|
if sp.returncode != 0:
|
|
|
|
print(
|
|
|
|
f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
|
|
|
|
+ str(sp.stdout)
|
|
|
|
+ "\n\nreturn code: "
|
|
|
|
+ str(sp.returncode)
|
|
|
|
)
|
|
|
|
except:
|
|
|
|
message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
else:
|
|
|
|
message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
|
2023-07-25 22:14:46 +01:00
|
|
|
stash_data_issue(parser="positions", message=message)
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
topdata = Path(settings.SURVEX_DATA, settings.SURVEX_TOPNAME)
|
2023-11-05 13:20:45 +00:00
|
|
|
print(f" - Generating a list of Pos from {topdata}.3d and then loading...")
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
found = 0
|
|
|
|
print("\n") # extra line because cavern overwrites the text buffer somehow
|
|
|
|
# cavern defaults to using same cwd as supplied input file
|
|
|
|
|
|
|
|
completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
|
2023-11-11 12:36:03 +00:00
|
|
|
cav_t = Path(completed_process.stdout.strip()).stat().st_mtime
|
|
|
|
cav_d = datetime.fromtimestamp(cav_t).strftime('%d %b %Y %H:%M:%S')
|
2023-01-29 17:03:50 +00:00
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
svxpath = topdata.with_suffix(".svx")
|
|
|
|
d3dpath = topdata.with_suffix(".3d")
|
|
|
|
pospath = topdata.with_suffix(".pos")
|
|
|
|
|
|
|
|
if not settings.DEVSERVER:
|
|
|
|
runcavern3d(f"Regen - on server {settings.DEVSERVER=}") # always regenerate .3d and .pos on the server
|
|
|
|
else:
|
|
|
|
# These basic tests fail to capture the case where a *included svx file has changed,
|
|
|
|
# typically this is one of the fixedpts *fix files.
|
|
|
|
for p in [pospath, d3dpath]:
|
|
|
|
if not p.is_file():
|
|
|
|
runcavern3d(f"Creating {p}.3d, .pos")
|
|
|
|
svx_t = svxpath.stat().st_mtime
|
|
|
|
d3d_t = d3dpath.stat().st_mtime # os.path.getmtime(d3dpath)
|
|
|
|
svx_d = datetime.fromtimestamp(svx_t).strftime('%d %b %Y %H:%M:%S')
|
|
|
|
d3d_d = datetime.fromtimestamp(d3d_t).strftime('%d %b %Y %H:%M:%S')
|
2023-01-29 17:03:50 +00:00
|
|
|
|
2023-11-11 12:36:03 +00:00
|
|
|
now = time.time()
|
|
|
|
if d3d_t - svx_t < 0: # stale, 3d older than svx file . But .svx timestamp does not reflect *include timestamps
|
|
|
|
runcavern3d(f"Regen - stale {d3d_d} earlier than {svx_d}")
|
|
|
|
elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway
|
|
|
|
runcavern3d(f"Regen - old")
|
|
|
|
elif d3d_t - cav_t < 0: # new version of cavern
|
|
|
|
runcavern3d(f"Regen - new survex version {d3d_d} earlier than {cav_d} ")
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
mappoints = {}
|
2023-10-17 22:19:17 +01:00
|
|
|
found_points = {}
|
|
|
|
pts = MapLocations().points()
|
|
|
|
for pt in pts:
|
2023-10-10 23:03:28 +01:00
|
|
|
svxid, number, point_type, ent = pt
|
|
|
|
#((st, str(ent), ent.needs_surface_work(), ent))
|
2023-10-17 22:19:17 +01:00
|
|
|
if svxid in mappoints:
|
2023-10-17 22:42:50 +01:00
|
|
|
print(f" = seen this svxid {svxid} for {ent} already on entrance {mappoints[svxid]}")
|
2023-10-17 22:19:17 +01:00
|
|
|
else:
|
|
|
|
mappoints[svxid] = ent
|
2023-07-25 22:14:46 +01:00
|
|
|
if svxid =="1":
|
2023-10-17 22:19:17 +01:00
|
|
|
print(f"BOGUS {pt}") # this is now checked for when importing the entrance stations in parsers/caves.py
|
2023-01-29 17:03:50 +00:00
|
|
|
|
|
|
|
if not Path(pospath).is_file():
|
|
|
|
message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
|
2023-10-07 00:26:52 +01:00
|
|
|
stash_data_issue(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
|
2023-01-29 17:03:50 +00:00
|
|
|
print(message)
|
|
|
|
return
|
2023-11-05 13:20:45 +00:00
|
|
|
with open(pospath) as posfile:
|
|
|
|
posfile.readline() # Drop header
|
|
|
|
|
|
|
|
sbdict = {}
|
|
|
|
dups = 0
|
|
|
|
lineno = 1 # we dropped the header
|
|
|
|
for line in posfile.readlines():
|
|
|
|
lineno += 1
|
|
|
|
r = poslineregex.match(line)
|
|
|
|
if r:
|
|
|
|
x, y, z, sbid = r.groups() # renamed id to sbid so as to not confuse with Django internal .id
|
|
|
|
if sbid in sbdict:
|
|
|
|
dups += 1
|
|
|
|
message = f" ! DUPLICATE SurvexBlock identifier in .pos file '{sbid}'\n{sbs[sbid]}\n{lineno} / {line}"
|
|
|
|
print(message)
|
|
|
|
stash_data_issue(parser="positions", message=message)
|
|
|
|
else:
|
|
|
|
sbdict[sbid] = lineno
|
|
|
|
|
2023-01-29 17:03:50 +00:00
|
|
|
|
2023-11-05 13:20:45 +00:00
|
|
|
for sid in mappoints:
|
|
|
|
if not sid: # catch None entry
|
|
|
|
continue
|
|
|
|
if sbid.endswith(sid) or sbid.endswith(sid.lower()):
|
|
|
|
blockpath = "." + sbid[: -len(sid)].strip(".") # only the most recent one that is mappoints
|
|
|
|
if sid in found_points:
|
|
|
|
found_points[sid] += 1
|
|
|
|
else:
|
|
|
|
found_points[sid] = 1
|
|
|
|
|
|
|
|
try:
|
|
|
|
ss = SurvexStation(name=sbid)
|
|
|
|
ss.x = float(x)
|
|
|
|
ss.y = float(y)
|
|
|
|
ss.z = float(z)
|
|
|
|
ss.entrance = mappoints[sid]
|
|
|
|
ss.save()
|
|
|
|
found += 1
|
|
|
|
except:
|
|
|
|
message = f" ! {lineno} FAIL to create SurvexStation Entrance point {blockpath} {sid}"
|
|
|
|
print(message)
|
|
|
|
stash_data_issue(parser="positions", message=message)
|
|
|
|
store_data_issues()
|
|
|
|
raise
|
2023-10-07 00:26:52 +01:00
|
|
|
validate_entrance_stations() # do not need to use db here really
|
|
|
|
positions_filename = Path(pospath).name
|
2023-10-17 22:19:17 +01:00
|
|
|
print(f" - {found} distinct SurvexStation entrance stations identified in {lineno:,} lines in {positions_filename}.")
|
2023-10-07 00:26:52 +01:00
|
|
|
if dups > 0:
|
|
|
|
print(f" - {dups} Duplicated SurvexStation entrances found")
|
2023-11-05 13:20:45 +00:00
|
|
|
|
2023-01-29 17:03:50 +00:00
|
|
|
store_data_issues()
|