mirror of https://expo.survex.com/repositories/troggle/.git synced 2025-12-18 06:47:09 +00:00

fixed survey station case sensitivity

2025-07-20 16:57:58 +02:00
parent 5770a9b2e7
commit 29840aabff
3 changed files with 101 additions and 92 deletions
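The change below adds a find_station() helper that looks a station name up exactly as written, then retries with lower- and upper-case variants before giving up. As a rough standalone sketch of that fallback pattern (the helper name and message wording here are illustrative, and SurvexStation is assumed to be troggle's Django model for survey stations, already imported):

def lookup_station(name):
    """Find a SurvexStation by name, tolerating wrong capitalisation."""
    if not name:
        return None
    # Try the name exactly as written first.
    matches = SurvexStation.objects.filter(name=name)
    if matches.count() == 1:
        return matches.first()
    # Fall back to common capitalisation variants and report the mismatch.
    for candidate in (name.lower(), name.upper()):
        matches = SurvexStation.objects.filter(name=candidate)
        if matches.count() == 1:
            print(f"station '{name}' only matches SurvexStation '{candidate}'")
            return matches.first()
    return None

Unlike the committed find_station(), which returns True/False and stashes a data issue, this sketch returns the matched object so a caller could also correct the stored entrance text in the same pass.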


@@ -128,9 +128,38 @@ def validate_entrance_stations(ent=None):
return
so = SurvexStation.objects.filter(name=station.lower())
if so.count() == 1:
message = f"X - Entrance {ent} station '{station}' should be '{station.lower()}'"
message = f" X - Entrance {ent} station text '{station}' should be '{station.lower()}' to match created SurvexStation"
stash_data_issue(parser="positions", message=message, url=url)
print(message)
def find_station(station, ent_type):
nonlocal url
if not station:
return False
try:
if SurvexStation.objects.filter(name=station).count() == 1:
return True
except:
message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{station}'. EXCEPTION."
stash_data_issue(parser="positions", message=message, url=url)
print(message)
bads +=1
return False
candidates = [station.lower(), station.upper()]
for stn in candidates:
if SurvexStation.objects.filter(name=stn).count() == 1:
message = f" # - Entrance {ent} has '{ent_type}' station '{station}'. with incorrect capitalisation {stn}"
stash_data_issue(parser="positions", message=message, url=url)
print(message)
return True
message =f" ! - No items found for Entrance {ent} '{ent_type}' station '{station}"
stash_data_issue(parser="positions", message=message, url=url)
print(message)
return False
def validate_ent(ent):
"""For each of the two station strings in an Entrance object,
@@ -153,37 +182,15 @@ def validate_entrance_stations(ent=None):
else:
print(f"BUGGER bad cave '{cavelist}' on Entrance object {ent} ")
url="/caves"
for st, ent_type in {ent.other_station: "other", ent.tag_station: "tag"}.items():
if st == "":
for stn, ent_type in {ent.other_station: "other", ent.tag_station: "tag"}.items():
if stn == "":
continue
try:
so = SurvexStation.objects.filter(name=st)
except:
message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'. EXCEPTION."
stash_data_issue(parser="positions", message=message, url=url)
print(message)
bads +=1
continue
if so.count() == 1:
if find_station(stn, ent_type):
good +=1
# print(f"OK - Entrance {ent} '{ent_type}' station '{st}'")
continue
if so.count() != 0:
message =f"{so.count()} found for Entrance {ent} '{ent_type}' station '{st}' {so}"
else:
# not found
message = f" ! - Entrance {ent} '{ent_type}' station '{st}' not found as a registered SurvexStation"
if st == ent.best_station():
message = message + " - AND THIS IS THE 'BEST' ONE"
else:
message = message + " - not the 'best'"
stash_data_issue(parser="positions", message=message, url=url)
print(message)
bads +=1
station_lower_case(st)
continue
bads +=1
if ent:
return validate_ent(ent)
@@ -241,65 +248,65 @@ def LoadPositions():
SurvexStation.objects.all().delete()
# def runcavern3d(msg=None):
# if msg:
# print(" - ", msg)
# outputdir = Path(str(f"{topdata}.svx")).parent
# file3d = Path(f"{topdata}.3d")
# try:
# sp = subprocess.run(
# [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
# capture_output=True,
# check=False,
# text=True,
# ) # check=False means exception not raised
# if sp.returncode != 0:
# message = f" ! Error: cavern: creating {file3d} in runcavern3()"
# stash_data_issue(parser="positions", message=message)
# print(message)
def runcavern3d(msg=None):
if msg:
print(" - ", msg)
outputdir = Path(str(f"{topdata}.svx")).parent
file3d = Path(f"{topdata}.3d")
try:
sp = subprocess.run(
[settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
capture_output=True,
check=False,
text=True,
) # check=False means exception not raised
if sp.returncode != 0:
message = f" ! Error: cavern: creating {file3d} in runcavern3()"
stash_data_issue(parser="positions", message=message)
print(message)
# # find the errors in the .log file
# sp = subprocess.run(
# ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
# ) # check=False means exception not raised
# message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
# stash_data_issue(parser="positions", message=message)
# print(message)
# find the errors in the .log file
sp = subprocess.run(
["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
) # check=False means exception not raised
message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
stash_data_issue(parser="positions", message=message)
print(message)
# except:
# message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
# stash_data_issue(parser="positions", message=message)
# print(message)
except:
message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
stash_data_issue(parser="positions", message=message)
print(message)
# if file3d.is_file():
# message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
# stash_data_issue(parser="positions", message=message)
# print(message)
if file3d.is_file():
message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
stash_data_issue(parser="positions", message=message)
print(message)
# if file3d.is_file(): # might be an old one though, if previous step failed
# try:
# sp = subprocess.run(
# [settings.SURVEXPORT, "--pos", f"{file3d}"],
# cwd=settings.SURVEX_DATA,
# capture_output=True,
# check=False,
# text=True,
# )
# if sp.returncode != 0:
# print(
# f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
# + str(sp.stdout)
# + "\n\nreturn code: "
# + str(sp.returncode)
# )
# except:
# message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
# stash_data_issue(parser="positions", message=message)
# print(message)
# else:
# message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
# stash_data_issue(parser="positions", message=message)
# print(message)
if file3d.is_file(): # might be an old one though, if previous step failed
try:
sp = subprocess.run(
[settings.SURVEXPORT, "--pos", f"{file3d}"],
cwd=settings.SURVEX_DATA,
capture_output=True,
check=False,
text=True,
)
if sp.returncode != 0:
print(
f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
+ str(sp.stdout)
+ "\n\nreturn code: "
+ str(sp.returncode)
)
except:
message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
stash_data_issue(parser="positions", message=message)
print(message)
else:
message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
stash_data_issue(parser="positions", message=message)
print(message)
topdata = Path(settings.SURVEX_DATA, settings.SURVEX_TOPNAME)
print(f" - Generating a list of Pos from {topdata}.3d and then loading...")
@@ -316,8 +323,8 @@ def LoadPositions():
d3dpath = topdata.with_suffix(".3d")
pospath = topdata.with_suffix(".pos")
# we do not need to do this as the previous 'survex' step in databaseReset generated the .3d and .pos file
# runcavern3d(f"Regen {settings.DEVSERVER=}") # always regenerate .3d and .pos as the *includes may have changed
runcavern3d(f"Regenerating .pos file {settings.DEVSERVER=}") # always regenerate .3d and .pos as the *includes may have changed
mappoints = {}
found_points = {}