mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-25 08:41:51 +00:00

bugfix and making more robust

This commit is contained in:
Philip Sargent 2023-11-05 15:20:45 +02:00
parent d524f94c47
commit 2c67351424
3 changed files with 50 additions and 56 deletions

View File

@@ -208,7 +208,9 @@ class EntranceLetterForm(ModelForm):
    Nb. The relationship between caves and entrances has historically been a many to many relationship.
    With entrances gaining new caves and letters when caves are joined.
    """
-    entranceletter = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "2"}))
+    # This only needs to be required=True for the second and subsequent entrances, not the first. Tricky.
+    entranceletter = forms.CharField(required=True, widget=forms.TextInput(attrs={"size": "2"}))

    class Meta:
        model = CaveAndEntrance
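The Meta model here, CaveAndEntrance, is the join table behind the many-to-many relationship that the docstring describes, with the entrance letter stored per cave/entrance pairing. As rough orientation only (this is not troggle's actual model; it assumes a normal Django app and every field other than entranceletter is invented), such a through model looks like this:

# Hypothetical sketch for orientation, not troggle source; assumes a normal
# Django app, and all field names other than entranceletter are invented.
from django.db import models

class Cave(models.Model):
    name = models.CharField(max_length=200)

class Entrance(models.Model):
    name = models.CharField(max_length=200)
    # An entrance can serve several caves, and a cave can have several entrances.
    caves = models.ManyToManyField(Cave, through="CaveAndEntrance")

class CaveAndEntrance(models.Model):
    cave = models.ForeignKey(Cave, on_delete=models.CASCADE)
    entrance = models.ForeignKey(Entrance, on_delete=models.CASCADE)
    # The per-pairing letter ("a", "b", ...) that the form above now requires.
    entranceletter = models.CharField(max_length=2, blank=True)

Note that required=True now applies to every entrance, even though, as the new comment says, only the second and subsequent entrances of a cave strictly need a distinguishing letter.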

View File

@@ -519,7 +519,7 @@ def edit_entrance(request, path="", caveslug=None, entslug=None):
            entranceletter = entletterform.cleaned_data["entranceletter"]
        else:
            print(f"- POST INVALID {caveslug=} {entslug=} {path=} entletterform invalid.")
-            return render(request, "errors/badslug.html", {"entletter problem in edit_entrances()"})
+            return render(request, "errors/badslug.html", {"badslug": "entletter problem in edit_entrances()"})
        # if entform.is_valid() and entletterform.is_valid():
        if entform.is_valid():
            entrance = entform.save(commit=False)
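The bugfix in this hunk is a single character of punctuation with real consequences: {"entletter problem in edit_entrances()"} is a Python set literal, not a dict, so the old code handed render() something it cannot use as a template context. A small standalone illustration (not troggle code):

# Illustration only, not troggle code.
context_wrong = {"entletter problem in edit_entrances()"}             # one-element set
context_right = {"badslug": "entletter problem in edit_entrances()"}  # dict with a key the template can read

print(type(context_wrong))   # <class 'set'>
print(type(context_right))   # <class 'dict'>
# Django's render(request, template_name, context) expects a dict (or None);
# handing it a set raises a TypeError when the template context is built, and
# without a key the badslug.html template could never display the message anyway.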

View File

@@ -292,7 +292,7 @@ def LoadPositions():
    print(message)
    topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
-    print(f" - Generating a list of Pos from {topdata}.svx and then loading...")
+    print(f" - Generating a list of Pos from {topdata}.3d and then loading...")
    found = 0
    print("\n")  # extra line because cavern overwrites the text buffer somehow
@@ -341,62 +341,54 @@ def LoadPositions():
        stash_data_issue(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
        print(message)
        return
-    with open(pospath) as posfile:
-        posfile.readline()  # Drop header
+    posfile = open(pospath)
+    posfile.readline()  # Drop header
    sbdict = {}
    dups = 0
    lineno = 1  # we dropped the header
    for line in posfile.readlines():
        lineno += 1
        r = poslineregex.match(line)
        if r:
            x, y, z, sbid = r.groups()  # renamed id to sbid so as to not confuse with Django internal .id
            if sbid in sbdict:
                dups += 1
                message = f" ! DUPLICATE SurvexBlock identifier in .pos file '{sbid}'\n{sbs[sbid]}\n{lineno} / {line}"
                print(message)
                stash_data_issue(parser="positions", message=message)
            else:
                sbdict[sbid] = lineno
            for sid in mappoints:
                if not sid:  # catch None entry
                    continue
                if sbid.endswith(sid) or sbid.endswith(sid.lower()):
                    blockpath = "." + sbid[: -len(sid)].strip(".")  # only the most recent one that is mappoints
                    if sid in found_points:
                        found_points[sid] += 1
                    else:
                        found_points[sid] = 1
                    try:
                        ss = SurvexStation(name=sbid)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.entrance = mappoints[sid]
                        ss.save()
                        found += 1
                    except:
                        message = f" ! {lineno} FAIL to create SurvexStation Entrance point {blockpath} {sid}"
                        print(message)
                        stash_data_issue(parser="positions", message=message)
                        store_data_issues()
                        raise
    validate_entrance_stations()  # do not need to use db here really

    positions_filename = Path(pospath).name
    print(f" - {found} distinct SurvexStation entrance stations identified in {lineno:,} lines in {positions_filename}.")
    if dups > 0:
        print(f" - {dups} Duplicated SurvexStation entrances found")

    # for p in mappoints:
    #     if p not in found_points:
    #         print(f"Valid point {p} NOT found in {positions_filename}")
    # print(f" - {len(mappoints)} mappoints, {len(found_points)} found_points")
    # for sid in found_points:
    #     if found_points[sid] > 1:
    #         print(f" - {sid} - {found_points[sid]}")
    store_data_issues()
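For orientation, the loop above amounts to: skip the .pos header line, regex-match each line into (x, y, z, station id), warn on duplicate ids, and attach positions to the stations that correspond to known entrances. The standalone sketch below shows only that parsing pattern; the regex, header handling and function shape are illustrative assumptions, not troggle's actual poslineregex or LoadPositions.

# Standalone sketch of the parsing pattern only; the regex, header handling and
# return shape are assumptions, not troggle's actual poslineregex/LoadPositions.
import re

# A Survex .pos line looks roughly like:
# (   36670.37,  83317.43,  1903.97 ) 1623.p2002-06.05
posline = re.compile(r"^\(\s*([+-]?[\d.]+),\s*([+-]?[\d.]+),\s*([+-]?[\d.]+)\s*\)\s*(\S+)\s*$")

def load_entrance_positions(pospath, entrance_station_ids):
    """Return ({station id: (x, y, z)}, duplicate count) for ids ending in a known entrance station."""
    seen = {}    # station id -> first line number, for duplicate detection
    found = {}   # matched entrance station id -> (x, y, z)
    dups = 0
    with open(pospath) as posfile:
        posfile.readline()                   # drop the "( Easting, Northing, Altitude )" header
        for lineno, line in enumerate(posfile, start=2):
            m = posline.match(line)
            if not m:
                continue
            x, y, z, sbid = m.groups()
            if sbid in seen:
                dups += 1                    # same id seen twice: count it, keep the first
                continue
            seen[sbid] = lineno
            for sid in entrance_station_ids:
                if sid and (sbid.endswith(sid) or sbid.endswith(sid.lower())):
                    found[sid] = (float(x), float(y), float(z))
    return found, dups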