diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index ee76465..6be21fa 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -353,12 +353,19 @@ def Parseloghtml01(year, expedition, txt):
try:
tripdate, triptitle, trippeople = tripheader.split("|")
except:
- message = f" ! - Fail to split out date|title|people. trip:<{tid}> '{tripheader.split('|')}'"
+ message = f" ! - Fail 3 to split out date|title|people. trip:<{tid}> '{tripheader.split('|')}'"
DataIssue.objects.create(parser='logbooks', message=message)
logdataissues[tid]=message
print(message)
- tripdate, triptitle = tripheader.split("|")
- trippeople = "anon"
+ try:
+ tripdate, triptitle = tripheader.split("|")
+ trippeople = "GUESS ANON"
+ except:
+ message = f" ! - Skipping logentry {year} Fail 2 to split out date|title (anon). trip:<{tid}> '{tripheader.split('|')}' CRASHES MySQL !"
+ DataIssue.objects.create(parser='logbooks', message=message)
+ logdataissues[tid]=message
+ print(message)
+ break
#print(f" #3 - tid: {tid}")
ldate = ParseDate(tripdate.strip(), year)
#print(f" # - tid: {tid} <{tripdate}> <{triptitle}> <{trippeople}>")
@@ -639,7 +646,7 @@ def LoadLogbooks():
if len(expos) <= 1:
print(" ! No expeditions found. Load 'people' first.\n")
nologbook = ["1976", "1977", "1978", "1979", "1980", "1981",
- "1987", "1988", "1989", # breaks mysql with db constraint fail - debug locally first
+ # "1987", "1988", "1989", # breaks mysql with db constraint fail - debug locally first
"1986", "2020",] #no expo
entries = {"2021": 0, "2019": 20, "2018": 74, "2017": 60, "2016": 81, "2015": 79,
"2014": 65, "2013": 51, "2012": 75, "2011": 68, "2010": 22, "2009": 52,
diff --git a/templates/base.html b/templates/base.html
index 3e73bb5..c82db6b 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -41,6 +41,7 @@
Data Issues |
tasks to do |
caves |
+ ents |
expoers |
survey lengths |
statistics |
diff --git a/urls.py b/urls.py
index d920cef..058fe6e 100644
--- a/urls.py
+++ b/urls.py
@@ -39,7 +39,7 @@ which is vital to writing code for the webapp. So the URL dispatch is declarativ
The API urls return TSV or JSON and are new in July 2020.
"""
-todo = '''Replace most re_path() with modern and simpler path()
+todo = '''Replace most re_path() with modern and simpler path(). Test VERY CAREFULLY for each change. It is fragile.
'''
# Many of these patterns do not work because troggle spent many years broken and we have
@@ -115,7 +115,6 @@ trogglepatterns = [
re_path(r'^newcave/$', caves.edit_cave, name="newcave"),
re_path(r'^cave/3d/(?P[^/]+)$', caves.cave3d, name="cave3d"),
- re_path(r'^cave/entrance/([^/]+)/?$', caves.caveEntrance),
re_path(r'^cave/description/([^/]+)/?$', caves.caveDescription),
re_path(r'^cave/(?P[^/]+)/?$', caves.cave, name="cave"),
re_path(r'^cave/(?P[^/]+)/?(?P[^/])$', ent), # view_caves.ent
@@ -124,15 +123,16 @@ trogglepatterns = [
# Note that urls eg '1623/161/l/rl89a.htm' are handled by cavepage which redirects them to 'expopage'
# Entrances
- re_path(r'^entrance/(?P[^/]+)/(?P[^/]+)/edit/', caves.edit_entrance, name = "editentrance"),
- re_path(r'^entrance/new/(?P[^/]+)$', caves.edit_entrance, name = "newentrance"),
+ re_path(r'^cave/entrance/([^/]+)/?$', caves.caveEntrance), # lists all entrances
+ re_path(r'^entrance/(?P[^/]+)/(?P[^/]+)/edit/', caves.edit_entrance, name = "editentrance"), #edit existing entrance
+ re_path(r'^entrance/new/(?P[^/]+)$', caves.edit_entrance, name = "newentrance"), # new entrance for a cave
# System admin and monitoring
path('statistics', statistics.stats, name="stats"),
path('stats', statistics.stats, name="stats"),
path('pathsreport', statistics.pathsreport, name="pathsreport"),
path('dataissues', statistics.dataissues, name="dataissues"),
- path('eastings', statistics.eastings, name="eastings"),
+ path('eastings', statistics.eastings, name="eastings"),
path('troggle', frontpage, name="frontpage"), # control panel. Shows recent actions.
path('todo/', todos, name="todos"),