2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2026-02-08 11:28:23 +00:00

Should be nearly working, but crashes on saving edited entry

This commit is contained in:
2025-11-26 01:22:38 +02:00
parent 0e6a3e457d
commit ced9a7b024
13 changed files with 187 additions and 98 deletions

View File

@@ -43,7 +43,7 @@ def import_logbooks():
with transaction.atomic():
troggle.parsers.logbooks.LoadLogbooks()
def import_logbook(year=2024):
def import_logbook(year=2025):
print(f"-- Importing Logbook {year}")
with transaction.atomic():
troggle.parsers.logbooks.LoadLogbook(year)

View File

@@ -14,7 +14,7 @@ from django.template.defaultfilters import slugify
from parsers.people import GetPersonExpeditionNameLookup, known_foreigner, load_people_expos
from typing import Any, List, Tuple
from troggle.core.models.caves import GetCaveLookup
from troggle.core.models.caves import GetCaveLookup, Cave
from troggle.core.models.logbooks import LogbookEntry, PersonLogEntry
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
from troggle.core.utils import alphabet_suffix, get_process_memory, unique_slug
@@ -248,7 +248,7 @@ def tidy_trip_persons(trippeople, title, expedition, logtime_underground, tid):
return trippersons, author, guests
def tidy_trip_cave(place):
# GetCaveLookup() need to work better. None of this data is *used* though?
# GetCaveLookup() need to work better. Used in parsing logbooks: place=>cave
# 'tripcave' is converted to a string doing this, which renders as the cave slug.
lplace = place.lower()
@@ -526,8 +526,6 @@ def parser_html(year, expedition, txt, seq=""):
entry = LogbookEntryData(ldate, place, tripcave, triptitle, tripcontent, trippersons, author, guests, expedition, tu, tid)
logentries.append(entry)
if str(ldate) == "2025-07-08":
print(f"PARSED from html\n",entry,"\n")
return logentries
@@ -629,6 +627,7 @@ def parser_blog(year, expedition, txt, sq=""):
logtime_underground = 0
trippersons, author, guests = tidy_trip_persons(trippeople, triptitle, expedition, logtime_underground, tid)
# print(f" - author: {author}")
tripcave = tidy_trip_cave(place)
tripcontent = tidy_trip_image_urls(tripcontent, year)
tid = tidy_tid(tid, triptitle, datestamp)
@@ -691,7 +690,7 @@ def parse_logbook_for_expedition(expedition, blog=False):
"""
ldate = datetime.fromisoformat(entrydict["date"]).date()
place = entrydict["place"]
tripcave = entrydict["cave"]
tripcave=None
triptitle = entrydict["title"]
tripcontent = entrydict["text"]
@@ -699,20 +698,31 @@ def parse_logbook_for_expedition(expedition, blog=False):
expedition = Expedition.objects.get(name=entrydict["expedition"]["name"])
tu = entrydict["time_underground"]
tid = entrydict["slug"]
_author_person = Person.objects.get(slug=entrydict["author"]["slug"])
_author_nickname = entrydict["author"]["nickname"]
_author_tu = entrydict["author"]["tu"]
# author does not have tu or nickname, that info is on the same person in the participants list
author = PersonExpedition.objects.get(person=_author_person, expedition=expedition) # not a tuple
trippersons = []
for tp in entrydict["trippersons"]:
_person = Person.objects.get(slug=tp["slug"])
_personexpo = PersonExpedition.objects.get(person=_person, expedition=expedition)
# if "nickname" not in tp:
# tp["nickname"] = ""
# if "tu" not in tp:
# tp["tu"] = ""
trippersons.append((_personexpo,tp["nickname"],tp["tu"]))
tripcave = tidy_trip_cave(place)
if "cave" in entrydict:
_cave = Cave.objects.get(areacode=entrydict["cave"]["areacode"],
unofficial_number=entrydict["cave"]["unofficial_number"],
kataster_number=entrydict["cave"]["kataster_number"])
if tripcave != _cave:
message = f"! MISMATCH between place and Cave: {tripcave=} {_cave=}"
print(message)
DataIssue.objects.update_or_create(parser="logbooks", message=message, url=jsonurl)
logentry = LogbookEntryData(ldate, place, tripcave, triptitle, tripcontent, trippersons, author, guests, expedition, tu, tid)
if entrydict["date"] == "2025-07-08":
print(f"PARSED from JSON\n",logentry,"\n")
return logentry
@@ -734,15 +744,9 @@ def parse_logbook_for_expedition(expedition, blog=False):
expect = ENTRIES[year]
# print(" - Logbook for: " + year)
json_entries_dir = settings.EXPOWEB / "years" / year / "log_entries"
json_entries_dir = settings.EXPOWEB / "years" / year / settings.JSON_LOG_ENTRIES
if json_entries_dir.is_dir():
print(f" # WARNING year {year} has JSON-encoded logbook entries. Using these instead of the archive .html file.")
logentries = load_from_json()
logentries = [] # but don't actually use these.
# check_number_of_entries(logentries)
# return logentries
if year in LOGBOOK_PARSER_SETTINGS:
@@ -764,6 +768,15 @@ def parse_logbook_for_expedition(expedition, blog=False):
yearfile, parsefunc = BLOG_PARSER_SETTINGS[year]
print(f" - BLOG file {yearfile} using parser {parsefunc}")
else:
if json_entries_dir.is_dir():
print(f" # WARNING year {year} has JSON-encoded logbook entries. Using these instead of the archive .html file.")
logentries = load_from_json()
check_number_of_entries(logentries)
# we know this is being called for a non-blog from the blog=False setting
# so we can just skip the rest and return.
return logentries
lb = Path(expologbase, year, logbookpath.stem + logbookpath.suffix)
if not (lb.is_file()):
message = f" ! Logbook file does not exist (yet): '{lb}'"