mirror of https://expo.survex.com/repositories/troggle/.git

nearly complete store logbook entries in separate JSON files
@@ -176,7 +176,7 @@ class PersonLogEntry(TroggleModel):
     """

     personexpedition = models.ForeignKey("PersonExpedition", null=True, on_delete=models.CASCADE, db_index=True)
-    time_underground = models.FloatField(help_text="In decimal hours")
+    time_underground = models.FloatField(help_text="In decimal hours")
     logbook_entry = models.ForeignKey(LogbookEntry, on_delete=models.CASCADE, db_index=True)
     is_logbook_entry_author = models.BooleanField(default=False)
+    nickname_used = models.CharField(max_length=100,default="") # e.g. "Animal" or "Zonker", as it appears in the original logbook

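A hedged usage sketch for the new nickname_used field, assuming a Django shell inside the troggle project; pe and le are hypothetical stand-ins for an existing PersonExpedition and LogbookEntry:

    PersonLogEntry.objects.create(
        personexpedition=pe,           # hypothetical PersonExpedition instance
        time_underground=5.5,          # decimal hours, per the field's help_text
        logbook_entry=le,              # hypothetical LogbookEntry instance
        is_logbook_entry_author=True,
        nickname_used="Animal",        # the name as it appears in the original logbook
    )
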
@@ -380,7 +380,7 @@ def git_add(filename, cwd, commands=[]):
                 f"CANNOT git ADD on server for this file {filename}.\n\n" + msgdata
             )

-    print(f"git add {filename} in {cwd}")
+    # print(f"git add {filename} in {cwd}")
     cmd_add = [git, "add", filename]
     commands.append(cmd_add)
     cp_add = subprocess.run(cmd_add, cwd=cwd, capture_output=True, text=True)

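For context on the surrounding code: subprocess.run() with capture_output=True and text=True returns a CompletedProcess whose returncode and stderr would feed the CANNOT-git-ADD message above. A minimal standalone sketch (the filename and cwd values are hypothetical):

    import subprocess

    cmd_add = ["git", "add", "2025-07-08a.json"]   # hypothetical filename
    cp_add = subprocess.run(cmd_add, cwd="/tmp/repo", capture_output=True, text=True)
    if cp_add.returncode != 0:
        # git reports the problem on stderr, e.g. "fatal: pathspec ... did not match any files"
        print(f"CANNOT git ADD: {cp_add.stderr}")
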
@@ -414,35 +414,41 @@ def write_entries(entries, year, editor):
        """When doing JSON output of objects which have foreign keys to other objects in the database,
        we need to use a custom serializer instead of just "json.dump()"
        """

        author_link = PersonLogEntry.objects.select_related('personexpedition').get(
            logbook_entry=le,
            is_logbook_entry_author=True
        )
-        author = author_link.personexpedition.person
+        author = author_link.personexpedition.person

+        author_data = model_to_dict(author, fields=['slug'])
+        author_data["nickname"] = author_link.nickname_used
+        author_data["tu"] = le.time_underground

        participants_links = PersonLogEntry.objects.select_related('personexpedition').filter(
            logbook_entry=le,
            is_logbook_entry_author=False
        )
-        participants = []
+        participants = [author_data] # the author also appears in the participants list
        for pl in participants_links:
-            participants.append(pl.personexpedition.person)
+            particpant_dict = model_to_dict(pl.personexpedition.person, fields=['slug'])
+            particpant_dict["nickname"] = pl.nickname_used
+            particpant_dict["tu"] = le.time_underground
+            participants.append(particpant_dict)

-        author_data = model_to_dict(author, fields=['id', 'slug', 'nickname_used'])

-        participants_data = []
-        for p in participants:
-            participants_data.append(model_to_dict(p, fields=['id', 'slug', 'nickname_used']))
        expedition_data = model_to_dict(le.expedition, fields=['year', 'name'])

-        entrydict = model_to_dict(le, fields=('slug', 'date', 'expedition', 'title', 'cave', 'place', 'other_people', 'time_underground', 'text'))
+        entrydict = model_to_dict(le, fields=('slug', 'date', 'title', 'cave', 'place', 'other_people', 'time_underground', 'text'))
        entrydict['author'] = author_data
-        entrydict['participants_data'] = participants_data
+        entrydict['trippersons'] = participants
        entrydict['expedition'] = expedition_data
        return entrydict

    dirpath = settings.EXPOWEB / "years" / year / LOGBOOK_ENTRIES

    for le in entries:
-        filename = f"{le.slug}-{le.pk:03}.json"
+        # filename = f"{le.slug}-{le.pk:03}.json"
+        filename = f"{le.slug}.json"
        filepath = dirpath / filename
        ensure_dir_exists(filepath)

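On the docstring's point about needing a custom serializer: entrydict still holds a datetime.date under "date", which json.dump() cannot encode on its own. A minimal sketch of the kind of default= hook that covers this, assuming write_entries() ultimately calls json.dump(entrydict, ...); the json_default name is hypothetical:

    import json
    from datetime import date

    def json_default(obj):
        # Emit ISO-8601 strings for dates; parse_from_dict() below reads
        # these back with datetime.fromisoformat().
        if isinstance(obj, date):
            return obj.isoformat()
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")

    # e.g. json.dump(entrydict, f, default=json_default, indent=1)
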
@@ -16,7 +16,7 @@ from parsers.people import GetPersonExpeditionNameLookup, known_foreigner, load_
 from typing import Any, List, Tuple
 from troggle.core.models.caves import GetCaveLookup
 from troggle.core.models.logbooks import LogbookEntry, PersonLogEntry
-from troggle.core.models.troggle import DataIssue, Expedition
+from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
 from troggle.core.utils import alphabet_suffix, get_process_memory, unique_slug

 EPOCH = settings.EPOCH

@@ -305,7 +305,10 @@ def bulk_store_entries(entries):
     logbook_objs = []
     slug_to_entrydata = {}
     for entry in entries:
-        other_people = ", ".join(entry.guests)
+        if isinstance(entry.guests, list):
+            other_people = ", ".join(entry.guests) # this idiom takes a list of strings and concatenates them into a single string
+        else:
+            other_people = entry.guests
         # Ensure slug is unique, otherwise add suffix
         slug = entry.tid
         orig_slug = slug

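The idiom the new comment describes, shown standalone with hypothetical values; the isinstance() guard means a value that is already a single string now passes through the else branch untouched:

    guests = ["Alice", "Bob", "Carol"]   # hypothetical list of guest names
    other_people = ", ".join(guests)     # -> "Alice, Bob, Carol"

    guests = "Alice, Bob, Carol"         # already one string
    other_people = guests                # left as-is
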
@@ -682,20 +685,33 @@ def parse_logbook_for_expedition(expedition, blog=False):
     return logentries

 def parse_from_dict(entrydict):
-    ldate = entrydict["date"]
+    """We have imported the JSON data, but this still needs turning into links to
+    actual live Django objects that already exist: Expedition, Persons,
+    PersonExpedition etc.
+    """
+    ldate = datetime.fromisoformat(entrydict["date"]).date()
     place = entrydict["place"]
     tripcave = entrydict["cave"]
     triptitle = entrydict["title"]
     tripcontent = entrydict["text"]
-    trippersons = None
-    author = entrydict["author"]["slug"]

     guests = entrydict["other_people"]
-    expedition = entrydict["expedition"]
+    expedition = Expedition.objects.get(name=entrydict["expedition"]["name"])
     tu = entrydict["time_underground"]
-    tid = entrydict["slug"] # Is this right ? Or is it the end txt?
+    tid = entrydict["slug"]

+    _author_person = Person.objects.get(slug=entrydict["author"]["slug"])
+    _author_nickname = entrydict["author"]["nickname"]
+    _author_tu = entrydict["author"]["tu"]
+    author = PersonExpedition.objects.get(person=_author_person, expedition=expedition) # not a tuple
+    trippersons = []
+    for tp in entrydict["trippersons"]:
+        _person = Person.objects.get(slug=tp["slug"])
+        _personexpo = PersonExpedition.objects.get(person=_person, expedition=expedition)
+        trippersons.append((_personexpo,tp["nickname"],tp["tu"]))

     logentry = LogbookEntryData(ldate, place, tripcave, triptitle, tripcontent, trippersons, author, guests, expedition, tu, tid)
-    if ldate == "2025-07-08":
+    if entrydict["date"] == "2025-07-08":
         print(f"PARSED from JSON\n",logentry,"\n")

     return logentry

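Read together with write_entries() above, parse_from_dict() implies a JSON document shaped roughly like the Python literal below. Only the keys come from the code; every value is a hypothetical illustration:

    entrydict = {
        "slug": "2025-07-08a",         # becomes tid
        "date": "2025-07-08",          # ISO string, parsed with datetime.fromisoformat()
        "title": "Rigging trip",
        "cave": "1623-000",
        "place": "entrance series",
        "other_people": "a guest",     # becomes guests
        "time_underground": 5.5,       # becomes tu
        "text": "<p>trip report</p>",
        "author": {"slug": "fred-bloggs", "nickname": "Animal", "tu": 5.5},
        "trippersons": [
            {"slug": "fred-bloggs", "nickname": "Animal", "tu": 5.5},
        ],
        "expedition": {"year": 2025, "name": "CUCC expo 2025"},
    }
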
@@ -724,7 +740,7 @@ def parse_logbook_for_expedition(expedition, blog=False):
         print(f" # WARNING year {year} has JSON-encoded logbook entries. Using these instead of the archive .html file.")
         logentries = load_from_json()

-        logentries = []
+        logentries = [] # but don't actually use these.
         # check_number_of_entries(logentries)
         # return logentries

@@ -828,13 +844,6 @@ def LoadLogbook(year):
         )
     # Bulk store all entries at once
     bulk_store_entries(logentries)
-    #for entry in logentries:
-        #date, place, tripcave, triptitle, text, trippersons, author, guests, expedition, tu, tid = entrytuple
-        #if expo == entry.expedition: # unneeded check, we zeroed it before filling it
-        #    print(f" -- {triptitle}")
-        #store_entry_into_database(entry)
-        #else:
-        #print(f" ! unexpected log entry labelled as '{entry.expedition}' {entry.tid}" )
     expo.save() # to save logbook name property

 def LoadLogbooks():