Mirror of https://expo.survex.com/repositories/troggle/.git, synced 2024-11-25 08:41:51 +00:00
remove cache and setdatesfromlogbook

commit bcb61f9cd9 (parent 4260b0f092)
@@ -214,6 +214,7 @@ def logbookentry(request, date, slug):
svxothers = None

this_logbookentry=this_logbookentry[0]
# This is the only page that uses presontrip_next and persontrip_prev
return render(request, 'logbookentry.html',
{'logbookentry': this_logbookentry, 'trips': trips, 'svxothers': svxothers, 'wallets': wallets})
else:
@@ -24,8 +24,6 @@ Parses and imports logbooks in all their wonderful confusion
# it can be checked up later from the hard-copy if necessary; or it's not possible to determin (name, trip place, etc)
'''
todo='''
- Use the .shelve.db cache for all logbooks, not just individually

- refactor everything with some urgency, esp. LoadLogbookForExpedition()

- profile the code to find bad repetitive things, of which there are many.
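The todo list above asks for the .shelve.db cache to cover all logbooks, not just individual years. Purely as an illustrative sketch (not troggle code), Python's standard shelve module could key one shared cache by year; the cache path and the parse_logbook callable are hypothetical.

import shelve
from pathlib import Path

CACHE_PATH = "/tmp/logbooks.shelve"  # hypothetical location for a single shared cache

def cached_logentries(year, logbookpath, parse_logbook):
    """Return parsed entries for one year, re-parsing only when the logbook file changed.

    parse_logbook(path) is a hypothetical callable returning the list of entry tuples.
    """
    stamp = Path(logbookpath).stat().st_mtime
    with shelve.open(CACHE_PATH) as db:
        cached = db.get(str(year))
        if cached and cached["mtime"] == stamp:
            return cached["entries"]          # cache hit: file unchanged since last parse
        entries = parse_logbook(logbookpath)  # cache miss: parse and store under this year
        db[str(year)] = {"mtime": stamp, "entries": entries}
        return entries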
@@ -46,9 +44,6 @@ todo='''

- this is a slow and uncertain function: cave = getCaveByReference(caveRef)


- DB lock currently prevents multiple threads for loading logbooks. But asyncio might work..?

'''

logentries = [] # the entire logbook for one year is a single object: a list of entries
@@ -120,7 +115,7 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
Does NOT save the expeditionday_id - all NULLs. why? Because we are deprecating expeditionday !

troggle.log shows that we are creating lots of duplicates, which is no no problem with SQL as they just overwrite
but we are saving the same thing too many times.. Also seen in the ObjStore mimic
but we are saving the same thing too many times..
"""
try:
trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground, tid=tid)
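The docstring above notes that duplicate saves only overwrite each other, so nothing breaks, but the same entry is written many times. A minimal, hypothetical way to cut the repeats before they reach EnterLogIntoDbase is to de-duplicate on the trip id, which is the last element of each entry tuple as unpacked in the loop further down; this helper is a sketch, not part of the commit.

def dedupe_entries(logentries):
    """Sketch only: drop entry tuples whose trip id has already been queued."""
    seen = set()
    unique = []
    for entrytuple in logentries:
        tripid = entrytuple[-1]   # tripid1 is the final element of each tuple
        if tripid in seen:
            continue              # identical trip already queued: skip the redundant save
        seen.add(tripid)
        unique.append(entrytuple)
    return unique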
@@ -471,7 +466,15 @@ def Parseloghtml03(year, expedition, txt):
logentries.append(entrytuple)

def SetDatesFromLogbookEntries(expedition):
"""
"""Sets the next and previous entry for a persontrip by setting
persontrip_prev
persontrip_next
for each persontrip instance.

This is ONLY needed when a logbook entry is displayed. So could be called lazily
only when one of these entries is requested.

It does NOT do what the docstring says here:
Sets the date_from and date_to field for an expedition based on persontrips.
Then sets the expedition date_from and date_to based on the personexpeditions.
"""
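The replacement docstring says persontrip_prev and persontrip_next are only needed when a single logbook entry is displayed, so they could be computed lazily. A minimal sketch of that idea in plain Python, assuming the person's trips are already sorted by date (the real troggle models and field names may differ):

def neighbours(trips, index):
    """Sketch: compute (persontrip_prev, persontrip_next) for trips[index] on demand."""
    prev_trip = trips[index - 1] if index > 0 else None
    next_trip = trips[index + 1] if index < len(trips) - 1 else None
    return prev_trip, next_trip

Only the page that renders a single entry would call something like this, so nothing needs to be precomputed or stored during the bulk import.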
@@ -561,7 +564,7 @@ def LoadLogbookForExpedition(expedition):

expedition.save()
logbook_cached = False
if True: # enable cache system
if False: # enable cache system
now = time.time()
bad_cache = False # temporarily disable reading the cache - buggy
try:
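The flag flipped to False above is what switches the pickle cache off. For orientation only, a hedged sketch of what a cache reader has to validate, based on the (expedition, lbsize, len(logentries), logentries) tuple written by the pickle.dump() call visible further down; none of this is the actual troggle implementation.

import pickle
from pathlib import Path

def load_cached_logbook(cache_filename, logbookpath):
    """Sketch only: return cached logentries if the pickle still matches the logbook file."""
    try:
        with open(cache_filename, "rb") as fc:
            expedition, lbsize, n, logentries = pickle.load(fc)
    except (OSError, pickle.UnpicklingError, ValueError):
        return None                                    # unreadable or stale-format cache
    if lbsize != Path(logbookpath).stat().st_size:
        return None                                    # logbook edited since the cache was written
    if n != len(logentries):
        return None                                    # inconsistent cache contents
    return logentries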
@@ -603,46 +606,59 @@ def LoadLogbookForExpedition(expedition):
except (IOError):
logbook_parseable = False
print(" ! Couldn't open logbook as UTF-8 " + logbookpath)
else:
try:
file_in = open(logbookpath,'rb')
txt = file_in.read().decode("utf-8")
file_in.close()
logbook_parseable = True
except (IOError):
logbook_parseable = False
print(" ! Couldn't open logbook as UTF-8 " + logbookpath)
except:
logbook_parseable = False
print(" ! Very Bad Error opening " + logbookpath)

if logbook_parseable:
parser = globals()[parsefunc]
print(f' - Using parser {parsefunc}')
print(f' - {year} parsing with {parsefunc}')
parser(year, expedition, txt) # this launches the right parser for this year

print(" - Setting dates from logbook entries")
SetDatesFromLogbookEntries(expedition)
if len(logentries) >0:
print(" - Cacheing " , len(logentries), " log entries")
lbsize = logbookpath.stat().st_size
with open(cache_filename, "wb") as fc: # we much check that permission are g+w ! or expo can't delete the cache
logbk=(expedition,lbsize,len(logentries),logentries)
pickle.dump(logbk, fc, protocol=4)
else:
print(" ! NO TRIP entries found in logbook, check the syntax.")
# SetDatesFromLogbookEntries(expedition)
# if len(logentries) >0:
# print(" - Cacheing " , len(logentries), " log entries")
# lbsize = logbookpath.stat().st_size
# with open(cache_filename, "wb") as fc: # we much check that permission are g+w ! or expo can't delete the cache
# logbk=(expedition,lbsize,len(logentries),logentries)
# pickle.dump(logbk, fc, protocol=4)
# else:
# print(" ! NO TRIP entries found in logbook, check the syntax.")

i=0
for entrytuple in logentries:
# date, tripcave, triptitle, text, trippeople, expedition, logtime_underground, tripid1 = entrytuple
try:
date, tripcave, triptitle, text, trippeople, expedition, logtime_underground, tripid1 = entrytuple
except ValueError: # cope with removal of entry_type but still in cache files. Remove in Sept. 2022.
except ValueError: # cope with removal of entry_type but still in cache files. Remove in Dec. 2022.
date, tripcave, triptitle, text, trippeople, expedition, logtime_underground, entry_type, tripid1 = entrytuple
EnterLogIntoDbase(date, tripcave, triptitle, text, trippeople, expedition, 0,
tripid1)
i +=1
SetDatesFromLogbookEntries(expedition)
# SetDatesFromLogbookEntries(expedition)

if len(logentries) == expect:
# print(f"OK {year} {len(logentries):5d} is {expect}\n")
pass
else:
print(f"BAD {year} {len(logentries):5d} is not {expect}\n")
print(f"Mismatch {year} {len(logentries):5d} is not {expect}\n")

return len(logentries)

def LoadLogbooks():
""" This is the master function for parsing all logbooks into the Troggle database.
Parser settings appropriate for each year are set in settings.py LOGBOOK_PARSER_SETTINGS.
This should be rewritten to use coroutines to load all logbooks from disc in parallel.
This should be rewritten to use coroutines to load all logbooks from disc in parallel,
but must be serialised to write to database as sqlite is single-user.
"""
global logdataissues
global entries
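The revised LoadLogbooks docstring wants the file reads done in parallel while the database writes stay serialised, because sqlite is single-user. A hedged sketch of that split, with hypothetical read_logbook_file and write_entries_to_db helpers (troggle might equally use asyncio, as the todo list above suggests):

from concurrent.futures import ThreadPoolExecutor

def load_all_logbooks(expeditions, read_logbook_file, write_entries_to_db):
    expeditions = list(expeditions)

    # Parallel part: file I/O only, no database access.
    with ThreadPoolExecutor() as pool:
        texts = dict(zip(expeditions, pool.map(read_logbook_file, expeditions)))

    # Serial part: one writer at a time keeps the single-user sqlite database happy.
    for expedition in expeditions:
        write_entries_to_db(expedition, texts[expedition])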