2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2026-02-08 11:28:23 +00:00

working, moved stuff around a bit, UUID

This commit is contained in:
2026-01-29 11:35:54 +00:00
parent 3783b49162
commit 8455f39809
2 changed files with 208 additions and 113 deletions

View File

@@ -1,3 +1,4 @@
import uuid
import math
import os
import re
@@ -218,6 +219,14 @@ class SurvexBlock(models.Model):
Blocks can span several *included survexfile though.
"""
# This ID is generated as soon as you call SurvexBlock((). So we can use it while assembling the data
# into the survexblock without having to keep doing a database transaction
_blockid = models.UUIDField(
primary_key=False,
default=uuid.uuid4,
editable=False
)
objects = SurvexBlockLookUpManager() # overwrites SurvexBlock.objects and enables lookup()
name = models.CharField(blank=True, max_length=100)
title = models.CharField(blank=True, max_length=200)

View File

@@ -80,9 +80,9 @@ dataissues = set()
# Caches for ORM minimization
survexblock_cache = None # {scanswallet_id: [SurvexBlock, ...]}
personrole_cache = None # {survexblock_id: [SurvexPersonRole, ...]}
personrole_cache = None # {survexblock._blockid: [SurvexPersonRole, ...]}
wallet_cache = None # {walletname: [Wallet, ...]}
trip_people_cache = {} # indexed by survexblock, so never needs cleaning out
trip_people_cache = {} # indexed by survexblock._blockid, so never needs cleaning out
class SurvexLeg:
"""No longer a models.Model subclass, so no longer a database table"""
@@ -152,6 +152,10 @@ def store_data_issues():
parser, message, url, sb = issue
if url is None:
if sb is not None:
try:
url = get_offending_filename(sb.survexfile)
except Exception as e:
print(f" ! store_data_issues() {e} '{sb=}' -- '{url=}'", file=sys.stderr)
url = get_offending_filename(sb)
di_list.append(DataIssue(parser=parser, message=message, url=url))
# Now commit to db
@@ -166,20 +170,28 @@ def get_offending_filename(path):
def get_team_on_trip(survexblock):
"""Uses a cache to avoid a database query if it doesn't need to. Only used for complete team."""
"""Uses a cache to avoid a database query if it doesn't need to. Only used for complete team.
Should personrole_cache be a set() ?
This all seems a bit baroque, can we refactor it please?
"""
global trip_people_cache, personrole_cache
if personrole_cache is None:
# Build cache: {survexblock_id: [SurvexPersonRole, ...]}
# Build cache: {survexblock._blockid: [SurvexPersonRole, ...]}
personrole_cache = {}
for pr in SurvexPersonRole.objects.all().select_related("person", "personexpedition"):
if pr.survexblock_id not in personrole_cache:
personrole_cache[pr.survexblock_id] = []
personrole_cache[pr.survexblock_id].append(pr)
if survexblock in trip_people_cache:
if len(trip_people_cache[survexblock]) > 0:
return trip_people_cache[survexblock]
qpeople = personrole_cache.get(survexblock.id, [])
trip_people_cache[survexblock] = qpeople
for pr in SurvexPersonRole.objects.all().select_related("person", "personexpedition"): # WASTEFUL ! Optimise this
if pr.survexblock._blockid not in personrole_cache:
personrole_cache[pr.survexblock._blockid] = []
personrole_cache[pr.survexblock._blockid].append(pr)
# print(f" PR {pr} {survexblock._blockid }", file=sys.stderr)
if survexblock._blockid in trip_people_cache:
if len(trip_people_cache[survexblock._blockid]) > 0:
return trip_people_cache[survexblock._blockid]
qpeople = personrole_cache.get(survexblock._blockid, [])
trip_people_cache[survexblock._blockid] = qpeople
return qpeople
def get_people_on_trip(survexblock):
@@ -196,14 +208,14 @@ def get_people_on_trip(survexblock):
# THIS SHOULD NOT BE GLOBAL ! Should be per instance of file loader, even though they are globally unique
trip_person_record = {} # a dict indexed by tuples (survexblock, personexpedition) = 1
trip_team_cache = {} # a dict of lists indexed by survexblock
trip_team_cache = {} # a dict of lists indexed by survexblock._blockid
def put_person_on_trip(survexblock, personexpedition, tm):
"""Uses a cache to avoid a database query if it doesn't need to.
Only used for a single person"""
global trip_person_record
global trip_team_cache
if (survexblock, personexpedition) in trip_person_record:
if (survexblock._blockid, personexpedition) in trip_person_record:
return True
try:
@@ -220,54 +232,43 @@ def put_person_on_trip(survexblock, personexpedition, tm):
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
if survexblock not in trip_team_cache:
trip_team_cache[survexblock] = []
trip_team_cache[survexblock].append(personrole)
print(f"-- trip_team_cache {survexblock}, {trip_team_cache[survexblock]}, {personrole}")
if survexblock._blockid not in trip_team_cache:
trip_team_cache[survexblock._blockid] = []
trip_team_cache[survexblock._blockid].append(personrole)
# print(f"-- trip_team_cache\n -- {survexblock=} - {survexblock._blockid}\n -- {trip_team_cache[survexblock._blockid]}\n -- {personrole}", file=sys.stderr)
trip_person_record[(survexblock, personexpedition)] = 1
trip_person_record[(survexblock._blockid, personexpedition)] = 1
return False
def confirm_team_on_trip(survexblock):
global trip_team_cache
def hack_save(survexblock):
# #### Horrible hack to be properly written as a cache
sb_list =[]
sb = survexblock
while sb.parent and sb != sb.parent:
sb_list.append((sb._blockid, sb))
sb = sb.parent
# print(sb_list, file=sys.stderr)
if survexblock not in trip_team_cache:
return
# Now commit to db
SurvexPersonRole.objects.bulk_create(trip_team_cache[survexblock])
trip_team_cache[survexblock] = [] # in database now, so empty cache
sb_list.reverse()
for sbo in sb_list:
id, sb = sbo
sb.save()
# #### Horrible hack to be properly written as a cache
# Debug helper: warn (to both stdout and stderr) about any survexblocks still
# holding unflushed *team entries in the module-level trip_team_cache.
# NOTE(review): elsewhere in this commit the cache is re-keyed on
# survexblock._blockid (a UUID); if that applies here too, 'block' is a UUID
# and 'block.survexfile.path' would raise AttributeError -- confirm which
# key type this module-level variant actually sees.
def check_team_cache(label=None):
global trip_team_cache
message = f"! check_team_cache() called.. "
print(message)
print(message, file=sys.stderr)
for block in trip_team_cache:
message = f"! *team CACHEFAIL, trip_team_cache {block.survexfile.path} ({block}). label:{label}"
print(message)
print(message, file=sys.stderr)
person_pending_cache = {} # indexed per survexblock, so robust wrt PUSH/POP begin/end
def add_to_pending(survexblock, tm):
"""Collects team names before we have a date so cannot validate against
expo attendance yet"""
global person_pending_cache
def blockid_raw(survexfile, name):
    """Return the composite id string "<survexfile>-<name>".

    Returns False when either component is empty or None (mirrors the
    truthiness check used throughout this parser).
    """
    if not (survexfile and name):
        return False
    return f"{survexfile}-{name}"
if survexblock not in person_pending_cache:
person_pending_cache[survexblock] = set()
person_pending_cache[survexblock].add(tm)
print(f"-- person_pending_cache {survexblock}, {person_pending_cache[survexblock]}, {tm}")
def blockid(survexblock):
"""When parsing all the survex file we need to maintain a number of caches as we want to
hit the database with updates only when we have collected all the data.
def get_team_pending(survexblock):
"""A set of *team names added at the end of the survex block
But since we have not saved to the database, we don't have a unique survexblock.id that we can
use. So we have to roll our own.
"""
global person_pending_cache
if survexblock in person_pending_cache:
teamnames = person_pending_cache[survexblock] # a set of names
person_pending_cache[survexblock] = set() #re zero the cache
return teamnames
return
return survexblock._blockid # new UUID
class LoadingSurvex:
"""A 'survex block' is a *begin...*end set of cave data.
@@ -489,6 +490,8 @@ class LoadingSurvex:
inheritdate = None
pending = []
adhocload = False
person_pending_cache = {} # indexed per survexblock UUID, so robust wrt PUSH/POP begin/end
def __init__(self):
self.caveslist = GetCaveLookup()
@@ -517,8 +520,78 @@ class LoadingSurvex:
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
def confirm_team_on_trip(self, survexblock):
    """Flush the cached *team SurvexPersonRole objects for this block to the db.

    Only called when processing a *end statement.  Each cached role is
    validated with full_clean() first; invalid ones are reported (stdout and
    stderr) and skipped, the remainder are bulk_created in one query.
    """
    global trip_team_cache
    if survexblock._blockid not in trip_team_cache:
        return
    #### STRIP THIS OUT and cache the SurvexPersonRole for the end of the survex block !
    hack_save(survexblock)
    # Now commit to db
    pr_list = trip_team_cache[survexblock._blockid]
    valid_list = []
    for pr in pr_list:
        try:
            pr.full_clean()
            valid_list.append(pr)
        except ValidationError as e:
            print(f" ! PR is invalid: {e} {survexblock} {pr}", file=sys.stderr)
            print(f" ! PR is invalid: {e} {survexblock} {pr}")
    SurvexPersonRole.objects.bulk_create(valid_list)
    # BUG FIX: the cache entry was previously emptied under the SurvexBlock
    # object itself rather than its _blockid key, which both left the real
    # entry populated (tripping check_team_cache) and polluted the dict with
    # a bogus key.  Clear the entry we actually read from.
    trip_team_cache[survexblock._blockid] = []  # in database now, so empty cache
def check_team_cache(self, label=None):
    """Report (stdout and stderr) any cached *team entries never flushed to the db.

    BUG FIX: trip_team_cache is keyed on survexblock._blockid (a UUID), so
    the old report's 'block.survexfile.path' raised AttributeError on every
    key.  Report the key and its cached roles instead, and only complain
    about entries that are actually non-empty.
    """
    global trip_team_cache
    message = f"! check_team_cache() called.. "
    print(message)
    print(message, file=sys.stderr)
    for blkid in trip_team_cache:
        if trip_team_cache[blkid]:
            message = f"! *team CACHEFAIL, trip_team_cache {blkid} ({trip_team_cache[blkid]}). label:{label}"
            print(message)
            print(message, file=sys.stderr)
def add_to_pending(self, survexblock, tm):
    """Collect team name 'tm' seen before the block has a *date, so it
    cannot yet be validated against expo attendance.

    Keyed on survexblock._blockid (the UUID minted at SurvexBlock
    construction) because the block has no database .id at this point.
    BUG FIX: dropped the stale 'global person_pending_cache' declaration --
    this method uses the per-instance self.person_pending_cache, not the
    module-level global of the same name.
    """
    self.person_pending_cache.setdefault(survexblock._blockid, set()).add(tm)
    print(f"-- person_pending_cache {survexblock}, {self.person_pending_cache[survexblock._blockid]}, {tm}")
def get_team_pending(self, survexblock):
    """Return (and clear) the set of *team names cached for this block.

    BUG FIX: the body referenced the module-level function name 'blockid'
    and the bare module cache 'person_pending_cache' instead of the
    parameter and self.person_pending_cache, so it could never find
    anything (and would NameError if it did).
    NOTE(review): the caller passes survexblock._blockid (a UUID), so the
    cache is keyed on the parameter value directly -- confirm and consider
    renaming the parameter.
    Returns None when nothing is pending.
    """
    if survexblock in self.person_pending_cache:
        teamnames = self.person_pending_cache[survexblock]  # a set of names
        self.person_pending_cache[survexblock] = set()  # re-zero the cache
        return teamnames
    return None
def check_cache_clean(self):
    """At end of parsing, flag any pending *team name sets never consumed
    (i.e. a block ended without its team being confirmed against a *date).

    BUG FIX: the loop iterated self.person_pending_cache but then indexed
    the bare module-level 'person_pending_cache', so it inspected the wrong
    (or an undefined) dict.  Use the instance cache throughout.
    """
    for sbid in self.person_pending_cache:
        pending = self.person_pending_cache[sbid]
        if len(pending) > 0:
            print(" ")
            message = f" ! PENDING team list not emptied {sbid} {len(pending)} people: {pending}"
            stash_data_issue(parser="survex", message=message, url=None)  # , sb=(sbid)
            print(message)
def get_team_inherited(self, survexblock): # survexblock only used for debug mesgs
"""See get_team_pending(survexblock) which gets called at the same time,
"""See get_team_pending(survexblock._blockid) which gets called at the same time,
when we see a *date line"""
global person_pending_cache
@@ -641,6 +714,10 @@ class LoadingSurvex:
put_person_on_trip(survexblock, personexpedition, tm)
return
def cache_survexblock(self, survexblock):
    """Queue this survexblock for the bulk save performed at end of load.

    Entries are grouped by survexblock._blockid (UUID) since the block may
    not yet have a database id; creates the list on first use.
    """
    key = survexblock._blockid
    if key not in self._pending_block_saves:
        self._pending_block_saves[key] = []
    self._pending_block_saves[key].append(survexblock)
def LoadSurvexTeam(self, survexblock, line):
"""Interpeting the *team fields has been updated to current 2025 survex standard,
*team Insts Anthony Day - this is how most of our files used to specify the team member
@@ -650,6 +727,9 @@ class LoadingSurvex:
personrole is used to record that a person was on a survex trip, NOT the role they played.
(NB PersonLogEntry is a logbook thing, not a survex thing. )
DONT do this here. Just collect the members, but wait until the *end before we use
the list to create anything in the db
"""
def record_team_member(tm, survexblock):
@@ -688,13 +768,14 @@ class LoadingSurvex:
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
else:
add_to_pending(survexblock, tm)
self.add_to_pending(survexblock, tm)
# don't know the date yet, so cannot query the table about validity.
# assume the person is valid. It will get picked up with the *date appears
# There are hundreds of these..
message = (
f"- Team before Date: {line} ({survexblock}) {survexblock.survexfile.path}"
)
# teamfix = r"(?i)(.*?)\s+" + roles + r"?(?:es|s)?$" -- (.*?) means a non-greedy capture
if fixstyle := self.rx_teamfix.match(line): # matches the optional role at the the end of the string WALRUS
tmlist = fixstyle.group(1).strip('\"') # remove quotes, if present
@@ -747,7 +828,11 @@ class LoadingSurvex:
def LoadSurvexFix(self, survexblock, line):
"""*fix is a station geolocation, units depend on a previous *cs setting
NOTE 'line' is not the full line, it is 'arg' and the comments have been stripped !
SO we have to recognise the '*fix' too
So we have to recognise the '*fix' too
Note that the cache self.fixes would simply use survexblock.id as a key,
but at this point in the parsing we have not yet saved survexblock to the db so
survexblock.id is not available.
"""
# *fix|36|reference|36359.40|82216.08|2000.00\n
# *fix|36|36359.40|82216.08|2000.00\n
@@ -762,10 +847,14 @@ class LoadingSurvex:
# \s+([\d\.]*) # Capture group 4: yet another number (digits and periods)
# \s*;? # Optional whitespace and optional semicolon
# (.*)$ # Capture group 5: remainder of the line (any characters), a comment
fixid = blockid(survexblock)
rx_fixline = re.compile(r"(?i)^\s*[*]fix\s+([\w\d_\.\-]+)\s+(?:reference)?\s*([\d\.]*)\s+([\d\.]*)\s+([\d\.]*)\s*;?(.*)$")
line = line.replace("\n","")
#fixline = self.rx_fixline.match(line)
fixline = rx_fixline.match(line)
if not fixline:
display = line.replace(" ","|")
@@ -777,8 +866,8 @@ class LoadingSurvex:
#print(fixline.group(1), fixline.group(5))
#print(f"'{line}'")
name = fixdata[0]
if (survexblock, name) in self.fixes:
message = f"! Duplicate *FIX: id '{line}' ({survexblock}) {survexblock.survexfile.path}"
if fixid in self.fixes:
message = f"! Duplicate *FIX: id '{line}' '{fixid}' ({survexblock}) {survexblock.survexfile.path} "
print(self.insp + message)
stash_data_issue(parser="survex", message=message)
@@ -788,7 +877,7 @@ class LoadingSurvex:
try:
#_, _, alt, *rest = (fixdata + [None]*5)[:5]
name, _, _, alt, comment = (list(fixdata) + [None]*5)[:5]
fixid = str(survexblock.id)+ ":"+ name
self.fixes[fixid] = (survexblock, name, alt, comment)
message = f"{name}, {fixdata=}, last:{fixline.groups()[-1]}"
except Exception as e:
@@ -928,7 +1017,7 @@ class LoadingSurvex:
print(self.insp + message)
stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))
if teamnames := get_team_pending(survexblock):
if teamnames := self.get_team_pending(survexblock._blockid):
for tm in teamnames:
if known_foreigner(tm):
message = f"- *team {expo.year} '{tm}' known foreigner *date (misordered) {survexblock.survexfile.path} ({survexblock}) in '{line}'"
@@ -1030,7 +1119,7 @@ class LoadingSurvex:
if survexblock.date:
# do not actually need a distinct variable 'currentdate' but it makes the code clearer
self.currentdate = survexblock.date
survexblock.save()
# survexblock.save()
def LoadSurvexLeg(self, survexblock, sline, comment, svxline):
"""This reads compass, clino and tape data but only keeps the tape lengths,
@@ -1241,7 +1330,7 @@ class LoadingSurvex:
Currently this just sets a flag that the survex block is not CUCC
"""
survexblock.foreigners = True
survexblock.save(update_fields=["foreigners"])
# survexblock.save(update_fields=["foreigners"])
def LoadSurvexRef(self, survexblock, args):
"""Interpret the *ref record, and all the many variants
@@ -1272,7 +1361,7 @@ class LoadingSurvex:
if reftxt:
# only store it if not an empty string
survexblock.ref_text = reftxt[:399] # truncate or MariaDB crashes on databaseReset !
survexblock.save()
# survexblock.save()
return
if len(args) < 4:
@@ -1339,7 +1428,7 @@ class LoadingSurvex:
if manywallets[0]:
survexblock.scanswallet = manywallets[0] # this is a ForeignKey field
# Only save if changed
survexblock.save(update_fields=["scanswallet"])
# survexblock.save(update_fields=["scanswallet"])
# This is where we should check that the wallet JSON contains a link to the survexfile
# and that the JSON date and walletdate are set correctly to the survexblock date.
set_walletdate(survexblock.scanswallet)
@@ -1715,7 +1804,9 @@ class LoadingSurvex:
print(f">> why is survexblock not set ?! in LoadSurvexQM()/n {survexblock.survexfile.path}")
expoyear = settings.EPOCH.year # 1970
### HORRIBLE HACK, replace with cache
hack_save(survexblock)
### HORRIBLE HACK, replace with cache
try:
qm = QM.objects.create(
@@ -1827,7 +1918,7 @@ class LoadingSurvex:
Loads the begin/end blocks using a stack for labels.
Uses the python generator idiom to avoid loading the whole file (21MB) into memory.
"""
blkid = None
blk_name = None
pathlist = None
args = None
oldflags = None
@@ -1837,7 +1928,7 @@ class LoadingSurvex:
nlegstotal = 0
self.relativefilename = path
self._pending_block_saves = set() # Cache for survex blocks to save at the end
self._pending_block_saves = {} # Cache for survex blocks to save at the end
#self.IdentifyCave(path, svxid, depth) # this will produce null for survex files which are geographic collections
self.currentsurvexfile = survexblock.survexfile
@@ -1862,13 +1953,13 @@ class LoadingSurvex:
sys.stderr.flush()
def printbegin():
nonlocal blkid
nonlocal blk_name
nonlocal pathlist
depth = " " * self.depthbegin
self.insp = depth
if debugprint:
print(f"{self.depthbegin:2}{depth} - Begin for :'{blkid}'")
print(f"{self.depthbegin:2}{depth} - Begin for :'{blk_name}'")
pathlist = ""
for id in self.stackbegin:
if len(id) > 0:
@@ -1887,9 +1978,9 @@ class LoadingSurvex:
)
def pushblock():
nonlocal blkid
nonlocal blk_name
if debugprint:
print(f" # datastack at 1 *begin {blkid} 'type':", end="")
print(f" # datastack at 1 *begin {blk_name} 'type':", end="")
for dict in self.datastack:
print(f"'{dict['type'].upper()}' ", end="")
print("")
@@ -1898,7 +1989,7 @@ class LoadingSurvex:
self.datastack.append(copy.deepcopy(self.datastar))
# ------------ * DATA
if debugprint:
print(f" # datastack at 2 *begin {blkid} 'type':", end="")
print(f" # datastack at 2 *begin {blk_name} 'type':", end="")
for dict in self.datastack:
print(f"'{dict['type'].upper()}' ", end="")
print("")
@@ -1910,10 +2001,10 @@ class LoadingSurvex:
pass
def popblock():
nonlocal blkid
nonlocal blk_name
nonlocal oldflags
if debugprint:
print(f" # datastack at *end '{blkid} 'type':", end="")
print(f" # datastack at *end '{blk_name} 'type':", end="")
for dict in self.datastack:
print(f"'{dict['type'].upper()}' ", end="")
print("")
@@ -1922,7 +2013,7 @@ class LoadingSurvex:
self.datastar = copy.deepcopy(self.datastack.pop())
# ------------ * DATA
if debugprint:
print(f" # datastack after *end '{blkid} 'type':", end="")
print(f" # datastack after *end '{blk_name} 'type':", end="")
for dict in self.datastack:
print(f"'{dict['type'].upper()}' ", end="")
print("")
@@ -1940,7 +2031,7 @@ class LoadingSurvex:
# ...existing code...
"""Interprets a survex comamnd where * is the first character on the line, e.g. *begin"""
nonlocal survexblock
nonlocal blkid
nonlocal blk_name
nonlocal pathlist
nonlocal args
nonlocal oldflags
@@ -1953,10 +2044,10 @@ class LoadingSurvex:
# ------------------------BEGIN
if self.rx_begin.match(cmd):
t_block = time.perf_counter()
blkid = args.lower()
blk_name = args.lower()
# PUSH state ++++++++++++++
self.depthbegin += 1
self.stackbegin.append(blkid)
self.stackbegin.append(blk_name)
self.unitsstack.append((self.units, self.unitsfactor))
self.legsnumberstack.append(self.legsnumber)
self.slengthstack.append(self.slength)
@@ -1974,8 +2065,11 @@ class LoadingSurvex:
self.inheritdate = self.currentdate
self.currentdate = None # zero the current date when we start a new block
printbegin()
# creating the SurvexBlock automatically creates a UUID in ._blockid
# Note that this does not create it in the database
newsurvexblock = SurvexBlock(
name=blkid,
name=blk_name,
parent=survexblock,
survexfile=self.currentsurvexfile,
legsall=0,
@@ -1986,7 +2080,7 @@ class LoadingSurvex:
"(" + survexblock.title + ")"
) # copy parent inititally, overwrite if it has its own
survexblock = newsurvexblock
survexblock.save() # Only save once, after all fields are set
survexblock.save() # Only save once, after all fields are set, or try to delay until *end using caches
tickle()
# ---------------------------END
@@ -2000,18 +2094,8 @@ class LoadingSurvex:
self.fix_undated(survexblock)
self.fix_anonymous(survexblock)
# This is the most time-consuming step within *end processing: was 47%
# Instead of saving parent here, cache for later
if hasattr(survexblock, 'parent') and survexblock.parent:
self._pending_block_saves.add(survexblock)
try:
# This is the second most time-consuming step within *end processing: was 35%
self._pending_block_saves.add(survexblock)
# update_fields=["legsall", "legslength"]
except Exception:
print(f"{survexblock=}", file=sys.stderr)
raise
confirm_team_on_trip(survexblock)
self.confirm_team_on_trip(survexblock)
self.cache_survexblock(survexblock)
# POP state ++++++++++++++
popblock()
self.inheritteam = self.teaminheritstack.pop()
@@ -2021,7 +2105,7 @@ class LoadingSurvex:
self.legsnumber = self.legsnumberstack.pop()
self.units, self.unitsfactor = self.unitsstack.pop()
self.slength = self.slengthstack.pop()
blkid = self.stackbegin.pop()
blk_name = self.stackbegin.pop()
self.currentsurvexblock = survexblock.parent
survexblock = survexblock.parent
oldflags = self.flagsstar
@@ -2099,7 +2183,7 @@ class LoadingSurvex:
if mfail:
message = f"\n ! - ERROR version control merge failure\n - '{sline}'\n"
message = (
message + f" - line {self.lineno} in {blkid} in {survexblock}\n - NERD++ needed to fix it"
message + f" - line {self.lineno} in {blk_name} in {survexblock}\n - NERD++ needed to fix it"
)
print(message)
print(message, file=sys.stderr)
@@ -2122,7 +2206,10 @@ class LoadingSurvex:
# At the end, save all cached survexblocks using bulk_update
blocks = list(getattr(self, '_pending_block_saves', set()))
blocks = []
for blockid in self._pending_block_saves:
blocks.append(self._pending_block_saves[blockid])
# blocks = list(getattr(self, '_pending_block_saves', set()))
if blocks:
# valid_blocks = []
# for block in blocks:
@@ -2522,6 +2609,9 @@ def FindAndLoadSurvex():
flinear.write(f"{svx_scan.depthinclude:2} {indent} *edulcni {survexfileroot.path}\n")
io_collate.write(f";*edulcni {survexfileroot.path}\n")
svx_scan.check_cache_clean()
mem1 = get_process_memory()
flinear.write(f"\n - MEM:{mem1:.2f} MB STOP {survexfileroot.path}\n")
flinear.write(f" - MEM:{mem1 - mem0:.3f} MB ADDITIONALLY USED\n")
@@ -2637,6 +2727,8 @@ def FindAndLoadSurvex():
flinear.write(f"{omit_scan.depthinclude:2} {indent} *edulcni {unseensroot}\n")
io_collate.write(f";*edulcni {UNSEENS}\n")
omit_scan.check_cache_clean()
mem1 = get_process_memory()
flinear.write(f"\n - MEM:{mem1:.2f} MB STOP {UNSEENS} Unseen Oddments\n")
flinear.write(f" - MEM:{mem1 - mem0:.3f} MB ADDITIONALLY USED Unseen Oddments\n")
@@ -3139,13 +3231,7 @@ def LoadSurvexBlocks():
memend = get_process_memory()
print(f" - MEMORY start:{memstart:.3f} MB end:{memend:.3f} MB increase={memend - memstart:.3f} MB")
global person_pending_cache
for sb in person_pending_cache:
if len(person_pending_cache[sb]) > 0:
print(f" ")
message = f" ! PENDING team list not emptied {sb.survexfile.path} {len(person_pending_cache[sb])} people: {person_pending_cache[sb]}"
stash_data_issue(parser="survex", message=message, url=None, sb=(sb.survexfile.path))
print(message)
# duration = time.time() - start
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
store_data_issues()