2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2026-02-08 03:07:48 +00:00

Fixed weird QM issues, all working

This commit is contained in:
2026-01-30 03:32:15 +00:00
parent 3e203f992e
commit 2141dc1eba
3 changed files with 48 additions and 51 deletions

View File

@@ -251,7 +251,7 @@ class PersonLogEntry(TroggleModel):
class QM(TroggleModel):
"""This is based on qm.csv in trunk/expoweb/1623/204 which has the fields:
"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
All the stuff handling TICK QMs is INCOMPLETE
"""
@@ -274,13 +274,21 @@ class QM(TroggleModel):
ticked = models.BooleanField(default=False)
location_description = models.TextField(blank=True, null=True)
completion_description = models.TextField(blank=True, null=True)
# completion_date = models.DateField(blank=True, null=True) #never used in fact
nearest_station_name = models.CharField(max_length=200, blank=True, null=True)
resolution_station_name = models.CharField(max_length=200, blank=True, null=True)
area = models.CharField(max_length=100, blank=True, null=True)
page_ref = models.TextField(blank=True, null=True)
comment = models.TextField(blank=True, null=True)
loaded_from_csv = models.BooleanField(default=False)
class Meta:
constraints = [
models.UniqueConstraint(
fields=['cave', 'blockname', 'grade', 'number', 'expoyear'],
name='unique_qm_identity'
)
]
def __str__(self):
return f"{self.code()}"

View File

@@ -83,6 +83,7 @@ def parseCaveQMs(cave, inputFile, ticked=False):
QMnum = re.match(r".*?-\d*?-X?(?P<numb>\d*)", line[0]).group("numb")
newQM = QM() # creates python object, does not touch db yet
# newQM.found_by=placeholder
newQM.loaded_from_csv = True
newQM.number = QMnum
newQM.cave = caveid
newQM.expoyear = year
@@ -181,7 +182,8 @@ def parse_KH_QMs(kh, inputFile, ticked):
"cave": kh,
"grade": res["grade"],
}
otherAttribs = {
otherAttribs = {
"loaded_from_csv": True,
"ticked": ticked,
"page_ref": "",
"completion_description": completion,

View File

@@ -581,27 +581,11 @@ class LoadingSurvex:
blocks = []
for blockid in self._pending_block_saves:
blocks.append(self._pending_block_saves[blockid])
if blocks:
# valid_blocks = []
# bad_parents = 0
# for block in blocks:
# try:
# if block.parent:
# if block.parent not in already_saved_blocks:
# bad_parents += 1
# # print(f" Invalid parent id: {block.survexfile}::{block} -> {block.parent}", file=sys.stderr)
# # block.full_clean()
# valid_blocks.append(block)
# except ValidationError as e:
# print(f" ! Block {block} is invalid: {e}", file=sys.stderr)
# print(f" ! Block {block} is invalid: {e}")
# print(f"\n !! {bad_parents} as-yet invalid parent ids out of {len(blocks)} blocks. {len(valid_blocks)} valid blocks", file=sys.stderr)
if blocks:
topo_list = get_toposorted_blocks(blocks)
print(f"\n - {len(topo_list):,} survexblocks to save to db (topologically sorted)", file=sys.stderr)
print(f"\n - Saving {len(topo_list):,} survexblocks to save to db (topologically sorted)", file=sys.stderr)
safe_chunks = get_generational_chunks_optimized(topo_list)
safe_chunks = get_generational_chunks_optimized(topo_list)
try:
for i, chunk in enumerate(safe_chunks):
print(f" - Saving chunk {i+1} ({len(chunk)} blocks)...", file=sys.stderr)
@@ -618,20 +602,6 @@ class LoadingSurvex:
print(f"Failed at chunk {i+1}: {e}", file=sys.stderr)
return
try:
for i in range(0, len(blocks), 1):
valid_blocks[i].save()
except Exception as e:
print(f" !! Error in SINGLE create for survexblocks at {i}: {e}", file=sys.stderr)
return
# Nope, even topo-sorted, we can't know what batch size is suitable
# without some of the items being invalid
try:
for i in range(0, len(topo_list), BATCH_SIZE):
SurvexBlock.objects.bulk_create(topo_list[i:i+BATCH_SIZE])
except Exception as e:
print(f" !! Error in bulk_create for survexblocks at {i}: {e}", file=sys.stderr)
def put_personrole_on_trip(self, survexblock, personexpedition, tm):
"""
@@ -710,7 +680,7 @@ class LoadingSurvex:
"""This should be run only after all the survexblocks have
been saved to the database and so have _id that can be used as a ForeignKey
"""
# print(f" - Saving {len(self._pending_pr_saves)} SurvexPersonRoles to db", file=sys.stderr)
print(f" - Saving {len(self._pending_pr_saves):,} SurvexPersonRoles to db", file=sys.stderr)
for blk in self._pending_pr_saves:
# Now commit to db
@@ -738,7 +708,11 @@ class LoadingSurvex:
"""This should be run only after all the survexblocks have
been saved to the database and so have _id that can be used as a ForeignKey
"""
print(f" - Saving {len(self._pending_qm_saves)} QMs to db", file=sys.stderr)
qms = []
for blk in self._pending_qm_saves:
qm_list = self._pending_qm_saves[blk]
qms = qms + qm_list
print(f" - Saving {len(qms)} QMs to db", file=sys.stderr)
for blk in self._pending_qm_saves:
# Now commit to db
@@ -746,8 +720,10 @@ class LoadingSurvex:
# print(f" PR_LIST {pr_list} {blk}", file=sys.stderr)
valid_list = []
for qm in qm_list:
nqms = QM.objects.filter(cave=qm.cave, blockname=qm.blockname)
print(f"QM found a prior match {nqms}", file=sys.stderr)
# nqms = QM.objects.filter(
# cave=qm.cave, blockname=qm.blockname, grade=qm.grade,
# number=qm.number, expoyear=qm.expoyear)
# print(f"QM found a prior match {nqms}", file=sys.stderr)
try:
qm.full_clean()
valid_list.append(qm)
@@ -759,18 +735,23 @@ class LoadingSurvex:
message=message,
url=None, sb=survexblock,
)
qms = []
for blk in self._pending_qm_saves:
try:
for qm in self._pending_qm_saves[blk]:
qm.save()
qms.append(qm)
#qm.save()
except Exception as e:
pass
raise
# try:
# QM.objects.bulk_create(valid_list)
# except Exception as e:
# pass
# raise
try:
QM.objects.bulk_create(qms,
update_conflicts=True,
unique_fields=['cave', 'blockname', 'grade', 'number', 'expoyear'],
update_fields=['block', 'cave', 'blockname', 'grade', 'number', 'expoyear'] )
except Exception as e:
pass
raise
_pending_qm_saves = {} # in database now, so empty cache
@@ -2343,10 +2324,15 @@ class LoadingSurvex:
# At the end of the whole (concatenated) file, save all cached survexblocks using bulk_update
qms_svx = QM.objects.filter(loaded_from_csv=False) # the survex QMs
qms_csv = QM.objects.filter(loaded_from_csv=True) # the CSV QMs
print(f"\n - Currently {len(qms_svx)} survex QMs and {len(qms_csv)} CSV QMs", file=sys.stderr)
self.save_survexblocks_to_db()
self.save_personroles_to_db()
self.save_qms_to_db()
qms_n = QM.objects.all().count()
print(f" - Now {qms_n} QMs in total", file=sys.stderr)
def PushdownStackScan(self, survexblock, path, finname, flinear, io_collate):
"""Follows the *include links in all the survex files from the root file (usually 1623.svx)
@@ -3330,10 +3316,11 @@ def LoadSurvexBlocks():
SurvexFile.objects.all().delete()
SurvexPersonRole.objects.all().delete()
SurvexStation.objects.all().delete()
qms_to_go = QM.objects.filter(block__isnull=False)
print(f" - Flushing {len(qms_to_go)} previously loaded QMs")
qms_to_go .delete()
# QM.objects.all().delete()
qms_to_go = QM.objects.filter(loaded_from_csv=False) # the survex QMs, not the CSV QMs
qms_to_keep = QM.objects.filter(loaded_from_csv=True) # the CSV QMs, which we keep
print(f" - Flushing {len(qms_to_go)} previously loaded QMs (keeping {len(qms_to_keep)})")
qms_to_go.delete()
mem1 = get_process_memory()
print(f" - MEM:{mem1:7.2f} MB now. Foreign key objects loaded on deletion. ", file=sys.stderr)