diff --git a/parsers/survex.py b/parsers/survex.py
index cd331d1..fb4142a 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -1821,8 +1821,10 @@ class LoadingSurvex:
         nlegstotal = 0
         self.relativefilename = path
-        # Cache for parent blocks to save at the end
+        # Cache for parent survex blocks to save at the end
         self._pending_parent_saves = set()
+        # Cache for survexblocks to save at the end (legsall/legslength)
+        self._pending_block_saves = set()
         #self.IdentifyCave(path, svxid, depth)  # this will produce null for survex files which are geographic collections
         self.currentsurvexfile = survexblock.survexfile
@@ -2106,14 +2108,23 @@ class LoadingSurvex:
         self.legsnumber = nlegstotal
         self.slength = slengthtotal
-        # At the end, save all cached parent blocks
-        for parent in getattr(self, '_pending_parent_saves', set()):
-            try:
-                parent.save(update_fields=None)
-            except Exception as e:
-                print(f"Error saving parent block {parent}: {e}", file=sys.stderr)
-        # ...timing removed...
+        # At the end, save all cached survexblocks using bulk_update
+        blocks = list(getattr(self, '_pending_block_saves', set()))
+        if blocks:
+            try:
+                SurvexBlock.objects.bulk_update(blocks, ["legsall", "legslength"])
+            except Exception as e:
+                print(f"Error in bulk_update for survexblocks: {e}", file=sys.stderr)
+
+        # Then save all cached parent survexblocks using bulk_update
+        parents = list(getattr(self, '_pending_parent_saves', set()))
+        if parents:
+            try:
+                # bulk_update() requires an explicit field list, so update every concrete field except the pk
+                SurvexBlock.objects.bulk_update(parents, [f.name for f in SurvexBlock._meta.fields if f.name != 'id'])
+            except Exception as e:
+                print(f"Error in bulk_update for parent blocks: {e}", file=sys.stderr)

     def PushdownStackScan(self, survexblock, path, finname, flinear, io_collate):
         """Follows the *include links in all the survex files from the root file (usually 1623.svx)
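
A minimal sketch of the deferred-save pattern the second hunk implements, assuming a hypothetical myapp.models.Block model with legsall and legslength fields standing in for troggle's SurvexBlock: objects mutated during the parse are collected in a set and flushed with one bulk_update() call at the end instead of a save() per object.

# Sketch only: `myapp`, `Block`, `Loader`, `count_leg` and `flush` are hypothetical names,
# not troggle code; they illustrate the accumulate-then-bulk_update pattern.
from myapp.models import Block  # assumed Django model with legsall / legslength fields

class Loader:
    def __init__(self):
        self._pending_block_saves = set()       # instances mutated during the parse

    def count_leg(self, block, leg_length):
        block.legsall += 1                      # mutate in memory only
        block.legslength += leg_length
        self._pending_block_saves.add(block)    # defer the database write

    def flush(self):
        # One batched UPDATE per 500 objects instead of one query per block.
        blocks = list(self._pending_block_saves)
        if blocks:
            Block.objects.bulk_update(blocks, ["legsall", "legslength"], batch_size=500)
        self._pending_block_saves.clear()

Collecting the instances in a set also deduplicates blocks touched by many legs, so each row is written at most once per flush.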