From 7fe34bedb851a5377ac8a8f02fbd5e3fb63ff84b Mon Sep 17 00:00:00 2001 From: Philip Sargent Date: Mon, 15 Jun 2020 03:28:51 +0100 Subject: [PATCH] Stop storing all SurvexStations --- core/models_survex.py | 2 +- core/views_caves.py | 26 +++++++++++++++++-- databaseReset.py | 8 +++--- parsers/survex.py | 60 +++++++++++++++++++++++++++++++++---------- 4 files changed, 76 insertions(+), 20 deletions(-) diff --git a/core/models_survex.py b/core/models_survex.py index 448cea9..c65cf2d 100644 --- a/core/models_survex.py +++ b/core/models_survex.py @@ -147,7 +147,7 @@ class SurvexBlock(models.Model): return ssl[0] #print name ss = SurvexStation(name=name, block=self) - ss.save() + #ss.save() return ss def DayIndex(self): diff --git a/core/views_caves.py b/core/views_caves.py index 1f9b91e..ddce62f 100644 --- a/core/views_caves.py +++ b/core/views_caves.py @@ -21,8 +21,30 @@ from django.shortcuts import get_object_or_404, render import settings -from PIL import Image, ImageDraw, ImageFont -import string, os, sys, subprocess +class MapLocations(object): + p = [ + ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"), + ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"), + ("vd1","VD1","Reference", "VD1 survey point"), + ("laser.kt114_96","HSK","Reference", "Hinterer Schwarzmooskogel trig point"), + ("2000","Nipple","Reference", "Nipple (Weiße Warze)"), + ("3000","VSK","Reference", "Vorderer Schwarzmooskogel summit"), + ("topcamp", "OTC", "Reference", "Old Top Camp"), + ("laser.0", "LSR0", "Reference", "Laser Point 0"), + ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"), + ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"), + ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"), + ("225-96", "BAlm", "Reference", "Bräuning Alm trig point") + ] + def points(self): + for ent in Entrance.objects.all(): + if ent.best_station(): + areaName = ent.caveandentrance_set.all()[0].cave.getArea().short_name + self.p.append((ent.best_station(), "%s-%s" % (areaName, str(ent)[5:]), ent.needs_surface_work(), str(ent))) + return self.p + + def __str__(self): + return "{} map locations".format(len(self.p)) def getCave(cave_id): """Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm.""" diff --git a/databaseReset.py b/databaseReset.py index 38c20f1..f98ded9 100755 --- a/databaseReset.py +++ b/databaseReset.py @@ -95,7 +95,7 @@ def import_survexblks(): troggle.parsers.survex.LoadAllSurvexBlocks() def import_survexpos(): - import troggle.parsers.survex + import troggle.parsers.survex print("Importing Survex x/y/z Positions") troggle.parsers.survex.LoadPos() @@ -380,7 +380,7 @@ def usage(): QMs - read in the QM csv files (older caves only) scans - the survey scans in all the wallets (must run before survex) survex - read in the survex files - all the survex blocks but not the x/y/z positions - survexpos - just the x/y/z Pos out of the survex files - Never used. 
+ survexpos - set the x/y/z positions for entrances and fixed points tunnel - read in the Tunnel files - which scans the survey scans too @@ -435,9 +435,9 @@ if __name__ == "__main__": jq.enq("scans",import_surveyscans) jq.enq("logbooks",import_logbooks) jq.enq("QMs",import_QMs) - #jq.enq("survexblks",import_survexblks) restore when prospecting_guide fixed - #jq.enq("survexpos",import_survexpos) jq.enq("tunnel",import_tunnelfiles) + jq.enq("survexblks",import_survexblks) + jq.enq("survexpos",import_survexpos) elif "scans" in sys.argv: jq.enq("scans",import_surveyscans) elif "survex" in sys.argv: diff --git a/parsers/survex.py b/parsers/survex.py index a3c1fe0..529ac13 100755 --- a/parsers/survex.py +++ b/parsers/survex.py @@ -20,6 +20,7 @@ A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet line_leg_regex = re.compile(r"[\d\-+.]+$") survexlegsalllength = 0.0 survexlegsnumber = 0 +survexblockroot = None def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave): global survexlegsalllength @@ -566,6 +567,8 @@ def LoadPos(): posfile = open("%s.pos" % (topdata)) posfile.readline() #Drop header + + survexblockroot = models_survex.SurvexBlock.objects.get(id=1) for line in posfile.readlines(): r = poslineregex.match(line) if r: @@ -573,26 +576,57 @@ def LoadPos(): if name in notfoundbefore: skip[name] = 1 else: - try: - ss = models.SurvexStation.objects.lookup(name) - ss.x = float(x) - ss.y = float(y) - ss.z = float(z) - ss.save() - found += 1 - except: - notfoundnow.append(name) - print(" - %s stations not found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip))) + for sid in mappoints: + if id.endswith(sid): + notfoundnow.append(id) + # Now that we don't import any stations, we create it rather than look it up + # ss = models_survex.SurvexStation.objects.lookup(id) + + # need to set block_id which means doing a search on all the survex blocks.. + # remove dot at end and add one at beginning + blockpath = "." + id[:-len(sid)].strip(".") + try: + sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath) + if len(sbqs)==1: + sb = sbqs[0] + if len(sbqs)>1: + message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid) + print(message) + models.DataIssue.objects.create(parser='survex', message=message) + sb = sbqs[0] + elif len(sbqs)<=0: + message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid) + print(message) + models.DataIssue.objects.create(parser='survex', message=message) + sb = survexblockroot + except: + message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid) + print(message) + models.DataIssue.objects.create(parser='survex', message=message) + try: + ss = models_survex.SurvexStation(name=id, block=sb) + ss.x = float(x) + ss.y = float(y) + ss.z = float(z) + ss.save() + found += 1 + except: + message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid) + print(message) + models.DataIssue.objects.create(parser='survex', message=message) + raise + + #print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip))) if found > 10: # i.e. 
a previous cave import has been done try: with open(cachefile, "w") as f: c = len(notfoundnow)+len(skip) for i in notfoundnow: - f.write("%s\n" % i) + pass #f.write("%s\n" % i) for j in skip: - f.write("%s\n" % j) # NB skip not notfoundbefore - print(' Not-found cache file written: %s entries' % c) + pass #f.write("%s\n" % j) # NB skip not notfoundbefore + print((' Not-found cache file written: %s entries' % c)) except: print(" FAILURE WRITE opening cache file %s" % (cachefile)) raise
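
Note on the new MapLocations helper in core/views_caves.py: it bundles the fixed surface reference points (laser points, trig points, Old Top Camp) together with one best station per Entrance, so map-drawing code can iterate a single list instead of querying stations itself. A minimal usage sketch follows, assuming a configured troggle/Django environment so the Entrance queryset inside points() can reach the database; show_points is an illustrative name, not part of the patch.

    # Usage sketch only: requires Django to be set up for troggle before it runs.
    from troggle.core.views_caves import MapLocations

    def show_points():
        """List the fixed reference points plus each entrance's best station."""
        # points() returns 4-tuples: (survex station name, short label,
        # point type / surface-work flag, human-readable description).
        for station, label, kind, description in MapLocations().points():
            print("%-20s %-6s %-10s %s" % (station, label, kind, description))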
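
Note on the LoadPos() rewrite in parsers/survex.py: because SurvexStation rows are no longer saved during the block import (the ss.save() removed in models_survex.py), LoadPos() now creates a station only for the .pos entries it actually wants, resolving the owning SurvexBlock from the dotted survex path and falling back to the root block when the match is ambiguous or missing. A condensed sketch of those two steps is below, using the model fields the patch itself uses; resolve_block and create_station are illustrative names, the import path is assumed from the troggle package layout, and the set of wanted suffixes is assumed to come from MapLocations().points().

    # Sketch of the create-instead-of-lookup path, under the assumptions above.
    from troggle.core import models, models_survex

    def resolve_block(station_id, suffix, rootblock):
        """Find the SurvexBlock owning a station from its dotted survex path.

        Strips the wanted suffix off the full name, e.g. 'caves.204.trunk.p1'
        with suffix 'p1' gives the block path '.caves.204.trunk'. Ambiguous or
        missing matches are recorded as DataIssues and fall back to rootblock,
        mirroring the behaviour in the patch."""
        blockpath = "." + station_id[:-len(suffix)].strip(".")
        blocks = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
        if len(blocks) != 1:
            models.DataIssue.objects.create(
                parser='survex',
                message=' ! %s SurvexBlocks matching Entrance point %s %s' %
                        ("MULTIPLE" if blocks else "ZERO", blockpath, suffix))
        return blocks[0] if blocks else rootblock

    def create_station(station_id, block, x, y, z):
        """Create (rather than look up) the SurvexStation for one .pos line."""
        ss = models_survex.SurvexStation(name=station_id, block=block)
        ss.x, ss.y, ss.z = float(x), float(y), float(z)
        ss.save()
        return ss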
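
Note on the databaseReset.py change: the survexblks and survexpos jobs are re-enabled in the full-reset queue, with survexpos after survexblks; the position loader needs the SurvexBlock rows (including the root block it fetches by id) to exist first. A minimal sketch of running the two stages directly, assuming Django has been configured for troggle in the same way databaseReset.py does before enqueueing its jobs:

    # Sketch: run the two survex stages in the required order.
    import troggle.parsers.survex

    troggle.parsers.survex.LoadAllSurvexBlocks()  # creates the SurvexBlock rows
    troggle.parsers.survex.LoadPos()              # then sets entrance/fixed-point x/y/z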