From 4ad5b684333bfb995160cd6bf00308d9f3839d4a Mon Sep 17 00:00:00 2001
From: Sam Wenham
Date: Sun, 24 Feb 2019 13:03:34 +0000
Subject: [PATCH] Make things more compatible with newer Python

Fix the expeditions list
Improvements to make it compatible with Django 1.8
Bump the years to add 2018
Update the .hgignore file to ignore junk
---
 .hgignore                         |  6 ++
 core/models.py                    |  9 ++-
 core/views_logbooks.py            | 56 ++++++++++++------
 core/views_other.py               |  4 +-
 core/views_survex.py              | 18 +++---
 databaseReset.py                  | 20 +++----
 docker/requirements.txt.dj-1.7.11 |  8 +++
 docker/requirements.txt.dj-1.8.19 |  7 +++
 parsers/caves.py                  | 16 ++---
 parsers/survex.py                 | 98 +++++++++++++++++--------------
 templates/base.html               |  6 +-
 templates/flatpage.html           |  2 +-
 templates/frontpage.html          |  2 +-
 urls.py                           |  2 +-
 14 files changed, 153 insertions(+), 101 deletions(-)
 create mode 100644 docker/requirements.txt.dj-1.7.11
 create mode 100644 docker/requirements.txt.dj-1.8.19

diff --git a/.hgignore b/.hgignore
index 77026d3..6cbf023 100644
--- a/.hgignore
+++ b/.hgignore
@@ -8,3 +8,9 @@ localsettings.py
 parsing_log.txt
 troggle
 troggle_log.txt
+.idea/*
+*.orig
+media/images/*
+.vscode/*
+.swp
+imagekit-off/
diff --git a/core/models.py b/core/models.py
index a78e49b..f87792e 100644
--- a/core/models.py
+++ b/core/models.py
@@ -30,7 +30,7 @@ def get_related_by_wikilinks(wiki_text):
                              number = qmdict['number'])
             res.append(qm)
         except QM.DoesNotExist:
-            print 'fail on '+str(wikilink)
+            print('fail on '+str(wikilink))
     return res

@@ -141,7 +141,6 @@ class Person(TroggleModel):

    class Meta:
        verbose_name_plural = "People"
-    class Meta:
        ordering = ('orderref',)  # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __unicode__(self):
@@ -529,11 +528,11 @@ class Cave(TroggleModel):

 def getCaveByReference(reference):
     areaname, code = reference.split("-", 1)
-    print areaname, code
+    print(areaname, code)
     area = Area.objects.get(short_name = areaname)
-    print area
+    print(area)
     foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
-    print list(foundCaves)
+    print(list(foundCaves))
     assert len(foundCaves) == 1
     return foundCaves[0]

diff --git a/core/views_logbooks.py b/core/views_logbooks.py
index 0aee9c6..c6f9086 100644
--- a/core/views_logbooks.py
+++ b/core/views_logbooks.py
@@ -16,9 +16,18 @@ from django.template.defaultfilters import slugify
 from troggle.helper import login_required_if_public

 import datetime
+from django.views.generic.list import ListView
+from django.utils import timezone

-# Django uses Context, not RequestContext when you call render_to_response. We always want to use RequestContext, so that django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get necessary settings variables passed to each template. So we use a custom method, render_response instead of render_to_response. Hopefully future Django releases will make this unnecessary.
-#from troggle.alwaysUseRequestContext import render_response
+
+# Django uses Context, not RequestContext when you call render
+# to_response. We always want to use RequestContext, so that
+# django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS.
+# This way we automatically get necessary settings variables passed
+# to each template. So we use a custom method, render_response
+# instead of render_to_response. Hopefully future Django releases
+# will make this unnecessary.
+# from troggle.alwaysUseRequestContext import render_response

 import re

@@ -50,13 +59,13 @@ def personindex(request):


 def expedition(request, expeditionname):
-    expedition = Expedition.objects.get(year=int(expeditionname))
+    this_expedition = Expedition.objects.get(year=int(expeditionname))
     expeditions = Expedition.objects.all()
     personexpeditiondays = [ ]
-    dateditems = list(expedition.logbookentry_set.all()) + list(expedition.survexblock_set.all())
+    dateditems = list(this_expedition.logbookentry_set.all()) + list(this_expedition.survexblock_set.all())
     dates = list(set([item.date for item in dateditems]))
     dates.sort()
-    for personexpedition in expedition.personexpedition_set.all():
+    for personexpedition in this_expedition.personexpedition_set.all():
         prow = [ ]
         for date in dates:
             pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
@@ -71,21 +80,30 @@ def expedition(request, expeditionname):
     message = LoadLogbookForExpedition(expedition)
     return render_with_context(request,'expedition.html', {'expedition': expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })

-    def get_absolute_url(self):
+class ExpeditionListView(ListView):
+
+    model = Expedition
+
+    def get_context_data(self, **kwargs):
+        context = super(ExpeditionListView, self).get_context_data(**kwargs)
+        context['now'] = timezone.now()
+        return context
+
+def get_absolute_url(self):
     return ('expedition', (expedition.year))

 def person(request, first_name='', last_name='', ):
-    person = Person.objects.get(first_name = first_name, last_name = last_name)
+    this_person = Person.objects.get(first_name = first_name, last_name = last_name)

-    #This is for removing the reference to the user's profile, in case they set it to the wrong person
+    # This is for removing the reference to the user's profile, in case they set it to the wrong person
     if request.method == 'GET':
         if request.GET.get('clear_profile')=='True':
-            person.user=None
-            person.save()
+            this_person.user=None
+            this_person.save()
             return HttpResponseRedirect(reverse('profiles_select_profile'))
-    return render_with_context(request,'person.html', {'person': person, })
+    return render_with_context(request,'person.html', {'person': this_person, })


 def GetPersonChronology(personexpedition):
@@ -115,20 +133,20 @@ def GetPersonChronology(personexpedition):

 def personexpedition(request, first_name='', last_name='', year=''):
     person = Person.objects.get(first_name = first_name, last_name = last_name)
-    expedition = Expedition.objects.get(year=year)
-    personexpedition = person.personexpedition_set.get(expedition=expedition)
+    this_expedition = Expedition.objects.get(year=year)
+    personexpedition = person.personexpedition_set.get(expedition=this_expedition)
     personchronology = GetPersonChronology(personexpedition)
     return render_with_context(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})


 def logbookentry(request, date, slug):
-    logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
+    this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)

-    if len(logbookentry)>1:
-        return render_with_context(request, 'object_list.html',{'object_list':logbookentry})
+    if len(this_logbookentry)>1:
+        return render_with_context(request, 'object_list.html',{'object_list':this_logbookentry})
     else:
-        logbookentry=logbookentry[0]
-        return render_with_context(request, 'logbookentry.html', {'logbookentry': logbookentry})
+        this_logbookentry=this_logbookentry[0]
+        return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})


 def logbookSearch(request, extra):
@@ -196,7 +214,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
                                               'expeditionyear': expeditionyear})
            f.write(template.render(context))
            f.close()
-            print logbookparsers.parseAutoLogBookEntry(filename)
+            print(logbookparsers.parseAutoLogBookEntry(filename))
            return HttpResponseRedirect(reverse('expedition', args=[expedition.year])) # Redirect after POST
    else:
        if pslug and pdate:
diff --git a/core/views_other.py b/core/views_other.py
index f9a4661..d99cc32 100644
--- a/core/views_other.py
+++ b/core/views_other.py
@@ -87,8 +87,8 @@ def downloadSurveys(request):

 def downloadLogbook(request,year=None,extension=None,queryset=None):
     if year:
-        expedition=Expedition.objects.get(year=year)
-        logbook_entries=LogbookEntry.objects.filter(expedition=expedition)
+        current_expedition=Expedition.objects.get(year=year)
+        logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)
         filename='logbook'+year
     elif queryset:
         logbook_entries=queryset
diff --git a/core/views_survex.py b/core/views_survex.py
index 28a4370..e252095 100644
--- a/core/views_survex.py
+++ b/core/views_survex.py
@@ -77,7 +77,7 @@ class SvxForm(forms.Form):
     def DiffCode(self, rcode):
         code = self.GetDiscCode()
         difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
-        difflist = [ diffline.strip() for diffline in difftext if not re.match("\s*$", diffline) ]
+        difflist = [ diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline) ]
         return difflist

     def SaveCode(self, rcode):
@@ -98,7 +98,7 @@ class SvxForm(forms.Form):
         return "SAVED"

     def Process(self):
-        print "....\n\n\n....Processing\n\n\n"
+        print("....\n\n\n....Processing\n\n\n")
         cwd = os.getcwd()
         os.chdir(os.path.split(settings.SURVEX_DATA + self.data['filename'])[0])
         os.system(settings.CAVERN + " --log " + settings.SURVEX_DATA + self.data['filename'] + ".svx")
@@ -137,13 +137,13 @@ def svx(request, survex_file):
             if not difflist:
                 message = "OUTPUT FROM PROCESSING"
                 logmessage = form.Process()
-                print logmessage
+                print(logmessage)
             else:
                 message = "SAVE FILE FIRST"
                 form.data['code'] = rcode
         if "save" in rform.data:
             if request.user.is_authenticated():
-                #print "sssavvving"
+                #print("sssavvving")
                 message = form.SaveCode(rcode)
             else:
                 message = "You do not have authority to save this file"
@@ -163,7 +163,7 @@ def svx(request, survex_file):
         difflist.insert(0, message)

     #print [ form.data['code'] ]
-    svxincludes = re.findall('\*include\s+(\S+)(?i)', form.data['code'] or "")
+    svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")

     vmap = {'settings': settings,
             'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
@@ -256,7 +256,7 @@ def identifycavedircontents(gcavedir):
 # direct local non-database browsing through the svx file repositories
 # perhaps should use the database and have a reload button for it
 def survexcaveslist(request):
-    cavesdir = os.path.join(settings.SURVEX_DATA, "caves")
+    cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
     #cavesdircontents = { }
     onefilecaves = [ ]
@@ -264,9 +264,11 @@ def survexcaveslist(request):
     multifilecaves = [ ]
     subdircaves = [ ]

     # first sort the file list
-    fnumlist = [ (-int(re.match("\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
+    fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
     fnumlist.sort()
+    print(fnumlist)
+
     # go through the list and identify the contents of each cave directory
     for num, cavedir in fnumlist:
         if cavedir in ["144", "40"]:
@@ -297,6 +299,8 @@
             multifilecaves.append((survdirobj[0], survdirobj[1:]))
         # single file caves
         else:
+            #print("survdirobj = ")
+            #print(survdirobj)
             onefilecaves.append(survdirobj[0])

     return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
diff --git a/databaseReset.py b/databaseReset.py
index a63b83b..edc7db9 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -42,7 +42,7 @@ def make_dirs():

 def import_caves():
     import parsers.caves
-    print "importing caves"
+    print("importing caves")
     parsers.caves.readcaves()

 def import_people():
@@ -96,7 +96,7 @@ def reset():
     try:
         import_tunnelfiles()
     except:
-        print "Tunnel files parser broken."
+        print("Tunnel files parser broken.")

     import_surveys()

@@ -104,11 +104,11 @@
 def import_auto_logbooks():
     import parsers.logbooks
     import os
-    for pt in core.models.PersonTrip.objects.all():
+    for pt in troggle.core.models.PersonTrip.objects.all():
         pt.delete()
-    for lbe in core.models.LogbookEntry.objects.all():
+    for lbe in troggle.core.models.LogbookEntry.objects.all():
         lbe.delete()
-    for expedition in core.models.Expedition.objects.all():
+    for expedition in troggle.core.models.Expedition.objects.all():
         directory = os.path.join(settings.EXPOWEB,
                                  "years",
                                  expedition.year,
@@ -127,10 +127,10 @@ def dumplogbooks():
             return pe.nickname
         else:
             return pe.person.first_name
-    for lbe in core.models.LogbookEntry.objects.all():
+    for lbe in troggle.core.models.LogbookEntry.objects.all():
         dateStr = lbe.date.strftime("%Y-%m-%d")
         directory = os.path.join(settings.EXPOWEB,
-                                 "years",
+                                 "years",
                                  lbe.expedition.year,
                                  "autologbook")
         if not os.path.isdir(directory):
@@ -166,7 +166,7 @@ def writeCaves():
         entrance.writeDataFile()

 def usage():
-    print """Usage is 'python databaseReset.py <command>'
+    print("""Usage is 'python databaseReset.py <command>'
              where command is:
                  reset - this is normal usage, clear database and reread everything
                  desc
                  caves
                  people
                  logbooks
                  QMs
                  survey
                  scans
                  survexblocks
                  survexpos
                  tunnel - read in the Tunnel files
                  writeCaves
-          """
+          """)

 if __name__ == "__main__":
     import troggle.core.models
@@ -235,7 +235,7 @@
     elif "help" in sys.argv:
         usage()
     else:
-        print "%s not recognised" % sys.argv
+        print("%s not recognised" % sys.argv)
         usage()

diff --git a/docker/requirements.txt.dj-1.7.11 b/docker/requirements.txt.dj-1.7.11
new file mode 100644
index 0000000..ae0a7ad
--- /dev/null
+++ b/docker/requirements.txt.dj-1.7.11
@@ -0,0 +1,8 @@
+Django==1.7.11
+django-registration==2.1.2
+mysql
+#imagekit
+django-imagekit
+Image
+django-tinymce==2.7.0
+smartencoding
diff --git a/docker/requirements.txt.dj-1.8.19 b/docker/requirements.txt.dj-1.8.19
new file mode 100644
index 0000000..42c3640
--- /dev/null
+++ b/docker/requirements.txt.dj-1.8.19
@@ -0,0 +1,7 @@
+Django==1.8.19
+django-registration==2.1.2
+mysql
+imagekit
+Image
+django-tinymce==2.7.0
+smartencoding
diff --git a/parsers/caves.py b/parsers/caves.py
index 63c9f94..ba1c358 100644
--- a/parsers/caves.py
+++ b/parsers/caves.py
@@ -10,12 +10,12 @@ def readcaves():
     newArea.save()
     newArea = models.Area(short_name = "1626", parent = None)
     newArea.save()
-    print "Reading Entrances"
+    print("Reading Entrances")
     #print "list of "
     for filename in os.walk(settings.ENTRANCEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
         if filename.endswith('.html'):
             readentrance(filename)
-    print "Reading Caves"
+    print ("Reading Caves")
     for filename in os.walk(settings.CAVEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
         if filename.endswith('.html'):
             readcave(filename)
@@ -154,7 +154,7 @@
                                   primary = primary)
                 cs.save()
             except:
-                print "Can't find text (slug): %s, skipping %s" % (slug, context)
+                print("Can't find text (slug): %s, skipping %s" % (slug, context))
             primary = False

     for entrance in entrances:
@@ -165,17 +165,17 @@
             ce = models.CaveAndEntrance(cave = c, entrance_letter = letter, entrance = entrance)
             ce.save()
         except:
-            print "Entrance text (slug) %s missing %s" % (slug, context)
+            print ("Entrance text (slug) %s missing %s" % (slug, context))


 def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
     items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
     if len(items) < minItems and printwarnings:
-        print "%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
+        print("%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
                                                                            "itemname": itemname,
-                                                                           "min": minItems} + context
+                                                                           "min": minItems} + context)
     if maxItems is not None and len(items) > maxItems and printwarnings:
-        print "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
+        print("%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
                                                                                "itemname": itemname,
-                                                                               "max": maxItems} + context
+                                                                               "max": maxItems} + context)
     return items
diff --git a/parsers/survex.py b/parsers/survex.py
index 0c108ac..536314f 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -9,7 +9,6 @@
 import re
 import os

-
 def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
     ls = sline.lower().split()
     ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
@@ -20,23 +19,23 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
     try:
         survexleg.tape = float(ls[stardata["tape"]])
     except ValueError:
-        print "Tape misread in", survexblock.survexfile.path
-        print "Stardata:", stardata
-        print "Line:", ls
+        print("Tape misread in", survexblock.survexfile.path)
+        print("Stardata:", stardata)
+        print("Line:", ls)
         survexleg.tape = 1000
     try:
         lclino = ls[stardata["clino"]]
     except:
-        print "Clino misread in", survexblock.survexfile.path
-        print "Stardata:", stardata
-        print "Line:", ls
+        print("Clino misread in", survexblock.survexfile.path)
+        print("Stardata:", stardata)
+        print("Line:", ls)
         lclino = error
     try:
         lcompass = ls[stardata["compass"]]
     except:
-        print "Compass misread in", survexblock.survexfile.path
-        print "Stardata:", stardata
-        print "Line:", ls
+        print("Compass misread in", survexblock.survexfile.path)
+        print("Stardata:", stardata)
+        print("Line:", ls)
         lcompass = error
     if lclino == "up":
         survexleg.compass = 0.0
@@ -48,14 +47,14 @@
         try:
             survexleg.compass = float(lcompass)
         except ValueError:
-            print "Compass misread in", survexblock.survexfile.path
-            print "Stardata:", stardata
-            print "Line:", ls
+            print("Compass misread in", survexblock.survexfile.path)
+            print("Stardata:", stardata)
+            print("Line:", ls)
             survexleg.compass = 1000
             survexleg.clino = -90.0
     else:
-        assert re.match("[\d\-+.]+$", lcompass), ls
-        assert re.match("[\d\-+.]+$", lclino) and lclino != "-", ls
+        assert re.match(r"[\d\-+.]+$", lcompass), ls
+        assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
         survexleg.compass = float(lcompass)
         survexleg.clino = float(lclino)
@@ -67,9 +66,10 @@
         try:
             survexblock.totalleglength += float(ls[itape])
         except ValueError:
-            print "Length not added"
+            print("Length not added")
     survexblock.save()
-
+
+
 def LoadSurvexEquate(survexblock, sline):
     #print sline
     # stations = sline.split()
@@ -77,12 +77,13 @@
     for station in stations:
         survexblock.MakeSurvexStation(station)

+
 def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
     pass

-stardatadefault = { "type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4 }
-stardataparamconvert = { "length":"tape", "bearing":"compass", "gradient":"clino" }
+stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
+stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

 def RecursiveLoad(survexblock, survexfile, fin, textlines):
     iblankbegins = 0
@@ -91,7 +92,7 @@
     teammembers = [ ]

 # uncomment to print out all files during parsing
-# print "Reading file:", survexblock.survexfile.path
+    print("Reading file:", survexblock.survexfile.path)
     while True:
         svxline = fin.readline().decode("latin1")
         if not svxline:
@@ -99,10 +100,10 @@
         textlines.append(svxline)

         # break the line at the comment
-        sline, comment = re.match("([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
+        sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()

         # detect ref line pointing to the scans directory
-        mref = comment and re.match('.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
+        mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
         if mref:
             refscan = "%s#%s" % (mref.group(1), mref.group(2))
             survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
@@ -116,7 +117,7 @@
             continue

         # detect the star command
-        mstar = re.match('\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
+        mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
         if not mstar:
             if "from" in stardata:
                 LoadSurvexLineLeg(survexblock, stardata, sline, comment)
@@ -129,7 +130,7 @@
         cmd, line = mstar.groups()
         cmd = cmd.lower()
         if re.match("include$(?i)", cmd):
-            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub("\.svx$", "", line))
+            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
             includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
             includesurvexfile.save()
             includesurvexfile.SetDirectory()
@@ -157,7 +158,7 @@
 
         elif re.match("date$(?i)", cmd):
             if len(line) == 10:
-                survexblock.date = re.sub("\.", "-", line)
+                survexblock.date = re.sub(r"\.", "-", line)
                 expeditions = models.Expedition.objects.filter(year=line[:4])
                 if expeditions:
                     assert len(expeditions) == 1
@@ -166,9 +167,9 @@
                     survexblock.save()

         elif re.match("team$(?i)", cmd):
-            mteammember = re.match("(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
+            mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
             if mteammember:
-                for tm in re.split(" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
re.split(" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)): + for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)): if tm: personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower()) if (personexpedition, tm) not in teammembers: @@ -206,22 +207,25 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines): survexblock.MakeSurvexStation(line.split()[0]) else: - if not cmd in [ "sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument", "calibrate", "set", "infer", "alias", "ref" ]: - print ("Unrecognised command in line:", cmd, line, survexblock) - - + if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument", + "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]: + print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path) -def ReloadSurvexCave(survex_cave): - cave = models.Cave.objects.get(kataster_number=survex_cave) + +def ReloadSurvexCave(survex_cave, area): + print(survex_cave, area) + cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area) + print(cave) + #cave = models.Cave.objects.get(kataster_number=survex_cave) cave.survexblock_set.all().delete() cave.survexfile_set.all().delete() cave.survexdirectory_set.all().delete() - survexfile = models.SurvexFile(path="caves/" + survex_cave + "/" + survex_cave, cave=cave) + survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave) survexfile.save() survexfile.SetDirectory() - survexblockroot = models.SurvexBlock(name="root", survexpath="caves", begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0) + survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0) survexblockroot.save() fin = survexfile.OpenFile() textlines = [ ] @@ -232,7 +236,7 @@ def ReloadSurvexCave(survex_cave): def LoadAllSurvexBlocks(): - print 'Loading All Survex Blocks...' + print('Loading All Survex Blocks...') models.SurvexBlock.objects.all().delete() models.SurvexFile.objects.all().delete() @@ -243,6 +247,8 @@ def LoadAllSurvexBlocks(): models.SurvexPersonRole.objects.all().delete() models.SurvexStation.objects.all().delete() + print(" - Data flushed") + survexfile = models.SurvexFile(path="all", cave=None) survexfile.save() survexfile.SetDirectory() @@ -259,22 +265,26 @@ def LoadAllSurvexBlocks(): #Load each cave, #FIXME this should be dealt with load all above + print(" - Reloading all caves") caves = models.Cave.objects.all() for cave in caves: - if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves", cave.kataster_number)): + if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)): if cave.kataster_number not in ['40']: - print "loading", cave - ReloadSurvexCave(cave.kataster_number) - -poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$") + print("loading", cave, cave.kat_area()) + ReloadSurvexCave(cave.kataster_number, cave.kat_area()) + + +poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$") + + def LoadPos(): - print 'Loading Pos....' 
+    print('Loading Pos....')
     call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
     call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
     posfile = open("%sall.pos" % settings.SURVEX_DATA)
-    posfile.readline()#Drop header
+    posfile.readline() #Drop header
     for line in posfile.readlines():
         r = poslineregex.match(line)
         if r:
@@ -286,4 +296,4 @@
                 ss.z = float(z)
                 ss.save()
             except:
-                print "%s not parsed in survex" % name
+                print("%s not parsed in survex" % name)
diff --git a/templates/base.html b/templates/base.html
index 20a22ef..372a251 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -16,7 +16,7 @@