
Make things more compatible with newer Python

Fix the expeditions list
Improvements to make it compatible with Django 1.8
Bump the years to add 2018
Update the .hgignore file to ignore junk
This commit is contained in:
Sam Wenham 2019-02-24 13:03:34 +00:00
parent 552730f0a3
commit 4ad5b68433
14 changed files with 153 additions and 101 deletions
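
The bulk of the Python changes below follow two mechanical patterns needed for newer Python: print statements become print() function calls, and regex patterns gain an r"" prefix so escapes such as \d and \s are not treated as string escapes. A minimal illustrative sketch of both patterns (sample values are hypothetical, not taken from troggle):

import re

# Old (Python 2 only):
#   print "fail on " + str(wikilink)
#   re.match("\d*", f)
# New (valid on Python 2.7 and 3):
wikilink = "[[qm:1623-204-12]]"    # hypothetical sample value
print('fail on ' + str(wikilink))

f = "204cave.svx"                  # hypothetical sample filename
print(re.match(r"\d*", f).group(0) or "0")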

View File

@ -8,3 +8,9 @@ localsettings.py
parsing_log.txt
troggle
troggle_log.txt
.idea/*
*.orig
media/images/*
.vscode/*
.swp
imagekit-off/

View File

@ -30,7 +30,7 @@ def get_related_by_wikilinks(wiki_text):
number = qmdict['number'])
res.append(qm)
except QM.DoesNotExist:
print 'fail on '+str(wikilink)
print('fail on '+str(wikilink))
return res
@ -141,7 +141,6 @@ class Person(TroggleModel):
class Meta:
verbose_name_plural = "People"
class Meta:
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
def __unicode__(self):
@ -529,11 +528,11 @@ class Cave(TroggleModel):
def getCaveByReference(reference):
areaname, code = reference.split("-", 1)
print areaname, code
print(areaname, code)
area = Area.objects.get(short_name = areaname)
print area
print(area)
foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
print list(foundCaves)
print(list(foundCaves))
assert len(foundCaves) == 1
return foundCaves[0]

View File

@ -16,9 +16,18 @@ from django.template.defaultfilters import slugify
from troggle.helper import login_required_if_public
import datetime
from django.views.generic.list import ListView
from django.utils import timezone
# Django uses Context, not RequestContext when you call render_to_response. We always want to use RequestContext, so that django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get necessary settings variables passed to each template. So we use a custom method, render_response instead of render_to_response. Hopefully future Django releases will make this unnecessary.
#from troggle.alwaysUseRequestContext import render_response
# Django uses Context, not RequestContext when you call render
# to_response. We always want to use RequestContext, so that
# django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS.
# This way we automatically get necessary settings variables passed
# to each template. So we use a custom method, render_response
# instead of render_to_response. Hopefully future Django releases
# will make this unnecessary.
# from troggle.alwaysUseRequestContext import render_response
import re
@ -50,13 +59,13 @@ def personindex(request):
def expedition(request, expeditionname):
expedition = Expedition.objects.get(year=int(expeditionname))
this_expedition = Expedition.objects.get(year=int(expeditionname))
expeditions = Expedition.objects.all()
personexpeditiondays = [ ]
dateditems = list(expedition.logbookentry_set.all()) + list(expedition.survexblock_set.all())
dateditems = list(this_expedition.logbookentry_set.all()) + list(this_expedition.survexblock_set.all())
dates = list(set([item.date for item in dateditems]))
dates.sort()
for personexpedition in expedition.personexpedition_set.all():
for personexpedition in this_expedition.personexpedition_set.all():
prow = [ ]
for date in dates:
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
@ -71,21 +80,30 @@ def expedition(request, expeditionname):
message = LoadLogbookForExpedition(expedition)
return render_with_context(request,'expedition.html', {'expedition': expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
def get_absolute_url(self):
class ExpeditionListView(ListView):
model = Expedition
def get_context_data(self, **kwargs):
context = super(ExpeditionListView, self).get_context_data(**kwargs)
context['now'] = timezone.now()
return context
def get_absolute_url(self):
return ('expedition', (expedition.year))
def person(request, first_name='', last_name='', ):
person = Person.objects.get(first_name = first_name, last_name = last_name)
this_person = Person.objects.get(first_name = first_name, last_name = last_name)
#This is for removing the reference to the user's profile, in case they set it to the wrong person
# This is for removing the reference to the user's profile, in case they set it to the wrong person
if request.method == 'GET':
if request.GET.get('clear_profile')=='True':
person.user=None
person.save()
this_person.user=None
this_person.save()
return HttpResponseRedirect(reverse('profiles_select_profile'))
return render_with_context(request,'person.html', {'person': person, })
return render_with_context(request,'person.html', {'person': this_person, })
def GetPersonChronology(personexpedition):
@ -115,20 +133,20 @@ def GetPersonChronology(personexpedition):
def personexpedition(request, first_name='', last_name='', year=''):
person = Person.objects.get(first_name = first_name, last_name = last_name)
expedition = Expedition.objects.get(year=year)
personexpedition = person.personexpedition_set.get(expedition=expedition)
this_expedition = Expedition.objects.get(year=year)
personexpedition = person.personexpedition_set.get(expedition=this_expedition)
personchronology = GetPersonChronology(personexpedition)
return render_with_context(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})
def logbookentry(request, date, slug):
logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
if len(logbookentry)>1:
return render_with_context(request, 'object_list.html',{'object_list':logbookentry})
if len(this_logbookentry)>1:
return render_with_context(request, 'object_list.html',{'object_list':this_logbookentry})
else:
logbookentry=logbookentry[0]
return render_with_context(request, 'logbookentry.html', {'logbookentry': logbookentry})
this_logbookentry=this_logbookentry[0]
return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})
def logbookSearch(request, extra):
@ -196,7 +214,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
'expeditionyear': expeditionyear})
f.write(template.render(context))
f.close()
print logbookparsers.parseAutoLogBookEntry(filename)
print(logbookparsers.parseAutoLogBookEntry(filename))
return HttpResponseRedirect(reverse('expedition', args=[expedition.year])) # Redirect after POST
else:
if pslug and pdate:

View File

@ -87,8 +87,8 @@ def downloadSurveys(request):
def downloadLogbook(request,year=None,extension=None,queryset=None):
if year:
expedition=Expedition.objects.get(year=year)
logbook_entries=LogbookEntry.objects.filter(expedition=expedition)
current_expedition=Expedition.objects.get(year=year)
logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)
filename='logbook'+year
elif queryset:
logbook_entries=queryset

View File

@ -77,7 +77,7 @@ class SvxForm(forms.Form):
def DiffCode(self, rcode):
code = self.GetDiscCode()
difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
difflist = [ diffline.strip() for diffline in difftext if not re.match("\s*$", diffline) ]
difflist = [ diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline) ]
return difflist
def SaveCode(self, rcode):
@ -98,7 +98,7 @@ class SvxForm(forms.Form):
return "SAVED"
def Process(self):
print "....\n\n\n....Processing\n\n\n"
print("....\n\n\n....Processing\n\n\n")
cwd = os.getcwd()
os.chdir(os.path.split(settings.SURVEX_DATA + self.data['filename'])[0])
os.system(settings.CAVERN + " --log " + settings.SURVEX_DATA + self.data['filename'] + ".svx")
@ -137,13 +137,13 @@ def svx(request, survex_file):
if not difflist:
message = "OUTPUT FROM PROCESSING"
logmessage = form.Process()
print logmessage
print(logmessage)
else:
message = "SAVE FILE FIRST"
form.data['code'] = rcode
if "save" in rform.data:
if request.user.is_authenticated():
#print "sssavvving"
#print("sssavvving")
message = form.SaveCode(rcode)
else:
message = "You do not have authority to save this file"
@ -163,7 +163,7 @@ def svx(request, survex_file):
difflist.insert(0, message)
#print [ form.data['code'] ]
svxincludes = re.findall('\*include\s+(\S+)(?i)', form.data['code'] or "")
svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")
vmap = {'settings': settings,
'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
@ -256,7 +256,7 @@ def identifycavedircontents(gcavedir):
# direct local non-database browsing through the svx file repositories
# perhaps should use the database and have a reload button for it
def survexcaveslist(request):
cavesdir = os.path.join(settings.SURVEX_DATA, "caves")
cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
#cavesdircontents = { }
onefilecaves = [ ]
@ -264,9 +264,11 @@ def survexcaveslist(request):
subdircaves = [ ]
# first sort the file list
fnumlist = [ (-int(re.match("\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
fnumlist.sort()
print(fnumlist)
# go through the list and identify the contents of each cave directory
for num, cavedir in fnumlist:
if cavedir in ["144", "40"]:
@ -297,6 +299,8 @@ def survexcaveslist(request):
multifilecaves.append((survdirobj[0], survdirobj[1:]))
# single file caves
else:
#print("survdirobj = ")
#print(survdirobj)
onefilecaves.append(survdirobj[0])
return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })

View File

@ -42,7 +42,7 @@ def make_dirs():
def import_caves():
import parsers.caves
print "importing caves"
print("importing caves")
parsers.caves.readcaves()
def import_people():
@ -96,7 +96,7 @@ def reset():
try:
import_tunnelfiles()
except:
print "Tunnel files parser broken."
print("Tunnel files parser broken.")
import_surveys()
@ -104,11 +104,11 @@ def reset():
def import_auto_logbooks():
import parsers.logbooks
import os
for pt in core.models.PersonTrip.objects.all():
for pt in troggle.core.models.PersonTrip.objects.all():
pt.delete()
for lbe in core.models.LogbookEntry.objects.all():
for lbe in troggle.core.models.LogbookEntry.objects.all():
lbe.delete()
for expedition in core.models.Expedition.objects.all():
for expedition in troggle.core.models.Expedition.objects.all():
directory = os.path.join(settings.EXPOWEB,
"years",
expedition.year,
@ -127,7 +127,7 @@ def dumplogbooks():
return pe.nickname
else:
return pe.person.first_name
for lbe in core.models.LogbookEntry.objects.all():
for lbe in troggle.core.models.LogbookEntry.objects.all():
dateStr = lbe.date.strftime("%Y-%m-%d")
directory = os.path.join(settings.EXPOWEB,
"years",
@ -166,7 +166,7 @@ def writeCaves():
entrance.writeDataFile()
def usage():
print """Usage is 'python databaseReset.py <command>'
print("""Usage is 'python databaseReset.py <command>'
where command is:
reset - this is normal usage, clear database and reread everything
desc
@ -182,7 +182,7 @@ def usage():
survexpos
tunnel - read in the Tunnel files
writeCaves
"""
""")
if __name__ == "__main__":
import troggle.core.models
@ -235,7 +235,7 @@ if __name__ == "__main__":
elif "help" in sys.argv:
usage()
else:
print "%s not recognised" % sys.argv
print("%s not recognised" % sys.argv)
usage()

View File

@ -0,0 +1,8 @@
Django==1.7.11
django-registration==2.1.2
mysql
#imagekit
django-imagekit
Image
django-tinymce==2.7.0
smartencoding

View File

@ -0,0 +1,7 @@
Django==1.8.19
django-registration==2.1.2
mysql
imagekit
Image
django-tinymce==2.7.0
smartencoding

View File

@ -10,12 +10,12 @@ def readcaves():
newArea.save()
newArea = models.Area(short_name = "1626", parent = None)
newArea.save()
print "Reading Entrances"
print("Reading Entrances")
#print "list of <Slug> <Filename>"
for filename in os.walk(settings.ENTRANCEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readentrance(filename)
print "Reading Caves"
print ("Reading Caves")
for filename in os.walk(settings.CAVEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readcave(filename)
@ -154,7 +154,7 @@ def readcave(filename):
primary = primary)
cs.save()
except:
print "Can't find text (slug): %s, skipping %s" % (slug, context)
print("Can't find text (slug): %s, skipping %s" % (slug, context))
primary = False
for entrance in entrances:
@ -165,17 +165,17 @@ def readcave(filename):
ce = models.CaveAndEntrance(cave = c, entrance_letter = letter, entrance = entrance)
ce.save()
except:
print "Entrance text (slug) %s missing %s" % (slug, context)
print ("Entrance text (slug) %s missing %s" % (slug, context))
def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
if len(items) < minItems and printwarnings:
print "%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
print("%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
"itemname": itemname,
"min": minItems} + context
"min": minItems} + context)
if maxItems is not None and len(items) > maxItems and printwarnings:
print "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
print("%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
"itemname": itemname,
"max": maxItems} + context
"max": maxItems} + context)
return items

View File

@ -9,7 +9,6 @@ import re
import os
def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
ls = sline.lower().split()
ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
@ -20,23 +19,23 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexleg.tape = float(ls[stardata["tape"]])
except ValueError:
print "Tape misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Tape misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
survexleg.tape = 1000
try:
lclino = ls[stardata["clino"]]
except:
print "Clino misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Clino misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
lclino = error
try:
lcompass = ls[stardata["compass"]]
except:
print "Compass misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Compass misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
lcompass = error
if lclino == "up":
survexleg.compass = 0.0
@ -48,14 +47,14 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexleg.compass = float(lcompass)
except ValueError:
print "Compass misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Compass misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
survexleg.compass = 1000
survexleg.clino = -90.0
else:
assert re.match("[\d\-+.]+$", lcompass), ls
assert re.match("[\d\-+.]+$", lclino) and lclino != "-", ls
assert re.match(r"[\d\-+.]+$", lcompass), ls
assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
survexleg.compass = float(lcompass)
survexleg.clino = float(lclino)
@ -67,9 +66,10 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexblock.totalleglength += float(ls[itape])
except ValueError:
print "Length not added"
print("Length not added")
survexblock.save()
def LoadSurvexEquate(survexblock, sline):
#print sline #
stations = sline.split()
@ -77,12 +77,13 @@ def LoadSurvexEquate(survexblock, sline):
for station in stations:
survexblock.MakeSurvexStation(station)
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
pass
stardatadefault = { "type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4 }
stardataparamconvert = { "length":"tape", "bearing":"compass", "gradient":"clino" }
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
def RecursiveLoad(survexblock, survexfile, fin, textlines):
iblankbegins = 0
@ -91,7 +92,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
teammembers = [ ]
# uncomment to print out all files during parsing
# print "Reading file:", survexblock.survexfile.path
print("Reading file:", survexblock.survexfile.path)
while True:
svxline = fin.readline().decode("latin1")
if not svxline:
@ -99,10 +100,10 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
textlines.append(svxline)
# break the line at the comment
sline, comment = re.match("([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
# detect ref line pointing to the scans directory
mref = comment and re.match('.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
if mref:
refscan = "%s#%s" % (mref.group(1), mref.group(2))
survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
@ -116,7 +117,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
continue
# detect the star command
mstar = re.match('\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
if not mstar:
if "from" in stardata:
LoadSurvexLineLeg(survexblock, stardata, sline, comment)
@ -129,7 +130,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
cmd, line = mstar.groups()
cmd = cmd.lower()
if re.match("include$(?i)", cmd):
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub("\.svx$", "", line))
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
includesurvexfile.save()
includesurvexfile.SetDirectory()
@ -157,7 +158,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
elif re.match("date$(?i)", cmd):
if len(line) == 10:
survexblock.date = re.sub("\.", "-", line)
survexblock.date = re.sub(r"\.", "-", line)
expeditions = models.Expedition.objects.filter(year=line[:4])
if expeditions:
assert len(expeditions) == 1
@ -166,9 +167,9 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
survexblock.save()
elif re.match("team$(?i)", cmd):
mteammember = re.match("(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
if mteammember:
for tm in re.split(" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
if tm:
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
if (personexpedition, tm) not in teammembers:
@ -206,22 +207,25 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
survexblock.MakeSurvexStation(line.split()[0])
else:
if not cmd in [ "sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument", "calibrate", "set", "infer", "alias", "ref" ]:
print ("Unrecognised command in line:", cmd, line, survexblock)
if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
"calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
def ReloadSurvexCave(survex_cave):
cave = models.Cave.objects.get(kataster_number=survex_cave)
def ReloadSurvexCave(survex_cave, area):
print(survex_cave, area)
cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
print(cave)
#cave = models.Cave.objects.get(kataster_number=survex_cave)
cave.survexblock_set.all().delete()
cave.survexfile_set.all().delete()
cave.survexdirectory_set.all().delete()
survexfile = models.SurvexFile(path="caves/" + survex_cave + "/" + survex_cave, cave=cave)
survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
survexfile.save()
survexfile.SetDirectory()
survexblockroot = models.SurvexBlock(name="root", survexpath="caves", begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
survexblockroot.save()
fin = survexfile.OpenFile()
textlines = [ ]
@ -232,7 +236,7 @@ def ReloadSurvexCave(survex_cave):
def LoadAllSurvexBlocks():
print 'Loading All Survex Blocks...'
print('Loading All Survex Blocks...')
models.SurvexBlock.objects.all().delete()
models.SurvexFile.objects.all().delete()
@ -243,6 +247,8 @@ def LoadAllSurvexBlocks():
models.SurvexPersonRole.objects.all().delete()
models.SurvexStation.objects.all().delete()
print(" - Data flushed")
survexfile = models.SurvexFile(path="all", cave=None)
survexfile.save()
survexfile.SetDirectory()
@ -259,22 +265,26 @@ def LoadAllSurvexBlocks():
#Load each cave,
#FIXME this should be dealt with load all above
print(" - Reloading all caves")
caves = models.Cave.objects.all()
for cave in caves:
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves", cave.kataster_number)):
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
if cave.kataster_number not in ['40']:
print "loading", cave
ReloadSurvexCave(cave.kataster_number)
print("loading", cave, cave.kat_area())
ReloadSurvexCave(cave.kataster_number, cave.kat_area())
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
def LoadPos():
print 'Loading Pos....'
print('Loading Pos....')
call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
posfile = open("%sall.pos" % settings.SURVEX_DATA)
posfile.readline()#Drop header
posfile.readline() #Drop header
for line in posfile.readlines():
r = poslineregex.match(line)
if r:
@ -286,4 +296,4 @@ def LoadPos():
ss.z = float(z)
ss.save()
except:
print "%s not parsed in survex" % name
print("%s not parsed in survex" % name)

View File

@ -16,7 +16,7 @@
<body onLoad="contentHeight();">
<div id="header">
<h1>CUCC Expeditions to Austria: 1976 - 2016</h1>
<h1>CUCC Expeditions to Austria: 1976 - 2018</h1>
<div id="editLinks"> {% block loginInfo %}
<a href="{{settings.EXPOWEB_URL}}">Website home</a> |
{% if user.username %}
@ -40,9 +40,9 @@
<a href="{% url "survexcavessingle" 204 %}">204</a> |
<a href="{% url "survexcavessingle" 258 %}">258</a> |
<a href="{% url "survexcavessingle" 264 %}">264</a> |
<a href="{% url "expedition" 2015 %}">Expo2015</a> |
<a href="{% url "expedition" 2016 %}">Expo2016</a> |
<a href="{% url "expedition" 2017 %}">Expo2017</a> |
<a href="{% url "expedition" 2018 %}">Expo2018</a> |
<a href="/admin/">Django admin</a>
</div>
@ -90,7 +90,7 @@
<li><a id="caversLink" href="{% url "personindex" %}">cavers</a></li>
<li><a href="#">expeditions</a>
<ul class="sub_menu">
<li><a id="expeditionsLink" href="{{ Expedition.objects.latest.get_absolute_url }}">newest</a></li>
<li><a id="expeditionsLink" href="{{ expedition.objects.latest.get_absolute_url }}">newest</a></li>
<li><a id="expeditionsLink" href="{% url "expeditions" %}">list all</a></li>
</ul>
</li>

View File

@ -7,7 +7,7 @@
{% if editable %}
<a href="{% url "editflatpage" path %}">Edit</a>
{% endif %}
{%else %}
{% else %}
{% if not has_menu %}
{% include "menu.html" %}
{% endif %}

View File

@ -42,7 +42,7 @@ This is Troggle, the information portal for Cambridge University Caving Club's E
</p>
<p class="indent">
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{photo.objects.count}} photos and {{logbookentry.objects.count}} logbook entries.
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{Cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{Photo.objects.count}} photos and {{Logbookentry.objects.count}} logbook entries.
</p>
<p class="indent">

View File

@ -30,7 +30,7 @@ actualurlpatterns = patterns('',
#url(r'^person/(\w+_\w+)$', views_logbooks.person, name="person"),
url(r'^expedition/(\d+)$', views_logbooks.expedition, name="expedition"),
url(r'^expeditions/?$', ListView, {'queryset':Expedition.objects.all(),'template_name':'object_list.html'},name="expeditions"),
url(r'^expeditions/?$', views_logbooks.ExpeditionListView.as_view(), name="expeditions"),
url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-z]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
url(r'^logbookentry/(?P<date>.*)/(?P<slug>.*)/?$', views_logbooks.logbookentry,name="logbookentry"),
url(r'^newlogbookentry/(?P<expeditionyear>.*)$', views_logbooks.newLogbookEntry, name="newLogBookEntry"),
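
For the "Fix the expeditions list" change, the views_logbooks and urls diffs above replace the old function-style ListView URL entry with a class-based view wired up via .as_view(). A minimal standalone sketch of that Django 1.8 pattern, assuming an Expedition model and the default expedition_list.html template (import paths simplified, not a drop-in copy of the troggle code):

from django.conf.urls import url
from django.utils import timezone
from django.views.generic.list import ListView

from core.models import Expedition           # assumed import path

class ExpeditionListView(ListView):
    model = Expedition                        # renders expedition_list.html by default

    def get_context_data(self, **kwargs):
        # Extend the default context, e.g. so templates can show the current time.
        context = super(ExpeditionListView, self).get_context_data(**kwargs)
        context['now'] = timezone.now()
        return context

urlpatterns = [
    # The old entry passed the ListView class and a queryset dict directly;
    # newer Django requires instantiating the view with .as_view().
    url(r'^expeditions/?$', ExpeditionListView.as_view(), name="expeditions"),
]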