forked from expo/troggle
Compare commits
3 Commits
RW_rebuild
...
django-upg
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6984f66794 | ||
|
|
6f42bd51e1 | ||
|
|
4e27c90f77 |
@@ -1,24 +0,0 @@
|
||||
import utm
|
||||
import math
|
||||
from django.conf import settings
|
||||
|
||||
def lat_lon_entrance(utmstring):
|
||||
try:
|
||||
x = float(utmstring.split()[0])
|
||||
y = float(utmstring.split()[1])
|
||||
#return ' '+str(x+y)+' '+str(y)
|
||||
q = utm.to_latlon(x, y, 33, 'U')
|
||||
return "{:.5f} {:.5f}".format(q[0],q[1])
|
||||
except:
|
||||
return 'Not found'
|
||||
|
||||
def top_camp_distance(utmstring):
|
||||
try:
|
||||
x = float(utmstring.split()[0])
|
||||
y = float(utmstring.split()[1])
|
||||
tx = settings.TOPCAMPX
|
||||
ty = settings.TOPCAMPY
|
||||
dist = math.sqrt( (tx-x)*(tx-x) + (ty-y)*(ty-y) )
|
||||
return "{:.1f}".format(dist)
|
||||
except:
|
||||
return 'Not found'
|
||||
849
core/models.py
849
core/models.py
@@ -10,13 +10,854 @@ from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.models import ProcessedImageField #ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import * #ancient models for both survex and other things
|
||||
from troggle.core.models_old import *
|
||||
from troggle.core.models_survex import *
|
||||
|
||||
|
||||
from troggle.core.models_millenial import * #updated models are here
|
||||
def get_related_by_wikilinks(wiki_text):
|
||||
found=re.findall(settings.QM_PATTERN,wiki_text)
|
||||
res=[]
|
||||
for wikilink in found:
|
||||
qmdict={'urlroot':settings.URL_ROOT,'cave':wikilink[2],'year':wikilink[1],'number':wikilink[3]}
|
||||
try:
|
||||
cave_slugs = CaveSlug.objects.filter(cave__kataster_number = qmdict['cave'])
|
||||
qm=QM.objects.get(found_by__cave_slug__in = cave_slugs,
|
||||
found_by__date__year = qmdict['year'],
|
||||
number = qmdict['number'])
|
||||
res.append(qm)
|
||||
except QM.DoesNotExist:
|
||||
print('fail on '+str(wikilink))
|
||||
|
||||
return res
|
||||
|
||||
try:
|
||||
logging.basicConfig(level=logging.DEBUG,
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
except:
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
logging.basicConfig(level=logging.DEBUG,
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
class TroggleModel(models.Model):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
non_public = models.BooleanField(default=False)
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
def get_admin_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class TroggleImageModel(models.Model):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
def get_admin_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))
|
||||
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.year
|
||||
|
||||
class Meta:
|
||||
ordering = ('-year',)
|
||||
get_latest_by = 'year'
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))
|
||||
|
||||
# construction function. should be moved out
|
||||
def get_expedition_day(self, date):
|
||||
expeditiondays = self.expeditionday_set.filter(date=date)
|
||||
if expeditiondays:
|
||||
assert len(expeditiondays) == 1
|
||||
return expeditiondays[0]
|
||||
res = ExpeditionDay(expedition=self, date=date)
|
||||
res.save()
|
||||
return res
|
||||
|
||||
def day_min(self):
|
||||
res = self.expeditionday_set.all()
|
||||
return res and res[0] or None
|
||||
|
||||
def day_max(self):
|
||||
res = self.expeditionday_set.all()
|
||||
return res and res[len(res) - 1] or None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
|
||||
expedition = models.ForeignKey("Expedition")
|
||||
date = models.DateField()
|
||||
|
||||
class Meta:
|
||||
ordering = ('date',)
|
||||
|
||||
def GetPersonTrip(self, personexpedition):
|
||||
personexpeditions = self.persontrip_set.filter(expeditionday=self)
|
||||
return personexpeditions and personexpeditions[0] or None
|
||||
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
class Person(TroggleModel):
|
||||
first_name = models.CharField(max_length=100)
|
||||
last_name = models.CharField(max_length=100)
|
||||
is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
|
||||
mug_shot = models.CharField(max_length=100, blank=True,null=True)
|
||||
blurb = models.TextField(blank=True,null=True)
|
||||
|
||||
#href = models.CharField(max_length=200)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
|
||||
#the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
|
||||
#notability = models.FloatField() # for listing the top 20 people
|
||||
#bisnotable = models.BooleanField(default=False)
|
||||
user = models.OneToOneField(User, null=True, blank=True)
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "People"
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
def __unicode__(self):
|
||||
if self.last_name:
|
||||
return "%s %s" % (self.first_name, self.last_name)
|
||||
return self.first_name
|
||||
|
||||
|
||||
def notability(self):
|
||||
notability = Decimal(0)
|
||||
for personexpedition in self.personexpedition_set.all():
|
||||
if not personexpedition.is_guest:
|
||||
notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
|
||||
return notability
|
||||
|
||||
def bisnotable(self):
|
||||
return self.notability() > Decimal(1)/Decimal(3)
|
||||
|
||||
def surveyedleglength(self):
|
||||
return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])
|
||||
|
||||
def first(self):
|
||||
return self.personexpedition_set.order_by('-expedition')[0]
|
||||
def last(self):
|
||||
return self.personexpedition_set.order_by('expedition')[0]
|
||||
|
||||
#def Sethref(self):
|
||||
#if self.last_name:
|
||||
#self.href = self.first_name.lower() + "_" + self.last_name.lower()
|
||||
#self.orderref = self.last_name + " " + self.first_name
|
||||
#else:
|
||||
# self.href = self.first_name.lower()
|
||||
#self.orderref = self.first_name
|
||||
#self.notability = 0.0 # set temporarily
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
#
|
||||
class PersonExpedition(TroggleModel):
|
||||
expedition = models.ForeignKey(Expedition)
|
||||
person = models.ForeignKey(Person)
|
||||
slugfield = models.SlugField(max_length=50,blank=True,null=True)
|
||||
|
||||
is_guest = models.BooleanField(default=False)
|
||||
COMMITTEE_CHOICES = (
|
||||
('leader','Expo leader'),
|
||||
('medical','Expo medical officer'),
|
||||
('treasurer','Expo treasurer'),
|
||||
('sponsorship','Expo sponsorship coordinator'),
|
||||
('research','Expo research coordinator'),
|
||||
)
|
||||
expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
|
||||
nickname = models.CharField(max_length=100,blank=True,null=True)
|
||||
|
||||
def GetPersonroles(self):
|
||||
res = [ ]
|
||||
for personrole in self.personrole_set.order_by('survexblock'):
|
||||
if res and res[-1]['survexpath'] == personrole.survexblock.survexpath:
|
||||
res[-1]['roles'] += ", " + str(personrole.role)
|
||||
else:
|
||||
res.append({'date':personrole.survexblock.date, 'survexpath':personrole.survexblock.survexpath, 'roles':str(personrole.role)})
|
||||
return res
|
||||
|
||||
class Meta:
|
||||
ordering = ('-expedition',)
|
||||
#order_with_respect_to = 'expedition'
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.person, self.expedition)
|
||||
|
||||
|
||||
#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
|
||||
def name(self):
|
||||
if self.nickname:
|
||||
return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
|
||||
if self.person.last_name:
|
||||
return "%s %s" % (self.person.first_name, self.person.last_name)
|
||||
return self.person.first_name
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))
|
||||
|
||||
def surveyedleglength(self):
|
||||
survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
|
||||
return sum([survexblock.totalleglength for survexblock in set(survexblocks)])
|
||||
|
||||
# would prefer to return actual person trips so we could link to first and last ones
|
||||
def day_min(self):
|
||||
res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
|
||||
return res["day_min"]
|
||||
|
||||
def day_max(self):
|
||||
res = self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))
|
||||
return res["day_max"]
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
|
||||
date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
|
||||
expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
|
||||
expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
|
||||
#author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
|
||||
# Re: the above- so this field should be "typist" or something, not "author". - AC 15 jun 09
|
||||
#MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author, in the rare it was not redundanty and of actually interest it could be added to the text.
|
||||
title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
|
||||
cave_slug = models.SlugField(max_length=50)
|
||||
place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
|
||||
text = models.TextField()
|
||||
slug = models.SlugField(max_length=50)
|
||||
filename = models.CharField(max_length=200,null=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Logbook Entries"
|
||||
# several PersonTrips point in to this object
|
||||
ordering = ('-date',)
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
|
||||
return CaveSlug.objects.get(slug = self.cave_slug).cave
|
||||
return super(LogbookEntry, self).__getattribute__(item)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "cave" in kwargs.keys():
|
||||
if kwargs["cave"] is not None:
|
||||
kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
|
||||
kwargs.pop("cave")
|
||||
return super(LogbookEntry, self).__init__(*args, **kwargs)
|
||||
|
||||
def isLogbookEntry(self): # Function used in templates
|
||||
return True
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.date, self.title)
|
||||
|
||||
def get_next_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id+1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id-1)
|
||||
|
||||
def new_QM_number(self):
|
||||
"""Returns """
|
||||
if self.cave:
|
||||
nextQMnumber=self.cave.new_QM_number(self.date.year)
|
||||
else:
|
||||
return none
|
||||
return nextQMnumber
|
||||
|
||||
def new_QM_found_link(self):
|
||||
"""Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
|
||||
return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) +'&number=' + str(self.new_QM_number())
|
||||
|
||||
def DayIndex(self):
|
||||
return list(self.expeditionday.logbookentry_set.all()).index(self)
|
||||
|
||||
#
|
||||
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
|
||||
#
|
||||
class PersonTrip(TroggleModel):
|
||||
personexpedition = models.ForeignKey("PersonExpedition",null=True)
|
||||
|
||||
#expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
|
||||
#date = models.DateField() #MJG wants to KILL THIS (redundant information)
|
||||
time_underground = models.FloatField(help_text="In decimal hours")
|
||||
logbook_entry = models.ForeignKey(LogbookEntry)
|
||||
is_logbook_entry_author = models.BooleanField(default=False)
|
||||
|
||||
|
||||
# sequencing by person (difficult to solve locally)
|
||||
#persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use funstion persontrip_next_auto)
|
||||
#persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS(and use funstion persontrip_prev_auto)
|
||||
|
||||
def persontrip_next(self):
|
||||
futurePTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
|
||||
if len(futurePTs) > 0:
|
||||
return futurePTs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def persontrip_prev(self):
|
||||
pastPTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__lt = self.logbook_entry.date).order_by('-logbook_entry__date').all()
|
||||
if len(pastPTs) > 0:
|
||||
return pastPTs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def place(self):
|
||||
return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
|
||||
|
||||
|
||||
|
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
|
||||
short_name = models.CharField(max_length=100)
|
||||
name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True,null=True)
|
||||
parent = models.ForeignKey('Area', blank=True, null=True)
|
||||
def __unicode__(self):
|
||||
if self.parent:
|
||||
return unicode(self.parent) + u" - " + unicode(self.short_name)
|
||||
else:
|
||||
return unicode(self.short_name)
|
||||
def kat_area(self):
|
||||
if self.short_name in ["1623", "1626"]:
|
||||
return self.short_name
|
||||
elif self.parent:
|
||||
return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(models.Model):
|
||||
cave = models.ForeignKey('Cave')
|
||||
entrance = models.ForeignKey('Entrance')
|
||||
entrance_letter = models.CharField(max_length=20,blank=True,null=True)
|
||||
def __unicode__(self):
|
||||
return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class CaveSlug(models.Model):
|
||||
cave = models.ForeignKey('Cave')
|
||||
slug = models.SlugField(max_length=50, unique = True)
|
||||
primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Cave(TroggleModel):
|
||||
# too much here perhaps,
|
||||
official_name = models.CharField(max_length=160)
|
||||
area = models.ManyToManyField(Area, blank=True, null=True)
|
||||
kataster_code = models.CharField(max_length=20,blank=True,null=True)
|
||||
kataster_number = models.CharField(max_length=10,blank=True, null=True)
|
||||
unofficial_number = models.CharField(max_length=60,blank=True, null=True)
|
||||
entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
equipment = models.TextField(blank=True,null=True)
|
||||
references = models.TextField(blank=True,null=True)
|
||||
survey = models.TextField(blank=True,null=True)
|
||||
kataster_status = models.TextField(blank=True,null=True)
|
||||
underground_centre_line = models.TextField(blank=True,null=True)
|
||||
notes = models.TextField(blank=True,null=True)
|
||||
length = models.CharField(max_length=100,blank=True,null=True)
|
||||
depth = models.CharField(max_length=100,blank=True,null=True)
|
||||
extent = models.CharField(max_length=100,blank=True,null=True)
|
||||
survex_file = models.CharField(max_length=100,blank=True,null=True)
|
||||
description_file = models.CharField(max_length=200,blank=True,null=True)
|
||||
url = models.CharField(max_length=200,blank=True,null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
|
||||
|
||||
#class Meta:
|
||||
# unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
|
||||
# FIXME Kataster Areas and CUCC defined sub areas need seperating
|
||||
|
||||
|
||||
#href = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
ordering = ('kataster_code', 'unofficial_number')
|
||||
|
||||
def hassurvey(self):
|
||||
if not self.underground_centre_line:
|
||||
return "No"
|
||||
if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
|
||||
return "Yes"
|
||||
return "Missing"
|
||||
|
||||
def hassurveydata(self):
|
||||
if not self.underground_centre_line:
|
||||
return "No"
|
||||
if self.survex_file:
|
||||
return "Yes"
|
||||
return "Missing"
|
||||
|
||||
def slug(self):
|
||||
primarySlugs = self.caveslug_set.filter(primary = True)
|
||||
if primarySlugs:
|
||||
return primarySlugs[0].slug
|
||||
else:
|
||||
slugs = self.caveslug_set.filter()
|
||||
if slugs:
|
||||
return slugs[0].slug
|
||||
|
||||
def ours(self):
|
||||
return bool(re.search(r'CUCC', self.explorers))
|
||||
|
||||
def reference(self):
|
||||
if self.kataster_number:
|
||||
return "%s-%s" % (self.kat_area(), self.kataster_number)
|
||||
else:
|
||||
return "%s-%s" % (self.kat_area(), self.unofficial_number)
|
||||
|
||||
def get_absolute_url(self):
|
||||
if self.kataster_number:
|
||||
href = self.kataster_number
|
||||
elif self.unofficial_number:
|
||||
href = self.unofficial_number
|
||||
else:
|
||||
href = official_name.lower()
|
||||
#return settings.URL_ROOT + '/cave/' + href + '/'
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))
|
||||
|
||||
def __unicode__(self, sep = u": "):
|
||||
return unicode(self.slug())
|
||||
|
||||
def get_QMs(self):
|
||||
return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())
|
||||
|
||||
def new_QM_number(self, year=datetime.date.today().year):
|
||||
"""Given a cave and the current year, returns the next QM number."""
|
||||
try:
|
||||
res=QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
|
||||
except IndexError:
|
||||
return 1
|
||||
return res.number+1
|
||||
|
||||
def kat_area(self):
|
||||
for a in self.area.all():
|
||||
if a.kat_area():
|
||||
return a.kat_area()
|
||||
|
||||
def entrances(self):
|
||||
return CaveAndEntrance.objects.filter(cave=self)
|
||||
|
||||
def singleentrance(self):
|
||||
return len(CaveAndEntrance.objects.filter(cave=self)) == 1
|
||||
|
||||
def entrancelist(self):
|
||||
rs = []
|
||||
res = ""
|
||||
for e in CaveAndEntrance.objects.filter(cave=self):
|
||||
rs.append(e.entrance_letter)
|
||||
rs.sort()
|
||||
prevR = None
|
||||
n = 0
|
||||
for r in rs:
|
||||
if prevR:
|
||||
if chr(ord(prevR) + 1 ) == r:
|
||||
prevR = r
|
||||
n += 1
|
||||
else:
|
||||
if n == 0:
|
||||
res += ", " + prevR
|
||||
else:
|
||||
res += "–" + prevR
|
||||
else:
|
||||
prevR = r
|
||||
n = 0
|
||||
res += r
|
||||
if n == 0:
|
||||
res += ", " + prevR
|
||||
else:
|
||||
res += "–" + prevR
|
||||
return res
|
||||
|
||||
def writeDataFile(self):
|
||||
try:
|
||||
f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
|
||||
except:
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
|
||||
t = loader.get_template('dataformat/cave.xml')
|
||||
c = Context({'cave': self})
|
||||
u = t.render(c)
|
||||
u8 = u.encode("utf-8")
|
||||
f.write(u8)
|
||||
f.close()
|
||||
|
||||
def getArea(self):
|
||||
areas = self.area.all()
|
||||
lowestareas = list(areas)
|
||||
for area in areas:
|
||||
if area.parent in areas:
|
||||
try:
|
||||
lowestareas.remove(area.parent)
|
||||
except:
|
||||
pass
|
||||
return lowestareas[0]
|
||||
|
||||
def getCaveByReference(reference):
|
||||
areaname, code = reference.split("-", 1)
|
||||
print(areaname, code)
|
||||
area = Area.objects.get(short_name = areaname)
|
||||
print(area)
|
||||
foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
|
||||
print(list(foundCaves))
|
||||
assert len(foundCaves) == 1
|
||||
return foundCaves[0]
|
||||
|
||||
class OtherCaveName(TroggleModel):
|
||||
name = models.CharField(max_length=160)
|
||||
cave = models.ForeignKey(Cave)
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
class EntranceSlug(models.Model):
|
||||
entrance = models.ForeignKey('Entrance')
|
||||
slug = models.SlugField(max_length=50, unique = True)
|
||||
primary = models.BooleanField(default=False)
|
||||
|
||||
class Entrance(TroggleModel):
|
||||
name = models.CharField(max_length=100, blank=True,null=True)
|
||||
entrance_description = models.TextField(blank=True,null=True)
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
map_description = models.TextField(blank=True,null=True)
|
||||
location_description = models.TextField(blank=True,null=True)
|
||||
approach = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
photo = models.TextField(blank=True,null=True)
|
||||
MARKING_CHOICES = (
|
||||
('P', 'Paint'),
|
||||
('P?', 'Paint (?)'),
|
||||
('T', 'Tag'),
|
||||
('T?', 'Tag (?)'),
|
||||
('R', 'Needs Retag'),
|
||||
('S', 'Spit'),
|
||||
('S?', 'Spit (?)'),
|
||||
('U', 'Unmarked'),
|
||||
('?', 'Unknown'))
|
||||
marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
|
||||
marking_comment = models.TextField(blank=True,null=True)
|
||||
FINDABLE_CHOICES = (
|
||||
('?', 'To be confirmed ...'),
|
||||
('S', 'Coordinates'),
|
||||
('L', 'Lost'),
|
||||
('R', 'Refindable'))
|
||||
findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
|
||||
findability_description = models.TextField(blank=True,null=True)
|
||||
alt = models.TextField(blank=True, null=True)
|
||||
northing = models.TextField(blank=True, null=True)
|
||||
easting = models.TextField(blank=True, null=True)
|
||||
tag_station = models.TextField(blank=True, null=True)
|
||||
exact_station = models.TextField(blank=True, null=True)
|
||||
other_station = models.TextField(blank=True, null=True)
|
||||
other_description = models.TextField(blank=True,null=True)
|
||||
bearings = models.TextField(blank=True,null=True)
|
||||
url = models.CharField(max_length=200,blank=True,null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
cached_primary_slug = models.CharField(max_length=200,blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.slug())
|
||||
|
||||
def exact_location(self):
|
||||
return SurvexStation.objects.lookup(self.exact_station)
|
||||
def other_location(self):
|
||||
return SurvexStation.objects.lookup(self.other_station)
|
||||
|
||||
|
||||
def find_location(self):
|
||||
r = {'': 'To be entered ',
|
||||
'?': 'To be confirmed:',
|
||||
'S': '',
|
||||
'L': 'Lost:',
|
||||
'R': 'Refindable:'}[self.findability]
|
||||
if self.tag_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.tag_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
|
||||
except:
|
||||
return r + "%s Tag Station not in dataset" % self.tag_station
|
||||
if self.exact_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.exact_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
|
||||
except:
|
||||
return r + "%s Exact Station not in dataset" % self.tag_station
|
||||
if self.other_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.other_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
|
||||
except:
|
||||
return r + "%s Other Station not in dataset" % self.tag_station
|
||||
if self.FINDABLE_CHOICES == "S":
|
||||
r += "ERROR, Entrance has been surveyed but has no survex point"
|
||||
if self.bearings:
|
||||
return r + self.bearings
|
||||
return r
|
||||
|
||||
def best_station(self):
|
||||
if self.tag_station:
|
||||
return self.tag_station
|
||||
if self.exact_station:
|
||||
return self.exact_station
|
||||
if self.other_station:
|
||||
return self.other_station
|
||||
|
||||
def has_photo(self):
|
||||
if self.photo:
|
||||
if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
|
||||
return "Yes"
|
||||
else:
|
||||
return "Missing"
|
||||
else:
|
||||
return "No"
|
||||
|
||||
def marking_val(self):
|
||||
for m in self.MARKING_CHOICES:
|
||||
if m[0] == self.marking:
|
||||
return m[1]
|
||||
def findability_val(self):
|
||||
for f in self.FINDABLE_CHOICES:
|
||||
if f[0] == self.findability:
|
||||
return f[1]
|
||||
|
||||
def tag(self):
|
||||
return SurvexStation.objects.lookup(self.tag_station)
|
||||
|
||||
def needs_surface_work(self):
|
||||
return self.findability != "S" or not self.has_photo or self.marking != "T"
|
||||
|
||||
def get_absolute_url(self):
|
||||
|
||||
ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
|
||||
if ancestor_titles:
|
||||
res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
|
||||
|
||||
else:
|
||||
res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
|
||||
|
||||
return res
|
||||
|
||||
def slug(self):
|
||||
if not self.cached_primary_slug:
|
||||
primarySlugs = self.entranceslug_set.filter(primary = True)
|
||||
if primarySlugs:
|
||||
self.cached_primary_slug = primarySlugs[0].slug
|
||||
self.save()
|
||||
else:
|
||||
slugs = self.entranceslug_set.filter()
|
||||
if slugs:
|
||||
self.cached_primary_slug = slugs[0].slug
|
||||
self.save()
|
||||
return self.cached_primary_slug
|
||||
|
||||
def writeDataFile(self):
|
||||
try:
|
||||
f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
|
||||
except:
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
|
||||
t = loader.get_template('dataformat/entrance.xml')
|
||||
c = Context({'entrance': self})
|
||||
u = t.render(c)
|
||||
u8 = u.encode("utf-8")
|
||||
f.write(u8)
|
||||
f.close()
|
||||
|
||||
class CaveDescription(TroggleModel):
|
||||
short_name = models.CharField(max_length=50, unique = True)
|
||||
long_name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True,null=True)
|
||||
linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
|
||||
linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
|
||||
linked_qms = models.ManyToManyField("QM", blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
if self.long_name:
|
||||
return unicode(self.long_name)
|
||||
else:
|
||||
return unicode(self.short_name)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))
|
||||
|
||||
def save(self):
|
||||
"""
|
||||
Overridden save method which stores wikilinks in text as links in database.
|
||||
"""
|
||||
super(CaveDescription, self).save()
|
||||
qm_list=get_related_by_wikilinks(self.description)
|
||||
for qm in qm_list:
|
||||
self.linked_qms.add(qm)
|
||||
super(CaveDescription, self).save()
|
||||
|
||||
class NewSubCave(TroggleModel):
|
||||
name = models.CharField(max_length=200, unique = True)
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
class QM(TroggleModel):
|
||||
#based on qm.csv in trunk/expoweb/1623/204 which has the fields:
|
||||
#"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
|
||||
found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
|
||||
ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
|
||||
#cave = models.ForeignKey(Cave)
|
||||
#expedition = models.ForeignKey(Expedition)
|
||||
|
||||
number = models.IntegerField(help_text="this is the sequential number in the year", )
|
||||
GRADE_CHOICES=(
|
||||
('A', 'A: Large obvious lead'),
|
||||
('B', 'B: Average lead'),
|
||||
('C', 'C: Tight unpromising lead'),
|
||||
('D', 'D: Dig'),
|
||||
('X', 'X: Unclimbable aven')
|
||||
)
|
||||
grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
|
||||
location_description = models.TextField(blank=True)
|
||||
#should be a foreignkey to surveystation
|
||||
nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
|
||||
nearest_station = models.CharField(max_length=200,blank=True,null=True)
|
||||
area = models.CharField(max_length=100,blank=True,null=True)
|
||||
completion_description = models.TextField(blank=True,null=True)
|
||||
comment=models.TextField(blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s %s" % (self.code(), self.grade)
|
||||
|
||||
def code(self):
|
||||
return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)
|
||||
|
||||
def get_absolute_url(self):
|
||||
#return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))
|
||||
|
||||
def get_next_by_id(self):
|
||||
return QM.objects.get(id=self.id+1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
return QM.objects.get(id=self.id-1)
|
||||
|
||||
def wiki_link(self):
|
||||
return u"%s%s%s" % ('[[QM:',self.code(),']]')
|
||||
|
||||
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
|
||||
class DPhoto(TroggleImageModel):
|
||||
caption = models.CharField(max_length=1000,blank=True,null=True)
|
||||
contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
|
||||
contains_person = models.ManyToManyField(Person,blank=True,null=True)
|
||||
file = models.ImageField(storage=photoFileStorage, upload_to='.',)
|
||||
is_mugshot = models.BooleanField(default=False)
|
||||
contains_cave = models.ForeignKey(Cave,blank=True,null=True)
|
||||
contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
|
||||
#nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
|
||||
nearest_QM = models.ForeignKey(QM,blank=True,null=True)
|
||||
lon_utm = models.FloatField(blank=True,null=True)
|
||||
lat_utm = models.FloatField(blank=True,null=True)
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'core.imagekit_specs'
|
||||
cache_dir = 'thumbs'
|
||||
image_field = 'file'
|
||||
|
||||
#content_type = models.ForeignKey(ContentType)
|
||||
#object_id = models.PositiveIntegerField()
|
||||
#location = generic.GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
def __unicode__(self):
|
||||
return self.caption
|
||||
|
||||
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
|
||||
def get_scan_path(instance, filename):
|
||||
year=instance.survey.expedition.year
|
||||
#print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
|
||||
number=str(instance.survey.wallet_number)
|
||||
if str(instance.survey.wallet_letter) != "None":
|
||||
number=str(instance.survey.wallet_letter) + number #two strings formatting because convention is 2009#01 or 2009#X01
|
||||
return os.path.join('./',year,year+r'#'+number,str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')
|
||||
|
||||
class ScannedImage(TroggleImageModel):
|
||||
file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
|
||||
scanned_by = models.ForeignKey(Person,blank=True, null=True)
|
||||
scanned_on = models.DateField(null=True)
|
||||
survey = models.ForeignKey('Survey')
|
||||
contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
|
||||
number_in_wallet = models.IntegerField(null=True)
|
||||
lon_utm = models.FloatField(blank=True,null=True)
|
||||
lat_utm = models.FloatField(blank=True,null=True)
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'core.imagekit_specs'
|
||||
cache_dir = 'thumbs'
|
||||
image_field = 'file'
|
||||
#content_type = models.ForeignKey(ContentType)
|
||||
#object_id = models.PositiveIntegerField()
|
||||
#location = generic.GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
#This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
|
||||
def correctURL(self):
|
||||
return string.replace(self.file.url,r'#',r'%23')
|
||||
|
||||
def __unicode__(self):
|
||||
return get_scan_path(self,'')
|
||||
|
||||
class Survey(TroggleModel):
|
||||
expedition = models.ForeignKey('Expedition') #REDUNDANT (logbook_entry)
|
||||
wallet_number = models.IntegerField(blank=True,null=True)
|
||||
wallet_letter = models.CharField(max_length=1,blank=True,null=True)
|
||||
comments = models.TextField(blank=True,null=True)
|
||||
location = models.CharField(max_length=400,blank=True,null=True) #REDUNDANT
|
||||
subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
|
||||
#notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
|
||||
survex_block = models.OneToOneField('SurvexBlock',blank=True, null=True)
|
||||
logbook_entry = models.ForeignKey('LogbookEntry')
|
||||
centreline_printed_on = models.DateField(blank=True, null=True)
|
||||
centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
|
||||
#sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
|
||||
tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
|
||||
tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
|
||||
integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
|
||||
integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
|
||||
rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)
|
||||
def __unicode__(self):
|
||||
return self.expedition.year+"#"+"%02d" % int(self.wallet_number)
|
||||
|
||||
def notes(self):
|
||||
return self.scannedimage_set.filter(contents='notes')
|
||||
|
||||
def plans(self):
|
||||
return self.scannedimage_set.filter(contents='plan')
|
||||
|
||||
def elevations(self):
|
||||
return self.scannedimage_set.filter(contents='elevation')
|
||||
|
||||
@@ -1,864 +0,0 @@
|
||||
import urllib, urlparse, string, os, datetime, logging, re
|
||||
import subprocess
|
||||
from django.forms import ModelForm
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import *
|
||||
|
||||
from troggle.core.models_millenial import *
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
|
||||
found=re.findall(settings.QM_PATTERN,wiki_text)
|
||||
res=[]
|
||||
for wikilink in found:
|
||||
qmdict={'urlroot':settings.URL_ROOT,'cave':wikilink[2],'year':wikilink[1],'number':wikilink[3]}
|
||||
try:
|
||||
cave_slugs = CaveSlug.objects.filter(cave__kataster_number = qmdict['cave'])
|
||||
qm=QM.objects.get(found_by__cave_slug__in = cave_slugs,
|
||||
found_by__date__year = qmdict['year'],
|
||||
number = qmdict['number'])
|
||||
res.append(qm)
|
||||
except QM.DoesNotExist:
|
||||
print('fail on '+str(wikilink))
|
||||
|
||||
return res
|
||||
|
||||
try:
|
||||
logging.basicConfig(level=logging.DEBUG,
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
except:
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
logging.basicConfig(level=logging.DEBUG,
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
class TroggleModel(models.Model):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
non_public = models.BooleanField(default=False)
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
def get_admin_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class TroggleImageModel(ImageModel):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
def get_admin_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))
|
||||
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.year
|
||||
|
||||
class Meta:
|
||||
ordering = ('-year',)
|
||||
get_latest_by = 'year'
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))
|
||||
|
||||
# construction function. should be moved out
|
||||
def get_expedition_day(self, date):
|
||||
expeditiondays = self.expeditionday_set.filter(date=date)
|
||||
if expeditiondays:
|
||||
assert len(expeditiondays) == 1
|
||||
return expeditiondays[0]
|
||||
res = ExpeditionDay(expedition=self, date=date)
|
||||
res.save()
|
||||
return res
|
||||
|
||||
def day_min(self):
|
||||
res = self.expeditionday_set.all()
|
||||
return res and res[0] or None
|
||||
|
||||
def day_max(self):
|
||||
res = self.expeditionday_set.all()
|
||||
return res and res[len(res) - 1] or None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
|
||||
expedition = models.ForeignKey("Expedition")
|
||||
date = models.DateField()
|
||||
|
||||
class Meta:
|
||||
ordering = ('date',)
|
||||
|
||||
def GetPersonTrip(self, personexpedition):
|
||||
personexpeditions = self.persontrip_set.filter(expeditionday=self)
|
||||
return personexpeditions and personexpeditions[0] or None
|
||||
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
class Person(TroggleModel):
|
||||
first_name = models.CharField(max_length=100)
|
||||
last_name = models.CharField(max_length=100)
|
||||
is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
|
||||
mug_shot = models.CharField(max_length=100, blank=True,null=True)
|
||||
blurb = models.TextField(blank=True,null=True)
|
||||
|
||||
#href = models.CharField(max_length=200)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
|
||||
#the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
|
||||
#notability = models.FloatField() # for listing the top 20 people
|
||||
#bisnotable = models.BooleanField(default=False)
|
||||
user = models.OneToOneField(User, null=True, blank=True)
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "People"
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
def __unicode__(self):
|
||||
if self.last_name:
|
||||
return "%s %s" % (self.first_name, self.last_name)
|
||||
return self.first_name
|
||||
|
||||
|
||||
def notability(self):
|
||||
notability = Decimal(0)
|
||||
for personexpedition in self.personexpedition_set.all():
|
||||
if not personexpedition.is_guest:
|
||||
notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
|
||||
return notability
|
||||
|
||||
def bisnotable(self):
|
||||
return self.notability() > Decimal(1)/Decimal(3)
|
||||
|
||||
def surveyedleglength(self):
|
||||
return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])
|
||||
|
||||
def first(self):
|
||||
return self.personexpedition_set.order_by('-expedition')[0]
|
||||
def last(self):
|
||||
return self.personexpedition_set.order_by('expedition')[0]
|
||||
|
||||
#def Sethref(self):
|
||||
#if self.last_name:
|
||||
#self.href = self.first_name.lower() + "_" + self.last_name.lower()
|
||||
#self.orderref = self.last_name + " " + self.first_name
|
||||
#else:
|
||||
# self.href = self.first_name.lower()
|
||||
#self.orderref = self.first_name
|
||||
#self.notability = 0.0 # set temporarily
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
#
|
||||
class PersonExpedition(TroggleModel):
|
||||
expedition = models.ForeignKey(Expedition)
|
||||
person = models.ForeignKey(Person)
|
||||
slugfield = models.SlugField(max_length=50,blank=True,null=True)
|
||||
|
||||
is_guest = models.BooleanField(default=False)
|
||||
COMMITTEE_CHOICES = (
|
||||
('leader','Expo leader'),
|
||||
('medical','Expo medical officer'),
|
||||
('treasurer','Expo treasurer'),
|
||||
('sponsorship','Expo sponsorship coordinator'),
|
||||
('research','Expo research coordinator'),
|
||||
)
|
||||
expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
|
||||
nickname = models.CharField(max_length=100,blank=True,null=True)
|
||||
|
||||
def GetPersonroles(self):
|
||||
res = [ ]
|
||||
for personrole in self.personrole_set.order_by('survexblock'):
|
||||
if res and res[-1]['survexpath'] == personrole.survexblock.survexpath:
|
||||
res[-1]['roles'] += ", " + str(personrole.role)
|
||||
else:
|
||||
res.append({'date':personrole.survexblock.date, 'survexpath':personrole.survexblock.survexpath, 'roles':str(personrole.role)})
|
||||
return res
|
||||
|
||||
class Meta:
|
||||
ordering = ('-expedition',)
|
||||
#order_with_respect_to = 'expedition'
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.person, self.expedition)
|
||||
|
||||
|
||||
#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
|
||||
def name(self):
|
||||
if self.nickname:
|
||||
return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
|
||||
if self.person.last_name:
|
||||
return "%s %s" % (self.person.first_name, self.person.last_name)
|
||||
return self.person.first_name
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))
|
||||
|
||||
def surveyedleglength(self):
|
||||
survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
|
||||
return sum([survexblock.totalleglength for survexblock in set(survexblocks)])
|
||||
|
||||
# would prefer to return actual person trips so we could link to first and last ones
|
||||
def day_min(self):
|
||||
res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
|
||||
return res["day_min"]
|
||||
|
||||
def day_max(self):
|
||||
res = self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))
|
||||
return res["day_max"]
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
|
||||
date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
|
||||
expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
|
||||
expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
|
||||
#author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
|
||||
# Re: the above- so this field should be "typist" or something, not "author". - AC 15 jun 09
|
||||
#MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author, in the rare it was not redundanty and of actually interest it could be added to the text.
|
||||
title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
|
||||
cave_slug = models.SlugField(max_length=50)
|
||||
place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
|
||||
text = models.TextField()
|
||||
slug = models.SlugField(max_length=50)
|
||||
filename = models.CharField(max_length=200,null=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Logbook Entries"
|
||||
# several PersonTrips point in to this object
|
||||
ordering = ('-date',)
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
|
||||
return CaveSlug.objects.get(slug = self.cave_slug).cave
|
||||
return super(LogbookEntry, self).__getattribute__(item)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "cave" in kwargs.keys():
|
||||
if kwargs["cave"] is not None:
|
||||
kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
|
||||
kwargs.pop("cave")
|
||||
return super(LogbookEntry, self).__init__(*args, **kwargs)
|
||||
|
||||
def isLogbookEntry(self): # Function used in templates
|
||||
return True
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.date, self.title)
|
||||
|
||||
def get_next_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id+1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id-1)
|
||||
|
||||
def new_QM_number(self):
|
||||
"""Returns """
|
||||
if self.cave:
|
||||
nextQMnumber=self.cave.new_QM_number(self.date.year)
|
||||
else:
|
||||
return none
|
||||
return nextQMnumber
|
||||
|
||||
def new_QM_found_link(self):
|
||||
"""Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
|
||||
return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) +'&number=' + str(self.new_QM_number())
|
||||
|
||||
def DayIndex(self):
|
||||
return list(self.expeditionday.logbookentry_set.all()).index(self)
|
||||
|
||||
#
|
||||
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
|
||||
#
|
||||
class PersonTrip(TroggleModel):
|
||||
personexpedition = models.ForeignKey("PersonExpedition",null=True)
|
||||
|
||||
#expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
|
||||
#date = models.DateField() #MJG wants to KILL THIS (redundant information)
|
||||
time_underground = models.FloatField(help_text="In decimal hours")
|
||||
logbook_entry = models.ForeignKey(LogbookEntry)
|
||||
is_logbook_entry_author = models.BooleanField(default=False)
|
||||
|
||||
|
||||
# sequencing by person (difficult to solve locally)
|
||||
#persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use funstion persontrip_next_auto)
|
||||
#persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS(and use funstion persontrip_prev_auto)
|
||||
|
||||
def persontrip_next(self):
|
||||
futurePTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
|
||||
if len(futurePTs) > 0:
|
||||
return futurePTs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def persontrip_prev(self):
|
||||
pastPTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__lt = self.logbook_entry.date).order_by('-logbook_entry__date').all()
|
||||
if len(pastPTs) > 0:
|
||||
return pastPTs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def place(self):
|
||||
return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
|
||||
|
||||
|
||||
|
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
|
||||
short_name = models.CharField(max_length=100)
|
||||
name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True,null=True)
|
||||
parent = models.ForeignKey('Area', blank=True, null=True)
|
||||
def __unicode__(self):
|
||||
if self.parent:
|
||||
return unicode(self.parent) + u" - " + unicode(self.short_name)
|
||||
else:
|
||||
return unicode(self.short_name)
|
||||
def kat_area(self):
|
||||
if self.short_name in ["1623", "1626"]:
|
||||
return self.short_name
|
||||
elif self.parent:
|
||||
return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(models.Model):
|
||||
cave = models.ForeignKey('Cave')
|
||||
entrance = models.ForeignKey('Entrance')
|
||||
entrance_letter = models.CharField(max_length=20,blank=True,null=True)
|
||||
def __unicode__(self):
|
||||
return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class CaveSlug(models.Model):
|
||||
cave = models.ForeignKey('Cave')
|
||||
slug = models.SlugField(max_length=50, unique = True)
|
||||
primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Cave(TroggleModel):
|
||||
# too much here perhaps,
|
||||
official_name = models.CharField(max_length=160)
|
||||
area = models.ManyToManyField(Area, blank=True, null=True)
|
||||
kataster_code = models.CharField(max_length=20,blank=True,null=True)
|
||||
kataster_number = models.CharField(max_length=10,blank=True, null=True)
|
||||
unofficial_number = models.CharField(max_length=60,blank=True, null=True)
|
||||
entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
equipment = models.TextField(blank=True,null=True)
|
||||
references = models.TextField(blank=True,null=True)
|
||||
survey = models.TextField(blank=True,null=True)
|
||||
kataster_status = models.TextField(blank=True,null=True)
|
||||
underground_centre_line = models.TextField(blank=True,null=True)
|
||||
notes = models.TextField(blank=True,null=True)
|
||||
length = models.CharField(max_length=100,blank=True,null=True)
|
||||
depth = models.CharField(max_length=100,blank=True,null=True)
|
||||
extent = models.CharField(max_length=100,blank=True,null=True)
|
||||
survex_file = models.CharField(max_length=100,blank=True,null=True)
|
||||
description_file = models.CharField(max_length=200,blank=True,null=True)
|
||||
url = models.CharField(max_length=200,blank=True,null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
|
||||
|
||||
#class Meta:
|
||||
# unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
|
||||
# FIXME Kataster Areas and CUCC defined sub areas need seperating
|
||||
|
||||
|
||||
#href = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
ordering = ('kataster_code', 'unofficial_number')
|
||||
|
||||
def hassurvey(self):
|
||||
if not self.underground_centre_line:
|
||||
return "No"
|
||||
if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
|
||||
return "Yes"
|
||||
return "Missing"
|
||||
|
||||
def hassurveydata(self):
|
||||
if not self.underground_centre_line:
|
||||
return "No"
|
||||
if self.survex_file:
|
||||
return "Yes"
|
||||
return "Missing"
|
||||
|
||||
def slug(self):
|
||||
primarySlugs = self.caveslug_set.filter(primary = True)
|
||||
if primarySlugs:
|
||||
return primarySlugs[0].slug
|
||||
else:
|
||||
slugs = self.caveslug_set.filter()
|
||||
if slugs:
|
||||
return slugs[0].slug
|
||||
|
||||
def ours(self):
|
||||
return bool(re.search(r'CUCC', self.explorers))
|
||||
|
||||
def reference(self):
|
||||
if self.kataster_number:
|
||||
return "%s-%s" % (self.kat_area(), self.kataster_number)
|
||||
else:
|
||||
return "%s-%s" % (self.kat_area(), self.unofficial_number)
|
||||
|
||||
def get_absolute_url(self):
|
||||
if self.kataster_number:
|
||||
href = self.kataster_number
|
||||
elif self.unofficial_number:
|
||||
href = self.unofficial_number
|
||||
else:
|
||||
href = official_name.lower()
|
||||
#return settings.URL_ROOT + '/cave/' + href + '/'
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))
|
||||
|
||||
def __unicode__(self, sep = u": "):
|
||||
return unicode(self.slug())
|
||||
|
||||
def get_QMs(self):
|
||||
return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())
|
||||
|
||||
def new_QM_number(self, year=datetime.date.today().year):
|
||||
"""Given a cave and the current year, returns the next QM number."""
|
||||
try:
|
||||
res=QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
|
||||
except IndexError:
|
||||
return 1
|
||||
return res.number+1

    def kat_area(self):
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            rs.append(e.entrance_letter)
        rs.sort()
        prevR = None
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            res += ", " + prevR
        else:
            res += "–" + prevR
        return res
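    # entrancelist() aims to collapse runs of consecutive entrance letters into en-dash ranges,
    # e.g. entrances lettered a, b, c and e rendering as something like "a–c, e".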

    def writeDataFile(self):
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.parent in areas:
                try:
                    lowestareas.remove(area.parent)
                except:
                    pass
        return lowestareas[0]

def getCaveByReference(reference):
    areaname, code = reference.split("-", 1)
    print(areaname, code)
    area = Area.objects.get(short_name = areaname)
    print(area)
    foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
    print(list(foundCaves))
    assert len(foundCaves) == 1
    return foundCaves[0]
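# Usage sketch (hypothetical values): getCaveByReference("1623-204") splits on the first "-",
# looks up the Area with short_name "1623", and expects exactly one matching Cave whose
# kataster_number or unofficial_number is "204".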

class OtherCaveName(TroggleModel):
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)
    def __unicode__(self):
        return unicode(self.name)

class EntranceSlug(models.Model):
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique = True)
    primary = models.BooleanField(default=False)

class Entrance(TroggleModel):
    name = models.CharField(max_length=100, blank=True,null=True)
    entrance_description = models.TextField(blank=True,null=True)
    explorers = models.TextField(blank=True,null=True)
    map_description = models.TextField(blank=True,null=True)
    location_description = models.TextField(blank=True,null=True)
    approach = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    photo = models.TextField(blank=True,null=True)
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True,null=True)
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True,null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True,null=True)
    bearings = models.TextField(blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    filename = models.CharField(max_length=200)
    cached_primary_slug = models.CharField(max_length=200,blank=True,null=True)

    def __unicode__(self):
        return unicode(self.slug())

    def exact_location(self):
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        r = {'': 'To be entered ',
             '?': 'To be confirmed:',
             'S': '',
             'L': 'Lost:',
             'R': 'Refindable:'}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Tag Station not in dataset" % self.tag_station
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Exact Station not in dataset" % self.exact_station
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
            except:
                return r + "%s Other Station not in dataset" % self.other_station
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        if self.photo:
            if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"
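    # Note: get_absolute_url() below refers to self.get_ancestors(), self.get_root() and
    # self.title, none of which are defined on Entrance in this file; it looks copied from a
    # tree-structured subcave model and would raise AttributeError if called as-is.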
    def get_absolute_url(self):
        ancestor_titles = '/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary = True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def writeDataFile(self):
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

class CaveDescription(TroggleModel):
    short_name = models.CharField(max_length=50, unique = True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
    linked_qms = models.ManyToManyField("QM", blank=True,null=True)

    def __unicode__(self):
        if self.long_name:
            return unicode(self.long_name)
        else:
            return unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.
        """
        super(CaveDescription, self).save()
        qm_list = get_related_by_wikilinks(self.description)
        for qm in qm_list:
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()

class NewSubCave(TroggleModel):
    name = models.CharField(max_length=200, unique = True)
    def __unicode__(self):
        return unicode(self.name)

class QM(TroggleModel):
    #based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    #"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
    #cave = models.ForeignKey(Cave)
    #expedition = models.ForeignKey(Expedition)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES=(
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
        )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    #should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
    nearest_station = models.CharField(max_length=200,blank=True,null=True)
    area = models.CharField(max_length=100,blank=True,null=True)
    completion_description = models.TextField(blank=True,null=True)
    comment=models.TextField(blank=True,null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))

    def get_next_by_id(self):
        return QM.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        return QM.objects.get(id=self.id-1)

    def wiki_link(self):
        return u"%s%s%s" % ('[[QM:',self.code(),']]')

photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
class DPhoto(TroggleImageModel):
    caption = models.CharField(max_length=1000,blank=True,null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
    contains_person = models.ManyToManyField(Person,blank=True,null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.',)
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave,blank=True,null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
    #nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
    nearest_QM = models.ForeignKey(QM,blank=True,null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    def __unicode__(self):
        return self.caption

scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
def get_scan_path(instance, filename):
    year = instance.survey.expedition.year
    #print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
    number = str(instance.survey.wallet_number)
    if str(instance.survey.wallet_letter) != "None":
        number = str(instance.survey.wallet_letter) + number # prepend the wallet letter: the naming convention is 2009#01 or 2009#X01
    return os.path.join('./', year, year+r'#'+number, str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')
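# For example (hypothetical record): a 'notes' scan that is image 3 in wallet 2009#X12
# (wallet_letter 'X', wallet_number 12) would be stored as ./2009/2009#X12/notes3.jpg.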

class ScannedImage(TroggleImageModel):
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person,blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    # This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is
    # to write a custom file storage backend which calls urlencode on the name for making
    # file.url but not file.path.
    def correctURL(self):
        return string.replace(self.file.url, r'#', r'%23')

    def __unicode__(self):
        return get_scan_path(self, '')
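
# Illustrative sketch only (not part of troggle): one possible shape for the custom storage
# backend suggested above correctURL(). The class name is made up, and it assumes the
# FileSystemStorage/base_url behaviour of this era of Django.
class HashEncodingFileSystemStorage(FileSystemStorage):
    """FileSystemStorage whose url() percent-encodes '#' (so wallet names like 2009#X01
    survive in links) while leaving on-disk paths untouched."""
    def url(self, name):
        # quote the name only when building the URL; path() still sees the raw name
        return urlparse.urljoin(self.base_url, urllib.quote(name.replace("\\", "/"), safe='/'))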

class Survey(TroggleModel):
    expedition = models.ForeignKey('Expedition') #REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True,null=True)
    wallet_letter = models.CharField(max_length=1,blank=True,null=True)
    comments = models.TextField(blank=True,null=True)
    location = models.CharField(max_length=400,blank=True,null=True) #REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock',blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
    #sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)

    def __unicode__(self):
        return self.expedition.year + "#" + "%02d" % int(self.wallet_number)

    def notes(self):
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        return self.scannedimage_set.filter(contents='elevation')
@@ -1,83 +0,0 @@
from django.db import models
from django.conf import settings

import troggle.core.methods_millenial as methods_millenial

#
# This file was created in 2019
# It's a result of massive frustration with the cluttered troggle database
# Maximal clarity of code was the primary goal (the previous code had very few comments)
# Maximal speed of database rebuild was the secondary goal
#

#
# The following file will tell you what fields and methods are available inside this database
# but be careful, you might miss some! ManyToMany fields can be used from the far end as well
#


#
# Naming conventions:
# (Upper/lower convention)
# Class names are written Udddd_ddd_dddM - they finish with M for backwards compatibility
# Fields/methods are written lower_lower_lower
#

class PersonM(models.Model): #instance of this class corresponds to one physical person
    name = models.CharField(max_length=100) #just name, talk to wookey if you disagree
    surveys_made = models.ManyToManyField('SurveyM', related_name='people_surveyed') #links to survey objects that this person made (made := survex says so)
    expos_attended = models.ManyToManyField('ExpeditionM', related_name='people_attended') #expos attended by this person (attended := folk.csv says so)
    logbook_entries_written = models.ManyToManyField('Logbook_entryM', related_name='people_wrote') #links to logbook chunks created by a person

class CaveM(models.Model): #instance of this class corresponds to one 'thing' that people call a cave
    entrance = models.CharField(max_length=100) #UTM string describing ONE(!) entrance. Purpose = findability
    title = models.TextField() #title given to the topmost survey in survex, numeric name otherwise, c.f. name (e.g. 'Fishface')
    name = models.TextField() #name given to the topmost survey in survex (e.g. '2017-cucc-28')
    surveys = models.ManyToManyField('SurveyM', related_name='cave_parent') #links to survey objects that this cave contains
    survex_file = models.TextField() #gives path to top level survex file
    total_length = models.FloatField() #holds total length of this cave (as given by cavern)
    total_depth = models.FloatField() #holds total depth of this cave (as given by cavern)
    description = models.TextField() #holds link to description
    date = models.TextField() #holds date of last visit
    def top_camp_distance(self): #returns distance of this cave from topcamp
        return methods_millenial.top_camp_distance(self.entrance)
    def top_camp_bearing(self): #returns bearing to this cave from topcamp in format 235.5 (float north-based azimuth)
        return methods_millenial.top_camp_bearing(self.entrance)
    def top_camp_bearing_letter(self): #returns bearing to this cave from topcamp in format e.g. 'NE'
        return methods_millenial.top_camp_bearing_letter(self.entrance)
    def lat_lon_entrance(self): #lat_lon entrance location
        return methods_millenial.lat_lon_entrance(self.entrance)
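    # Implementation note: top_camp_bearing()/top_camp_bearing_letter() live in
    # methods_millenial, which is not shown in this diff. A north-based azimuth could be
    # derived from the UTM easting/northing deltas between this entrance and the top camp,
    # e.g. math.degrees(math.atan2(d_east, d_north)) % 360, but that is only a plausible
    # sketch, not necessarily what methods_millenial does.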


class Cave_descriptionM(models.Model): #instance of this class corresponds to each of the .html files in descriptions
    #each of those holds one XML field
    slug = models.TextField()
    explorers = models.TextField()
    underground_description = models.TextField()
    equipment = models.TextField()
    references = models.TextField()
    survey = models.TextField()
    kataster_status = models.TextField()
    underground_centre_line = models.TextField()
    survex_file = models.TextField() #as given in .html file
    notes = models.TextField()



class ExpeditionM(models.Model): #instance of this class corresponds to one expo (usually one year)
    date = models.CharField(max_length=100) #date in format YYYY.MM.DD-YYYY.MM.DD


class SurveyM(models.Model): #instance of this class corresponds to one .svx file - one trip
    date = models.CharField(max_length=100) #date of the trip in format YYYY.MM.DD (dated := date given by .svx file)
    survex_file = models.TextField()

class Logbook_entryM(models.Model): #instance of this class corresponds to one bit of logbook (c.f. expo.survex.com/years/2015/logbook.html or similar)
    date = models.CharField(max_length=100) #date as typed into logbook
    contents = models.TextField() #contents of the logbook chunk

class Parser_messageM(models.Model): #instance of this class contains one error or warning message produced by any of the parsers
    parsername = models.CharField(max_length = 20) #name of parser
    content = models.TextField() #content of message
    message_type = models.CharField(max_length = 10) # [Error,Info] or similar
@@ -21,34 +21,6 @@ import settings
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import string, os, sys, subprocess
|
||||
|
||||
#
|
||||
# NEW CONTENT
|
||||
#
|
||||
|
||||
|
||||
from troggle.core.models import CaveM, Cave_descriptionM, ExpeditionM
|
||||
|
||||
def millenialcaves(request):
|
||||
#RW messing around area
|
||||
caves = CaveM.objects.all()
|
||||
descr = Cave_descriptionM.objects.all()
|
||||
return render_with_context(request,'millenialcaves.html',{'caves': caves,'descriptions' : descr})
|
||||
|
||||
def millenialdescription(request, slug):
|
||||
desc = Cave_descriptionM.objects.get(slug=slug)
|
||||
return render_with_context(request,'cave_uground_description.html', {'cave': desc})
|
||||
|
||||
def millenialpeople(request):
|
||||
expos = ExpeditionM.objects.all()
|
||||
return render_with_context(request,'peoplemillenial.html' , {'expos': expos})
|
||||
|
||||
|
||||
#
|
||||
# END NEW CONTENT
|
||||
#
|
||||
|
||||
|
||||
|
||||
def getCave(cave_id):
|
||||
"""Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm."""
|
||||
try:
|
||||
@@ -82,16 +54,16 @@ def caveindex(request):
|
||||
caves = Cave.objects.all()
|
||||
notablecavehrefs = settings.NOTABLECAVESHREFS
|
||||
notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs ]
|
||||
#caves1623 = list(Cave.objects.filter(area__short_name = "1623"))
|
||||
caves1623 = list(Cave.objects.all())
|
||||
caves1623 = list(Cave.objects.filter(area__short_name = "1623"))
|
||||
caves1626 = list(Cave.objects.filter(area__short_name = "1626"))
|
||||
caves1623.sort(caveCmp)
|
||||
caves1626.sort(caveCmp)
|
||||
return render_with_context(request,'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves':notablecaves, 'cavepage': True})
|
||||
|
||||
|
||||
|
||||
|
||||
def millenialcaves(request):
|
||||
#RW messing around area
|
||||
return HttpResponse("Test text", content_type="text/plain")
|
||||
|
||||
|
||||
|
||||
def cave3d(request, cave_id=''):
|
||||
|
||||
@@ -40,62 +40,6 @@ survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau
|
||||
|
||||
*end [surveyname]"""
|
||||
|
||||
|
||||
def millenialcaves(request):
|
||||
cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
|
||||
#cavesdircontents = { }
|
||||
|
||||
onefilecaves = [ ]
|
||||
multifilecaves = [ ]
|
||||
subdircaves = [ ]
|
||||
|
||||
millenialcaves = [ ]
|
||||
|
||||
|
||||
# go through the list and identify the contents of each cave directory
|
||||
for cavedir in os.listdir(cavesdir):
|
||||
if cavedir in ["144", "40"]: #????? RW
|
||||
continue
|
||||
|
||||
gcavedir = os.path.join(cavesdir, cavedir) #directory of a 'large' cave
|
||||
|
||||
if os.path.isdir(gcavedir) and cavedir[0] != ".":
|
||||
subdirs, subsvx = identifycavedircontents(gcavedir)
|
||||
survdirobj = [ ]
|
||||
|
||||
for lsubsvx in subsvx:
|
||||
survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
|
||||
|
||||
# caves with subdirectories
|
||||
if subdirs:
|
||||
subsurvdirs = [ ]
|
||||
for subdir in subdirs:
|
||||
dsubdirs, dsubsvx = identifycavedircontents(os.path.join(gcavedir, subdir))
|
||||
assert not dsubdirs
|
||||
lsurvdirobj = [ ]
|
||||
for lsubsvx in dsubsvx:
|
||||
lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
|
||||
subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
|
||||
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
|
||||
|
||||
# multifile caves
|
||||
elif len(survdirobj) > 1:
|
||||
multifilecaves.append((survdirobj[0], survdirobj[1:]))
|
||||
# single file caves
|
||||
else:
|
||||
#print("survdirobj = ")
|
||||
#print(survdirobj)
|
||||
onefilecaves.append(survdirobj[0])
|
||||
|
||||
caves = Cave.objects.all()
|
||||
|
||||
return render_to_response('millenialcaves.html', {'settings': settings , 'caves':caves , "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def ReplaceTabs(stext):
|
||||
res = [ ]
|
||||
|
||||
@@ -28,7 +28,8 @@ def reload_db():
|
||||
cursor.execute("CREATE DATABASE %s" % databasename)
|
||||
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
|
||||
cursor.execute("USE %s" % databasename)
|
||||
management.call_command('syncdb', interactive=False)
|
||||
management.call_command('migrate', interactive=False)
|
||||
#management.call_command('syncdb', interactive=False)
|
||||
user = User.objects.create_user(expouser, expouseremail, expouserpass)
|
||||
user.is_staff = True
|
||||
user.is_superuser = True
|
||||
@@ -218,7 +219,7 @@ if __name__ == "__main__":
|
||||
import_descriptions()
|
||||
parse_descriptions()
|
||||
elif "survex" in sys.argv:
|
||||
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
|
||||
#management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
|
||||
import_survex()
|
||||
elif "survexpos" in sys.argv:
|
||||
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
import os
|
||||
import time
|
||||
import settings
|
||||
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
|
||||
from django.core import management
|
||||
from django.db import connection
|
||||
from django.contrib.auth.models import User
|
||||
from django.http import HttpResponse
|
||||
from django.core.urlresolvers import reverse
|
||||
from troggle.core.models import Cave, Entrance
|
||||
from troggle.core.models import PersonM, SurveyM, CaveM, ExpeditionM, Logbook_entryM, Cave_descriptionM
|
||||
import troggle.flatpages.models
|
||||
|
||||
databasename=settings.DATABASES['default']['NAME']
|
||||
expouser=settings.EXPOUSER
|
||||
expouserpass=settings.EXPOUSERPASS
|
||||
expouseremail=settings.EXPOUSER_EMAIL
|
||||
|
||||
def destroy():
|
||||
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
|
||||
try:
|
||||
os.remove(databasename)
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("DROP DATABASE %s" % databasename)
|
||||
cursor.execute("CREATE DATABASE %s" % databasename)
|
||||
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
|
||||
cursor.execute("USE %s" % databasename)
|
||||
management.call_command('syncdb', interactive=False)
|
||||
user = User.objects.create_user(expouser, expouseremail, expouserpass)
|
||||
user.is_staff = True
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
print('Nuked the database and rebuilt it. You savage monster')
|
||||
|
||||
def gracefull_flush():
|
||||
CaveM.objects.all().delete()
|
||||
PersonM.objects.all().delete()
|
||||
SurveyM.objects.all().delete()
|
||||
ExpeditionM.objects.all().delete()
|
||||
Logbook_entryM.objects.all().delete()
|
||||
Cave_descriptionM.objects.all().delete()
|
||||
print('Deleted contents of the database, ready to load new stuff :)')
|
||||
|
||||
def load_redirects():
|
||||
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
|
||||
f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
|
||||
f.save()
|
||||
|
||||
def load_surveys():
|
||||
SurveyM.objects.all().delete()
|
||||
import troggle.parsers.surveysM
|
||||
troggle.parsers.surveysM.load()
|
||||
|
||||
def load_caves():
|
||||
import troggle.parsers.cavesM
|
||||
troggle.parsers.cavesM.load()
|
||||
|
||||
def load_people():
|
||||
import troggle.parsers.peopleM
|
||||
troggle.parsers.peopleM.load()
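# load_links() is called by load_all() below and by the command-line handler,
# but it is not defined anywhere in this file, so those code paths raise NameError.
# A minimal sketch of what it presumably looks like, mirroring the loaders above
# (the module name troggle.parsers.linksM is an assumption):
def load_links():
    import troggle.parsers.linksM
    troggle.parsers.linksM.load()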
|
||||
|
||||
def load_all():
|
||||
load_caves()
|
||||
load_surveys()
|
||||
load_people()
|
||||
load_redirects()
|
||||
load_links()
|
||||
print('Loaded everything. Your database is ready to go :)')
|
||||
|
||||
|
||||
def help():
|
||||
print("""Usage is 'python databaseResetM.py <command>'
|
||||
where command is:
|
||||
UNLOADERS:
|
||||
gracefull_flush - flushes the new (M-style) database contents but keeps the tables in place
|
||||
destroy - destroys entire database and builds empty tables
|
||||
|
||||
LOADERS:
|
||||
load_all - loads all tables and links
|
||||
load_caves - loads all caves
|
||||
load_surveys - loads all surveys (corresponds to .svx files)
|
||||
load_people - loads all people
|
||||
load_redirects - load page redirects
|
||||
load_links - loads links between classes (run last! can't link non-existent things)
|
||||
|
||||
OTHER:
|
||||
help - displays this page
|
||||
----------------
|
||||
This is a new version of database management written by RW 2019
|
||||
----------------
|
||||
""")
|
||||
|
||||
if __name__ == "__main__":
|
||||
import troggle.core.models
|
||||
import sys
|
||||
import django
|
||||
django.setup()
|
||||
if "destroy" in sys.argv:
|
||||
destroy()
|
||||
elif "gracefull_flush" in sys.argv:
|
||||
gracefull_flush()
|
||||
|
||||
elif "load_all" in sys.argv:
|
||||
load_all()
|
||||
elif "load_caves" in sys.argv:
|
||||
load_caves()
|
||||
elif "load_surveys" in sys.argv:
|
||||
load_surveys()
|
||||
elif "load_people" in sys.argv:
|
||||
load_people()
|
||||
elif "load_redirects" in sys.argv:
|
||||
load_redirects()
|
||||
elif "load_links" in sys.argv:
|
||||
load_links()
|
||||
elif "help" in sys.argv:
|
||||
help()
|
||||
else:
|
||||
print("%s not recognised" % sys.argv)
|
||||
help()
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -40,6 +40,7 @@ mkdir -p expofiles/surveyscans
|
||||
|
||||
To start the containers run
|
||||
```bash
|
||||
$ cd ~/expo/troggle/docker
|
||||
$ docker-compose up
|
||||
```
|
||||
You will now have a working troggle but with no data. To import the data you need to access the container and run
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
"""
|
||||
|
||||
Django ImageKit
|
||||
|
||||
Author: Justin Driscoll <justin.driscoll@gmail.com>
|
||||
Version: 0.2
|
||||
|
||||
"""
|
||||
VERSION = "0.2"
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
""" Default ImageKit configuration """
|
||||
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit import processors
|
||||
|
||||
class ResizeThumbnail(processors.Resize):
|
||||
width = 100
|
||||
height = 50
|
||||
crop = True
|
||||
|
||||
class EnhanceSmall(processors.Adjustment):
|
||||
contrast = 1.2
|
||||
sharpness = 1.1
|
||||
|
||||
class SampleReflection(processors.Reflection):
|
||||
size = 0.5
|
||||
background_color = "#000000"
|
||||
|
||||
class DjangoAdminThumbnail(ImageSpec):
|
||||
access_as = 'admin_thumbnail'
|
||||
processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
|
||||
@@ -1,17 +0,0 @@
|
||||
# Required PIL classes may or may not be available from the root namespace
|
||||
# depending on the installation method used.
|
||||
try:
|
||||
import Image
|
||||
import ImageFile
|
||||
import ImageFilter
|
||||
import ImageEnhance
|
||||
import ImageColor
|
||||
except ImportError:
|
||||
try:
|
||||
from PIL import Image
|
||||
from PIL import ImageFile
|
||||
from PIL import ImageFilter
|
||||
from PIL import ImageEnhance
|
||||
from PIL import ImageColor
|
||||
except ImportError:
|
||||
raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it is installed and available on your current Python path.')
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
from django.db.models.loading import cache
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.specs import ImageSpec
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = ('Clears all ImageKit cached files.')
|
||||
args = '[apps]'
|
||||
requires_model_validation = True
|
||||
can_import_settings = True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
return flush_cache(args, options)
|
||||
|
||||
def flush_cache(apps, options):
|
||||
""" Clears the image cache
|
||||
|
||||
"""
|
||||
apps = [a.strip(',') for a in apps]
|
||||
if apps:
|
||||
print 'Flushing cache for %s...' % ', '.join(apps)
|
||||
else:
|
||||
print 'Flushing caches...'
|
||||
|
||||
for app_label in apps:
|
||||
app = cache.get_app(app_label)
|
||||
models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]
|
||||
|
||||
for model in models:
|
||||
for obj in model.objects.all():
|
||||
for spec in model._ik.specs:
|
||||
prop = getattr(obj, spec.name(), None)
|
||||
if prop is not None:
|
||||
prop._delete()
|
||||
if spec.pre_cache:
|
||||
prop._create()
|
||||
@@ -1,136 +0,0 @@
|
||||
import os
|
||||
from datetime import datetime
|
||||
from django.conf import settings
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models
|
||||
from django.db.models.base import ModelBase
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from imagekit import specs
|
||||
from imagekit.lib import *
|
||||
from imagekit.options import Options
|
||||
from imagekit.utils import img_to_fobj
|
||||
|
||||
# Modify image file buffer size.
|
||||
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)
|
||||
|
||||
# Choice tuples for specifying the crop origin.
|
||||
# These are provided for convenience.
|
||||
CROP_HORZ_CHOICES = (
|
||||
(0, _('left')),
|
||||
(1, _('center')),
|
||||
(2, _('right')),
|
||||
)
|
||||
|
||||
CROP_VERT_CHOICES = (
|
||||
(0, _('top')),
|
||||
(1, _('center')),
|
||||
(2, _('bottom')),
|
||||
)
|
||||
|
||||
|
||||
class ImageModelBase(ModelBase):
|
||||
""" ImageModel metaclass
|
||||
|
||||
This metaclass parses IKOptions and loads the specified specification
|
||||
module.
|
||||
|
||||
"""
|
||||
def __init__(cls, name, bases, attrs):
|
||||
parents = [b for b in bases if isinstance(b, ImageModelBase)]
|
||||
if not parents:
|
||||
return
|
||||
user_opts = getattr(cls, 'IKOptions', None)
|
||||
opts = Options(user_opts)
|
||||
try:
|
||||
module = __import__(opts.spec_module, {}, {}, [''])
|
||||
except ImportError:
|
||||
raise ImportError('Unable to load imagekit config module: %s' % \
|
||||
opts.spec_module)
|
||||
for spec in [spec for spec in module.__dict__.values() \
|
||||
if isinstance(spec, type) \
|
||||
and issubclass(spec, specs.ImageSpec) \
|
||||
and spec != specs.ImageSpec]:
|
||||
setattr(cls, spec.name(), specs.Descriptor(spec))
|
||||
opts.specs.append(spec)
|
||||
setattr(cls, '_ik', opts)
|
||||
|
||||
|
||||
class ImageModel(models.Model):
|
||||
""" Abstract base class implementing all core ImageKit functionality
|
||||
|
||||
Subclasses of ImageModel are augmented with accessors for each defined
|
||||
image specification and can override the inner IKOptions class to customize
|
||||
storage locations and other options.
|
||||
|
||||
"""
|
||||
__metaclass__ = ImageModelBase
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class IKOptions:
|
||||
pass
|
||||
|
||||
def admin_thumbnail_view(self):
|
||||
if not self._imgfield:
|
||||
return None
|
||||
prop = getattr(self, self._ik.admin_thumbnail_spec, None)
|
||||
if prop is None:
|
||||
return 'An "%s" image spec has not been defined.' % \
|
||||
self._ik.admin_thumbnail_spec
|
||||
else:
|
||||
if hasattr(self, 'get_absolute_url'):
|
||||
return u'<a href="%s"><img src="%s"></a>' % \
|
||||
(self.get_absolute_url(), prop.url)
|
||||
else:
|
||||
return u'<a href="%s"><img src="%s"></a>' % \
|
||||
(self._imgfield.url, prop.url)
|
||||
admin_thumbnail_view.short_description = _('Thumbnail')
|
||||
admin_thumbnail_view.allow_tags = True
|
||||
|
||||
@property
|
||||
def _imgfield(self):
|
||||
return getattr(self, self._ik.image_field)
|
||||
|
||||
def _clear_cache(self):
|
||||
for spec in self._ik.specs:
|
||||
prop = getattr(self, spec.name())
|
||||
prop._delete()
|
||||
|
||||
def _pre_cache(self):
|
||||
for spec in self._ik.specs:
|
||||
if spec.pre_cache:
|
||||
prop = getattr(self, spec.name())
|
||||
prop._create()
|
||||
|
||||
def save(self, clear_cache=True, *args, **kwargs):
|
||||
is_new_object = self._get_pk_val() is None
|
||||
super(ImageModel, self).save(*args, **kwargs)
|
||||
if is_new_object:
|
||||
clear_cache = False
|
||||
spec = self._ik.preprocessor_spec
|
||||
if spec is not None:
|
||||
newfile = self._imgfield.storage.open(str(self._imgfield))
|
||||
img = Image.open(newfile)
|
||||
img = spec.process(img, None)
|
||||
format = img.format or 'JPEG'
|
||||
if format != 'JPEG':
|
||||
imgfile = img_to_fobj(img, format)
|
||||
else:
|
||||
imgfile = img_to_fobj(img, format,
|
||||
quality=int(spec.quality),
|
||||
optimize=True)
|
||||
content = ContentFile(imgfile.read())
|
||||
newfile.close()
|
||||
name = str(self._imgfield)
|
||||
self._imgfield.storage.delete(name)
|
||||
self._imgfield.storage.save(name, content)
|
||||
if clear_cache and self._imgfield != '':
|
||||
self._clear_cache()
|
||||
self._pre_cache()
|
||||
|
||||
def delete(self):
|
||||
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
|
||||
self._clear_cache()
|
||||
models.Model.delete(self)
|
||||
@@ -1,23 +0,0 @@
|
||||
# Imagekit options
|
||||
from imagekit import processors
|
||||
from imagekit.specs import ImageSpec
|
||||
|
||||
|
||||
class Options(object):
|
||||
""" Class handling per-model imagekit options
|
||||
|
||||
"""
|
||||
image_field = 'image'
|
||||
crop_horz_field = 'crop_horz'
|
||||
crop_vert_field = 'crop_vert'
|
||||
preprocessor_spec = None
|
||||
cache_dir = 'cache'
|
||||
save_count_as = None
|
||||
cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
|
||||
admin_thumbnail_spec = 'admin_thumbnail'
|
||||
spec_module = 'imagekit.defaults'
|
||||
|
||||
def __init__(self, opts):
|
||||
for key, value in opts.__dict__.iteritems():
|
||||
setattr(self, key, value)
|
||||
self.specs = []
|
||||
@@ -1,134 +0,0 @@
|
||||
""" Imagekit Image "ImageProcessors"
|
||||
|
||||
A processor defines a set of class variables (optional) and a
|
||||
class method named "process" which processes the supplied image using
|
||||
the class properties as settings. The process method can be overridden as well, allowing the user to define their
|
||||
own effects/processes entirely.
|
||||
|
||||
"""
|
||||
from imagekit.lib import *
|
||||
|
||||
class ImageProcessor(object):
|
||||
""" Base image processor class """
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
return image
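# A minimal sketch of a custom processor following the contract described in
# the module docstring: class attributes act as settings and a classmethod
# process() does the work. This Grayscale processor is hypothetical, not part
# of the shipped defaults.
class Grayscale(ImageProcessor):
    @classmethod
    def process(cls, image, obj=None):
        # convert to 8-bit greyscale using PIL's built-in mode conversion
        return image.convert('L')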
|
||||
|
||||
|
||||
class Adjustment(ImageProcessor):
|
||||
color = 1.0
|
||||
brightness = 1.0
|
||||
contrast = 1.0
|
||||
sharpness = 1.0
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
|
||||
factor = getattr(cls, name.lower())
|
||||
if factor != 1.0:
|
||||
image = getattr(ImageEnhance, name)(image).enhance(factor)
|
||||
return image
|
||||
|
||||
|
||||
class Reflection(ImageProcessor):
|
||||
background_color = '#FFFFFF'
|
||||
size = 0.0
|
||||
opacity = 0.6
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
# convert bgcolor string to rgb value
|
||||
background_color = ImageColor.getrgb(cls.background_color)
|
||||
# copy the original image and flip the orientation
|
||||
reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
|
||||
# create a new image filled with the bgcolor the same size
|
||||
background = Image.new("RGB", image.size, background_color)
|
||||
# calculate our alpha mask
|
||||
start = int(255 - (255 * cls.opacity)) # The start of our gradient
|
||||
steps = int(255 * cls.size) # the number of intermediate values
|
||||
increment = (255 - start) / float(steps)
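# e.g. with the SampleReflection settings (opacity=0.6, size=0.5): start=102,
# steps=127, increment~1.2, so mask rows 0..126 ramp from 102 towards 255 and
# the remaining rows are fully opaque. Note that the class default size=0.0
# would make steps zero and this division fail, so size must be overridden.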
|
||||
mask = Image.new('L', (1, 255))
|
||||
for y in range(255):
|
||||
if y < steps:
|
||||
val = int(y * increment + start)
|
||||
else:
|
||||
val = 255
|
||||
mask.putpixel((0, y), val)
|
||||
alpha_mask = mask.resize(image.size)
|
||||
# merge the reflection onto our background color using the alpha mask
|
||||
reflection = Image.composite(background, reflection, alpha_mask)
|
||||
# crop the reflection
|
||||
reflection_height = int(image.size[1] * cls.size)
|
||||
reflection = reflection.crop((0, 0, image.size[0], reflection_height))
|
||||
# create new image sized to hold both the original image and the reflection
|
||||
composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
|
||||
# paste the original image and the reflection into the composite image
|
||||
composite.paste(image, (0, 0))
|
||||
composite.paste(reflection, (0, image.size[1]))
|
||||
# return the image complete with reflection effect
|
||||
return composite
|
||||
|
||||
|
||||
class Resize(ImageProcessor):
|
||||
width = None
|
||||
height = None
|
||||
crop = False
|
||||
upscale = False
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
cur_width, cur_height = image.size
|
||||
if cls.crop:
|
||||
crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
|
||||
crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
|
||||
ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
|
||||
resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
|
||||
crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
|
||||
x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
|
||||
box_left, box_right = {
|
||||
0: (0, cls.width),
|
||||
1: (int(x_diff), int(x_diff + cls.width)),
|
||||
2: (int(crop_x), int(resize_x)),
|
||||
}[crop_horz]
|
||||
box_upper, box_lower = {
|
||||
0: (0, cls.height),
|
||||
1: (int(y_diff), int(y_diff + cls.height)),
|
||||
2: (int(crop_y), int(resize_y)),
|
||||
}[crop_vert]
|
||||
box = (box_left, box_upper, box_right, box_lower)
|
||||
image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
|
||||
else:
|
||||
if not cls.width is None and not cls.height is None:
|
||||
ratio = min(float(cls.width)/cur_width,
|
||||
float(cls.height)/cur_height)
|
||||
else:
|
||||
if cls.width is None:
|
||||
ratio = float(cls.height)/cur_height
|
||||
else:
|
||||
ratio = float(cls.width)/cur_width
|
||||
new_dimensions = (int(round(cur_width*ratio)),
|
||||
int(round(cur_height*ratio)))
|
||||
if new_dimensions[0] > cur_width or \
|
||||
new_dimensions[1] > cur_height:
|
||||
if not cls.upscale:
|
||||
return image
|
||||
image = image.resize(new_dimensions, Image.ANTIALIAS)
|
||||
return image
|
||||
|
||||
|
||||
class Transpose(ImageProcessor):
|
||||
""" Rotates or flips the image
|
||||
|
||||
Method should be one of the following strings:
|
||||
- FLIP_LEFT_RIGHT
|
||||
- FLIP_TOP_BOTTOM
|
||||
- ROTATE_90
|
||||
- ROTATE_270
|
||||
- ROTATE_180
|
||||
|
||||
"""
|
||||
method = 'FLIP_LEFT_RIGHT'
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
return image.transpose(getattr(Image, cls.method))
|
||||
@@ -1,119 +0,0 @@
|
||||
""" ImageKit image specifications
|
||||
|
||||
All imagekit specifications must inherit from the ImageSpec class. Models
|
||||
inheriting from ImageModel will be modified with a descriptor/accessor for each
|
||||
spec found.
|
||||
|
||||
"""
|
||||
import os
|
||||
from StringIO import StringIO
|
||||
from imagekit.lib import *
|
||||
from imagekit.utils import img_to_fobj
|
||||
from django.core.files.base import ContentFile
|
||||
|
||||
class ImageSpec(object):
|
||||
pre_cache = False
|
||||
quality = 70
|
||||
increment_count = False
|
||||
processors = []
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return getattr(cls, 'access_as', cls.__name__.lower())
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj):
|
||||
processed_image = image.copy()
|
||||
for proc in cls.processors:
|
||||
processed_image = proc.process(processed_image, obj)
|
||||
return processed_image
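# A minimal sketch of a concrete spec, as described in the module docstring:
# on an ImageModel whose spec_module contains this class, the processed image
# becomes reachable as instance.small_square. Both the class name and the
# accessor name are hypothetical; real examples live in imagekit.defaults.
class SmallSquare(ImageSpec):
    access_as = 'small_square'
    pre_cache = True
    processors = []  # e.g. [ResizeThumbnail, EnhanceSmall] from imagekit.defaults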
|
||||
|
||||
|
||||
class Accessor(object):
|
||||
def __init__(self, obj, spec):
|
||||
self._img = None
|
||||
self._obj = obj
|
||||
self.spec = spec
|
||||
|
||||
def _get_imgfile(self):
|
||||
format = self._img.format or 'JPEG'
|
||||
if format != 'JPEG':
|
||||
imgfile = img_to_fobj(self._img, format)
|
||||
else:
|
||||
imgfile = img_to_fobj(self._img, format,
|
||||
quality=int(self.spec.quality),
|
||||
optimize=True)
|
||||
return imgfile
|
||||
|
||||
def _create(self):
|
||||
if self._exists():
|
||||
return
|
||||
# process the original image file
|
||||
fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
|
||||
fp.seek(0)
|
||||
fp = StringIO(fp.read())
|
||||
try:
|
||||
self._img = self.spec.process(Image.open(fp), self._obj)
|
||||
# save the new image to the cache
|
||||
content = ContentFile(self._get_imgfile().read())
|
||||
self._obj._imgfield.storage.save(self.name, content)
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
def _delete(self):
|
||||
self._obj._imgfield.storage.delete(self.name)
|
||||
|
||||
def _exists(self):
|
||||
return self._obj._imgfield.storage.exists(self.name)
|
||||
|
||||
def _basename(self):
|
||||
filename, extension = \
|
||||
os.path.splitext(os.path.basename(self._obj._imgfield.name))
|
||||
return self._obj._ik.cache_filename_format % \
|
||||
{'filename': filename,
|
||||
'specname': self.spec.name(),
|
||||
'extension': extension.lstrip('.')}
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return os.path.join(self._obj._ik.cache_dir, self._basename())
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
self._create()
|
||||
if self.spec.increment_count:
|
||||
fieldname = self._obj._ik.save_count_as
|
||||
if fieldname is not None:
|
||||
current_count = getattr(self._obj, fieldname)
|
||||
setattr(self._obj, fieldname, current_count + 1)
|
||||
self._obj.save(clear_cache=False)
|
||||
return self._obj._imgfield.storage.url(self.name)
|
||||
|
||||
@property
|
||||
def file(self):
|
||||
self._create()
|
||||
return self._obj._imgfield.storage.open(self.name)
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
if self._img is None:
|
||||
self._create()
|
||||
if self._img is None:
|
||||
self._img = Image.open(self.file)
|
||||
return self._img
|
||||
|
||||
@property
|
||||
def width(self):
|
||||
return self.image.size[0]
|
||||
|
||||
@property
|
||||
def height(self):
|
||||
return self.image.size[1]
|
||||
|
||||
|
||||
class Descriptor(object):
|
||||
def __init__(self, spec):
|
||||
self._spec = spec
|
||||
|
||||
def __get__(self, obj, type=None):
|
||||
return Accessor(obj, self._spec)
|
||||
@@ -1,86 +0,0 @@
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from django.conf import settings
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models
|
||||
from django.test import TestCase
|
||||
|
||||
from imagekit import processors
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit.lib import Image
|
||||
|
||||
|
||||
class ResizeToWidth(processors.Resize):
|
||||
width = 100
|
||||
|
||||
class ResizeToHeight(processors.Resize):
|
||||
height = 100
|
||||
|
||||
class ResizeToFit(processors.Resize):
|
||||
width = 100
|
||||
height = 100
|
||||
|
||||
class ResizeCropped(ResizeToFit):
|
||||
crop = ('center', 'center')
|
||||
|
||||
class TestResizeToWidth(ImageSpec):
|
||||
access_as = 'to_width'
|
||||
processors = [ResizeToWidth]
|
||||
|
||||
class TestResizeToHeight(ImageSpec):
|
||||
access_as = 'to_height'
|
||||
processors = [ResizeToHeight]
|
||||
|
||||
class TestResizeCropped(ImageSpec):
|
||||
access_as = 'cropped'
|
||||
processors = [ResizeCropped]
|
||||
|
||||
class TestPhoto(ImageModel):
|
||||
""" Minimal ImageModel class for testing """
|
||||
image = models.ImageField(upload_to='images')
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'imagekit.tests'
|
||||
|
||||
|
||||
class IKTest(TestCase):
|
||||
""" Base TestCase class """
|
||||
def setUp(self):
|
||||
# create a test image using tempfile and PIL
|
||||
self.tmp = tempfile.TemporaryFile()
|
||||
Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
|
||||
self.tmp.seek(0)
|
||||
self.p = TestPhoto()
|
||||
self.p.image.save(os.path.basename('test.jpg'),
|
||||
ContentFile(self.tmp.read()))
|
||||
self.p.save()
|
||||
# destroy temp file
|
||||
self.tmp.close()
|
||||
|
||||
def test_setup(self):
|
||||
self.assertEqual(self.p.image.width, 800)
|
||||
self.assertEqual(self.p.image.height, 600)
|
||||
|
||||
def test_to_width(self):
|
||||
self.assertEqual(self.p.to_width.width, 100)
|
||||
self.assertEqual(self.p.to_width.height, 75)
|
||||
|
||||
def test_to_height(self):
|
||||
self.assertEqual(self.p.to_height.width, 133)
|
||||
self.assertEqual(self.p.to_height.height, 100)
|
||||
|
||||
def test_crop(self):
|
||||
self.assertEqual(self.p.cropped.width, 100)
|
||||
self.assertEqual(self.p.cropped.height, 100)
|
||||
|
||||
def test_url(self):
|
||||
tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
|
||||
self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)
|
||||
|
||||
def tearDown(self):
|
||||
# make sure image file is deleted
|
||||
path = self.p.image.path
|
||||
self.p.delete()
|
||||
self.failIf(os.path.isfile(path))
|
||||
@@ -1,15 +0,0 @@
|
||||
""" ImageKit utility functions """
|
||||
|
||||
import tempfile
|
||||
|
||||
def img_to_fobj(img, format, **kwargs):
|
||||
tmp = tempfile.TemporaryFile()
|
||||
if format != 'JPEG':
|
||||
try:
|
||||
img.save(tmp, format, **kwargs)
|
||||
tmp.seek(0)
return tmp
|
||||
except KeyError:
|
||||
pass
|
||||
img.save(tmp, format, **kwargs)
|
||||
tmp.seek(0)
|
||||
return tmp
|
||||
@@ -1,52 +0,0 @@
|
||||
body {
|
||||
all: initial;
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
div#inputf {
|
||||
display: inline-block;
|
||||
width: 300px;
|
||||
text-align: justify;
|
||||
margin-top: 0px;
|
||||
margin-bottom: 5px
|
||||
}
|
||||
|
||||
.menu, ul#links{
|
||||
display: none;
|
||||
}
|
||||
|
||||
table {
|
||||
border-spacing: 0;
|
||||
width: 100%;
|
||||
border: 1px solid #ddd;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
th {
|
||||
cursor: pointer;
|
||||
background-color: #bbb
|
||||
}
|
||||
|
||||
th, td {
|
||||
padding: 16px;
|
||||
max-height: 40px;
|
||||
}
|
||||
|
||||
tr:nth-child(even) {
|
||||
background-color: #f2f2f2
|
||||
}
|
||||
|
||||
p {
|
||||
|
||||
margin-right: 80px;
|
||||
margin-left: 80px;
|
||||
}
|
||||
|
||||
button {
|
||||
width: 300px
|
||||
}
|
||||
span#mono {
|
||||
font-family: monospace;
|
||||
background-color: #eee;
|
||||
font-size: 120%;
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
function filterTable(tablename)
|
||||
{
|
||||
table = document.getElementById(tablename);
|
||||
|
||||
mindepth = document.getElementById("CaveDepthMin").value;
|
||||
maxdepth = document.getElementById("CaveDepthMax").value;
|
||||
if(mindepth==0)mindepth=-999999;
|
||||
if(maxdepth==0)maxdepth= 999999;
|
||||
|
||||
minlength = document.getElementById("CaveLengthMin").value;
|
||||
maxlength = document.getElementById("CaveLengthMax").value;
|
||||
if(minlength==0)minlength=-999999;
|
||||
if(maxlength==0)maxlength= 999999;
|
||||
|
||||
visitdate = document.getElementById("VisitDate").value;
|
||||
|
||||
visitor = document.getElementById("Visitor").value;
|
||||
|
||||
cavename = document.getElementById("CaveName").value.toLowerCase();
|
||||
|
||||
incomplete = document.getElementById("Incomplete").checked;
|
||||
|
||||
var regexmode = false;
|
||||
if(visitor[0]=='/' && visitor[visitor.length-1]=='/')
|
||||
{
|
||||
regexmode = true;
|
||||
visitor = new RegExp(visitor.substr(1,visitor.length-2));
|
||||
}
|
||||
else
|
||||
{
|
||||
visitor = visitor.toLowerCase();
|
||||
}
|
||||
|
||||
rows = table.rows;
|
||||
for(i=1; i< rows.length; i++)
|
||||
{
|
||||
name = (rows[i].getElementsByTagName("TD")[1]).innerHTML.toLowerCase();
|
||||
|
||||
depth = (rows[i].getElementsByTagName("TD")[2]).innerHTML.toLowerCase();
|
||||
depth = Number(depth.replace(/[^0-9.]/g,''));
|
||||
|
||||
length = (rows[i].getElementsByTagName("TD")[3]).innerHTML.toLowerCase();
|
||||
length = Number(length.replace(/[^0-9.]/g,''));
|
||||
|
||||
date = (rows[i].getElementsByTagName("TD")[4]).innerHTML.toLowerCase();
|
||||
|
||||
//recentvisitor = (rows[i].getElementsByTagName("TD")[4]).innerHTML.toLowerCase();
|
||||
recentvisitor = ""
|
||||
|
||||
if(cavename != "" && !name.includes(cavename))
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
if(depth<mindepth || depth>maxdepth)
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
if(length<minlength || length>maxlength)
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
if(date < visitdate)
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
if(visitor != "" && regexmode && !visitor.test(recentvisitor))
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
if(visitor != "" && !regexmode && !recentvisitor.includes(visitor))
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
}
|
||||
|
||||
crow=rows[i].getElementsByTagName("TD");
|
||||
for(var j=0; j<crow.length; j++)
|
||||
{
|
||||
if(crow[j].innerHTML == "" && incomplete)
|
||||
{
|
||||
rows[i].style.visibility = "collapse";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
function filterTableReset(tablename)
|
||||
{
|
||||
table = document.getElementById(tablename);
|
||||
rows = table.rows;
|
||||
for(i=1; i< rows.length; i++)
|
||||
{
|
||||
rows[i].style.visibility = "visible";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function isOrdered(kvarray,numeric)
|
||||
{
|
||||
for(var i=0;i<kvarray.length-1;i++)
|
||||
{
|
||||
if(numeric==1 && Number(kvarray[i][0])>Number(kvarray[i+1][0]))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if(numeric!=1 && kvarray[i][0]>kvarray[i+1][0])
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function sortTable(n, tablename, numeric) {
|
||||
table = document.getElementById(tablename);
|
||||
rows = table.rows;
|
||||
var ordering = [];
|
||||
var i;
|
||||
|
||||
//construct key-value pairs for sorting
|
||||
for(i = 1; i < rows.length; i++) //remember header rows
|
||||
{
|
||||
key = rows[i].getElementsByTagName("TD")[n];
|
||||
key = key.innerHTML.toLowerCase();
|
||||
if(numeric==1)
|
||||
{
|
||||
key=key.replace(/[^0-9.]/g,'')
|
||||
}
|
||||
ordering.push([key,i]);
|
||||
}
|
||||
|
||||
var ascending = isOrdered(ordering,numeric);
|
||||
|
||||
//sort either numerically or alphabetically
|
||||
if(numeric==1)
|
||||
{
|
||||
ordering.sort((x,y) => Number(x[0])-Number(y[0]));
|
||||
}
|
||||
else
|
||||
{
|
||||
ordering.sort(); //sorts alphabetically
|
||||
}
|
||||
|
||||
if(ascending) ordering.reverse();
|
||||
|
||||
for(i = 0; i < ordering.length; i++) //add sorted list at the end of the table
|
||||
{
|
||||
var keyval = ordering[i];
|
||||
id = keyval[1]; //get rownumber of n^th sorted value
|
||||
cln = rows[id].cloneNode(true); //deep clone of current node
|
||||
table.insertBefore(cln,null); //add n^th row at the end
|
||||
}
|
||||
for(i = 1; i < ordering.length+1; i++) //remove unsorted nodes
|
||||
{
|
||||
table.deleteRow(1);// 0 -> header; 1 -> first row
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,213 +0,0 @@
|
||||
|
||||
import troggle.core.models as models #import models for various objects
|
||||
from django.conf import settings
|
||||
import xml.etree.ElementTree as ET #this is used to parse XML's
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
#
|
||||
# This parser has to find several things:
|
||||
# There are files of .html format in expoweb area - they contain some of the important information
|
||||
# There is a similar number of .svx files in loser are - they contain all the measurements
|
||||
#
|
||||
# Previous version was incredibly slow due to various shitty ideas about finding things
|
||||
# and over-reliance on Python when handling regular expressions; the new version delegates the heavy lifting to the shell
|
||||
# and handles only the more sophisticated bits itself
|
||||
#
|
||||
|
||||
def load():
|
||||
print('Hi! I\'m the caves parser. Ready to work')
|
||||
|
||||
print('Loading caves of 1623 area')
|
||||
loadarea('1623')
|
||||
|
||||
print('Loading caves of 1626 area')
|
||||
loadarea('1626')
|
||||
|
||||
|
||||
def loadarea(areacode):
|
||||
|
||||
if not file_exists(settings.SURVEX_DATA+'1623-and-1626.3d'):
|
||||
print('Computing master .3d file')
|
||||
bash('cavern -o'+settings.SURVEX_DATA+' '+settings.SURVEX_DATA+'1623-and-1626.svx')
|
||||
else:
|
||||
print('Loading from existing master .3d file')
|
||||
|
||||
master3d = bash('dump3d -d '+settings.SURVEX_DATA+'1623-and-1626.3d').splitlines()
|
||||
master3dN = [x for x in master3d if ('NODE' in x)] #list of stations (NODE records) of the master survex file
|
||||
master3dL = [x for x in master3d if ('LINE' in x)] #list of legs (LINE records) of the master survex file
|
||||
|
||||
print('Searching all cave dirs files')
|
||||
basedir = settings.SURVEX_DATA+'caves-'+areacode+'/'
|
||||
|
||||
cavedirs = bash("find "+basedir+" -maxdepth 1 -type d").splitlines() #this command finds all directories
|
||||
print('Obtained list of directories! (#dirs='+str(len(cavedirs))+')')
|
||||
ndirs = len(cavedirs) #remember number of dirs for nice debug output
|
||||
|
||||
for cavedir in cavedirs:
|
||||
if cavedir==basedir:
|
||||
continue #skip the basedir - a non-proper subdirectory
|
||||
cavename = bash('echo '+cavedir+' | rev | cut -f1 -d \'/\' | rev').splitlines()[0] #get final bit of the directory
|
||||
|
||||
test = bash('if [ ! -f '+cavedir+'/'+cavename+'.svx ] ; then echo MISSING; fi')#test for file existence (redundant; file_exists() below repeats the check)
|
||||
if not file_exists(cavedir+'/'+cavename+'.svx'):
|
||||
msg = models.Parser_messageM(parsername='caves',content=cavedir+'/'+cavename+' MISSING!',message_type='warn')
|
||||
print('Cave missing: '+cavename+' :(')
|
||||
msg.save()
|
||||
continue
|
||||
fullname=cavedir+'/'+cavename+'.svx'
|
||||
print('Found cave:'+cavename)
|
||||
cavernout = bash('cavern -o '+cavedir+' '+fullname) #make cavern process the thing
|
||||
if 'cavern: error:' in cavernout:
|
||||
msg = models.Parser_messageM(parsername='caves',content=cavedir+'/'+cavename+' Survex file messed up!',message_type='warn')
|
||||
print('Fucked svx'+cavename+' :(')
|
||||
msg.save()
|
||||
continue
|
||||
|
||||
cavernout = cavernout.splitlines()
|
||||
depth = float(([x for x in cavernout if ('Total vertical length' in x)][0].split()[-1])[:-2])
|
||||
length = float(([x for x in cavernout if ('Total length' in x)][0].split()[6])[:-1])
|
||||
cavefile = open(fullname,'r')
|
||||
cavefilecontents = cavefile.read().splitlines()
|
||||
surveyname = [x for x in cavefilecontents if ('*begin ') in x][0].split()[1].lower()
|
||||
try:
|
||||
title = [x for x in cavefilecontents if ('*title ') in x][0].split()[1]
|
||||
except:
|
||||
title = "Untitled"
|
||||
|
||||
relevant_nodes = [x for x in master3dN if (('['+areacode+'.'+surveyname+'.' in x) or ('['+areacode+'.'+surveyname+']' in x))]
|
||||
entrance_nodes = [x for x in relevant_nodes if 'ENTRANCE' in x]
|
||||
surface_nodes = [x for x in relevant_nodes if 'SURFACE' in x]
|
||||
location_nodes = []
|
||||
print('rel_nodes'+str(len(relevant_nodes)))
|
||||
if len(entrance_nodes) > 0:
|
||||
location_nodes = entrance_nodes
|
||||
elif len(surface_nodes) > 0:
|
||||
location_nodes = surface_nodes
|
||||
elif len(relevant_nodes) > 0:
|
||||
location_nodes = relevant_nodes
|
||||
|
||||
try:
|
||||
location = sorted(location_nodes, key = lambda y : float(y.split()[3])).pop()
|
||||
except:
|
||||
print(location_nodes)
|
||||
location = 'Not found'
|
||||
|
||||
relevant_lines = [x for x in master3dL if (('['+areacode+'.'+surveyname+'.' in x) or ('['+areacode+'.'+surveyname+']' in x))]
|
||||
try:
|
||||
lastleg = sorted(relevant_lines, key = lambda y : y.split().pop()).pop()
|
||||
except:
|
||||
lastleg = 'LINE 1900.01.01'
|
||||
try:
|
||||
lastdate = lastleg.split().pop()
|
||||
if 'STYLE' in lastdate:
|
||||
lastdate = lastleg.split()[-2] #assume the date is the token just before the trailing STYLE marker
|
||||
except:
|
||||
lastdate = '1900.01.01'
|
||||
|
||||
entrance = ' '.join(location.split()[1:3])
|
||||
print((('depth','length','surv name','entr','date'),(depth,length,surveyname,entrance,lastdate))) #sanity check print
|
||||
|
||||
|
||||
newcave = models.CaveM(
|
||||
survex_file = fullname,
|
||||
total_length = length,
|
||||
name=areacode+'.'+surveyname,
|
||||
total_depth = depth,
|
||||
date = lastdate,
|
||||
entrance = entrance)
|
||||
newcave.save()
|
||||
#end of reading survex masterfiles
|
||||
|
||||
print ("Reading cave descriptions")
|
||||
cavefiles = bash('find '+settings.CAVEDESCRIPTIONS+' -name \'*.html\'').splitlines()
|
||||
for fn in cavefiles:
|
||||
f = open(fn, "r")
|
||||
print(fn)
|
||||
contents = f.read()
|
||||
|
||||
slug = re.sub(r"\s+", "", extractXML(contents,'caveslug'))
|
||||
desc = extractXML(contents,'underground_description')
|
||||
name = slug[5:] #get survex compatible name
|
||||
area = slug[0:4]
|
||||
|
||||
print([area,name])
|
||||
|
||||
if desc==None or name==None:
|
||||
msg = models.Parser_messageM(parsername='caves',content=fn+' Description messed up!',message_type='warn')
|
||||
print('Fucked description '+fn+' :(')
|
||||
msg.save()
|
||||
continue
|
||||
|
||||
print(area+'/'+name+'/'+name+'.svx')
|
||||
|
||||
updatecave = models.CaveM.objects.filter(survex_file__icontains=area+'/'+name+'/'+name+'.svx')
|
||||
if len(updatecave)>1:
|
||||
print('Non unique solution - skipping. Name:'+name)
|
||||
elif len(updatecave)==0:
|
||||
print('Cave with no survex data:'+name)
|
||||
continue
|
||||
else: #exactly one match
|
||||
print('Adding desc:'+name)
|
||||
updatecave = updatecave[0]
|
||||
updatecave.description = '/cave/descriptionM/'+slug #area-name
|
||||
updatecave.title=name
|
||||
updatecave.save()
|
||||
|
||||
slugS = slug
|
||||
explorersS = extractXML(contents,'explorers')
|
||||
underground_descriptionS = extractXML(contents,'underground_description')
|
||||
equipmentS = extractXML(contents,'equipment')
|
||||
referencesS = extractXML(contents,'references')
|
||||
surveyS = extractXML(contents,'survey')
|
||||
kataster_statusS = extractXML(contents,'kataster_status')
|
||||
underground_centre_lineS = extractXML(contents,'underground_centre_line')
|
||||
survex_fileS = extractXML(contents,'survex_file')
|
||||
notesS = extractXML(contents,'notes')
|
||||
|
||||
|
||||
newcavedesc = models.Cave_descriptionM(
|
||||
slug = slugS,
|
||||
explorers = explorersS,
|
||||
underground_description = underground_descriptionS,
|
||||
equipment = equipmentS,
|
||||
references = referencesS,
|
||||
survey = surveyS,
|
||||
kataster_status = kataster_statusS,
|
||||
underground_centre_line = underground_centre_lineS,
|
||||
survex_file = survex_fileS,
|
||||
notes = notesS)
|
||||
newcavedesc.save()
|
||||
|
||||
|
||||
|
||||
|
||||
#end of reading cave descriptions
|
||||
|
||||
def file_exists(filename):
|
||||
test = bash('if [ ! -f '+filename+' ] ; then echo MISSING; fi')#test for file existence
|
||||
if 'MISSING' in test: #the file does not exist
|
||||
return False
|
||||
return True
|
||||
|
||||
def extractXML(contents,tag):
|
||||
#find correct lines
|
||||
lines = contents.splitlines()
|
||||
beg = [x for x in lines if ('<'+tag+'>' in x)]
|
||||
end = [x for x in lines if ('</'+tag+'>' in x)]
|
||||
if (not beg) or (not end):
|
||||
return None
|
||||
begi = lines.index(beg[0])
|
||||
endi = lines.index(end[0])
|
||||
if endi!=begi:
|
||||
segment = '\n'.join(lines[begi:endi+1])
|
||||
else:
|
||||
segment = lines[begi:endi+1][0]
|
||||
|
||||
hit = re.findall('<'+tag+'>(.*)</'+tag+'>', segment, re.S)[0]
|
||||
return hit
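# A small usage illustration for extractXML() (the cave description fragment
# is made up; the tags are the same ones parsed in load() above):
def _extractXML_example():
    sample = ('<caveslug>1623-290</caveslug>\n'
              '<underground_description>\n'
              'Large entrance chamber...\n'
              '</underground_description>')
    print(extractXML(sample, 'caveslug'))                 # -> 1623-290
    print(extractXML(sample, 'underground_description'))  # -> the text between the tags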
|
||||
|
||||
def bash(cmd): #calls command in bash shell, returns output
|
||||
process = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE)
|
||||
output, error = process.communicate()
|
||||
return output
|
||||
@@ -62,6 +62,8 @@ def LoadPersonsExpos():
|
||||
|
||||
# make persons
|
||||
print "Loading personexpeditions"
|
||||
#expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
|
||||
#expomissing = set(expoers2008)
|
||||
|
||||
for personline in personreader:
|
||||
name = personline[header["Name"]]
|
||||
@@ -83,7 +85,36 @@ def LoadPersonsExpos():
|
||||
nonLookupAttribs = {'nickname':nickname, 'is_guest':(personline[header["Guest"]] == "1")}
|
||||
save_carefully(models.PersonExpedition, lookupAttribs, nonLookupAttribs)
|
||||
|
||||
|
||||
|
||||
# this fills in those people for whom 2008 was their first expo
|
||||
#print "Loading personexpeditions 2008"
|
||||
#for name in expomissing:
|
||||
# firstname, lastname = name.split()
|
||||
# is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
|
||||
# print "2008:", name
|
||||
# persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
|
||||
# if not persons:
|
||||
# person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
|
||||
# #person.Sethref()
|
||||
# person.save()
|
||||
# else:
|
||||
# person = persons[0]
|
||||
# expedition = models.Expedition.objects.get(year="2008")
|
||||
# personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
|
||||
# personexpedition.save()
|
||||
|
||||
#Notability is now a method of person. Makes no sense to store it in the database; it would need to be recalculated every time something changes. - AC 16 Feb 09
|
||||
# could rank according to surveying as well
|
||||
#print "Setting person notability"
|
||||
#for person in models.Person.objects.all():
|
||||
#person.notability = 0.0
|
||||
#for personexpedition in person.personexpedition_set.all():
|
||||
#if not personexpedition.is_guest:
|
||||
#person.notability += 1.0 / (2012 - int(personexpedition.expedition.year))
|
||||
#person.bisnotable = person.notability > 0.3 # I don't know how to filter by this
|
||||
#person.save()
|
||||
|
||||
|
||||
# used in other referencing parser functions
|
||||
# expedition name lookup cached for speed (it's a very big list)
|
||||
Gpersonexpeditionnamelookup = { }
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
from django.conf import settings
|
||||
import troggle.core.models as models
|
||||
|
||||
def load():
|
||||
folkfile = open(settings.EXPOWEB+"noinfo/folk.csv")
|
||||
personlines = folkfile.read().splitlines()
|
||||
persontable = [x.split(',') for x in personlines]
|
||||
years = [persontable[0][i] for i in range(5,len(persontable[0]))]
|
||||
for year in years:
|
||||
newexpedition = models.ExpeditionM( date = year )
|
||||
newexpedition.save()
|
||||
for row in persontable[1:]: #skip header
|
||||
attendedid = [i for i, x in enumerate(row) if '1' in x]
|
||||
attendedyears = [persontable[0][i] for i in attendedid if i >= 5]
|
||||
name = row[0]
|
||||
print(name+' has attended: '+', '.join(attendedyears))
|
||||
newperson = models.PersonM(
|
||||
name = name)
|
||||
newperson.save()
|
||||
for year in attendedyears:
|
||||
target = models.ExpeditionM.objects.get(date=year)
|
||||
newperson.expos_attended.add( target )
|
||||
print('Person -> Expo table created!')
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
from django.conf import settings
|
||||
import subprocess, re
|
||||
import troggle.core.models as models
|
||||
|
||||
def load():
|
||||
print('Load survex files and relations')
|
||||
load_area('1623')
|
||||
|
||||
def load_area(areacode):
|
||||
|
||||
print('Searching all cave dirs files')
|
||||
basedir = settings.SURVEX_DATA+'caves-'+areacode+'/'
|
||||
|
||||
cavedirs = bash("find "+basedir+" -maxdepth 1 -type d").splitlines() #this command finds all directories
|
||||
print('Obtained list of directories! (#dirs='+str(len(cavedirs))+')')
|
||||
|
||||
for cavedir in cavedirs:
|
||||
if cavedir==basedir:
|
||||
continue #skip the basedir - a non-proper subdirectory
|
||||
parentname = bash('echo '+cavedir+' | rev | cut -f1 -d \'/\' | rev').splitlines()[0] #get final bit of the directory
|
||||
parentcave = models.CaveM.objects.filter(survex_file__icontains=cavedir)
|
||||
if len(parentcave)>1:
|
||||
print('Non unique parent - skipping. Name:'+parentname)
|
||||
elif len(parentcave)==0:
|
||||
print('Error! parent not created:'+parentname)
|
||||
continue
|
||||
else: #exactly one match
|
||||
print('Adding relations of:'+parentname)
|
||||
parentcave = parentcave[0]
|
||||
|
||||
surveyfiles = bash('find '+cavedir+' -name \'*.svx\'').splitlines()
|
||||
for fn in surveyfiles:
|
||||
print(fn)
|
||||
svxcontents = open(fn,'r').read().splitlines()
|
||||
try:
|
||||
dateline = [x for x in svxcontents if ('*date' in x)][0]
|
||||
date = re.findall('\\d\\d\\d\\d\\.\\d\\d\\.\\d\\d', dateline, re.S)[0]
|
||||
|
||||
|
||||
except:
|
||||
if( len( [x for x in svxcontents if ('*date' in x)] ) == 0 ):
|
||||
continue #skip dateless files
|
||||
print('Date format error in '+fn)
|
||||
print('Dateline = '+ '"'.join([x for x in svxcontents if ('*date' in x)]))
|
||||
date = '1900.01.01'
|
||||
|
||||
|
||||
newsurvex = models.SurveyM(survex_file=fn, date=date)
|
||||
newsurvex.save()
|
||||
parentcave.surveys.add(newsurvex)
|
||||
parentcave.save()
|
||||
|
||||
|
||||
def file_exists(filename):
|
||||
test = bash('if [ ! -f '+filename+' ] ; then echo MISSING; fi')#test for file existence
|
||||
if 'MISSING' in test: #the file does not exist
|
||||
return False
|
||||
return True
|
||||
|
||||
def bash(cmd): #calls command in bash shell, returns output
|
||||
process = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE)
|
||||
output, error = process.communicate()
|
||||
return output
|
||||
|
||||
|
||||
@@ -71,9 +71,6 @@ if django.VERSION[0] == 1 and django.VERSION[1] < 4:
|
||||
else:
|
||||
authmodule = 'django.contrib.auth.context_processors.auth'
|
||||
|
||||
TOPCAMPX=411571.00
|
||||
TOPCAMPY=5282639.00
|
||||
|
||||
TEMPLATE_CONTEXT_PROCESSORS = ( authmodule, "core.context.troggle_context", )
|
||||
|
||||
LOGIN_REDIRECT_URL = '/'
|
||||
@@ -95,7 +92,7 @@ INSTALLED_APPS = (
|
||||
'troggle.profiles',
|
||||
'troggle.core',
|
||||
'troggle.flatpages',
|
||||
'troggle.imagekit',
|
||||
'imagekit',
|
||||
)
|
||||
|
||||
MIDDLEWARE_CLASSES = (
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
<table class="searchable">
|
||||
{% for cave in caves1623 %}
|
||||
|
||||
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> {{ cave.slug }}</td></tr>
|
||||
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </td></tr>
|
||||
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
|
||||
|
||||
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}/css/cavetables.css">
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<h2>Caves of the Loser plateau (locations according to all.3d)</h2>
|
||||
<span style="font-size:70%">
|
||||
Name contains:<br>
|
||||
<div id="inputf"><input type="text" name="CaveName" id="CaveName" style="width:100%"></div><br>
|
||||
Depth between (min, max) in meters (0 disables filter):<br>
|
||||
<div id="inputf"><input type="number" name="CaveDepthMin" id="CaveDepthMin" style="width:45%"> - <input type="number" name="CaveDepthMax" id="CaveDepthMax" style="width:45%"></div><br>
|
||||
Length between (min, max) in meters (0 disables filter):<br>
|
||||
<div id="inputf"><input type="number" name="CaveLengthMin" id="CaveLengthMin" style="width:45%"> - <input type="number" name="CaveLengthMax" id="CaveLengthMax" style="width:45%"></div><br>
|
||||
Last visit after (date in YYYY.MM.DD format works best):<br>
|
||||
<div id="inputf"><input type="text" name="VisitDate" id="VisitDate" style="width:100%"></div><br>
|
||||
Last visited by (single word or regular expression, search is not case sensitive):<br>
|
||||
(e.g. <span id="mono">/da.e/</span> matches both Dave and Dane, <span id="mono">/w..k|ol{2}y/</span> matches either Wook or Olly)<br>
|
||||
<div id="inputf"><input type="text" name="Visitor" id="Visitor" style="width:100%"></div><br>
|
||||
Hide incomplete entries:<br>
|
||||
<div id="inputf"><input type="checkbox" name="Incomplete" id="Incomplete" style="width:100%"></div><br><br>
|
||||
|
||||
<button onclick="filterTable('caves_table')">Filter</button><br>
|
||||
<button onclick="filterTableReset('caves_table')">Reset filters</button><br>
|
||||
|
||||
Click on column headers to sort/reverse sort<br><br><br>
|
||||
</span>
|
||||
|
||||
|
||||
|
||||
|
||||
<table id="caves_table">
|
||||
<tr>
|
||||
<th onclick="sortTable(0,'caves_table',0)">Cave survex id</th>
|
||||
<th onclick="sortTable(1,'caves_table',0)">Cave name</th>
|
||||
<th onclick="sortTable(2,'caves_table',1)">Cave depth</th>
|
||||
<th onclick="sortTable(3,'caves_table',1)">Cave length</th>
|
||||
<th onclick="sortTable(4,'caves_table',0)">Last leg date</th>
|
||||
<th onclick="sortTable(5,'caves_table',0)">Cave location (UTM)</th>
|
||||
<th onclick="sortTable(6,'caves_table',0)">Cave location (lat/lon)</th>
|
||||
<th onclick="sortTable(7,'caves_table',1)">Top camp distance [m]</th>
|
||||
</tr>
|
||||
|
||||
{% for cave in caves %}
|
||||
<tr>
|
||||
<td><a href={{cave.description}}>{{ cave.name }}</a></td>
|
||||
<td>{{ cave.title }}</td>
|
||||
<td>{{ cave.total_depth }}</td>
|
||||
<td>{{ cave.total_length }}</td>
|
||||
<td>{{ cave.date }}</td>
|
||||
<td>33U {{ cave.entrance }}</td>
|
||||
<td>{{ cave.lat_lon_entrance }}</td>
|
||||
<td>{{ cave.top_camp_distance}}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
||||
</table>
|
||||
|
||||
<script type="text/javascript" src="{{ settings.MEDIA_URL }}/scripts/TableSort.js"></script>
|
||||
|
||||
</body>
|
||||
|
||||
|
||||
</html>
|
||||
|
||||
6
urls.py
@@ -22,11 +22,9 @@ admin.autodiscover()
|
||||
|
||||
actualurlpatterns = patterns('',
|
||||
|
||||
|
||||
url(r'^testingurl/?$' , views_caves.millenialcaves, name="testing"),
|
||||
|
||||
url(r'^millenialcaves/?$', views_caves.millenialcaves, name="millenialcaves"),
|
||||
url(r'^millenialpeople/?$', views_caves.millenialpeople, name="millenialpeople"),
|
||||
url(r'^cave/descriptionM/([^/]+)/?$', views_caves.millenialdescription),
|
||||
#url(r'^cave/description/([^/]+)/?$', views_caves.caveDescription),
|
||||
|
||||
url(r'^troggle$', views_other.frontpage, name="frontpage"),
|
||||
url(r'^todo/$', views_other.todo, name="todo"),
|
||||
|
||||