[svn r8345]
@@ -1,8 +0,0 @@
|
||||
# this is the snippet from http://www.djangosnippets.org/snippets/3/
|
||||
|
||||
from django.shortcuts import render_to_response
|
||||
from django.template import RequestContext
|
||||
|
||||
def render_response(req, *args, **kwargs):
|
||||
kwargs['context_instance'] = RequestContext(req)
|
||||
return render_to_response(*args, **kwargs)
|
||||
@@ -1,58 +0,0 @@
|
||||
import os
|
||||
import time
|
||||
import settings
|
||||
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
|
||||
from django.core import management
|
||||
from django.db import connection
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
def reload_db():
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("drop database %s" % settings.DATABASE_NAME)
|
||||
cursor.execute("create database %s" % settings.DATABASE_NAME)
|
||||
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % settings.DATABASE_NAME)
|
||||
cursor.execute("USE %s" % settings.DATABASE_NAME)
|
||||
management.call_command('syncdb')
|
||||
user = User.objects.create_user('m', 'm@m.com', 'm')
|
||||
user.is_staff = True
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
|
||||
def make_dirs():
|
||||
"""Make directories that troggle requires"""
|
||||
if not os.path.isdir(settings.PHOTOS_ROOT):
|
||||
os.mkdir(settings.PHOTOS_ROOT)
|
||||
|
||||
def import_cavetab():
|
||||
import parsers.cavetab
|
||||
parsers.cavetab.LoadCaveTab(logfile=settings.LOGFILE)
|
||||
|
||||
def import_people():
|
||||
import parsers.people
|
||||
parsers.people.LoadPersonsExpos()
|
||||
|
||||
def import_logbooks():
|
||||
settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() +'\n'+'-'*60)
|
||||
import parsers.logbooks
|
||||
parsers.logbooks.LoadLogbooks()
|
||||
|
||||
def import_survex():
|
||||
import parsers.survex
|
||||
parsers.survex.LoadAllSurvexBlocks()
|
||||
|
||||
def import_QMs():
|
||||
import parsers.QMs
|
||||
|
||||
def import_surveys():
|
||||
import parsers.surveys
|
||||
|
||||
def reset():
|
||||
reload_db()
|
||||
make_dirs()
|
||||
import_cavetab()
|
||||
import_people()
|
||||
import_logbooks()
|
||||
import_survex()
|
||||
import_QMs()
|
||||
import_surveys()
|
||||
@@ -1,93 +0,0 @@
|
||||
from troggle.expo.models import *
|
||||
from django.contrib import admin
|
||||
from django.forms import ModelForm
|
||||
import django.forms as forms
|
||||
from expo.forms import LogbookEntryForm
|
||||
#from troggle.reversion.admin import VersionAdmin #django-reversion version control
|
||||
|
||||
#overriding admin save so we have the new since parsing field
|
||||
class TroggleModelAdmin(admin.ModelAdmin):
|
||||
def save_model(self, request, obj, form, change):
|
||||
obj.new_since_parsing=True
|
||||
obj.save()
|
||||
|
||||
class RoleInline(admin.TabularInline):
|
||||
model = PersonRole
|
||||
extra = 4
|
||||
|
||||
class SurvexBlockAdmin(TroggleModelAdmin):
|
||||
inlines = (RoleInline,)
|
||||
|
||||
class ScannedImageInline(admin.TabularInline):
|
||||
model = ScannedImage
|
||||
extra = 4
|
||||
|
||||
class SurveyAdmin(TroggleModelAdmin):
|
||||
inlines = (ScannedImageInline,)
|
||||
search_fields = ('expedition__year','wallet_number')
|
||||
|
||||
class QMInline(admin.TabularInline):
|
||||
model=QM
|
||||
extra = 4
|
||||
|
||||
class PhotoInline(admin.TabularInline):
|
||||
model = Photo
|
||||
exclude = ['is_mugshot', ]
|
||||
extra = 1
|
||||
|
||||
class PersonTripInline(admin.TabularInline):
|
||||
model = PersonTrip
|
||||
exclude = ['persontrip_next','Delete']
|
||||
extra = 1
|
||||
|
||||
#class LogbookEntryAdmin(VersionAdmin):
|
||||
class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
prepopulated_fields = {'slug':("title",)}
|
||||
search_fields = ('title','expedition__year')
|
||||
inlines = (PersonTripInline, PhotoInline)
|
||||
form = LogbookEntryForm
|
||||
#inlines = (QMInline,) #doesn't work because QM has two foreignkeys to Logbookentry- need workaround
|
||||
|
||||
class PersonExpeditionInline(admin.TabularInline):
|
||||
model = PersonExpedition
|
||||
extra = 1
|
||||
|
||||
|
||||
|
||||
class PersonAdmin(TroggleModelAdmin):
|
||||
search_fields = ('first_name','last_name')
|
||||
inlines = (PersonExpeditionInline,)
|
||||
|
||||
class QMAdmin(TroggleModelAdmin):
|
||||
search_fields = ('found_by__cave__kataster_number','number')
|
||||
|
||||
class PersonExpeditionAdmin(TroggleModelAdmin):
|
||||
search_fields = ('person__first_name','expedition__year')
|
||||
|
||||
class CaveAdmin(TroggleModelAdmin):
|
||||
search_fields = ('official_name','kataster_number','unofficial_number')
|
||||
#inlines = (QMInline,)
|
||||
extra = 4
|
||||
|
||||
|
||||
|
||||
admin.site.register(Photo)
|
||||
admin.site.register(Subcave)
|
||||
admin.site.register(Cave, CaveAdmin)
|
||||
admin.site.register(Area)
|
||||
admin.site.register(OtherCaveName)
|
||||
admin.site.register(CaveAndEntrance)
|
||||
admin.site.register(SurveyStation)
|
||||
admin.site.register(Entrance)
|
||||
admin.site.register(SurvexBlock, SurvexBlockAdmin)
|
||||
admin.site.register(Expedition)
|
||||
admin.site.register(Person,PersonAdmin)
|
||||
admin.site.register(PersonRole)
|
||||
admin.site.register(PersonExpedition,PersonExpeditionAdmin)
|
||||
admin.site.register(Role)
|
||||
admin.site.register(LogbookEntry, LogbookEntryAdmin)
|
||||
admin.site.register(PersonTrip)
|
||||
admin.site.register(QM, QMAdmin)
|
||||
admin.site.register(Survey, SurveyAdmin)
|
||||
admin.site.register(ScannedImage)
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
from django.conf import settings
|
||||
|
||||
def settings_context(request):
|
||||
return { 'settings':settings }
|
||||
@@ -1,43 +0,0 @@
|
||||
import troggle.settings as settings
|
||||
import os
|
||||
import urllib
|
||||
|
||||
def urljoin(x, y): return x + "/" + y
|
||||
|
||||
def listdir(*path):
|
||||
try:
|
||||
strippedpath = [p for p in path if p]
|
||||
root = os.path.join(settings.FILES, *strippedpath )
|
||||
l = ""
|
||||
#l = root + "\n"
|
||||
isdir = os.path.isdir(root) #This seems to be required for os.path.isdir to work...
|
||||
#l += str(isdir) + "\n"
|
||||
for p in os.listdir(root):
|
||||
if os.path.isdir(os.path.join(root, p)):
|
||||
l += p + "/\n"
|
||||
|
||||
elif os.path.isfile(os.path.join(root, p)):
|
||||
l += p + "\n"
|
||||
#Ignore non-files and non-directories
|
||||
return l
|
||||
except:
|
||||
if strippedpath:
|
||||
c = reduce(urljoin, strippedpath)
|
||||
else:
|
||||
c = ""
|
||||
c = c.replace("#", "%23")
|
||||
print "FILE: ", settings.FILES + "listdir/" + c
|
||||
return urllib.urlopen(settings.FILES + "listdir/" + c).read()
|
||||
|
||||
def dirsAsList(*path):
|
||||
return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] == "/"]
|
||||
|
||||
def filesAsList(*path):
|
||||
return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] != "/"]
|
||||
|
||||
def readFile(*path):
|
||||
try:
|
||||
f = open(os.path.join(settings.FILES, *path))
|
||||
except:
|
||||
f = urllib.urlopen(settings.FILES + "download/" + reduce(urljoin, path))
|
||||
return f.read()
|
||||
@@ -1,40 +0,0 @@
|
||||
from django.forms import ModelForm
|
||||
from models import Cave, Person, LogbookEntry
|
||||
import django.forms as forms
|
||||
from django.forms.formsets import formset_factory
|
||||
from django.contrib.admin.widgets import AdminDateWidget
|
||||
import string
|
||||
|
||||
class CaveForm(ModelForm):
|
||||
class Meta:
|
||||
model = Cave
|
||||
|
||||
class PersonForm(ModelForm):
|
||||
class Meta:
|
||||
model = Person
|
||||
|
||||
class LogbookEntryForm(ModelForm):
|
||||
class Meta:
|
||||
model = LogbookEntry
|
||||
|
||||
def wikiLinkHints(LogbookEntry=None):
|
||||
res = ["Please use the following wikilinks, which are related to this logbook entry:"]
|
||||
|
||||
res.append(r'</p><p style="float: left;"><b>QMs found:</b>')
|
||||
for QM in LogbookEntry.instance.QMs_found.all():
|
||||
res.append(QM.wiki_link())
|
||||
|
||||
res.append(r'</p><p style="float: left;"><b>QMs ticked off:</b>')
|
||||
for QM in LogbookEntry.instance.QMs_ticked_off.all():
|
||||
res.append(QM.wiki_link())
|
||||
|
||||
# res.append(r'</p><p style="float: left; "><b>People</b>')
|
||||
# for persontrip in LogbookEntry.instance.persontrip_set.all():
|
||||
# res.append(persontrip.wiki_link())
|
||||
# res.append(r'</p>')
|
||||
|
||||
return string.join(res, r'<br />')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LogbookEntryForm, self).__init__(*args, **kwargs)
|
||||
self.fields['text'].help_text=self.wikiLinkHints()
|
||||
@@ -1,23 +0,0 @@
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit import processors
|
||||
|
||||
class ResizeThumb(processors.Resize):
|
||||
width = 100
|
||||
height = 75
|
||||
crop = True
|
||||
|
||||
class ResizeDisplay(processors.Resize):
|
||||
width = 600
|
||||
|
||||
class EnhanceThumb(processors.Adjustment):
|
||||
contrast = 1.2
|
||||
sharpness = 1.1
|
||||
|
||||
class Thumbnail(ImageSpec):
|
||||
access_as = 'thumbnail_image'
|
||||
pre_cache = True
|
||||
processors = [ResizeThumb, EnhanceThumb]
|
||||
|
||||
class Display(ImageSpec):
|
||||
increment_count = True
|
||||
processors = [ResizeDisplay]
|
||||
@@ -1,615 +0,0 @@
|
||||
import urllib, urlparse, string, os, datetime
|
||||
from django.forms import ModelForm
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from models_survex import *
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
class TroggleModel(models.Model):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
|
||||
def get_admin_url(self):
|
||||
return settings.URL_ROOT + "/admin/expo/" + self._meta.object_name.lower() + "/" + str(self.pk)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class TroggleImageModel(ImageModel):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
|
||||
def get_admin_url(self):
|
||||
return settings.URL_ROOT + "/admin/expo/" + self._meta.object_name.lower() + "/" + str(self.pk)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class Expedition(TroggleModel):
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
date_from = models.DateField(blank=True,null=True)
|
||||
date_to = models.DateField(blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.year
|
||||
|
||||
class Meta:
|
||||
ordering = ('year',)
|
||||
get_latest_by = 'date_from'
|
||||
|
||||
def get_absolute_url(self):
|
||||
#return settings.URL_ROOT + "/expedition/%s" % self.year
|
||||
return settings.URL_ROOT + reverse('expedition',args=[self.year])
|
||||
|
||||
|
||||
# lose these two functions (inelegant, and we may create a file with the dates that we can load from)
|
||||
def GuessDateFrom(self):
|
||||
try:
|
||||
return self.logbookentry_set.order_by('date')[0].date
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def GuessDateTo(self): # returns the date of the last logbook entry in the expedition
|
||||
try:
|
||||
return self.logbookentry_set.order_by('date')[-1].date
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def ListDays(self):
|
||||
if self.date_from and self.date_to:
|
||||
res=[]
|
||||
date=self.date_from
|
||||
while date <= self.date_to:
|
||||
res.append(date)
|
||||
date+=datetime.timedelta(days=1)
|
||||
return res
|
||||
elif self.GuessDateFrom() and self.GuessDateTo(): # if we don't have the real dates, try it with the dates taken from the earliest and latest logbook entries
|
||||
date=self.GuessDateFrom()
|
||||
while date <= self.GuessDateTo():
|
||||
res.append(date)
|
||||
date+=datetime.timedelta(days=1)
|
||||
return res
|
||||
|
||||
|
||||
|
||||
|
||||
class Person(TroggleModel):
|
||||
first_name = models.CharField(max_length=100)
|
||||
last_name = models.CharField(max_length=100)
|
||||
is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.")
|
||||
mug_shot = models.CharField(max_length=100, blank=True,null=True)
|
||||
blurb = models.TextField(blank=True,null=True)
|
||||
|
||||
#href = models.CharField(max_length=200)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
|
||||
#the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
|
||||
#notability = models.FloatField() # for listing the top 20 people
|
||||
#bisnotable = models.BooleanField()
|
||||
user = models.OneToOneField(User, null=True, blank=True)
|
||||
def get_absolute_url(self):
|
||||
return settings.URL_ROOT + reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name})
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "People"
|
||||
class Meta:
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
def __unicode__(self):
|
||||
if self.last_name:
|
||||
return "%s %s" % (self.first_name, self.last_name)
|
||||
return self.first_name
|
||||
|
||||
# Below are no longer needed. Use {{ person.personexpedition_set.all.0.expedition }} for Firstexpedition, and {{ person.personexpedition_set.latest.expedition }} for Lastexpedition
|
||||
# these ought to be possible by piping through |min in the template, or getting the first of an ordered list
|
||||
# def Firstexpedition(self):
|
||||
# return self.personexpedition_set.order_by('expedition')[0]
|
||||
# def Lastexpedition(self):
|
||||
# return self.personexpedition_set.order_by('-expedition')[0]
|
||||
|
||||
def notability(self):
|
||||
notability = Decimal(0)
|
||||
for personexpedition in self.personexpedition_set.all():
|
||||
if not personexpedition.is_guest:
|
||||
notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
|
||||
return notability
|
||||
|
||||
def bisnotable(self):
|
||||
return self.notability() > Decimal(1)/Decimal(3)
|
||||
|
||||
#def Sethref(self):
|
||||
#if self.last_name:
|
||||
#self.href = self.first_name.lower() + "_" + self.last_name.lower()
|
||||
#self.orderref = self.last_name + " " + self.first_name
|
||||
#else:
|
||||
# self.href = self.first_name.lower()
|
||||
#self.orderref = self.first_name
|
||||
#self.notability = 0.0 # set temporarily
|
||||
|
||||
|
||||
class PersonExpedition(TroggleModel):
|
||||
expedition = models.ForeignKey(Expedition)
|
||||
person = models.ForeignKey(Person)
|
||||
date_from = models.DateField(blank=True,null=True)
|
||||
date_to = models.DateField(blank=True,null=True)
|
||||
is_guest = models.BooleanField(default=False)
|
||||
COMMITTEE_CHOICES = (
|
||||
('leader','Expo leader'),
|
||||
('medical','Expo medical officer'),
|
||||
('treasurer','Expo treasurer'),
|
||||
('sponsorship','Expo sponsorship coordinator'),
|
||||
('research','Expo research coordinator'),
|
||||
)
|
||||
expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
|
||||
nickname = models.CharField(max_length=100,blank=True,null=True)
|
||||
|
||||
def GetPersonroles(self):
|
||||
res = [ ]
|
||||
for personrole in self.personrole_set.order_by('survex_block'):
|
||||
if res and res[-1]['survexpath'] == personrole.survex_block.survexpath:
|
||||
res[-1]['roles'] += ", " + str(personrole.role)
|
||||
else:
|
||||
res.append({'date':personrole.survex_block.date, 'survexpath':personrole.survex_block.survexpath, 'roles':str(personrole.role)})
|
||||
return res
|
||||
|
||||
class Meta:
|
||||
ordering = ('expedition',)
|
||||
get_latest_by = 'date_from'
|
||||
|
||||
def GetPersonChronology(self):
|
||||
res = { }
|
||||
for persontrip in self.persontrip_set.all():
|
||||
a = res.setdefault(persontrip.date, { })
|
||||
a.setdefault("persontrips", [ ]).append(persontrip)
|
||||
for personrole in self.personrole_set.all():
|
||||
a = res.setdefault(personrole.survex_block.date, { })
|
||||
b = a.setdefault("personroles", { })
|
||||
survexpath = personrole.survex_block.survexpath
|
||||
|
||||
if b.get(survexpath):
|
||||
b[survexpath] += ", " + str(personrole.role)
|
||||
else:
|
||||
b[survexpath] = str(personrole.role)
|
||||
# needs converting dict into list
|
||||
return sorted(res.items())
|
||||
|
||||
# possibly not useful functions anyway -JT
|
||||
# if you can find a better way to make the expo calendar table, be my guest. It isn't possible to do this logic in a django template without writing custom tags.
|
||||
def ListDays(self):
|
||||
if self.date_from and self.date_to:
|
||||
res=[]
|
||||
date=self.date_from
|
||||
while date <= self.date_to:
|
||||
res.append(date)
|
||||
date+=datetime.timedelta(days=1)
|
||||
return res
|
||||
|
||||
def ListDaysTF(self):
|
||||
if self.date_from and self.date_to:
|
||||
res=[]
|
||||
for date in self.expedition.ListDays():
|
||||
res.append(date in self.ListDays())
|
||||
return res
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.person, self.expedition)
|
||||
|
||||
#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
|
||||
def name(self):
|
||||
if self.nickname:
|
||||
return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
|
||||
if self.person.last_name:
|
||||
return "%s %s" % (self.person.first_name, self.person.last_name)
|
||||
return self.person.first_name
|
||||
|
||||
def get_absolute_url(self):
|
||||
#return settings.URL_ROOT + '/personexpedition/' + str(self.person.first_name) + '_' + str(self.person.last_name) + '/' +self.expedition.year
|
||||
return settings.URL_ROOT + reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year})
|
||||
|
||||
class LogbookEntry(TroggleModel):
|
||||
date = models.DateField()
|
||||
expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
|
||||
author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip
|
||||
title = models.CharField(max_length=200)
|
||||
cave = models.ForeignKey('Cave',blank=True,null=True)
|
||||
place = models.CharField(max_length=100,blank=True,null=True)
|
||||
text = models.TextField()
|
||||
slug = models.SlugField(max_length=50)
|
||||
#href = models.CharField(max_length=100)
|
||||
|
||||
|
||||
#logbookentry_next = models.ForeignKey('LogbookEntry', related_name='pnext', blank=True,null=True)
|
||||
#logbookentry_prev = models.ForeignKey('LogbookEntry', related_name='pprev', blank=True,null=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Logbook Entries"
|
||||
# several PersonTrips point in to this object
|
||||
class Meta:
|
||||
ordering = ('-date',)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return settings.URL_ROOT + reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug})
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.date, self.title)
|
||||
|
||||
def get_next_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id+1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id-1)
|
||||
|
||||
class PersonTrip(TroggleModel):
|
||||
person_expedition = models.ForeignKey(PersonExpedition,null=True)
|
||||
|
||||
# this will be a foreign key of the place(s) the trip went through
|
||||
# possibly a trip has a plurality of triplets pointing into it
|
||||
place = models.CharField(max_length=100)
|
||||
# should add cave thing here (copied from logbook maybe)
|
||||
date = models.DateField()
|
||||
time_underground = models.FloatField()
|
||||
logbook_entry = models.ForeignKey(LogbookEntry)
|
||||
is_logbook_entry_author = models.BooleanField()
|
||||
|
||||
#persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)
|
||||
#persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s %s (%s)" % (self.person_expedition, self.place, self.date)
|
||||
|
||||
def get_persons_next_trip(self):
|
||||
try:
|
||||
return PersonTrip.objects.filter(person_expedition__person=self.person_expedition.person, date__gt=self.date)[0]
|
||||
except:
|
||||
return
|
||||
|
||||
def get_persons_previous_trip(self):
|
||||
try:
|
||||
return PersonTrip.objects.filter(person_expedition__person=self.person_expedition.person, date__lt=self.date)[0]
|
||||
except:
|
||||
return
|
||||
|
||||
# def get_persons_previous_trip(self):
|
||||
#
|
||||
# move following classes into models_cave
|
||||
#
|
||||
|
||||
class Area(TroggleModel):
|
||||
short_name = models.CharField(max_length=100)
|
||||
name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True,null=True)
|
||||
parent = models.ForeignKey('Area', blank=True, null=True)
|
||||
def __unicode__(self):
|
||||
if self.parent:
|
||||
return unicode(self.parent) + u" - " + unicode(self.short_name)
|
||||
else:
|
||||
return unicode(self.short_name)
|
||||
def kat_area(self):
|
||||
if self.short_name in ["1623", "1626"]:
|
||||
return self.short_name
|
||||
elif self.parent:
|
||||
return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(TroggleModel):
|
||||
cave = models.ForeignKey('Cave')
|
||||
entrance = models.ForeignKey('Entrance')
|
||||
entrance_letter = models.CharField(max_length=20,blank=True,null=True)
|
||||
def __unicode__(self):
|
||||
return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class Cave(TroggleModel):
|
||||
# too much here perhaps
|
||||
official_name = models.CharField(max_length=160)
|
||||
area = models.ManyToManyField(Area, blank=True, null=True)
|
||||
kataster_code = models.CharField(max_length=20,blank=True,null=True)
|
||||
kataster_number = models.CharField(max_length=10,blank=True, null=True)
|
||||
unofficial_number = models.CharField(max_length=60,blank=True, null=True)
|
||||
entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
equipment = models.TextField(blank=True,null=True)
|
||||
references = models.TextField(blank=True,null=True)
|
||||
survey = models.TextField(blank=True,null=True)
|
||||
kataster_status = models.TextField(blank=True,null=True)
|
||||
underground_centre_line = models.TextField(blank=True,null=True)
|
||||
notes = models.TextField(blank=True,null=True)
|
||||
length = models.CharField(max_length=100,blank=True,null=True)
|
||||
depth = models.CharField(max_length=100,blank=True,null=True)
|
||||
extent = models.CharField(max_length=100,blank=True,null=True)
|
||||
survex_file = models.CharField(max_length=100,blank=True,null=True) #should be filefield, need to fix parser first
|
||||
|
||||
#href = models.CharField(max_length=100)
|
||||
|
||||
def get_absolute_url(self):
|
||||
if self.kataster_number:
|
||||
href = self.kataster_number
|
||||
elif self.unofficial_number:
|
||||
href = self.unofficial_number
|
||||
else:
|
||||
href = official_name.lower()
|
||||
#return settings.URL_ROOT + '/cave/' + href + '/'
|
||||
return settings.URL_ROOT + reverse('cave',kwargs={'cave_id':href,})
|
||||
|
||||
|
||||
|
||||
def __unicode__(self):
|
||||
if self.kataster_number:
|
||||
if self.kat_area():
|
||||
return self.kat_area() + u": " + self.kataster_number
|
||||
else:
|
||||
return unicode("l") + u": " + self.kataster_number
|
||||
else:
|
||||
if self.kat_area():
|
||||
return self.kat_area() + u": " + self.unofficial_number
|
||||
else:
|
||||
return self.unofficial_number
|
||||
|
||||
def get_QMs(self):
|
||||
return QM.objects.filter(found_by__cave=self)
|
||||
|
||||
def kat_area(self):
|
||||
for a in self.area.all():
|
||||
if a.kat_area():
|
||||
return a.kat_area()
|
||||
def entrances(self):
|
||||
return CaveAndEntrance.objects.filter(cave=self)
|
||||
def entrancelist(self):
|
||||
rs = []
|
||||
res = ""
|
||||
for e in CaveAndEntrance.objects.filter(cave=self):
|
||||
rs.append(e.entrance_letter)
|
||||
rs.sort()
|
||||
prevR = None
|
||||
n = 0
|
||||
for r in rs:
|
||||
if prevR:
|
||||
if chr(ord(prevR) + 1 ) == r:
|
||||
prevR = r
|
||||
n += 1
|
||||
else:
|
||||
if n == 0:
|
||||
res += ", " + prevR
|
||||
else:
|
||||
res += "–" + prevR
|
||||
else:
|
||||
prevR = r
|
||||
n = 0
|
||||
res += r
|
||||
if n == 0:
|
||||
res += ", " + prevR
|
||||
else:
|
||||
res += "–" + prevR
|
||||
return res
|
||||
|
||||
|
||||
|
||||
class OtherCaveName(TroggleModel):
|
||||
name = models.CharField(max_length=160)
|
||||
cave = models.ForeignKey(Cave)
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
class SurveyStation(TroggleModel):
|
||||
name = models.CharField(max_length=200)
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
class Entrance(TroggleModel):
|
||||
name = models.CharField(max_length=100, blank=True,null=True)
|
||||
entrance_description = models.TextField(blank=True,null=True)
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
map_description = models.TextField(blank=True,null=True)
|
||||
location_description = models.TextField(blank=True,null=True)
|
||||
approach = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
photo = models.TextField(blank=True,null=True)
|
||||
MARKING_CHOICES = (
|
||||
('P', 'Paint'),
|
||||
('P?', 'Paint (?)'),
|
||||
('T', 'Tag'),
|
||||
('T?', 'Tag (?)'),
|
||||
('R', 'Retagged'),
|
||||
('S', 'Spit'),
|
||||
('S?', 'Spit (?)'),
|
||||
('U', 'Unmarked'),
|
||||
('?', 'Unknown'))
|
||||
marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
|
||||
marking_comment = models.TextField(blank=True,null=True)
|
||||
FINDABLE_CHOICES = (
|
||||
('?', 'To be confirmed ...'),
|
||||
('S', 'Surveyed'),
|
||||
('L', 'Lost'),
|
||||
('R', 'Refindable'))
|
||||
findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
|
||||
findability_description = models.TextField(blank=True,null=True)
|
||||
alt = models.TextField(blank=True, null=True)
|
||||
northing = models.TextField(blank=True, null=True)
|
||||
easting = models.TextField(blank=True, null=True)
|
||||
tag_station = models.ForeignKey(SurveyStation, blank=True,null=True, related_name="tag_station")
|
||||
exact_station = models.ForeignKey(SurveyStation, blank=True,null=True, related_name="exact_station")
|
||||
other_station = models.ForeignKey(SurveyStation, blank=True,null=True, related_name="other_station")
|
||||
other_description = models.TextField(blank=True,null=True)
|
||||
bearings = models.TextField(blank=True,null=True)
|
||||
def __unicode__(self):
|
||||
a = CaveAndEntrance.objects.filter(entrance = self)
|
||||
name = u''
|
||||
if self.name:
|
||||
name = unicode(self.name) + u' '
|
||||
if len(a) == 1:
|
||||
return name + unicode(a[0])
|
||||
return name + unicode(a)
|
||||
def marking_val(self):
|
||||
for m in self.MARKING_CHOICES:
|
||||
if m[0] == self.marking:
|
||||
return m[1]
|
||||
def findability_val(self):
|
||||
for f in self.FINDABLE_CHOICES:
|
||||
if f[0] == self.findability:
|
||||
return f[1]
|
||||
|
||||
class Subcave(TroggleModel):
|
||||
description = models.TextField()
|
||||
name = models.CharField(max_length=200, )
|
||||
cave = models.ForeignKey('Cave', blank=True, null=True, help_text="Only the top-level subcave should be linked to a cave")
|
||||
parent= models.ForeignKey('Subcave', blank=True, null=True, related_name='children')
|
||||
adjoining = models.ManyToManyField('Subcave',blank=True, null=True,)
|
||||
survex_file = models.CharField(max_length=200, blank=True, null=True,)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def get_absolute_url(self):
|
||||
urlString=self.name
|
||||
if self.parent:
|
||||
parent=self.parent
|
||||
while parent: #recursively walk up the tree, adding parents to the left of the URL
|
||||
urlString=parent.name+'/'+urlString
|
||||
if parent.cave:
|
||||
cave=parent.cave
|
||||
parent=parent.parent
|
||||
urlString='cave/'+unicode(cave.kataster_number)+'/'+urlString
|
||||
else:
|
||||
urlString='cave/'+unicode(self.cave.kataster_number)+'/'+urlString
|
||||
|
||||
|
||||
return urlparse.urljoin(settings.URL_ROOT, urlString)
|
||||
|
||||
class QM(TroggleModel):
|
||||
#based on qm.csv in trunk/expoweb/smkridge/204 which has the fields:
|
||||
#"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
|
||||
found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
|
||||
ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
|
||||
number = models.IntegerField()
|
||||
GRADE_CHOICES=(
|
||||
('A', 'A: Large obvious lead'),
|
||||
('B', 'B: Average lead'),
|
||||
('C', 'C: Tight unpromising lead'),
|
||||
('D', 'D: Dig'),
|
||||
('X', 'X: Unclimbable aven')
|
||||
)
|
||||
grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
|
||||
location_description = models.TextField(blank=True)
|
||||
#should be a foreignkey to surveystation
|
||||
nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
|
||||
nearest_station = models.CharField(max_length=200,blank=True,null=True)
|
||||
area = models.CharField(max_length=100,blank=True,null=True)
|
||||
completion_description = models.TextField(blank=True,null=True)
|
||||
comment=models.TextField(blank=True,null=True)
|
||||
#the below are unneeded- instead use the date fields of the QM's trips
|
||||
#dateFound = models.DateField(blank=True)
|
||||
#dateKilled = models.DateField(blank=True)
|
||||
def __str__(self):
|
||||
QMnumber=str(self.found_by.cave)+'-'+str(self.found_by.date.year)+"-"+str(self.number)+self.grade
|
||||
return str(QMnumber)
|
||||
|
||||
def get_absolute_url(self):
|
||||
#return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
|
||||
return settings.URL_ROOT + reverse('qm',kwargs={'cave_id':self.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade})
|
||||
|
||||
def get_next_by_id(self):
|
||||
return QM.objects.get(id=self.id+1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
return QM.objects.get(id=self.id-1)
|
||||
|
||||
def wiki_link(self):
|
||||
res = '[[cave:' + str(self.found_by.cave.kataster_number) + ' '
|
||||
res += 'QM:' + str(self.found_by.date.year) + '-'
|
||||
res += str(self.number) + self.grade + ']]'
|
||||
return res
|
||||
|
||||
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
|
||||
class Photo(TroggleImageModel):
|
||||
caption = models.CharField(max_length=1000,blank=True,null=True)
|
||||
contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
|
||||
contains_person = models.ManyToManyField(Person,blank=True,null=True)
|
||||
file = models.ImageField(storage=photoFileStorage, upload_to='.',)
|
||||
is_mugshot = models.BooleanField(default=False)
|
||||
contains_cave = models.ForeignKey(Cave,blank=True,null=True)
|
||||
contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
|
||||
nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
|
||||
nearest_QM = models.ForeignKey(QM,blank=True,null=True)
|
||||
lon_utm = models.FloatField(blank=True,null=True)
|
||||
lat_utm = models.FloatField(blank=True,null=True)
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'expo.imagekit_specs'
|
||||
cache_dir = 'thumbs'
|
||||
image_field = 'file'
|
||||
|
||||
#content_type = models.ForeignKey(ContentType)
|
||||
#object_id = models.PositiveIntegerField()
|
||||
#location = generic.GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
def __str__(self):
|
||||
return self.caption
|
||||
|
||||
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
|
||||
def get_scan_path(instance, filename):
|
||||
year=instance.survey.expedition.year
|
||||
print "WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number
|
||||
number="%02d" % instance.survey.wallet_number + str(instance.survey.wallet_letter) #using %02d string formatting because convention was 2009#01
|
||||
return os.path.join('./',year,year+r'#'+number,instance.contents+str(instance.number_in_wallet)+r'.jpg')
|
||||
|
||||
class ScannedImage(TroggleImageModel):
|
||||
file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
|
||||
scanned_by = models.ForeignKey(Person,blank=True, null=True)
|
||||
scanned_on = models.DateField(null=True)
|
||||
survey = models.ForeignKey('Survey')
|
||||
contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
|
||||
number_in_wallet = models.IntegerField(null=True)
|
||||
lon_utm = models.FloatField(blank=True,null=True)
|
||||
lat_utm = models.FloatField(blank=True,null=True)
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'expo.imagekit_specs'
|
||||
cache_dir = 'thumbs'
|
||||
image_field = 'file'
|
||||
#content_type = models.ForeignKey(ContentType)
|
||||
#object_id = models.PositiveIntegerField()
|
||||
#location = generic.GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
#This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
|
||||
def correctURL(self):
|
||||
return string.replace(self.file.url,r'#',r'%23')
|
||||
|
||||
def __str__(self):
|
||||
return get_scan_path(self,'')
|
||||
|
||||
class Survey(TroggleModel):
|
||||
expedition = models.ForeignKey('Expedition')
|
||||
wallet_number = models.IntegerField(blank=True,null=True)
|
||||
wallet_letter = models.CharField(max_length=1,blank=True,null=True)
|
||||
comments = models.TextField(blank=True,null=True)
|
||||
location = models.CharField(max_length=400,blank=True,null=True)
|
||||
#notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
|
||||
survex_block = models.ForeignKey('SurvexBlock',blank=True, null=True)
|
||||
centreline_printed_on = models.DateField(blank=True, null=True)
|
||||
centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
|
||||
#sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
|
||||
tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
|
||||
tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
|
||||
integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
|
||||
integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
|
||||
rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)
|
||||
def __str__(self):
|
||||
return self.expedition.year+"#"+"%02d" % self.wallet_number
|
||||
|
||||
def notes(self):
|
||||
return self.scannedimage_set.filter(contents='notes')
|
||||
|
||||
def plans(self):
|
||||
return self.scannedimage_set.filter(contents='plan')
|
||||
|
||||
def elevations(self):
|
||||
return self.scannedimage_set.filter(contents='elevation')
|
||||
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
import os
|
||||
|
||||
class SurvexBlock(models.Model):
|
||||
name = models.CharField(max_length=100, blank=True, null=True)
|
||||
parent = models.ForeignKey('SurvexBlock', blank=True, null=True)
|
||||
text = models.TextField()
|
||||
|
||||
# non-useful representation of incomplete data
|
||||
start_year = models.IntegerField(blank=True, null=True)
|
||||
start_month = models.IntegerField(blank=True, null=True)
|
||||
start_day = models.IntegerField(blank=True, null=True)
|
||||
end_year = models.IntegerField(blank=True, null=True)
|
||||
end_month = models.IntegerField(blank=True, null=True)
|
||||
end_day = models.IntegerField(blank=True, null=True)
|
||||
|
||||
date = models.DateField(blank=True, null=True)
|
||||
survexpath = models.CharField(max_length=100)
|
||||
|
||||
# superfluous
|
||||
person = models.ManyToManyField('Person', through='PersonRole', blank=True, null=True)
|
||||
|
||||
# code for where in the survex data files this block sits
|
||||
begin_file = models.CharField(max_length=200)
|
||||
begin_char = models.IntegerField()
|
||||
end_file = models.CharField(max_length=200, blank=True, null=True)
|
||||
end_char = models.IntegerField(blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ('date', 'survexpath')
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
def filecontents(self):
|
||||
f = os.path.join(settings.SURVEX_DATA, self.begin_file)
|
||||
fin = open(f, "rb")
|
||||
res = fin.read().decode("latin1")
|
||||
fin.close()
|
||||
return res
|
||||
|
||||
def GetPersonroles(self):
|
||||
res = [ ]
|
||||
for personrole in self.personrole_set.order_by('personexpedition'):
|
||||
if res and res[-1]['person'] == personrole.personexpedition.person:
|
||||
res[-1]['roles'] += ", " + str(personrole.role)
|
||||
else:
|
||||
res.append({'person':personrole.personexpedition.person, 'expeditionyear':personrole.personexpedition.expedition.year, 'roles':str(personrole.role)})
|
||||
print res
|
||||
return res
|
||||
|
||||
|
||||
class PersonRole(models.Model):
|
||||
personexpedition = models.ForeignKey('PersonExpedition')
|
||||
person = models.ForeignKey('Person')
|
||||
survex_block = models.ForeignKey('SurvexBlock')
|
||||
role = models.ForeignKey('Role')
|
||||
def __unicode__(self):
|
||||
return unicode(self.person) + " - " + unicode(self.survex_block) + " - " + unicode(self.role)
|
||||
|
||||
class Role(models.Model):
|
||||
name = models.CharField(max_length=50)
|
||||
def __unicode__(self):
|
||||
return unicode(self.name)
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
import troggle.settings as settings
|
||||
from django import forms
|
||||
from troggle.expo.models import LogbookEntry
|
||||
import random
|
||||
import re
|
||||
|
||||
def weighted_choice(lst):
|
||||
n = random.uniform(0,1)
|
||||
for item, weight in lst:
|
||||
if n < weight:
|
||||
break
|
||||
n = n - weight
|
||||
return item
|
||||
|
||||
def randomLogbookSentence():
|
||||
randSent={}
|
||||
|
||||
# needs to handle empty logbooks without crashing
|
||||
|
||||
#Choose a random logbook entry
|
||||
randSent['entry']=LogbookEntry.objects.order_by('?')[0]
|
||||
|
||||
#Choose again if there are no sentances (this happens if it is a placeholder entry)
|
||||
while len(re.findall('[A-Z].*?\.',randSent['entry'].text))==0:
|
||||
randSent['entry']=LogbookEntry.objects.order_by('?')[0]
|
||||
|
||||
#Choose a random sentence from that entry. Store the sentence as randSent['sentence'], and the number of that sentence in the entry as randSent['number']
|
||||
sentenceList=re.findall('[A-Z].*?\.',randSent['entry'].text)
|
||||
randSent['number']=random.randrange(0,len(sentenceList))
|
||||
randSent['sentence']=sentenceList[randSent['number']]
|
||||
|
||||
return randSent
|
||||
@@ -1,39 +0,0 @@
|
||||
import re
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
# search script from http://www.julienphalip.com/blog/2008/08/16/adding-search-django-site-snap/
|
||||
|
||||
def normalize_query(query_string,
|
||||
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
|
||||
normspace=re.compile(r'\s{2,}').sub):
|
||||
''' Splits the query string in invidual keywords, getting rid of unecessary spaces
|
||||
and grouping quoted words together.
|
||||
Example:
|
||||
|
||||
>>> normalize_query(' some random words "with quotes " and spaces')
|
||||
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
|
||||
|
||||
'''
|
||||
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
|
||||
|
||||
def get_query(query_string, search_fields):
|
||||
''' Returns a query, that is a combination of Q objects. That combination
|
||||
aims to search keywords within a model by testing the given search fields.
|
||||
|
||||
'''
|
||||
query = None # Query to search for every search term
|
||||
terms = normalize_query(query_string)
|
||||
for term in terms:
|
||||
or_query = None # Query to search for a given term in each field
|
||||
for field_name in search_fields:
|
||||
q = Q(**{"%s__icontains" % field_name: term})
|
||||
if or_query is None:
|
||||
or_query = q
|
||||
else:
|
||||
or_query = or_query | q
|
||||
if query is None:
|
||||
query = or_query
|
||||
else:
|
||||
query = query & or_query
|
||||
return query
|
||||
@@ -1,52 +0,0 @@
|
||||
from django import template
|
||||
from django.utils.html import conditional_escape
|
||||
from django.template.defaultfilters import stringfilter
|
||||
from django.utils.safestring import mark_safe
|
||||
import re
|
||||
|
||||
register = template.Library()
|
||||
|
||||
# seems to add extra lines between the commented lines, which isn't so great.
|
||||
regexes = []
|
||||
regexes.append((re.compile(r"(;.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'<span class = "comment">\1</span>\n'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)(.svx)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3<a href="\4.index">\4\5</a>'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3<a href="\4.index">\4</a>'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*team\s+(?:notes|tape|insts|pics))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*(?:begin|end|copyright|date|entrance|equate|export|fix|prefix|require|SOLVE|title|truncate))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*calibrate\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|X|Y|Z)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*data\s+(?:DEFAULT|NORMAL|DIVING|CARTESIAN|TOPOFIL|CYLPOLAR|NOSURVEY|passage)(?:\s+station|\s+from|\s+to|\s+FROMDEPTH|\s+TODEPTH|\s+DEPTHCHANGE|\s+newline|\s+direction|\s+tape|\s+compass|\s+clino|\s+northing|\s+easting|\s+altitude|\s+length|\s+bearing|\s+gradient|\s+ignoreall|\sleft|\sright|\sup|\sdown)*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*default\s+(?:CALIBRATE|DATA|UNITS)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*flags\s+(?:DUPLICATE|SPLAY|SURFACE|not DUPLICATE|not SPLAY|not SURFACE))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*infer\s+(?:plumbs|equates|exports))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*instrument\s+(?:compass|clino|tape))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*instrument\s+(?:compass|clino|tape))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*sd\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|DX|DY|DZ))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*set\s+(?:BLANK|COMMENT|DECIMAL|EOL|KEYWORD|MINUS|NAMES|OMIT|PLUS|ROOT|SEPARATOR))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(\s*)(\*units\s+(?:TAPE|LENGTH|COMPASS|BEARING|CLINO|GRADIENT|COUNTER|DEPTH|DECLINATION|X|Y|Z))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'\1<span class = "command">\2</span>\3\4'))
|
||||
regexes.append((re.compile(r"^(.*)$", re.IGNORECASE|re.MULTILINE),
|
||||
r'<div>\1 </div>\n'))
|
||||
|
||||
@register.filter()
|
||||
@stringfilter
|
||||
def survex_to_html(value, autoescape=None):
|
||||
if autoescape:
|
||||
value = conditional_escape(value)
|
||||
for regex, sub in regexes:
|
||||
print sub
|
||||
value = regex.sub(sub, value)
|
||||
return mark_safe(value)
|
||||
@@ -1,101 +0,0 @@
|
||||
from django import template
|
||||
from django.utils.html import conditional_escape
|
||||
from django.template.defaultfilters import stringfilter
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.conf import settings
|
||||
from expo.models import QM
|
||||
import re
|
||||
|
||||
register = template.Library()
|
||||
|
||||
def wiki_list(line, listdepth):
|
||||
l = ""
|
||||
for d in listdepth:
|
||||
l += d
|
||||
mstar = re.match(l + "\*(.*)", line)
|
||||
if mstar:
|
||||
listdepth.append("\*")
|
||||
return ("<ul>\n" + " " * len(listdepth) + "<li>%s</li>\n" % mstar.groups()[0], listdepth)
|
||||
mhash = re.match(l + "#(.*)", line)
|
||||
if mhash:
|
||||
listdepth.append("#")
|
||||
return ("<ol>\n" + " " * len(listdepth) + "<li>%s</li>\n" % mhash.groups()[0], listdepth)
|
||||
mflat = re.match(l + "(.*)", line)
|
||||
if mflat and listdepth:
|
||||
return (" " * len(listdepth) + "<li>%s</li>\n" % mflat.groups()[0], listdepth)
|
||||
if listdepth:
|
||||
prev = listdepth.pop()
|
||||
if prev == "\*":
|
||||
t, l = wiki_list(line, listdepth)
|
||||
return ("</ul>\n" + t, l)
|
||||
if prev == "#":
|
||||
t, l = wiki_list(line, listdepth)
|
||||
return ("</ol>\n" + t, l)
|
||||
return (line, listdepth)
|
||||
|
||||
@register.filter()
|
||||
@stringfilter
|
||||
def wiki_to_html(value, autoescape=None):
|
||||
#find paragraphs
|
||||
outValue = ""
|
||||
for paragraph in re.split("\n\s*?\n", value, re.DOTALL):
|
||||
outValue += "<p>"
|
||||
outValue += wiki_to_html_short(paragraph, autoescape)
|
||||
outValue += "</p>\n"
|
||||
return mark_safe(outValue)
|
||||
|
||||
@register.filter()
|
||||
@stringfilter
|
||||
def wiki_to_html_short(value, autoescape=None):
|
||||
if autoescape:
|
||||
value = conditional_escape(value)
|
||||
#deescape doubly escaped characters
|
||||
value = re.sub("&(.*?);", r"&\1;", value, re.DOTALL)
|
||||
#italics and bold
|
||||
value = re.sub("''''([^']+)''''", r"<b><i>\1</i></b>", value, re.DOTALL)
|
||||
value = re.sub("'''([^']+)'''", r"<b>\1</b>", value, re.DOTALL)
|
||||
value = re.sub("''([^']+)''", r"<i>\1</i>", value, re.DOTALL)
|
||||
#make cave links
|
||||
value = re.sub("\[\[\s*cave:([^\s]+)\s*\s*\]\]", r'<a href="%s/cave/\1/">\1</a>' % settings.URL_ROOT, value, re.DOTALL)
|
||||
|
||||
|
||||
#function for replacing wikicode qm links with html qm links
|
||||
def qmrepl(matchobj):
|
||||
if len(matchobj.groups())==4:
|
||||
grade=matchobj.groups()[3]
|
||||
else:
|
||||
grade=''
|
||||
qmdict={'urlroot':settings.URL_ROOT,'cave':matchobj.groups()[0],'year':matchobj.groups()[1],'number':matchobj.groups()[2],'grade':grade}
|
||||
try:
|
||||
qm=QM.objects.get(found_by__cave__kataster_number=qmdict['cave'],found_by__date__year=qmdict['year'], number=qmdict['number'])
|
||||
url=r'<a href=' + str(qm.get_absolute_url()) +'>' + str(qm) + '</a>'
|
||||
except QM.DoesNotExist:
|
||||
url = r'<a class="redtext" href="%(urlroot)s/cave/%(cave)s/%(year)s-%(number)s%(grade)s">%(cave)s:%(year)s-%(number)s%(grade)s</a>' % qmdict
|
||||
return url
|
||||
|
||||
#make qm links
|
||||
value = re.sub("\[\[\s*cave:([^\s]+)\s*\s*\QM:(\d*)-(\d*)([ABCDX]?)\]\]",qmrepl, value, re.DOTALL)
|
||||
|
||||
#qms=qmfinder.search(value)
|
||||
#for qm in qms:
|
||||
#if QM.objects.filter(cave__kataster_number=qm[0], found_by__year=qm[1], number=qm[2]).count >= 1: # If there is at lesat one QM matching this query
|
||||
#replace qm with link in red
|
||||
#else
|
||||
#replace qm with link in blue
|
||||
|
||||
#turn qm links red if nonexistant
|
||||
|
||||
#Make lists from lines starting with lists of [stars and hashes]
|
||||
outValue = ""
|
||||
listdepth = []
|
||||
for line in value.split("\n"):
|
||||
t, listdepth = wiki_list(line, listdepth)
|
||||
outValue += t
|
||||
for item in listdepth:
|
||||
if item == "\*":
|
||||
outValue += "</ul>\n"
|
||||
elif item == "#":
|
||||
outValue += "</ol>\n"
|
||||
return mark_safe(outValue)
|
||||
|
||||
wiki_to_html.needs_autoescape = True
|
||||
@@ -1,161 +0,0 @@
|
||||
from django.conf import settings
|
||||
import fileAbstraction
|
||||
from django.shortcuts import render_to_response
|
||||
from django.http import HttpResponse, Http404
|
||||
import os
|
||||
import re
|
||||
|
||||
# inline fileabstraction into here if it's not going to be useful anywhere else
|
||||
# keep things simple and ignore exceptions everywhere for now
|
||||
|
||||
def getMimeType(extension):
|
||||
try:
|
||||
return {"txt": "text/plain",
|
||||
"html": "text/html",
|
||||
}[extension]
|
||||
except:
|
||||
print "unknown file type"
|
||||
return "text/plain"
|
||||
|
||||
|
||||
def listdir(request, path):
|
||||
#try:
|
||||
return HttpResponse(fileAbstraction.listdir(path), mimetype = "text/plain")
|
||||
#except:
|
||||
# raise Http404
|
||||
|
||||
def upload(request, path):
|
||||
pass
|
||||
|
||||
def download(request, path):
|
||||
#try:
|
||||
|
||||
return HttpResponse(fileAbstraction.readFile(path), mimetype=getMimeType(path.split(".")[-1]))
|
||||
#except:
|
||||
# raise Http404
|
||||
|
||||
|
||||
#
|
||||
# julian's quick hack for something that works
|
||||
# could signal directories by ending with /, and forward cases where it's missing
|
||||
#
|
||||
extmimetypes = {".txt": "text/plain",
|
||||
".html": "text/html",
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
}
|
||||
|
||||
def jgtfile(request, f):
|
||||
fp = os.path.join(settings.SURVEYS, f)
|
||||
# could also surf through SURVEX_DATA
|
||||
|
||||
# directory listing
|
||||
if os.path.isdir(fp):
|
||||
listdirfiles = [ ]
|
||||
listdirdirs = [ ]
|
||||
|
||||
for lf in sorted(os.listdir(fp)):
|
||||
hpath = os.path.join(f, lf) # not absolute path
|
||||
if lf[0] == "." or lf[-1] == "~":
|
||||
continue
|
||||
|
||||
hpath = hpath.replace("\\", "/") # for windows users
|
||||
href = hpath.replace("#", "%23") # '#' in file name annoyance
|
||||
|
||||
flf = os.path.join(fp, lf)
|
||||
if os.path.isdir(flf):
|
||||
nfiles = len([sf for sf in os.listdir(flf) if sf[0] != "."])
|
||||
listdirdirs.append((href, hpath + "/", nfiles))
|
||||
else:
|
||||
listdirfiles.append((href, hpath, os.path.getsize(flf)))
|
||||
|
||||
upperdirs = [ ]
|
||||
lf = f
|
||||
while lf:
|
||||
hpath = lf.replace("\\", "/") # for windows users
|
||||
if hpath[-1] != "/":
|
||||
hpath += "/"
|
||||
href = hpath.replace("#", "%23")
|
||||
lf = os.path.split(lf)[0]
|
||||
upperdirs.append((href, hpath))
|
||||
upperdirs.append(("", "/"))
|
||||
|
||||
return render_to_response('listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})
|
||||
|
||||
# flat output of file when loaded
|
||||
if os.path.isfile(fp):
|
||||
ext = os.path.splitext(fp)[1].lower()
|
||||
mimetype = extmimetypes.get(ext, "text/plain")
|
||||
fin = open(fp)
|
||||
ftext = fin.read()
|
||||
fin.close()
|
||||
return HttpResponse(ftext, mimetype=mimetype)
|
||||
|
||||
return HttpResponse("unknown file::%s::" % f, mimetype = "text/plain")
|
||||
|
||||
|
||||
def UniqueFile(fname):
|
||||
while True:
|
||||
if not os.path.exists(fname):
|
||||
break
|
||||
mname = re.match("(.*?)(?:-(\d+))?\.(png|jpg|jpeg)$(?i)", fname)
|
||||
if mname:
|
||||
fname = "%s-%d.%s" % (mname.group(1), int(mname.group(2) or "0") + 1, mname.group(3))
|
||||
return fname
|
||||
|
||||
|
||||
# join it all up and then split them off for the directories that don't exist
|
||||
# anyway, this mkdir doesn't work
|
||||
def SaveImageInDir(name, imgdir, project, fdata, bbinary):
|
||||
print ("hihihihi", fdata, settings.SURVEYS)
|
||||
fimgdir = os.path.join(settings.SURVEYS, imgdir)
|
||||
if not os.path.isdir(fimgdir):
|
||||
print "*** Making directory", fimgdir
|
||||
os.path.mkdir(fimgdir)
|
||||
fprojdir = os.path.join(fimgdir, project)
|
||||
if not os.path.isdir(fprojdir):
|
||||
print "*** Making directory", fprojdir
|
||||
os.path.mkdir(fprojdir)
|
||||
print "hhh"
|
||||
|
||||
fname = os.path.join(fprojdir, name)
|
||||
print fname, "fff"
|
||||
fname = UniqueFile(fname)
|
||||
|
||||
p2, p1 = os.path.split(fname)
|
||||
p3, p2 = os.path.split(p2)
|
||||
p4, p3 = os.path.split(p3)
|
||||
res = os.path.join(p3, p2, p1)
|
||||
|
||||
print "saving file", fname
|
||||
fout = open(fname, (bbinary and "wb" or "w"))
|
||||
fout.write(fdata.read())
|
||||
fout.close()
|
||||
res = os.path.join(imgdir, name)
|
||||
return res.replace("\\", "/")
|
||||
|
||||
|
||||
# do we want to consider saving project/field rather than field/project
|
||||
def jgtuploadfile(request):
|
||||
filesuploaded = [ ]
|
||||
project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
|
||||
print (project, user, tunnelversion)
|
||||
for uploadedfile in request.FILES.values():
|
||||
if uploadedfile.field_name in ["tileimage", "backgroundimage"] and \
|
||||
uploadedfile.content_type in ["image/png", "image/jpeg"]:
|
||||
fname = user + "_" + re.sub("[\\\\/]", "-", uploadedfile.name) # very escaped \
|
||||
print fname
|
||||
fileuploaded = SaveImageInDir(fname, uploadedfile.field_name, project, uploadedfile, True)
|
||||
filesuploaded.append(settings.URL_ROOT + "/jgtfile/" + fileuploaded)
|
||||
if uploadedfile.field_name in ["sketch"] and \
|
||||
uploadedfile.content_type in ["text/plain"]:
|
||||
fname = user + "_" + re.sub("[\\\\/]", "-", uploadedfile.name) # very escaped \
|
||||
print fname
|
||||
fileuploaded = SaveImageInDir(fname, uploadedfile.field_name, project, uploadedfile, False)
|
||||
filesuploaded.append(settings.URL_ROOT + "/jgtfile/" + fileuploaded)
|
||||
#print "FF", request.FILES
|
||||
#print ("FFF", request.FILES.values())
|
||||
message = ""
|
||||
print "gothere"
|
||||
return render_to_response('fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
# primary namespace
|
||||
|
||||
import view_surveys
|
||||
import views_caves
|
||||
import views_survex
|
||||
import views_logbooks
|
||||
import views_other
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
from troggle.expo.models import Cave, CaveAndEntrance, Survey, Expedition, QM
|
||||
import troggle.expo.models as models
|
||||
import troggle.settings as settings
|
||||
from django.forms.models import formset_factory
|
||||
import search
|
||||
from django.core.urlresolvers import reverse
|
||||
from troggle.alwaysUseRequestContext import render_response # see views_logbooks for explanation on this.
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.conf import settings
|
||||
import re
|
||||
|
||||
def getCave(cave_id):
|
||||
"""Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm."""
|
||||
try:
|
||||
cave = Cave.objects.get(kataster_number=cave_id)
|
||||
except Cave.DoesNotExist:
|
||||
cave = Cave.objects.get(unofficial_number=cave_id)
|
||||
return cave
|
||||
|
||||
def caveindex(request):
|
||||
caves = Cave.objects.all()
|
||||
notablecavehrefs = [ "161", "204", "258", "76" ] # could detect notability by trips and notability of people who have been down them
|
||||
notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs ]
|
||||
return render_response(request,'caveindex.html', {'caves': caves, 'notablecaves':notablecaves})
|
||||
|
||||
def cave(request, cave_id='', official_name=''):
|
||||
return render_response(request,'cave.html', {'cave': getCave(cave_id),})
|
||||
|
||||
def qm(request,cave_id,qm_id,year,grade=None):
|
||||
year=int(year)
|
||||
try:
|
||||
qm=getCave(cave_id).get_QMs().get(number=qm_id,found_by__date__year=year)
|
||||
return render_response(request,'qm.html',locals())
|
||||
|
||||
except QM.DoesNotExist:
|
||||
url= settings.URL_ROOT + r'/admin/expo/qm/add/?'+ r'number=' + qm_id
|
||||
if grade:
|
||||
url += r'&grade=' + grade
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
|
||||
def ent(request, cave_id, ent_letter):
|
||||
cave = Cave.objects.filter(kataster_number = cave_id)[0]
|
||||
cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]
|
||||
return render_response(request,'entrance.html', {'cave': cave,
|
||||
'entrance': cave_and_ent.entrance,
|
||||
'letter': cave_and_ent.entrance_letter,})
|
||||
|
||||
def survexblock(request, survexpath):
|
||||
survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
|
||||
#ftext = survexblock.filecontents()
|
||||
ftext = survexblock.text
|
||||
return render_response(request,'survexblock.html', {'survexblock':survexblock, 'ftext':ftext, })
|
||||
|
||||
def subcave(request, cave_id, subcave):
|
||||
print subcave
|
||||
subcaveSeq=re.findall('(?:/)([^/]*)',subcave)
|
||||
print subcaveSeq
|
||||
cave=models.Cave.objects.get(kataster_number = cave_id)
|
||||
subcave=models.Subcave.objects.get(name=subcaveSeq[0], cave=cave)
|
||||
if len(subcaveSeq)>1:
|
||||
for subcaveUrlSegment in subcaveSeq[1:]:
|
||||
if subcaveUrlSegment:
|
||||
subcave=subcave.children.get(name=subcaveUrlSegment)
|
||||
print subcave
|
||||
return render_response(request,'subcave.html', {'subcave': subcave,'cave':cave})
|
||||
|
||||
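# Free-text cave search: builds a query from the 'q' GET parameter with search.get_query()
# across underground_description and official_name.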
def caveSearch(request):
|
||||
query_string = ''
|
||||
found_entries = None
|
||||
if ('q' in request.GET) and request.GET['q'].strip():
|
||||
query_string = request.GET['q']
|
||||
entry_query = search.get_query(query_string, ['underground_description','official_name',])
|
||||
found_entries = Cave.objects.filter(entry_query)
|
||||
|
||||
return render_response(request,'cavesearch.html',
|
||||
{ 'query_string': query_string, 'found_entries': found_entries,})
|
||||
|
||||
def surveyindex(request):
|
||||
surveys=Survey.objects.all()
|
||||
expeditions=Expedition.objects.order_by("-year")
|
||||
return render_response(request,'survey.html',locals())
|
||||
|
||||
def survey(request,year,wallet_number):
|
||||
surveys=Survey.objects.all()
|
||||
expeditions=Expedition.objects.order_by("-year")
|
||||
current_expedition=Expedition.objects.filter(year=year)[0]
|
||||
|
||||
if wallet_number!='':
|
||||
current_survey=Survey.objects.filter(expedition=current_expedition,wallet_number=wallet_number)[0]
|
||||
notes=current_survey.scannedimage_set.filter(contents='notes')
|
||||
planSketches=current_survey.scannedimage_set.filter(contents='plan')
|
||||
elevationSketches=current_survey.scannedimage_set.filter(contents='elevation')
|
||||
|
||||
return render_response(request,'survey.html', locals())
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
from django.shortcuts import render_to_response
|
||||
from troggle.expo.models import Expedition, Person, PersonExpedition, PersonTrip, LogbookEntry
|
||||
import troggle.settings as settings
|
||||
from django.db import models
|
||||
from troggle.parsers.logbooks import LoadLogbookForExpedition
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
from troggle.expo.forms import PersonForm
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.http import HttpResponseRedirect
|
||||
|
||||
# Django uses Context, not RequestContext when you call render_to_response. We always want to use RequestContext, so that django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get necessary settings variables passed to each template. So we use a custom method, render_response instead of render_to_response. Hopefully future Django releases will make this unnecessary.
|
||||
from troggle.alwaysUseRequestContext import render_response
|
||||
|
||||
import search
|
||||
import re
|
||||
|
||||
@models.permalink #this allows the nice get_absolute_url syntax we are using
|
||||
|
||||
def getNotablePersons():
|
||||
notablepersons = []
|
||||
for person in Person.objects.all():
|
||||
if person.bisnotable():
|
||||
notablepersons.append(person)
|
||||
return notablepersons
|
||||
|
||||
def personindex(request):
|
||||
persons = Person.objects.all()
|
||||
# From what I can tell, "persons" seems to be the table rows, while "personss" is the table columns. - AC 16 Feb 09
|
||||
personss = [ ]
|
||||
ncols = 5
|
||||
nc = (len(persons) + ncols - 1) / ncols
|
||||
for i in range(ncols):
|
||||
personss.append(persons[i * nc: (i + 1) * nc])
|
||||
|
||||
notablepersons = []
|
||||
for person in Person.objects.all():
|
||||
if person.bisnotable():
|
||||
notablepersons.append(person)
|
||||
|
||||
return render_response(request,'personindex.html', {'persons': persons, 'personss':personss, 'notablepersons':notablepersons, })
|
||||
|
||||
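# Expedition page: looks up the expedition for the given year, finds the previous/next
# expeditions where they exist, and re-imports that year's logbook when ?reload is passed.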
def expedition(request, expeditionname):
|
||||
year = int(expeditionname)
|
||||
expedition = Expedition.objects.get(year=year)
|
||||
expedition_next = Expedition.objects.filter(year=year+1) and Expedition.objects.get(year=year+1) or None
|
||||
expedition_prev = Expedition.objects.filter(year=year-1) and Expedition.objects.get(year=year-1) or None
|
||||
message = "No message"
|
||||
if "reload" in request.GET:
|
||||
message = LoadLogbookForExpedition(expedition)
|
||||
#message = str(GetPersonExpeditionNameLookup(expedition).keys())
|
||||
logbookentries = expedition.logbookentry_set.order_by('date')
|
||||
return render_response(request,'expedition.html', {'expedition': expedition, 'expedition_next':expedition_next, 'expedition_prev':expedition_prev, 'logbookentries':logbookentries, 'message':message, })
|
||||
|
||||
def get_absolute_url(self):
|
||||
return ('expedition', (expedition.year))
|
||||
|
||||
def person(request, first_name='', last_name='', ):
|
||||
person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||
|
||||
#This is for removing the reference to the user's profile, in case they set it to the wrong person
|
||||
if request.method == 'GET':
|
||||
if request.GET.get('clear_profile')=='True':
|
||||
person.user=None
|
||||
person.save()
|
||||
return HttpResponseRedirect(reverse('profiles_select_profile'))
|
||||
|
||||
return render_response(request,'person.html', {'person': person, })
|
||||
|
||||
def get_absolute_url(self):
|
||||
return settings.URL_ROOT + self.first_name + '_' + self.last_name
|
||||
|
||||
#def person(request, name):
|
||||
# person = Person.objects.get(href=name)
|
||||
#
|
||||
|
||||
def personexpedition(request, first_name='', last_name='', year=''):
|
||||
person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||
expedition = Expedition.objects.get(year=year)
|
||||
personexpedition = person.personexpedition_set.get(expedition=expedition)
|
||||
return render_response(request,'personexpedition.html', {'personexpedition': personexpedition, })
|
||||
|
||||
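# Builds a link to the admin "add QM" form for this logbook entry, pre-filling the next free
# QM number found by scanning the existing QMs of the entry's cave; returns None when the
# entry has no cave.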
def newQMlink(logbookentry):
|
||||
biggestQMnumber=0
|
||||
if logbookentry.cave:
|
||||
for log in logbookentry.cave.logbookentry_set.all():
|
||||
try:
|
||||
biggestQMnumberInLog = log.QMs_found.order_by('-number')[0].number
|
||||
except IndexError:
|
||||
biggestQMnumberInLog = 0
|
||||
if biggestQMnumberInLog > biggestQMnumber:
|
||||
biggestQMnumber = biggestQMnumberInLog
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
nextQMnumber=biggestQMnumber+1
|
||||
return settings.URL_ROOT + r'/admin/expo/qm/add/?' + r'found_by=' + str(logbookentry.pk) +'&number=' + str(nextQMnumber)
|
||||
|
||||
def logbookentry(request, date, slug):
|
||||
logbookentry = LogbookEntry.objects.get(date=date, slug=slug)
|
||||
|
||||
return render_response(request, 'logbookentry.html', {'logbookentry': logbookentry, 'newQMlink':newQMlink(logbookentry)})
|
||||
|
||||
def logbookSearch(request, extra):
|
||||
query_string = ''
|
||||
found_entries = None
|
||||
if ('q' in request.GET) and request.GET['q'].strip():
|
||||
query_string = request.GET['q']
|
||||
entry_query = search.get_query(query_string, ['text','title',])
|
||||
found_entries = LogbookEntry.objects.filter(entry_query)
|
||||
|
||||
return render_response(request,'logbooksearch.html',
|
||||
{ 'query_string': query_string, 'found_entries': found_entries, })
|
||||
#context_instance=RequestContext(request))
|
||||
|
||||
def personForm(request,pk):
|
||||
person=Person.objects.get(pk=pk)
|
||||
form=PersonForm(instance=person)
|
||||
return render_response(request,'personform.html', {'form':form,})
|
||||
@@ -1,70 +0,0 @@
|
||||
from troggle.expo.models import Cave, Expedition, Person, LogbookEntry, PersonExpedition, PersonTrip, Photo
|
||||
import troggle.settings as settings
|
||||
from django import forms
|
||||
from django.db.models import Q
|
||||
import databaseReset
|
||||
import re
|
||||
import randSent
|
||||
from django.http import HttpResponse
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from troggle.alwaysUseRequestContext import render_response # see views_logbooks for explanation on this.
# The two imports below are needed by frontpage() and todo() further down, which call these
# loaders; the parser module paths are assumed to mirror those used in views_logbooks.
from troggle.parsers.people import LoadPersonsExpos
from troggle.parsers.survex import LoadAllSurvexBlocks
|
||||
|
||||
def showrequest(request):
|
||||
return HttpResponse(request.GET)
|
||||
|
||||
def stats(request):
|
||||
statsDict={}
|
||||
statsDict['expoCount'] = int(Expedition.objects.count())
|
||||
statsDict['caveCount'] = int(Cave.objects.count())
|
||||
statsDict['personCount'] = int(Person.objects.count())
|
||||
statsDict['logbookEntryCount'] = int(LogbookEntry.objects.count())
|
||||
return render_response(request,'statistics.html', statsDict)
|
||||
|
||||
def frontpage(request):
|
||||
message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
|
||||
if "reloadexpos" in request.GET:
|
||||
message = LoadPersonsExpos()
|
||||
message = "Reloaded personexpos"
|
||||
if "reloadsurvex" in request.POST:
|
||||
message = LoadAllSurvexBlocks()
|
||||
message = "Reloaded survexblocks"
|
||||
|
||||
#'randSent':randSent.randomLogbookSentence(),
|
||||
expeditions = Expedition.objects.order_by("-year")
|
||||
logbookentry = LogbookEntry
|
||||
cave = Cave
|
||||
photo = Photo
|
||||
return render_response(request,'frontpage.html', locals())
|
||||
|
||||
def todo(request):
|
||||
message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
|
||||
if "reloadexpos" in request.GET:
|
||||
message = LoadPersonsExpos()
|
||||
message = "Reloaded personexpos"
|
||||
if "reloadsurvex" in request.POST:
|
||||
message = LoadAllSurvexBlocks()
|
||||
message = "Reloaded survexblocks"
|
||||
|
||||
#'randSent':randSent.randomLogbookSentence(),
|
||||
expeditions = Expedition.objects.order_by("-year")
|
||||
totallogbookentries = LogbookEntry.objects.count()
|
||||
return render_response(request,'index.html', {'expeditions':expeditions, 'all':'all', 'totallogbookentries':totallogbookentries, "message":message})
|
||||
|
||||
def calendar(request,year):
|
||||
week=['S','S','M','T','W','T','F']
|
||||
if year:
|
||||
expedition=Expedition.objects.get(year=year)
|
||||
PersonExpeditions=expedition.personexpedition_set.all()
|
||||
|
||||
return render_response(request,'calendar.html', locals())
|
||||
|
||||
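# Control panel: when a superuser POSTs the form, each submitted field name is treated as
# the name of a function in the databaseReset module and is run via exec.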
def controlPanel(request):
|
||||
message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
|
||||
if request.method=='POST':
|
||||
for item in request.POST:
|
||||
if request.user.is_superuser and item!='item':
|
||||
print "running"+ " databaseReset."+item+"()"
|
||||
exec "databaseReset."+item+"()"
|
||||
|
||||
return render_response(request,'controlPanel.html', )
|
||||
@@ -1,44 +0,0 @@
|
||||
from django.shortcuts import render_to_response
|
||||
from django.http import HttpResponse, Http404
|
||||
import re
|
||||
import os
|
||||
|
||||
import troggle.settings as settings
|
||||
|
||||
def index(request, survex_file):
|
||||
process(survex_file)
|
||||
f = open(settings.SURVEX_DATA + survex_file + ".svx", "rb")
|
||||
a = f.read()
|
||||
return render_to_response('svxfile.html', {'settings': settings,
|
||||
'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
|
||||
'title': survex_file,
|
||||
'text': unicode(a, "latin1")})
|
||||
|
||||
def svx(request, survex_file):
|
||||
svx = open(settings.SURVEX_DATA + survex_file + ".svx", "rb")
|
||||
return HttpResponse(svx, mimetype="text")
|
||||
|
||||
def threed(request, survex_file):
|
||||
process(survex_file)
|
||||
try:
|
||||
threed = open(settings.SURVEX_DATA + survex_file + ".3d", "rb")
|
||||
return HttpResponse(threed, mimetype="model/3d")
|
||||
except:
|
||||
log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
|
||||
return HttpResponse(log, mimetype="text")
|
||||
|
||||
def log(request, survex_file):
|
||||
process(survex_file)
|
||||
log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
|
||||
return HttpResponse(log, mimetype="text")
|
||||
|
||||
def err(request, survex_file):
|
||||
process(survex_file)
|
||||
err = open(settings.SURVEX_DATA + survex_file + ".err", "rb")
|
||||
return HttpResponse(err, mimetype="text")
|
||||
|
||||
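# Runs Survex's cavern on the named .svx file (from that file's own directory) so the .3d
# and .log outputs requested by the views above are up to date.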
def process(survex_file):
|
||||
cwd = os.getcwd()
|
||||
os.chdir(os.path.split(settings.SURVEX_DATA + survex_file)[0])
|
||||
os.system(settings.CAVERN + " --log " +settings.SURVEX_DATA + survex_file + ".svx")
|
||||
os.chdir(cwd)
|
||||
@@ -1,52 +0,0 @@
|
||||
import troggle.expo.models as models
|
||||
from django.conf import settings
|
||||
|
||||
import csv
|
||||
import re
|
||||
import os
|
||||
|
||||
#format of CAVETAB2.CSV is
|
||||
headers=['KatasterNumber','KatStatusCode','Entrances','UnofficialNumber','MultipleEntrances','AutogenFile','LinkFile','LinkEntrance','Name','UnofficialName',
|
||||
'Comment','Area','Explorers','UndergroundDescription','Equipment','QMList','KatasterStatus','References','UndergroundCentreLine','UndergroundDrawnSurvey',
|
||||
'SurvexFile','Length','Depth','Extent','Notes','EntranceName','TagPoint','OtherPoint','DescriptionOfOtherPoint','ExactEntrance','TypeOfFix','GPSpreSA',
|
||||
'GPSpostSA','Northing','Easting','Altitude','Bearings','Map','Location','Approach','EntranceDescription','PhotoOfLocation','Marking','MarkingComment',
|
||||
'Findability','FindabilityComment']
|
||||
|
||||
def cavetabRow(cave):
|
||||
#mapping of troggle models to table columns is: (guess this could just be a tuple of tuples rather than a dictionary actually)
|
||||
columnsToModelFields={
|
||||
'Name':cave.official_name,
|
||||
'Area':cave.kat_area(),
|
||||
'KatStatusCode':cave.kataster_code,
|
||||
'KatasterNumber':cave.kataster_number,
|
||||
'UnofficialNumber':cave.unofficial_number,
|
||||
#'' : cave.entrances This is a multiple foreignkey now, may be tricky to dump back into csv. Work on this.
|
||||
'Explorers':cave.explorers,
|
||||
'UndergroundDescription':cave.underground_description,
|
||||
'Equipment':cave.equipment,
|
||||
'References':cave.references,
|
||||
'UndergroundDrawnSurvey':cave.survey,
|
||||
'KatasterStatus':cave.kataster_status,
|
||||
'UndergroundCentreLine':cave.underground_centre_line,
|
||||
'Notes':cave.notes,
|
||||
'Length':cave.length,
|
||||
'Depth':cave.depth,
|
||||
'Extent':cave.extent,
|
||||
'SurvexFile':cave.survex_file,
|
||||
}
|
||||
|
||||
caveRow=['' for x in range(len(headers))]
|
||||
for column, modelField in columnsToModelFields.items():
|
||||
if modelField:
|
||||
# Very sorry about the atrocious replace below. I will fix this soon if no one beats me to it. - AC
|
||||
caveRow[headers.index(column)]=modelField.replace(u'\xd7','x').replace(u'\u201c','').replace(u'\u2013','').replace(u'\xbd','')
|
||||
return caveRow
|
||||
|
||||
def writeCaveTab(path):
|
||||
outfile=file(path,'w')
|
||||
cavewriter=csv.writer(outfile,lineterminator='\r')
|
||||
cavewriter.writerow(headers)
|
||||
for cave in models.Cave.objects.all():
|
||||
cavewriter.writerow(cavetabRow(cave))
|
||||
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import troggle.expo.models as models
|
||||
from django.conf import settings
|
||||
|
||||
import csv
|
||||
import re
|
||||
import os
|
||||
|
||||
#format of QM tables
|
||||
headers=['Number','Grade','Area','Description','Page reference','Nearest station','Completion description','Comment']
|
||||
|
||||
def qmRow(qm):
|
||||
#mapping of troggle models to table columns is: (guess this could just be a tuple of tuples rather than a dictionary actually)
|
||||
columnsToModelFields={
|
||||
'Number':str(qm.number),
|
||||
'Grade':qm.grade,
|
||||
'Area':qm.area,
|
||||
'Description':qm.location_description,
|
||||
#'Page reference': #not implemented
|
||||
'Nearest station':qm.nearest_station_description,
|
||||
'Completion description':qm.completion_description,
|
||||
'Comment':qm.comment
|
||||
}
|
||||
|
||||
qmRow=['' for x in range(len(headers))]
|
||||
for column, modelField in columnsToModelFields.items():
|
||||
if modelField:
|
||||
# Very sorry about the atrocious replace below. I will fix this soon if no one beats me to it. - AC
|
||||
qmRow[headers.index(column)]=modelField.replace(u'\xd7','x').replace(u'\u201c','').replace(u'\u2013','').replace(u'\xbd','')
|
||||
return qmRow
|
||||
|
||||
def writeQmTable(path,cave):
|
||||
outfile=file(path,'w')
|
||||
cavewriter=csv.writer(outfile,lineterminator='\r')
|
||||
cavewriter.writerow(headers)
|
||||
for qm in cave.get_QMs():
|
||||
cavewriter.writerow(qmRow(qm))
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
"""
|
||||
|
||||
Django ImageKit
|
||||
|
||||
Author: Justin Driscoll <justin.driscoll@gmail.com>
|
||||
Version: 0.2
|
||||
|
||||
"""
|
||||
VERSION = "0.2"
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
""" Default ImageKit configuration """
|
||||
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit import processors
|
||||
|
||||
class ResizeThumbnail(processors.Resize):
|
||||
width = 100
|
||||
height = 50
|
||||
crop = True
|
||||
|
||||
class EnhanceSmall(processors.Adjustment):
|
||||
contrast = 1.2
|
||||
sharpness = 1.1
|
||||
|
||||
class SampleReflection(processors.Reflection):
|
||||
size = 0.5
|
||||
background_color = "#000000"
|
||||
|
||||
class DjangoAdminThumbnail(ImageSpec):
|
||||
access_as = 'admin_thumbnail'
|
||||
processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
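# Rough usage sketch (assuming a model inheriting ImageModel with this default spec module):
# each spec is exposed on instances under its access_as name, e.g.
#   photo.admin_thumbnail.url   # generates the cached thumbnail if needed and returns its URL
# where "photo" is a hypothetical ImageModel instance.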
|
||||
@@ -1,17 +0,0 @@
|
||||
# Required PIL classes may or may not be available from the root namespace
|
||||
# depending on the installation method used.
|
||||
try:
|
||||
import Image
|
||||
import ImageFile
|
||||
import ImageFilter
|
||||
import ImageEnhance
|
||||
import ImageColor
|
||||
except ImportError:
|
||||
try:
|
||||
from PIL import Image
|
||||
from PIL import ImageFile
|
||||
from PIL import ImageFilter
|
||||
from PIL import ImageEnhance
|
||||
from PIL import ImageColor
|
||||
except ImportError:
|
||||
raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it\'s installed and available on your current Python path.')
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
from django.db.models.loading import cache
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.specs import ImageSpec
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = ('Clears all ImageKit cached files.')
|
||||
args = '[apps]'
|
||||
requires_model_validation = True
|
||||
can_import_settings = True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
return flush_cache(args, options)
|
||||
|
||||
def flush_cache(apps, options):
|
||||
""" Clears the image cache
|
||||
|
||||
"""
|
||||
apps = [a.strip(',') for a in apps]
|
||||
if apps:
|
||||
print 'Flushing cache for %s...' % ', '.join(apps)
|
||||
else:
|
||||
print 'Flushing caches...'
|
||||
|
||||
for app_label in apps:
|
||||
app = cache.get_app(app_label)
|
||||
models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]
|
||||
|
||||
for model in models:
|
||||
for obj in model.objects.all():
|
||||
for spec in model._ik.specs:
|
||||
prop = getattr(obj, spec.name(), None)
|
||||
if prop is not None:
|
||||
prop._delete()
|
||||
if spec.pre_cache:
|
||||
prop._create()
|
||||
@@ -1,136 +0,0 @@
|
||||
import os
|
||||
from datetime import datetime
|
||||
from django.conf import settings
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models
|
||||
from django.db.models.base import ModelBase
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from imagekit import specs
|
||||
from imagekit.lib import *
|
||||
from imagekit.options import Options
|
||||
from imagekit.utils import img_to_fobj
|
||||
|
||||
# Modify image file buffer size.
|
||||
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)
|
||||
|
||||
# Choice tuples for specifying the crop origin.
|
||||
# These are provided for convenience.
|
||||
CROP_HORZ_CHOICES = (
|
||||
(0, _('left')),
|
||||
(1, _('center')),
|
||||
(2, _('right')),
|
||||
)
|
||||
|
||||
CROP_VERT_CHOICES = (
|
||||
(0, _('top')),
|
||||
(1, _('center')),
|
||||
(2, _('bottom')),
|
||||
)
|
||||
|
||||
|
||||
class ImageModelBase(ModelBase):
|
||||
""" ImageModel metaclass
|
||||
|
||||
This metaclass parses IKOptions and loads the specified specification
|
||||
module.
|
||||
|
||||
"""
|
||||
def __init__(cls, name, bases, attrs):
|
||||
parents = [b for b in bases if isinstance(b, ImageModelBase)]
|
||||
if not parents:
|
||||
return
|
||||
user_opts = getattr(cls, 'IKOptions', None)
|
||||
opts = Options(user_opts)
|
||||
try:
|
||||
module = __import__(opts.spec_module, {}, {}, [''])
|
||||
except ImportError:
|
||||
raise ImportError('Unable to load imagekit config module: %s' % \
|
||||
opts.spec_module)
|
||||
for spec in [spec for spec in module.__dict__.values() \
|
||||
if isinstance(spec, type) \
|
||||
and issubclass(spec, specs.ImageSpec) \
|
||||
and spec != specs.ImageSpec]:
|
||||
setattr(cls, spec.name(), specs.Descriptor(spec))
|
||||
opts.specs.append(spec)
|
||||
setattr(cls, '_ik', opts)
|
||||
|
||||
|
||||
class ImageModel(models.Model):
|
||||
""" Abstract base class implementing all core ImageKit functionality
|
||||
|
||||
Subclasses of ImageModel are augmented with accessors for each defined
|
||||
image specification and can override the inner IKOptions class to customize
|
||||
storage locations and other options.
|
||||
|
||||
"""
|
||||
__metaclass__ = ImageModelBase
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
class IKOptions:
|
||||
pass
|
||||
|
||||
def admin_thumbnail_view(self):
|
||||
if not self._imgfield:
|
||||
return None
|
||||
prop = getattr(self, self._ik.admin_thumbnail_spec, None)
|
||||
if prop is None:
|
||||
return 'An "%s" image spec has not been defined.' % \
|
||||
self._ik.admin_thumbnail_spec
|
||||
else:
|
||||
if hasattr(self, 'get_absolute_url'):
|
||||
return u'<a href="%s"><img src="%s"></a>' % \
|
||||
(self.get_absolute_url(), prop.url)
|
||||
else:
|
||||
return u'<a href="%s"><img src="%s"></a>' % \
|
||||
(self._imgfield.url, prop.url)
|
||||
admin_thumbnail_view.short_description = _('Thumbnail')
|
||||
admin_thumbnail_view.allow_tags = True
|
||||
|
||||
@property
|
||||
def _imgfield(self):
|
||||
return getattr(self, self._ik.image_field)
|
||||
|
||||
def _clear_cache(self):
|
||||
for spec in self._ik.specs:
|
||||
prop = getattr(self, spec.name())
|
||||
prop._delete()
|
||||
|
||||
def _pre_cache(self):
|
||||
for spec in self._ik.specs:
|
||||
if spec.pre_cache:
|
||||
prop = getattr(self, spec.name())
|
||||
prop._create()
|
||||
|
||||
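# On save: newly created objects skip cache clearing; if a preprocessor_spec is set, the
# stored original is re-processed and re-saved in place, then the cached spec images are
# cleared and (for pre_cache specs) regenerated.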
def save(self, clear_cache=True, *args, **kwargs):
|
||||
is_new_object = self._get_pk_val() is None
|
||||
super(ImageModel, self).save(*args, **kwargs)
|
||||
if is_new_object:
|
||||
clear_cache = False
|
||||
spec = self._ik.preprocessor_spec
|
||||
if spec is not None:
|
||||
newfile = self._imgfield.storage.open(str(self._imgfield))
|
||||
img = Image.open(newfile)
|
||||
img = spec.process(img, None)
|
||||
format = img.format or 'JPEG'
|
||||
if format != 'JPEG':
|
||||
imgfile = img_to_fobj(img, format)
|
||||
else:
|
||||
imgfile = img_to_fobj(img, format,
|
||||
quality=int(spec.quality),
|
||||
optimize=True)
|
||||
content = ContentFile(imgfile.read())
|
||||
newfile.close()
|
||||
name = str(self._imgfield)
|
||||
self._imgfield.storage.delete(name)
|
||||
self._imgfield.storage.save(name, content)
|
||||
if clear_cache and self._imgfield != '':
|
||||
self._clear_cache()
|
||||
self._pre_cache()
|
||||
|
||||
def delete(self):
|
||||
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
|
||||
self._clear_cache()
|
||||
models.Model.delete(self)
|
||||
@@ -1,23 +0,0 @@
|
||||
# Imagekit options
|
||||
from imagekit import processors
|
||||
from imagekit.specs import ImageSpec
|
||||
|
||||
|
||||
class Options(object):
|
||||
""" Class handling per-model imagekit options
|
||||
|
||||
"""
|
||||
image_field = 'image'
|
||||
crop_horz_field = 'crop_horz'
|
||||
crop_vert_field = 'crop_vert'
|
||||
preprocessor_spec = None
|
||||
cache_dir = 'cache'
|
||||
save_count_as = None
|
||||
cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
|
||||
admin_thumbnail_spec = 'admin_thumbnail'
|
||||
spec_module = 'imagekit.defaults'
|
||||
|
||||
def __init__(self, opts):
|
||||
for key, value in opts.__dict__.iteritems():
|
||||
setattr(self, key, value)
|
||||
self.specs = []
|
||||
@@ -1,134 +0,0 @@
|
||||
""" Imagekit Image "ImageProcessors"
|
||||
|
||||
A processor defines a set of class variables (optional) and a
|
||||
class method named "process" which processes the supplied image using
|
||||
the class properties as settings. The process method can also be overridden, allowing the user to define their
|
||||
own effects/processes entirely.
|
||||
|
||||
"""
|
||||
from imagekit.lib import *
|
||||
|
||||
class ImageProcessor(object):
|
||||
""" Base image processor class """
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
return image
|
||||
|
||||
|
||||
class Adjustment(ImageProcessor):
|
||||
color = 1.0
|
||||
brightness = 1.0
|
||||
contrast = 1.0
|
||||
sharpness = 1.0
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
|
||||
factor = getattr(cls, name.lower())
|
||||
if factor != 1.0:
|
||||
image = getattr(ImageEnhance, name)(image).enhance(factor)
|
||||
return image
|
||||
|
||||
|
||||
class Reflection(ImageProcessor):
|
||||
background_color = '#FFFFFF'
|
||||
size = 0.0
|
||||
opacity = 0.6
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
# convert bgcolor string to rgb value
|
||||
background_color = ImageColor.getrgb(cls.background_color)
|
||||
# copy the original image and flip the orientation
|
||||
reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
|
||||
# create a new image filled with the bgcolor the same size
|
||||
background = Image.new("RGB", image.size, background_color)
|
||||
# calculate our alpha mask
|
||||
start = int(255 - (255 * cls.opacity)) # The start of our gradient
|
||||
steps = int(255 * cls.size) # the number of intermediate values
|
||||
increment = (255 - start) / float(steps)
|
||||
mask = Image.new('L', (1, 255))
|
||||
for y in range(255):
|
||||
if y < steps:
|
||||
val = int(y * increment + start)
|
||||
else:
|
||||
val = 255
|
||||
mask.putpixel((0, y), val)
|
||||
alpha_mask = mask.resize(image.size)
|
||||
# merge the reflection onto our background color using the alpha mask
|
||||
reflection = Image.composite(background, reflection, alpha_mask)
|
||||
# crop the reflection
|
||||
reflection_height = int(image.size[1] * cls.size)
|
||||
reflection = reflection.crop((0, 0, image.size[0], reflection_height))
|
||||
# create new image sized to hold both the original image and the reflection
|
||||
composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
|
||||
# paste the original image and the reflection into the composite image
|
||||
composite.paste(image, (0, 0))
|
||||
composite.paste(reflection, (0, image.size[1]))
|
||||
# return the image complete with reflection effect
|
||||
return composite
|
||||
|
||||
|
||||
class Resize(ImageProcessor):
|
||||
width = None
|
||||
height = None
|
||||
crop = False
|
||||
upscale = False
|
||||
|
||||
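# With crop enabled the image is scaled to cover width x height and then cropped using the
# object's crop_horz/crop_vert fields (0/1/2 = left/centre/right and top/centre/bottom);
# otherwise it is scaled to fit inside the given bounds, only upscaling when allowed.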
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
cur_width, cur_height = image.size
|
||||
if cls.crop:
|
||||
crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
|
||||
crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
|
||||
ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
|
||||
resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
|
||||
crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
|
||||
x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
|
||||
box_left, box_right = {
|
||||
0: (0, cls.width),
|
||||
1: (int(x_diff), int(x_diff + cls.width)),
|
||||
2: (int(crop_x), int(resize_x)),
|
||||
}[crop_horz]
|
||||
box_upper, box_lower = {
|
||||
0: (0, cls.height),
|
||||
1: (int(y_diff), int(y_diff + cls.height)),
|
||||
2: (int(crop_y), int(resize_y)),
|
||||
}[crop_vert]
|
||||
box = (box_left, box_upper, box_right, box_lower)
|
||||
image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
|
||||
else:
|
||||
if not cls.width is None and not cls.height is None:
|
||||
ratio = min(float(cls.width)/cur_width,
|
||||
float(cls.height)/cur_height)
|
||||
else:
|
||||
if cls.width is None:
|
||||
ratio = float(cls.height)/cur_height
|
||||
else:
|
||||
ratio = float(cls.width)/cur_width
|
||||
new_dimensions = (int(round(cur_width*ratio)),
|
||||
int(round(cur_height*ratio)))
|
||||
if new_dimensions[0] > cur_width or \
|
||||
new_dimensions[1] > cur_height:
|
||||
if not cls.upscale:
|
||||
return image
|
||||
image = image.resize(new_dimensions, Image.ANTIALIAS)
|
||||
return image
|
||||
|
||||
|
||||
class Transpose(ImageProcessor):
|
||||
""" Rotates or flips the image
|
||||
|
||||
Method should be one of the following strings:
|
||||
- FLIP_LEFT RIGHT
|
||||
- FLIP_TOP_BOTTOM
|
||||
- ROTATE_90
|
||||
- ROTATE_270
|
||||
- ROTATE_180
|
||||
|
||||
"""
|
||||
method = 'FLIP_LEFT_RIGHT'
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj=None):
|
||||
return image.transpose(getattr(Image, cls.method))
|
||||
@@ -1,119 +0,0 @@
|
||||
""" ImageKit image specifications
|
||||
|
||||
All imagekit specifications must inherit from the ImageSpec class. Models
|
||||
inheriting from ImageModel will be modified with a descriptor/accessor for each
|
||||
spec found.
|
||||
|
||||
"""
|
||||
import os
|
||||
from StringIO import StringIO
|
||||
from imagekit.lib import *
|
||||
from imagekit.utils import img_to_fobj
|
||||
from django.core.files.base import ContentFile
|
||||
|
||||
class ImageSpec(object):
|
||||
pre_cache = False
|
||||
quality = 70
|
||||
increment_count = False
|
||||
processors = []
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return getattr(cls, 'access_as', cls.__name__.lower())
|
||||
|
||||
@classmethod
|
||||
def process(cls, image, obj):
|
||||
processed_image = image.copy()
|
||||
for proc in cls.processors:
|
||||
processed_image = proc.process(processed_image, obj)
|
||||
return processed_image
|
||||
|
||||
|
||||
class Accessor(object):
|
||||
def __init__(self, obj, spec):
|
||||
self._img = None
|
||||
self._obj = obj
|
||||
self.spec = spec
|
||||
|
||||
def _get_imgfile(self):
|
||||
format = self._img.format or 'JPEG'
|
||||
if format != 'JPEG':
|
||||
imgfile = img_to_fobj(self._img, format)
|
||||
else:
|
||||
imgfile = img_to_fobj(self._img, format,
|
||||
quality=int(self.spec.quality),
|
||||
optimize=True)
|
||||
return imgfile
|
||||
|
||||
def _create(self):
|
||||
if self._exists():
|
||||
return
|
||||
# process the original image file
|
||||
fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
|
||||
fp.seek(0)
|
||||
fp = StringIO(fp.read())
|
||||
try:
|
||||
self._img = self.spec.process(Image.open(fp), self._obj)
|
||||
# save the new image to the cache
|
||||
content = ContentFile(self._get_imgfile().read())
|
||||
self._obj._imgfield.storage.save(self.name, content)
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
def _delete(self):
|
||||
self._obj._imgfield.storage.delete(self.name)
|
||||
|
||||
def _exists(self):
|
||||
return self._obj._imgfield.storage.exists(self.name)
|
||||
|
||||
def _basename(self):
|
||||
filename, extension = \
|
||||
os.path.splitext(os.path.basename(self._obj._imgfield.name))
|
||||
return self._obj._ik.cache_filename_format % \
|
||||
{'filename': filename,
|
||||
'specname': self.spec.name(),
|
||||
'extension': extension.lstrip('.')}
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return os.path.join(self._obj._ik.cache_dir, self._basename())
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
self._create()
|
||||
if self.spec.increment_count:
|
||||
fieldname = self._obj._ik.save_count_as
|
||||
if fieldname is not None:
|
||||
current_count = getattr(self._obj, fieldname)
|
||||
setattr(self._obj, fieldname, current_count + 1)
|
||||
self._obj.save(clear_cache=False)
|
||||
return self._obj._imgfield.storage.url(self.name)
|
||||
|
||||
@property
|
||||
def file(self):
|
||||
self._create()
|
||||
return self._obj._imgfield.storage.open(self.name)
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
if self._img is None:
|
||||
self._create()
|
||||
if self._img is None:
|
||||
self._img = Image.open(self.file)
|
||||
return self._img
|
||||
|
||||
@property
|
||||
def width(self):
|
||||
return self.image.size[0]
|
||||
|
||||
@property
|
||||
def height(self):
|
||||
return self.image.size[1]
|
||||
|
||||
|
||||
class Descriptor(object):
|
||||
def __init__(self, spec):
|
||||
self._spec = spec
|
||||
|
||||
def __get__(self, obj, type=None):
|
||||
return Accessor(obj, self._spec)
|
||||
@@ -1,86 +0,0 @@
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from django.conf import settings
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models
|
||||
from django.test import TestCase
|
||||
|
||||
from imagekit import processors
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit.lib import Image
|
||||
|
||||
|
||||
class ResizeToWidth(processors.Resize):
|
||||
width = 100
|
||||
|
||||
class ResizeToHeight(processors.Resize):
|
||||
height = 100
|
||||
|
||||
class ResizeToFit(processors.Resize):
|
||||
width = 100
|
||||
height = 100
|
||||
|
||||
class ResizeCropped(ResizeToFit):
|
||||
crop = ('center', 'center')
|
||||
|
||||
class TestResizeToWidth(ImageSpec):
|
||||
access_as = 'to_width'
|
||||
processors = [ResizeToWidth]
|
||||
|
||||
class TestResizeToHeight(ImageSpec):
|
||||
access_as = 'to_height'
|
||||
processors = [ResizeToHeight]
|
||||
|
||||
class TestResizeCropped(ImageSpec):
|
||||
access_as = 'cropped'
|
||||
processors = [ResizeCropped]
|
||||
|
||||
class TestPhoto(ImageModel):
|
||||
""" Minimal ImageModel class for testing """
|
||||
image = models.ImageField(upload_to='images')
|
||||
|
||||
class IKOptions:
|
||||
spec_module = 'imagekit.tests'
|
||||
|
||||
|
||||
class IKTest(TestCase):
|
||||
""" Base TestCase class """
|
||||
def setUp(self):
|
||||
# create a test image using tempfile and PIL
|
||||
self.tmp = tempfile.TemporaryFile()
|
||||
Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
|
||||
self.tmp.seek(0)
|
||||
self.p = TestPhoto()
|
||||
self.p.image.save(os.path.basename('test.jpg'),
|
||||
ContentFile(self.tmp.read()))
|
||||
self.p.save()
|
||||
# destroy temp file
|
||||
self.tmp.close()
|
||||
|
||||
def test_setup(self):
|
||||
self.assertEqual(self.p.image.width, 800)
|
||||
self.assertEqual(self.p.image.height, 600)
|
||||
|
||||
def test_to_width(self):
|
||||
self.assertEqual(self.p.to_width.width, 100)
|
||||
self.assertEqual(self.p.to_width.height, 75)
|
||||
|
||||
def test_to_height(self):
|
||||
self.assertEqual(self.p.to_height.width, 133)
|
||||
self.assertEqual(self.p.to_height.height, 100)
|
||||
|
||||
def test_crop(self):
|
||||
self.assertEqual(self.p.cropped.width, 100)
|
||||
self.assertEqual(self.p.cropped.height, 100)
|
||||
|
||||
def test_url(self):
|
||||
tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
|
||||
self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)
|
||||
|
||||
def tearDown(self):
|
||||
# make sure image file is deleted
|
||||
path = self.p.image.path
|
||||
self.p.delete()
|
||||
self.failIf(os.path.isfile(path))
|
||||
@@ -1,15 +0,0 @@
|
||||
""" ImageKit utility functions """
|
||||
|
||||
import tempfile
|
||||
|
||||
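# Saves a PIL image into a temporary file object and rewinds it so callers can read it back.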
def img_to_fobj(img, format, **kwargs):
|
||||
tmp = tempfile.TemporaryFile()
|
||||
if format != 'JPEG':
|
||||
try:
|
||||
img.save(tmp, format, **kwargs)
|
||||
tmp.seek(0)
return tmp
|
||||
except KeyError:
|
||||
pass
|
||||
img.save(tmp, format, **kwargs)
|
||||
tmp.seek(0)
|
||||
return tmp
|
||||
@@ -1,31 +0,0 @@
|
||||
DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
|
||||
DATABASE_NAME = 'troggle' # Or path to database file if using sqlite3.
|
||||
DATABASE_USER = 'undemocracy' # Not used with sqlite3.
|
||||
DATABASE_PASSWORD = 'aiGohsh5' # Not used with sqlite3.
|
||||
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
|
||||
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
|
||||
|
||||
SURVEX_DATA = '/home/mjg/loser/'
|
||||
CAVERN = 'cavern'
|
||||
EXPOWEB = '/home/mjg/expoweb/'
|
||||
SURVEYS = '/home/mjg/surveys/'
|
||||
|
||||
SURVEYS_URL = 'http://framos.lawoftheland.co.uk/troggle/survey_scans/'
|
||||
FILES = "http://framos.lawoftheland.co.uk/troggle/survey_files/"
|
||||
|
||||
SVX_URL = 'http://framos.lawoftheland.co.uk/troggle/survex/'
|
||||
|
||||
PYTHON_PATH = '/home/mjg/expoweb/troggle/'
|
||||
|
||||
MEDIA_URL = 'http://framos.lawoftheland.co.uk/troggle/site_media/'
|
||||
|
||||
MEDIA_ROOT = '/home/mjg/expoweb/troggle/media/'
|
||||
|
||||
URL_ROOT = "http://framos.lawoftheland.co.uk/troggle/"
|
||||
|
||||
TEMPLATE_DIRS = (
|
||||
"/home/mjg/expoweb/troggle/templates",
|
||||
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
|
||||
# Always use forward slashes, even on Windows.
|
||||
# Don't forget to use absolute paths, not relative paths.
|
||||
)
|
||||
@@ -1,20 +0,0 @@
|
||||
DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
|
||||
DATABASE_NAME = 'troggle' # Or path to database file if using sqlite3.
|
||||
DATABASE_USER = 'troggler3' # Not used with sqlite3.
|
||||
DATABASE_PASSWORD = 'ggg' # Not used with sqlite3.
|
||||
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
|
||||
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
|
||||
PYTHON_PATH = '/home/goatchurch/expoweb/troggle/'
|
||||
|
||||
SURVEX_DATA = '/home/goatchurch/loser/'
|
||||
CAVERN = 'cavern'
|
||||
EXPOWEB = '/home/goatchurch/expoweb'
|
||||
URL_ROOT = '/troggle/'
|
||||
|
||||
TEMPLATE_DIRS = (
|
||||
"/home/goatchurch/expoweb/troggle/templates",
|
||||
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
|
||||
# Always use forward slashes, even on Windows.
|
||||
# Don't forget to use absolute paths, not relative paths.
|
||||
)
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
DATABASE_ENGINE = '' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
|
||||
DATABASE_NAME = '' # Or path to database file if using sqlite3.
|
||||
DATABASE_USER = '' # Not used with sqlite3.
|
||||
DATABASE_PASSWORD = '' # Not used with sqlite3.
|
||||
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
|
||||
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
|
||||
|
||||
SURVEX_DATA = 'c:\\Expo\\loser\\'
|
||||
CAVERN = 'cavern'
|
||||
EXPOWEB = 'C:\\Expo\\expoweb\\'
|
||||
SURVEYS = 'E:\\surveys\\'
|
||||
SURVEY_SCANS = 'E:\\surveys\\surveyscans'
|
||||
|
||||
LOGFILE = open(EXPOWEB+'troggle\\parsing_log.txt',"a+b")
|
||||
|
||||
PHOTOS = 'C:\\Expo\\expoweb\\photos'
|
||||
|
||||
URL_ROOT = 'http://127.0.0.1:8000'
|
||||
|
||||
PYTHON_PATH = 'C:\\expoweb\\troggle\\'
|
||||
|
||||
MEDIA_ROOT = 'C:/Expo/expoweb/troggle/media/'
|
||||
|
||||
#FILES = "http://framos.lawoftheland.co.uk/troggle/survey_files/"
|
||||
|
||||
EMAIL_HOST = "smtp.gmail.com"
|
||||
|
||||
EMAIL_HOST_USER = "cuccexpo@gmail.com"
|
||||
|
||||
EMAIL_HOST_PASSWORD = ""
|
||||
|
||||
EMAIL_PORT=587
|
||||
|
||||
EMAIL_USE_TLS = True
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
# Examples: "http://media.lawrence.com", "http://example.com/media/"
|
||||
|
||||
|
||||
|
||||
|
||||
TEMPLATE_DIRS = (
|
||||
"C:/Expo/expoweb/troggle/templates",
|
||||
|
||||
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
|
||||
# Always use forward slashes, even on Windows.
|
||||
# Don't forget to use absolute paths, not relative paths.
|
||||
)
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
from django.core.management import execute_manager
|
||||
try:
|
||||
import settings # Assumed to be in the same directory.
|
||||
except ImportError:
|
||||
import sys
|
||||
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
execute_manager(settings)
|
||||
|
@@ -1,339 +0,0 @@
|
||||
html, body, div, span, applet, object, iframe,
|
||||
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
|
||||
a, abbr, acronym, address, big, cite, code,
|
||||
del, dfn, em, font, img, ins, kbd, q, s, samp,
|
||||
small, strike, strong, sub, sup, tt, var,
|
||||
dl, dt, dd, ol, ul, li,
|
||||
fieldset, form, label, legend,
|
||||
table, caption, tbody, tfoot, thead, tr, th, td
|
||||
{
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
outline: 0;
|
||||
font-weight: inherit;
|
||||
font-style: inherit;
|
||||
font-size: 100%;
|
||||
font-family: inherit;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%
|
||||
}
|
||||
|
||||
|
||||
.caption { font-size: 8pt; margin-bottom: 0pt; }
|
||||
.centre { text-align: center; }
|
||||
.plus2pt { font-size: 160%; }
|
||||
|
||||
ul
|
||||
{
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
body
|
||||
{
|
||||
background-color: white;
|
||||
color: black;
|
||||
font: 100% Verdana, Arial, Helvetica, sans-serif;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
div#content
|
||||
{
|
||||
border: thin black dotted;
|
||||
margin: 50px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
div#footer
|
||||
{
|
||||
clear:both;
|
||||
background-color:black;
|
||||
color:white;
|
||||
text-align:center;
|
||||
margin-left:auto;
|
||||
margin-right:auto;
|
||||
}
|
||||
|
||||
#frontPageBanner{ position:relative; width:inherit; height:inherit; }
|
||||
|
||||
div.logbookentry
|
||||
{
|
||||
text-align:left;
|
||||
}
|
||||
div.logbookentry ul.cavers
|
||||
{
|
||||
float:left;
|
||||
padding-left:20px;
|
||||
padding-right:10px;
|
||||
margin-top:0px;
|
||||
}
|
||||
td.author
|
||||
{
|
||||
background-color:yellow;
|
||||
}
|
||||
|
||||
div.logbookentry p
|
||||
{
|
||||
margin:10px;
|
||||
}
|
||||
|
||||
div#content div#col2
|
||||
{
|
||||
float:right;
|
||||
width:33%;
|
||||
background-color:#feeeed;
|
||||
}
|
||||
|
||||
div#content h2
|
||||
{
|
||||
text-align:center;
|
||||
font-size:200%;
|
||||
padding-bottom:30px;
|
||||
}
|
||||
|
||||
|
||||
table.prevnextexpeditions
|
||||
{
|
||||
width:100%;
|
||||
}
|
||||
|
||||
table.prevnextexpeditions td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
table.expeditionpersonlist
|
||||
{
|
||||
width:100%;
|
||||
}
|
||||
|
||||
table.expeditionpersonlist td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
div#content div#col1
|
||||
{
|
||||
width:66%;
|
||||
}
|
||||
table.expeditionlogbooks td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
ul#expeditionlist
|
||||
{
|
||||
width: 800px
|
||||
}
|
||||
|
||||
div.survexblock
|
||||
{
|
||||
width:50%;
|
||||
background-color:#e0e0e0;
|
||||
}
|
||||
p.indent
|
||||
{
|
||||
margin-left:10px;
|
||||
}
|
||||
|
||||
table.survexcontibutions td.date
|
||||
{ width:90px; }
|
||||
table.survexcontibutions td.roles
|
||||
{ width:100px; background-color:#feeeed; }
|
||||
table.survexcontibutions td.survexblock
|
||||
{ width:260px; background-color:#feeeed; }
|
||||
table.survexcontibutions td.trip
|
||||
{ width:280px; }
|
||||
table.survexcontibutions td.place
|
||||
{ width:140px; }
|
||||
|
||||
#expoHeader {
|
||||
width:100%;
|
||||
position:relative;
|
||||
left:0;
|
||||
right:0;
|
||||
height:100px;
|
||||
}
|
||||
#currentLocation {
|
||||
float:right;
|
||||
background:#999;
|
||||
line-height: 80%;
|
||||
font-variant: small-caps;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
#expoHeaderText {
|
||||
background:#999;
|
||||
position:absolute;
|
||||
bottom:0px;
|
||||
clip:rect(10px auto auto auto)
|
||||
/* filter:alpha(opacity=90);
|
||||
-moz-opacity:.90;
|
||||
opacity:.90; */
|
||||
}
|
||||
|
||||
|
||||
|
||||
hr{
|
||||
margin:0;
|
||||
padding:0;
|
||||
height:1px;
|
||||
border:thin solid #000;
|
||||
border:#000;
|
||||
color:#000;
|
||||
background:#000;
|
||||
}
|
||||
|
||||
#expoHeader h1{
|
||||
position:relative;
|
||||
bottom:-8px;
|
||||
vertical-align:top;
|
||||
}
|
||||
|
||||
#expoFinalDate {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
div.centre img { vertical-align: middle; }
|
||||
|
||||
h1 { text-align: center; font-size: 210%; display: inline;}
|
||||
h2 { color: #009900; }
|
||||
h3 { color: #2c105e; text-align:left; border-bottom:thin solid black; margin-bottom:1em; margin-top:1em }
|
||||
h4 { color: #0d664c; }
|
||||
h4.navbar {line-height: 0px;}
|
||||
img.onright, div.onright { vertical-align: top; float: right;
|
||||
margin-left: 10pt; margin-bottom: 10pt;
|
||||
margin-right: 8pt; }
|
||||
img.onleft, div.onleft { vertical-align: top; float: left;
|
||||
margin-right: 10pt; margin-bottom: 10pt;
|
||||
margin-left: 8pt; }
|
||||
img.icon { vertical-align: middle; }
|
||||
img.aligntop { vertical-align: top; }
|
||||
blockquote {
|
||||
font-family: Georgia, "Times New Roman", Times, serif;
|
||||
font-weight:bold;
|
||||
font-variant:small-caps;
|
||||
width: 400px;
|
||||
background: url(../close-quote.gif) no-repeat right bottom;
|
||||
padding-left: 25px;
|
||||
text-indent: -25px;
|
||||
text-align: right;
|
||||
vertical-align:bottom;
|
||||
color:#CCCC66;
|
||||
}
|
||||
blockquote:first-letter {
|
||||
background: url(../open-quote.gif) no-repeat left top;
|
||||
padding-left: 40px;
|
||||
font: italic 1.4em Georgia, "Times New Roman", Times, serif;
|
||||
}
|
||||
table.imgtable { margin-left: auto; margin-right: auto; }
|
||||
table.imgtable td { vertical-align: middle; text-align: center;
|
||||
padding: 10px; }
|
||||
|
||||
table.normal { border: thin; border-top:solid ; border-left:dotted ; border-bottom:dotted; border-right:hidden ; border-width:1px;}
|
||||
table.normal td { border: thin; border-right:dotted ; border-width:1px; border-spacing:0px }
|
||||
table.normal th { border-left:thin ; border-right:thin ; text-align: left}
|
||||
|
||||
/* "Traditional" table with borders.*/
|
||||
table.trad { margin: 0pt; border: 1px solid #000;
|
||||
border-color: #c0c0c0 #8d8d8d #8d8d8d #c0c0c0; }
|
||||
table.bigfatborder { border-width: 6px; }
|
||||
table.trad td, table.trad th { margin: 0pt; border: 1px solid #aaa;
|
||||
border-color: #8d8d8d #c0c0c0 #c0c0c0 #8d8d8d; }
|
||||
|
||||
/*Divs for layout*/
|
||||
html, body, div.contents {
|
||||
min-height: 100%;
|
||||
height: 100%;
|
||||
width:100%;
|
||||
}
|
||||
html>body, html>body div.contents {
|
||||
height: auto;
|
||||
}
|
||||
body {
|
||||
}
|
||||
div.contents {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
|
||||
div.main {
|
||||
margin-bottom: 3em;
|
||||
}
|
||||
|
||||
|
||||
/* You are not expected to understand this. It is necessary. */
|
||||
table.centre { margin-left: auto; margin-right: auto; }
|
||||
table.centre td { text-align: left; }
|
||||
|
||||
h2#tophead { text-align: center; margin-bottom: -10pt; }
|
||||
table#cavepage { width: 100%; font-size: 160%; }
|
||||
table#cavepage th#kat_no { text-align: left; width: 25%; }
|
||||
table#cavepage th#name { text-align: center; width: 50%; }
|
||||
table#cavepage th#status { text-align: right; width: 25%; }
|
||||
|
||||
.command { color: #FF0000; }
|
||||
.comment { color: #888888; font-style:italic;}
|
||||
|
||||
.thumbnail {
|
||||
width: 300px;
|
||||
}
|
||||
|
||||
table {
|
||||
border: thin solid silver;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding:0px;
|
||||
border: thin solid silver;
|
||||
}
|
||||
|
||||
|
||||
#nav {
|
||||
|
||||
}
|
||||
|
||||
.menuBarItem {
|
||||
font-variant: small-caps;
|
||||
text-align: right;
|
||||
border-top-style: none;
|
||||
border-right-style: none;
|
||||
border-bottom-style: none;
|
||||
border-left-style: none;
|
||||
border-top-width: thin;
|
||||
border-right-width: thin;
|
||||
border-bottom-width: thin;
|
||||
border-left-width: thin;
|
||||
}
|
||||
.behind {
|
||||
display: none;
|
||||
}
|
||||
div.figure {
|
||||
width: 20%;
|
||||
border: thin silver solid;
|
||||
margin: 0.5em;
|
||||
padding: 0.5em;
|
||||
display: inline;
|
||||
float: left;
|
||||
}
|
||||
div.figure p {
|
||||
text-align: left;
|
||||
font-size: smaller;
|
||||
text-indent: 0;
|
||||
}
|
||||
img.thumbnail {
|
||||
width: 100%;
|
||||
}
|
||||
br.clearfloat {
|
||||
clear:both;
|
||||
}
|
||||
@@ -1,374 +0,0 @@
|
||||
html, body, div, span, applet, object, iframe,
|
||||
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
|
||||
a, abbr, acronym, address, big, cite, code,
|
||||
del, dfn, em, font, img, ins, kbd, q, s, samp,
|
||||
small, strike, strong, sub, sup, tt, var,
|
||||
dl, dt, dd, ol, ul, li,
|
||||
fieldset, form, label, legend,
|
||||
table, caption, tbody, tfoot, thead, tr, th, td
|
||||
{
|
||||
|
||||
font-weight: inherit;
|
||||
font-style: inherit;
|
||||
font-size: 100%;
|
||||
font-family: inherit;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
|
||||
.caption { font-size: 8pt; margin-bottom: 0pt; }
|
||||
.centre { text-align: center; }
|
||||
.plus2pt { font-size: 160%; }
|
||||
|
||||
ul
|
||||
{
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
div.logbookentry
|
||||
{
|
||||
text-align:left;
|
||||
}
|
||||
div.logbookentry ul.cavers
|
||||
{
|
||||
float:left;
|
||||
padding-left:20px;
|
||||
padding-right:10px;
|
||||
margin-top:0px;
|
||||
}
|
||||
td.author
|
||||
{
|
||||
background-color:yellow;
|
||||
}
|
||||
|
||||
div.logbookentry p
|
||||
{
|
||||
margin:10px;
|
||||
}
|
||||
|
||||
div#content div#col2
|
||||
{
|
||||
float:right;
|
||||
width:33%;
|
||||
background-color:#feeeed;
|
||||
}
|
||||
|
||||
div#content h2
|
||||
{
|
||||
text-align:center;
|
||||
font-size:200%;
|
||||
padding-bottom:30px;
|
||||
}
|
||||
|
||||
|
||||
table.prevnextexpeditions
|
||||
{
|
||||
width:100%;
|
||||
}
|
||||
|
||||
table.prevnextexpeditions td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
table.expeditionpersonlist
|
||||
{
|
||||
width:100%;
|
||||
}
|
||||
|
||||
table.expeditionpersonlist td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
div#content div#col1
|
||||
{
|
||||
width:66%;
|
||||
}
|
||||
table.expeditionlogbooks td
|
||||
{
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
ul#expeditionlist
|
||||
{
|
||||
width: 300px
|
||||
}
|
||||
|
||||
div.survexblock
|
||||
{
|
||||
width:50%;
|
||||
background-color:#e0e0e0;
|
||||
}
|
||||
p.indent
|
||||
{
|
||||
margin-left:10px;
|
||||
}
|
||||
|
||||
|
||||
#currentLocation {
|
||||
float:right;
|
||||
background:#999;
|
||||
line-height: 80%;
|
||||
font-variant: small-caps;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
hr{
|
||||
margin:0;
|
||||
padding:0;
|
||||
height:1px;
|
||||
border:thin solid #000;
|
||||
border:#000;
|
||||
color:#000;
|
||||
background:#000;
|
||||
}
|
||||
|
||||
div.centre img { vertical-align: middle; }
|
||||
|
||||
h1 { text-align: center; font-size: 210%; display: inline;}
|
||||
h2 { color: #009900; }
|
||||
h3 { color: #000; text-align:left; border-bottom:thin solid black; margin-bottom:1em; margin-top:1em; font-weight:bold}
|
||||
h4 { color: #0d664c; }
|
||||
h4.navbar {line-height: 0px;}
|
||||
img.onright, div.onright { vertical-align: top; float: right;
|
||||
margin-left: 10pt; margin-bottom: 10pt;
|
||||
margin-right: 8pt; }
|
||||
img.onleft, div.onleft { vertical-align: top; float: left;
|
||||
margin-right: 10pt; margin-bottom: 10pt;
|
||||
margin-left: 8pt; }
|
||||
img.icon { vertical-align: middle; }
|
||||
img.aligntop { vertical-align: top; }
|
||||
blockquote {
|
||||
font-family: Georgia, "Times New Roman", Times, serif;
|
||||
font-weight:bold;
|
||||
font-variant:small-caps;
|
||||
width: 400px;
|
||||
background: url(../close-quote.gif) no-repeat right bottom;
|
||||
padding-left: 25px;
|
||||
text-indent: -25px;
|
||||
text-align: right;
|
||||
vertical-align:bottom;
|
||||
color:#CCCC66;
|
||||
}
|
||||
blockquote:first-letter {
|
||||
background: url(../open-quote.gif) no-repeat left top;
|
||||
padding-left: 40px;
|
||||
font: italic 1.4em Georgia, "Times New Roman", Times, serif;
|
||||
}
|
||||
table.imgtable { margin-left: auto; margin-right: auto; }
|
||||
table.imgtable td { vertical-align: middle; text-align: center;
|
||||
padding: 10px; }
|
||||
|
||||
table.normal { border: thin; border-top:solid ; border-left:dotted ; border-bottom:dotted; border-right:hidden ; border-width:1px;}
|
||||
table.normal td { border: thin; border-right:dotted ; border-width:1px; border-spacing:0px }
|
||||
table.normal th { border-left:thin ; border-right:thin ; text-align: left}
|
||||
|
||||
/* "Traditional" table with borders.*/
|
||||
table.trad { margin: 0pt; border: 1px solid #000;
|
||||
border-color: #c0c0c0 #8d8d8d #8d8d8d #c0c0c0; }
|
||||
table.bigfatborder { border-width: 6px; }
|
||||
table.trad td, table.trad th { margin: 0pt; border: 1px solid #aaa;
|
||||
border-color: #8d8d8d #c0c0c0 #c0c0c0 #8d8d8d; }
|
||||
|
||||
|
||||
/* You are not expected to understand this. It is necessary. */
|
||||
/* The above is the most fucktarded comment I have ever read :-) AC, 24 APR 2009 */
|
||||
table.centre { margin-left: auto; margin-right: auto; }
|
||||
table.centre td { text-align: left; }
|
||||
|
||||
h2#tophead { text-align: center; margin-bottom: -10pt; }
|
||||
table#cavepage { width: 100%; font-size: 160%; }
|
||||
table#cavepage th#kat_no { text-align: left; width: 25%; }
|
||||
table#cavepage th#name { text-align: center; width: 50%; }
|
||||
table#cavepage th#status { text-align: right; width: 25%; }
|
||||
|
||||
.command { color: #FF0000; }
|
||||
.comment { color: #888888; font-style:italic;}
|
||||
|
||||
.thumbnail {
|
||||
width: 300px;
|
||||
}
|
||||
|
||||
table {
|
||||
border: thin solid silver;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding:0px;
|
||||
border: thin solid silver;
|
||||
}
|
||||
|
||||
|
||||
.redtext{
|
||||
color:#F00;
|
||||
}
|
||||
|
||||
a.redtext:link {
|
||||
color:#F00;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
.menuBarItem {
|
||||
font-variant: small-caps;
|
||||
text-align: right;
|
||||
border-top-style: none;
|
||||
border-right-style: none;
|
||||
border-bottom-style: none;
|
||||
border-left-style: none;
|
||||
border-top-width: thin;
|
||||
border-right-width: thin;
|
||||
border-bottom-width: thin;
|
||||
border-left-width: thin;
|
||||
}
|
||||
.behind {
|
||||
display: none;
|
||||
}
|
||||
div.figure {
|
||||
width: 20%;
|
||||
border: thin white solid;
|
||||
margin: 0.5em;
|
||||
padding: 0.5em;
|
||||
display: inline;
|
||||
float: left;
|
||||
}
|
||||
div.figure p {
|
||||
text-align: left;
|
||||
font-size: smaller;
|
||||
text-indent: 0;
|
||||
}
|
||||
img.thumbnail {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#error {
|
||||
color: red;
|
||||
}
|
||||
|
||||
div#header {
|
||||
position:fixed;
|
||||
left:100px;
|
||||
right:100px;
|
||||
top:0;
|
||||
margin-left:auto;
|
||||
margin-right:auto;
|
||||
height:50px;
|
||||
background-image: url( ../204plan.gif);
|
||||
border-bottom:thin solid #000;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-variant: normal;
|
||||
}
|
||||
|
||||
|
||||
div#editLinks {
|
||||
position:absolute;
|
||||
background: #999;
|
||||
bottom:0px;
|
||||
right:0px;
|
||||
font-family: "Courier New", Courier, monospace;
|
||||
filter:alpha(opacity=75);
|
||||
-moz-opacity:.75;
|
||||
opacity:.75;
|
||||
}
|
||||
|
||||
div#editLinks a{
|
||||
color:#FFF;
|
||||
}
|
||||
|
||||
div#content {
|
||||
margin-top: 50px;
|
||||
margin-left: 120px;
|
||||
margin-right: 120px;
|
||||
padding-top: 10px;
|
||||
padding-left: 5em;
|
||||
padding-right: 5em;
|
||||
background:#CCC;
|
||||
}
|
||||
|
||||
|
||||
.footer {
|
||||
position:fixed;
|
||||
width:100%;
|
||||
bottom:0;
|
||||
left:0;
|
||||
}
|
||||
|
||||
body {
|
||||
background-color:#000;
|
||||
padding-bottom:100px;
|
||||
|
||||
}
|
||||
|
||||
h1 {
|
||||
margin-top:0;
|
||||
margin-left:10px;
|
||||
vertical-align:top;
|
||||
}
|
||||
|
||||
|
||||
.rightMargin {
|
||||
position:absolute;
|
||||
z-index:-2;
|
||||
width:130px;
|
||||
right:0px;
|
||||
top:0px;
|
||||
clip: rect(auto,auto,auto,auto);
|
||||
background:#000;
|
||||
}
|
||||
|
||||
.leftMargin {
|
||||
position:absolute;
|
||||
z-index:-2;
|
||||
width:130px;
|
||||
top:0px;
|
||||
left:0px;
|
||||
clip: rect(auto, 100px, auto, auto);
|
||||
background:#000;
|
||||
}
|
||||
|
||||
#footerLinks{
|
||||
position:fixed;
|
||||
text-align: center;
|
||||
bottom:0;
|
||||
left:0;
|
||||
width:100%;
|
||||
background-color:#000;
|
||||
color:#999
|
||||
}
|
||||
|
||||
#footerLinks a{
|
||||
color:#FFF
|
||||
}
|
||||
|
||||
/*.fadeIn {
|
||||
display: none;
|
||||
}*/
|
||||
|
||||
#timeMachine {
|
||||
width:auto;
|
||||
right:0;
|
||||
left:auto;
|
||||
}
|
||||
|
||||
#surveyHover {
|
||||
width:auto;
|
||||
right:auto;
|
||||
left:auto;
|
||||
}
|
||||
|
||||
#col1 {
|
||||
width:60%
|
||||
}
|
||||
|
||||
#quicksearch {
|
||||
margin-left:40px;
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
div#nav {
|
||||
position:fixed;
|
||||
width: 12em;
|
||||
background: rgb(153, 153, 153);
|
||||
margin-top: 0px;
|
||||
margin-left: 120px;
|
||||
border-top: thin black solid;
|
||||
}
|
||||
|
||||
div#content {
|
||||
padding-left:240px;
|
||||
}
|
||||
|
@@ -1,74 +0,0 @@
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
$('.searchable li').quicksearch({
|
||||
position: 'before',
|
||||
attached: 'ul.searchable',
|
||||
labelText: '',
|
||||
loaderText: '',
|
||||
delay: 100
|
||||
})
|
||||
|
||||
$('table.searchable tr').quicksearch({
|
||||
position: 'before',
|
||||
attached: 'table.searchable:first'
|
||||
});
|
||||
|
||||
$(".toggleEyeCandy").click(function () {
|
||||
$(".leftMargin,.rightMargin").toggle("fade");
|
||||
$(".toggleEyeCandy").toggle();
|
||||
});
|
||||
|
||||
$(".nav").css('opacity','7')
|
||||
$(".footer").hide();
|
||||
$(".fadeIn").hide();
|
||||
setTimeout("$('.leftMargin.fadeIn').fadeIn(3000);",1000);
|
||||
setTimeout("$('.rightMargin.fadeIn').fadeIn(3000);",2000);
|
||||
|
||||
|
||||
/*$("#footerLinks").hover(
|
||||
function() {$(".footer").fadeIn("slow")},
|
||||
function() {$(".footer").fadeOut("slow")}
|
||||
);*/
|
||||
|
||||
function linkHover(hoverLink,image){
|
||||
|
||||
$(hoverLink).hover(
|
||||
function() {
|
||||
$(image).fadeIn("slow");
|
||||
$(hoverLink).css("background","gray");
|
||||
},
|
||||
function() {
|
||||
$(image).fadeOut("slow");
|
||||
$(hoverLink).css("background","black");
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
|
||||
};
|
||||
|
||||
linkHover("#expoWebsiteLink","#richardBanner");
|
||||
linkHover("#cuccLink","#timeMachine");
|
||||
linkHover("#surveyBinderLink","#surveyHover");
|
||||
linkHover("#troggle","#timeMachine");
|
||||
|
||||
|
||||
});
|
||||
|
||||
function contentHeight(){
|
||||
setMaxHeight($(".rightMargin,#content,.leftMargin,#col2"),$("#content"));
|
||||
};
|
||||
|
||||
function setMaxHeight(group, target) {
|
||||
tallest = 0;
|
||||
group.each(function() {
|
||||
thisHeight = $(this).height();
|
||||
if(thisHeight > tallest) {
|
||||
tallest = thisHeight;
|
||||
}
|
||||
});
|
||||
target.height(tallest);
|
||||
}
|
||||
|
||||
|
||||
troggle/media/js/jquery.js (vendored, 4376 lines; diff not shown)
@@ -1,328 +0,0 @@
|
||||
jQuery(function ($) {
|
||||
$.fn.quicksearch = function (opt) {
|
||||
|
||||
function is_empty(i)
|
||||
{
|
||||
return (i === null || i === undefined || i === false) ? true: false;
|
||||
}
|
||||
|
||||
function strip_html(input)
|
||||
{
|
||||
var regexp = new RegExp(/\<[^\<]+\>/g);
|
||||
var output = input.replace(regexp, "");
|
||||
output = $.trim(output.toLowerCase().replace(/\n/, '').replace(/\s{2,}/, ' '));
|
||||
return output;
|
||||
}
|
||||
|
||||
function get_key()
|
||||
{
|
||||
var input = strip_html($('input[rel="' + options.randomElement + '"]').val());
|
||||
|
||||
if (input.indexOf(' ') === -1)
|
||||
{
|
||||
return input;
|
||||
}
|
||||
else
|
||||
{
|
||||
return input.split(" ");
|
||||
}
|
||||
}
|
||||
|
||||
function test_key(k, value, type)
|
||||
{
|
||||
if (type === "string")
|
||||
{
|
||||
return test_key_string(k, value);
|
||||
}
|
||||
else
|
||||
{
|
||||
return test_key_arr(k, value);
|
||||
}
|
||||
}
|
||||
|
||||
function test_key_string(k, value)
|
||||
{
|
||||
return (value.indexOf(k) > -1);
|
||||
}
|
||||
|
||||
function test_key_arr(k, value)
|
||||
{
|
||||
for (var i = 0; i < k.length; i++) {
|
||||
var test = value.indexOf(k[i]);
|
||||
if (test === -1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function select_element(el)
|
||||
{
|
||||
if (options.hideElement === "grandparent")
|
||||
{
|
||||
return $(el).parent().parent();
|
||||
}
|
||||
else if (options.hideElement === "parent")
|
||||
{
|
||||
return $(el).parent();
|
||||
}
|
||||
else
|
||||
{
|
||||
return $(el);
|
||||
}
|
||||
}
|
||||
|
||||
function stripe(el)
|
||||
{
|
||||
if (doStripe)
|
||||
{
|
||||
var i = 0;
|
||||
select_element(el).filter(':visible').each(function () {
|
||||
|
||||
for (var j = 0; j < stripeRowLength; j++)
|
||||
{
|
||||
if (i === j)
|
||||
{
|
||||
$(this).addClass(options.stripeRowClass[i]);
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
$(this).removeClass(options.stripeRowClass[j]);
|
||||
}
|
||||
}
|
||||
i = (i + 1) % stripeRowLength;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function fix_widths(el)
|
||||
{
|
||||
$(el).find('td').each(function () {
|
||||
$(this).attr('width', parseInt($(this).css('width')));
|
||||
});
|
||||
}
|
||||
|
||||
function loader(o) {
|
||||
if (options.loaderId)
|
||||
{
|
||||
var l = $('input[rel="' + options.randomElement + '"]').parent().find('.loader');
|
||||
if (o === 'hide')
|
||||
{
|
||||
l.hide();
|
||||
}
|
||||
else
|
||||
{
|
||||
l.show();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function place_form() {
|
||||
var formPosition = options.position;
|
||||
var formAttached = options.attached;
|
||||
|
||||
if (formPosition === 'before') {
|
||||
$(formAttached).before(make_form());
|
||||
} else if (formPosition === 'prepend') {
|
||||
$(formAttached).prepend(make_form());
|
||||
} else if (formPosition === 'append') {
|
||||
$(formAttached).append(make_form());
|
||||
} else {
|
||||
$(formAttached).after(make_form());
|
||||
}
|
||||
}
|
||||
|
||||
function make_form_label()
|
||||
{
|
||||
if (!is_empty(options.labelText)) {
|
||||
return '<label for="' + options.randomElement + '" '+
|
||||
'class="' + options.labelClass + '">'
|
||||
+ options.labelText
|
||||
+ '</label> ';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
function make_form_input()
|
||||
{
|
||||
var val = (!is_empty(options.inputText)) ? options.inputText : ""
|
||||
return '<input type="text" value="' + val + '" rel="' + options.randomElement + '" class="' + options.inputClass + '" id="' + options.randomElement + '" /> ';
|
||||
}
|
||||
|
||||
function make_form_loader()
|
||||
{
|
||||
if (!is_empty(options.loaderImg)) {
|
||||
return '<img src="' + options.loaderImg + '" alt="Loading" id="' + options.loaderId + '" class="' + options.loaderClass + '" />';
|
||||
} else {
|
||||
return '<span id="' + options.loaderId + '" class="' + options.loaderClass + '">' + options.loaderText + '</span>';
|
||||
}
|
||||
}
|
||||
|
||||
function make_form()
|
||||
{
|
||||
var f = (!options.isFieldset) ? 'form' : 'fieldset';
|
||||
return '<' + f + ' action="#" ' + 'id="'+ options.formId + '" ' + 'class="quicksearch">' +
|
||||
make_form_label() + make_form_input() + make_form_loader() +
|
||||
'</' + f + '>';
|
||||
}
|
||||
|
||||
function focus_on_load()
|
||||
{
|
||||
$('input[rel="' + options.randomElement + '"]').get(0).focus();
|
||||
}
|
||||
|
||||
function toggle_text() {
|
||||
$('input[rel="' + options.randomElement + '"]').focus(function () {
|
||||
if ($(this).val() === options.inputText) {
|
||||
$(this).val('');
|
||||
}
|
||||
});
|
||||
$('input[rel="' + options.randomElement + '"]').blur(function () {
|
||||
if ($(this).val() === "") {
|
||||
$(this).val(options.inputText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function get_cache(el)
|
||||
{
|
||||
return $(el).map(function(){
|
||||
return strip_html(this.innerHTML);
|
||||
});
|
||||
}
|
||||
|
||||
function init()
|
||||
{
|
||||
place_form();
|
||||
if (options.fixWidths) fix_widths(el);
|
||||
if (options.focusOnLoad) focus_on_load();
|
||||
if (options.inputText != "" && options.inputText != null) toggle_text();
|
||||
|
||||
cache = get_cache(el);
|
||||
|
||||
stripe(el);
|
||||
loader('hide');
|
||||
}
|
||||
|
||||
function qs()
|
||||
{
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(function () {
|
||||
|
||||
loader('show');
|
||||
|
||||
setTimeout(function () {
|
||||
options.onBefore();
|
||||
|
||||
var k = get_key();
|
||||
var k_type = (typeof k);
|
||||
var i = 0;
|
||||
|
||||
k = options.filter(k);
|
||||
|
||||
if (k != "")
|
||||
{
|
||||
if (typeof score[k] === "undefined")
|
||||
{
|
||||
score[k] = new Array();
|
||||
cache.each(function (i) {
|
||||
if (test_key(k, cache[i], k_type))
|
||||
{
|
||||
score[k][i] = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (score[k].length === 0)
|
||||
{
|
||||
select_element(el).hide();
|
||||
}
|
||||
else
|
||||
{
|
||||
$(el).each(function (i) {
|
||||
if (score[k][i])
|
||||
{
|
||||
select_element(this).show();
|
||||
}
|
||||
else
|
||||
{
|
||||
select_element(this).hide();
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
select_element(el).show();
|
||||
}
|
||||
|
||||
stripe(el);
|
||||
}, options.delay/2);
|
||||
|
||||
setTimeout( function () {
|
||||
loader('hide');
|
||||
}, options.delay/2);
|
||||
|
||||
options.onAfter();
|
||||
|
||||
}, options.delay/2);
|
||||
}
|
||||
|
||||
var options = $.extend({
|
||||
position: 'prepend',
|
||||
attached: 'body',
|
||||
formId: 'quicksearch',
|
||||
labelText: 'Quick Search',
|
||||
labelClass: 'qs_label',
|
||||
inputText: null,
|
||||
inputClass: 'qs_input',
|
||||
loaderId: 'loader',
|
||||
loaderClass: 'loader',
|
||||
loaderImg: null,
|
||||
loaderText: 'Loading...',
|
||||
stripeRowClass: null,
|
||||
hideElement: null,
|
||||
delay: 500,
|
||||
focusOnLoad: false,
|
||||
onBefore: function () { },
|
||||
onAfter: function () { },
|
||||
filter: function (i) {
|
||||
return i;
|
||||
},
|
||||
randomElement: 'qs' + Math.floor(Math.random() * 1000000),
|
||||
isFieldset: false,
|
||||
fixWidths: false
|
||||
}, opt);
|
||||
|
||||
var timeout;
|
||||
var score = {};
|
||||
var stripeRowLength = (!is_empty(options.stripeRowClass)) ? options.stripeRowClass.length : 0;
|
||||
var doStripe = (stripeRowLength > 0) ? true : false;
|
||||
var el = this;
|
||||
var cache;
|
||||
var selector = $(this).selector;
|
||||
|
||||
$.fn.extend({
|
||||
reset_cache: function () {
|
||||
el = $(selector);
|
||||
cache = get_cache(el);
|
||||
}
|
||||
});
|
||||
|
||||
init();
|
||||
|
||||
$('input[rel="' + options.randomElement + '"]').keydown(function (e) {
|
||||
var keycode = e.keyCode;
|
||||
if (!(keycode === 9 || keycode === 13 || keycode === 16 || keycode === 17 || keycode === 18 || keycode === 38 || keycode === 40 || keycode === 224))
|
||||
{
|
||||
qs();
|
||||
}
|
||||
});
|
||||
|
||||
$('form.quicksearch, fieldset.quicksearch').submit( function () { return false; });
|
||||
|
||||
return this;
|
||||
};
|
||||
});
|
||||
@@ -1,42 +0,0 @@
|
||||
|
||||
mnuItmLst=document.getElementsByClassName("menuBarItem")
|
||||
function highlight(div){
|
||||
for (var i = 0, divIter; divIter = mnuItmLst[i]; i++) {
|
||||
if (divIter.style.backgroundColor!="rgb(102, 102, 102)"){
|
||||
divIter.style.backgroundColor="#EBEBEB";
|
||||
}
|
||||
}
|
||||
if (div.style.backgroundColor!="rgb(102, 102, 102)"){
|
||||
div.style.backgroundColor="#B0B0B0";
|
||||
}
|
||||
}
|
||||
|
||||
function unhighlight(div){
|
||||
if (div.style.backgroundColor=="#EBEBEB"){
|
||||
div.style.backgroundColor="#EBEBEB";
|
||||
}
|
||||
}
|
||||
|
||||
function choose(div){
|
||||
for (var i = 0, divIter; divIter = mnuItmLst[i]; i++) {
|
||||
document.getElementById(divIter.id+"Content").style.display="none";
|
||||
}
|
||||
document.getElementById(div.id+"Content").style.display="block";
|
||||
for (var i = 0, divIter; divIter = mnuItmLst[i]; i++) {
|
||||
document.getElementById(divIter.id).style.backgroundColor="#EBEBEB";
|
||||
}
|
||||
div.style.backgroundColor="#666666";
|
||||
}
|
||||
|
||||
function redirectSurvey(){
|
||||
window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
|
||||
document.getElementById("progressTableContent").style.display='hidden'
|
||||
}
|
||||
|
||||
function redirectYear(){
|
||||
window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
@@ -1,49 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django import http
|
||||
from django.core.urlresolvers import resolve
|
||||
|
||||
class SmartAppendSlashMiddleware(object):
|
||||
"""
|
||||
"SmartAppendSlash" middleware for taking care of URL rewriting.
|
||||
|
||||
This middleware appends a missing slash, if:
|
||||
* the SMART_APPEND_SLASH setting is True
|
||||
* the URL without the slash does not exist
|
||||
* the URL with an appended slash does exist.
|
||||
Otherwise it won't touch the URL.
|
||||
"""
|
||||
|
||||
def process_request(self, request):
|
||||
"""
|
||||
Rewrite the URL based on settings.SMART_APPEND_SLASH
|
||||
"""
|
||||
|
||||
# Check for a redirect based on settings.SMART_APPEND_SLASH
|
||||
host = http.get_host(request)
|
||||
old_url = [host, request.path]
|
||||
new_url = old_url[:]
|
||||
# Append a slash if SMART_APPEND_SLASH is set and the resulting URL
|
||||
# resolves.
|
||||
if settings.SMART_APPEND_SLASH and (not old_url[1].endswith('/')) and not _resolves(old_url[1]) and _resolves(old_url[1] + '/'):
|
||||
new_url[1] = new_url[1] + '/'
|
||||
if settings.DEBUG and request.method == 'POST':
|
||||
raise RuntimeError, "You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to %s%s (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings." % (new_url[0], new_url[1])
|
||||
if new_url != old_url:
|
||||
# Redirect
|
||||
if new_url[0]:
|
||||
newurl = "%s://%s%s" % (request.is_secure() and 'https' or 'http', new_url[0], new_url[1])
|
||||
else:
|
||||
newurl = new_url[1]
|
||||
if request.GET:
|
||||
newurl += '?' + request.GET.urlencode()
|
||||
return http.HttpResponsePermanentRedirect(newurl)
|
||||
|
||||
return None
|
||||
|
||||
def _resolves(url):
|
||||
try:
|
||||
resolve(url)
|
||||
return True
|
||||
except http.Http404:
|
||||
return False
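# Usage sketch (not part of this file): enabling the middleware needs SMART_APPEND_SLASH
# in settings plus an entry in MIDDLEWARE_CLASSES. The dotted path below is only a guess
# at where this class would live; adjust it to the real module path.
#
#     SMART_APPEND_SLASH = True
#     MIDDLEWARE_CLASSES = (
#         'django.middleware.common.CommonMiddleware',
#         'troggle.middleware.SmartAppendSlashMiddleware',   # hypothetical path
#     )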
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
import csv
|
||||
import settings
|
||||
from expo.models import QM, LogbookEntry, Cave
|
||||
from datetime import *
|
||||
from troggle.save_carefully import save_carefully
|
||||
import re
|
||||
|
||||
def deleteQMs():
|
||||
QM.objects.all().delete()
|
||||
|
||||
def parseCaveQMs(cave,inputFile):
|
||||
"""Runs through the CSV file at inputFile (which is a relative path from expoweb) and saves each QM as a QM instance."""
|
||||
|
||||
if cave=='stein':
|
||||
try:
|
||||
steinBr=Cave.objects.get(official_name="Steinbrückenhöhle")
|
||||
except Cave.DoesNotExist:
|
||||
print "Steinbruckenhoehle is not in the database. Please run parsers.cavetab first."
|
||||
return
|
||||
elif cave=='hauch':
|
||||
try:
|
||||
hauchHl=Cave.objects.get(official_name="Hauchhöhle")
|
||||
except Cave.DoesNotExist:
|
||||
print "Steinbruckenhoehle is not in the database. Please run parsers.cavetab first."
|
||||
return
|
||||
elif cave =='kh':
|
||||
try:
|
||||
kh=Cave.objects.get(official_name="Kaninchenhöhle")
|
||||
except Cave.DoesNotExist:
|
||||
print "Steinbruckenhoehle is not in the database. Please run parsers.cavetab first."
|
||||
for file in inputFile:
|
||||
parse_KH_QMs(kh, inputFile=file)
|
||||
return
|
||||
|
||||
qmPath = settings.EXPOWEB+inputFile
|
||||
qmCSVContents = open(qmPath,'r')
|
||||
dialect=csv.Sniffer().sniff(qmCSVContents.read())
|
||||
qmCSVContents.seek(0,0)
|
||||
qmReader = csv.reader(qmCSVContents,dialect=dialect)
|
||||
qmReader.next() # Skip header row
|
||||
for line in qmReader:
|
||||
try:
|
||||
year=int(line[0][1:5])
|
||||
#check if placeholder exists for given year, create it if not
|
||||
if cave=='stein':
|
||||
placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 204", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":steinBr})
|
||||
elif cave=='hauch':
|
||||
placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 234", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":hauchHl})
|
||||
if hadToCreate:
|
||||
print cave+" placeholder logbook entry for " + str(year) + " added to database"
|
||||
QMnum=re.match(r".*?-\d*?-X?(?P<numb>\d*)",line[0]).group("numb")
|
||||
newQM = QM()
|
||||
newQM.found_by=placeholder
|
||||
newQM.number=QMnum
|
||||
if line[1]=="Dig":
|
||||
newQM.grade="D"
|
||||
else:
|
||||
newQM.grade=line[1]
|
||||
newQM.area=line[2]
|
||||
newQM.location_description=line[3]
|
||||
|
||||
newQM.completion_description=line[4]
|
||||
newQM.nearest_station_description=line[5]
|
||||
if newQM.completion_description: # Troggle checks if QMs are completed by checking if they have a ticked_off_by trip. In the table, completion is indicated by the presence of a completion description.
|
||||
newQM.ticked_off_by=placeholder
|
||||
|
||||
newQM.comment=line[6]
|
||||
try:
|
||||
preexistingQM=QM.objects.get(number=QMnum, found_by__date__year=year) #if we don't have this one in the DB, save it
|
||||
if preexistingQM.new_since_parsing==False: #if the pre-existing QM has not been modified, overwrite it
|
||||
preexistingQM.delete()
|
||||
newQM.save()
|
||||
print "overwriting " + str(preexistingQM) +"\r",
|
||||
|
||||
else: # otherwise, print that it was ignored
|
||||
print "preserving "+ str(preexistingQM) + ", which was edited in admin \r",
|
||||
|
||||
except QM.DoesNotExist: #if there is no pre-existing QM, save the new one
|
||||
newQM.save()
|
||||
print "QM "+str(newQM) + ' added to database\r',
|
||||
|
||||
except KeyError: #check on this one
|
||||
continue
|
||||
# except IndexError:
|
||||
# print "Index error in " + str(line)
|
||||
# continue
|
||||
|
||||
def parse_KH_QMs(kh, inputFile):
|
||||
"""import QMs from the 1623-161 (Kaninchenh<6E>hle) html pages
|
||||
"""
|
||||
khQMs=open(settings.EXPOWEB+inputFile,'r')
|
||||
khQMs=khQMs.readlines()
|
||||
for line in khQMs:
|
||||
res=re.search('name=\"[CB](?P<year>\d*)-(?P<cave>\d*)-(?P<number>\d*).*</a> (?P<grade>[ABDCV])<dd>(?P<description>.*)\[(?P<nearest_station>.*)\]',line)
|
||||
if res:
|
||||
res=res.groupdict()
|
||||
year=int(res['year'])
|
||||
#check if placeholder exists for given year, create it if not
|
||||
placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 161", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date((year), 1, 1),"cave":kh})
|
||||
lookupArgs={
|
||||
'found_by':placeholder,
|
||||
'number':res['number']
|
||||
}
|
||||
nonLookupArgs={
|
||||
'grade':res['grade'],
|
||||
'nearest_station':res['nearest_station'],
|
||||
'location_description':res['description']
|
||||
}
|
||||
|
||||
|
||||
|
||||
save_carefully(QM,lookupArgs,nonLookupArgs)
|
||||
|
||||
|
||||
parseCaveQMs(cave='kh', inputFile=r"smkridge/161/qmtodo.htm")
|
||||
parseCaveQMs(cave='stein',inputFile=r"smkridge/204/qm.csv")
|
||||
parseCaveQMs(cave='hauch',inputFile=r"smkridge/234/qm.csv")
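# Illustrative note: column 0 of the qm.csv files holds an id such as "C2002-204-37"
# (an invented example, not a row from the real data); the regex used above pulls the
# trailing QM number out of it:
#
#     import re
#     re.match(r".*?-\d*?-X?(?P<numb>\d*)", "C2002-204-37").group("numb")   # -> "37"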
|
||||
|
||||
@@ -1,314 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path.append("/home/mjg/expoweb/troggle")
|
||||
import troggle.expo.models as models
|
||||
from django.conf import settings
|
||||
import csv
|
||||
import time
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
from troggle.save_carefully import save_carefully
|
||||
|
||||
##format of CAVETAB2.CSV is
|
||||
KatasterNumber = 0
|
||||
KatStatusCode = 1
|
||||
Entrances = 2
|
||||
UnofficialNumber = 3
|
||||
MultipleEntrances = 4
|
||||
AutogenFile = 5
|
||||
LinkFile = 6
|
||||
LinkEntrance = 7
|
||||
Name = 8
|
||||
UnofficialName = 9
|
||||
Comment = 10
|
||||
Area = 11
|
||||
Explorers = 12
|
||||
UndergroundDescription = 13
|
||||
Equipment = 14
|
||||
QMList = 15
|
||||
KatasterStatus = 16
|
||||
References = 17
|
||||
UndergroundCentreLine = 18
|
||||
UndergroundDrawnSurvey = 19
|
||||
SurvexFile = 20
|
||||
Length = 21
|
||||
Depth = 22
|
||||
Extent = 23
|
||||
Notes = 24
|
||||
EntranceName = 25
|
||||
TagPoint = 26
|
||||
OtherPoint = 27
|
||||
DescriptionOfOtherPoint = 28
|
||||
ExactEntrance = 29
|
||||
TypeOfFix = 30
|
||||
GPSpreSA = 31
|
||||
GPSpostSA = 32
|
||||
Northing = 33
|
||||
Easting = 34
|
||||
Altitude = 35
|
||||
Bearings = 36
|
||||
Map = 37
|
||||
Location = 38
|
||||
Approach = 39
|
||||
EntranceDescription = 40
|
||||
PhotoOfLocation = 41
|
||||
Marking = 42
|
||||
MarkingComment = 43
|
||||
Findability = 44
|
||||
FindabilityComment = 45
|
||||
|
||||
|
||||
def html_to_wiki(text):
|
||||
if type(text) != str:
|
||||
return text
|
||||
text = unicode(text, "utf-8")
|
||||
#Characters
|
||||
#text = re.sub("ü", u"\xfc", text)
|
||||
#text = re.sub("ö", u"\xf6", text)
|
||||
#text = re.sub("ä", u"\xe4", text)
|
||||
#text = re.sub("°", u"\xb0", text)
|
||||
#text = re.sub("©", u"\xa9", text)
|
||||
#text = re.sub("&", u"\x26", text)
|
||||
#text = re.sub("ß", u"\xdf", text)
|
||||
#text = re.sub("ß", u"\xdf", text)
|
||||
#text = re.sub("<", u"<", text)
|
||||
#text = re.sub(">", u">", text)
|
||||
#text = re.sub("è", u"\xe8", text)
|
||||
#text = re.sub("é", u"\xe9", text)
|
||||
#text = re.sub(""e;", u'"', text)
|
||||
#text = re.sub(""", u'"', text)
|
||||
#text = re.sub("Ö", u'\xd6', text)
|
||||
#text = re.sub("×", u'"', text)
|
||||
|
||||
#text = re.sub("&(.*);", "/1", text)
|
||||
#if s:
|
||||
# print s.groups()
|
||||
#Lists
|
||||
text = re.sub("</p>", r"", text)
|
||||
text = re.sub("<p>$", r"", text)
|
||||
text = re.sub("<p>", r"\n\n", text)
|
||||
out = ""
|
||||
lists = ""
|
||||
while text:
|
||||
mstar = re.match("^(.*?)<ul>\s*<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
|
||||
munstar = re.match("^(\s*)</ul>(.*)$", text, re.DOTALL)
|
||||
mhash = re.match("^(.*?)<ol>\s*<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
|
||||
munhash = re.match("^(\s*)</ol>(.*)$", text, re.DOTALL)
|
||||
mitem = re.match("^(\s*)<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
|
||||
ms = [len(m.groups()[0]) for m in [mstar, munstar, mhash, munhash, mitem] if m]
|
||||
def min_(i, l):
|
||||
try:
|
||||
v = i.groups()[0]
|
||||
l.remove(len(v))
|
||||
return len(v) < min(l + [1000000000])
|
||||
except:
|
||||
return False
|
||||
if min_(mstar, ms):
|
||||
lists += "*"
|
||||
pre, val, post = mstar.groups()
|
||||
out += pre + "\n" + lists + " " + val
|
||||
text = post
|
||||
elif min_(mhash, ms):
|
||||
lists += "#"
|
||||
pre, val, post = mhash.groups()
|
||||
out += pre + "\n" + lists + " " + val
|
||||
text = post
|
||||
elif min_(mitem, ms):
|
||||
pre, val, post = mitem.groups()
|
||||
out += "\n" + lists + " " + val
|
||||
text = post
|
||||
elif min_(munstar, ms):
|
||||
lists = lists[:-1]
|
||||
text = munstar.groups()[1]
|
||||
elif min_(munhash, ms):
|
||||
lists = lists[:-1]
|
||||
text = munhash.groups()[1]
|
||||
else:
|
||||
out += text
|
||||
text = ""
|
||||
text2 = out
|
||||
while text2:
|
||||
mtag = re.match("^(.*?)<(.*?)>(.*)$", text2, re.DOTALL)
|
||||
if mtag:
|
||||
text2 = mtag.groups()[2]
|
||||
print mtag.groups()[1]
|
||||
else:
|
||||
text2 = ""
|
||||
return out
|
||||
|
||||
def LoadCaveTab(logfile=None):
|
||||
cavetab = open(os.path.join(settings.EXPOWEB, "noinfo", "CAVETAB2.CSV"))
|
||||
caveReader = csv.reader(cavetab)
|
||||
caveReader.next() # Strip out column headers
|
||||
|
||||
if logfile:
|
||||
logfile.write("Beginning to import caves from "+str(cavetab)+"\n"+"-"*60+"\n")
|
||||
|
||||
for katArea in ['1623', '1626']:
|
||||
if not models.Area.objects.filter(short_name = katArea):
|
||||
newArea = models.Area(short_name = katArea)
|
||||
newArea.save()
|
||||
if logfile:
|
||||
logfile.write("Added area "+str(newArea.short_name)+"\n")
|
||||
area1626 = models.Area.objects.filter(short_name = '1626')[0]
|
||||
area1623 = models.Area.objects.filter(short_name = '1623')[0]
|
||||
|
||||
counter=0
|
||||
for line in caveReader :
|
||||
if line[Area] == 'nonexistent':
|
||||
continue
|
||||
entranceLetters=[] #Used in caves that have multiple entrances, which are not described on separate lines
|
||||
if line[MultipleEntrances] == 'yes' or line[MultipleEntrances]=='': #When true, this line contains an actual cave, otherwise it is an extra entrance.
|
||||
args = {}
|
||||
defaultArgs = {}
|
||||
|
||||
def addToArgs(CSVname, modelName):
|
||||
if line[CSVname]:
|
||||
args[modelName] = html_to_wiki(line[CSVname])
|
||||
|
||||
def addToDefaultArgs(CSVname, modelName): #This has to do with the non-destructive import. These arguments will be passed as the "default" dictionary in a get_or_create
|
||||
if line[CSVname]:
|
||||
defaultArgs[modelName] = html_to_wiki(line[CSVname])
|
||||
|
||||
# The attributes added using "addToArgs" will be used to look up an existing cave. Those added using "addToDefaultArgs" will not.
|
||||
addToArgs(KatasterNumber, "kataster_number")
|
||||
addToDefaultArgs(KatStatusCode, "kataster_code")
|
||||
addToArgs(UnofficialNumber, "unofficial_number")
|
||||
addToArgs(Name, "official_name")
|
||||
addToDefaultArgs(Comment, "notes")
|
||||
addToDefaultArgs(Explorers, "explorers")
|
||||
addToDefaultArgs(UndergroundDescription, "underground_description")
|
||||
addToDefaultArgs(Equipment, "equipment")
|
||||
addToDefaultArgs(KatasterStatus, "kataster_status")
|
||||
addToDefaultArgs(References, "references")
|
||||
addToDefaultArgs(UndergroundCentreLine, "underground_centre_line")
|
||||
addToDefaultArgs(UndergroundDrawnSurvey, "survey")
|
||||
addToDefaultArgs(Length, "length")
|
||||
addToDefaultArgs(Depth, "depth")
|
||||
addToDefaultArgs(Extent, "extent")
|
||||
addToDefaultArgs(SurvexFile, "survex_file")
|
||||
addToDefaultArgs(Notes, "notes")
|
||||
|
||||
newCave, created=save_carefully(models.Cave, lookupAttribs=args, nonLookupAttribs=defaultArgs)
|
||||
if logfile:
|
||||
logfile.write("Added cave "+str(newCave)+"\n")
|
||||
|
||||
#If we created a new cave, add the area to it. This does mean that if a cave's identifying features have not changed, areas will not be updated from csv.
|
||||
if created and line[Area]:
|
||||
if line[Area] == "1626":
|
||||
newCave.area.add(area1626)
|
||||
else:
|
||||
area = models.Area.objects.filter(short_name = line[Area])
|
||||
if area:
|
||||
newArea = area[0]
|
||||
else:
|
||||
newArea = models.Area(short_name = line[Area], parent = area1623)
|
||||
newArea.save()
|
||||
newCave.area.add(newArea)
|
||||
elif created:
|
||||
newCave.area.add(area1623)
|
||||
|
||||
newCave.save()
|
||||
if logfile:
|
||||
logfile.write("Added area "+line[Area]+" to cave "+str(newCave)+"\n")
|
||||
|
||||
if created and line[UnofficialName]:
|
||||
newUnofficialName = models.OtherCaveName(cave = newCave, name = line[UnofficialName])
|
||||
newUnofficialName.save()
|
||||
if logfile:
|
||||
logfile.write("Added unofficial name "+str(newUnofficialName)+" to cave "+str(newCave)+"\n")
|
||||
|
||||
if created and line[MultipleEntrances] == '' or \
|
||||
line[MultipleEntrances] == 'entrance' or \
|
||||
line[MultipleEntrances] == 'last entrance':
|
||||
args = {}
|
||||
def addToArgs(CSVname, modelName):
|
||||
if line[CSVname]:
|
||||
args[modelName] = html_to_wiki(line[CSVname])
|
||||
def addToArgsViaDict(CSVname, modelName, dictionary):
|
||||
if line[CSVname]:
|
||||
args[modelName] = dictionary[html_to_wiki(line[CSVname])]
|
||||
addToArgs(EntranceName, 'name')
|
||||
addToArgs(Explorers, 'explorers')
|
||||
addToArgs(Map, 'map_description')
|
||||
addToArgs(Location, 'location_description')
|
||||
addToArgs(Approach, 'approach')
|
||||
addToArgs(EntranceDescription, 'entrance_description')
|
||||
addToArgs(UndergroundDescription, 'underground_description')
|
||||
addToArgs(PhotoOfLocation, 'photo')
|
||||
addToArgsViaDict(Marking, 'marking', {"Paint": "P",
|
||||
"Paint (?)": "P?",
|
||||
"Tag": "T",
|
||||
"Tag (?)": "T?",
|
||||
"Retagged": "R",
|
||||
"Retag": "R",
|
||||
"Spit": "S",
|
||||
"Spit (?)": "S?",
|
||||
"Unmarked": "U",
|
||||
"": "?",
|
||||
})
|
||||
addToArgs(MarkingComment, 'marking_comment')
|
||||
addToArgsViaDict(Findability, 'findability', {"Surveyed": "S",
|
||||
"Lost": "L",
|
||||
"Refindable": "R",
|
||||
"": "?",
|
||||
"?": "?",
|
||||
})
|
||||
addToArgs(FindabilityComment, 'findability_description')
|
||||
addToArgs(Easting, 'easting')
|
||||
addToArgs(Northing, 'northing')
|
||||
addToArgs(Altitude, 'alt')
|
||||
addToArgs(DescriptionOfOtherPoint, 'other_description')
|
||||
def addToArgsSurveyStation(CSVname, modelName):
|
||||
if line[CSVname]:
|
||||
surveyPoint = models.SurveyStation(name = line[CSVname])
|
||||
surveyPoint.save()
|
||||
args[modelName] = html_to_wiki(surveyPoint)
|
||||
addToArgsSurveyStation(TagPoint, 'tag_station')
|
||||
addToArgsSurveyStation(ExactEntrance, 'exact_station')
|
||||
addToArgsSurveyStation(OtherPoint, 'other_station')
|
||||
addToArgs(OtherPoint, 'other_description')
|
||||
if line[GPSpreSA]:
|
||||
addToArgsSurveyStation(GPSpreSA, 'other_station')
|
||||
args['other_description'] = 'pre selective availability GPS'
|
||||
if line[GPSpostSA]:
|
||||
addToArgsSurveyStation(GPSpostSA, 'other_station')
|
||||
args['other_description'] = 'post selective availability GPS'
|
||||
addToArgs(Bearings, 'bearings')
|
||||
newEntrance = models.Entrance(**args)
|
||||
newEntrance.save()
|
||||
if logfile:
|
||||
logfile.write("Added entrance "+str(newEntrance)+"\n")
|
||||
|
||||
if line[Entrances]:
|
||||
entrance_letter = line[Entrances]
|
||||
else:
|
||||
entrance_letter = ''
|
||||
|
||||
newCaveAndEntrance = models.CaveAndEntrance(cave = newCave, entrance = newEntrance, entrance_letter = entrance_letter)
|
||||
newCaveAndEntrance.save()
|
||||
if logfile:
|
||||
logfile.write("Added CaveAndEntrance "+str(newCaveAndEntrance)+"\n")
|
||||
|
||||
|
||||
# lookup function modelled on GetPersonExpeditionNameLookup
|
||||
Gcavelookup = None
|
||||
def GetCaveLookup():
|
||||
global Gcavelookup
|
||||
if Gcavelookup:
|
||||
return Gcavelookup
|
||||
Gcavelookup = {"NONEPLACEHOLDER":None}
|
||||
for cave in models.Cave.objects.all():
|
||||
Gcavelookup[cave.official_name.lower()] = cave
|
||||
if cave.kataster_number:
|
||||
Gcavelookup[cave.kataster_number] = cave
|
||||
if cave.unofficial_number:
|
||||
Gcavelookup[cave.unofficial_number] = cave
|
||||
|
||||
Gcavelookup["tunnocks"] = Gcavelookup["258"]
|
||||
Gcavelookup["hauchhole"] = Gcavelookup["234"]
|
||||
return Gcavelookup
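# Usage sketch: once parsers.cavetab has run, callers can look a cave up by any of the
# keys registered above, e.g.
#
#     cave = GetCaveLookup().get("204")        # by kataster number
#     cave = GetCaveLookup().get("tunnocks")   # hard-coded alias for 258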
|
||||
|
||||
|
||||
@@ -1,346 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import troggle.settings as settings
|
||||
import troggle.expo.models as models
|
||||
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
from troggle.parsers.cavetab import GetCaveLookup
|
||||
|
||||
from django.template.defaultfilters import slugify
|
||||
|
||||
import csv
|
||||
import re
|
||||
import datetime
|
||||
import os
|
||||
|
||||
from troggle.save_carefully import save_carefully
|
||||
|
||||
#
|
||||
# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
|
||||
# it can be checked up later from the hard-copy if necessary; or it's not possible to determine (name, trip place, etc)
|
||||
#
|
||||
|
||||
#
|
||||
# the logbook loading section
|
||||
#
|
||||
def GetTripPersons(trippeople, expedition, logtime_underground):
|
||||
res = [ ]
|
||||
author = None
|
||||
for tripperson in re.split(",|\+|&amp;|&(?!\w+;)| and ", trippeople):
|
||||
tripperson = tripperson.strip()
|
||||
mul = re.match("<u>(.*?)</u>$(?i)", tripperson)
|
||||
if mul:
|
||||
tripperson = mul.group(1).strip()
|
||||
if tripperson and tripperson[0] != '*':
|
||||
#assert tripperson in personyearmap, "'%s' << %s\n\n %s" % (tripperson, trippeople, personyearmap)
|
||||
personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
|
||||
if not personyear:
|
||||
print "NoMatchFor: '%s'" % tripperson
|
||||
res.append((personyear, logtime_underground))
|
||||
if mul:
|
||||
author = personyear
|
||||
if not author:
|
||||
author = res[-1][0]
|
||||
return res, author
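# Illustrative note: trip people strings are split on ",", "+", "&" and " and ", and a
# name wrapped in <u>...</u> marks the author. So a hypothetical logbook header such as
#
#     "<u>Julian Todd</u>, Becka Lawson and Wookey"
#
# gives three (PersonExpedition, time_underground) pairs with Julian Todd as the author,
# provided each name resolves through GetPersonExpeditionNameLookup for that expedition.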
|
||||
|
||||
def GetTripCave(place): #need to be fuzzier about matching here. Already a very slow function...
|
||||
# print "Getting cave for " , place
|
||||
try:
|
||||
katastNumRes=[]
|
||||
katastNumRes=list(models.Cave.objects.filter(kataster_number=int(place)))
|
||||
except ValueError:
|
||||
pass
|
||||
officialNameRes=list(models.Cave.objects.filter(official_name=place))
|
||||
tripCaveRes=officialNameRes+katastNumRes
|
||||
|
||||
if len(tripCaveRes)==1:
|
||||
# print "Place " , place , "entered as" , tripCaveRes[0]
|
||||
return tripCaveRes[0]
|
||||
|
||||
elif models.OtherCaveName.objects.filter(name=place):
|
||||
tripCaveRes=models.OtherCaveName.objects.filter(name__icontains=place)[0].cave
|
||||
# print "Place " , place , "entered as" , tripCaveRes
|
||||
return tripCaveRes
|
||||
|
||||
elif len(tripCaveRes)>1:
|
||||
print "Ambiguous place " + str(place) + " entered. Choose from " + str(tripCaveRes)
|
||||
correctIndex=input("type list index of correct cave")
|
||||
return tripCaveRes[correctIndex]
|
||||
else:
|
||||
print "No cave found for place " , place
|
||||
return
|
||||
|
||||
|
||||
noncaveplaces = [ "Journey", "Loser Plateau" ]
|
||||
def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground):
|
||||
""" saves a logbook entry and related persontrips """
|
||||
trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground)
|
||||
# tripCave = GetTripCave(place)
|
||||
#
|
||||
lplace = place.lower()
cave = None
|
||||
if lplace not in noncaveplaces:
|
||||
cave=GetCaveLookup().get(lplace)
|
||||
|
||||
#Check for an existing copy of the current entry, and save
|
||||
lookupAttribs={'date':date, 'title':title[:50]}
|
||||
nonLookupAttribs={'place':place, 'text':text, 'author':author, 'expedition':expedition, 'cave':cave}
|
||||
lbo, created=save_carefully(models.LogbookEntry, lookupAttribs, nonLookupAttribs)
|
||||
|
||||
for tripperson, time_underground in trippersons:
|
||||
lookupAttribs={'person_expedition':tripperson, 'date':date}
|
||||
nonLookupAttribs={'place':place,'time_underground':time_underground,'logbook_entry':lbo,'is_logbook_entry_author':(tripperson == author)}
|
||||
save_carefully(models.PersonTrip, lookupAttribs, nonLookupAttribs)
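# save_carefully() comes from troggle.save_carefully and is not shown in this changeset.
# A minimal sketch consistent with how it is called here and with the manual
# new_since_parsing handling in parsers/QMs.py might look like the following -- an
# assumption about its behaviour, not the real implementation:
#
#     def save_carefully(objectType, lookupAttribs, nonLookupAttribs):
#         """get_or_create, but never overwrite objects edited in the admin"""
#         instance, created = objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
#         if not created and not instance.new_since_parsing:
#             for k, v in nonLookupAttribs.items():   # refresh unedited objects from the data files
#                 setattr(instance, k, v)
#             instance.save()
#         return instance, created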
|
||||
|
||||
|
||||
def ParseDate(tripdate, year):
|
||||
mdatestandard = re.match("(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
|
||||
mdategoof = re.match("(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
|
||||
if mdatestandard:
|
||||
assert mdatestandard.group(1) == year, (tripdate, year)
|
||||
year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
|
||||
elif mdategoof:
|
||||
assert not mdategoof.group(3) or mdategoof.group(3) == year[:2]
|
||||
yadd = int(year[:2]) * 100
|
||||
day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
|
||||
else:
|
||||
assert False, tripdate
|
||||
return datetime.date(year, month, day)
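# Examples of the two accepted formats (both checked against the regexes above):
#
#     ParseDate("2007-07-15", "2007")   # -> datetime.date(2007, 7, 15)
#     ParseDate("15/7/07", "2007")      # -> datetime.date(2007, 7, 15)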
|
||||
|
||||
# 2007, 2008, 2006
|
||||
def Parselogwikitxt(year, expedition, txt):
|
||||
trippara = re.findall("===(.*?)===([\s\S]*?)(?====)", txt)
|
||||
for triphead, triptext in trippara:
|
||||
tripheadp = triphead.split("|")
|
||||
assert len(tripheadp) == 3, (tripheadp, triptext)
|
||||
tripdate, tripplace, trippeople = tripheadp
|
||||
tripsplace = tripplace.split(" - ")
|
||||
tripcave = tripsplace[0].strip()
|
||||
|
||||
tul = re.findall("T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
|
||||
if tul:
|
||||
#assert len(tul) <= 1, (triphead, triptext)
|
||||
#assert tul[0][1] in ["hrs", "hours"], (triphead, triptext)
|
||||
tu = tul[0][0]
|
||||
else:
|
||||
tu = ""
|
||||
#assert tripcave == "Journey", (triphead, triptext)
|
||||
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
#print "\n", tripcave, "--- ppp", trippeople, len(triptext)
|
||||
EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
|
||||
|
||||
# 2002, 2004, 2005
|
||||
def Parseloghtmltxt(year, expedition, txt):
|
||||
tripparas = re.findall("<hr\s*/>([\s\S]*?)(?=<hr)", txt)
|
||||
for trippara in tripparas:
|
||||
s = re.match('''(?x)\s*(?:<a\s+id="(.*?)"\s*/>)?
|
||||
\s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>
|
||||
\s*<div\s+class="trippeople">\s*(.*?)</div>
|
||||
\s*<div\s+class="triptitle">\s*(.*?)</div>
|
||||
([\s\S]*?)
|
||||
\s*(?:<div\s+class="timeug">\s*(.*?)</div>)?
|
||||
\s*$
|
||||
''', trippara)
|
||||
assert s, trippara
|
||||
|
||||
tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
#assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
|
||||
trippeople = re.sub("Ol(?!l)", "Olly", trippeople)
|
||||
trippeople = re.sub("Wook(?!e)", "Wookey", trippeople)
|
||||
triptitles = triptitle.split(" - ")
|
||||
if len(triptitles) >= 2:
|
||||
tripcave = triptitles[0]
|
||||
else:
|
||||
tripcave = "UNKNOWN"
|
||||
#print "\n", tripcave, "--- ppp", trippeople, len(triptext)
|
||||
ltriptext = re.sub("</p>", "", triptext)
|
||||
ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
|
||||
ltriptext = re.sub("<p>", "\n\n", ltriptext).strip()
|
||||
EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
|
||||
|
||||
|
||||
# main parser for pre-2001. simpler because the data has been hacked so much to fit it
|
||||
def Parseloghtml01(year, expedition, txt):
|
||||
tripparas = re.findall("<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
|
||||
for trippara in tripparas:
|
||||
s = re.match(u"(?s)\s*(?:<p>)?(.*?)</?p>(.*)$(?i)", trippara)
|
||||
assert s, trippara[:100]
|
||||
tripheader, triptext = s.group(1), s.group(2)
|
||||
mtripid = re.search('<a id="(.*?)"', tripheader)
|
||||
tripid = mtripid and mtripid.group(1) or ""
|
||||
tripheader = re.sub("</?(?:[ab]|span)[^>]*>", "", tripheader)
|
||||
|
||||
#print [tripheader]
|
||||
#continue
|
||||
|
||||
tripdate, triptitle, trippeople = tripheader.split("|")
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
|
||||
mtu = re.search('<p[^>]*>(T/?U.*)', triptext)
|
||||
if mtu:
|
||||
tu = mtu.group(1)
|
||||
triptext = triptext[:mtu.start(0)] + triptext[mtu.end():]
|
||||
else:
|
||||
tu = ""
|
||||
|
||||
triptitles = triptitle.split(" - ")
|
||||
tripcave = triptitles[0].strip()
|
||||
|
||||
ltriptext = triptext
|
||||
|
||||
mtail = re.search('(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
|
||||
if mtail:
|
||||
#print mtail.group(0)
|
||||
ltriptext = ltriptext[:mtail.start(0)]
|
||||
ltriptext = re.sub("</p>", "", ltriptext)
|
||||
ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
|
||||
ltriptext = re.sub("<p>|<br>", "\n\n", ltriptext).strip()
|
||||
#ltriptext = re.sub("[^\s0-9a-zA-Z\-.,:;'!]", "NONASCII", ltriptext)
|
||||
ltriptext = re.sub("</?u>", "_", ltriptext)
|
||||
ltriptext = re.sub("</?i>", "''", ltriptext)
|
||||
ltriptext = re.sub("</?b>", "'''", ltriptext)
|
||||
|
||||
|
||||
#print ldate, trippeople.strip()
|
||||
# could include the tripid (url link for cross referencing)
|
||||
EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
|
||||
|
||||
|
||||
def Parseloghtml03(year, expedition, txt):
|
||||
tripparas = re.findall("<hr\s*/>([\s\S]*?)(?=<hr)", txt)
|
||||
for trippara in tripparas:
|
||||
s = re.match(u"(?s)\s*<p>(.*?)</p>(.*)$", trippara)
|
||||
assert s, trippara
|
||||
tripheader, triptext = s.group(1), s.group(2)
|
||||
tripheader = re.sub("&nbsp;", " ", tripheader)
|
||||
tripheader = re.sub("\s+", " ", tripheader).strip()
|
||||
sheader = tripheader.split(" -- ")
|
||||
tu = ""
|
||||
if re.match("T/U|Time underwater", sheader[-1]):
|
||||
tu = sheader.pop()
|
||||
if len(sheader) != 3:
|
||||
print sheader
|
||||
# continue
|
||||
tripdate, triptitle, trippeople = sheader
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
triptitles = triptitle.split(" , ")
|
||||
if len(triptitles) >= 2:
|
||||
tripcave = triptitles[0]
|
||||
else:
|
||||
tripcave = "UNKNOWN"
|
||||
#print tripcave, "--- ppp", triptitle, trippeople, len(triptext)
|
||||
ltriptext = re.sub("</p>", "", triptext)
|
||||
ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
|
||||
ltriptext = re.sub("<p>", "\n\n", ltriptext).strip()
|
||||
ltriptext = re.sub("[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)
|
||||
EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
|
||||
|
||||
yearlinks = [
|
||||
("2008", "2008/2008logbook.txt", Parselogwikitxt),
|
||||
#("2007", "2007/2007logbook.txt", Parselogwikitxt),
|
||||
("2006", "2006/logbook/logbook_06.txt", Parselogwikitxt),
|
||||
("2005", "2005/logbook.html", Parseloghtmltxt),
|
||||
("2004", "2004/logbook.html", Parseloghtmltxt),
|
||||
("2003", "2003/logbook.html", Parseloghtml03),
|
||||
("2002", "2002/logbook.html", Parseloghtmltxt),
|
||||
("2001", "2001/log.htm", Parseloghtml01),
|
||||
("2000", "2000/log.htm", Parseloghtml01),
|
||||
("1999", "1999/log.htm", Parseloghtml01),
|
||||
("1998", "1998/log.htm", Parseloghtml01),
|
||||
("1997", "1997/log.htm", Parseloghtml01),
|
||||
("1996", "1996/log.htm", Parseloghtml01),
|
||||
("1995", "1995/log.htm", Parseloghtml01),
|
||||
("1994", "1994/log.htm", Parseloghtml01),
|
||||
("1993", "1993/log.htm", Parseloghtml01),
|
||||
]
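# yearlinks drives both LoadLogbooks() and LoadLogbookForExpedition() below, so a single
# year can be re-parsed on its own -- a usage sketch, assuming parsers.people has already
# created the Expedition rows:
#
#     expo = models.Expedition.objects.filter(year="2004")[0]
#     LoadLogbookForExpedition(expo)   # parses 2004/logbook.html with Parseloghtmltxt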
|
||||
|
||||
def SetDatesFromLogbookEntries(expedition):
|
||||
for personexpedition in expedition.personexpedition_set.all():
|
||||
persontrips = personexpedition.persontrip_set.order_by('date')
|
||||
personexpedition.date_from = min([persontrip.date for persontrip in persontrips] or [None])
|
||||
personexpedition.date_to = max([persontrip.date for persontrip in persontrips] or [None])
|
||||
personexpedition.save()
|
||||
|
||||
# The below is all unnecessary, just use the built in get_previous_by_date and get_next_by_date
|
||||
# lprevpersontrip = None
|
||||
# for persontrip in persontrips:
|
||||
# persontrip.persontrip_prev = lprevpersontrip
|
||||
# if lprevpersontrip:
|
||||
# lprevpersontrip.persontrip_next = persontrip
|
||||
# lprevpersontrip.save()
|
||||
# persontrip.persontrip_next = None
|
||||
# lprevpersontrip = persontrip
|
||||
# persontrip.save()
|
||||
|
||||
# from trips rather than logbook entries, which may include events outside the expedition
|
||||
expedition.date_from = min([personexpedition.date_from for personexpedition in expedition.personexpedition_set.all() if personexpedition.date_from] or [None])
|
||||
expedition.date_to = max([personexpedition.date_to for personexpedition in expedition.personexpedition_set.all() if personexpedition.date_to] or [None])
|
||||
expedition.save()
|
||||
|
||||
# The below has been replaced with the methods get_next_by_id and get_previous_by_id
|
||||
# # order by appearance in the logbook (done by id)
|
||||
# lprevlogbookentry = None
|
||||
# for logbookentry in expedition.logbookentry_set.order_by('id'):
|
||||
# logbookentry.logbookentry_prev = lprevlogbookentry
|
||||
# if lprevlogbookentry:
|
||||
# lprevlogbookentry.logbookentry_next = logbookentry
|
||||
# lprevlogbookentry.save()
|
||||
# logbookentry.logbookentry_next = None
|
||||
# logbookentry.save()
|
||||
# lprevlogbookentry = logbookentry
|
||||
|
||||
# This combined date / number key is a weird way of doing things. Use the primary key instead. If we are going to use the date for looking up entries, we should set it up to allow multiple results.
|
||||
# order by date for setting the references
|
||||
# lprevlogbookentry = None
|
||||
# for logbookentry in expedition.logbookentry_set.order_by('date'):
|
||||
# if lprevlogbookentry and lprevlogbookentry.date == logbookentry.date:
|
||||
# mcount = re.search("_(\d+)$", lprevlogbookentry.href)
|
||||
# mc = mcount and (int(mcount.group(1)) + 1) or 1
|
||||
# logbookentry.href = "%s_%d" % (logbookentry.date, mc)
|
||||
# else:
|
||||
# logbookentry.href = "%s" % logbookentry.date
|
||||
# logbookentry.save()
|
||||
# lprevlogbookentry = logbookentry
|
||||
|
||||
|
||||
|
||||
def LoadLogbookForExpedition(expedition):
|
||||
""" Parses all logbook entries for one expedition """
|
||||
|
||||
#We're checking for stuff that's changed in admin before deleting it now.
|
||||
#print "deleting logbooks for", expedition
|
||||
#expedition.logbookentry_set.all().delete()
|
||||
#models.PersonTrip.objects.filter(person_expedition__expedition=expedition).delete()
|
||||
|
||||
expowebbase = os.path.join(settings.EXPOWEB, "years")
|
||||
year = str(expedition.year)
|
||||
for lyear, lloc, parsefunc in yearlinks:
|
||||
if lyear == year:
|
||||
break
|
||||
fin = open(os.path.join(expowebbase, lloc))
|
||||
txt = fin.read()
|
||||
fin.close()
|
||||
parsefunc(year, expedition, txt)
|
||||
SetDatesFromLogbookEntries(expedition)
|
||||
return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(models.PersonTrip.objects.filter(person_expedition__expedition=expedition).count())
|
||||
|
||||
|
||||
def LoadLogbooks():
|
||||
""" This is the master function for parsing all logbooks into the Troggle database. Requires yearlinks, which is a list of tuples for each expedition with expedition year, logbook path, and parsing function. """
|
||||
|
||||
#Deletion has been moved to a separate function to enable the non-destructive importing
|
||||
#models.LogbookEntry.objects.all().delete()
|
||||
expowebbase = os.path.join(settings.EXPOWEB, "years")
|
||||
#yearlinks = [ ("2001", "2001/log.htm", Parseloghtml01), ] #overwrite
|
||||
#yearlinks = [ ("1996", "1996/log.htm", Parseloghtml01),] # overwrite
|
||||
|
||||
for year, lloc, parsefunc in yearlinks:
|
||||
expedition = models.Expedition.objects.filter(year = year)[0]
|
||||
fin = open(os.path.join(expowebbase, lloc))
|
||||
txt = fin.read()
|
||||
fin.close()
|
||||
parsefunc(year, expedition, txt)
|
||||
SetDatesFromLogbookEntries(expedition)
|
||||
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import troggle.settings as settings
|
||||
import troggle.expo.models as models
|
||||
import csv
|
||||
import re
|
||||
import datetime
|
||||
import os
|
||||
import shutil
|
||||
from troggle.save_carefully import save_carefully
|
||||
|
||||
# Julian: the below code was causing errors and it seems like a duplication of the above. Hope I haven't broken anything by commenting it out. -Aaron
|
||||
#
|
||||
# if name in expoers2008:
|
||||
# print "2008:", name
|
||||
# expomissing.discard(name) # I got an error which I think was caused by this -- python complained that a set changed size during iteration.
|
||||
# yo = models.Expedition.objects.filter(year = "2008")[0]
|
||||
# pyo = models.PersonExpedition(person = pObject, expedition = yo, is_guest=is_guest)
|
||||
# pyo.save()
|
||||
|
||||
|
||||
|
||||
def saveMugShot(mugShotPath, mugShotFilename, person):
|
||||
if mugShotFilename.startswith(r'i/'): #if filename in cell has the directory attached (I think they all do), remove it
|
||||
mugShotFilename=mugShotFilename[2:]
|
||||
else:
|
||||
mugShotFilename=mugShotFilename # just in case one doesn't
|
||||
|
||||
mugShotObj = models.Photo(
|
||||
caption="Mugshot for "+person.first_name+" "+person.last_name,
|
||||
is_mugshot=True,
|
||||
file=mugShotFilename,
|
||||
)
|
||||
|
||||
shutil.copy(mugShotPath, mugShotObj.file.path) #Put a copy of the file in the right place. mugShotObj.file.path is determined by the django filesystemstorage specified in models.py
|
||||
|
||||
mugShotObj.save()
|
||||
mugShotObj.contains_person.add(person)
|
||||
mugShotObj.save()
|
||||
|
||||
def parseMugShotAndBlurb(personline, header, person):
|
||||
#create mugshot Photo instance
|
||||
mugShotFilename=personline[header["Mugshot"]]
|
||||
mugShotPath = os.path.join(settings.EXPOWEB, "folk", mugShotFilename)
|
||||
if mugShotPath[-3:]=='jpg': #if person just has an image, add it
|
||||
saveMugShot(mugShotPath=mugShotPath, mugShotFilename=mugShotFilename, person=person)
|
||||
elif mugShotPath[-3:]=='htm': #if person has an html page, find the image(s) and add it. Also, add the text from the html page to the "blurb" field in his model instance.
|
||||
personPageOld=open(mugShotPath,'r').read()
|
||||
person.blurb=re.search('<body>.*<hr',personPageOld,re.DOTALL).group() #this needs to be refined, take care of the HTML and make sure it doesn't match beyond the blurb
|
||||
for mugShotFilename in re.findall('i/.*?jpg',personPageOld,re.DOTALL):
|
||||
mugShotPath = os.path.join(settings.EXPOWEB, "folk", mugShotFilename)
|
||||
saveMugShot(mugShotPath=mugShotPath, mugShotFilename=mugShotFilename, person=person)
|
||||
person.save()
|
||||
|
||||
def LoadPersonsExpos():
|
||||
|
||||
persontab = open(os.path.join(settings.EXPOWEB, "noinfo", "folk.csv"))
|
||||
personreader = csv.reader(persontab)
|
||||
headers = personreader.next()
|
||||
header = dict(zip(headers, range(len(headers))))
|
||||
|
||||
# make expeditions
|
||||
print "Loading expeditions"
|
||||
models.Expedition.objects.all().delete()
|
||||
years = headers[5:]
|
||||
|
||||
for year in years:
|
||||
expedition = models.Expedition(year = year, name = "CUCC expo %s" % year)
|
||||
expedition.save()
|
||||
|
||||
|
||||
# make persons
|
||||
print "Loading personexpeditions"
|
||||
models.Person.objects.all().delete()
|
||||
models.PersonExpedition.objects.all().delete()
|
||||
#expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
|
||||
#expomissing = set(expoers2008)
|
||||
|
||||
for personline in personreader:
|
||||
name = personline[header["Name"]]
|
||||
name = re.sub("<.*?>", "", name)
|
||||
mname = re.match("(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
nickname = mname.group(3) or ""

lookupAttribs={'first_name':mname.group(1), 'last_name':(mname.group(2) or "")}
nonLookupAttribs={'is_vfho':personline[header["VfHO member"]],}
person, created = save_carefully(models.Person, lookupAttribs=lookupAttribs, nonLookupAttribs=nonLookupAttribs)

parseMugShotAndBlurb(personline=personline, header=header, person=person)

# make person expedition from table
for year, attended in zip(headers, personline)[5:]:
    expedition = models.Expedition.objects.get(year=year)
    if attended == "1" or attended == "-1":
        personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname=nickname, is_guest=(personline[header["Guest"]] == "1"))
        personexpedition.save()


# this fills in those people for whom 2008 was their first expo
#print "Loading personexpeditions 2008"
#for name in expomissing:
#    firstname, lastname = name.split()
#    is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
#    print "2008:", name
#    persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
#    if not persons:
#        person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
#        #person.Sethref()
#        person.save()
#    else:
#        person = persons[0]
#    expedition = models.Expedition.objects.get(year="2008")
#    personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
#    personexpedition.save()

#Notability is now a method of person. Makes no sense to store it in the database; it would need to be recalculated every time something changes. - AC 16 Feb 09
# could rank according to surveying as well
#print "Setting person notability"
#for person in models.Person.objects.all():
#    person.notability = 0.0
#    for personexpedition in person.personexpedition_set.all():
#        if not personexpedition.is_guest:
#            person.notability += 1.0 / (2012 - int(personexpedition.expedition.year))
#    person.bisnotable = person.notability > 0.3 # I don't know how to filter by this
#    person.save()


# used in other referencing parser functions
# expedition name lookup cached for speed (it's a very big list)
Gpersonexpeditionnamelookup = { }
def GetPersonExpeditionNameLookup(expedition):
    global Gpersonexpeditionnamelookup
    res = Gpersonexpeditionnamelookup.get(expedition.name)
    if res:
        return res

    res = { }
    duplicates = set()

    print "Calculating GetPersonExpeditionNameLookup for", expedition.year
    personexpeditions = models.PersonExpedition.objects.filter(expedition=expedition)
    for personexpedition in personexpeditions:
        possnames = [ ]
        f = personexpedition.person.first_name.lower()
        l = personexpedition.person.last_name.lower()
        if l:
            possnames.append(f + " " + l)
            possnames.append(f + " " + l[0])
            possnames.append(f + l[0])
            possnames.append(f[0] + " " + l)
        possnames.append(f)
        if personexpedition.nickname:
            possnames.append(personexpedition.nickname.lower())

        for possname in possnames:
            if possname in res:
                duplicates.add(possname)
            else:
                res[possname] = personexpedition

    for possname in duplicates:
        del res[possname]

    Gpersonexpeditionnamelookup[expedition.name] = res
    return res

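The lookup above files each PersonExpedition under several informal spellings of the name and then discards any spelling claimed by more than one person. A minimal standalone sketch of the key-generation step, using plain strings instead of the Django models (the sample name and nickname are invented for illustration):

# Sketch only: mirrors the possnames logic in GetPersonExpeditionNameLookup.
def possible_names(first_name, last_name, nickname=None):
    f, l = first_name.lower(), last_name.lower()
    possnames = []
    if l:
        possnames.append(f + " " + l)     # "jane smith"
        possnames.append(f + " " + l[0])  # "jane s"
        possnames.append(f + l[0])        # "janes"
        possnames.append(f[0] + " " + l)  # "j smith"
    possnames.append(f)                   # "jane"
    if nickname:
        possnames.append(nickname.lower())
    return possnames

print(possible_names("Jane", "Smith", "Janey"))
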
@@ -1,152 +0,0 @@
|
||||
import troggle.settings as settings
|
||||
import troggle.expo.models as models
|
||||
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
roles = {"Insts": "Insts",
         "insts": "Insts",
         "Instruments": "Insts",
         "instruments": "Insts",
         "Inst": "Insts",
         "inst": "Insts",
         "dog": "Other",
         "Dog": "Other",
         "other": "Other",
         "Other": "Other",
         "Notes": "Notes",
         "notes": "Notes",
         "pics": "Pics",
         "Pics": "Pics",
         "Tape": "Tape",
         "tape": "Tape"}

re_include_extension = re.compile(r"^\s*\*include\s+([^\s]*)\.svx$", re.IGNORECASE)
|
||||
re_include_no_extension = re.compile(r"^\s*\*include\s+([^\s]*)$", re.IGNORECASE)
|
||||
flags = {"begin": re.compile(r"^\s*\*begin\s+(.*?)\s*$", re.IGNORECASE),
|
||||
"end": re.compile(r"^\s*\*end\s+(.*?)\s*$", re.IGNORECASE),
|
||||
"date": re.compile(r"^\s*\*date\s+(.*?)\s*$", re.IGNORECASE),
|
||||
"team": re.compile(r"^\s*\*team\s+(.*?)\s*$", re.IGNORECASE)}
|
||||
|
||||
def fileIterator(directory, filename):
|
||||
survex_file = os.path.join(directory, filename + ".svx")
|
||||
try:
|
||||
f = open(os.path.join(settings.SURVEX_DATA, survex_file), "rb")
|
||||
except:
|
||||
f = open(os.path.join(settings.SURVEX_DATA, survex_file).lower(), "rb")
|
||||
char = 0
|
||||
for line in f.readlines():
|
||||
line = unicode(line, "latin1")
|
||||
include_extension = re_include_extension.match(line)
|
||||
include_no_extension = re_include_no_extension.match(line)
|
||||
def a(include):
|
||||
link = re.split(r"/|\\", include)
|
||||
return fileIterator(os.path.join(directory, *link[:-1]), link[-1])
|
||||
if include_extension:
|
||||
for sf, c, l in a(include_extension.groups()[0]):
|
||||
yield sf, c, l
|
||||
elif include_no_extension:
|
||||
for sf, c, l in a(include_no_extension.groups()[0]):
|
||||
yield sf, c, l
|
||||
else:
|
||||
yield survex_file, char, line
|
||||
char = char + len(line)
|
||||
|
||||
|
||||
def make_model(name, parent, iter_lines, sf, c, l):
|
||||
m = models.SurvexBlock(name = name, begin_file = sf, begin_char = c, text = l)
|
||||
m.survexpath = m.name
|
||||
if parent:
|
||||
m.parent = parent
|
||||
m.survexpath = m.parent.survexpath + "." + m.name
|
||||
m.save()
|
||||
|
||||
# horrible local function
|
||||
def saveEnd(survex_file, count):
|
||||
if m.start_year and team:
|
||||
try:
|
||||
exp = models.Expedition.objects.get(year = str(m.start_year))
|
||||
for file_, (role, names) in team:
|
||||
if names.strip("\t").strip(" ") == "both" or names.strip("\t").strip(" ") == "Both":
|
||||
names = reduce(lambda x, y: x + u" & " + y,
|
||||
[names for file_, (role, names) in team
|
||||
if names.strip("\t").strip(" ") != "both"
|
||||
and names.strip("\t").strip(" ") != "Both"])
|
||||
for name in re.split("&|/|\+|,|;", names):
|
||||
sname = name.strip(". ").lower()
|
||||
try:
|
||||
personexpedition = GetPersonExpeditionNameLookup(exp).get(sname)
|
||||
if personexpedition:
|
||||
models.PersonRole(personexpedition = personexpedition,
|
||||
person = personexpedition.person,
|
||||
survex_block = m,
|
||||
role = models.Role.objects.get(name = roles[role])).save()
|
||||
else:
|
||||
print ("no person", exp, sname, role)
|
||||
except AttributeError:
|
||||
print ("Person not found: " + name + " in " + file_ + " " + role).encode('ascii', 'xmlcharrefreplace')
|
||||
except AssertionError, inst:
|
||||
print (unicode(inst) + ": " + unicode(file_year[0])).encode('ascii', 'xmlcharrefreplace')
|
||||
#except models.Expedition.DoesNotExist:
|
||||
# print "Expo"+str(file_year[1]).encode('ascii', 'xmlcharrefreplace')
|
||||
|
||||
m.end_file = survex_file
|
||||
m.end_char = count
|
||||
|
||||
if m.start_day:
|
||||
m.date = "%04d-%02d-%02d" % (int(m.start_year), int(m.start_month), int(m.start_day))
|
||||
|
||||
m.save()
|
||||
|
||||
team = []
|
||||
file_year = None
|
||||
for survex_file, count, line in iter_lines:
|
||||
#Dictionary comprehension
|
||||
res = dict([(key, regex.match(line.split(";")[0])) for key, regex in flags.iteritems()])
|
||||
if res["begin"]:
|
||||
make_model(res["begin"].groups()[0], m, iter_lines, survex_file, count, line)
|
||||
else:
|
||||
m.text = m.text + line
|
||||
if res["end"]:
|
||||
saveEnd(survex_file, count)
|
||||
assert (res["end"].groups()[0]).lower() == (name).lower()
|
||||
return None
|
||||
elif res["date"]:
|
||||
datere = re.match("(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-(\d+))?(?:\.(\d+))?(?:\.(\d+))?",
|
||||
res["date"].groups()[0])
|
||||
if datere is not None:
|
||||
startYear, startMonth, startDay, endYear, endMonth, endDay = datere.groups()
|
||||
m.start_year = startYear
|
||||
m.start_month = startMonth
|
||||
m.start_day = startDay
|
||||
m.end_year = endYear
|
||||
m.end_month = endMonth
|
||||
m.end_day = endDay
|
||||
file_year = survex_file, startYear
|
||||
elif res["team"]:
|
||||
h = re.match("((?:[Ii]nst(?:s|ruments)?)|(?:[Pp]ics)|(?:[Tt]ape)|(?:[Nn]otes)|(?:[Oo]ther))\s*(.*)",
|
||||
res["team"].groups()[0])
|
||||
if h:
|
||||
team.append((survex_file, h.groups()))
|
||||
else:
|
||||
print ("Role not found: " + line + " in: " + sf).encode('ascii', 'xmlcharrefreplace')
|
||||
m.text = m.text + line
|
||||
saveEnd(survex_file, count)
|
||||
|
||||
|
||||
#def LoadSurvexBlocks():
|
||||
# survex_file = os.path.join(directory, filename + ".svx")
|
||||
# f = open(os.path.join(settings.SURVEX_DATA, survex_file), "rb")
|
||||
|
||||
|
||||
def LoadAllSurvexBlocks():
|
||||
models.Role.objects.all().delete()
|
||||
models.SurvexBlock.objects.all().delete()
|
||||
for role in ["Insts", "Notes", "Pics", "Tape", "Other"]:
|
||||
models.Role(name = role).save()
|
||||
filename = "all"
|
||||
make_model("all", None, fileIterator("", filename), filename, 0, "")
|
||||
|
||||
|
||||
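The *date branch in make_model above feeds res["date"] through a second regex that tolerates either a single date or a dashed range, leaving missing parts as None. A self-contained sketch of how that pattern behaves on a few invented inputs:

import re

# Same pattern as used on res["date"].groups()[0] above.
date_re = re.compile(r"(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-(\d+))?(?:\.(\d+))?(?:\.(\d+))?")

for value in ["2007.07.12", "2007.07.12-2007.07.14", "2007"]:
    # groups() -> (startYear, startMonth, startDay, endYear, endMonth, endDay)
    print(date_re.match(value).groups())
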
@@ -1,130 +0,0 @@
import sys
import os
import types
#sys.path.append('C:\\Expo\\expoweb')
#from troggle import *
#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
import troggle.settings as settings
from troggle.expo.models import *
from PIL import Image
#import settings
#import expo.models as models
import csv
import re
import datetime

def readSurveysFromCSV(logfile=None):
    try:
        surveytab = open(os.path.join(settings.SURVEYS, "Surveys.csv"))
    except IOError:
        import cStringIO, urllib
        surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEYS + "download/Surveys.csv").read())
    dialect=csv.Sniffer().sniff(surveytab.read())
    surveytab.seek(0,0)
    surveyreader = csv.reader(surveytab,dialect=dialect)
    headers = surveyreader.next()
    header = dict(zip(headers, range(len(headers)))) #set up a dictionary where the indexes are header names and the values are column numbers

    # test if the expeditions have been added yet
    if Expedition.objects.count()==0:
        print "There are no expeditions in the database. Please run the logbook parser."
        sys.exit()

    if logfile:
        logfile.write("Deleting all scanned images")
    ScannedImage.objects.all().delete()

    if logfile:
        logfile.write("Deleting all survey objects")
    Survey.objects.all().delete()

    if logfile:
        logfile.write("Beginning to import surveys from "+str(os.path.join(settings.SURVEYS, "Surveys.csv"))+"\n"+"-"*60+"\n")

    for survey in surveyreader:
        walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']]) #I hate this, but some surveys have a letter eg 2000#34a. This line deals with that.
        # print walletNumberLetter.groups()

        surveyobj = Survey(
            expedition = Expedition.objects.filter(year=survey[header['Year']])[0],
            wallet_number = walletNumberLetter.group('number'),

            comments = survey[header['Comments']],
            location = survey[header['Location']]
        )
        surveyobj.wallet_letter = walletNumberLetter.group('letter')
        if survey[header['Finished']]=='Yes':
            #try and find the sketch_scan
            pass
        surveyobj.save()

        if logfile:
            logfile.write("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")

def listdir(*directories):
    try:
        return os.listdir(os.path.join(settings.SURVEYS, *directories))
    except:
        import urllib
        url = settings.SURVEYS + reduce(lambda x, y: x + "/" + y, ["listdir"] + list(directories))
        folders = urllib.urlopen(url.replace("#", "%23")).readlines()
        return [folder.rstrip(r"/") for folder in folders]

# add survey scans
def parseSurveyScans(year, logfile=None):
    # yearFileList = listdir(year.year)
    yearPath=os.path.join(settings.SURVEY_SCANS, year.year)
    yearFileList=os.listdir(yearPath)
    print yearFileList
    for surveyFolder in yearFileList:
        try:
            surveyNumber=re.match(r'\d\d\d\d#0*(\d+)',surveyFolder).groups()
            # scanList = listdir(year.year, surveyFolder)
            scanList=os.listdir(os.path.join(yearPath,surveyFolder))
        except AttributeError:
            print surveyFolder + " ignored",
            continue

        for scan in scanList:
            try:
                scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
                scanType,scanNumber,scanFormat=scanChopped
            except AttributeError:
                print scan + " ignored \r",
                continue
            if scanType == 'elev' or scanType == 'extend':
                scanType = 'elevation'

            if scanNumber=='':
                scanNumber=1

            if type(surveyNumber)==types.TupleType:
                surveyNumber=surveyNumber[0]
            try:
                survey=Survey.objects.get_or_create(wallet_number=surveyNumber, expedition=year)[0]
            except Survey.MultipleObjectsReturned:
                survey=Survey.objects.filter(wallet_number=surveyNumber, expedition=year)[0]
            file=os.path.join(year.year, surveyFolder, scan)
            scanObj = ScannedImage(
                file=file,
                contents=scanType,
                number_in_wallet=scanNumber,
                survey=survey,
                new_since_parsing=False,
            )
            #print "Added scanned image at " + str(scanObj)
            if scanFormat=="png":
                if isInterlacedPNG(os.path.join(settings.SURVEY_SCANS,file)):
                    print file + " is an interlaced PNG. No can do."
                    continue
            scanObj.save()

def parseSurveys(logfile=None):
    readSurveysFromCSV()
    for year in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
        parseSurveyScans(year)

def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
    file=Image.open(filePath)
    return file.info['interlace']

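readSurveysFromCSV() above splits a wallet identifier such as "34a" into its numeric part and an optional trailing letter before saving the Survey. A standalone illustration of that regex (the sample values are invented):

import re

# Same pattern as used on survey[header['Survey Number']] above.
wallet_re = re.compile(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)')

for value in ["34", "34a", "7"]:
    m = wallet_re.match(value)
    # e.g. "34a" -> number "34", letter "a"; plain "34" -> letter ""
    print((m.group('number'), m.group('letter')))
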
@@ -1,46 +0,0 @@
|
||||
"""
|
||||
URLConf for Django user profile management.
|
||||
|
||||
Recommended usage is to use a call to ``include()`` in your project's
|
||||
root URLConf to include this URLConf for any URL beginning with
|
||||
'/profiles/'.
|
||||
|
||||
If the default behavior of the profile views is acceptable to you,
|
||||
simply use a line like this in your root URLConf to set up the default
|
||||
URLs for profiles::
|
||||
|
||||
(r'^profiles/', include('profiles.urls')),
|
||||
|
||||
But if you'd like to customize the behavior (e.g., by passing extra
|
||||
arguments to the various views) or split up the URLs, feel free to set
|
||||
up your own URL patterns for these views instead. If you do, it's a
|
||||
good idea to keep the name ``profiles_profile_detail`` for the pattern
|
||||
which points to the ``profile_detail`` view, since several views use
|
||||
``reverse()`` with that name to generate a default post-submission
|
||||
redirect. If you don't use that name, remember to explicitly pass
|
||||
``success_url`` to those views.
|
||||
|
||||
"""
|
||||
|
||||
from django.conf.urls.defaults import *
|
||||
|
||||
from profiles import views
|
||||
|
||||
|
||||
urlpatterns = patterns('',
|
||||
url(r'^select/$',
|
||||
views.select_profile,
|
||||
name='profiles_select_profile'),
|
||||
url(r'^create/$',
|
||||
views.create_profile,
|
||||
name='profiles_create_profile'),
|
||||
url(r'^edit/$',
|
||||
views.edit_profile,
|
||||
name='profiles_edit_profile'),
|
||||
url(r'^(?P<username>\w+)/$',
|
||||
views.profile_detail,
|
||||
name='profiles_profile_detail'),
|
||||
url(r'^$',
|
||||
views.profile_list,
|
||||
name='profiles_profile_list'),
|
||||
)
|
||||
@@ -1,45 +0,0 @@
"""
Utility functions for retrieving and generating forms for the
site-specific user profile model specified in the
``AUTH_PROFILE_MODULE`` setting.

"""

from django import forms
from django.conf import settings
from django.contrib.auth.models import SiteProfileNotAvailable
from django.db.models import get_model


def get_profile_model():
    """
    Return the model class for the currently-active user profile
    model, as defined by the ``AUTH_PROFILE_MODULE`` setting. If that
    setting is missing, raise
    ``django.contrib.auth.models.SiteProfileNotAvailable``.

    """
    if (not hasattr(settings, 'AUTH_PROFILE_MODULE')) or \
       (not settings.AUTH_PROFILE_MODULE):
        raise SiteProfileNotAvailable
    profile_mod = get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
    if profile_mod is None:
        raise SiteProfileNotAvailable
    return profile_mod


def get_profile_form():
    """
    Return a form class (a subclass of the default ``ModelForm``)
    suitable for creating/editing instances of the site-specific user
    profile model, as defined by the ``AUTH_PROFILE_MODULE``
    setting. If that setting is missing, raise
    ``django.contrib.auth.models.SiteProfileNotAvailable``.

    """
    profile_mod = get_profile_model()
    class _ProfileForm(forms.ModelForm):
        class Meta:
            model = profile_mod
            exclude = ('user',) # User will be filled in by the view.
    return _ProfileForm

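get_profile_model() above turns the dotted AUTH_PROFILE_MODULE string into an (app label, model name) pair for get_model(). A tiny illustration of that split; the 'expo.Person' value is only an assumed example of how troggle might point the setting at its Person model, not something this file asserts:

# Hypothetical setting value; real projects define this in settings.py.
AUTH_PROFILE_MODULE = 'expo.Person'

app_label, model_name = AUTH_PROFILE_MODULE.split('.')
print((app_label, model_name))  # ('expo', 'Person')
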
@@ -1,363 +0,0 @@
|
||||
"""
|
||||
Views for creating, editing and viewing site-specific user profiles.
|
||||
|
||||
"""
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.http import Http404
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.shortcuts import render_to_response
|
||||
from django.template import RequestContext
|
||||
from django.views.generic.list_detail import object_list
|
||||
from django import forms
|
||||
|
||||
from expo.models import Person
|
||||
|
||||
from troggle.alwaysUseRequestContext import render_response
|
||||
|
||||
from profiles import utils
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
class SelectPersonForm(forms.Form): #This and the select_profile view
|
||||
person = forms.ModelChoiceField(queryset=Person.objects.all())
|
||||
|
||||
def select_profile(request):
|
||||
|
||||
if request.method == 'POST':
|
||||
form = SelectPersonForm(request.POST)
|
||||
if form.is_valid():
|
||||
profile_obj=form.cleaned_data['person']
|
||||
profile_obj.user=request.user
|
||||
profile_obj.save()
|
||||
return HttpResponseRedirect(profile_obj.get_absolute_url())
|
||||
else:
|
||||
form = SelectPersonForm()
|
||||
context = RequestContext(request)
|
||||
return render_to_response('profiles/select_profile.html', {
|
||||
'form':form,},
|
||||
context_instance=context
|
||||
)
|
||||
|
||||
|
||||
def create_profile(request, form_class=None, success_url=None,
|
||||
template_name='profiles/create_profile.html',
|
||||
extra_context=None):
|
||||
"""
|
||||
Create a profile for the current user, if one doesn't already
|
||||
exist.
|
||||
|
||||
If the user already has a profile, as determined by
|
||||
``request.user.get_profile()``, a redirect will be issued to the
|
||||
:view:`profiles.views.edit_profile` view. If no profile model has
|
||||
been specified in the ``AUTH_PROFILE_MODULE`` setting,
|
||||
``django.contrib.auth.models.SiteProfileNotAvailable`` will be
|
||||
raised.
|
||||
|
||||
**Optional arguments:**
|
||||
|
||||
``extra_context``
|
||||
A dictionary of variables to add to the template context. Any
|
||||
callable object in this dictionary will be called to produce
|
||||
the end result which appears in the context.
|
||||
|
||||
``form_class``
|
||||
The form class to use for validating and creating the user
|
||||
profile. This form class must define a method named
|
||||
``save()``, implementing the same argument signature as the
|
||||
``save()`` method of a standard Django ``ModelForm`` (this
|
||||
view will call ``save(commit=False)`` to obtain the profile
|
||||
object, and fill in the user before the final save). If the
|
||||
profile object includes many-to-many relations, the convention
|
||||
established by ``ModelForm`` of using a method named
|
||||
``save_m2m()`` will be used, and so your form class should
|
||||
also define this method.
|
||||
|
||||
If this argument is not supplied, this view will use a
|
||||
``ModelForm`` automatically generated from the model specified
|
||||
by ``AUTH_PROFILE_MODULE``.
|
||||
|
||||
``success_url``
|
||||
The URL to redirect to after successful profile creation. If
|
||||
this argument is not supplied, this will default to the URL of
|
||||
:view:`profiles.views.profile_detail` for the newly-created
|
||||
profile object.
|
||||
|
||||
``template_name``
|
||||
The template to use when displaying the profile-creation
|
||||
form. If not supplied, this will default to
|
||||
:template:`profiles/create_profile.html`.
|
||||
|
||||
**Context:**
|
||||
|
||||
``form``
|
||||
The profile-creation form.
|
||||
|
||||
**Template:**
|
||||
|
||||
``template_name`` keyword argument, or
|
||||
:template:`profiles/create_profile.html`.
|
||||
|
||||
"""
|
||||
try:
|
||||
profile_obj = request.user.get_profile()
|
||||
return HttpResponseRedirect(profile_obj.get_absolute_url())
|
||||
except ObjectDoesNotExist:
|
||||
pass
|
||||
|
||||
#
|
||||
# We set up success_url here, rather than as the default value for
|
||||
# the argument. Trying to do it as the argument's default would
|
||||
# mean evaluating the call to reverse() at the time this module is
|
||||
# first imported, which introduces a circular dependency: to
|
||||
# perform the reverse lookup we need access to profiles/urls.py,
|
||||
# but profiles/urls.py in turn imports this module.
|
||||
#
|
||||
|
||||
if success_url is None:
|
||||
success_url = reverse('profiles_profile_detail',
|
||||
kwargs={ 'username': request.user.username })
|
||||
if form_class is None:
|
||||
form_class = utils.get_profile_form()
|
||||
if request.method == 'POST':
|
||||
form = form_class(data=request.POST, files=request.FILES)
|
||||
if form.is_valid():
|
||||
profile_obj = form.save(commit=False)
|
||||
profile_obj.user = request.user
|
||||
profile_obj.save()
|
||||
if hasattr(form, 'save_m2m'):
|
||||
form.save_m2m()
|
||||
return HttpResponseRedirect(success_url)
|
||||
else:
|
||||
form = form_class()
|
||||
|
||||
if extra_context is None:
|
||||
extra_context = {}
|
||||
context = RequestContext(request)
|
||||
for key, value in extra_context.items():
|
||||
context[key] = callable(value) and value() or value
|
||||
|
||||
return render_to_response(template_name,
|
||||
{ 'form': form, 'settings':settings },
|
||||
context_instance=context)
|
||||
create_profile = login_required(create_profile)
|
||||
|
||||
def edit_profile(request, form_class=None, success_url=None,
|
||||
template_name='profiles/edit_profile.html',
|
||||
extra_context=None):
|
||||
"""
|
||||
Edit the current user's profile.
|
||||
|
||||
If the user does not already have a profile (as determined by
|
||||
``User.get_profile()``), a redirect will be issued to the
|
||||
:view:`profiles.views.create_profile` view; if no profile model
|
||||
has been specified in the ``AUTH_PROFILE_MODULE`` setting,
|
||||
``django.contrib.auth.models.SiteProfileNotAvailable`` will be
|
||||
raised.
|
||||
|
||||
**Optional arguments:**
|
||||
|
||||
``extra_context``
|
||||
A dictionary of variables to add to the template context. Any
|
||||
callable object in this dictionary will be called to produce
|
||||
the end result which appears in the context.
|
||||
|
||||
``form_class``
|
||||
The form class to use for validating and editing the user
|
||||
profile. This form class must operate similarly to a standard
|
||||
Django ``ModelForm`` in that it must accept an instance of the
|
||||
object to be edited as the keyword argument ``instance`` to
|
||||
its constructor, and it must implement a method named
|
||||
``save()`` which will save the updates to the object. If this
|
||||
argument is not specified, this view will use a ``ModelForm``
|
||||
generated from the model specified in the
|
||||
``AUTH_PROFILE_MODULE`` setting.
|
||||
|
||||
``success_url``
|
||||
The URL to redirect to following a successful edit. If not
|
||||
specified, this will default to the URL of
|
||||
:view:`profiles.views.profile_detail` for the profile object
|
||||
being edited.
|
||||
|
||||
``template_name``
|
||||
The template to use when displaying the profile-editing
|
||||
form. If not specified, this will default to
|
||||
:template:`profiles/edit_profile.html`.
|
||||
|
||||
**Context:**
|
||||
|
||||
``form``
|
||||
The form for editing the profile.
|
||||
|
||||
``profile``
|
||||
The user's current profile.
|
||||
|
||||
**Template:**
|
||||
|
||||
``template_name`` keyword argument or
|
||||
:template:`profiles/edit_profile.html`.
|
||||
|
||||
"""
|
||||
try:
|
||||
profile_obj = request.user.get_profile()
|
||||
except ObjectDoesNotExist:
|
||||
return HttpResponseRedirect(reverse('profiles_create_profile'))
|
||||
|
||||
#
|
||||
# See the comment in create_profile() for discussion of why
|
||||
# success_url is set up here, rather than as a default value for
|
||||
# the argument.
|
||||
#
|
||||
|
||||
if success_url is None:
|
||||
success_url = reverse('profiles_profile_detail',
|
||||
kwargs={ 'username': request.user.username })
|
||||
if form_class is None:
|
||||
form_class = utils.get_profile_form()
|
||||
if request.method == 'POST':
|
||||
form = form_class(data=request.POST, files=request.FILES, instance=profile_obj)
|
||||
if form.is_valid():
|
||||
form.save()
|
||||
return HttpResponseRedirect(success_url)
|
||||
else:
|
||||
form = form_class(instance=profile_obj)
|
||||
|
||||
if extra_context is None:
|
||||
extra_context = {}
|
||||
context = RequestContext(request)
|
||||
for key, value in extra_context.items():
|
||||
context[key] = callable(value) and value() or value
|
||||
|
||||
return render_to_response(template_name,
|
||||
{ 'form': form,
|
||||
'profile': profile_obj, },
|
||||
context_instance=context)
|
||||
edit_profile = login_required(edit_profile)
|
||||
|
||||
def profile_detail(request, username, public_profile_field=None,
|
||||
template_name='profiles/profile_detail.html',
|
||||
extra_context=None):
|
||||
"""
|
||||
Detail view of a user's profile.
|
||||
|
||||
If no profile model has been specified in the
|
||||
``AUTH_PROFILE_MODULE`` setting,
|
||||
``django.contrib.auth.models.SiteProfileNotAvailable`` will be
|
||||
raised.
|
||||
|
||||
If the user has not yet created a profile, ``Http404`` will be
|
||||
raised.
|
||||
|
||||
**Required arguments:**
|
||||
|
||||
``username``
|
||||
The username of the user whose profile is being displayed.
|
||||
|
||||
**Optional arguments:**
|
||||
|
||||
``extra_context``
|
||||
A dictionary of variables to add to the template context. Any
|
||||
callable object in this dictionary will be called to produce
|
||||
the end result which appears in the context.
|
||||
|
||||
``public_profile_field``
|
||||
The name of a ``BooleanField`` on the profile model; if the
|
||||
value of that field on the user's profile is ``False``, the
|
||||
``profile`` variable in the template will be ``None``. Use
|
||||
this feature to allow users to mark their profiles as not
|
||||
being publicly viewable.
|
||||
|
||||
If this argument is not specified, it will be assumed that all
|
||||
users' profiles are publicly viewable.
|
||||
|
||||
``template_name``
|
||||
The name of the template to use for displaying the profile. If
|
||||
not specified, this will default to
|
||||
:template:`profiles/profile_detail.html`.
|
||||
|
||||
**Context:**
|
||||
|
||||
``profile``
|
||||
The user's profile, or ``None`` if the user's profile is not
|
||||
publicly viewable (see the description of
|
||||
``public_profile_field`` above).
|
||||
|
||||
**Template:**
|
||||
|
||||
``template_name`` keyword argument or
|
||||
:template:`profiles/profile_detail.html`.
|
||||
|
||||
"""
|
||||
user = get_object_or_404(User, username=username)
|
||||
try:
|
||||
profile_obj = user.get_profile()
|
||||
except ObjectDoesNotExist:
|
||||
raise Http404
|
||||
if public_profile_field is not None and \
|
||||
not getattr(profile_obj, public_profile_field):
|
||||
profile_obj = None
|
||||
|
||||
if extra_context is None:
|
||||
extra_context = {}
|
||||
context = RequestContext(request)
|
||||
for key, value in extra_context.items():
|
||||
context[key] = callable(value) and value() or value
|
||||
|
||||
return render_to_response(template_name,
|
||||
{ 'profile': profile_obj },
|
||||
context_instance=context)
|
||||
|
||||
def profile_list(request, public_profile_field=None,
|
||||
template_name='profiles/profile_list.html', **kwargs):
|
||||
"""
|
||||
A list of user profiles.
|
||||
|
||||
If no profile model has been specified in the
|
||||
``AUTH_PROFILE_MODULE`` setting,
|
||||
``django.contrib.auth.models.SiteProfileNotAvailable`` will be
|
||||
raised.
|
||||
|
||||
**Optional arguments:**
|
||||
|
||||
``public_profile_field``
|
||||
The name of a ``BooleanField`` on the profile model; if the
|
||||
value of that field on a user's profile is ``False``, that
|
||||
profile will be excluded from the list. Use this feature to
|
||||
allow users to mark their profiles as not being publicly
|
||||
viewable.
|
||||
|
||||
If this argument is not specified, it will be assumed that all
|
||||
users' profiles are publicly viewable.
|
||||
|
||||
``template_name``
|
||||
The name of the template to use for displaying the profiles. If
|
||||
not specified, this will default to
|
||||
:template:`profiles/profile_list.html`.
|
||||
|
||||
Additionally, all arguments accepted by the
|
||||
:view:`django.views.generic.list_detail.object_list` generic view
|
||||
will be accepted here, and applied in the same fashion, with one
|
||||
exception: ``queryset`` will always be the ``QuerySet`` of the
|
||||
model specified by the ``AUTH_PROFILE_MODULE`` setting, optionally
|
||||
filtered to remove non-publicly-viewable profiles.
|
||||
|
||||
**Context:**
|
||||
|
||||
Same as the :view:`django.views.generic.list_detail.object_list`
|
||||
generic view.
|
||||
|
||||
**Template:**
|
||||
|
||||
``template_name`` keyword argument or
|
||||
:template:`profiles/profile_list.html`.
|
||||
|
||||
"""
|
||||
profile_model = utils.get_profile_model()
|
||||
queryset = profile_model._default_manager.all()
|
||||
if public_profile_field is not None:
|
||||
queryset = queryset.filter(**{ public_profile_field: True })
|
||||
kwargs['queryset'] = queryset
|
||||
return object_list(request, template_name=template_name, **kwargs)
|
||||
@@ -1,11 +0,0 @@
from django.contrib import admin

from registration.models import RegistrationProfile


class RegistrationAdmin(admin.ModelAdmin):
    list_display = ('__unicode__', 'activation_key_expired')
    search_fields = ('user__username', 'user__first_name')


admin.site.register(RegistrationProfile, RegistrationAdmin)

@@ -1,134 +0,0 @@
|
||||
"""
|
||||
Forms and validation code for user registration.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django import forms
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from registration.models import RegistrationProfile
|
||||
|
||||
|
||||
# I put this on all required fields, because it's easier to pick up
|
||||
# on them with CSS or JavaScript if they have a class of "required"
|
||||
# in the HTML. Your mileage may vary. If/when Django ticket #3515
|
||||
# lands in trunk, this will no longer be necessary.
|
||||
attrs_dict = { 'class': 'required' }
|
||||
|
||||
|
||||
class RegistrationForm(forms.Form):
|
||||
"""
|
||||
Form for registering a new user account.
|
||||
|
||||
Validates that the requested username is not already in use, and
|
||||
requires the password to be entered twice to catch typos.
|
||||
|
||||
Subclasses should feel free to add any additional validation they
|
||||
need, but should either preserve the base ``save()`` or implement
|
||||
a ``save()`` method which returns a ``User``.
|
||||
|
||||
"""
|
||||
username = forms.RegexField(regex=r'^\w+$',
|
||||
max_length=30,
|
||||
widget=forms.TextInput(attrs=attrs_dict),
|
||||
label=_(u'username'))
|
||||
email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
|
||||
maxlength=75)),
|
||||
label=_(u'email address'))
|
||||
password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
|
||||
label=_(u'password'))
|
||||
password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
|
||||
label=_(u'password (again)'))
|
||||
|
||||
def clean_username(self):
|
||||
"""
|
||||
Validate that the username is alphanumeric and is not already
|
||||
in use.
|
||||
|
||||
"""
|
||||
try:
|
||||
user = User.objects.get(username__iexact=self.cleaned_data['username'])
|
||||
except User.DoesNotExist:
|
||||
return self.cleaned_data['username']
|
||||
raise forms.ValidationError(_(u'This username is already taken. Please choose another.'))
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Verify that the values entered into the two password fields
|
||||
match. Note that an error here will end up in
|
||||
``non_field_errors()`` because it doesn't apply to a single
|
||||
field.
|
||||
|
||||
"""
|
||||
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
|
||||
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
|
||||
raise forms.ValidationError(_(u'You must type the same password each time'))
|
||||
return self.cleaned_data
|
||||
|
||||
def save(self):
|
||||
"""
|
||||
Create the new ``User`` and ``RegistrationProfile``, and
|
||||
returns the ``User`` (by calling
|
||||
``RegistrationProfile.objects.create_inactive_user()``).
|
||||
|
||||
"""
|
||||
new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],
|
||||
password=self.cleaned_data['password1'],
|
||||
email=self.cleaned_data['email'])
|
||||
return new_user
|
||||
|
||||
|
||||
class RegistrationFormTermsOfService(RegistrationForm):
|
||||
"""
|
||||
Subclass of ``RegistrationForm`` which adds a required checkbox
|
||||
for agreeing to a site's Terms of Service.
|
||||
|
||||
"""
|
||||
tos = forms.BooleanField(widget=forms.CheckboxInput(attrs=attrs_dict),
|
||||
label=_(u'I have read and agree to the Terms of Service'),
|
||||
error_messages={ 'required': u"You must agree to the terms to register" })
|
||||
|
||||
|
||||
class RegistrationFormUniqueEmail(RegistrationForm):
|
||||
"""
|
||||
Subclass of ``RegistrationForm`` which enforces uniqueness of
|
||||
email addresses.
|
||||
|
||||
"""
|
||||
def clean_email(self):
|
||||
"""
|
||||
Validate that the supplied email address is unique for the
|
||||
site.
|
||||
|
||||
"""
|
||||
if User.objects.filter(email__iexact=self.cleaned_data['email']):
|
||||
raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.'))
|
||||
return self.cleaned_data['email']
|
||||
|
||||
|
||||
class RegistrationFormNoFreeEmail(RegistrationForm):
|
||||
"""
|
||||
Subclass of ``RegistrationForm`` which disallows registration with
|
||||
email addresses from popular free webmail services; moderately
|
||||
useful for preventing automated spam registrations.
|
||||
|
||||
To change the list of banned domains, subclass this form and
|
||||
override the attribute ``bad_domains``.
|
||||
|
||||
"""
|
||||
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
|
||||
'googlemail.com', 'hotmail.com', 'hushmail.com',
|
||||
'msn.com', 'mail.ru', 'mailinator.com', 'live.com']
|
||||
|
||||
def clean_email(self):
|
||||
"""
|
||||
Check the supplied email address against a list of known free
|
||||
webmail domains.
|
||||
|
||||
"""
|
||||
email_domain = self.cleaned_data['email'].split('@')[1]
|
||||
if email_domain in self.bad_domains:
|
||||
raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.'))
|
||||
return self.cleaned_data['email']
|
||||
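RegistrationFormNoFreeEmail.clean_email above is only a domain comparison after splitting the address at '@'. A minimal standalone sketch of the same check outside the form (the sample addresses are invented):

# Sketch only: mirrors the bad_domains test without the Django form machinery.
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
               'googlemail.com', 'hotmail.com', 'hushmail.com',
               'msn.com', 'mail.ru', 'mailinator.com', 'live.com']

def is_free_email(address):
    # Reject the address if its domain appears on the banned list.
    return address.split('@')[1] in bad_domains

print(is_free_email('someone@mailinator.com'))  # True
print(is_free_email('someone@example.org'))     # False
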
@@ -1,81 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "اسم المستخدم"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "عنوان البريد الالكتروني"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "كلمة المرور"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "تأكيد كلمة المرور"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "يمكن أن يحتوي اسم المستخدم على احرف، ارقام وشرطات سطرية فقط"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "اسم المستخدم مسجل مسبقا. يرجى اختيار اسم اخر."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "يجب ادخال كلمة المرور مطابقة كل مرة"
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "أقر بقراءة والموافقة على شروط الخدمة"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "يجب الموافقة على الشروط للتسجيل"
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr "عنوان البريد الالكتروني مسجل مسبقا. يرجى تزويد عنوان بريد الكتروني مختلف."
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr "يمنع التسجيل باستخدام عناوين بريد الكترونية مجانية. يرجى تزويد عنوان بريد الكتروني مختلف."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "مستخدم"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "رمز التفعيل"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "ملف التسجيل الشخصي"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "ملفات التسجيل الشخصية"
|
||||
@@ -1,78 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: 2008-03-05 12:37+0200\n"
|
||||
"Last-Translator: Vladislav <vladislav.mitov@gmail.com>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Poedit-Bookmarks: -1,-1,-1,-1,10,-1,-1,-1,-1,-1\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "Потребителско име "
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "Електронна поща"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "Парола"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "Парола (проверка)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "Потребителските имена могат да съдържат букви, цифри и подчертавки"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Потребителското име е заето. Моля изберето друго."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Грешка при проверка на паролата."
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "Прочел съм и съм съгласен с условията за експлоатация"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Трябва да сте съгласни с условията за да се регистрирате."
|
||||
|
||||
#: forms.py:124
|
||||
msgid "This email address is already in use. Please supply a different email address."
|
||||
msgstr "Адреса на електронната поща е използван. Моля въведете друг адрес."
|
||||
|
||||
#: forms.py:149
|
||||
msgid "Registration using free email addresses is prohibited. Please supply a different email address."
|
||||
msgstr "Регистрациите с безплатни адреси е забранен. Моля въведете различен адрес за електронна поща"
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "Потребител"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "Ключ за активация"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "регистрационен профил"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "регистрационни профили"
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# Jannis Leidel <jannis@leidel.info>, 2007.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: django-registration 0.3 \n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: 2007-09-29 16:50+0200\n"
|
||||
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
|
||||
"Language-Team: Deutsch <de@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "Benutzername"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "E-Mail-Adresse"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "Passwort"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "Passwort (wiederholen)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "Benutzernamen können nur Buchstaben, Zahlen und Unterstriche enthalten"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Dieser Benutzername ist schon vergeben. Bitte einen anderen wählen."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Bitte das gleiche Passwort zur Überprüfung nochmal eingeben"
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "Ich habe die Nutzungsvereinbarung gelesen und stimme ihr zu"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Sie müssen der Nutzungsvereinbarung zustimmen, um sich zu registrieren"
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr ""
|
||||
"Diese E-Mail-Adresse wird schon genutzt. Bitte geben Sie eine andere "
|
||||
"E-Mail-Adresse an."
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr ""
|
||||
"Die Registrierung mit einer kostenlosen E-Mail-Adresse ist untersagt. Bitte "
|
||||
"geben Sie eine andere E-Mail-Adresse an."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "Benutzer"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "Aktivierungsschlüssel"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "Registrierungsprofil"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "Registrierungsprofile"
|
||||
@@ -1,84 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# Panos Laganakos <panos.laganakos@gmail.com>, 2007.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: 2007-11-14 21:50+0200\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "όνομα χρήστη"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "διεύθυνση ηλεκτρονικού ταχυδρομείου"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "συνθηματικό"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "συνθηματικό (ξανά)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "Τα ονόματα χρηστών μπορούν να περιλαμβάνουν μόνο γράμματα, αριθμούς και υπογραμμίσεις"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Αυτό το όνομα χρήστη χρησιμοποίειται ήδη. Παρακαλώ διαλέξτε ένα άλλο."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Πρέπει να εισάγετε το ίδιο συνθηματικό κάθε φορά"
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "Διάβασα και συμφωνώ με τους Όρους της Υπηρεσίας"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Πρέπει να συμφωνείται με τους όρους για να εγγραφείτε"
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr ""
|
||||
"Η συγκεκριμένη διεύθυνση ηλεκτρονικού ταχυδρομείου χρησιμοποιείται ήδη. "
|
||||
"Παρακαλώ δώστε κάποια άλλη."
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr ""
|
||||
"Η εγγραφή μέσω δωρεάν διευθύνσεων ηλεκτρονικού ταχυδρομείου απαγορεύεται. ""Παρακαλώ δώστε κάποια άλλη."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "χρήστης"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "κλειδί ενεργοποίησης"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "προφίλ εγγραφής"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "προφίλ εγγραφών"
|
||||
@@ -1,81 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr ""
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr ""
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr ""
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr ""
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr ""
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr ""
|
||||
@@ -1,85 +0,0 @@
|
||||
# Spanish translation for django-registration.
|
||||
# Copyright (C) 2007, James Bennet
|
||||
# This file is distributed under the same license as the registration package.
|
||||
# Ernesto Rico Schmidt <e.rico.schmidt@gmail.com>, 2008.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: django-registration 0.3 \n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2008-03-11 00:19-0400\n"
|
||||
"PO-Revision-Date: 2008-03-11 00:19-0400\n"
|
||||
"Last-Translator: Ernesto Rico Schmidt <e.rico.schmidt@gmail.com>\n"
|
||||
"Language-Team: Español <de@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "nombre de usuario"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "dirección de coreo electrónico"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "contraseña"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "contraseña (otra vez)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "Los nombres de usuarios sólo pueden contener letras, números y guiones bajos"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Este nombre de usuario ya está ocupado. Por favor escoge otro"
|
||||
|
||||
#: forms.py:71
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Tienes que introducir la misma contraseña cada vez"
|
||||
|
||||
#: forms.py:100
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "He leído y acepto los términos de servicio"
|
||||
|
||||
#: forms.py:109
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Tienes que aceptar los términos para registrarte"
|
||||
|
||||
#: forms.py:128
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr ""
|
||||
"La dirección de correo electrónico ya está siendo usada. Por favor"
|
||||
"proporciona otra dirección."
|
||||
|
||||
#: forms.py:153
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr ""
|
||||
"El registro usando una dirección de correo electrónico gratis está prohibido."
|
||||
"Por favor proporciona otra dirección."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "usuario"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "clave de activación"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "perfil de registro"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "perfiles de registro"
|
||||
@@ -1,83 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) 2008 Leonardo Manuel Rocha
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# FIRST AUTHOR <l e o m a r o at g m a i l dot c o m>, YEAR.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "nombre de usuario"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "dirección de e-mail"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "contraseña"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "contraseña (nuevamente)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "El nombre de usuario solo puede contener letras, números y guiones bajos"
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Ese nombre de usuario ya está asignado. Por favor elija otro."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Debe tipear la misma contraseña cada vez"
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "He leído y estoy de acuerdo con las Condiciones de Servicio"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Debe estar de acuerdo con las Condiciones para poder registrarse"
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr "Esa dirección de e-mail ya está en uso. Por favor provea otra "
|
||||
"dirección."
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr "La registración con un e-mail gratuito está prohibida. Por favor "
|
||||
"de una dirección de e-mail diferente."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "usuario"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "clave de activación"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "perfil de registro"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "perfiles de registro"
|
||||
@@ -1,81 +0,0 @@
|
||||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# Samuel Adam <samuel.adam@gmail.com>, 2007.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: django-registration 0.3 \n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
|
||||
"PO-Revision-Date: 2007-09-20 10:30+0100\n"
|
||||
"Last-Translator: Samuel Adam <samuel.adam@gmail.com>\n"
|
||||
"Language-Team: Français <fr@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: forms.py:38
|
||||
msgid "username"
|
||||
msgstr "pseudo"
|
||||
|
||||
#: forms.py:41
|
||||
msgid "email address"
|
||||
msgstr "adresse email"
|
||||
|
||||
#: forms.py:43
|
||||
msgid "password"
|
||||
msgstr "mot de passe"
|
||||
|
||||
#: forms.py:45
|
||||
msgid "password (again)"
|
||||
msgstr "mot de passe (vérification)"
|
||||
|
||||
#: forms.py:54
|
||||
msgid "Usernames can only contain letters, numbers and underscores"
|
||||
msgstr "Le pseudo ne peut contenir que des lettres, chiffres et le caractère souligné."
|
||||
|
||||
#: forms.py:59
|
||||
msgid "This username is already taken. Please choose another."
|
||||
msgstr "Ce pseudo est déjà utilisé. Veuillez en choisir un autre."
|
||||
|
||||
#: forms.py:68
|
||||
msgid "You must type the same password each time"
|
||||
msgstr "Veuillez indiquer le même mot de passe dans les deux champs"
|
||||
|
||||
#: forms.py:96
|
||||
msgid "I have read and agree to the Terms of Service"
|
||||
msgstr "J'ai lu et accepté les Conditions Générales d'Utilisation"
|
||||
|
||||
#: forms.py:105
|
||||
msgid "You must agree to the terms to register"
|
||||
msgstr "Vous devez accepter les conditions d'utilisation pour vous inscrire"
|
||||
|
||||
#: forms.py:124
|
||||
msgid ""
|
||||
"This email address is already in use. Please supply a different email "
|
||||
"address."
|
||||
msgstr "Cette adresse email est déjà utilisée. Veuillez en indiquer une autre."
|
||||
|
||||
#: forms.py:149
|
||||
msgid ""
|
||||
"Registration using free email addresses is prohibited. Please supply a "
|
||||
"different email address."
|
||||
msgstr "L'inscription avec une adresse email d'un compte gratuit est interdite. Veuillez en indiquer une autre."
|
||||
|
||||
#: models.py:188
|
||||
msgid "user"
|
||||
msgstr "utilisateur"
|
||||
|
||||
#: models.py:189
|
||||
msgid "activation key"
|
||||
msgstr "clé d'activation"
|
||||
|
||||
#: models.py:194
|
||||
msgid "registration profile"
|
||||
msgstr "profil d'inscription"
|
||||
|
||||
#: models.py:195
|
||||
msgid "registration profiles"
|
||||
msgstr "profils d'inscription"
|
||||
@@ -1,86 +0,0 @@
# translation of registration.
# Copyright (C) 2008 THE registration'S COPYRIGHT HOLDER
# This file is distributed under the same license as the registration package.
# <>, 2008.
# , fuzzy
# <>, 2008.
#
#
msgid ""
msgstr ""
"Project-Id-Version: registration\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2008-02-10 02:01+0200\n"
"PO-Revision-Date: 2008-02-10 02:05+0200\n"
"Last-Translator: Meir Kriheli <meir@mksoft.co.il>\n"
"Language-Team: Hebrew\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit"

#: forms.py:38
msgid "username"
msgstr "שם משתמש"

#: forms.py:41
msgid "email address"
msgstr "דואר אלקטרוני"

#: forms.py:43
msgid "password"
msgstr "סיסמה"

#: forms.py:45
msgid "password (again)"
msgstr "סיסמה (שוב)"

#: forms.py:54
msgid "Usernames can only contain letters, numbers and underscores"
msgstr "שמות משתמש יכולים להכיל רק אותיות, ספרות וקווים תחתונים"

#: forms.py:59
msgid "This username is already taken. Please choose another."
msgstr "שם המשתמש תפוס כבר. נא לבחור אחר."

#: forms.py:64
msgid "You must type the same password each time"
msgstr "יש להקליד את אותה הסיסמה פעמיים"

#: forms.py:93
msgid "I have read and agree to the Terms of Service"
msgstr "קראתי והסכמתי לתנאי השימוש"

#: forms.py:102
msgid "You must agree to the terms to register"
msgstr "עליך להסכים לתנאי השימוש"

#: forms.py:121
msgid ""
"This email address is already in use. Please supply a different email "
"address."
msgstr ""
"כתובת הדואר האלקטרוני תפוסה כבר. נא לספק כתובת דואר אחרת."

#: forms.py:146
msgid ""
"Registration using free email addresses is prohibited. Please supply a "
"different email address."
msgstr ""
"הרישום בעזרת תיבת דואר אלקטרוני חינמית אסור. נא לספק כתובת אחרת."

#: models.py:188
msgid "user"
msgstr "משתמש"

#: models.py:189
msgid "activation key"
msgstr "מפתח הפעלה"

#: models.py:194
msgid "registration profile"
msgstr "פרופיל רישום"

#: models.py:195
msgid "registration profiles"
msgstr "פרופילי רישום"
@@ -1,82 +0,0 @@
# translation of django.po to Italiano
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
#
# Nicola Larosa <nico@tekNico.net>, 2008.
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
"PO-Revision-Date: 2008-05-27 15:05+0200\n"
"Last-Translator: Nicola Larosa <nico@tekNico.net>\n"
"Language-Team: Italiano\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: KBabel 1.11.4\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"

#: forms.py:38
msgid "username"
msgstr "nome utente"

#: forms.py:41
msgid "email address"
msgstr "indirizzo email"

#: forms.py:43
msgid "password"
msgstr "password"

#: forms.py:45
msgid "password (again)"
msgstr "password (di nuovo)"

#: forms.py:54
msgid "Usernames can only contain letters, numbers and underscores"
msgstr "I nomi utente possono contenere solo lettere, numeri e sottolineature"

#: forms.py:59
msgid "This username is already taken. Please choose another."
msgstr "Questo nome utente è già usato. Scegline un altro."

#: forms.py:68
msgid "You must type the same password each time"
msgstr "Bisogna inserire la stessa password ogni volta"

#: forms.py:96
msgid "I have read and agree to the Terms of Service"
msgstr "Dichiaro di aver letto e di approvare le Condizioni di Servizio"

#: forms.py:105
msgid "You must agree to the terms to register"
msgstr "Per registrarsi bisogna approvare le condizioni"

#: forms.py:124
msgid "This email address is already in use. Please supply a different email "
"address."
msgstr "Questo indirizzo email è già in uso. Inserisci un altro indirizzo email."

#: forms.py:149
msgid "Registration using free email addresses is prohibited. Please supply a "
"different email address."
msgstr "La registrazione con indirizzi email gratis non è permessa. "
"Inserisci un altro indirizzo email."

#: models.py:188
msgid "user"
msgstr "utente"

#: models.py:189
msgid "activation key"
msgstr "chiave di attivazione"

#: models.py:194
msgid "registration profile"
msgstr "profilo di registrazione"

#: models.py:195
msgid "registration profiles"
msgstr "profili di registrazione"
@@ -1,78 +0,0 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# Shinya Okano <xxshss@yahoo.co.jp>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: django-registration 0.4 \n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
"PO-Revision-Date: 2008-01-31 10:20+0900\n"
"Last-Translator: Shinya Okano <xxshss@yahoo.co.jp>\n"
"Language-Team: Japanese <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

#: forms.py:38
msgid "username"
msgstr "ユーザ名"

#: forms.py:41
msgid "email address"
msgstr "メールアドレス"

#: forms.py:43
msgid "password"
msgstr "パスワード"

#: forms.py:45
msgid "password (again)"
msgstr "パスワード (確認)"

#: forms.py:54
msgid "Usernames can only contain letters, numbers and underscores"
msgstr "ユーザ名には半角英数とアンダースコアのみが使用できます。"

#: forms.py:59
msgid "This username is already taken. Please choose another."
msgstr "このユーザ名は既に使用されています。他のユーザ名を指定してください。"

#: forms.py:68
msgid "You must type the same password each time"
msgstr "同じパスワードを入力する必要があります。"

#: forms.py:96
msgid "I have read and agree to the Terms of Service"
msgstr "サービス利用規約を読み、同意します。"

#: forms.py:105
msgid "You must agree to the terms to register"
msgstr "登録するためには規約に同意する必要があります。"

#: forms.py:124
msgid "This email address is already in use. Please supply a different email address."
msgstr "このメールアドレスは既に使用されています。他のメールアドレスを指定して下さい。"

#: forms.py:149
msgid "Registration using free email addresses is prohibited. Please supply a different email address."
msgstr "自由なメールアドレスを使用した登録は禁止されています。他のメールアドレスを指定してください。"

#: models.py:188
msgid "user"
msgstr "ユーザ"

#: models.py:189
msgid "activation key"
msgstr "アクティベーションキー"

#: models.py:194
msgid "registration profile"
msgstr "登録プロファイル"

#: models.py:195
msgid "registration profiles"
msgstr "登録プロファイル"
@@ -1,77 +0,0 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
msgid ""
msgstr ""
"Project-Id-Version: registration\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2008-08-14 13:25+0200\n"
"PO-Revision-Date: 2008-08-14 13:25+0200\n"
"Last-Translator: Joost Cassee <joost@cassee.net>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"

#: forms.py:38
msgid "username"
msgstr "gebruikersnaam"

#: forms.py:41
msgid "email address"
msgstr "e-mail adres"

#: forms.py:43
msgid "password"
msgstr "wachtwoord"

#: forms.py:45
msgid "password (again)"
msgstr "wachtwoord (opnieuw)"

#: forms.py:54
msgid "Usernames can only contain letters, numbers and underscores"
msgstr "Gebruikersnamen kunnen alleen letters, nummer en liggende streepjes bevatten."

#: forms.py:59
msgid "This username is already taken. Please choose another."
msgstr "Deze gebruikersnaam is reeds in gebruik. Kiest u alstublieft een andere gebruikersnaam."

#: forms.py:71
msgid "You must type the same password each time"
msgstr "U moet twee maal hetzelfde wachtwoord typen."

#: forms.py:100
msgid "I have read and agree to the Terms of Service"
msgstr "Ik heb de servicevoorwaarden gelezen en ga akkoord."

#: forms.py:109
msgid "You must agree to the terms to register"
msgstr "U moet akkoord gaan met de servicevoorwaarden om u te registreren."

#: forms.py:125
msgid "This email address is already in use. Please supply a different email address."
msgstr "Dit e-mail adres is reeds in gebruik. Kiest u alstublieft een ander e-mail adres."

#: forms.py:151
msgid "Registration using free email addresses is prohibited. Please supply a different email address."
msgstr "U kunt u niet registreren met een gratis e-mail adres. Kiest u alstublieft een ander e-mail adres."

#: models.py:191
msgid "user"
msgstr "gebruiker"

#: models.py:192
msgid "activation key"
msgstr "activatiecode"

#: models.py:197
msgid "registration profile"
msgstr "registratieprofiel"

#: models.py:198
msgid "registration profiles"
msgstr "registratieprofielen"
@@ -1,84 +0,0 @@
# Polish translation for django-registration.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the django-registration package.
# Jarek Zgoda <jarek.zgoda@gmail.com>, 2007.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: 0.4\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2007-09-19 19:30-0500\n"
"PO-Revision-Date: 2007-12-15 12:45+0100\n"
"Last-Translator: Jarek Zgoda <jarek.zgoda@gmail.com>\n"
"Language-Team: Polish <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

#: forms.py:38
msgid "username"
msgstr "nazwa użytkownika"

#: forms.py:41
msgid "email address"
msgstr "adres email"

#: forms.py:43
msgid "password"
msgstr "hasło"

#: forms.py:45
msgid "password (again)"
msgstr "hasło (ponownie)"

#: forms.py:54
msgid "Usernames can only contain letters, numbers and underscores"
msgstr ""
"Nazwa użytkownika może zawierać tylko litery, cyfry i znaki podkreślenia"

#: forms.py:59
msgid "This username is already taken. Please choose another."
msgstr "Ta nazwa użytkownika jest już zajęta. Wybierz inną."

#: forms.py:68
msgid "You must type the same password each time"
msgstr "Musisz wpisać to samo hasło w obu polach"

#: forms.py:96
msgid "I have read and agree to the Terms of Service"
msgstr "Przeczytałem regulamin i akceptuję go"

#: forms.py:105
msgid "You must agree to the terms to register"
msgstr "Musisz zaakceptować regulamin, aby się zarejestrować"

#: forms.py:124
msgid ""
"This email address is already in use. Please supply a different email "
"address."
msgstr "Ten adres email jest już używany. Użyj innego adresu email."

#: forms.py:149
msgid ""
"Registration using free email addresses is prohibited. Please supply a "
"different email address."
msgstr ""
"Nie ma możliwości rejestracji przy użyciu darmowego adresu email. Użyj "
"innego adresu email."

#: models.py:188
msgid "user"
msgstr "użytkownik"

#: models.py:189
msgid "activation key"
msgstr "klucz aktywacyjny"

#: models.py:194
msgid "registration profile"
msgstr "profil rejestracji"

#: models.py:195
msgid "registration profiles"
msgstr "profile rejestracji"