forked from expo/troggle

Compare commits: Faster-sur...django-upg

1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | 6984f66794 |  |

README.txt (12 lines changed)
@@ -1,6 +1,6 @@
 Troggle is an application for caving expedition data management, originally created for use on Cambridge University Caving Club (CUCC)expeditions and licensed under the GNU Lesser General Public License.
 
-Troggle has been forked into two projects. The original one is maintained by Aron Curtis and is used for Erebus caves. The CUCC variant uses files as the definitive data, not the database and lives at expo.survex.com/troggle.
+Troggle has been forked into two projects. The original one is maintained by Aron Curtis and is used for Erebus caves. The CUCC variant uses files as the definitive data, not the database and lives at expo.sruvex.com/troggle.
 
 Troggle setup
 ==========
@@ -18,14 +18,12 @@ If you want to use MySQL or Postgresql, download and install them. However, you
 
 Troggle itself
 -------------
-Choose a directory where you will keep troggle, and git clone Troggle into it using the following command:
+Choose a directory where you will keep troggle, and svn check out Troggle into it using the following command:
 
-git clone git://expo.survex.com/troggle
-or more reliably
-git clone ssh://expo@expo.survex.com/home/expo/troggle
+svn co http://troggle.googlecode.com/svn/
 
 
-If you want to work on the source code and be able to commit, your account will need to be added to the troggle project members list. Contact wookey at wookware dot org to get this set up.
+If you want to work on the source code and be able to commit, you will need to use https instead of http, and your google account will need to be added to the troggle project members list. Contact aaron dot curtis at cantab dot net to get this set up.
 
 Next, you need to fill in your local settings. Copy either localsettingsubuntu.py or localsettingsserver.py to a new file called localsettings.py. Follow the instructions contained in the file to fill out your settings.
@@ -37,7 +35,7 @@ Run "python databaseReset.py reset" from the troggle directory.
 Once troggle is running, you can also log in and then go to "Import / export" data under "admin" on the menu.
 
 Adding a new year/expedition requires adding a column to the
-folk/folk.csv table - a year doesn't exist until that is done.
+noinfo/folk.csv table - a year doesn't exist until that is done.
 
 
 Running a Troggle server
@@ -74,11 +74,11 @@ class LogbookEntryAdmin(TroggleModelAdmin):
         }
     actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')
 
-    def export_logbook_entries_as_html(self, modeladmin, request, queryset):
+    def export_logbook_entries_as_html(modeladmin, request, queryset):
         response=downloadLogbook(request=request, queryset=queryset, extension='html')
         return response
 
-    def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
+    def export_logbook_entries_as_txt(modeladmin, request, queryset):
         response=downloadLogbook(request=request, queryset=queryset, extension='txt')
         return response
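For context, Django admin actions can be module-level functions taking (modeladmin, request, queryset) or ModelAdmin methods taking (self, request, queryset); the four-argument form above mixes the two. A minimal sketch of the bound-method style, with illustrative names rather than troggle's actual code:

```python
from django.contrib import admin
from django.http import HttpResponse

class LogbookEntryAdmin(admin.ModelAdmin):
    actions = ('export_logbook_entries_as_txt',)

    def export_logbook_entries_as_txt(self, request, queryset):
        # Bound action methods receive (self, request, queryset);
        # returning an HttpResponse streams a download to the admin user.
        response = HttpResponse(content_type='text/plain')
        for entry in queryset:
            response.write(u"%s\n" % entry)
        return response
    export_logbook_entries_as_txt.short_description = "Export selected entries as text"
```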
@@ -139,17 +139,16 @@ admin.site.register(SurvexStation)
 admin.site.register(SurvexScansFolder)
 admin.site.register(SurvexScanSingle)
 
-admin.site.register(DataIssue)
 
 def export_as_json(modeladmin, request, queryset):
-    response = HttpResponse(content_type="text/json")
+    response = HttpResponse(mimetype="text/json")
     response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
     serializers.serialize("json", queryset, stream=response)
     return response
 
 
 def export_as_xml(modeladmin, request, queryset):
-    response = HttpResponse(content_type="text/xml")
+    response = HttpResponse(mimetype="text/xml")
     response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
     serializers.serialize("xml", queryset, stream=response)
     return response
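One side of these pairs uses HttpResponse(mimetype=...), which Django deprecated in 1.5 and removed in 1.7 in favour of content_type; that API change is what most of the response hunks in this diff track. A minimal sketch of the surviving spelling:

```python
from django.core import serializers
from django.http import HttpResponse

def export_as_json(modeladmin, request, queryset):
    # content_type is the Django >= 1.5 argument; mimetype= was removed in 1.7.
    response = HttpResponse(content_type="application/json")
    response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
    # Stream the serialized queryset straight into the response body.
    serializers.serialize("json", queryset, stream=response)
    return response
```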
@@ -46,12 +46,12 @@ class EntranceForm(ModelForm):
     #underground_centre_line = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
     #notes = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
     #references = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
-    other_station = forms.CharField(required=False) # Trying to change this to a single line entry
-    tag_station = forms.CharField(required=False) # Trying to change this to a single line entry
-    exact_station = forms.CharField(required=False) # Trying to change this to a single line entry
-    northing = forms.CharField(required=False) # Trying to change this to a single line entry
-    easting = forms.CharField(required=False) # Trying to change this to a single line entry
-    alt = forms.CharField(required=False) # Trying to change this to a single line entry
+    other_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
+    tag_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
+    exact_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
+    northing = forms.CharField(required=False) # Trying to change this to a singl;e line entry
+    easting = forms.CharField(required=False) # Trying to change this to a singl;e line entry
+    alt = forms.CharField(required=False) # Trying to change this to a singl;e line entry
     class Meta:
         model = Entrance
         exclude = ("cached_primary_slug", "filename",)
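The repeated "single line entry" comment is about widgets: on a ModelForm, a model TextField renders as a multi-line Textarea, while redeclaring the field as forms.CharField (default widget TextInput) yields a single-line box. A hedged sketch of the same effect made explicit (Entrance is the troggle model referenced in the hunk):

```python
from django import forms

class EntranceForm(forms.ModelForm):
    # Redeclaring the field forces a single-line <input type="text">;
    # the model-derived default for a TextField would be a <textarea>.
    other_station = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={'size': '50'}))

    class Meta:
        model = Entrance  # troggle model, imported from core.models
        exclude = ("cached_primary_slug", "filename",)
```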
@@ -123,7 +123,7 @@ def getTripForm(expedition):
     html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
 
     def clean(self):
-        print(dir(self))
+        print dir(self)
         if self.cleaned_data.get("caveOrLocation") == "cave" and not self.cleaned_data.get("cave"):
             self._errors["cave"] = self.error_class(["This field is required"])
         if self.cleaned_data.get("caveOrLocation") == "location" and not self.cleaned_data.get("location"):
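For reference, clean() is Django's hook for cross-field validation like the cave-or-location rule above; a minimal self-contained sketch of the same pattern, in the older error-dict style this code uses:

```python
from django import forms

class TripForm(forms.Form):
    caveOrLocation = forms.ChoiceField(
        choices=[("cave", "cave"), ("location", "location")])
    cave = forms.CharField(required=False)
    location = forms.CharField(required=False)

    def clean(self):
        cleaned = super(TripForm, self).clean()
        # Cross-field rule: whichever option was picked must be filled in.
        if cleaned.get("caveOrLocation") == "cave" and not cleaned.get("cave"):
            self._errors["cave"] = self.error_class(["This field is required"])
        if cleaned.get("caveOrLocation") == "location" and not cleaned.get("location"):
            self._errors["location"] = self.error_class(["This field is required"])
        return cleaned
```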
@@ -2,14 +2,6 @@ from django.core.management.base import BaseCommand, CommandError
 from optparse import make_option
 from troggle.core.models import Cave
 import settings
 import os
 
 from django.db import connection
 from django.core import management
 from django.contrib.auth.models import User
 from django.core.urlresolvers import reverse
 from troggle.core.models import Cave, Entrance
 import troggle.flatpages.models
 
 databasename=settings.DATABASES['default']['NAME']
 expouser=settings.EXPOUSER
@@ -20,13 +12,22 @@ class Command(BaseCommand):
     help = 'This is normal usage, clear database and reread everything'
 
     option_list = BaseCommand.option_list + (
-        make_option('--reset',
+        make_option('--foo',
             action='store_true',
-            dest='reset',
+            dest='foo',
             default=False,
-            help='Reset the entier DB from files'),
+            help='test'),
     )
 
+    def add_arguments(self, parser):
+
+        parser.add_argument(
+            '--foo',
+            action='store_true',
+            dest='foo',
+            help='Help text',
+        )
+
     def handle(self, *args, **options):
         print(args)
         print(options)
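The option_list/make_option style in this hunk is the pre-1.8 optparse API for management commands; the add_arguments hook appearing alongside it is the argparse replacement that Django 1.8 introduced (option_list was removed in 1.10). A minimal sketch of a command written purely in the newer style, with an illustrative flag name:

```python
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'Clear the database and reread everything'

    def add_arguments(self, parser):
        # parser is a standard argparse.ArgumentParser instance.
        parser.add_argument('--reset', action='store_true', dest='reset',
                            default=False,
                            help='Reset the entire DB from files')

    def handle(self, *args, **options):
        # Flags declared above arrive in the options dict.
        if options['reset']:
            self.stdout.write(self.style.WARNING('Resetting...'))
```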
@@ -45,8 +46,8 @@ class Command(BaseCommand):
             self.import_QMs()
         elif "tunnel" in args:
             self.import_tunnelfiles()
-        elif options['reset']:
-            self.reset(self)
+        elif "reset" in args:
+            self.reset()
         elif "survex" in args:
             self.import_survex()
         elif "survexpos" in args:
@@ -60,15 +61,13 @@ class Command(BaseCommand):
             self.dumplogbooks()
         elif "writeCaves" in args:
             self.writeCaves()
-        elif options['foo']:
-            self.stdout.write(self.style.WARNING('Tesing....'))
+        elif "foo" in args:
+            self.stdout.write('Tesing....')
         else:
-            #self.stdout.write("%s not recognised" % args)
-            #self.usage(options)
-            self.stdout.write("poo")
-            #print(args)
+            self.stdout.write("%s not recognised" % args)
+            self.usage(options)
 
-    def reload_db(obj):
+    def reload_db():
         if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
             try:
                 os.remove(databasename)
@@ -87,22 +86,22 @@ class Command(BaseCommand):
         user.is_superuser = True
         user.save()
 
-    def make_dirs(obj):
+    def make_dirs():
         """Make directories that troggle requires"""
         # should also deal with permissions here.
         if not os.path.isdir(settings.PHOTOS_ROOT):
             os.mkdir(settings.PHOTOS_ROOT)
 
-    def import_caves(obj):
+    def import_caves():
         import parsers.caves
-        print("Importing Caves")
+        print("importing caves")
         parsers.caves.readcaves()
 
-    def import_people(obj):
+    def import_people():
         import parsers.people
         parsers.people.LoadPersonsExpos()
 
-    def import_logbooks(obj):
+    def import_logbooks():
         # The below line was causing errors I didn't understand (it said LOGFILE was a string), and I couldn't be bothered to figure
         # what was going on so I just catch the error with a try. - AC 21 May
         try:
@@ -113,57 +112,57 @@ class Command(BaseCommand):
         import parsers.logbooks
         parsers.logbooks.LoadLogbooks()
 
-    def import_survex(obj):
+    def import_survex():
         import parsers.survex
         parsers.survex.LoadAllSurvexBlocks()
         parsers.survex.LoadPos()
 
-    def import_QMs(obj):
+    def import_QMs():
         import parsers.QMs
 
-    def import_surveys(obj):
+    def import_surveys():
         import parsers.surveys
         parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
 
-    def import_surveyscans(obj):
+    def import_surveyscans():
         import parsers.surveys
         parsers.surveys.LoadListScans()
 
-    def import_tunnelfiles(obj):
+    def import_tunnelfiles():
         import parsers.surveys
         parsers.surveys.LoadTunnelFiles()
 
-    def reset(self, mgmt_obj):
+    def reset():
         """ Wipe the troggle database and import everything from legacy data
         """
-        self.reload_db()
-        self.make_dirs()
-        self.pageredirects()
-        self.import_caves()
-        self.import_people()
-        self.import_surveyscans()
-        self.import_survex()
-        self.import_logbooks()
-        self.import_QMs()
+        reload_db()
+        make_dirs()
+        pageredirects()
+        import_caves()
+        import_people()
+        import_surveyscans()
+        import_survex()
+        import_logbooks()
+        import_QMs()
         try:
-            self.import_tunnelfiles()
+            import_tunnelfiles()
         except:
             print("Tunnel files parser broken.")
 
-        self.import_surveys()
+        import_surveys()
 
-    def pageredirects(obj):
+    def pageredirects():
         for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
             f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
             f.save()
 
-    def writeCaves(obj):
+    def writeCaves():
         for cave in Cave.objects.all():
             cave.writeDataFile()
         for entrance in Entrance.objects.all():
             entrance.writeDataFile()
 
-    def troggle_usage(obj):
+    def usage(self, parser):
         print("""Usage is 'manage.py reset_db <command>'
         where command is:
         reset - this is normal usage, clear database and reread everything
@@ -10,7 +10,7 @@ from django.db.models import Min, Max
 from django.conf import settings
 from decimal import Decimal, getcontext
 from django.core.urlresolvers import reverse
-from imagekit.models import ImageModel
+from imagekit.models import ProcessedImageField #ImageModel
 from django.template import Context, loader
 import settings
 getcontext().prec=2 #use 2 significant figures for decimal calculations
@@ -104,32 +104,37 @@ class Expedition(TroggleModel):
     def day_max(self):
         res = self.expeditionday_set.all()
         return res and res[len(res) - 1] or None
 
 
 class ExpeditionDay(TroggleModel):
     expedition = models.ForeignKey("Expedition")
     date = models.DateField()
 
     class Meta:
-        ordering = ('date',)
+        ordering = ('date',)
 
     def GetPersonTrip(self, personexpedition):
         personexpeditions = self.persontrip_set.filter(expeditionday=self)
         return personexpeditions and personexpeditions[0] or None
 
 
 #
 # single Person, can go on many years
 #
 class Person(TroggleModel):
     first_name = models.CharField(max_length=100)
     last_name = models.CharField(max_length=100)
     fullname = models.CharField(max_length=200)
     is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
     mug_shot = models.CharField(max_length=100, blank=True,null=True)
     blurb = models.TextField(blank=True,null=True)
 
     #href = models.CharField(max_length=200)
     orderref = models.CharField(max_length=200) # for alphabetic
 
     #the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
     #notability = models.FloatField() # for listing the top 20 people
     #bisnotable = models.BooleanField(default=False)
     user = models.OneToOneField(User, null=True, blank=True)
     def get_absolute_url(self):
         return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))
@@ -146,15 +151,9 @@ class Person(TroggleModel):
 
     def notability(self):
         notability = Decimal(0)
-        max_expo_val = 0
-
-        max_expo_year = Expedition.objects.all().aggregate(Max('year'))
-        max_expo_val = int(max_expo_year['year__max']) + 1
 
         for personexpedition in self.personexpedition_set.all():
             if not personexpedition.is_guest:
-                print(personexpedition.expedition.year)
-                notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
+                notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
         return notability
 
     def bisnotable(self):
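One side of this hunk hardcodes 2012 as the year after the latest expedition while the other derives it from the data with an aggregate. A sketch of the aggregation idiom, using the model names from the hunk:

```python
from decimal import Decimal
from django.db.models import Max

def notability(person):
    # aggregate() returns a dict keyed '<field>__<func>', here 'year__max'.
    latest = Expedition.objects.aggregate(Max('year'))['year__max']
    max_expo_val = int(latest) + 1
    score = Decimal(0)
    for pe in person.personexpedition_set.all():
        if not pe.is_guest:
            # Recent expeditions weigh more: 1/1 for the latest,
            # 1/2 for the year before, and so on.
            score += Decimal(1) / (max_expo_val - int(pe.expedition.year))
    return score
```

Deriving the reference year keeps the weighting correct without a yearly source edit, which is presumably what the non-hardcoded side was after.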
@@ -242,22 +241,18 @@ class PersonExpedition(TroggleModel):
 # Single parsed entry from Logbook
 #
 class LogbookEntry(TroggleModel):
 
-    LOGBOOK_ENTRY_TYPES = (
-        ("wiki", "Wiki style logbook"),
-        ("html", "Html style logbook")
-    )
-
-    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
+    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
     expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
     expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
-    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
-    cave_slug = models.SlugField(max_length=50)
-    place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
-    text = models.TextField()
-    slug = models.SlugField(max_length=50)
-    filename = models.CharField(max_length=200,null=True)
-    entry_type = models.CharField(default="wiki",null=True,choices=LOGBOOK_ENTRY_TYPES,max_length=50)
+    #author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
+    # Re: the above- so this field should be "typist" or something, not "author". - AC 15 jun 09
+    #MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author, in the rare it was not redundanty and of actually interest it could be added to the text.
+    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
+    cave_slug = models.SlugField(max_length=50)
+    place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
+    text = models.TextField()
+    slug = models.SlugField(max_length=50)
+    filename = models.CharField(max_length=200,null=True)
 
     class Meta:
         verbose_name_plural = "Logbook Entries"
@@ -296,7 +291,7 @@ class LogbookEntry(TroggleModel):
         if self.cave:
             nextQMnumber=self.cave.new_QM_number(self.date.year)
         else:
-            return None
+            return none
         return nextQMnumber
 
     def new_QM_found_link(self):
@@ -306,7 +301,6 @@ class LogbookEntry(TroggleModel):
     def DayIndex(self):
         return list(self.expeditionday.logbookentry_set.all()).index(self)
 
 
 #
 # Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
 #
@@ -452,7 +446,7 @@ class Cave(TroggleModel):
         elif self.unofficial_number:
             href = self.unofficial_number
         else:
-            href = self.official_name.lower()
+            href = official_name.lower()
         #return settings.URL_ROOT + '/cave/' + href + '/'
         return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))
@@ -535,15 +529,13 @@ class Cave(TroggleModel):
 
 def getCaveByReference(reference):
     areaname, code = reference.split("-", 1)
-    #print(areaname, code)
+    print(areaname, code)
     area = Area.objects.get(short_name = areaname)
-    #print(area)
+    print(area)
     foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
     print(list(foundCaves))
-    if len(foundCaves) == 1:
-        return foundCaves[0]
-    else:
-        return False
+    assert len(foundCaves) == 1
+    return foundCaves[0]
 
 class OtherCaveName(TroggleModel):
     name = models.CharField(max_length=160)
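getCaveByReference above runs two separate filters and concatenates the lists; the same lookup can be expressed as one query with Q objects. A sketch under the model and field names used in the hunk:

```python
from django.db.models import Q

def get_cave_by_reference(reference):
    areaname, code = reference.split("-", 1)
    area = Area.objects.get(short_name=areaname)
    # OR the two possible number fields together in a single query.
    caves = Cave.objects.filter(area=area).filter(
        Q(kataster_number=code) | Q(unofficial_number=code))
    if caves.count() == 1:
        return caves[0]
    return False  # zero or ambiguous matches
```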
@@ -746,17 +738,17 @@ class QM(TroggleModel):
 
     number = models.IntegerField(help_text="this is the sequential number in the year", )
     GRADE_CHOICES=(
-        ('A', 'A: Large obvious lead'),
-        ('B', 'B: Average lead'),
-        ('C', 'C: Tight unpromising lead'),
-        ('D', 'D: Dig'),
-        ('X', 'X: Unclimbable aven')
+        ('A', 'A: Large obvious lead'),
+        ('B', 'B: Average lead'),
+        ('C', 'C: Tight unpromising lead'),
+        ('D', 'D: Dig'),
+        ('X', 'X: Unclimbable aven')
     )
     grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
     location_description = models.TextField(blank=True)
     #should be a foreignkey to surveystation
     nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
     nearest_station_name = models.CharField(max_length=200,blank=True,null=True)
-    nearest_station = models.ForeignKey(SurvexStation,null=True,blank=True)
+    nearest_station = models.CharField(max_length=200,blank=True,null=True)
     area = models.CharField(max_length=100,blank=True,null=True)
     completion_description = models.TextField(blank=True,null=True)
     comment=models.TextField(blank=True,null=True)
@@ -815,7 +807,7 @@ def get_scan_path(instance, filename):
     number=str(instance.survey.wallet_letter) + number #two strings formatting because convention is 2009#01 or 2009#X01
     return os.path.join('./',year,year+r'#'+number,str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')
 
-class ScannedImage(TroggleImageModel):
+class ScannedImage(TroggleImageModel):
     file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
     scanned_by = models.ForeignKey(Person,blank=True, null=True)
     scanned_on = models.DateField(null=True)
@@ -836,7 +828,7 @@ class ScannedImage(TroggleImageModel):
     #This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
     def correctURL(self):
         return string.replace(self.file.url,r'#',r'%23')
 
 
     def __unicode__(self):
         return get_scan_path(self,'')
@@ -869,14 +861,3 @@ class Survey(TroggleModel):
 
     def elevations(self):
         return self.scannedimage_set.filter(contents='elevation')
 
-class DataIssue(TroggleModel):
-    date = models.DateTimeField(auto_now_add=True, blank=True)
-    parser = models.CharField(max_length=50, blank=True, null=True)
-    message = models.CharField(max_length=400, blank=True, null=True)
-
-    class Meta:
-        ordering = ['date']
-
-    def __unicode__(self):
-        return u"%s - %s" % (self.parser, self.message)
@@ -97,7 +97,7 @@ class SurvexBlockLookUpManager(models.Manager):
             blocknames = []
         else:
             blocknames = name.split(".")
-        block = SurvexBlock.objects.get(parent=None, survexfile__path=settings.SURVEX_TOPNAME)
+        block = SurvexBlock.objects.get(parent=None, survexfile__path="all")
         for blockname in blocknames:
             block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
         return block
@@ -225,4 +225,4 @@ class TunnelFile(models.Model):
 
     class Meta:
         ordering = ('tunnelpath',)
@@ -47,6 +47,6 @@ def survex_to_html(value, autoescape=None):
     if autoescape:
         value = conditional_escape(value)
     for regex, sub in regexes:
-        print(sub)
+        print sub
         value = regex.sub(sub, value)
     return mark_safe(value)
@@ -7,6 +7,7 @@ from troggle.core.models import QM, DPhoto, LogbookEntry, Cave
 import re, urlparse
 
 register = template.Library()
 
 
 @register.filter()
 def plusone(n):
@@ -76,7 +77,7 @@ def wiki_to_html_short(value, autoescape=None):
         if number>1:
             return '<h'+num+'>'+matchobj.groups()[1]+'</h'+num+'>'
         else:
-            print('morethanone')
+            print 'morethanone'
             return matchobj.group()
     value = re.sub(r"(?m)^(=+)([^=]+)(=+)$",headerrepl,value)
@@ -142,13 +143,13 @@ def wiki_to_html_short(value, autoescape=None):
     value = re.sub(photoSrcPattern,photoSrcRepl, value, re.DOTALL)
 
     #make cave links
-    value = re.sub(r"\[\[\s*cave:([^\s]+)\s*\s*\]\]", r'<a href="%scave/\1/">\1</a>' % settings.URL_ROOT, value, re.DOTALL)
+    value = re.sub("\[\[\s*cave:([^\s]+)\s*\s*\]\]", r'<a href="%scave/\1/">\1</a>' % settings.URL_ROOT, value, re.DOTALL)
     #make people links
-    value = re.sub(r"\[\[\s*person:(.+)\|(.+)\]\]",r'<a href="%sperson/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
+    value = re.sub("\[\[\s*person:(.+)\|(.+)\]\]",r'<a href="%sperson/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
     #make subcave links
-    value = re.sub(r"\[\[\s*subcave:(.+)\|(.+)\]\]",r'<a href="%ssubcave/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
+    value = re.sub("\[\[\s*subcave:(.+)\|(.+)\]\]",r'<a href="%ssubcave/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
     #make cavedescription links
-    value = re.sub(r"\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%scavedescription/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
+    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%scavedescription/\1/">\2</a>' % settings.URL_ROOT, value, re.DOTALL)
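Beyond the raw-string prefixes this hunk toggles, note that re.sub's fourth positional parameter is count, not flags, so passing re.DOTALL positionally (as both sides do) silently caps the number of substitutions at the flag's numeric value instead of changing matching behaviour. A sketch of the safe spellings, with a hypothetical input value:

```python
import re

value = "[[cave:204]] and [[cave:161]]"

# Either pass flags by keyword (Python >= 2.7)...
value = re.sub(r"\[\[\s*cave:(\S+)\s*\]\]",
               r'<a href="/cave/\1/">\1</a>',
               value, flags=re.DOTALL)

# ...or compile the pattern with the flags baked in.
cave_link = re.compile(r"\[\[\s*cave:(\S+)\s*\]\]", re.DOTALL)
value = cave_link.sub(r'<a href="/cave/\1/">\1</a>', value)
```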
@@ -10,10 +10,11 @@ from troggle.helper import login_required_if_public
 from django.forms.models import modelformset_factory
 from django import forms
 from django.core.urlresolvers import reverse
+from utils import render_with_context # see views_logbooks for explanation on this.
 from django.http import HttpResponse, HttpResponseRedirect
 from django.conf import settings
 import re, urlparse
-from django.shortcuts import get_object_or_404, render
+from django.shortcuts import get_object_or_404
 import settings
@@ -57,7 +58,7 @@ def caveindex(request):
     caves1626 = list(Cave.objects.filter(area__short_name = "1626"))
     caves1623.sort(caveCmp)
     caves1626.sort(caveCmp)
-    return render(request,'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves':notablecaves, 'cavepage': True})
+    return render_with_context(request,'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves':notablecaves, 'cavepage': True})
 
 def millenialcaves(request):
     #RW messing around area
@@ -82,43 +83,43 @@ def cave3d(request, cave_id=''):
 def cave(request, cave_id='', offical_name=''):
     cave=getCave(cave_id)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave, 'cavepage': True, 'cave_id': cave_id})
+        return render_with_context(request,'nonpublic.html', {'instance': cave, 'cavepage': True, 'cave_id': cave_id})
     else:
-        return render(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True, 'cave_id': cave_id})
+        return render_with_context(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True, 'cave_id': cave_id})
 
 def caveEntrance(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave})
+        return render_with_context(request,'nonpublic.html', {'instance': cave})
     else:
-        return render(request,'cave_entrances.html', {'cave': cave})
+        return render_with_context(request,'cave_entrances.html', {'cave': cave})
 
 def caveDescription(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave})
+        return render_with_context(request,'nonpublic.html', {'instance': cave})
     else:
-        return render(request,'cave_uground_description.html', {'cave': cave})
+        return render_with_context(request,'cave_uground_description.html', {'cave': cave})
 
 def caveQMs(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave})
+        return render_with_context(request,'nonpublic.html', {'instance': cave})
     else:
-        return render(request,'cave_qms.html', {'cave': cave})
+        return render_with_context(request,'cave_qms.html', {'cave': cave})
 
 def caveLogbook(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave})
+        return render_with_context(request,'nonpublic.html', {'instance': cave})
     else:
-        return render(request,'cave_logbook.html', {'cave': cave})
+        return render_with_context(request,'cave_logbook.html', {'cave': cave})
 
 def caveSlug(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': cave, 'cave_editable': slug})
+        return render_with_context(request,'nonpublic.html', {'instance': cave, 'cave_editable': slug})
     else:
-        return render(request,'cave.html', {'cave': cave, 'cave_editable': slug})
+        return render_with_context(request,'cave.html', {'cave': cave, 'cave_editable': slug})
 
 @login_required_if_public
 def edit_cave(request, slug=None):
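The long run of paired lines above swaps between troggle's home-grown render_with_context wrapper and Django's built-in render shortcut (available since Django 1.3), which constructs a RequestContext itself so context processors keep working. The two are otherwise equivalent; a minimal sketch of the built-in form:

```python
from django.shortcuts import render

def cave(request, cave_id=''):
    cave = getCave(cave_id)  # troggle helper, as used in the hunk above
    # render() wraps the template in a RequestContext automatically,
    # which is exactly what render_with_context existed to do.
    return render(request, 'cave.html',
                  {'cave': cave, 'cavepage': True, 'cave_id': cave_id})
```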
@@ -159,7 +160,7 @@ def edit_cave(request, slug=None):
         ceFormSet = CaveAndEntranceFormSet(queryset=cave.caveandentrance_set.all())
         versionControlForm = VersionControlCommentForm()
 
-    return render(request,
+    return render_with_context(request,
                   'editcave2.html',
                   {'form': form,
                    'caveAndEntranceFormSet': ceFormSet,
@@ -203,7 +204,7 @@ def editEntrance(request, caveslug, slug=None):
             entletter = EntranceLetterForm(request.POST)
         else:
             entletter = None
-    return render(request,
+    return render_with_context(request,
                   'editentrance.html',
                   {'form': form,
                    'versionControlForm': versionControlForm,
@@ -214,7 +215,7 @@ def qm(request,cave_id,qm_id,year,grade=None):
     year=int(year)
     try:
         qm=getCave(cave_id).get_QMs().get(number=qm_id,found_by__date__year=year)
-        return render(request,'qm.html',locals())
+        return render_with_context(request,'qm.html',locals())
 
     except QM.DoesNotExist:
         url=urlparse.urljoin(settings.URL_ROOT, r'/admin/core/qm/add/'+'?'+ r'number=' + qm_id)
@@ -227,16 +228,16 @@ def qm(request,cave_id,qm_id,year,grade=None):
 def ent(request, cave_id, ent_letter):
     cave = Cave.objects.filter(kataster_number = cave_id)[0]
     cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]
-    return render(request,'entrance.html', {'cave': cave,
+    return render_with_context(request,'entrance.html', {'cave': cave,
         'entrance': cave_and_ent.entrance,
         'letter': cave_and_ent.entrance_letter,})
 
 def entranceSlug(request, slug):
     entrance = Entrance.objects.get(entranceslug__slug = slug)
     if entrance.non_public and not request.user.is_authenticated():
-        return render(request,'nonpublic.html', {'instance': entrance})
+        return render_with_context(request,'nonpublic.html', {'instance': entrance})
     else:
-        return render(request,'entranceslug.html', {'entrance': entrance})
+        return render_with_context(request,'entranceslug.html', {'entrance': entrance})
 
 def survexblock(request, survexpath):
     survexpath = re.sub("/", ".", survexpath)
@@ -244,12 +245,12 @@ def survexblock(request, survexpath):
     survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
     #ftext = survexblock.filecontents()
     ftext = survexblock.text
-    return render(request,'survexblock.html', {'survexblock':survexblock, 'ftext':ftext, })
+    return render_with_context(request,'survexblock.html', {'survexblock':survexblock, 'ftext':ftext, })
 
 def surveyindex(request):
     surveys=Survey.objects.all()
     expeditions=Expedition.objects.order_by("-year")
-    return render(request,'survey.html',locals())
+    return render_with_context(request,'survey.html',locals())
 
 def survey(request,year,wallet_number):
     surveys=Survey.objects.all()

@@ -262,19 +263,19 @@ def survey(request,year,wallet_number):
     planSketches=current_survey.scannedimage_set.filter(contents='plan')
     elevationSketches=current_survey.scannedimage_set.filter(contents='elevation')
 
-    return render(request,'survey.html', locals())
+    return render_with_context(request,'survey.html', locals())
 
 def cave_description(request, cavedescription_name):
     cave_description = get_object_or_404(CaveDescription, short_name = cavedescription_name)
-    return render(request,'cave_description.html', locals())
+    return render_with_context(request,'cave_description.html', locals())
 
 def get_entrances(request, caveslug):
     cave = Cave.objects.get(caveslug__slug = caveslug)
-    return render(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})
+    return render_with_context(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})
 
 def get_qms(request, caveslug):
     cave = Cave.objects.get(caveslug__slug = caveslug)
-    return render(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})
+    return render_with_context(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})
 
 areanames = [
     #('', 'Location unclear'),
@@ -312,7 +313,7 @@ def prospecting(request):
         caves = list(a.cave_set.all())
         caves.sort(caveCmp)
         areas.append((name, a, caves))
-    return render(request, 'prospecting.html', {"areas": areas})
+    return render_with_context(request, 'prospecting.html', {"areas": areas})
 
 # Parameters for big map and zoomed subarea maps:
 # big map first (zoom factor ignored)
@@ -1,4 +1,4 @@
-from django.shortcuts import render_to_response, render
+from django.shortcuts import render_to_response
 from troggle.core.models import Expedition, Person, PersonExpedition, PersonTrip, LogbookEntry, SurvexBlock
 import troggle.core.models as models
 import troggle.settings as settings
@@ -9,6 +9,7 @@ from troggle.core.forms import getTripForm#, get_name, PersonForm
 from django.core.urlresolvers import reverse
 from django.http import HttpResponseRedirect, HttpResponse
 from django.template import Context, loader
+from utils import render_with_context
 import os.path
 import troggle.parsers.logbooks as logbookparsers
 from django.template.defaultfilters import slugify
@@ -54,7 +55,7 @@ def personindex(request):
         if person.bisnotable():
             notablepersons.append(person)
 
-    return render(request,'personindex.html', {'persons': persons, 'personss':personss, 'notablepersons':notablepersons})
+    return render_with_context(request,'personindex.html', {'persons': persons, 'personss':personss, 'notablepersons':notablepersons, })
 
 
 def expedition(request, expeditionname):
@@ -74,9 +75,10 @@ def expedition(request, expeditionname):
         prow.append(pcell)
         personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})
 
+    message = ""
     if "reload" in request.GET:
-        LoadLogbookForExpedition(this_expedition)
-    return render(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'settings':settings, 'dateditems': dateditems })
+        message = LoadLogbookForExpedition(this_expedition)
+    return render_with_context(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
 
 def get_absolute_url(self):
     return ('expedition', (expedition.year))
@@ -101,13 +103,13 @@ def person(request, first_name='', last_name='', ):
             this_person.save()
             return HttpResponseRedirect(reverse('profiles_select_profile'))
 
-    return render(request,'person.html', {'person': this_person, })
+    return render_with_context(request,'person.html', {'person': this_person, })
 
 
 def GetPersonChronology(personexpedition):
     res = { }
     for persontrip in personexpedition.persontrip_set.all():
-        a = res.setdefault(persontrip.logbook_entry.date, { })
+        a = res.setdefault(persontrip.date, { })
         a.setdefault("persontrips", [ ]).append(persontrip)
 
     for personrole in personexpedition.survexpersonrole_set.all():
@@ -134,17 +136,17 @@ def personexpedition(request, first_name='', last_name='', year=''):
     this_expedition = Expedition.objects.get(year=year)
     personexpedition = person.personexpedition_set.get(expedition=this_expedition)
     personchronology = GetPersonChronology(personexpedition)
-    return render(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})
+    return render_with_context(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})
 
 
 def logbookentry(request, date, slug):
     this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
 
     if len(this_logbookentry)>1:
-        return render(request, 'object_list.html',{'object_list':this_logbookentry})
+        return render_with_context(request, 'object_list.html',{'object_list':this_logbookentry})
     else:
         this_logbookentry=this_logbookentry[0]
-        return render(request, 'logbookentry.html', {'logbookentry': this_logbookentry})
+        return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})
 
 
 def logbookSearch(request, extra):
@@ -155,14 +157,14 @@ def logbookSearch(request, extra):
         entry_query = search.get_query(query_string, ['text','title',])
         found_entries = LogbookEntry.objects.filter(entry_query)
 
-    return render(request,'logbooksearch.html',
+    return render_with_context(request,'logbooksearch.html',
                   { 'query_string': query_string, 'found_entries': found_entries, })
                   #context_instance=RequestContext(request))
 
 def personForm(request,pk):
     person=Person.objects.get(pk=pk)
     form=PersonForm(instance=person)
-    return render(request,'personform.html', {'form':form,})
+    return render_with_context(request,'personform.html', {'form':form,})
 
 
 def experimental(request):
@@ -179,7 +181,7 @@ def experimental(request):
 
     survexlegs = models.SurvexLeg.objects.all()
     totalsurvexlength = sum([survexleg.tape for survexleg in survexlegs])
-    return render(request, 'experimental.html', { "nsurvexlegs":len(survexlegs), "totalsurvexlength":totalsurvexlength, "legsbyexpo":legsbyexpo })
+    return render_with_context(request, 'experimental.html', { "nsurvexlegs":len(survexlegs), "totalsurvexlength":totalsurvexlength, "legsbyexpo":legsbyexpo })
 
 @login_required_if_public
 def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
@@ -238,7 +240,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
         tripForm = TripForm() # An unbound form
         personTripFormSet = PersonTripFormSet()
 
-    return render(request, 'newlogbookentry.html', {
+    return render_with_context(request, 'newlogbookentry.html', {
         'tripForm': tripForm,
         'personTripFormSet': personTripFormSet,
@@ -260,8 +262,9 @@ def delLogbookEntry(lbe):
 
 def get_people(request, expeditionslug):
     exp = Expedition.objects.get(year = expeditionslug)
-    return render(request,'options.html', {"items": [(pe.slug, pe.name) for pe in exp.personexpedition_set.all()]})
+    return render_with_context(request,'options.html', {"items": [(pe.slug, pe.name) for pe in exp.personexpedition_set.all()]})
 
 def get_logbook_entries(request, expeditionslug):
     exp = Expedition.objects.get(year = expeditionslug)
-    return render(request,'options.html', {"items": [(le.slug, "%s - %s" % (le.date, le.title)) for le in exp.logbookentry_set.all()]})
+    return render_with_context(request,'options.html', {"items": [(le.slug, "%s - %s" % (le.date, le.title)) for le in exp.logbookentry_set.all()]})
@@ -4,11 +4,11 @@ from django.conf import settings
 from django import forms
 from django.template import loader, Context
 from django.db.models import Q
-from django.shortcuts import render
 import databaseReset
 import re
 from django.http import HttpResponse, HttpResponseRedirect
 from django.core.urlresolvers import reverse
+from utils import render_with_context
 from troggle.core.models import *
 from troggle.helper import login_required_if_public
@@ -21,18 +21,18 @@ def stats(request):
     statsDict['caveCount'] = int(Cave.objects.count())
     statsDict['personCount'] = int(Person.objects.count())
     statsDict['logbookEntryCount'] = int(LogbookEntry.objects.count())
-    return render(request,'statistics.html', statsDict)
+    return render_with_context(request,'statistics.html', statsDict)
 
 def frontpage(request):
     if request.user.is_authenticated():
-        return render(request,'tasks.html')
+        return render_with_context(request,'tasks.html')
 
     expeditions = Expedition.objects.order_by("-year")
     logbookentry = LogbookEntry
     cave = Cave
     photo = DPhoto
     from django.contrib.admin.templatetags import log
-    return render(request,'frontpage.html', locals())
+    return render_with_context(request,'frontpage.html', locals())
 
 def todo(request):
     message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
@@ -45,7 +45,7 @@ def todo(request):
 
     expeditions = Expedition.objects.order_by("-year")
     totallogbookentries = LogbookEntry.objects.count()
-    return render(request,'index.html', {'expeditions':expeditions, 'all':'all', 'totallogbookentries':totallogbookentries, "message":message})
+    return render_with_context(request,'index.html', {'expeditions':expeditions, 'all':'all', 'totallogbookentries':totallogbookentries, "message":message})
 
 
 def controlPanel(request):
@@ -59,27 +59,27 @@ def controlPanel(request):
             databaseReset.make_dirs()
             for item in importlist:
                 if item in request.POST:
-                    print("running"+ " databaseReset."+item+"()")
-                    exec("databaseReset."+item+"()")
+                    print "running"+ " databaseReset."+item+"()"
+                    exec "databaseReset."+item+"()"
                     jobs_completed.append(item)
         else:
             if request.user.is_authenticated(): #The user is logged in, but is not a superuser.
-                return render(request,'controlPanel.html', {'caves':Cave.objects.all(),'error':'You must be a superuser to use that feature.'})
+                return render_with_context(request,'controlPanel.html', {'caves':Cave.objects.all(),'error':'You must be a superuser to use that feature.'})
             else:
                 return HttpResponseRedirect(reverse('auth_login'))
 
-    return render(request,'controlPanel.html', {'caves':Cave.objects.all(),'expeditions':Expedition.objects.all(),'jobs_completed':jobs_completed})
+    return render_with_context(request,'controlPanel.html', {'caves':Cave.objects.all(),'expeditions':Expedition.objects.all(),'jobs_completed':jobs_completed})
 
 def downloadCavetab(request):
     from export import tocavetab
-    response = HttpResponse(content_type='text/csv')
+    response = HttpResponse(mimetype='text/csv')
     response['Content-Disposition'] = 'attachment; filename=CAVETAB2.CSV'
     tocavetab.writeCaveTab(response)
     return response
 
 def downloadSurveys(request):
     from export import tosurveys
-    response = HttpResponse(content_type='text/csv')
+    response = HttpResponse(mimetype='text/csv')
     response['Content-Disposition'] = 'attachment; filename=Surveys.csv'
     tosurveys.writeCaveTab(response)
     return response
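controlPanel above dispatches on a user-supplied name with exec; the usual safer idiom is getattr against the fixed whitelist the view already has. A sketch of that substitution, keeping the names from the hunk:

```python
import databaseReset

def run_import_jobs(request, importlist):
    jobs_completed = []
    for item in importlist:  # importlist is the fixed whitelist of job names
        if item in request.POST:
            # Look the function up by name instead of exec'ing a string;
            # only names present in importlist can ever be called.
            getattr(databaseReset, item)()
            jobs_completed.append(item)
    return jobs_completed
```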
@@ -94,19 +94,20 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
         logbook_entries=queryset
         filename='logbook'
     else:
         response = HttpResponse(content_type='text/plain')
         return response(r"Error: Logbook downloader doesn't know what year you want")
 
     if 'year' in request.GET:
         year=request.GET['year']
     if 'extension' in request.GET:
         extension=request.GET['extension']
 
     if extension =='txt':
-        response = HttpResponse(content_type='text/plain')
+        response = HttpResponse(mimetype='text/plain')
         style='2008'
     elif extension == 'html':
-        response = HttpResponse(content_type='text/html')
+        response = HttpResponse(mimetype='text/html')
         style='2005'
 
     template='logbook'+style+'style.'+extension
@@ -123,11 +124,11 @@ def downloadQMs(request):
     try:
         cave=Cave.objects.get(kataster_number=request.GET['cave_id'])
     except Cave.DoesNotExist:
-        cave=Cave.objects.get(name=request.GET['cave_id'])
+        cave=Cave.objects.get(name=cave_id)
 
     from export import toqms
 
-    response = HttpResponse(content_type='text/csv')
+    response = HttpResponse(mimetype='text/csv')
     response['Content-Disposition'] = 'attachment; filename=qm.csv'
     toqms.writeQmTable(response,cave)
     return response
@@ -135,7 +136,7 @@ def downloadQMs(request):
 def ajax_test(request):
     post_text = request.POST['post_data']
     return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
-                        content_type="application/json")
+                        mimetype="application/json")
 
 def eyecandy(request):
     return
@@ -143,9 +144,9 @@ def eyecandy(request):
 def ajax_QM_number(request):
     if request.method=='POST':
         cave=Cave.objects.get(id=request.POST['cave'])
-        print(cave)
+        print cave
         exp=Expedition.objects.get(pk=request.POST['year'])
-        print(exp)
+        print exp
         res=cave.new_QM_number(exp.year)
 
     return HttpResponse(res)
@@ -166,7 +167,7 @@ def logbook_entry_suggestions(request):
     #unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
     unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]
 
-    print(unwiki_QMs)
+    print unwiki_QMs
     for qm in unwiki_QMs:
         #try:
         if len(qm['year'])==2:
@@ -179,7 +180,7 @@ def logbook_entry_suggestions(request):
             try:
                 lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
             except:
-                print("failed to get placeholder for year "+str(qm['year']))
+                print "failed to get placeholder for year "+str(qm['year'])
 
             temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
             temp_QM.grade=qm['grade']
@@ -187,7 +188,7 @@ def logbook_entry_suggestions(request):
         #except:
             #print 'failed'
 
-    print(unwiki_QMs)
+    print unwiki_QMs
 
 
     #wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
@@ -198,10 +199,10 @@ def logbook_entry_suggestions(request):
     #for qm in wikilink_QMs:
         #Try to look up the QM.
 
-    print('got 208')
+    print 'got 208'
     any_suggestions=True
-    print('got 210')
-    return render(request,'suggestions.html',
+    print 'got 210'
+    return render_with_context(request,'suggestions.html',
         {
         'unwiki_QMs':unwiki_QMs,
         'any_suggestions':any_suggestions
@@ -261,7 +262,7 @@ def newFile(request, pslug = None):
 #    else:
 #        fileform = UploadFileForm() # An unbound form
 
-    return render(request, 'editfile.html', {
+    return render_with_context(request, 'editfile.html', {
         'fileForm': fileform,
 
     })
@@ -1,7 +1,6 @@
 from django import forms
 from django.http import HttpResponseRedirect, HttpResponse
-from django.shortcuts import render_to_response, render
-from django.core.context_processors import csrf
+from django.shortcuts import render_to_response
 from django.http import HttpResponse, Http404
 import re
 import os
@@ -173,14 +172,13 @@ def svx(request, survex_file):
             'difflist': difflist,
             'logmessage':logmessage,
             'form':form}
-    vmap.update(csrf(request))
     if outputtype == "ajax":
         return render_to_response('svxfiledifflistonly.html', vmap)
     return render_to_response('svxfile.html', vmap)
 
 def svxraw(request, survex_file):
     svx = open(os.path.join(settings.SURVEX_DATA, survex_file+".svx"), "rb")
-    return HttpResponse(svx, content_type="text")
+    return HttpResponse(svx, mimetype="text")
 
 
 # The cavern running function
@@ -195,20 +193,20 @@ def threed(request, survex_file):
     process(survex_file)
     try:
         threed = open(settings.SURVEX_DATA + survex_file + ".3d", "rb")
-        return HttpResponse(threed, content_type="model/3d")
+        return HttpResponse(threed, mimetype="model/3d")
     except:
         log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
-        return HttpResponse(log, content_type="text")
+        return HttpResponse(log, mimetype="text")
 
 def log(request, survex_file):
     process(survex_file)
     log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
-    return HttpResponse(log, content_type="text")
+    return HttpResponse(log, mimetype="text")
 
 def err(request, survex_file):
     process(survex_file)
     err = open(settings.SURVEX_DATA + survex_file + ".err", "rb")
-    return HttpResponse(err, content_type="text")
+    return HttpResponse(err, mimetype="text")
@@ -28,7 +28,8 @@ def reload_db():
         cursor.execute("CREATE DATABASE %s" % databasename)
         cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
         cursor.execute("USE %s" % databasename)
-    management.call_command('syncdb', interactive=False)
+    management.call_command('migrate', interactive=False)
+    #management.call_command('syncdb', interactive=False)
    user = User.objects.create_user(expouser, expouseremail, expouserpass)
    user.is_staff = True
    user.is_superuser = True
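migrate supersedes syncdb, which Django deprecated in 1.7 and removed in 1.9; call_command drives either from scripts like databaseReset.py. A minimal sketch:

```python
from django.core import management

def rebuild_schema():
    # 'migrate' is the Django >= 1.7 replacement for 'syncdb';
    # interactive=False suppresses prompts during scripted resets.
    management.call_command('migrate', interactive=False)
```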
@@ -42,7 +43,7 @@ def make_dirs():
 
 def import_caves():
     import parsers.caves
-    print("Importing Caves")
+    print("importing caves")
     parsers.caves.readcaves()
 
 def import_people():
@@ -81,15 +82,6 @@ def import_tunnelfiles():
     parsers.surveys.LoadTunnelFiles()
 
 
-def rebuild():
-    """ Wipe the troggle database and sets up structure but imports nothing
-    """
-    reload_db()
-    make_dirs()
-    pageredirects()
-
-
 
 def reset():
     """ Wipe the troggle database and import everything from legacy data
     """
@@ -99,9 +91,9 @@ def reset():
     import_caves()
     import_people()
     import_surveyscans()
-    import_survex()
     import_logbooks()
     import_QMs()
+    import_survex()
     try:
         import_tunnelfiles()
     except:
@@ -118,10 +110,10 @@ def import_auto_logbooks():
     for lbe in troggle.core.models.LogbookEntry.objects.all():
         lbe.delete()
     for expedition in troggle.core.models.Expedition.objects.all():
-        directory = os.path.join(settings.EXPOWEB,
-                                 "years",
-                                 expedition.year,
-                                 "autologbook")
+        directory = os.path.join(settings.EXPOWEB,
+                                 "years",
+                                 expedition.year,
+                                 "autologbook")
         for root, dirs, filenames in os.walk(directory):
             for filename in filenames:
                 print(os.path.join(root, filename))
@@ -168,17 +160,22 @@ def pageredirects():
         f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
         f.save()
 
+def writeCaves():
+    for cave in Cave.objects.all():
+        cave.writeDataFile()
+    for entrance in Entrance.objects.all():
+        entrance.writeDataFile()
+
 def usage():
     print("""Usage is 'python databaseReset.py <command>'
           where command is:
-          rebuild - this reloads database and set up directories & redirects only
-          reset - this is normal usage, clear database and reread everything from files - time-consuming
-          desc - NOT WORKING: function resetdesc() missing
-          caves - read in the caves
-          logbooks - read in the logbooks, but read in people first
-          autologbooks - read in autologbooks
-          dumplogbooks - write out autologbooks (not working?)
-          people - read in the people from folk.csv
+          reset - this is normal usage, clear database and reread everything
+          desc
+          caves - read in the caves
+          logbooks - read in the logbooks
+          autologbooks
+          dumplogbooks
+          people
          QMs - read in the QM files
          resetend
          scans - read in the scanned surveynotes
@@ -186,6 +183,7 @@ def usage():
          survexpos
          surveys
          tunnel - read in the Tunnel files
+         writeCaves
          """)
 
 if __name__ == "__main__":
@@ -198,6 +196,9 @@ if __name__ == "__main__":
     elif "scans" in sys.argv:
         import_surveyscans()
     elif "caves" in sys.argv:
+        reload_db()
+        make_dirs()
+        pageredirects()
         import_caves()
     elif "people" in sys.argv:
         import_people()
@@ -218,14 +219,14 @@ if __name__ == "__main__":
         import_descriptions()
         parse_descriptions()
     elif "survex" in sys.argv:
-        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        #management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import_survex()
     elif "survexpos" in sys.argv:
-        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import parsers.survex
         parsers.survex.LoadPos()
     elif "logbooks" in sys.argv:
-        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import_logbooks()
     elif "autologbooks" in sys.argv:
         import_auto_logbooks()
@@ -237,10 +238,10 @@ if __name__ == "__main__":
         import_surveys()
     elif "help" in sys.argv:
         usage()
-    elif "rebuild" in sys.argv:
-        rebuild()
+    elif "reload_db" in sys.argv:
+        reload_db()
     else:
         print("%s not recognised" % sys.argv)
         usage()
debian/serversetup (vendored file, 85 lines deleted)

@@ -1,85 +0,0 @@
Instructions for setting up new expo debian server/VM
For Debian Stretch, June 2019.

adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
apt install python-django apache2 mysql-server survex make rsync
apt install libjs-openlayers make
apt install git mercurial mercurial-server?

for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl

obsolete-packages:
bins (move to jigl?) (for photos)
python-django 1.7
backports: survex therion
not-packaged: caveview

make these dirs available at top documentroot:
cuccfiles
expofiles
loser (link to repo)
tunneldata (link to repo)
troggle (link to repo)
expoweb (link to repo)
boc/boe

config
containing:

setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc
a2ensite expo
a2enmod cgid

Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
AddHandler cgi-script .pl
SetHandler cgi-script
Options +ExecCGI
Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'

Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
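The matching entry in troggle's localsettings.py would point Django at this database; a minimal sketch ('somepassword' is the placeholder from the GRANT statement above, and must match whatever was actually used):

```python
# Sketch of the Django 1.7 database settings matching the GRANT above.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'troggle',
        'USER': 'expo',
        'PASSWORD': 'somepassword',  # placeholder from the GRANT statement
        'HOST': 'localhost',
        'PORT': '',  # empty string selects the default MySQL port
    }
}
```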

install django:
sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi

python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
(both modified for stretch/python2). packages under /home/wookey/packages/

need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.

On stretch the django 1.10 is no use so get rid of that:
apt remove python3-django python-django python-django-common python-django-doc

Then replace with django 1.7 (Needs to be built for stretch)
apt install python-django python-django-common python-django-doc
apt install python-django-registration python-django-imagekit python-django-tinymce

then hold them to stop them being upgraded by unattended upgrades:
echo "python-django hold" | sudo dpkg --set-selections
echo "python-django-common hold" | sudo dpkg --set-selections
echo "python-django-doc hold" | sudo dpkg --set-selections

#troggle has to have a writable logfile otherwise the website explodes
# 500 error on the server, and apache error log has non-reentrant errors
create /var/log/troggle/troggle.log
chown www-data:adm /var/log/troggle/troggle.log
chmod 660 /var/log/troggle/troggle.log
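On the Django side, a LOGGING setting pointing at that file might look like the sketch below (handler and logger names are illustrative; troggle's actual settings may differ):

```python
# Illustrative Django LOGGING config writing to the logfile created above.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'troggle_file': {
            'class': 'logging.FileHandler',
            'filename': '/var/log/troggle/troggle.log',
        },
    },
    'loggers': {
        'troggle': {
            'handlers': ['troggle_file'],
            'level': 'INFO',
        },
    },
}
```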
@@ -40,6 +40,7 @@ mkdir -p expofiles/surveyscans

To start the containers run
```bash
$ cd ~/expo/troggle/docker
$ docker-compose up
```
You will now have a working troggle but with no data. To import the data you need to access the container and run

@@ -6,4 +6,3 @@ django-imagekit
Image
django-tinymce==2.7.0
smartencoding
unidecode
@@ -1,6 +1,6 @@
import troggle.settings as settings
from troggle.helper import login_required_if_public
from django.shortcuts import render
from utils import render_with_context

from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.core.urlresolvers import reverse
@@ -38,7 +38,7 @@ def flatpage(request, path):
        print("flat path noinfo", path)
        return HttpResponseRedirect(reverse("auth_login") + '?next=%s' % request.path)

    if path.endswith("/") or path == "":
    if path.endswith("/") or path == "":
        try:
            o = open(os.path.normpath(settings.EXPOWEB + path + "index.html"), "rb")
            path = path + "index.html"
@@ -47,13 +47,13 @@ def flatpage(request, path):
            o = open(os.path.normpath(settings.EXPOWEB + path + "index.htm"), "rb")
            path = path + "index.htm"
        except IOError:
            return render(request, 'pagenotfound.html', {'path': path})
            return render_with_context(request, 'pagenotfound.html', {'path': path})
    else:
        try:
            filetobeopened = os.path.normpath(settings.EXPOWEB + path)
            o = open(filetobeopened, "rb")
        except IOError:
            return render(request, 'pagenotfound.html', {'path': path})
            return render_with_context(request, 'pagenotfound.html', {'path': path})
    if path.endswith(".htm") or path.endswith(".html"):
        html = o.read()

@@ -75,7 +75,7 @@ def flatpage(request, path):
        if re.search(r"iso-8859-1", html):
            body = unicode(body, "iso-8859-1")
        body.strip
        return render(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm"), 'has_menu': has_menu})
        return render_with_context(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm"), 'has_menu': has_menu})
    else:
        return HttpResponse(o.read(), content_type=getmimetype(path))

@@ -160,9 +160,9 @@ def editflatpage(request, path):
        flatpageForm = FlatPageForm({"html": body, "title": title})
    else:
        flatpageForm = FlatPageForm()
    return render(request, 'editflatpage.html', {'path': path, 'form': flatpageForm, })
    return render_with_context(request, 'editflatpage.html', {'path': path, 'form': flatpageForm, })

class FlatPageForm(forms.Form):
    title = forms.CharField(widget=forms.TextInput(attrs={'size':'60'}))

    html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 20}))
    html = forms.CharField(widget=forms.Textarea())
@@ -1,13 +0,0 @@
"""

Django ImageKit

Author: Justin Driscoll <justin.driscoll@gmail.com>
Version: 0.2

"""
VERSION = "0.2"
@@ -1,21 +0,0 @@
""" Default ImageKit configuration """

from imagekit.specs import ImageSpec
from imagekit import processors

class ResizeThumbnail(processors.Resize):
    width = 100
    height = 50
    crop = True

class EnhanceSmall(processors.Adjustment):
    contrast = 1.2
    sharpness = 1.1

class SampleReflection(processors.Reflection):
    size = 0.5
    background_color = "#000000"

class DjangoAdminThumbnail(ImageSpec):
    access_as = 'admin_thumbnail'
    processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
@@ -1,17 +0,0 @@
# Required PIL classes may or may not be available from the root namespace
# depending on the installation method used.
try:
    import Image
    import ImageFile
    import ImageFilter
    import ImageEnhance
    import ImageColor
except ImportError:
    try:
        from PIL import Image
        from PIL import ImageFile
        from PIL import ImageFilter
        from PIL import ImageEnhance
        from PIL import ImageColor
    except ImportError:
        raise ImportError("ImageKit was unable to import the Python Imaging Library. Please confirm it's installed and available on your current Python path.")
@@ -1 +0,0 @@

@@ -1 +0,0 @@
@@ -1,38 +0,0 @@
from django.db.models.loading import cache
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec


class Command(BaseCommand):
    help = ('Clears all ImageKit cached files.')
    args = '[apps]'
    requires_model_validation = True
    can_import_settings = True

    def handle(self, *args, **options):
        return flush_cache(args, options)

def flush_cache(apps, options):
    """ Clears the image cache

    """
    apps = [a.strip(',') for a in apps]
    if apps:
        print 'Flushing cache for %s...' % ', '.join(apps)
    else:
        print 'Flushing caches...'

    for app_label in apps:
        app = cache.get_app(app_label)
        models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]

        for model in models:
            for obj in model.objects.all():
                for spec in model._ik.specs:
                    prop = getattr(obj, spec.name(), None)
                    if prop is not None:
                        prop._delete()
                    if spec.pre_cache:
                        prop._create()
@@ -1,136 +0,0 @@
import os
from datetime import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.db.models.base import ModelBase
from django.utils.translation import ugettext_lazy as _

from imagekit import specs
from imagekit.lib import *
from imagekit.options import Options
from imagekit.utils import img_to_fobj

# Modify image file buffer size.
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)

# Choice tuples for specifying the crop origin.
# These are provided for convenience.
CROP_HORZ_CHOICES = (
    (0, _('left')),
    (1, _('center')),
    (2, _('right')),
)

CROP_VERT_CHOICES = (
    (0, _('top')),
    (1, _('center')),
    (2, _('bottom')),
)


class ImageModelBase(ModelBase):
    """ ImageModel metaclass

    This metaclass parses IKOptions and loads the specified specification
    module.

    """
    def __init__(cls, name, bases, attrs):
        parents = [b for b in bases if isinstance(b, ImageModelBase)]
        if not parents:
            return
        user_opts = getattr(cls, 'IKOptions', None)
        opts = Options(user_opts)
        try:
            module = __import__(opts.spec_module, {}, {}, [''])
        except ImportError:
            raise ImportError('Unable to load imagekit config module: %s' % \
                opts.spec_module)
        for spec in [spec for spec in module.__dict__.values() \
                     if isinstance(spec, type) \
                     and issubclass(spec, specs.ImageSpec) \
                     and spec != specs.ImageSpec]:
            setattr(cls, spec.name(), specs.Descriptor(spec))
            opts.specs.append(spec)
        setattr(cls, '_ik', opts)


class ImageModel(models.Model):
    """ Abstract base class implementing all core ImageKit functionality

    Subclasses of ImageModel are augmented with accessors for each defined
    image specification and can override the inner IKOptions class to customize
    storage locations and other options.

    """
    __metaclass__ = ImageModelBase

    class Meta:
        abstract = True

    class IKOptions:
        pass

    def admin_thumbnail_view(self):
        if not self._imgfield:
            return None
        prop = getattr(self, self._ik.admin_thumbnail_spec, None)
        if prop is None:
            return 'An "%s" image spec has not been defined.' % \
                self._ik.admin_thumbnail_spec
        else:
            if hasattr(self, 'get_absolute_url'):
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self.get_absolute_url(), prop.url)
            else:
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self._imgfield.url, prop.url)
    admin_thumbnail_view.short_description = _('Thumbnail')
    admin_thumbnail_view.allow_tags = True

    @property
    def _imgfield(self):
        return getattr(self, self._ik.image_field)

    def _clear_cache(self):
        for spec in self._ik.specs:
            prop = getattr(self, spec.name())
            prop._delete()

    def _pre_cache(self):
        for spec in self._ik.specs:
            if spec.pre_cache:
                prop = getattr(self, spec.name())
                prop._create()

    def save(self, clear_cache=True, *args, **kwargs):
        is_new_object = self._get_pk_val is None
        super(ImageModel, self).save(*args, **kwargs)
        if is_new_object:
            clear_cache = False
            spec = self._ik.preprocessor_spec
            if spec is not None:
                newfile = self._imgfield.storage.open(str(self._imgfield))
                img = Image.open(newfile)
                img = spec.process(img, None)
                format = img.format or 'JPEG'
                if format != 'JPEG':
                    imgfile = img_to_fobj(img, format)
                else:
                    imgfile = img_to_fobj(img, format,
                                          quality=int(spec.quality),
                                          optimize=True)
                content = ContentFile(imgfile.read())
                newfile.close()
                name = str(self._imgfield)
                self._imgfield.storage.delete(name)
                self._imgfield.storage.save(name, content)
        if clear_cache and self._imgfield != '':
            self._clear_cache()
            self._pre_cache()

    def delete(self):
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
        self._clear_cache()
        models.Model.delete(self)
@@ -1,23 +0,0 @@
# Imagekit options
from imagekit import processors
from imagekit.specs import ImageSpec


class Options(object):
    """ Class handling per-model imagekit options

    """
    image_field = 'image'
    crop_horz_field = 'crop_horz'
    crop_vert_field = 'crop_vert'
    preprocessor_spec = None
    cache_dir = 'cache'
    save_count_as = None
    cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
    admin_thumbnail_spec = 'admin_thumbnail'
    spec_module = 'imagekit.defaults'

    def __init__(self, opts):
        for key, value in opts.__dict__.iteritems():
            setattr(self, key, value)
        self.specs = []
@@ -1,134 +0,0 @@
""" Imagekit Image "ImageProcessors"

A processor defines a set of class variables (optional) and a
class method named "process" which processes the supplied image using
the class properties as settings. The process method can be overridden as well, allowing the user to define their
own effects/processes entirely.

"""
from imagekit.lib import *

class ImageProcessor(object):
    """ Base image processor class """
    @classmethod
    def process(cls, image, obj=None):
        return image


class Adjustment(ImageProcessor):
    color = 1.0
    brightness = 1.0
    contrast = 1.0
    sharpness = 1.0

    @classmethod
    def process(cls, image, obj=None):
        for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
            factor = getattr(cls, name.lower())
            if factor != 1.0:
                image = getattr(ImageEnhance, name)(image).enhance(factor)
        return image


class Reflection(ImageProcessor):
    background_color = '#FFFFFF'
    size = 0.0
    opacity = 0.6

    @classmethod
    def process(cls, image, obj=None):
        # convert bgcolor string to rgb value
        background_color = ImageColor.getrgb(cls.background_color)
        # copy original image and flip the orientation
        reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
        # create a new image filled with the bgcolor the same size
        background = Image.new("RGB", image.size, background_color)
        # calculate our alpha mask
        start = int(255 - (255 * cls.opacity)) # The start of our gradient
        steps = int(255 * cls.size) # the number of intermediate values
        increment = (255 - start) / float(steps)
        mask = Image.new('L', (1, 255))
        for y in range(255):
            if y < steps:
                val = int(y * increment + start)
            else:
                val = 255
            mask.putpixel((0, y), val)
        alpha_mask = mask.resize(image.size)
        # merge the reflection onto our background color using the alpha mask
        reflection = Image.composite(background, reflection, alpha_mask)
        # crop the reflection
        reflection_height = int(image.size[1] * cls.size)
        reflection = reflection.crop((0, 0, image.size[0], reflection_height))
        # create new image sized to hold both the original image and the reflection
        composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
        # paste the original image and the reflection into the composite image
        composite.paste(image, (0, 0))
        composite.paste(reflection, (0, image.size[1]))
        # return the image complete with reflection effect
        return composite


class Resize(ImageProcessor):
    width = None
    height = None
    crop = False
    upscale = False

    @classmethod
    def process(cls, image, obj=None):
        cur_width, cur_height = image.size
        if cls.crop:
            crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
            crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
            ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
            resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
            crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
            x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
            box_left, box_right = {
                0: (0, cls.width),
                1: (int(x_diff), int(x_diff + cls.width)),
                2: (int(crop_x), int(resize_x)),
            }[crop_horz]
            box_upper, box_lower = {
                0: (0, cls.height),
                1: (int(y_diff), int(y_diff + cls.height)),
                2: (int(crop_y), int(resize_y)),
            }[crop_vert]
            box = (box_left, box_upper, box_right, box_lower)
            image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
        else:
            if not cls.width is None and not cls.height is None:
                ratio = min(float(cls.width)/cur_width,
                            float(cls.height)/cur_height)
            else:
                if cls.width is None:
                    ratio = float(cls.height)/cur_height
                else:
                    ratio = float(cls.width)/cur_width
            new_dimensions = (int(round(cur_width*ratio)),
                              int(round(cur_height*ratio)))
            if new_dimensions[0] > cur_width or \
               new_dimensions[1] > cur_height:
                if not cls.upscale:
                    return image
            image = image.resize(new_dimensions, Image.ANTIALIAS)
        return image


class Transpose(ImageProcessor):
    """ Rotates or flips the image

    Method should be one of the following strings:
    - FLIP_LEFT_RIGHT
    - FLIP_TOP_BOTTOM
    - ROTATE_90
    - ROTATE_270
    - ROTATE_180

    """
    method = 'FLIP_LEFT_RIGHT'

    @classmethod
    def process(cls, image, obj=None):
        return image.transpose(getattr(Image, cls.method))
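As with the other processors, behaviour is chosen by subclassing; for example (an illustrative subclass, not one shipped in this file):

```python
# Illustrative Transpose subclass: rotate images 90 degrees
# instead of the default left/right flip.
class RotateRight(Transpose):
    method = 'ROTATE_90'
```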

@@ -1,119 +0,0 @@
""" ImageKit image specifications

All imagekit specifications must inherit from the ImageSpec class. Models
inheriting from ImageModel will be modified with a descriptor/accessor for each
spec found.

"""
import os
from StringIO import StringIO
from imagekit.lib import *
from imagekit.utils import img_to_fobj
from django.core.files.base import ContentFile

class ImageSpec(object):
    pre_cache = False
    quality = 70
    increment_count = False
    processors = []

    @classmethod
    def name(cls):
        return getattr(cls, 'access_as', cls.__name__.lower())

    @classmethod
    def process(cls, image, obj):
        processed_image = image.copy()
        for proc in cls.processors:
            processed_image = proc.process(processed_image, obj)
        return processed_image


class Accessor(object):
    def __init__(self, obj, spec):
        self._img = None
        self._obj = obj
        self.spec = spec

    def _get_imgfile(self):
        format = self._img.format or 'JPEG'
        if format != 'JPEG':
            imgfile = img_to_fobj(self._img, format)
        else:
            imgfile = img_to_fobj(self._img, format,
                                  quality=int(self.spec.quality),
                                  optimize=True)
        return imgfile

    def _create(self):
        if self._exists():
            return
        # process the original image file
        fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
        fp.seek(0)
        fp = StringIO(fp.read())
        try:
            self._img = self.spec.process(Image.open(fp), self._obj)
            # save the new image to the cache
            content = ContentFile(self._get_imgfile().read())
            self._obj._imgfield.storage.save(self.name, content)
        except IOError:
            pass

    def _delete(self):
        self._obj._imgfield.storage.delete(self.name)

    def _exists(self):
        return self._obj._imgfield.storage.exists(self.name)

    def _basename(self):
        filename, extension = \
            os.path.splitext(os.path.basename(self._obj._imgfield.name))
        return self._obj._ik.cache_filename_format % \
            {'filename': filename,
             'specname': self.spec.name(),
             'extension': extension.lstrip('.')}

    @property
    def name(self):
        return os.path.join(self._obj._ik.cache_dir, self._basename())

    @property
    def url(self):
        self._create()
        if self.spec.increment_count:
            fieldname = self._obj._ik.save_count_as
            if fieldname is not None:
                current_count = getattr(self._obj, fieldname)
                setattr(self._obj, fieldname, current_count + 1)
                self._obj.save(clear_cache=False)
        return self._obj._imgfield.storage.url(self.name)

    @property
    def file(self):
        self._create()
        return self._obj._imgfield.storage.open(self.name)

    @property
    def image(self):
        if self._img is None:
            self._create()
            if self._img is None:
                self._img = Image.open(self.file)
        return self._img

    @property
    def width(self):
        return self.image.size[0]

    @property
    def height(self):
        return self.image.size[1]


class Descriptor(object):
    def __init__(self, spec):
        self._spec = spec

    def __get__(self, obj, type=None):
        return Accessor(obj, self._spec)
@@ -1,86 +0,0 @@
import os
import tempfile
import unittest
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase

from imagekit import processors
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec
from imagekit.lib import Image


class ResizeToWidth(processors.Resize):
    width = 100

class ResizeToHeight(processors.Resize):
    height = 100

class ResizeToFit(processors.Resize):
    width = 100
    height = 100

class ResizeCropped(ResizeToFit):
    crop = ('center', 'center')

class TestResizeToWidth(ImageSpec):
    access_as = 'to_width'
    processors = [ResizeToWidth]

class TestResizeToHeight(ImageSpec):
    access_as = 'to_height'
    processors = [ResizeToHeight]

class TestResizeCropped(ImageSpec):
    access_as = 'cropped'
    processors = [ResizeCropped]

class TestPhoto(ImageModel):
    """ Minimal ImageModel class for testing """
    image = models.ImageField(upload_to='images')

    class IKOptions:
        spec_module = 'imagekit.tests'


class IKTest(TestCase):
    """ Base TestCase class """
    def setUp(self):
        # create a test image using tempfile and PIL
        self.tmp = tempfile.TemporaryFile()
        Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
        self.tmp.seek(0)
        self.p = TestPhoto()
        self.p.image.save(os.path.basename('test.jpg'),
                          ContentFile(self.tmp.read()))
        self.p.save()
        # destroy temp file
        self.tmp.close()

    def test_setup(self):
        self.assertEqual(self.p.image.width, 800)
        self.assertEqual(self.p.image.height, 600)

    def test_to_width(self):
        self.assertEqual(self.p.to_width.width, 100)
        self.assertEqual(self.p.to_width.height, 75)

    def test_to_height(self):
        self.assertEqual(self.p.to_height.width, 133)
        self.assertEqual(self.p.to_height.height, 100)

    def test_crop(self):
        self.assertEqual(self.p.cropped.width, 100)
        self.assertEqual(self.p.cropped.height, 100)

    def test_url(self):
        tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
        self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)

    def tearDown(self):
        # make sure image file is deleted
        path = self.p.image.path
        self.p.delete()
        self.failIf(os.path.isfile(path))
@@ -1,15 +0,0 @@
""" ImageKit utility functions """

import tempfile

def img_to_fobj(img, format, **kwargs):
    tmp = tempfile.TemporaryFile()
    if format != 'JPEG':
        try:
            img.save(tmp, format, **kwargs)
            tmp.seek(0)
            return tmp  # was a bare "return", which returned None
        except KeyError:
            pass
    img.save(tmp, format, **kwargs)
    tmp.seek(0)
    return tmp
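A sketch of how this helper round-trips an image (Python 2 era code; PIL import path as in lib.py above):

```python
# Sketch: round-trip a PIL image through img_to_fobj.
from PIL import Image

img = Image.new('RGB', (100, 50))
fobj = img_to_fobj(img, 'PNG')   # file-like object, rewound to the start
print(Image.open(fobj).size)     # (100, 50)
```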

@@ -1,5 +1,5 @@
import sys
# These are the local settings for use with the docker compose dev setup. It is imported automatically
# link localsettings to this file for use on expo computer in Austria

DATABASES = {
    'default': {
@@ -47,13 +47,13 @@ MEDIA_URL = URL_ROOT + DIR_ROOT + 'site_media/'
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'

STATIC_URL = "/static/"
STATIC_ROOT = "/expo/static"
STATIC_URL = URL_ROOT
STATIC_ROOT = DIR_ROOT

JSLIB_URL = URL_ROOT + 'javascript/'

TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'

TEMPLATE_DIRS = (
    PYTHON_PATH + "templates",

@@ -52,8 +52,8 @@ MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'

JSLIB_URL = URL_ROOT + 'javascript/'

TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + 'tinymce_media/'

TEMPLATE_DIRS = (
    PYTHON_PATH + "templates",
0
modelviz.py
Normal file → Executable file
@@ -17,19 +17,19 @@ def parseCaveQMs(cave,inputFile):
        try:
            steinBr=Cave.objects.get(official_name="Steinbrückenhöhle")
        except Cave.DoesNotExist:
            print("Steinbruckenhoehle is not in the database. Please run parsers.cavetab first.")
            print "Steinbruckenhoehle is not in the database. Please run parsers.cavetab first."
            return
    elif cave=='hauch':
        try:
            hauchHl=Cave.objects.get(official_name="Hauchhöhle")
        except Cave.DoesNotExist:
            print("Hauchhoele is not in the database. Please run parsers.cavetab first.")
            print "Hauchhoele is not in the database. Please run parsers.cavetab first."
            return
    elif cave =='kh':
        try:
            kh=Cave.objects.get(official_name="Kaninchenhöhle")
        except Cave.DoesNotExist:
            print("KH is not in the database. Please run parsers.cavetab first.")
            print "KH is not in the database. Please run parsers.cavetab first."
        parse_KH_QMs(kh, inputFile=inputFile)
    return

@@ -48,7 +48,7 @@ def parseCaveQMs(cave,inputFile):
            elif cave=='hauch':
                placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 234", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":hauchHl})
            if hadToCreate:
                print(cave + " placeholder logbook entry for " + str(year) + " added to database")
                print cave+" placeholder logbook entry for " + str(year) + " added to database"
            QMnum=re.match(r".*?-\d*?-X?(?P<numb>\d*)",line[0]).group("numb")
            newQM = QM()
            newQM.found_by=placeholder
@@ -71,18 +71,19 @@ def parseCaveQMs(cave,inputFile):
                if preexistingQM.new_since_parsing==False: #if the pre-existing QM has not been modified, overwrite it
                    preexistingQM.delete()
                    newQM.save()
                    print("overwriting " + str(preexistingQM) +"\r")
                    print "overwriting " + str(preexistingQM) +"\r",

                else: # otherwise, print that it was ignored
                    print("preserving " + str(preexistingQM) + ", which was edited in admin \r")
                    print "preserving "+ str(preexistingQM) + ", which was edited in admin \r",

            except QM.DoesNotExist: #if there is no pre-existing QM, save the new one
                newQM.save()
                print("QM "+str(newQM) + ' added to database\r')
                print "QM "+str(newQM) + ' added to database\r',

        except KeyError: #check on this one
            continue
        except IndexError:
            print("Index error in " + str(line))
            print "Index error in " + str(line)
            continue

def parse_KH_QMs(kh, inputFile):
@@ -103,7 +104,7 @@ def parse_KH_QMs(kh, inputFile):
            }
        nonLookupArgs={
            'grade':res['grade'],
            'nearest_station_name':res['nearest_station'],
            'nearest_station':res['nearest_station'],
            'location_description':res['description']
            }

@@ -114,4 +115,3 @@ parseCaveQMs(cave='stein',inputFile=r"1623/204/qm.csv")
parseCaveQMs(cave='hauch',inputFile=r"1623/234/qm.csv")
parseCaveQMs(cave='kh', inputFile="1623/161/qmtodo.htm")
#parseCaveQMs(cave='balkonhoehle',inputFile=r"1623/264/qm.csv")
@@ -6,18 +6,16 @@ import re


def readcaves():

    # Clear the cave data issues as we are reloading
    models.DataIssue.objects.filter(parser='caves').delete()

    area_1623 = models.Area.objects.update_or_create(short_name = "1623", parent = None)
    area_1626 = models.Area.objects.update_or_create(short_name = "1626", parent = None)
    print(" - Reading Entrances")
    newArea = models.Area(short_name = "1623", parent = None)
    newArea.save()
    newArea = models.Area(short_name = "1626", parent = None)
    newArea.save()
    print("Reading Entrances")
    #print "list of <Slug> <Filename>"
    for filename in os.walk(settings.ENTRANCEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
        if filename.endswith('.html'):
            readentrance(filename)
    print (" - Reading Caves")
    print ("Reading Caves")
    for filename in os.walk(settings.CAVEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
        if filename.endswith('.html'):
            readcave(filename)
@@ -53,7 +51,7 @@ def readentrance(filename):
    bearings = getXML(entrancecontents, "bearings", maxItems = 1, context = context)
    url = getXML(entrancecontents, "url", maxItems = 1, context = context)
    if len(non_public) == 1 and len(slugs) >= 1 and len(name) >= 1 and len(entrance_description) == 1 and len(explorers) == 1 and len(map_description) == 1 and len(location_description) == 1 and len(approach) == 1 and len(underground_description) == 1 and len(marking) == 1 and len(marking_comment) == 1 and len(findability) == 1 and len(findability_description) == 1 and len(alt) == 1 and len(northing) == 1 and len(easting) == 1 and len(tag_station) == 1 and len(exact_station) == 1 and len(other_station) == 1 and len(other_description) == 1 and len(bearings) == 1 and len(url) == 1:
        e, state = models.Entrance.objects.update_or_create(name = name[0],
        e = models.Entrance(name = name[0],
                            non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
                            entrance_description = entrance_description[0],
                            explorers = explorers[0],
@@ -77,12 +75,14 @@ def readentrance(filename):
                            url = url[0],
                            filename = filename,
                            cached_primary_slug = slugs[0])
        e.save()
        primary = True
        for slug in slugs:
            #print slug, filename
            cs = models.EntranceSlug.objects.update_or_create(entrance = e,
            cs = models.EntranceSlug(entrance = e,
                                     slug = slug,
                                     primary = primary)
            cs.save()
            primary = False

def readcave(filename):
@@ -117,7 +117,7 @@ def readcave(filename):
    url = getXML(cavecontents, "url", maxItems = 1, context = context)
    entrances = getXML(cavecontents, "entrance", context = context)
    if len(non_public) == 1 and len(slugs) >= 1 and len(official_name) == 1 and len(areas) >= 1 and len(kataster_code) == 1 and len(kataster_number) == 1 and len(unofficial_number) == 1 and len(explorers) == 1 and len(underground_description) == 1 and len(equipment) == 1 and len(references) == 1 and len(survey) == 1 and len(kataster_status) == 1 and len(underground_centre_line) == 1 and len(notes) == 1 and len(length) == 1 and len(depth) == 1 and len(extent) == 1 and len(survex_file) == 1 and len(description_file ) == 1 and len(url) == 1 and len(entrances) >= 1:
        c, state = models.Cave.objects.update_or_create(non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
        c = models.Cave(non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
                        official_name = official_name[0],
                        kataster_code = kataster_code[0],
                        kataster_number = kataster_number[0],
@@ -137,6 +137,7 @@ def readcave(filename):
                        description_file = description_file[0],
                        url = url[0],
                        filename = filename)
        c.save()
        for area_slug in areas:
            area = models.Area.objects.filter(short_name = area_slug)
            if area:
@@ -148,13 +149,12 @@ def readcave(filename):
        primary = True
        for slug in slugs:
            try:
                cs = models.CaveSlug.objects.update_or_create(cave = c,
                cs = models.CaveSlug(cave = c,
                                     slug = slug,
                                     primary = primary)
                cs.save()
            except:
                message = "Can't find text (slug): %s, skipping %s" % (slug, context)
                models.DataIssue.objects.create(parser='caves', message=message)
                print(message)
                print("Can't find text (slug): %s, skipping %s" % (slug, context))

            primary = False
        for entrance in entrances:
@@ -162,26 +162,20 @@ def readcave(filename):
            letter = getXML(entrance, "letter", maxItems = 1, context = context)[0]
            try:
                entrance = models.Entrance.objects.get(entranceslug__slug = slug)
                ce = models.CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
                ce = models.CaveAndEntrance(cave = c, entrance_letter = letter, entrance = entrance)
                ce.save()
            except:
                message = "Entrance text (slug) %s missing %s" % (slug, context)
                models.DataIssue.objects.create(parser='caves', message=message)
                print(message)
                print ("Entrance text (slug) %s missing %s" % (slug, context))


def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
    if len(items) < minItems and printwarnings:
        message = "%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
        print("%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
                   "itemname": itemname,
                   "min": minItems} + context
        models.DataIssue.objects.create(parser='caves', message=message)
        print(message)

                   "min": minItems} + context)
    if maxItems is not None and len(items) > maxItems and printwarnings:
        message = "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
        print("%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
                   "itemname": itemname,
                   "max": maxItems} + context
        models.DataIssue.objects.create(parser='caves', message=message)
        print(message)
                   "max": maxItems} + context)
    return items
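For illustration, getXML pulls `<tag>…</tag>` items out of the pseudo-XML cave description files; a hypothetical call (the tag content here is invented):

```python
# Hypothetical getXML call against a fragment of a cave description file.
text = "<url>1623/204/204.html</url>"
print(getXML(text, "url", maxItems=1))  # ['1623/204/204.html']
```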

@@ -7,8 +7,6 @@ from parsers.people import GetPersonExpeditionNameLookup
from parsers.cavetab import GetCaveLookup

from django.template.defaultfilters import slugify
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import csv
import re
@@ -25,23 +23,19 @@ from utils import save_carefully
#
# the logbook loading section
#
def GetTripPersons(trippeople, expedition, logtime_underground):
def GetTripPersons(trippeople, expedition, logtime_underground):
    res = [ ]
    author = None
    round_bracket_regex = re.compile(r"[\(\[].*?[\)\]]")
    for tripperson in re.split(r",|\+|&|&amp;(?!\w+;)| and ", trippeople):
    for tripperson in re.split(",|\+|&|&amp;(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
        mul = re.match(r"<u>(.*?)</u>$(?i)", tripperson)
        mul = re.match("<u>(.*?)</u>$(?i)", tripperson)
        if mul:
            tripperson = mul.group(1).strip()
        if tripperson and tripperson[0] != '*':
            #assert tripperson in personyearmap, "'%s' << %s\n\n %s" % (tripperson, trippeople, personyearmap)
            tripperson = re.sub(round_bracket_regex, "", tripperson).strip()
            personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
            if not personyear:
                print(" - No name match for: '%s'" % tripperson)
                message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
                models.DataIssue.objects.create(parser='logbooks', message=message)
                print "NoMatchFor: '%s'" % tripperson
            res.append((personyear, logtime_underground))
            if mul:
                author = personyear
@@ -51,7 +45,7 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
        author = res[-1][0]
    return res, author
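The splitting regex above carves the trip-people string into individual names before the per-person lookup; a worked example with invented names:

```python
# Worked example of the people-splitting regex used in GetTripPersons.
import re

trippeople = "Alice, Bob + Carol and Dave"
parts = [p.strip() for p in re.split(r",|\+|&| and ", trippeople)]
print(parts)  # ['Alice', 'Bob', 'Carol', 'Dave']
```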

def GetTripCave(place): #need to be fuzzier about matching here. Already a very slow function...
def GetTripCave(place): #need to be fuzzier about matching here. Already a very slow function...
    # print "Getting cave for " , place
    try:
        katastNumRes=[]
@@ -71,34 +65,32 @@ def GetTripCave(place): #need to be fuzzier about matching here. Already a very
        return tripCaveRes

    elif len(tripCaveRes)>1:
        print("Ambiguous place " + str(place) + " entered. Choose from " + str(tripCaveRes))
        print "Ambiguous place " + str(place) + " entered. Choose from " + str(tripCaveRes)
        correctIndex=input("type list index of correct cave")
        return tripCaveRes[correctIndex]
    else:
        print("No cave found for place " , place)
        print "No cave found for place " , place
        return


noncaveplaces = [ "Journey", "Loser Plateau" ]
def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground, entry_type="wiki"):
def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_underground):
    """ saves a logbook entry and related persontrips """
    trippersons, author = GetTripPersons(trippeople, expedition, logtime_underground)
    if not author:
        print(" - Skipping logentry: " + title + " - no author for entry")
        message = "Skipping logentry: %s - no author for entry in year '%s'" % (title, expedition.year)
        models.DataIssue.objects.create(parser='logbooks', message=message)
        print "skipping logentry", title
        return

    #tripCave = GetTripCave(place)

    # tripCave = GetTripCave(place)
    #
    lplace = place.lower()
    if lplace not in noncaveplaces:
        cave=GetCaveLookup().get(lplace)

    #Check for an existing copy of the current entry, and save
    expeditionday = expedition.get_expedition_day(date)
    lookupAttribs={'date':date, 'title':title}
    nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50], 'entry_type':entry_type}
    lookupAttribs={'date':date, 'title':title}
    nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50]}
    lbo, created=save_carefully(models.LogbookEntry, lookupAttribs, nonLookupAttribs)

    for tripperson, time_underground in trippersons:
@@ -110,8 +102,8 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_

def ParseDate(tripdate, year):
    """ Interprets dates in the expo logbooks and returns a correct datetime.date object """
    mdatestandard = re.match(r"(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
    mdategoof = re.match(r"(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
    mdatestandard = re.match("(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
    mdategoof = re.match("(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
    if mdatestandard:
        assert mdatestandard.group(1) == year, (tripdate, year)
        year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
@@ -123,9 +115,9 @@ def ParseDate(tripdate, year):
        assert False, tripdate
    return datetime.date(year, month, day)
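For example, the ISO-style branch shown above parses directly; the d/m/yy call below assumes the elided "goof" branch behaves symmetrically:

```python
# ParseDate accepts both date styles found in the logbooks.
print(ParseDate("2012-07-25", "2012"))  # 2012-07-25
print(ParseDate("25/7/2012", "2012"))   # 2012-07-25 (assumes the goof branch)
```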

# 2006, 2008 - 2010
# 2007, 2008, 2006
def Parselogwikitxt(year, expedition, txt):
    trippara = re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt)
    trippara = re.findall("===(.*?)===([\s\S]*?)(?====)", txt)
    for triphead, triptext in trippara:
        tripheadp = triphead.split("|")
        #print "ttt", tripheadp
@@ -134,7 +126,7 @@ def Parselogwikitxt(year, expedition, txt):
        tripsplace = tripplace.split(" - ")
        tripcave = tripsplace[0].strip()

        tul = re.findall(r"T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
        tul = re.findall("T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
        if tul:
            #assert len(tul) <= 1, (triphead, triptext)
            #assert tul[0][1] in ["hrs", "hours"], (triphead, triptext)
@@ -148,16 +140,12 @@ def Parselogwikitxt(year, expedition, txt):
        #print "\n", tripcave, "--- ppp", trippeople, len(triptext)
        EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)

# 2002, 2004, 2005, 2007, 2011 - 2018
# 2002, 2004, 2005
def Parseloghtmltxt(year, expedition, txt):
    #print(" - Starting log html parser")
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    logbook_entry_count = 0
    tripparas = re.findall("<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        #print(" - HR detected - maybe a trip?")
        logbook_entry_count += 1

        s = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)? # second date
        s = re.match('''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)? # second date
            \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
            \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
            \s*<div\s+class="trippeople">\s*(.*?)</div>
@@ -167,41 +155,38 @@ def Parseloghtmltxt(year, expedition, txt):
            \s*$
            ''', trippara)
        if not s:
            if not re.search(r"Rigging Guide", trippara):
                print("can't parse: ", trippara) # this is 2007 which needs editing
            if not re.search("Rigging Guide", trippara):
                print "can't parse: ", trippara # this is 2007 which needs editing
            #assert s, trippara
            continue

        tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
        ldate = ParseDate(tripdate.strip(), year)
        #assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
        #trippeople = re.sub(r"Ol(?!l)", "Olly", trippeople)
        #trippeople = re.sub(r"Wook(?!e)", "Wookey", trippeople)
        trippeople = re.sub("Ol(?!l)", "Olly", trippeople)
        trippeople = re.sub("Wook(?!e)", "Wookey", trippeople)
        triptitles = triptitle.split(" - ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
        else:
            tripcave = "UNKNOWN"
        #print("\n", tripcave, "--- ppp", trippeople, len(triptext))
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "</br></br>", ltriptext).strip()
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
    if logbook_entry_count == 0:
        print(" - No trip entries found in logbook, check the syntax matches htmltxt format")
        #print "\n", tripcave, "--- ppp", trippeople, len(triptext)
        ltriptext = re.sub("</p>", "", triptext)
        ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub("<p>", "\n\n", ltriptext).strip()
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)


# main parser for 1991 - 2001. simpler because the data has been hacked so much to fit it
# main parser for pre-2001. simpler because the data has been hacked so much to fit it
def Parseloghtml01(year, expedition, txt):
    tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
    tripparas = re.findall("<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        s = re.match(u"(?s)\s*(?:<p>)?(.*?)</?p>(.*)$(?i)", trippara)
        assert s, trippara[:300]
        tripheader, triptext = s.group(1), s.group(2)
        mtripid = re.search(r'<a id="(.*?)"', tripheader)
        mtripid = re.search('<a id="(.*?)"', tripheader)
        tripid = mtripid and mtripid.group(1) or ""
        tripheader = re.sub(r"</?(?:[ab]|span)[^>]*>", "", tripheader)
        tripheader = re.sub("</?(?:[ab]|span)[^>]*>", "", tripheader)

        #print " ", [tripheader]
        #continue
@@ -209,7 +194,7 @@ def Parseloghtml01(year, expedition, txt):
        tripdate, triptitle, trippeople = tripheader.split("|")
        ldate = ParseDate(tripdate.strip(), year)

        mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
        mtu = re.search('<p[^>]*>(T/?U.*)', triptext)
        if mtu:
            tu = mtu.group(1)
            triptext = triptext[:mtu.start(0)] + triptext[mtu.end():]
@@ -221,40 +206,38 @@ def Parseloghtml01(year, expedition, txt):

        ltriptext = triptext

        mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
        mtail = re.search('(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
        if mtail:
            #print mtail.group(0)
            ltriptext = ltriptext[:mtail.start(0)]
        ltriptext = re.sub(r"</p>", "", ltriptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>|<br>", "\n\n", ltriptext).strip()
        ltriptext = re.sub("</p>", "", ltriptext)
        ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub("<p>|<br>", "\n\n", ltriptext).strip()
        #ltriptext = re.sub("[^\s0-9a-zA-Z\-.,:;'!]", "NONASCII", ltriptext)
        ltriptext = re.sub(r"</?u>", "_", ltriptext)
        ltriptext = re.sub(r"</?i>", "''", ltriptext)
        ltriptext = re.sub(r"</?b>", "'''", ltriptext)
        ltriptext = re.sub("</?u>", "_", ltriptext)
        ltriptext = re.sub("</?i>", "''", ltriptext)
        ltriptext = re.sub("</?b>", "'''", ltriptext)


        #print ldate, trippeople.strip()
        # could include the tripid (url link for cross referencing)
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)


# parser for 2003
def Parseloghtml03(year, expedition, txt):
    tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    tripparas = re.findall("<hr\s*/>([\s\S]*?)(?=<hr)", txt)
    for trippara in tripparas:
        s = re.match(u"(?s)\s*<p>(.*?)</p>(.*)$", trippara)
        assert s, trippara
        tripheader, triptext = s.group(1), s.group(2)
        tripheader = re.sub(r"&nbsp;", " ", tripheader)
        tripheader = re.sub(r"\s+", " ", tripheader).strip()
        tripheader = re.sub("&nbsp;", " ", tripheader)
        tripheader = re.sub("\s+", " ", tripheader).strip()
        sheader = tripheader.split(" -- ")
        tu = ""
        if re.match("T/U|Time underwater", sheader[-1]):
            tu = sheader.pop()
        if len(sheader) != 3:
            print("header not three pieces", sheader)
            print "header not three pieces", sheader
        tripdate, triptitle, trippeople = sheader
        ldate = ParseDate(tripdate.strip(), year)
        triptitles = triptitle.split(" , ")
@@ -263,14 +246,37 @@ def Parseloghtml03(year, expedition, txt):
        else:
            tripcave = "UNKNOWN"
        #print tripcave, "--- ppp", triptitle, trippeople, len(triptext)
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        ltriptext = re.sub(r"[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle,
                          text = ltriptext, trippeople=trippeople, expedition=expedition,
                          logtime_underground=0, entry_type="html")
        ltriptext = re.sub("</p>", "", triptext)
        ltriptext = re.sub("\s*?\n\s*", " ", ltriptext)
        ltriptext = re.sub("<p>", "\n\n", ltriptext).strip()
        ltriptext = re.sub("[^\s0-9a-zA-Z\-.,:;'!&()\[\]<>?=+*%]", "_NONASCII_", ltriptext)
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)

yearlinks = [
#    ("2013", "2013/logbook.html", Parseloghtmltxt),
    ("2012", "2012/logbook.html", Parseloghtmltxt),
    ("2011", "2011/logbook.html", Parseloghtmltxt),
    ("2010", "2010/logbook.html", Parselogwikitxt),
    ("2009", "2009/2009logbook.txt", Parselogwikitxt),
    ("2008", "2008/2008logbook.txt", Parselogwikitxt),
    ("2007", "2007/logbook.html", Parseloghtmltxt),
    ("2006", "2006/logbook/logbook_06.txt", Parselogwikitxt),
    ("2005", "2005/logbook.html", Parseloghtmltxt),
    ("2004", "2004/logbook.html", Parseloghtmltxt),
    ("2003", "2003/logbook.html", Parseloghtml03),
    ("2002", "2002/logbook.html", Parseloghtmltxt),
    ("2001", "2001/log.htm", Parseloghtml01),
    ("2000", "2000/log.htm", Parseloghtml01),
    ("1999", "1999/log.htm", Parseloghtml01),
    ("1998", "1998/log.htm", Parseloghtml01),
    ("1997", "1997/log.htm", Parseloghtml01),
    ("1996", "1996/log.htm", Parseloghtml01),
    ("1995", "1995/log.htm", Parseloghtml01),
    ("1994", "1994/log.htm", Parseloghtml01),
    ("1993", "1993/log.htm", Parseloghtml01),
    ("1992", "1992/log.htm", Parseloghtml01),
    ("1991", "1991/log.htm", Parseloghtml01),
    ]

def SetDatesFromLogbookEntries(expedition):
    """
@@ -289,67 +295,54 @@ def SetDatesFromLogbookEntries(expedition):
        persontrip.persontrip_next = None
        lprevpersontrip = persontrip
        persontrip.save()


def LoadLogbookForExpedition(expedition):
    """ Parses all logbook entries for one expedition """

    expowebbase = os.path.join(settings.EXPOWEB, "years")
    yearlinks = settings.LOGBOOK_PARSER_SETTINGS

    logbook_parseable = False

    if expedition.year in yearlinks:
        year_settings = yearlinks[expedition.year]
        file_in = open(os.path.join(expowebbase, year_settings[0]))
        txt = file_in.read().decode("latin1")
        file_in.close()
        parsefunc = year_settings[1]
        logbook_parseable = True
        print(" - Parsing logbook: " + year_settings[0] + "\n - Using parser: " + year_settings[1])
    else:
        try:
            file_in = open(os.path.join(expowebbase, expedition.year, settings.DEFAULT_LOGBOOK_FILE))
            txt = file_in.read().decode("latin1")
            file_in.close()
            logbook_parseable = True
            print("No set parser found using default")
            parsefunc = settings.DEFAULT_LOGBOOK_PARSER
        except (IOError):
            logbook_parseable = False
            print("Couldn't open default logbook file and nothing in settings for expo " + expedition.year)

    if logbook_parseable:
        parser = globals()[parsefunc]
        parser(expedition.year, expedition, txt)
        SetDatesFromLogbookEntries(expedition)

    #return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(models.PersonTrip.objects.filter(personexpedition__expedition=expedition).count())
    expowebbase = os.path.join(settings.EXPOWEB, "years")
    year = str(expedition.year)
    for lyear, lloc, parsefunc in yearlinks:
        if lyear == year:
            break
    fin = open(os.path.join(expowebbase, lloc))
    print "opening", lloc
    txt = fin.read().decode("latin1")
    fin.close()
    parsefunc(year, expedition, txt)
    SetDatesFromLogbookEntries(expedition)
    return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(models.PersonTrip.objects.filter(personexpedition__expedition=expedition).count())
|
||||
|
||||
|
||||
def LoadLogbooks():
|
||||
""" This is the master function for parsing all logbooks into the Troggle database. """
|
||||
""" This is the master function for parsing all logbooks into the Troggle database. Requires yearlinks, which is a list of tuples for each expedition with expedition year, logbook path, and parsing function. """
|
||||
|
||||
#Deletion has been moved to a seperate function to enable the non-destructive importing
|
||||
#models.LogbookEntry.objects.all().delete()
|
||||
expowebbase = os.path.join(settings.EXPOWEB, "years")
|
||||
#yearlinks = [ ("2001", "2001/log.htm", Parseloghtml01), ] #overwrite
|
||||
#yearlinks = [ ("1996", "1996/log.htm", Parseloghtml01),] # overwrite
|
||||
|
||||
# Clear the logbook data issues as we are reloading
|
||||
models.DataIssue.objects.filter(parser='logbooks').delete()
|
||||
# Fetch all expos
|
||||
expos = models.Expedition.objects.all()
|
||||
for expo in expos:
|
||||
print("\nLoading Logbook for: " + expo.year)
|
||||
|
||||
# Load logbook for expo
|
||||
LoadLogbookForExpedition(expo)
|
||||
for year, lloc, parsefunc in yearlinks:
|
||||
# This will not work until the corresponding year exists in the database.
|
||||
# In 2012 this needed noscript/folk.csv to be updated first.
|
||||
expedition = models.Expedition.objects.filter(year = year)[0]
|
||||
fin = open(os.path.join(expowebbase, lloc))
|
||||
txt = fin.read().decode("latin1")
|
||||
fin.close()
|
||||
parsefunc(year, expedition, txt)
|
||||
SetDatesFromLogbookEntries(expedition)
|
||||
|
||||
|
||||
dateRegex = re.compile(r'<span\s+class="date">(\d\d\d\d)-(\d\d)-(\d\d)</span>', re.S)
|
||||
expeditionYearRegex = re.compile(r'<span\s+class="expeditionyear">(.*?)</span>', re.S)
|
||||
titleRegex = re.compile(r'<H1>(.*?)</H1>', re.S)
|
||||
reportRegex = re.compile(r'<div\s+class="report">(.*)</div>\s*</body>', re.S)
|
||||
personRegex = re.compile(r'<div\s+class="person">(.*?)</div>', re.S)
|
||||
nameAuthorRegex = re.compile(r'<span\s+class="name(,author|)">(.*?)</span>', re.S)
|
||||
TURegex = re.compile(r'<span\s+class="TU">([0-9]*\.?[0-9]+)</span>', re.S)
|
||||
locationRegex = re.compile(r'<span\s+class="location">(.*?)</span>', re.S)
|
||||
caveRegex = re.compile(r'<span\s+class="cave">(.*?)</span>', re.S)
|
||||
dateRegex = re.compile('<span\s+class="date">(\d\d\d\d)-(\d\d)-(\d\d)</span>', re.S)
|
||||
expeditionYearRegex = re.compile('<span\s+class="expeditionyear">(.*?)</span>', re.S)
|
||||
titleRegex = re.compile('<H1>(.*?)</H1>', re.S)
|
||||
reportRegex = re.compile('<div\s+class="report">(.*)</div>\s*</body>', re.S)
|
||||
personRegex = re.compile('<div\s+class="person">(.*?)</div>', re.S)
|
||||
nameAuthorRegex = re.compile('<span\s+class="name(,author|)">(.*?)</span>', re.S)
|
||||
TURegex = re.compile('<span\s+class="TU">([0-9]*\.?[0-9]+)</span>', re.S)
|
||||
locationRegex = re.compile('<span\s+class="location">(.*?)</span>', re.S)
|
||||
caveRegex = re.compile('<span\s+class="cave">(.*?)</span>', re.S)
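Illustrative sketch, not part of the commit: the shape of logbook HTML these span patterns are written against, and what two of them capture (the sample fragment is invented).

    # assumes the regexes defined just above
    sample = '<span class="date">2018-07-20</span> ... <span class="TU">5.5</span>'
    dateRegex.search(sample).groups()   # ('2018', '07', '20')
    TURegex.search(sample).groups()     # ('5.5',)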

def parseAutoLogBookEntry(filename):
    errors = []
@@ -442,4 +435,4 @@ def parseAutoLogBookEntry(filename):
                          time_underground = TU,
                          logbook_entry = logbookEntry,
                          is_logbook_entry_author = author).save()
    print(logbookEntry)
    print logbookEntry

parsers/people.py
@@ -4,8 +4,6 @@ from django.conf import settings
import troggle.core.models as models
import csv, re, datetime, os, shutil
from utils import save_carefully
from HTMLParser import HTMLParser
from unidecode import unidecode

def saveMugShot(mugShotPath, mugShotFilename, person):
    if mugShotFilename.startswith(r'i/'): #if filename in cell has the directory attached (I think they all do), remove it
@@ -46,13 +44,13 @@ def parseMugShotAndBlurb(personline, header, person):

def LoadPersonsExpos():

    persontab = open(os.path.join(settings.EXPOWEB, "folk", "folk.csv"))
    persontab = open(os.path.join(settings.EXPOWEB, "noinfo", "folk.csv"))
    personreader = csv.reader(persontab)
    headers = personreader.next()
    header = dict(zip(headers, range(len(headers))))

    # make expeditions
    print("Loading expeditions")
    print "Loading expeditions"
    years = headers[5:]

    for year in years:
@@ -61,35 +59,22 @@ def LoadPersonsExpos():

        save_carefully(models.Expedition, lookupAttribs, nonLookupAttribs)


    # make persons
    print("Loading personexpeditions")
    print "Loading personexpeditions"
    #expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
    #expomissing = set(expoers2008)

    for personline in personreader:
        name = personline[header["Name"]]
        name = re.sub(r"<.*?>", "", name)

        firstname = ""
        nickname = ""

        rawlastname = personline[header["Lastname"]].strip()
        matchlastname = re.match(r"^([\w&;\s]+)(?:\(([^)]*)\))?", rawlastname)
        lastname = matchlastname.group(1).strip()

        splitnick = re.match(r"^([\w&;\s]+)(?:\(([^)]*)\))?", name)
        fullname = splitnick.group(1)

        nickname = splitnick.group(2) or ""

        fullname = fullname.strip()
        names = fullname.split(' ')
        firstname = names[0]
        if len(names) == 1:
            lastname = ""

        lookupAttribs={'first_name':firstname, 'last_name':(lastname or "")}
        nonLookupAttribs={'is_vfho':personline[header["VfHO member"]], 'fullname':fullname}
        name = re.sub("<.*?>", "", name)
        mname = re.match("(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
        nickname = mname.group(3) or ""

        lookupAttribs={'first_name':mname.group(1), 'last_name':(mname.group(2) or "")}
        nonLookupAttribs={'is_vfho':personline[header["VfHO member"]],}
        person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)


        parseMugShotAndBlurb(personline=personline, header=header, person=person)

        # make person expedition from table
@@ -103,8 +88,6 @@ def LoadPersonsExpos():

    # this fills in those people for whom 2008 was their first expo
    #print "Loading personexpeditions 2008"
    #expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
    #expomissing = set(expoers2008)
    #for name in expomissing:
    #    firstname, lastname = name.split()
    #    is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
@@ -120,6 +103,18 @@ def LoadPersonsExpos():
    #    personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
    #    personexpedition.save()

    #Notability is now a method of person. Makes no sense to store it in the database; it would need to be recalculated every time something changes. - AC 16 Feb 09
    # could rank according to surveying as well
    #print "Setting person notability"
    #for person in models.Person.objects.all():
        #person.notability = 0.0
        #for personexpedition in person.personexpedition_set.all():
            #if not personexpedition.is_guest:
                #person.notability += 1.0 / (2012 - int(personexpedition.expedition.year))
        #person.bisnotable = person.notability > 0.3 # I don't know how to filter by this
        #person.save()


# used in other referencing parser functions
# expedition name lookup cached for speed (it's a very big list)
Gpersonexpeditionnamelookup = { }
@@ -132,33 +127,20 @@ def GetPersonExpeditionNameLookup(expedition):
    res = { }
    duplicates = set()

    print("Calculating GetPersonExpeditionNameLookup for " + expedition.year)
    print "Calculating GetPersonExpeditionNameLookup for", expedition.year
    personexpeditions = models.PersonExpedition.objects.filter(expedition=expedition)
    htmlparser = HTMLParser()
    for personexpedition in personexpeditions:
        possnames = [ ]
        f = unidecode(htmlparser.unescape(personexpedition.person.first_name.lower()))
        l = unidecode(htmlparser.unescape(personexpedition.person.last_name.lower()))
        full = unidecode(htmlparser.unescape(personexpedition.person.fullname.lower()))
        f = personexpedition.person.first_name.lower()
        l = personexpedition.person.last_name.lower()
        if l:
            possnames.append(f + " " + l)
            possnames.append(f + " " + l[0])
            possnames.append(f + l[0])
            possnames.append(f[0] + " " + l)
        possnames.append(f)
        if full not in possnames:
            possnames.append(full)
        if personexpedition.nickname not in possnames:
        if personexpedition.nickname:
            possnames.append(personexpedition.nickname.lower())
            if l:
                # This allows for nickname to be used for short name eg Phil
                # adding Phil Sargent to the list
                if str(personexpedition.nickname.lower() + " " + l) not in possnames:
                    possnames.append(personexpedition.nickname.lower() + " " + l)
                if str(personexpedition.nickname.lower() + " " + l[0]) not in possnames:
                    possnames.append(personexpedition.nickname.lower() + " " + l[0])
                if str(personexpedition.nickname.lower() + l[0]) not in possnames:
                    possnames.append(personexpedition.nickname.lower() + l[0])

        for possname in possnames:
            if possname in res:

parsers/survex.py
@@ -5,26 +5,20 @@ import troggle.settings as settings
from subprocess import call, Popen, PIPE

from troggle.parsers.people import GetPersonExpeditionNameLookup
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import re
import os
from datetime import datetime

line_leg_regex = re.compile(r"[\d\-+.]+$")

def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    # The try catches here need replacing as they are relatively expensive
def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
    ls = sline.lower().split()
    ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])


    survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    if stardata["type"] == "normal":
        try:
            survexleg.tape = float(ls[stardata["tape"]])
        except ValueError:
        except ValueError:
            print("Tape misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
@@ -59,17 +53,14 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
            survexleg.compass = 1000
            survexleg.clino = -90.0
        else:
            assert line_leg_regex.match(lcompass), ls
            assert line_leg_regex.match(lclino) and lclino != "-", ls
            assert re.match(r"[\d\-+.]+$", lcompass), ls
            assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave


    # only save proper legs
    survexleg.save()


    itape = stardata.get("tape")
    if itape:
        try:
@@ -89,212 +80,96 @@ def LoadSurvexEquate(survexblock, sline):

def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    pass


stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
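Illustrative sketch, not part of the commit: how a "*data" declaration is mapped through stardataparamconvert into column indices (the "- 1" discounts the style word itself), yielding the same from/to/tape/compass/clino indices as stardatadefault.

    ls = "normal from to length bearing gradient".split()
    stardata = {"type": ls[0]}
    for i in range(0, len(ls)):
        stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
    # stardata now maps "from" -> 0, "to" -> 1, "tape" -> 2,
    # "compass" -> 3, "clino" -> 4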

regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
regex_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)")
regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$(?i)")
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
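Illustrative sketch, not part of the commit: how regex_comment splits a raw survex line into the (data, comment) pair used throughout RecursiveLoad below (the sample line is invented).

    sline, comment = regex_comment.match("1 2 5.30 210 -5 ; steep bit".strip()).groups()
    # sline == "1 2 5.30 210 -5", comment == "steep bit"
    # a ";ref 2018#42" comment is then picked up by regex_ref as ("2018", "42")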

def RecursiveLoad(survexblock, survexfile, fin, textlines):
    iblankbegins = 0
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]

    # uncomment to print out all files during parsing
    print(" - Reading file: " + survexblock.survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        # print('Match')
        # print(pos_cave)
        cave = models.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
    svxlines = ''
    svxlines = fin.read().splitlines()
    # print('Cave - preloop ' + str(survexfile.cave))
    # print(survexblock)
    for svxline in svxlines:

        # print(survexblock)

        # print(svxline)
        # if not svxline:
        #     print(' - Not survex')
        #     return
        # textlines.append(svxline)

        lineno += 1

        # print(' - Line: %d' % lineno)


    # uncomment to print out all files during parsing
    print("Reading file:", survexblock.survexfile.path)
    while True:
        svxline = fin.readline().decode("latin1")
        if not svxline:
            return
        textlines.append(svxline)

        # break the line at the comment
        sline, comment = regex_comment.match(svxline.strip()).groups()
        sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()

        # detect ref line pointing to the scans directory
        mref = comment and regex_ref.match(comment)
        mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
        if mref:
            refscan = "%s#%s" % (mref.group(1), mref.group(2))
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
                survexblock.save()
            survexblock.save()
            continue

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match regex_qm
        # ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and regex_qm.match(comment)
        if qmline:
            print(qmline.groups())
            #(u'1', u'B', u'miraclemaze', u'1.17', u'-', None, u'\tcontinuation of rift')
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            print('Cave - %s' % survexfile.cave)
            print('QM no %d' % int(qm_no))
            print('QM grade %s' % qm_grade)
            print('QM section %s' % qm_from_section)
            print('QM station %s' % qm_from_station)
            print('QM res section %s' % qm_resolve_section)
            print('QM res station %s' % qm_resolve_station)
            print('QM notes %s' % qm_notes)

            # If the QM isn't resolved (has no resolving station) then load it
            if not qm_resolve_section or qm_resolve_section is not '-' or qm_resolve_section is not 'None':
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunk, named)
                if len(from_section) > 0:
                    print(from_section[0])
                    from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        print(from_station[0])
                        qm = models.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                print('QM found but resolved')

        #print('Cave -sline ' + str(cave))

        if not sline:
            continue


        # detect the star command
        mstar = regex_star.match(sline)
        mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
        if not mstar:
            if "from" in stardata:
                # print('Cave ' + str(survexfile.cave))
                # print(survexblock)
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
                # print(' - From: ')
                #print(stardata)
                pass
                LoadSurvexLineLeg(survexblock, stardata, sline, comment)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            # print(' - Passage: ')
            #Missing "station" in stardata.
            continue


        # detect the star command
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if re.match("include$(?i)", cmd):
            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
            print(' - Include file found including - ' + includepath)
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(pos_cave)
                cave = models.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print('No match for %s' % includepath)
            includesurvexfile = models.SurvexFile(path=includepath)
            includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)


        elif re.match("begin$(?i)", cmd):
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    print(pos_cave)
                    cave = models.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print('No match for %s' % newsvxpath)

            if line:
                name = line.lower()
                print(' - Begin found for: ' + name)
                # print('Block cave: ' + str(survexfile.cave))
                survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexblock.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                # print(survexblockdown)
                textlinesdown = [ ]
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
            else:
                iblankbegins += 1


        elif re.match("end$(?i)", cmd):
            if iblankbegins:
                iblankbegins -= 1
            else:
                survexblock.text = "".join(textlines)
                survexblock.save()
                # print(' - End found: ')
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                # print(' - Time to process: ' + str(timetaken))
                return


        elif re.match("date$(?i)", cmd):
            if len(line) == 10:
                #print(' - Date found: ' + line)
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                survexblock.date = re.sub(r"\.", "-", line)
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()


        elif re.match("team$(?i)", cmd):
            pass
            # print(' - Team found: ')
            mteammember = regex_team.match(line)
            mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
            if mteammember:
                for tm in regex_team_member.split(mteammember.group(2)):
                for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
@@ -304,23 +179,18 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
                            if personexpedition:
                                personrole.person=personexpedition.person
                            personrole.save()


        elif cmd == "title":
            #print(' - Title found: ')
            survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexblock.cave)
            survextitle.save()
            pass


        elif cmd == "require":
            # should we check survex version available for processing?
            pass

        elif cmd == "data":
            #print(' - Data found: ')
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            #print(' - Star data: ', stardata)
            #print(ls)
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
@@ -329,23 +199,40 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line


        elif cmd == "equate":
            #print(' - Equate found: ')
            LoadSurvexEquate(survexblock, line)

        elif cmd == "fix":
            #print(' - Fix found: ')
            survexblock.MakeSurvexStation(line.split()[0])

        else:
            #print(' - Stuff')
            if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
                           "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
                print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(' - Time to process: ' + str(timetaken))


def ReloadSurvexCave(survex_cave, area):
    print(survex_cave, area)
    cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
    print(cave)
    #cave = models.Cave.objects.get(kataster_number=survex_cave)
    cave.survexblock_set.all().delete()
    cave.survexfile_set.all().delete()
    cave.survexdirectory_set.all().delete()

    survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
    survexfile.save()
    survexfile.SetDirectory()

    survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()


def LoadAllSurvexBlocks():

@@ -362,7 +249,7 @@ def LoadAllSurvexBlocks():

    print(" - Data flushed")

    survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfile = models.SurvexFile(path="all", cave=None)
    survexfile.save()
    survexfile.SetDirectory()

@@ -371,13 +258,22 @@ def LoadAllSurvexBlocks():
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()


    #Load each cave,
    #FIXME this should be dealt with load all above
    print(" - Reloading all caves")
    caves = models.Cave.objects.all()
    for cave in caves:
        if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
            if cave.kataster_number not in ['40']:
                print("loading", cave, cave.kat_area())
                ReloadSurvexCave(cave.kataster_number, cave.kat_area())


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
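Illustrative sketch, not part of the commit: the shape of a .pos line this regex is written against, and what it captures (the coordinates and station name are invented).

    line = "(   12345.67,  81234.56,  1567.89 ) caves-1623.204.trunk.42"
    x, y, z, name = poslineregex.match(line).groups()
    # x == "12345.67", y == "81234.56", z == "1567.89",
    # name == "caves-1623.204.trunk.42"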


@@ -385,12 +281,12 @@ def LoadPos():

    print('Loading Pos....')

    call([settings.CAVERN, "--output=%s%s.3d" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME), "%s%s.svx" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)])
    call([settings.THREEDTOPOS, '%s%s.3d' % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)], cwd = settings.SURVEX_DATA)
    posfile = open("%s%s.pos" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME))
    call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
    call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
    posfile = open("%sall.pos" % settings.SURVEX_DATA)
    posfile.readline() #Drop header
    for line in posfile.readlines():
        r = poslineregex.match(line)
        r = poslineregex.match(line)
        if r:
            x, y, z, name = r.groups()
            try:

parsers/surveys.py
@@ -99,7 +99,7 @@ def parseSurveyScans(expedition, logfile=None):
            #scanList = listdir(expedition.year, surveyFolder)
            scanList=os.listdir(os.path.join(yearPath,surveyFolder))
        except AttributeError:
            print("Folder: " + surveyFolder + " ignored\r")
            print(surveyFolder + " ignored\r",)
            continue

        for scan in scanList:
@@ -107,7 +107,7 @@ def parseSurveyScans(expedition, logfile=None):
                scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
                scanType,scanNumber,scanFormat=scanChopped
            except AttributeError:
                print("File: " + scan + " ignored\r")
                print(scan + " ignored\r",)
                continue
            if scanType == 'elev' or scanType == 'extend':
                scanType = 'elevation'
@@ -174,6 +174,9 @@ def GetListDir(sdir):
        ff = os.path.join(sdir, f)
        res.append((f, ff, os.path.isdir(ff)))
    return res


def LoadListScansFile(survexscansfolder):

46 settings.py
@@ -10,7 +10,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ALLOWED_HOSTS = [u'expo.survex.com']
ALLOWED_HOSTS = []

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
@@ -45,52 +45,14 @@ NOTABLECAVESHREFS = [ "161", "204", "258", "76", "107", "264" ]
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/troggle/media-admin/'
PHOTOS_ROOT = os.path.join(EXPOWEB, 'photos')
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "noinfo", "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "noinfo", "entrance_data")

MEDIA_URL = urlparse.urljoin(URL_ROOT , '/site_media/')
SURVEYS_URL = urlparse.urljoin(URL_ROOT , '/survey_scans/')
PHOTOS_URL = urlparse.urljoin(URL_ROOT , '/photos/')
SVX_URL = urlparse.urljoin(URL_ROOT , '/survex/')

# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623"

DEFAULT_LOGBOOK_PARSER = "Parseloghtmltxt"
DEFAULT_LOGBOOK_FILE = "logbook.html"

LOGBOOK_PARSER_SETTINGS = {
    "2019": ("2019/logbook.html", "Parseloghtmltxt"),
    "2018": ("2018/logbook.html", "Parseloghtmltxt"),
    "2017": ("2017/logbook.html", "Parseloghtmltxt"),
    "2016": ("2016/logbook.html", "Parseloghtmltxt"),
    "2015": ("2015/logbook.html", "Parseloghtmltxt"),
    "2014": ("2014/logbook.html", "Parseloghtmltxt"),
    "2013": ("2013/logbook.html", "Parseloghtmltxt"),
    "2012": ("2012/logbook.html", "Parseloghtmltxt"),
    "2011": ("2011/logbook.html", "Parseloghtmltxt"),
    "2010": ("2010/logbook.html", "Parselogwikitxt"),
    "2009": ("2009/2009logbook.txt", "Parselogwikitxt"),
    "2008": ("2008/2008logbook.txt", "Parselogwikitxt"),
    "2007": ("2007/logbook.html", "Parseloghtmltxt"),
    "2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
    "2005": ("2005/logbook.html", "Parseloghtmltxt"),
    "2004": ("2004/logbook.html", "Parseloghtmltxt"),
    "2003": ("2003/logbook.html", "Parseloghtml03"),
    "2002": ("2002/logbook.html", "Parseloghtmltxt"),
    "2001": ("2001/log.htm", "Parseloghtml01"),
    "2000": ("2000/log.htm", "Parseloghtml01"),
    "1999": ("1999/log.htm", "Parseloghtml01"),
    "1998": ("1998/log.htm", "Parseloghtml01"),
    "1997": ("1997/log.htm", "Parseloghtml01"),
    "1996": ("1996/log.htm", "Parseloghtml01"),
    "1995": ("1995/log.htm", "Parseloghtml01"),
    "1994": ("1994/log.htm", "Parseloghtml01"),
    "1993": ("1993/log.htm", "Parseloghtml01"),
    "1992": ("1992/log.htm", "Parseloghtml01"),
    "1991": ("1991/log.htm", "Parseloghtml01"),
}
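Illustrative sketch, not part of the commit: how LoadLogbookForExpedition in parsers/logbooks.py above consumes these settings for one year. Each tuple is (path under EXPOWEB/years, parser function name), with a fallback to the defaults when a year has no entry.

    year_settings = LOGBOOK_PARSER_SETTINGS.get("2010")
    if year_settings:
        logbookfile, parsefunc = year_settings   # ("2010/logbook.html", "Parselogwikitxt")
    else:
        logbookfile, parsefunc = DEFAULT_LOGBOOK_FILE, DEFAULT_LOGBOOK_PARSER
    # the parser is then looked up by name: parser = globals()[parsefunc]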

APPEND_SLASH = False
SMART_APPEND_SLASH = True

@@ -130,7 +92,7 @@ INSTALLED_APPS = (
    'troggle.profiles',
    'troggle.core',
    'troggle.flatpages',
    'troggle.imagekit',
    'imagekit',
)

MIDDLEWARE_CLASSES = (

@@ -1,12 +1,12 @@
<!DOCTYPE html>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"/>
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/main3.css" title="eyeCandy"/>
<link rel="alternate stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/mainplain.css" title="plain"/>
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/dropdownNavStyle.css" />
<title>{% block title %}Troggle{% endblock %}</title>
<!-- <script src="{{ settings.JSLIB_URL }}jquery/jquery.min.js" type="text/javascript"></script> -->
<script src="{{ settings.JSLIB_URL }}jquery/jquery.min.js" type="text/javascript"></script>
<script src="{{ settings.MEDIA_URL }}js/jquery.quicksearch.js" type="text/javascript"></script>
<script src="{{ settings.MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script src="{{ settings.MEDIA_URL }}js/jquery.dropdownPlain.js" type="text/javascript"></script>
@@ -16,7 +16,7 @@
<body onLoad="contentHeight();">

<div id="header">
  <h1>CUCC Expeditions to Austria: 1976 - 2020</h1>
  <h1>CUCC Expeditions to Austria: 1976 - 2018</h1>
  <div id="editLinks"> {% block loginInfo %}
    <a href="{{settings.EXPOWEB_URL}}">Website home</a> |
    {% if user.username %}
@@ -43,8 +43,6 @@
      <a href="{% url "expedition" 2016 %}">Expo2016</a> |
      <a href="{% url "expedition" 2017 %}">Expo2017</a> |
      <a href="{% url "expedition" 2018 %}">Expo2018</a> |
      <a href="{% url "expedition" 2019 %}">Expo2019</a> |
      <a href="{% url "expedition" 2020 %}">Expo2020</a> |

      <a href="/admin/">Django admin</a>
    </div>
@@ -83,7 +81,7 @@

    <li><a href="#">External links</a>
      <ul class="sub_menu">
        <li><a id="cuccLink" href="https://camcaving.uk">CUCC website</a></li>
        <li><a id="cuccLink" href="http://www.srcf.ucam.org/caving/wiki/Main_Page">CUCC website</a></li>
        <li><a id="expoWebsiteLink" href="http://expo.survex.com">Expedition website</a></li>
      </ul>
    </li>

@@ -408,8 +408,8 @@ div#scene {

</style>

<script type="text/javascript" src="/javascript/CaveView/js/CaveView.js" ></script>
<script type="text/javascript" src="/javascript/CaveView/lib/proj4.js" ></script>
<script type="text/javascript" src="/CaveView/js/CaveView.js" ></script>
<script type="text/javascript" src="/CaveView/lib/proj4.js" ></script>


<script type="text/javascript" >

@@ -11,7 +11,7 @@
<h3>Notable caves</h3>
<ul>
{% for cave in notablecaves %}
  <li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{% endif %} {{cave.official_name|safe}}</a> </li>
  <li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>
{% endfor %}
</ul>

@@ -20,7 +20,7 @@
<table class="searchable">
{% for cave in caves1623 %}

  <tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }} {{cave.official_name|safe}}</a> {% if cave.unofficial_number %}({{cave.unofficial_number }}){% endif %}{% else %}{{cave.unofficial_number }} {{cave.official_name|safe}}</a> {% endif %}</td></tr>
  <tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </td></tr>

{% endfor %}
</table>
@@ -30,8 +30,7 @@
<ul class="searchable">
{% for cave in caves1626 %}

  <li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }} {{cave.official_name|safe}}</a> {% if cave.unofficial_number %}({{cave.unofficial_number }}){% endif %}{% else %}{{cave.unofficial_number }} {{cave.official_name|safe}}</a> {% endif %}
  </li>
  <li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>

{% endfor %}
</ul>

@@ -23,45 +23,25 @@


<form name="reset" method="post" action="">
  <h3>Wipe:</h3>
  <table>
    <tr>
      <td>Wipe entire database and recreate tables: </td>
      <td><input type="checkbox" name="reload_db" /></td>
      <td>
        <input type="submit" id="Import" value="I really want to delete all information in troggle, and accept all responsibility.">
      </td>
    </tr>
  </table>
  <h3>Wipe:</h3>

  <table>
    <tr><td>Wipe entire database and recreate tables: </td><td><input type="checkbox" name="reload_db" /></td><td> <input type="submit" id="Import" value="I really want to delete all information in troggle, and accept all responsibility."></td></tr>
  </table>
</form>
<h3>Import (non-destructive):</h3>
<form name="import" method="post" action="">
  <table>
    <tr>
      <td>people from folk.csv using parsers\people.py</td>
      <td><input type="checkbox" name="import_people"/></td>
    </tr>
    <tr>
      <td>caves from cavetab2.csv using parsers\cavetab.py</td>
      <td> <input type="checkbox" class="parser" name="import_cavetab"/></td>
    </tr>
    <tr>
      <td>logbook entries using parsers\logbooks.py</td>
      <td><input type="checkbox" name="import_logbooks"/></td>
    </tr>
    <tr>
      <td>QMs using parsers\QMs.py</td>
      <td><input type="checkbox" name="import_QMs" /></td>
    </tr>
    <tr>
      <td>survey scans using parsers\surveys.py</td>
      <td><input type="checkbox" name="import_surveys" /></td>
    </tr>
    <tr>
      <td>survex data using parsers\survex.py</td>
      <td><input type="checkbox" name="import_survex" /></td>
    </tr>
  </table>
  <table>
    <tr><td>people from folk.csv using parsers\people.py</td><td><input type="checkbox" name="import_people"/></td></tr>
    <tr><td>caves from cavetab2.csv using parsers\cavetab.py</td><td> <input type="checkbox" class="parser" name="import_cavetab"/></td></tr>
    <tr><td>logbook entries using parsers\logbooks.py</td><td><input type="checkbox" name="import_logbooks"/></td></tr>
    <tr><td>QMs using parsers\QMs.py</td><td><input type="checkbox" name="import_QMs" /></td></tr>
    <tr><td>survey scans using parsers\surveys.py</td><td><input type="checkbox" name="import_surveys" /></td></tr>
    <tr><td>survex data using parsers\survex.py</td><td><input type="checkbox" name="import_survex" /></td></tr>

  </table>


  <p>
    <input type="submit" id="Import" value="Import">


@@ -1,4 +1,4 @@
<!DOCTYPE html>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<!-- Only put one cave in this file -->
<!-- If you edit this file, make sure you update the websites database -->
<html lang="en">

@@ -1,4 +1,4 @@
<!DOCTYPE html>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<!-- Only put one entrance in this file -->
<!-- If you edit this file, make sure you update the websites database -->
<html lang="en">

@@ -1,5 +1,4 @@
{% autoescape off %}
<!DOCTYPE html>
<html>
<head>
<style type="text/css">.author {text-decoration:underline}</style>

@@ -9,7 +9,6 @@
<script src="{{ settings.TINY_MCE_MEDIA_URL }}tiny_mce.js" type="text/javascript"></script>
{% endblock %}
{% block content %}
<h1>Edit Cave</h1>
<form action="" method="post">{% csrf_token %}
<table>{{ form }}{{caveAndEntranceFormSet}}</table>
{{ versionControlForm }}

@@ -3,11 +3,6 @@
{% block extrahead %}
{% load csrffaker %}
<script src="{{ settings.TINY_MCE_MEDIA_URL }}tiny_mce.js" type="text/javascript"></script>
<script type="text/javascript">
  tinyMCE.init({
    mode : "textareas"
  });
</script>
{% endblock %}
{% block body %}
<h1>Edit {{ path }}</h1>

@@ -10,6 +10,10 @@

{% block content %}

{% if message %}
<p>debug message: {{message}}</p>
{% endif %}

<h2>{{expedition.name}}</h2>

<p><b>Other years:</b>
@@ -37,13 +41,13 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
</tr>
{% for personexpeditionday in personexpeditiondays %}
  <tr>
    <td><a href="{{ personexpeditionday.personexpedition.get_absolute_url }}">{{personexpeditionday.personexpedition.person|safe}}</a></td>
    <td><a href="{{ personexpeditionday.personexpedition.get_absolute_url }}">{{personexpeditionday.personexpedition.person}}</a></td>
    {% for persondayactivities in personexpeditionday.personrow %}

      {% if persondayactivities.persontrips or persondayactivities.survexblocks %}
        <td class="persondayactivity">
          {% for persontrip in persondayactivities.persontrips %}
            <a href="{{persontrip.logbook_entry.get_absolute_url}}" class="dayindexlog-1">T</a>
            <a href="{{persontrip.logbook_entry.get_absolute_url}}" class="dayindexlog-{{persontrip.logbook_entry.DayIndex}}">T</a>
          {% endfor %}
          <br/>
          {% for survexblock in persondayactivities.survexblocks %}
@@ -69,7 +73,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
{% regroup dateditems|dictsort:"date" by date as dates %}
{% for date in dates %}
  <tr>
    <td>{{date.grouper|date:"D d M Y"}}</td>
    <td>{{date.grouper}}</td>
    <td>{% for item in date.list %}
      {% if item.isLogbookEntry %}<a href="{{ item.get_absolute_url }}">{{item.title|safe}}</a><br/>{% endif %}
    {% endfor %}</td>

@@ -16,7 +16,7 @@
{% if entry.is_deletion %}
  {{ entry.object_repr }}
{% else %}
  <a href="admin/{{ entry.get_admin_url }}/">{{ entry.object_repr }}</a>
  <a href="admin/{{ entry.get_admin_url }}">{{ entry.object_repr }}</a>
{% endif %}
<br/>
{% if entry.content_type %}
@@ -49,6 +49,17 @@ Here you will find information about the {{expedition.objects.count}} expedition
If you are an expedition member, please sign up using the link to the top right and begin editing.
</p>

<h3>News</h3>

<p class="indent">
Everyone is gearing up for the 2009 expedition; please see the link below for the main expedition website.
</p>

<h3>Troggle development</h3>
<p class="indent">
Troggle is still under development. Check out the <a href="http://troggle.googlecode.com">development page</a> on google code, where you can file bug reports, make suggestions, and help develop the code. There is also an old todo list at <a href="{%url "todo"%}">here</a>.
</p>
</div>
{% endblock content %}

{% block margins %}

@@ -2,12 +2,12 @@
{% load wiki_markup %}

{% block title %}Logbook {{logbookentry.id}}{% endblock %}
{% block editLink %}<a href={{logbookentry.get_admin_url}}/>Edit logbook entry {{logbookentry|wiki_to_html_short}}</a>{% endblock %}
{% block editLink %}<a href={{logbookentry.get_admin_url}}>Edit logbook entry {{logbookentry|wiki_to_html_short}}</a>{% endblock %}
{% block content %}

{% block related %}{% endblock %}
{% block nav %}{% endblock %}
<h2>{{logbookentry.title|safe}}</h2>
<h2>{{logbookentry.title}}</h2>

<div id="related">
  <p><a href="{{ logbookentry.expedition.get_absolute_url }}">{{logbookentry.expedition.name}}</a></p>
@@ -20,10 +20,10 @@

  <p>
  {% if logbookentry.get_previous_by_date %}
    <a href="{{ logbookentry.get_previous_by_date.get_absolute_url }}">{{logbookentry.get_previous_by_date.date|date:"D d M Y"}}</a>
    <a href="{{ logbookentry.get_previous_by_date.get_absolute_url }}">{{logbookentry.get_previous_by_date.date}}</a>
  {% endif %}
  {% if logbookentry.get_next_by_date %}
    <a href="{{ logbookentry.get_next_by_date.get_absolute_url }}">{{logbookentry.get_next_by_date.date|date:"D d M Y"}}</a>
    <a href="{{ logbookentry.get_next_by_date.get_absolute_url }}">{{logbookentry.get_next_by_date.date}}</a>
  {% endif %}
  </p>

@@ -47,12 +47,12 @@

  <td>
  {% if persontrip.persontrip_prev %}
    <a href="{{ persontrip.persontrip_prev.logbook_entry.get_absolute_url }}">{{persontrip.persontrip_prev.logbook_entry.date|date:"D d M Y"}}</a>
    <a href="{{ persontrip.persontrip_prev.logbook_entry.get_absolute_url }}">{{persontrip.persontrip_prev.logbook_entry.date}}</a>
  {% endif %}
  </td>
  <td>
  {% if persontrip.persontrip_next %}
    <a href="{{ persontrip.persontrip_next.logbook_entry.get_absolute_url }}">{{persontrip.persontrip_next.logbook_entry.date|date:"D d M Y"}}</a>
    <a href="{{ persontrip.persontrip_next.logbook_entry.get_absolute_url }}">{{persontrip.persontrip_next.logbook_entry.date}}</a>
  {% endif %}
  </td>

@@ -65,14 +65,9 @@
</div>

<div id="col1">
  <div class="logbookentry">
    <b>{{logbookentry.date|date:"D d M Y"}}</b>
    {% if logbookentry.entry_type == "html" %}
      <p>{{logbookentry.text|safe}}</p>
    {% else %}
      {{logbookentry.text|wiki_to_html}}
    {% endif %}
  </div>
  <div class="logbookentry">
    <b>{{logbookentry.date}}</b>
    {{logbookentry.text|wiki_to_html}}</div>
</div>
</div>


@@ -7,7 +7,7 @@

{% block content %}
<h1>
  <a href="{{personexpedition.person.get_absolute_url}}">{{personexpedition.person|safe}}</a> :
  <a href="{{personexpedition.person.get_absolute_url}}">{{personexpedition.person}}</a> :
  <a href="{{personexpedition.expedition.get_absolute_url}}">{{personexpedition.expedition}}</a>
</h1>


@@ -8,12 +8,12 @@
<h2>Notable expoers</h2>
<table class="searchable">
<tr><th>Person</th><th>First</th><th>Last</th><th>Notability</th></tr>
{% for person in notablepersons|dictsortreversed:"notability" %}
{% for person in notablepersons %}
  <tr>
    <td><a href="{{ person.get_absolute_url }}">{{person|wiki_to_html_short}}</a></td>
    <td><a href="{{ person.first.get_absolute_url }}">{{ person.first.expedition.year }}</a></td>
    <td><a href="{{ person.last.get_absolute_url }}">{{ person.last.expedition.year }}</a></td>
    <td>{{person.notability|floatformat:2}}</td>
    <td>{{person.notability}}</td>
  </tr>
{% endfor %}
</table>
@@ -31,8 +31,8 @@
  <tr>
    <td><a href="{{ person.get_absolute_url }}">{{person|wiki_to_html_short}}</a></td>
    <td><a href="{{ person.first.get_absolute_url }}">{{person.first.expedition.year}}</a></td>
    <td><a href="{{ person.last.get_absolute_url }}">{{person.last.expedition.year}}</a></td>
    <td></td>
    <td><a href="{{ person.last.get_absolute_url }}">{{person.last.expedition.year}}</a></td>
    <td>{{ person.surveyedleglength }}</td>
  </tr>
{% endfor %}
</table>

@@ -41,7 +41,7 @@
  <td>{{survexblock.name}}</td>
  <td>
  {% if survexblock.expedition %}
    <a href="{{survexblock.expedition.get_absolute_url}}">{{survexblock.date|date:"D d M Y"}}</a>
    <a href="{{survexblock.expedition.get_absolute_url}}">{{survexblock.date}}</a>
  {% else %}
    {{survexblock.date}}
  {% endif %}

@@ -46,7 +46,7 @@ $(document).ready(function()
</p>
{% endif %}

<form id="codewikiform" action="" method="POST">{% csrf_token %}
<form id="codewikiform" action="" method="POST">
  <div class="codeframebit">{{form.code}}</div>
  <div style="display:none">{{form.filename}} {{form.dirname}} {{form.datetime}} {{form.outputtype}}</div>
  <input type="submit" name="diff" value="Diffy" />

8 urls.py
@@ -35,12 +35,12 @@ actualurlpatterns = patterns('',
    url(r'^newqmnumber/?$', views_other.ajax_QM_number, ),
    url(r'^lbo_suggestions/?$', logbook_entry_suggestions),
    #(r'^person/(?P<person_id>\d*)/?$', views_logbooks.person),
    url(r'^person/(?P<first_name>[A-Z]*[a-z\-\'&;]*)[^a-zA-Z]*(?P<last_name>[a-z\-\']*[^a-zA-Z]*[A-Z]*[a-z\-&;]*)/?', views_logbooks.person, name="person"),
    url(r'^person/(?P<first_name>[A-Z]*[a-z\-\']*)[^a-zA-Z]*(?P<last_name>[a-z\-\']*[^a-zA-Z]*[A-Z]*[a-z\-]*)/?', views_logbooks.person, name="person"),
    #url(r'^person/(\w+_\w+)$', views_logbooks.person, name="person"),

    url(r'^expedition/(\d+)$', views_logbooks.expedition, name="expedition"),
    url(r'^expeditions/?$', views_logbooks.ExpeditionListView.as_view(), name="expeditions"),
    url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z&;]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-zA-Z&;]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
    url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-z]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
    url(r'^logbookentry/(?P<date>.*)/(?P<slug>.*)/?$', views_logbooks.logbookentry,name="logbookentry"),
    url(r'^newlogbookentry/(?P<expeditionyear>.*)$', views_logbooks.newLogbookEntry, name="newLogBookEntry"),
    url(r'^editlogbookentry/(?P<expeditionyear>[^/]*)/(?P<pdate>[^/]*)/(?P<pslug>[^/]*)/$', views_logbooks.newLogbookEntry, name="editLogBookEntry"),
@@ -53,7 +53,7 @@ actualurlpatterns = patterns('',
    url(r'^getLogBookEntries/(?P<expeditionslug>.*)', views_logbooks.get_logbook_entries, name = "get_logbook_entries"),


    url(r'^cave/new/$', views_caves.edit_cave, name="newcave"),
    url(r'^cave/new/$', edit_cave, name="newcave"),
    url(r'^cave/(?P<cave_id>[^/]+)/?$', views_caves.cave, name="cave"),
    url(r'^caveslug/([^/]+)/?$', views_caves.caveSlug, name="caveSlug"),
    url(r'^cave/entrance/([^/]+)/?$', views_caves.caveEntrance),
@@ -71,7 +71,7 @@ actualurlpatterns = patterns('',
    # url(r'^jgtuploadfile$', view_surveys.jgtuploadfile, name="jgtuploadfile"),

    url(r'^cave/(?P<cave_id>[^/]+)/?(?P<ent_letter>[^/])$', ent),
    url(r'^cave/(?P<slug>[^/]+)/edit/$', views_caves.edit_cave, name="edit_cave"),
    url(r'^cave/(?P<slug>[^/]+)/edit/$', edit_cave, name="edit_cave"),
    #(r'^cavesearch', caveSearch),

16 utils.py
@@ -1,5 +1,4 @@
from django.conf import settings
from django.shortcuts import render
import random, re, logging
from troggle.core.models import CaveDescription

@@ -59,6 +58,21 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
    if not created and not instance.new_since_parsing:
        logging.info(str(instance) + " existed in the database unchanged since last parse. It was overwritten by the current script. \n")
    return (instance, created)
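Illustrative usage, not part of the commit; it mirrors the call in parsers/people.py above, with made-up attribute values.

    lookupAttribs = {'first_name': 'Phil', 'last_name': 'Sargent'}
    nonLookupAttribs = {'is_vfho': False}
    person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)
    # looked up (or created) by name; the non-lookup fields are applied
    # subject to the new_since_parsing rules above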

def render_with_context(req, *args, **kwargs):
    """this is the snippet from http://www.djangosnippets.org/snippets/3/

    Django uses Context, not RequestContext when you call render_to_response.
    We always want to use RequestContext, so that django adds the context from
    settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get
    necessary settings variables passed to each template. So we use a custom
    method, render_response instead of render_to_response. Hopefully future
    Django releases will make this unnecessary."""

    from django.shortcuts import render_to_response
    from django.template import RequestContext
    kwargs['context_instance'] = RequestContext(req)
    return render_to_response(*args, **kwargs)
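Illustrative usage, not part of the commit (the view and template names are hypothetical): it is called exactly like render_to_response, but with the request prepended so a RequestContext is used.

    def frontpage(request):
        return render_with_context(request, 'frontpage.html',
                                   {'expeditions': models.Expedition.objects.all()})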

re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)