mirror of https://expo.survex.com/repositories/troggle/.git synced 2025-12-14 18:17:06 +00:00

98 Commits

Author SHA1 Message Date
Sam Wenham
6984f66794 Updates required to move to django 1.8 2019-03-02 14:10:51 +00:00
Sam Wenham
6f42bd51e1 Revert (I hate hg!!!) 2019-02-26 20:43:18 +00:00
Sam Wenham
4e27c90f77 merge 2019-02-26 20:41:47 +00:00
Sam
7e1aa80551 Add docker readme, settings and update compose file
Fix views_logbooks.py
2019-02-26 19:19:01 +00:00
Rad
0afb21a093 Messing with millenialcaves.html or similar 2019-02-26 14:07:45 +00:00
Rad
a4c0b1129c Messing with millenialcaves.html or similar 2019-02-26 14:05:41 +00:00
Rad
0a170c8ed5 Messing with millenialcaves.html or similar 2019-02-26 12:50:19 +00:00
Rad
29de363cdc Messing with millenialcaves.html or similar 2019-02-26 12:47:50 +00:00
Rad
e9922fb97d Messing with millenialcaves.html or similar 2019-02-26 12:30:20 +00:00
Rad
c5025ad51d Messing with millenialcaves.html or similar 2019-02-26 12:29:46 +00:00
Rad
2b118a53a9 Messing with millenialcaves.html or similar 2019-02-26 12:23:12 +00:00
Rad
611ab346d0 Messing with millenialcaves.html or similar 2019-02-26 12:07:45 +00:00
Rad
46ab084f1d Messing with millenialcaves.html or similar 2019-02-26 12:03:17 +00:00
Rad
d7c7466f71 Messing with millenialcaves.html or similar 2019-02-26 12:01:55 +00:00
Rad
aa3061adaf Messing with millenialcaves.html or similar 2019-02-26 12:01:30 +00:00
Rad
ffaf9371b6 Messing with millenialcaves.html or similar 2019-02-26 10:57:02 +00:00
Rad
d269e92380 Messing with millenialcaves.html or similar 2019-02-26 10:02:57 +00:00
Rad
e082d1e122 Messing with millenialcaves.html or similar 2019-02-26 09:45:17 +00:00
Rad
f4da4021f1 Messing with millenialcaves.html or similar 2019-02-26 09:41:02 +00:00
Rad
4901d82a7d Messing with millenialcaves.html or similar 2019-02-26 02:03:26 +00:00
Rad
31f390d95e Messing with millenialcaves.html or similar 2019-02-26 02:01:09 +00:00
Rad
6f92fe7b7c Messing with millenialcaves.html or similar 2019-02-26 01:56:39 +00:00
Rad
e3d652939d Messing with millenialcaves.html or similar 2019-02-26 01:48:52 +00:00
Rad
60d8139a05 Messing with millenialcaves.html or similar 2019-02-26 01:46:54 +00:00
Rad
f03b6b4319 Messing with millenialcaves.html or similar 2019-02-26 01:46:05 +00:00
Rad
9d3f37a2ff Messing with millenialcaves.html or similar 2019-02-26 01:45:03 +00:00
Rad
74f88afb57 Messing with millenialcaves.html or similar 2019-02-26 01:43:54 +00:00
Rad
3466a46db5 Messing with millenialcaves.html or similar 2019-02-26 01:43:28 +00:00
Rad
49afebaf97 Messing with millenialcaves.html or similar 2019-02-26 01:41:15 +00:00
Rad
a4f6ad1d9f Messing with millenialcaves.html or similar 2019-02-26 01:37:52 +00:00
Rad
caa7b2c8b2 Messing with millenialcaves.html or similar 2019-02-26 01:35:55 +00:00
Rad
533446098f Messing with millenialcaves.html or similar 2019-02-26 01:34:09 +00:00
Rad
04a7e770c5 Messing with millenialcaves.html or similar 2019-02-26 01:30:32 +00:00
Rad
ec548db8a9 Messing with millenialcaves.html or similar 2019-02-26 01:18:47 +00:00
Rad
d6de8a3c34 Messing with millenialcaves.html or similar 2019-02-26 01:13:54 +00:00
Rad
0da8fa0d96 Messing with millenialcaves.html or similar 2019-02-26 01:12:14 +00:00
Rad
d714325eb2 Messing with millenialcaves.html or similar 2019-02-26 01:08:04 +00:00
Rad
2a23c72ee1 Messing with millenialcaves.html or similar 2019-02-26 01:07:18 +00:00
Rad
fea9d1095b Messing with millenialcaves.html or similar 2019-02-26 01:04:09 +00:00
Rad
a54a70749a Messing with millenialcaves.html or similar 2019-02-26 01:03:22 +00:00
Rad
52f5423743 Messing with millenialcaves.html or similar 2019-02-26 00:56:46 +00:00
Rad
55f8538413 Messing with millenialcaves.html or similar 2019-02-26 00:48:34 +00:00
Rad
e8ce3e7140 Messing with millenialcaves.html or similar 2019-02-26 00:47:35 +00:00
Rad
44e6fcac33 Messing with millenialcaves.html or similar 2019-02-26 00:45:56 +00:00
Rad
46830e903b Messing with millenialcaves.html or similar 2019-02-26 00:43:46 +00:00
Rad
656460e0ab Messing with millenialcaves.html or similar 2019-02-26 00:43:05 +00:00
Rad
6c94027a26 Messing with millenialcaves.html or similar 2019-02-26 00:35:28 +00:00
Rad
64954fa3e4 Messing with millenialcaves.html or similar 2019-02-26 00:33:37 +00:00
Rad
8c145d88ce Messing with millenialcaves.html or similar 2019-02-26 00:33:04 +00:00
Rad
e55b533504 Messing with millenialcaves.html or similar 2019-02-26 00:30:09 +00:00
Rad
74779788e0 Messing with millenialcaves.html or similar 2019-02-26 00:29:16 +00:00
Rad
f20bd3842a Messing with millenialcaves.html or similar 2019-02-26 00:23:23 +00:00
Rad
1370317813 Messing with millenialcaves.html or similar 2019-02-26 00:22:58 +00:00
Rad
af210768af Messing with millenialcaves.html or similar 2019-02-26 00:21:54 +00:00
Rad
df3a8744d6 Messing with millenialcaves.html or similar 2019-02-26 00:21:27 +00:00
Rad
503a9cddc5 Messing with millenialcaves.html or similar 2019-02-26 00:17:56 +00:00
Rad
a61ad6e7b8 Messing with millenialcaves.html or similar 2019-02-26 00:17:11 +00:00
Rad
83e489c425 Messing with millenialcaves.html or similar 2019-02-26 00:08:15 +00:00
Rad
0d2ac756e5 Messing with millenialcaves.html or similar 2019-02-26 00:04:27 +00:00
Rad
da55e1519e 2019-02-26 00:00:34 +00:00
Rad
b6ad46a37f 2019-02-25 23:55:06 +00:00
Rad
9bc3abbc79 2019-02-25 23:53:19 +00:00
Rad
ccc347eddc 2019-02-25 23:52:47 +00:00
Rad
a013f5bef2 2019-02-25 23:51:26 +00:00
Rad
75acd74d5b 2019-02-25 23:48:58 +00:00
Rad
0c63156428 2019-02-25 23:46:52 +00:00
Rad
8173c3c45d space/tab 2019-02-25 23:42:56 +00:00
Rad
f23700b1b7 trying to add new field 2019-02-25 23:40:53 +00:00
Rad
41e11c6c2e 2019-02-25 23:37:12 +00:00
Sam Wenham
0eb5e560d2 Merge 2019-02-25 23:34:10 +00:00
Sam Wenham
a61e66bb47 Start of moving databasereset to django management 2019-02-25 23:10:24 +00:00
Sam Wenham
f9dc4500d9 Get get_absolute_url in the correct place 2019-02-25 23:07:20 +00:00
Rad
d3f633e41d 2019-02-25 22:34:13 +00:00
Rad
61bd6e81f1 tab/space fix 2019-02-25 22:28:30 +00:00
Rad
edddfb7fc6 added Rad's playground 2019-02-25 22:24:33 +00:00
Rad
71d1719850 merge 2019-02-25 21:02:30 +00:00
Rad
7c2d336bd7 change to table 2019-02-25 20:58:32 +00:00
Sam Wenham
bebbad2448 Fix the All Survex page to work with 1623 area 2019-02-25 20:13:28 +00:00
Sam Wenham
b43bd58f22 Decode the url encoded # when looking at wallets 2019-02-24 19:50:45 +00:00
Sam Wenham
e59f8308ce Deal better with the wallet letter number combo of 2019#X01 2019-02-24 18:55:30 +00:00
Sam Wenham
f6d4ce8d0b Stop django moaning about unit tests from pre 1.6, like we have any anyway! 2019-02-24 16:48:12 +00:00
Sam Wenham
af22385c68 Fix survey scans
Remove the assert for folders in survey wallets, this does mean currently they
will be ignored by troggle.
2019-02-24 16:46:02 +00:00
Sam Wenham
8fd23008e3 Make the suryeys importer not explode 2019-02-24 14:29:14 +00:00
Sam Wenham
8f66837f6f Make things more compatiable with newer python
Fix the expeditions list
Improvements to make it compatiable with django 1.8
Bump the years to add 2018
Update the .hgignore file to ignore junk
2019-02-24 13:03:34 +00:00
Sam Wenham
670559ec87 Revert urls.py as it contains Django 1.8 upgrade code 2019-02-23 15:43:38 +00:00
Sam Wenham
7f92a7280d Prevent troggle adding the menu if there is one in the file
Add a Docker compose file to bring up a dev troggle easily
Various PEP improvments
2019-02-23 15:30:58 +00:00
Sam Wenham
019f8c0550 Don't create years that aren't here yet troggle goes boom 2018-06-20 18:14:13 +01:00
Sam Wenham
952af7adc5 Move the years on a bit 2018-06-20 18:11:12 +01:00
expoonserver
e3e75a40bf Add missing linefeed on survey-parsing error message 2018-06-18 23:43:20 +01:00
expoonserver
b4d3cb514c Make sure that cave parser only reads .html files in cave_data dir (to stop foo~ causing 'duplicate cave' error) 2018-06-18 23:17:05 +01:00
expoonserver
01f17dc1cc Add 'troggle' namespace to databasereset.py so it runs in django >1.5 2018-06-17 02:41:58 +01:00
expoonserver
c3300f7c96 FileUploadForm does not work with django 1.7.
It tries to use database during class initialisation.
removed it for now - not sure if it's important...
2018-06-17 02:24:00 +01:00
expoonserver
94c232c775 django.setup needs to be run before any attempt to use database 2018-06-17 02:23:02 +01:00
expoonserver
4f665070d7 imports must specify the application name i nlater django versions.
databasereset updated accordingly.
2018-06-16 19:00:26 +01:00
Sam Wenham
bfc867826d Add the extra setting for the threed cache to all the template configs 2018-04-20 20:58:05 +01:00
Sam Wenham
af13e84c74 Fix the django for the spinny js cave viewer.
Make the paths settings (don't hard code things like this!!)
Add " round spinny urls from the late merge (the rest were done for the move off 1.4.2
2018-04-20 20:55:12 +01:00
Sam Wenham
bcaa4b27d2 Merge with django-upgrade 2018-04-17 22:19:20 +01:00
expoonserver
d0e0eee15a Add CaveView spinny caves view to each troggle cave page 2018-04-17 21:57:02 +01:00
51 changed files with 1224 additions and 852 deletions


@@ -7,3 +7,10 @@ localsettings.py
*~
parsing_log.txt
troggle
troggle_log.txt
.idea/*
*.orig
media/images/*
.vscode/*
.swp
imagekit-off/


@@ -18,41 +18,50 @@ class TroggleModelAdmin(admin.ModelAdmin):
class Media:
js = ('jquery/jquery.min.js','js/QM_helper.js')
class RoleInline(admin.TabularInline):
model = SurvexPersonRole
extra = 4
class SurvexBlockAdmin(TroggleModelAdmin):
inlines = (RoleInline,)
class ScannedImageInline(admin.TabularInline):
model = ScannedImage
extra = 4
class OtherCaveInline(admin.TabularInline):
model = OtherCaveName
extra = 1
class SurveyAdmin(TroggleModelAdmin):
inlines = (ScannedImageInline,)
search_fields = ('expedition__year','wallet_number')
class QMsFoundInline(admin.TabularInline):
model=QM
fk_name='found_by'
fields=('number','grade','location_description','comment')#need to add foreignkey to cave part
extra=1
class PhotoInline(admin.TabularInline):
model = DPhoto
exclude = ['is_mugshot' ]
extra = 1
class PersonTripInline(admin.TabularInline):
model = PersonTrip
raw_id_fields = ('personexpedition',)
extra = 1
#class LogbookEntryAdmin(VersionAdmin):
class LogbookEntryAdmin(TroggleModelAdmin):
prepopulated_fields = {'slug':("title",)}
@@ -72,17 +81,18 @@ class LogbookEntryAdmin(TroggleModelAdmin):
def export_logbook_entries_as_txt(modeladmin, request, queryset):
response=downloadLogbook(request=request, queryset=queryset, extension='txt')
return response
class PersonExpeditionInline(admin.TabularInline):
model = PersonExpedition
extra = 1
class PersonAdmin(TroggleModelAdmin):
search_fields = ('first_name','last_name')
inlines = (PersonExpeditionInline,)
class QMAdmin(TroggleModelAdmin):
search_fields = ('found_by__cave__kataster_number','number','found_by__date')
list_display = ('__unicode__','grade','found_by','ticked_off_by')
@@ -91,17 +101,21 @@ class QMAdmin(TroggleModelAdmin):
list_per_page = 20
raw_id_fields=('found_by','ticked_off_by')
class PersonExpeditionAdmin(TroggleModelAdmin):
search_fields = ('person__first_name','expedition__year')
class CaveAdmin(TroggleModelAdmin):
search_fields = ('official_name','kataster_number','unofficial_number')
inlines = (OtherCaveInline,)
extra = 4
class EntranceAdmin(TroggleModelAdmin):
search_fields = ('caveandentrance__cave__kataster_number',)
admin.site.register(DPhoto)
admin.site.register(Cave, CaveAdmin)
admin.site.register(Area)
@@ -125,17 +139,20 @@ admin.site.register(SurvexStation)
admin.site.register(SurvexScansFolder)
admin.site.register(SurvexScanSingle)
def export_as_json(modeladmin, request, queryset):
response = HttpResponse(mimetype="text/json")
response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
serializers.serialize("json", queryset, stream=response)
return response
def export_as_xml(modeladmin, request, queryset):
response = HttpResponse(mimetype="text/xml")
response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
serializers.serialize("xml", queryset, stream=response)
return response
#admin.site.add_action(export_as_xml)
#admin.site.add_action(export_as_json)


@@ -26,7 +26,7 @@ def listdir(*path):
else:
c = ""
c = c.replace("#", "%23")
print "FILE: ", settings.FILES + "listdir/" + c
print("FILE: ", settings.FILES + "listdir/" + c)
return urllib.urlopen(settings.FILES + "listdir/" + c).read()
def dirsAsList(*path):


@@ -148,32 +148,32 @@ def get_name(pe):
else:
return pe.person.first_name
class UploadFileForm(forms.Form):
title = forms.CharField(max_length=50)
file = forms.FileField()
html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
lon_utm = forms.FloatField(required=False)
lat_utm = forms.FloatField(required=False)
slug = forms.CharField(max_length=50)
date = forms.DateField(required=False)
#class UploadFileForm(forms.Form):
# title = forms.CharField(max_length=50)
# file = forms.FileField()
# html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
# lon_utm = forms.FloatField(required=False)
# lat_utm = forms.FloatField(required=False)
# slug = forms.CharField(max_length=50)
# date = forms.DateField(required=False)
caves = [cave.slug for cave in Cave.objects.all()]
caves.sort()
caves = ["-----"] + caves
cave = forms.ChoiceField([(c, c) for c in caves], required=False)
# caves = [cave.slug for cave in Cave.objects.all()]
# caves.sort()
# caves = ["-----"] + caves
# cave = forms.ChoiceField([(c, c) for c in caves], required=False)
entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
# entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
# qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
expeditions = [e.year for e in Expedition.objects.all()]
expeditions.sort()
expeditions = ["-----"] + expeditions
expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)
# expeditions = [e.year for e in Expedition.objects.all()]
# expeditions.sort()
# expeditions = ["-----"] + expeditions
# expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)
logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
# logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
# person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
survey_point = forms.CharField()
# survey_point = forms.CharField()


@@ -0,0 +1,182 @@
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from troggle.core.models import Cave
import settings
databasename=settings.DATABASES['default']['NAME']
expouser=settings.EXPOUSER
expouserpass=settings.EXPOUSERPASS
expouseremail=settings.EXPOUSER_EMAIL
class Command(BaseCommand):
help = 'This is normal usage, clear database and reread everything'
option_list = BaseCommand.option_list + (
make_option('--foo',
action='store_true',
dest='foo',
default=False,
help='test'),
)
def add_arguments(self, parser):
parser.add_argument(
'--foo',
action='store_true',
dest='foo',
help='Help text',
)
def handle(self, *args, **options):
print(args)
print(options)
if "desc" in args:
self.resetdesc()
elif "scans" in args:
self.import_surveyscans()
elif "caves" in args:
self.reload_db()
self.make_dirs()
self.pageredirects()
self.import_caves()
elif "people" in args:
self.import_people()
elif "QMs" in args:
self.import_QMs()
elif "tunnel" in args:
self.import_tunnelfiles()
elif "reset" in args:
self.reset()
elif "survex" in args:
self.import_survex()
elif "survexpos" in args:
import parsers.survex
parsers.survex.LoadPos()
elif "logbooks" in args:
self.import_logbooks()
elif "autologbooks" in args:
self.import_auto_logbooks()
elif "dumplogbooks" in args:
self.dumplogbooks()
elif "writeCaves" in args:
self.writeCaves()
elif "foo" in args:
self.stdout.write('Tesing....')
else:
self.stdout.write("%s not recognised" % args)
self.usage(options)
def reload_db():
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
try:
os.remove(databasename)
except OSError:
pass
else:
cursor = connection.cursor()
cursor.execute("DROP DATABASE %s" % databasename)
cursor.execute("CREATE DATABASE %s" % databasename)
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
cursor.execute("USE %s" % databasename)
management.call_command('migrate', interactive=False)
# management.call_command('syncdb', interactive=False)
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = True
user.is_superuser = True
user.save()
def make_dirs():
"""Make directories that troggle requires"""
# should also deal with permissions here.
if not os.path.isdir(settings.PHOTOS_ROOT):
os.mkdir(settings.PHOTOS_ROOT)
def import_caves():
import parsers.caves
print("importing caves")
parsers.caves.readcaves()
def import_people():
import parsers.people
parsers.people.LoadPersonsExpos()
def import_logbooks():
# The below line was causing errors I didn't understand (it said LOGFILE was a string), and I couldn't be bothered to figure
# what was going on so I just catch the error with a try. - AC 21 May
try:
settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() + '\n' + '-' * 60)
except:
pass
import parsers.logbooks
parsers.logbooks.LoadLogbooks()
def import_survex():
import parsers.survex
parsers.survex.LoadAllSurvexBlocks()
parsers.survex.LoadPos()
def import_QMs():
import parsers.QMs
def import_surveys():
import parsers.surveys
parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
def import_surveyscans():
import parsers.surveys
parsers.surveys.LoadListScans()
def import_tunnelfiles():
import parsers.surveys
parsers.surveys.LoadTunnelFiles()
def reset():
""" Wipe the troggle database and import everything from legacy data
"""
reload_db()
make_dirs()
pageredirects()
import_caves()
import_people()
import_surveyscans()
import_survex()
import_logbooks()
import_QMs()
try:
import_tunnelfiles()
except:
print("Tunnel files parser broken.")
import_surveys()
def pageredirects():
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
f.save()
def writeCaves():
for cave in Cave.objects.all():
cave.writeDataFile()
for entrance in Entrance.objects.all():
entrance.writeDataFile()
def usage(self, parser):
print("""Usage is 'manage.py reset_db <command>'
where command is:
reset - this is normal usage, clear database and reread everything
desc
caves - read in the caves
logbooks - read in the logbooks
autologbooks
dumplogbooks
people
QMs - read in the QM files
resetend
scans - read in the scanned surveynotes
survex - read in the survex files
survexpos
tunnel - read in the Tunnel files
writeCaves
""")


@@ -10,7 +10,7 @@ from django.db.models import Min, Max
from django.conf import settings
from decimal import Decimal, getcontext
from django.core.urlresolvers import reverse
from imagekit.models import ImageModel
from imagekit.models import ProcessedImageField #ImageModel
from django.template import Context, loader
import settings
getcontext().prec=2 #use 2 significant figures for decimal calculations
@@ -30,7 +30,7 @@ def get_related_by_wikilinks(wiki_text):
number = qmdict['number'])
res.append(qm)
except QM.DoesNotExist:
print 'fail on '+str(wikilink)
print('fail on '+str(wikilink))
return res
@@ -57,7 +57,7 @@ class TroggleModel(models.Model):
class Meta:
abstract = True
class TroggleImageModel(ImageModel):
class TroggleImageModel(models.Model):
new_since_parsing = models.BooleanField(default=False, editable=False)
def object_name(self):
@@ -141,7 +141,6 @@ class Person(TroggleModel):
class Meta:
verbose_name_plural = "People"
class Meta:
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
def __unicode__(self):
@@ -398,6 +397,7 @@ class Cave(TroggleModel):
url = models.CharField(max_length=200,blank=True,null=True)
filename = models.CharField(max_length=200)
#class Meta:
# unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
# FIXME Kataster Areas and CUCC defined sub areas need seperating
@@ -529,11 +529,11 @@ class Cave(TroggleModel):
def getCaveByReference(reference):
areaname, code = reference.split("-", 1)
print areaname, code
print(areaname, code)
area = Area.objects.get(short_name = areaname)
print area
print(area)
foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
print list(foundCaves)
print(list(foundCaves))
assert len(foundCaves) == 1
return foundCaves[0]
@@ -801,11 +801,13 @@ class DPhoto(TroggleImageModel):
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
def get_scan_path(instance, filename):
year=instance.survey.expedition.year
#print "WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number
number=str(instance.survey.wallet_number) + str(instance.survey.wallet_letter) #two strings formatting because convention is 2009#01 or 2009#X01
return os.path.join('./',year,year+r'#'+number,instance.contents+str(instance.number_in_wallet)+r'.jpg')
#print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
number=str(instance.survey.wallet_number)
if str(instance.survey.wallet_letter) != "None":
number=str(instance.survey.wallet_letter) + number #two strings formatting because convention is 2009#01 or 2009#X01
return os.path.join('./',year,year+r'#'+number,str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')
class ScannedImage(TroggleImageModel):
class ScannedImage(TroggleImageModel):
file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
scanned_by = models.ForeignKey(Person,blank=True, null=True)
scanned_on = models.DateField(null=True)


@@ -6,6 +6,7 @@ import os, stat
import re
from troggle.core.models import SurvexScansFolder, SurvexScanSingle, SurvexBlock, TunnelFile
import parsers.surveys
import urllib
# inline fileabstraction into here if it's not going to be useful anywhere else
# keep things simple and ignore exceptions everywhere for now
@@ -17,13 +18,13 @@ def getMimeType(extension):
"html": "text/html",
}[extension]
except:
print "unknown file type"
print("unknown file type")
return "text/plain"
def listdir(request, path):
#try:
return HttpResponse(fileAbstraction.listdir(path), mimetype = "text/plain")
return HttpResponse(fileAbstraction.listdir(path), content_type="text/plain")
#except:
# raise Http404
@@ -33,7 +34,7 @@ def upload(request, path):
def download(request, path):
#try:
return HttpResponse(fileAbstraction.readFile(path), mimetype=getMimeType(path.split(".")[-1]))
return HttpResponse(fileAbstraction.readFile(path), content_type=getMimeType(path.split(".")[-1]))
#except:
# raise Http404
@@ -46,6 +47,7 @@ extmimetypes = {".txt": "text/plain",
".html": "text/html",
".png": "image/png",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
}
# dead
@@ -93,9 +95,9 @@ def jgtfile(request, f):
fin = open(fp)
ftext = fin.read()
fin.close()
return HttpResponse(ftext, mimetype=mimetype)
return HttpResponse(ftext, content_type=mimetype)
return HttpResponse("unknown file::%s::" % f, mimetype = "text/plain")
return HttpResponse("unknown file::%s::" % f, content_type = "text/plain")
def UniqueFile(fname):
@@ -165,13 +167,13 @@ def jgtuploadfile(request):
def surveyscansfolder(request, path):
#print [ s.walletname for s in SurvexScansFolder.objects.all() ]
survexscansfolder = SurvexScansFolder.objects.get(walletname=path)
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
def surveyscansingle(request, path, file):
survexscansfolder = SurvexScansFolder.objects.get(walletname=path)
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
return HttpResponse(content=open(survexscansingle.ffile), mimetype="image/png")
return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
#return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
def surveyscansfolders(request):
@@ -185,12 +187,12 @@ def tunneldata(request):
def tunnelfile(request, path):
tunnelfile = TunnelFile.objects.get(tunnelpath=path)
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
return HttpResponse(content=open(tfile), mimetype="text/plain")
return HttpResponse(content=open(tfile), content_type="text/plain")
def tunnelfileupload(request, path):
tunnelfile = TunnelFile.objects.get(tunnelpath=path)
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
@@ -202,13 +204,13 @@ def tunnelfileupload(request, path):
uploadedfile = request.FILES.values()[0]
if uploadedfile.field_name != "sketch":
return HttpResponse(content="Error: non-sketch file uploaded", mimetype="text/plain")
return HttpResponse(content="Error: non-sketch file uploaded", content_type="text/plain")
if uploadedfile.content_type != "text/plain":
return HttpResponse(content="Error: non-plain content type", mimetype="text/plain")
return HttpResponse(content="Error: non-plain content type", content_type="text/plain")
# could use this to add new files
if os.path.split(path)[1] != uploadedfile.name:
return HttpResponse(content="Error: name disagrees", mimetype="text/plain")
return HttpResponse(content="Error: name disagrees", content_type="text/plain")
orgsize = tunnelfile.filesize # = os.stat(tfile)[stat.ST_SIZE]
@@ -226,7 +228,7 @@ def tunnelfileupload(request, path):
uploadedfile.close()
message = "File size %d overwritten with size %d" % (orgsize, tunnelfile.filesize)
return HttpResponse(content=message, mimetype="text/plain")
return HttpResponse(content=message, content_type="text/plain")


@@ -17,8 +17,9 @@ import re, urlparse
from django.shortcuts import get_object_or_404
import settings
from PIL import Image, ImageDraw, ImageFont
import string, os, sys
import string, os, sys, subprocess
def getCave(cave_id):
"""Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm."""
@@ -59,12 +60,32 @@ def caveindex(request):
caves1626.sort(caveCmp)
return render_with_context(request,'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves':notablecaves, 'cavepage': True})
def millenialcaves(request):
#RW messing around area
return HttpResponse("Test text", content_type="text/plain")
def cave3d(request, cave_id=''):
cave = getCave(cave_id)
survexfilename = settings.SURVEX_DATA + cave.survex_file
threedfilename = settings.THREEDCACHEDIR + '%s.3d' % cave_id
if True or os.path.getmtime(survexfilename) > os.path.getmtime(threedfilename):
subprocess.call(["cavern", "--output=%s" % threedfilename, survexfilename])
test_file = open(threedfilename, 'rb')
response = HttpResponse(content=test_file, content_type='application/3d')#mimetype is replaced by content_type for django 1.7
response['Content-Disposition'] = 'attachment; filename=%s.3d' % cave_id
# response['X-Sendfile'] = "%s.3d" % cave_id
# It's usually a good idea to set the 'Content-Length' header too.
# You can also set any other required headers: Cache-Control, etc.
return response
def cave(request, cave_id='', offical_name=''):
cave=getCave(cave_id)
if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
return render_with_context(request,'nonpublic.html', {'instance': cave, 'cavepage': True})
return render_with_context(request,'nonpublic.html', {'instance': cave, 'cavepage': True, 'cave_id': cave_id})
else:
return render_with_context(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True})
return render_with_context(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True, 'cave_id': cave_id})
def caveEntrance(request, slug):
cave = Cave.objects.get(caveslug__slug = slug)
@@ -203,7 +224,7 @@ def qm(request,cave_id,qm_id,year,grade=None):
return HttpResponseRedirect(url)
def ent(request, cave_id, ent_letter):
cave = Cave.objects.filter(kataster_number = cave_id)[0]
cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]
@@ -320,8 +341,8 @@ for n in maps.keys():
W = (R-L)/2
H = (T-B)/2
for i in range(2):
for j in range(2):
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
for j in range(2):
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
# Keys in the order in which we want the maps output
mapcodes = ["all", "grieß","40", "76", "204", "tc"]
# Field codes


@@ -16,9 +16,18 @@ from django.template.defaultfilters import slugify
from troggle.helper import login_required_if_public
import datetime
from django.views.generic.list import ListView
from django.utils import timezone
# Django uses Context, not RequestContext when you call render_to_response. We always want to use RequestContext, so that django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get necessary settings variables passed to each template. So we use a custom method, render_response instead of render_to_response. Hopefully future Django releases will make this unnecessary.
#from troggle.alwaysUseRequestContext import render_response
# Django uses Context, not RequestContext when you call render
# to_response. We always want to use RequestContext, so that
# django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS.
# This way we automatically get necessary settings variables passed
# to each template. So we use a custom method, render_response
# instead of render_to_response. Hopefully future Django releases
# will make this unnecessary.
# from troggle.alwaysUseRequestContext import render_response
import re
@@ -50,13 +59,13 @@ def personindex(request):
def expedition(request, expeditionname):
expedition = Expedition.objects.get(year=int(expeditionname))
this_expedition = Expedition.objects.get(year=int(expeditionname))
expeditions = Expedition.objects.all()
personexpeditiondays = [ ]
dateditems = list(expedition.logbookentry_set.all()) + list(expedition.survexblock_set.all())
dateditems = list(this_expedition.logbookentry_set.all()) + list(this_expedition.survexblock_set.all())
dates = list(set([item.date for item in dateditems]))
dates.sort()
for personexpedition in expedition.personexpedition_set.all():
for personexpedition in this_expedition.personexpedition_set.all():
prow = [ ]
for date in dates:
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
@@ -68,24 +77,33 @@ def expedition(request, expeditionname):
message = ""
if "reload" in request.GET:
message = LoadLogbookForExpedition(expedition)
return render_with_context(request,'expedition.html', {'expedition': expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
message = LoadLogbookForExpedition(this_expedition)
return render_with_context(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
def get_absolute_url(self):
return ('expedition', (expedition.year))
def get_absolute_url(self):
return ('expedition', (expedition.year))
class ExpeditionListView(ListView):
model = Expedition
def get_context_data(self, **kwargs):
context = super(ExpeditionListView, self).get_context_data(**kwargs)
context['now'] = timezone.now()
return context
def person(request, first_name='', last_name='', ):
person = Person.objects.get(first_name = first_name, last_name = last_name)
this_person = Person.objects.get(first_name = first_name, last_name = last_name)
#This is for removing the reference to the user's profile, in case they set it to the wrong person
# This is for removing the reference to the user's profile, in case they set it to the wrong person
if request.method == 'GET':
if request.GET.get('clear_profile')=='True':
person.user=None
person.save()
this_person.user=None
this_person.save()
return HttpResponseRedirect(reverse('profiles_select_profile'))
return render_with_context(request,'person.html', {'person': person, })
return render_with_context(request,'person.html', {'person': this_person, })
def GetPersonChronology(personexpedition):
@@ -115,20 +133,20 @@ def GetPersonChronology(personexpedition):
def personexpedition(request, first_name='', last_name='', year=''):
person = Person.objects.get(first_name = first_name, last_name = last_name)
expedition = Expedition.objects.get(year=year)
personexpedition = person.personexpedition_set.get(expedition=expedition)
this_expedition = Expedition.objects.get(year=year)
personexpedition = person.personexpedition_set.get(expedition=this_expedition)
personchronology = GetPersonChronology(personexpedition)
return render_with_context(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})
def logbookentry(request, date, slug):
logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
if len(logbookentry)>1:
return render_with_context(request, 'object_list.html',{'object_list':logbookentry})
if len(this_logbookentry)>1:
return render_with_context(request, 'object_list.html',{'object_list':this_logbookentry})
else:
logbookentry=logbookentry[0]
return render_with_context(request, 'logbookentry.html', {'logbookentry': logbookentry})
this_logbookentry=this_logbookentry[0]
return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})
def logbookSearch(request, extra):
@@ -196,7 +214,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
'expeditionyear': expeditionyear})
f.write(template.render(context))
f.close()
print logbookparsers.parseAutoLogBookEntry(filename)
print(logbookparsers.parseAutoLogBookEntry(filename))
return HttpResponseRedirect(reverse('expedition', args=[expedition.year])) # Redirect after POST
else:
if pslug and pdate:


@@ -1,5 +1,5 @@
from troggle.core.models import Cave, Expedition, Person, LogbookEntry, PersonExpedition, PersonTrip, DPhoto, QM
from troggle.core.forms import UploadFileForm
#from troggle.core.forms import UploadFileForm
from django.conf import settings
from django import forms
from django.template import loader, Context
@@ -87,8 +87,8 @@ def downloadSurveys(request):
def downloadLogbook(request,year=None,extension=None,queryset=None):
if year:
expedition=Expedition.objects.get(year=year)
logbook_entries=LogbookEntry.objects.filter(expedition=expedition)
current_expedition=Expedition.objects.get(year=year)
logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)
filename='logbook'+year
elif queryset:
logbook_entries=queryset
@@ -259,8 +259,8 @@ def newFile(request, pslug = None):
# "TU": py.time_underground,
# "author": py.is_logbook_entry_author}
# for py in previouslbe.persontrip_set.all()])
else:
fileform = UploadFileForm() # An unbound form
# else:
# fileform = UploadFileForm() # An unbound form
return render_with_context(request, 'editfile.html', {
'fileForm': fileform,


@@ -77,16 +77,16 @@ class SvxForm(forms.Form):
def DiffCode(self, rcode):
code = self.GetDiscCode()
difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
difflist = [ diffline.strip() for diffline in difftext if not re.match("\s*$", diffline) ]
difflist = [ diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline) ]
return difflist
def SaveCode(self, rcode):
fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
if not os.path.isfile(fname):
# only save if appears valid
if re.search("\[|\]", rcode):
if re.search(r"\[|\]", rcode):
return "Error: clean up all []s from the text"
mbeginend = re.search("(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
if not mbeginend:
return "Error: no begin/end block here"
if mbeginend.group(1) != mbeginend.group(2):
@@ -98,7 +98,7 @@ class SvxForm(forms.Form):
return "SAVED"
def Process(self):
print "....\n\n\n....Processing\n\n\n"
print("....\n\n\n....Processing\n\n\n")
cwd = os.getcwd()
os.chdir(os.path.split(settings.SURVEX_DATA + self.data['filename'])[0])
os.system(settings.CAVERN + " --log " + settings.SURVEX_DATA + self.data['filename'] + ".svx")
@@ -137,13 +137,13 @@ def svx(request, survex_file):
if not difflist:
message = "OUTPUT FROM PROCESSING"
logmessage = form.Process()
print logmessage
print(logmessage)
else:
message = "SAVE FILE FIRST"
form.data['code'] = rcode
if "save" in rform.data:
if request.user.is_authenticated():
#print "sssavvving"
#print("sssavvving")
message = form.SaveCode(rcode)
else:
message = "You do not have authority to save this file"
@@ -163,7 +163,7 @@ def svx(request, survex_file):
difflist.insert(0, message)
#print [ form.data['code'] ]
svxincludes = re.findall('\*include\s+(\S+)(?i)', form.data['code'] or "")
svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")
vmap = {'settings': settings,
'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
@@ -256,7 +256,7 @@ def identifycavedircontents(gcavedir):
# direct local non-database browsing through the svx file repositories
# perhaps should use the database and have a reload button for it
def survexcaveslist(request):
cavesdir = os.path.join(settings.SURVEX_DATA, "caves")
cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
#cavesdircontents = { }
onefilecaves = [ ]
@@ -264,9 +264,11 @@ def survexcaveslist(request):
subdircaves = [ ]
# first sort the file list
fnumlist = [ (-int(re.match("\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
fnumlist.sort()
print(fnumlist)
# go through the list and identify the contents of each cave directory
for num, cavedir in fnumlist:
if cavedir in ["144", "40"]:
@@ -278,7 +280,7 @@ def survexcaveslist(request):
survdirobj = [ ]
for lsubsvx in subsvx:
survdirobj.append(("caves/"+cavedir+"/"+lsubsvx, lsubsvx))
survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
# caves with subdirectories
if subdirs:
@@ -288,7 +290,7 @@ def survexcaveslist(request):
assert not dsubdirs
lsurvdirobj = [ ]
for lsubsvx in dsubsvx:
lsurvdirobj.append(("caves/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
@@ -297,6 +299,8 @@ def survexcaveslist(request):
multifilecaves.append((survdirobj[0], survdirobj[1:]))
# single file caves
else:
#print("survdirobj = ")
#print(survdirobj)
onefilecaves.append(survdirobj[0])
return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })


@@ -2,12 +2,12 @@ import os
import time
import settings
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
from django.core import management
from django.db import connection
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse
from troggle.core.models import Cave, Entrance
import troggle.flatpages.models
@@ -22,13 +22,14 @@ def reload_db():
os.remove(databasename)
except OSError:
pass
else:
else:
cursor = connection.cursor()
cursor.execute("DROP DATABASE %s" % databasename)
cursor.execute("CREATE DATABASE %s" % databasename)
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
cursor.execute("USE %s" % databasename)
management.call_command('syncdb', interactive=False)
management.call_command('migrate', interactive=False)
#management.call_command('syncdb', interactive=False)
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = True
user.is_superuser = True
@@ -42,7 +43,7 @@ def make_dirs():
def import_caves():
import parsers.caves
print "importing caves"
print("importing caves")
parsers.caves.readcaves()
def import_people():
@@ -96,7 +97,7 @@ def reset():
try:
import_tunnelfiles()
except:
print "Tunnel files parser broken."
print("Tunnel files parser broken.")
import_surveys()
@@ -104,18 +105,18 @@ def reset():
def import_auto_logbooks():
import parsers.logbooks
import os
for pt in core.models.PersonTrip.objects.all():
for pt in troggle.core.models.PersonTrip.objects.all():
pt.delete()
for lbe in core.models.LogbookEntry.objects.all():
for lbe in troggle.core.models.LogbookEntry.objects.all():
lbe.delete()
for expedition in core.models.Expedition.objects.all():
for expedition in troggle.core.models.Expedition.objects.all():
directory = os.path.join(settings.EXPOWEB,
"years",
expedition.year,
"autologbook")
for root, dirs, filenames in os.walk(directory):
for filename in filenames:
print os.path.join(root, filename)
print(os.path.join(root, filename))
parsers.logbooks.parseAutoLogBookEntry(os.path.join(root, filename))
#Temporary function until definative source of data transfered.
@@ -127,10 +128,10 @@ def dumplogbooks():
return pe.nickname
else:
return pe.person.first_name
for lbe in core.models.LogbookEntry.objects.all():
for lbe in troggle.core.models.LogbookEntry.objects.all():
dateStr = lbe.date.strftime("%Y-%m-%d")
directory = os.path.join(settings.EXPOWEB,
"years",
"years",
lbe.expedition.year,
"autologbook")
if not os.path.isdir(directory):
@@ -138,7 +139,7 @@ def dumplogbooks():
filename = os.path.join(directory,
dateStr + "." + slugify(lbe.title)[:50] + ".html")
if lbe.cave:
print lbe.cave.reference()
print(lbe.cave.reference())
trip = {"title": lbe.title, "html":lbe.text, "cave": lbe.cave.reference(), "caveOrLocation": "cave"}
else:
trip = {"title": lbe.title, "html":lbe.text, "location":lbe.place, "caveOrLocation": "location"}
@@ -156,7 +157,7 @@ def dumplogbooks():
def pageredirects():
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
f = flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
f.save()
def writeCaves():
@@ -166,7 +167,7 @@ def writeCaves():
entrance.writeDataFile()
def usage():
print """Usage is 'python databaseReset.py <command>'
print("""Usage is 'python databaseReset.py <command>'
where command is:
reset - this is normal usage, clear database and reread everything
desc
@@ -180,13 +181,16 @@ def usage():
scans - read in the scanned surveynotes
survex - read in the survex files
survexpos
surveys
tunnel - read in the Tunnel files
writeCaves
"""
""")
if __name__ == "__main__":
import core.models
import troggle.core.models
import sys
import django
django.setup()
if "desc" in sys.argv:
resetdesc()
elif "scans" in sys.argv:
@@ -210,12 +214,12 @@ if __name__ == "__main__":
try:
import_tunnelfiles()
except:
print "Tunnel files parser broken."
print("Tunnel files parser broken.")
import_surveys()
import_descriptions()
parse_descriptions()
elif "survex" in sys.argv:
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
#management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
import_survex()
elif "survexpos" in sys.argv:
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
@@ -230,10 +234,12 @@ if __name__ == "__main__":
dumplogbooks()
elif "writeCaves" in sys.argv:
writeCaves()
elif "surveys" in sys.argv:
import_surveys()
elif "help" in sys.argv:
usage()
else:
print "%s not recognised" % sys.argv
print("%s not recognised" % sys.argv)
usage()


@@ -23,4 +23,6 @@ EXPOSE 8000
WORKDIR /expo/troggle
#CMD ["python","manage.py","runserver","0.0.0.0:8000"]
#CMD ["python","manage.py","migrate"]
ENTRYPOINT ["python","manage.py","runserver","0.0.0.0:8000"]

docker/README.md (new file)

@@ -0,0 +1,82 @@
# Running troggle on Docker
## Install
First you need to install
- [docker-ce](https://docs.docker.com/install/)
- [docker-compose](https://docs.docker.com/compose/install/)
If you don't want to type sudo for every docker command (you don't), you need to add your user to the docker group.
To do this:
- Create the docker group.
```bash
$ sudo groupadd docker
```
- Add your user to the docker group.
```bash
$ sudo usermod -aG docker $USER
```
## Setup
Check out all 4 of the expo repos into one folder ([see here for repo cloning instructions](http://expo.survex.com/handbook/manual.html#quickstart)), e.g.
```
$ ~/expo/loser
/troggle
/expoweb
/tunnel
```
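As a minimal sketch (using the mirror URL shown at the top of this page for troggle; the clone URLs for the other three repos are on the handbook page linked above), getting troggle into that layout might look like:
```bash
mkdir -p ~/expo && cd ~/expo
# clone troggle from the expo server mirror (URL assumed from the header of this page)
git clone https://expo.survex.com/repositories/troggle/.git troggle
```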
In the troggle dir, copy `localsettingsdocker.py` to `localsettings.py`.
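For example, assuming troggle was checked out to `~/expo/troggle` as in the layout above:
```bash
cd ~/expo/troggle
# use the Docker-oriented settings as the active local settings
cp localsettingsdocker.py localsettings.py
```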
In the folder you checked out all the repos into (`expo` in this example), create a folder called `expofiles`, and inside that a folder called `surveyscans`, e.g.
```bash
cd ~/expo
mkdir -p expofiles/surveyscans
```
## Starting the containers
To start the containers run
```bash
$ cd ~/expo/troggle/docker
$ docker-compose up
```
You will now have a working troggle, but with no data. To import the data you need a shell inside the container; run
```bash
$ docker exec -it docker_troggle_1 /bin/bash
```
This will give you a shell inside the troggle container
(You may get a warning like `bash: warning: setlocale: LC_ALL: cannot change locale (en_GB.UTF-8)`; this can be ignored.)
To import the data into troggle, now run
```bash
$ python databaseReset.py reset
```
and wait... this takes a while.
The MySQL database is stored in a docker volume, so once the import has run through once the database will remain full of expo data even if you restart the containers.
## Using your new dev setup
Even whilst the import is running you can browse to [http://localhost:8000](http://localhost:8000).
Any changes made to files in your checkouts will be automatically loaded in the container and served. Sometimes changes to the python files will require the system to reload, so refresh a couple of times before declaring you have broken something.
If you edit files from within the container they will have their ownership changed to root and may become uneditable to your user (you will have to become root to fix this), so don't do this!
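If that does happen, one way to put things right is to reset ownership from outside the container (a sketch, assuming your checkouts live under `~/expo` as in this example):
```bash
# re-take ownership of anything the container wrote as root
sudo chown -R "$USER":"$USER" ~/expo
```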
## Stopping the containers
To stop the running containers, press Ctrl-C.
## Killing it all
If you get it into a state where you want to start again, run
```bash
$ docker-compose down
```
to destroy the containers
and
```bash
$ docker volume ls
$ docker volume rm docker_expo-mysqldb
```
to remove the database volume.

docker/compose/mysql.env (new file)

@@ -0,0 +1,5 @@
MYSQL_ROOT_PASSWORD=expo123
MYSQL_DATABASE=troggle
MYSQL_USER=troggleuser
MYSQL_PASSWORD=expo123

docker/docker-compose.yml (new file)

@@ -0,0 +1,21 @@
version: '3'
services:
troggle:
restart: always
build: .
ports:
- "8000:8000"
volumes:
- ../..:/expo
links:
- expo-mysql
expo-mysql:
restart: always
image: "mariadb"
env_file:
- compose/mysql.env
volumes:
- expo-mysqldb:/var/lib/mysql
volumes:
expo-mysqldb:

docker/mysql.env (new file)

@@ -0,0 +1,5 @@
MYSQL_ROOT_PASSWORD=expo123
MYSQL_DATABASE=troggle
MYSQL_USER=troggleuser
MYSQL_PASSWORD=expo123


@@ -1,7 +0,0 @@
Django==1.7.11
django-registration==2.1.2
mysql
imagekit
Image
django-tinymce==2.7.0
smartencoding

docker/requirements.txt (symbolic link)

@@ -0,0 +1 @@
requirements.txt.dj-1.7.11


@@ -0,0 +1,8 @@
Django==1.7.11
django-registration==2.1.2
mysql
#imagekit
django-imagekit
Image
django-tinymce==2.7.0
smartencoding


@@ -0,0 +1,7 @@
Django==1.8.19
django-registration==2.1.2
mysql
django-imagekit
Image
django-tinymce==2.7.0
smartencoding


@@ -35,7 +35,7 @@ def flatpage(request, path):
if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated():
print "flat path noinfo", path
print("flat path noinfo", path)
return HttpResponseRedirect(reverse("auth_login") + '?next=%s' % request.path)
if path.endswith("/") or path == "":
@@ -67,13 +67,15 @@ def flatpage(request, path):
title, = m.groups()
else:
title = ""
linksmatch = re.match('(.*)<ul id="links">', body, re.DOTALL + re.IGNORECASE)
if linksmatch:
body, = linksmatch.groups()
has_menu = False
menumatch = re.match('(.*)<div id="menu">', body, re.DOTALL + re.IGNORECASE)
if menumatch:
has_menu = True
#body, = menumatch.groups()
if re.search(r"iso-8859-1", html):
body = unicode(body, "iso-8859-1")
body.strip
return render_with_context(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm")})
return render_with_context(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm"), 'has_menu': has_menu})
else:
return HttpResponse(o.read(), content_type=getmimetype(path))
@@ -114,7 +116,7 @@ def editflatpage(request, path):
if m:
filefound = True
preheader, headerargs, head, postheader, bodyargs, body, postbody = m.groups()
linksmatch = re.match('(.*)(<ul\s+id="links">.*)', body, re.DOTALL + re.IGNORECASE)
linksmatch = re.match(r'(.*)(<ul\s+id="links">.*)', body, re.DOTALL + re.IGNORECASE)
if linksmatch:
body, links = linksmatch.groups()
if re.search(r"iso-8859-1", html):


@@ -1,13 +0,0 @@
"""
Django ImageKit
Author: Justin Driscoll <justin.driscoll@gmail.com>
Version: 0.2
"""
VERSION = "0.2"


@@ -1,21 +0,0 @@
""" Default ImageKit configuration """
from imagekit.specs import ImageSpec
from imagekit import processors
class ResizeThumbnail(processors.Resize):
width = 100
height = 50
crop = True
class EnhanceSmall(processors.Adjustment):
contrast = 1.2
sharpness = 1.1
class SampleReflection(processors.Reflection):
size = 0.5
background_color = "#000000"
class DjangoAdminThumbnail(ImageSpec):
access_as = 'admin_thumbnail'
processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]


@@ -1,17 +0,0 @@
# Required PIL classes may or may not be available from the root namespace
# depending on the installation method used.
try:
import Image
import ImageFile
import ImageFilter
import ImageEnhance
import ImageColor
except ImportError:
try:
from PIL import Image
from PIL import ImageFile
from PIL import ImageFilter
from PIL import ImageEnhance
from PIL import ImageColor
except ImportError:
raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it`s installed and available on your current Python path.')


@@ -1 +0,0 @@


@@ -1 +0,0 @@


@@ -1,38 +0,0 @@
from django.db.models.loading import cache
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec
class Command(BaseCommand):
help = ('Clears all ImageKit cached files.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
def handle(self, *args, **options):
return flush_cache(args, options)
def flush_cache(apps, options):
""" Clears the image cache
"""
apps = [a.strip(',') for a in apps]
if apps:
print 'Flushing cache for %s...' % ', '.join(apps)
else:
print 'Flushing caches...'
for app_label in apps:
app = cache.get_app(app_label)
models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]
for model in models:
for obj in model.objects.all():
for spec in model._ik.specs:
prop = getattr(obj, spec.name(), None)
if prop is not None:
prop._delete()
if spec.pre_cache:
prop._create()


@@ -1,136 +0,0 @@
import os
from datetime import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.db.models.base import ModelBase
from django.utils.translation import ugettext_lazy as _
from imagekit import specs
from imagekit.lib import *
from imagekit.options import Options
from imagekit.utils import img_to_fobj
# Modify image file buffer size.
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)
# Choice tuples for specifying the crop origin.
# These are provided for convenience.
CROP_HORZ_CHOICES = (
(0, _('left')),
(1, _('center')),
(2, _('right')),
)
CROP_VERT_CHOICES = (
(0, _('top')),
(1, _('center')),
(2, _('bottom')),
)
class ImageModelBase(ModelBase):
""" ImageModel metaclass
This metaclass parses IKOptions and loads the specified specification
module.
"""
def __init__(cls, name, bases, attrs):
parents = [b for b in bases if isinstance(b, ImageModelBase)]
if not parents:
return
user_opts = getattr(cls, 'IKOptions', None)
opts = Options(user_opts)
try:
module = __import__(opts.spec_module, {}, {}, [''])
except ImportError:
raise ImportError('Unable to load imagekit config module: %s' % \
opts.spec_module)
for spec in [spec for spec in module.__dict__.values() \
if isinstance(spec, type) \
and issubclass(spec, specs.ImageSpec) \
and spec != specs.ImageSpec]:
setattr(cls, spec.name(), specs.Descriptor(spec))
opts.specs.append(spec)
setattr(cls, '_ik', opts)
class ImageModel(models.Model):
""" Abstract base class implementing all core ImageKit functionality
Subclasses of ImageModel are augmented with accessors for each defined
image specification and can override the inner IKOptions class to customize
storage locations and other options.
"""
__metaclass__ = ImageModelBase
class Meta:
abstract = True
class IKOptions:
pass
def admin_thumbnail_view(self):
if not self._imgfield:
return None
prop = getattr(self, self._ik.admin_thumbnail_spec, None)
if prop is None:
return 'An "%s" image spec has not been defined.' % \
self._ik.admin_thumbnail_spec
else:
if hasattr(self, 'get_absolute_url'):
return u'<a href="%s"><img src="%s"></a>' % \
(self.get_absolute_url(), prop.url)
else:
return u'<a href="%s"><img src="%s"></a>' % \
(self._imgfield.url, prop.url)
admin_thumbnail_view.short_description = _('Thumbnail')
admin_thumbnail_view.allow_tags = True
@property
def _imgfield(self):
return getattr(self, self._ik.image_field)
def _clear_cache(self):
for spec in self._ik.specs:
prop = getattr(self, spec.name())
prop._delete()
def _pre_cache(self):
for spec in self._ik.specs:
if spec.pre_cache:
prop = getattr(self, spec.name())
prop._create()
def save(self, clear_cache=True, *args, **kwargs):
is_new_object = self._get_pk_val is None
super(ImageModel, self).save(*args, **kwargs)
if is_new_object:
clear_cache = False
spec = self._ik.preprocessor_spec
if spec is not None:
newfile = self._imgfield.storage.open(str(self._imgfield))
img = Image.open(newfile)
img = spec.process(img, None)
format = img.format or 'JPEG'
if format != 'JPEG':
imgfile = img_to_fobj(img, format)
else:
imgfile = img_to_fobj(img, format,
quality=int(spec.quality),
optimize=True)
content = ContentFile(imgfile.read())
newfile.close()
name = str(self._imgfield)
self._imgfield.storage.delete(name)
self._imgfield.storage.save(name, content)
if clear_cache and self._imgfield != '':
self._clear_cache()
self._pre_cache()
def delete(self):
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
self._clear_cache()
models.Model.delete(self)


@@ -1,23 +0,0 @@
# Imagekit options
from imagekit import processors
from imagekit.specs import ImageSpec
class Options(object):
""" Class handling per-model imagekit options
"""
image_field = 'image'
crop_horz_field = 'crop_horz'
crop_vert_field = 'crop_vert'
preprocessor_spec = None
cache_dir = 'cache'
save_count_as = None
cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
admin_thumbnail_spec = 'admin_thumbnail'
spec_module = 'imagekit.defaults'
def __init__(self, opts):
for key, value in opts.__dict__.iteritems():
setattr(self, key, value)
self.specs = []


@@ -1,134 +0,0 @@
""" Imagekit Image "ImageProcessors"
A processor defines a set of class variables (optional) and a
class method named "process" which processes the supplied image using
the class properties as settings. The process method can also be overridden
entirely, allowing users to define their own effects/processes.
"""
from imagekit.lib import *
class ImageProcessor(object):
""" Base image processor class """
@classmethod
def process(cls, image, obj=None):
return image
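# A hypothetical custom processor (illustration only, not part of the original
# module) following the pattern described in the module docstring: class
# attributes act as settings and a 'process' classmethod does the work.
#
# class Grayscale(ImageProcessor):
#     @classmethod
#     def process(cls, image, obj=None):
#         return image.convert('L')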
class Adjustment(ImageProcessor):
color = 1.0
brightness = 1.0
contrast = 1.0
sharpness = 1.0
@classmethod
def process(cls, image, obj=None):
for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
factor = getattr(cls, name.lower())
if factor != 1.0:
image = getattr(ImageEnhance, name)(image).enhance(factor)
return image
class Reflection(ImageProcessor):
background_color = '#FFFFFF'
size = 0.0
opacity = 0.6
@classmethod
def process(cls, image, obj=None):
# convert bgcolor string to rgb value
background_color = ImageColor.getrgb(cls.background_color)
# copy the original image and flip its orientation
reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
# create a new image filled with the bgcolor the same size
background = Image.new("RGB", image.size, background_color)
# calculate our alpha mask
start = int(255 - (255 * cls.opacity)) # The start of our gradient
steps = int(255 * cls.size) # the number of intermediate values
increment = (255 - start) / float(steps)
mask = Image.new('L', (1, 255))
for y in range(255):
if y < steps:
val = int(y * increment + start)
else:
val = 255
mask.putpixel((0, y), val)
alpha_mask = mask.resize(image.size)
# merge the reflection onto our background color using the alpha mask
reflection = Image.composite(background, reflection, alpha_mask)
# crop the reflection
reflection_height = int(image.size[1] * cls.size)
reflection = reflection.crop((0, 0, image.size[0], reflection_height))
# create new image sized to hold both the original image and the reflection
composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
# paste the original image and the reflection into the composite image
composite.paste(image, (0, 0))
composite.paste(reflection, (0, image.size[1]))
# return the image complete with reflection effect
return composite
class Resize(ImageProcessor):
width = None
height = None
crop = False
upscale = False
@classmethod
def process(cls, image, obj=None):
cur_width, cur_height = image.size
if cls.crop:
crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
box_left, box_right = {
0: (0, cls.width),
1: (int(x_diff), int(x_diff + cls.width)),
2: (int(crop_x), int(resize_x)),
}[crop_horz]
box_upper, box_lower = {
0: (0, cls.height),
1: (int(y_diff), int(y_diff + cls.height)),
2: (int(crop_y), int(resize_y)),
}[crop_vert]
box = (box_left, box_upper, box_right, box_lower)
image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
else:
if not cls.width is None and not cls.height is None:
ratio = min(float(cls.width)/cur_width,
float(cls.height)/cur_height)
else:
if cls.width is None:
ratio = float(cls.height)/cur_height
else:
ratio = float(cls.width)/cur_width
new_dimensions = (int(round(cur_width*ratio)),
int(round(cur_height*ratio)))
if new_dimensions[0] > cur_width or \
new_dimensions[1] > cur_height:
if not cls.upscale:
return image
image = image.resize(new_dimensions, Image.ANTIALIAS)
return image
class Transpose(ImageProcessor):
""" Rotates or flips the image
Method should be one of the following strings:
- FLIP_LEFT_RIGHT
- FLIP_TOP_BOTTOM
- ROTATE_90
- ROTATE_270
- ROTATE_180
"""
method = 'FLIP_LEFT_RIGHT'
@classmethod
def process(cls, image, obj=None):
return image.transpose(getattr(Image, cls.method))
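# An illustrative subclass (not in the original module): Transpose, like the
# other processors, is normally configured by overriding class attributes
# rather than process(), e.g. a hypothetical half-turn rotation:
#
# class RotateHalfTurn(Transpose):
#     method = 'ROTATE_180'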

View File

@@ -1,119 +0,0 @@
""" ImageKit image specifications
All imagekit specifications must inherit from the ImageSpec class. Models
inheriting from ImageModel will be modified with a descriptor/accessor for each
spec found.
"""
import os
from StringIO import StringIO
from imagekit.lib import *
from imagekit.utils import img_to_fobj
from django.core.files.base import ContentFile
class ImageSpec(object):
pre_cache = False
quality = 70
increment_count = False
processors = []
@classmethod
def name(cls):
return getattr(cls, 'access_as', cls.__name__.lower())
@classmethod
def process(cls, image, obj):
processed_image = image.copy()
for proc in cls.processors:
processed_image = proc.process(processed_image, obj)
return processed_image
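# A minimal sketch of a concrete spec (hypothetical, for illustration only),
# mirroring the pattern used in the test suite: a Resize processor subclass
# combined into a spec that the model exposes as 'thumbnail'.
#
# from imagekit import processors
#
# class ResizeToFit(processors.Resize):
#     width = 100
#     height = 100
#
# class Thumbnail(ImageSpec):
#     access_as = 'thumbnail'
#     pre_cache = True
#     processors = [ResizeToFit]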
class Accessor(object):
def __init__(self, obj, spec):
self._img = None
self._obj = obj
self.spec = spec
def _get_imgfile(self):
format = self._img.format or 'JPEG'
if format != 'JPEG':
imgfile = img_to_fobj(self._img, format)
else:
imgfile = img_to_fobj(self._img, format,
quality=int(self.spec.quality),
optimize=True)
return imgfile
def _create(self):
if self._exists():
return
# process the original image file
fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
fp.seek(0)
fp = StringIO(fp.read())
try:
self._img = self.spec.process(Image.open(fp), self._obj)
# save the new image to the cache
content = ContentFile(self._get_imgfile().read())
self._obj._imgfield.storage.save(self.name, content)
except IOError:
pass
def _delete(self):
self._obj._imgfield.storage.delete(self.name)
def _exists(self):
return self._obj._imgfield.storage.exists(self.name)
def _basename(self):
filename, extension = \
os.path.splitext(os.path.basename(self._obj._imgfield.name))
return self._obj._ik.cache_filename_format % \
{'filename': filename,
'specname': self.spec.name(),
'extension': extension.lstrip('.')}
@property
def name(self):
return os.path.join(self._obj._ik.cache_dir, self._basename())
@property
def url(self):
self._create()
if self.spec.increment_count:
fieldname = self._obj._ik.save_count_as
if fieldname is not None:
current_count = getattr(self._obj, fieldname)
setattr(self._obj, fieldname, current_count + 1)
self._obj.save(clear_cache=False)
return self._obj._imgfield.storage.url(self.name)
@property
def file(self):
self._create()
return self._obj._imgfield.storage.open(self.name)
@property
def image(self):
if self._img is None:
self._create()
if self._img is None:
self._img = Image.open(self.file)
return self._img
@property
def width(self):
return self.image.size[0]
@property
def height(self):
return self.image.size[1]
class Descriptor(object):
def __init__(self, spec):
self._spec = spec
def __get__(self, obj, type=None):
return Accessor(obj, self._spec)

View File

@@ -1,86 +0,0 @@
import os
import tempfile
import unittest
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase
from imagekit import processors
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec
from imagekit.lib import Image
class ResizeToWidth(processors.Resize):
width = 100
class ResizeToHeight(processors.Resize):
height = 100
class ResizeToFit(processors.Resize):
width = 100
height = 100
class ResizeCropped(ResizeToFit):
crop = ('center', 'center')
class TestResizeToWidth(ImageSpec):
access_as = 'to_width'
processors = [ResizeToWidth]
class TestResizeToHeight(ImageSpec):
access_as = 'to_height'
processors = [ResizeToHeight]
class TestResizeCropped(ImageSpec):
access_as = 'cropped'
processors = [ResizeCropped]
class TestPhoto(ImageModel):
""" Minimal ImageModel class for testing """
image = models.ImageField(upload_to='images')
class IKOptions:
spec_module = 'imagekit.tests'
class IKTest(TestCase):
""" Base TestCase class """
def setUp(self):
# create a test image using tempfile and PIL
self.tmp = tempfile.TemporaryFile()
Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
self.tmp.seek(0)
self.p = TestPhoto()
self.p.image.save(os.path.basename('test.jpg'),
ContentFile(self.tmp.read()))
self.p.save()
# destroy temp file
self.tmp.close()
def test_setup(self):
self.assertEqual(self.p.image.width, 800)
self.assertEqual(self.p.image.height, 600)
def test_to_width(self):
self.assertEqual(self.p.to_width.width, 100)
self.assertEqual(self.p.to_width.height, 75)
def test_to_height(self):
self.assertEqual(self.p.to_height.width, 133)
self.assertEqual(self.p.to_height.height, 100)
def test_crop(self):
self.assertEqual(self.p.cropped.width, 100)
self.assertEqual(self.p.cropped.height, 100)
def test_url(self):
tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)
def tearDown(self):
# make sure image file is deleted
path = self.p.image.path
self.p.delete()
self.failIf(os.path.isfile(path))

View File

@@ -1,15 +0,0 @@
""" ImageKit utility functions """
import tempfile
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
if format != 'JPEG':
try:
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
except KeyError:
pass
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp

localsettingsdocker.py Normal file
View File

@@ -0,0 +1,65 @@
import sys
# link localsettings to this file for use on expo computer in austria
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'troggleuser', # Not used with sqlite3.
'PASSWORD' : 'expo123', # Not used with sqlite3.
'HOST' : 'expo-mysql', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
EXPOUSER = 'expo'
EXPOUSERPASS = 'somepasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
REPOS_ROOT_PATH = '/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
PUBLIC_SITE = False
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
CAVERN = 'cavern'
THREEDTOPOS = '3dtopos'
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/'
FILES = REPOS_ROOT_PATH + 'expofiles'
CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
THREEDCACHEDIR = CACHEDIR + '3d/'
THUMBNAILCACHE = CACHEDIR + 'thumbs'
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
URL_ROOT = 'http://127.0.0.1:8000/'
DIR_ROOT = ''  # this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'
MEDIA_URL = URL_ROOT + DIR_ROOT + 'site_media/'
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'
STATIC_URL = URL_ROOT
STATIC_ROOT = DIR_ROOT
JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'
TEMPLATE_DIRS = (
PYTHON_PATH + "templates",
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
LOGFILE = PYTHON_PATH + 'troggle_log.txt'

View File

@@ -24,6 +24,7 @@ FIX_PERMISSIONS = ["sudo", "/usr/local/bin/fix_permissions"]
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
CAVERN = 'cavern'
THREEDTOPOS = '3dtopos'

View File

@@ -26,6 +26,7 @@ PUBLIC_SITE = True
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
CAVERN = 'cavern'
THREEDTOPOS = '3dtopos'

View File

@@ -25,6 +25,7 @@ PUBLIC_SITE = False
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
CAVERN = 'cavern'
THREEDTOPOS = '3dtopos'

View File

@@ -10,14 +10,15 @@ def readcaves():
newArea.save()
newArea = models.Area(short_name = "1626", parent = None)
newArea.save()
print "Reading Entrances"
print("Reading Entrances")
#print "list of <Slug> <Filename>"
for filename in os.walk(settings.ENTRANCEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readentrance(filename)
print "Reading Caves"
readentrance(filename)
print ("Reading Caves")
for filename in os.walk(settings.CAVEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
readcave(filename)
if filename.endswith('.html'):
readcave(filename)
def readentrance(filename):
with open(os.path.join(settings.ENTRANCEDESCRIPTIONS, filename)) as f:
@@ -153,7 +154,7 @@ def readcave(filename):
primary = primary)
cs.save()
except:
print "Can't find text (slug): %s, skipping %s" % (slug, context)
print("Can't find text (slug): %s, skipping %s" % (slug, context))
primary = False
for entrance in entrances:
@@ -164,17 +165,17 @@ def readcave(filename):
ce = models.CaveAndEntrance(cave = c, entrance_letter = letter, entrance = entrance)
ce.save()
except:
print "Entrance text (slug) %s missing %s" % (slug, context)
print ("Entrance text (slug) %s missing %s" % (slug, context))
def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
if len(items) < minItems and printwarnings:
print "%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
print("%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
"itemname": itemname,
"min": minItems} + context
"min": minItems} + context)
if maxItems is not None and len(items) > maxItems and printwarnings:
print "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
print("%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
"itemname": itemname,
"max": maxItems} + context
"max": maxItems} + context)
return items

View File

@@ -9,7 +9,6 @@ import re
import os
def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
ls = sline.lower().split()
ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
@@ -20,23 +19,23 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexleg.tape = float(ls[stardata["tape"]])
except ValueError:
print "Tape misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Tape misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
survexleg.tape = 1000
try:
lclino = ls[stardata["clino"]]
except:
print "Clino misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Clino misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
lclino = error
try:
lcompass = ls[stardata["compass"]]
except:
print "Compass misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Compass misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
lcompass = error
if lclino == "up":
survexleg.compass = 0.0
@@ -48,14 +47,14 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexleg.compass = float(lcompass)
except ValueError:
print "Compass misread in", survexblock.survexfile.path
print "Stardata:", stardata
print "Line:", ls
print("Compass misread in", survexblock.survexfile.path)
print("Stardata:", stardata)
print("Line:", ls)
survexleg.compass = 1000
survexleg.clino = -90.0
else:
assert re.match("[\d\-+.]+$", lcompass), ls
assert re.match("[\d\-+.]+$", lclino) and lclino != "-", ls
assert re.match(r"[\d\-+.]+$", lcompass), ls
assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
survexleg.compass = float(lcompass)
survexleg.clino = float(lclino)
@@ -67,9 +66,10 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
try:
survexblock.totalleglength += float(ls[itape])
except ValueError:
print "Length not added"
print("Length not added")
survexblock.save()
def LoadSurvexEquate(survexblock, sline):
#print sline #
stations = sline.split()
@@ -77,12 +77,13 @@ def LoadSurvexEquate(survexblock, sline):
for station in stations:
survexblock.MakeSurvexStation(station)
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
pass
stardatadefault = { "type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4 }
stardataparamconvert = { "length":"tape", "bearing":"compass", "gradient":"clino" }
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
def RecursiveLoad(survexblock, survexfile, fin, textlines):
iblankbegins = 0
@@ -91,7 +92,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
teammembers = [ ]
# uncomment to print out all files during parsing
# print "Reading file:", survexblock.survexfile.path
print("Reading file:", survexblock.survexfile.path)
while True:
svxline = fin.readline().decode("latin1")
if not svxline:
@@ -99,10 +100,10 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
textlines.append(svxline)
# break the line at the comment
sline, comment = re.match("([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
# detect ref line pointing to the scans directory
mref = comment and re.match('.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
if mref:
refscan = "%s#%s" % (mref.group(1), mref.group(2))
survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
@@ -116,7 +117,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
continue
# detect the star command
mstar = re.match('\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
if not mstar:
if "from" in stardata:
LoadSurvexLineLeg(survexblock, stardata, sline, comment)
@@ -129,7 +130,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
cmd, line = mstar.groups()
cmd = cmd.lower()
if re.match("include$(?i)", cmd):
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub("\.svx$", "", line))
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
includesurvexfile.save()
includesurvexfile.SetDirectory()
@@ -157,7 +158,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
elif re.match("date$(?i)", cmd):
if len(line) == 10:
survexblock.date = re.sub("\.", "-", line)
survexblock.date = re.sub(r"\.", "-", line)
expeditions = models.Expedition.objects.filter(year=line[:4])
if expeditions:
assert len(expeditions) == 1
@@ -166,9 +167,9 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
survexblock.save()
elif re.match("team$(?i)", cmd):
mteammember = re.match("(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
if mteammember:
for tm in re.split(" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
if tm:
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
if (personexpedition, tm) not in teammembers:
@@ -206,22 +207,25 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
survexblock.MakeSurvexStation(line.split()[0])
else:
if not cmd in [ "sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument", "calibrate", "set", "infer", "alias", "ref" ]:
print ("Unrecognised command in line:", cmd, line, survexblock)
if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
"calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
def ReloadSurvexCave(survex_cave):
cave = models.Cave.objects.get(kataster_number=survex_cave)
def ReloadSurvexCave(survex_cave, area):
print(survex_cave, area)
cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
print(cave)
#cave = models.Cave.objects.get(kataster_number=survex_cave)
cave.survexblock_set.all().delete()
cave.survexfile_set.all().delete()
cave.survexdirectory_set.all().delete()
survexfile = models.SurvexFile(path="caves/" + survex_cave + "/" + survex_cave, cave=cave)
survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
survexfile.save()
survexfile.SetDirectory()
survexblockroot = models.SurvexBlock(name="root", survexpath="caves", begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
survexblockroot.save()
fin = survexfile.OpenFile()
textlines = [ ]
@@ -232,7 +236,7 @@ def ReloadSurvexCave(survex_cave):
def LoadAllSurvexBlocks():
print 'Loading All Survex Blocks...'
print('Loading All Survex Blocks...')
models.SurvexBlock.objects.all().delete()
models.SurvexFile.objects.all().delete()
@@ -243,6 +247,8 @@ def LoadAllSurvexBlocks():
models.SurvexPersonRole.objects.all().delete()
models.SurvexStation.objects.all().delete()
print(" - Data flushed")
survexfile = models.SurvexFile(path="all", cave=None)
survexfile.save()
survexfile.SetDirectory()
@@ -259,22 +265,26 @@ def LoadAllSurvexBlocks():
#Load each cave,
#FIXME this should be dealt with load all above
print(" - Reloading all caves")
caves = models.Cave.objects.all()
for cave in caves:
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves", cave.kataster_number)):
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
if cave.kataster_number not in ['40']:
print "loading", cave
ReloadSurvexCave(cave.kataster_number)
poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
print("loading", cave, cave.kat_area())
ReloadSurvexCave(cave.kataster_number, cave.kat_area())
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
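# e.g. a line like "(  123.45,   67.89,  1000.00 ) p204.1" (illustrative values) is matched here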
def LoadPos():
print 'Loading Pos....'
print('Loading Pos....')
call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
posfile = open("%sall.pos" % settings.SURVEX_DATA)
posfile.readline()#Drop header
posfile.readline() #Drop header
for line in posfile.readlines():
r = poslineregex.match(line)
if r:
@@ -286,4 +296,4 @@ def LoadPos():
ss.z = float(z)
ss.save()
except:
print "%s not parsed in survex" % name
print("%s not parsed in survex" % name)

View File

@@ -39,7 +39,7 @@ def readSurveysFromCSV():
# test if the expeditions have been added yet
if Expedition.objects.count()==0:
print "There are no expeditions in the database. Please run the logbook parser."
print("There are no expeditions in the database. Please run the logbook parser.")
sys.exit()
@@ -56,7 +56,7 @@ def readSurveysFromCSV():
for survey in surveyreader:
#I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
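# e.g. a hypothetical wallet number '34a' yields number='34', letter='a'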
# print walletNumberLetter.groups()
# print(walletNumberLetter.groups())
year=survey[header['Year']]
@@ -89,63 +89,73 @@ def listdir(*directories):
# add survey scans
def parseSurveyScans(expedition, logfile=None):
# yearFileList = listdir(expedition.year)
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
yearFileList=os.listdir(yearPath)
print yearFileList
for surveyFolder in yearFileList:
try:
surveyNumber=re.match(r'\d\d\d\d#0*(\d+)',surveyFolder).groups()
# scanList = listdir(expedition.year, surveyFolder)
scanList=os.listdir(os.path.join(yearPath,surveyFolder))
except AttributeError:
print surveyFolder + " ignored",
continue
for scan in scanList:
try:
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
yearFileList=os.listdir(yearPath)
print(yearFileList)
for surveyFolder in yearFileList:
try:
scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
scanType,scanNumber,scanFormat=scanChopped
surveyNumber=re.match(r'\d\d\d\d#(X?)0*(\d+)',surveyFolder).groups()
#scanList = listdir(expedition.year, surveyFolder)
scanList=os.listdir(os.path.join(yearPath,surveyFolder))
except AttributeError:
print scan + " ignored \r",
print(surveyFolder + " ignored\r",)
continue
if scanType == 'elev' or scanType == 'extend':
scanType = 'elevation'
if scanNumber=='':
scanNumber=1
for scan in scanList:
try:
scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
scanType,scanNumber,scanFormat=scanChopped
except AttributeError:
print(scan + " ignored\r",)
continue
if scanType == 'elev' or scanType == 'extend':
scanType = 'elevation'
if type(surveyNumber)==types.TupleType:
surveyNumber=surveyNumber[0]
try:
placeholder=get_or_create_placeholder(year=int(expedition.year))
survey=Survey.objects.get_or_create(wallet_number=surveyNumber, expedition=expedition, defaults={'logbook_entry':placeholder})[0]
except Survey.MultipleObjectsReturned:
survey=Survey.objects.filter(wallet_number=surveyNumber, expedition=expedition)[0]
file_=os.path.join(yearPath, surveyFolder, scan)
scanObj = ScannedImage(
file=file_,
contents=scanType,
number_in_wallet=scanNumber,
survey=survey,
new_since_parsing=False,
)
print "Added scanned image at " + str(scanObj)
#if scanFormat=="png":
#if isInterlacedPNG(os.path.join(settings.SURVEY_SCANS, "surveyscans", file_)):
# print file_+ " is an interlaced PNG. No can do."
#continue
scanObj.save()
if scanNumber=='':
scanNumber=1
if type(surveyNumber)==types.TupleType:
surveyLetter=surveyNumber[0]
surveyNumber=surveyNumber[1]
try:
placeholder=get_or_create_placeholder(year=int(expedition.year))
survey=Survey.objects.get_or_create(wallet_number=surveyNumber, wallet_letter=surveyLetter, expedition=expedition, defaults={'logbook_entry':placeholder})[0]
except Survey.MultipleObjectsReturned:
survey=Survey.objects.filter(wallet_number=surveyNumber, wallet_letter=surveyLetter, expedition=expedition)[0]
file_=os.path.join(yearPath, surveyFolder, scan)
scanObj = ScannedImage(
file=file_,
contents=scanType,
number_in_wallet=scanNumber,
survey=survey,
new_since_parsing=False,
)
print("Added scanned image at " + str(scanObj))
#if scanFormat=="png":
#if isInterlacedPNG(os.path.join(settings.SURVEY_SCANS, "surveyscans", file_)):
# print file_+ " is an interlaced PNG. No can do."
#continue
scanObj.save()
except (IOError, OSError):
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
print("No folder found for " + expedition.year + " at:- " + yearPath)
# dead
def parseSurveys(logfile=None):
readSurveysFromCSV()
try:
readSurveysFromCSV()
except (IOError, OSError):
print("Survey CSV not found..")
pass
for expedition in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
parseSurveyScans(expedition)
# dead
def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
file=Image.open(filePath)
print filePath
print(filePath)
if 'interlace' in file.info:
return file.info['interlace']
else:
@@ -180,8 +190,8 @@ def LoadListScansFile(survexscansfolder):
gld.append((fyf, ffyf, fisdiryf))
for (fyf, ffyf, fisdiryf) in gld:
assert not fisdiryf, ffyf
if re.search("\.(?:png|jpg|jpeg)(?i)$", fyf):
#assert not fisdiryf, ffyf
if re.search(r"\.(?:png|jpg|jpeg)(?i)$", fyf):
survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
survexscansingle.save()
@@ -190,7 +200,7 @@ def LoadListScansFile(survexscansfolder):
# and builds up the models we can access later
def LoadListScans():
print 'Loading Survey Scans...'
print('Loading Survey Scans...')
SurvexScanSingle.objects.all().delete()
SurvexScansFolder.objects.all().delete()
@@ -208,7 +218,7 @@ def LoadListScans():
continue
# do the year folders
if re.match("\d\d\d\d$", f):
if re.match(r"\d\d\d\d$", f):
for fy, ffy, fisdiry in GetListDir(ff):
if fisdiry:
assert fisdiry, ffy
@@ -225,7 +235,7 @@ def LoadListScans():
def FindTunnelScan(tunnelfile, path):
scansfolder, scansfile = None, None
mscansdir = re.search("(\d\d\d\d#\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
mscansdir = re.search(r"(\d\d\d\d#X?\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
if mscansdir:
scansfolderl = SurvexScansFolder.objects.filter(walletname=mscansdir.group(1))
if len(scansfolderl):
@@ -234,6 +244,7 @@ def FindTunnelScan(tunnelfile, path):
if scansfolder:
scansfilel = scansfolder.survexscansingle_set.filter(name=mscansdir.group(2))
if len(scansfilel):
print(scansfilel, len(scansfilel))
assert len(scansfilel) == 1
scansfile = scansfilel[0]
@@ -242,9 +253,9 @@ def FindTunnelScan(tunnelfile, path):
if scansfile:
tunnelfile.survexscans.add(scansfile)
elif path and not re.search("\.(?:png|jpg)$(?i)", path):
elif path and not re.search(r"\.(?:png|jpg|jpeg)$(?i)", path):
name = os.path.split(path)[1]
print "ttt", tunnelfile.tunnelpath, path, name
print("ttt", tunnelfile.tunnelpath, path, name)
rtunnelfilel = TunnelFile.objects.filter(tunnelname=name)
if len(rtunnelfilel):
assert len(rtunnelfilel) == 1, ("two paths with name of", path, "need more discrimination coded")

View File

@@ -92,7 +92,7 @@ INSTALLED_APPS = (
'troggle.profiles',
'troggle.core',
'troggle.flatpages',
'troggle.imagekit',
'imagekit',
)
MIDDLEWARE_CLASSES = (
@@ -130,4 +130,6 @@ TINYMCE_COMPRESSOR = True
MAX_LOGBOOK_ENTRY_TITLE_LENGTH = 200
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
from localsettings import * #localsettings needs to take precedence. Call it to override any existing vars.

View File

@@ -16,7 +16,7 @@
<body onLoad="contentHeight();">
<div id="header">
<h1>CUCC Expeditions to Austria: 1976 - 2016</h1>
<h1>CUCC Expeditions to Austria: 1976 - 2018</h1>
<div id="editLinks"> {% block loginInfo %}
<a href="{{settings.EXPOWEB_URL}}">Website home</a> |
{% if user.username %}
@@ -40,9 +40,9 @@
<a href="{% url "survexcavessingle" 204 %}">204</a> |
<a href="{% url "survexcavessingle" 258 %}">258</a> |
<a href="{% url "survexcavessingle" 264 %}">264</a> |
<a href="{% url "expedition" 2014 %}">Expo2014</a> |
<a href="{% url "expedition" 2015 %}">Expo2015</a> |
<a href="{% url "expedition" 2016 %}">Expo2016</a> |
<a href="{% url "expedition" 2017 %}">Expo2017</a> |
<a href="{% url "expedition" 2018 %}">Expo2018</a> |
<a href="/admin/">Django admin</a>
</div>
@@ -90,7 +90,7 @@
<li><a id="caversLink" href="{% url "personindex" %}">cavers</a></li>
<li><a href="#">expeditions</a>
<ul class="sub_menu">
<li><a id="expeditionsLink" href="{{ Expedition.objects.latest.get_absolute_url }}">newest</a></li>
<li><a id="expeditionsLink" href="{{ expedition.objects.latest.get_absolute_url }}">newest</a></li>
<li><a id="expeditionsLink" href="{% url "expeditions" %}">list all</a></li>
</ul>
</li>

View File

@@ -1,5 +1,438 @@
{% extends "cavebase.html" %}
{% block extraheaders %}
{% if cave.survex_file %}
<style>
div.cv-panel {
position: absolute;
top: 0;
left: 0;
z-index: 100;
background-color: rgba(50,50,50,0.5);
color: yellowgreen;
border: 1px solid black;
border-radius: 5px;
}
div.cv-compass, div.cv-ahi {
position: absolute;
bottom: 95px;
right: 5px;
margin: 0;
padding-top: 2px;
/* border: 1px solid white; */
text-align: center;
width: 78px;
height: 19px;
z-index: 50;
background-color: rgba(50,50,50,0.5);
background-color: black;
color: white;
}
div.cv-ahi {
right: 95px;
}
div.scale-legend {
position: absolute;
color: white;
background-color: black;
bottom: 30px;
}
div.linear-scale {
position: absolute;
color: white;
background-color: black;
right: 30px;
width: 40px;
padding: 2px 0;
text-align: right;
border: 1px solid black;
font-size: 14px;
}
div.linear-scale-caption {
position: absolute;
color: white;
background-color: black;
right: 5px;
width: 65px;
padding: 2px 0 5px 0;
text-align: left;
border: 1px solid black;
font-size: 14px;
}
#min-div {
border-bottom: 1px solid white;
}
#max-div {
border-top: 1px solid white;
}
#angle-legend {
position: absolute;
width: 80px;
right: 5px;
bottom: 180px;
color: white;
background-color: black;
font-size: 14px;
text-align: center;
}
#scene {
width: 100%;
height: 700px;
position: relative;
}
#progress-bar {
position: absolute;
top: 55%;
height: 20px;
border: 1px solid white;
z-index: 100;
}
#status-text {
position: absolute;
top: 50%;
height: 20px;
padding-left: 4px;
background-color: black;
color: white;
z-index: 100;
}
#frame div.page ul {
list-style-type: none;
margin: 8px 0 0 0;
padding: 0;
width: 200px;
height: 100%;
cursor: default;
font-size: 12px;
overflow-y: auto;
overflow-x: hidden;
}
#frame div.page li {
position: relative;
margin-left: 16px;
border-bottom: 1px solid #444444;
}
#frame div.page li.selected {
color: #1ab4e5;
}
#frame div.page li:hover {
color: yellow;
}
#frame div.page div#ui-path {
font-size: 12px;
border-top: 1px solid grey;
border-bottom: 1px solid grey;
margin-top: 8px;
padding: 2px 0 2px 12px;
}
#frame div.page div#ui-path span {
color: #1ab4e5;
}
#frame div.page div.slide {
position: absolute;
top: 64px;
left: 0px;
height: auto;
margin-top:0;
bottom: 44px;
background-color: #222222;
transition: transform 0.25s ease-in;
}
#frame div.slide-out {
border-right: 1px grey solid;
transform: translateX(-100%);
}
#frame div.page div.descend-tree {
position: absolute;
top: 0px;
right: 0px;
margin: 0;
color: #1ab4e5;
z-index: 110;
}
#frame {
position: absolute;
top: 0px;
left: 0px;
width: 240px;
height: 100%;
background-color: transparent;
transform: translateX(-200px);
transition: transform 0.25s ease-in;
}
#frame.onscreen {
transform: none;
transition: transform 0.25s ease-out;
}
#frame a.download {
border: 1px solid green;
display: block;
width: 180px;
box-sizing: border-box;
margin-top: 6px;
margin-bottom: 4px;
margin-left: 8px;
border: none;
border-bottom: 4px solid #1ab4e5;
color: #dddddd;
background-color: black;
padding-bottom: 4px;
box-shadow: 1px 1px 8px 0px #888888;
outline: none;
text-decoration: none;
text-align: center;
}
#frame a.download:hover {
color: white;
}
#frame a.download:active {
color: #dddddd;
border-bottom: 4px solid #0c536a;
box-shadow: none;
box-shadow: inset 1px 1px 8px 0px #888888;
}
#frame .tab {
position: absolute;
right: 0px;
width: 40px;
height: 40px;
box-sizing: border-box;
background-color: #444444;
border-left: 1px solid black;
background-position: center;
border-top: 1px solid black;
}
#frame #close {
position: absolute;
right: 40px;
bottom: 0px;
width: 40px;
height: 40px;
box-sizing: border-box;
z-index: 150;
background-image: url(../images/ic_remove.png);
background-position: center;
}
#icon_settings {
background-image: url(../images/ic_settings.png);
}
#icon_terrain {
background-image: url(../images/ic_terrain.png);
}
#icon_explore {
background-image: url(../images/ic_explore.png);
}
#icon_info {
background-image: url(../images/ic_info.png);
}
#icon_route {
background-image: url(../images/ic_route.png);
}
#icon_help {
background-image: url(../images/ic_help.png);
}
#frame div.toptab {
background-color: #222222;
border-left: none;
border-right: 1px solid grey;
border-top: 1px solid grey;
}
#frame div.page {
position: absolute;
top: 0px;
bottom: 40px;
left: 0px;
width: 200px;
height: 100%;
color: white;
background-color: #222222;
padding: 0 4px;
box-sizing: border-box;
cursor: default;
padding-bottom: 40px;
}
#frame div.page div.header {
margin: 16px 0px 8px 0px;
font-weight: bold;
height: 16px;
box-sizing: border-box;
padding-left: 2px;
}
#frame div.page div.control {
margin: 2px 0 2px 0;
padding-top: 2px;
}
#frame div.page label {
display: block;
border-top: 1px solid grey;
padding: 2px 0 2px 8px;
font-size: 12px;
}
#frame div.page select {
display: block;
width: 180px;
box-sizing: border-box;
padding-top: 2px;
margin: 2px 0 4px 8px;
}
#frame div.page select:empty {
background-color: #888888;
}
#frame div.page button {
display: block;
width: 180px;
box-sizing: border-box;
margin-top: 4px;
margin-bottom: 4px;
margin-left: 8px;
border: none;
border-bottom: 4px solid #1ab4e5;
color: #dddddd;
background-color: black;
padding-bottom: 4px;
box-shadow: 1px 1px 8px 0px #888888;
outline: none;
}
#frame div.page button:hover {
color: white;
}
#frame div.page button:active {
color: #dddddd;
border-bottom: 4px solid #0c536a;
box-shadow: none;
box-shadow: inset 1px 1px 8px 0px #888888;
}
#frame div.page input[type="text"] {
display: block;
width: 180px;
box-sizing: border-box;
margin-top: 2px;
margin-left: 8px;
}
#frame div.page input[type="checkbox"] {
position: absolute;
right: 0px;
}
#frame div.page input[type="range"] {
display: block;
width: 180px;
margin-left: 8px;
}
#frame dt, #frame dd {
font-size: 12px;
}
#frame dt {
clear: both;
float: left;
padding-left: 16px;
}
#frame dd {
margin-left: 40px;
}
#frame p {
font-size: 12px;
line-height: 18px;
}
div.station-info {
position: absolute;
border: 1px solid white;
background-color: #222222;
color: white;
padding: 4px;
z-index: 200;
}
.overlay-branding {
color: white;
margin: 4px;
position: absolute;
right: 0;
top: 0;
}
div#scene {
width: 100%;
height: 90%; }
</style>
<script type="text/javascript" src="/CaveView/js/CaveView.js" ></script>
<script type="text/javascript" src="/CaveView/lib/proj4.js" ></script>
<script type="text/javascript" >
function onLoad () {
// display the user interface - and a blank canvas
// the configuration object specifies the location of CaveView, surveys and terrain files
CV.UI.init( 'scene', {
home: '/javascript/CaveView/',
surveyDirectory: '/cave/3d/',
terrainDirectory: '/loser/surface/terrain/'
} );
// load a single survey to display
CV.UI.loadCave( '{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{ cave.unofficial_number }}{% endif %}.3d' );
}
window.onload = onLoad;
</script>
{% endif %}
{% endblock %}
{% load wiki_markup %}
{% block content %}
{% block contentheader %}
@@ -131,7 +564,8 @@
{% endif %}
{% if cave.survex_file %}
<h2>Survex File</h2>
{{ cave.survex_file|safe }}
{{ cave.survex_file|safe }} <a href="{% if cave.kataster_number %}{% url "cave3d" cave.kataster_number %}{% else %}{% url "cave3d" cave.unofficial_number %}{% endif %}">3d file</a>
<div id='scene'></div>
{% endif %}
{% if cave.notes %}
<h2>Notes</h2>

View File

@@ -7,6 +7,7 @@
<h1>Cave Index</h1>
<h3>Notable caves</h3>
<ul>
{% for cave in notablecaves %}
@@ -16,13 +17,13 @@
<h3>1623</h3>
<ul class="searchable">
<table class="searchable">
{% for cave in caves1623 %}
<li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </td></tr>
{% endfor %}
</ul>
</table>
<h3>1626</h3>

View File

@@ -0,0 +1,32 @@
{% extends "cavebase.html" %}
{% load wiki_markup %}
{% block title %}Cave Index{% endblock %}
{% block content %}
<h1>Cave Index</h1>
<h3>1623</h3>
<table class="searchable">
{% for cave in caves1623 %}
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </td></tr>
{% endfor %}
</table>
<h3>1626</h3>
<ul class="searchable">
{% for cave in caves1626 %}
<li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>
{% endfor %}
</ul>
<a href="{% url "newcave" %}">New Cave</a>
{% endblock %}

View File

@@ -0,0 +1,14 @@
{% extends "base.html" %}
{% load wiki_markup %}
{% load link %}
{% block content %}
<h1>Expeditions</h1>
<ul>
{% for expedition in object_list %}
<li>{{ expedition.year }} - <a href="{{ expedition.get_absolute_url }}">{{ expedition.name }}</a></li>
{% empty %}
<li>No articles yet.</li>
{% endfor %}
</ul>
{% endblock %}

View File

@@ -3,5 +3,13 @@
{% block bodyattrs %}{% if homepage %} id="homepage"{% endif %}{% endblock %}
{% block body %}
{{ body|safe }}
{% if homepage %}{% if editable %}<a href="{% url "editflatpage" path %}">Edit</a>{% endif %}{%else %}{% include "menu.html" %}{% endif %}
{% if homepage %}
{% if editable %}
<a href="{% url "editflatpage" path %}">Edit</a>
{% endif %}
{% else %}
{% if not has_menu %}
{% include "menu.html" %}
{% endif %}
{% endif %}
{% endblock %}

View File

@@ -42,7 +42,7 @@ This is Troggle, the information portal for Cambridge University Caving Club's E
</p>
<p class="indent">
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{photo.objects.count}} photos and {{logbookentry.objects.count}} logbook entries.
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{Cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{Photo.objects.count}} photos and {{Logbookentry.objects.count}} logbook entries.
</p>
<p class="indent">

urls.py
View File

@@ -15,12 +15,21 @@ admin.autodiscover()
# type url probably means it's used.
# HOW DOES THIS WORK:
# url( <regular expression that matches the thing in the web browser>,
# <reference to python function in 'core' folder>,
# <name optional argument for URL reversing (doesn't do much)>)
actualurlpatterns = patterns('',
url(r'^testingurl/?$' , views_caves.millenialcaves, name="testing"),
url(r'^millenialcaves/?$', views_caves.millenialcaves, name="millenialcaves"),
url(r'^troggle$', views_other.frontpage, name="frontpage"),
url(r'^todo/$', views_other.todo, name="todo"),
url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
url(r'^people/?$', views_logbooks.personindex, name="personindex"),
url(r'^newqmnumber/?$', views_other.ajax_QM_number, ),
@@ -30,7 +39,7 @@ actualurlpatterns = patterns('',
#url(r'^person/(\w+_\w+)$', views_logbooks.person, name="person"),
url(r'^expedition/(\d+)$', views_logbooks.expedition, name="expedition"),
url(r'^expeditions/?$', ListView, {'queryset':Expedition.objects.all(),'template_name':'object_list.html'},name="expeditions"),
url(r'^expeditions/?$', views_logbooks.ExpeditionListView.as_view(), name="expeditions"),
url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-z]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
url(r'^logbookentry/(?P<date>.*)/(?P<slug>.*)/?$', views_logbooks.logbookentry,name="logbookentry"),
url(r'^newlogbookentry/(?P<expeditionyear>.*)$', views_logbooks.newLogbookEntry, name="newLogBookEntry"),
@@ -56,6 +65,7 @@ actualurlpatterns = patterns('',
#url(r'^cavedescription/(?P<cavedescription_name>[^/]+)/?$', views_caves.cave_description, name="cavedescription"),
#url(r'^cavedescription/?$', object_list, {'queryset':CaveDescription.objects.all(),'template_name':'object_list.html'}, name="cavedescriptions"),
#url(r'^cavehref/(.+)$', views_caves.cave, name="cave"),url(r'cave'),
url(r'^cave/3d/(?P<cave_id>[^/]+).3d$', views_caves.cave3d, name="cave3d"),
# url(r'^jgtfile/(.*)$', view_surveys.jgtfile, name="jgtfile"),
# url(r'^jgtuploadfile$', view_surveys.jgtuploadfile, name="jgtuploadfile"),
@@ -128,7 +138,7 @@ actualurlpatterns = patterns('',
#(r'^survey_scans/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.SURVEY_SCANS, 'show_indexes':True}),
url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg))$',
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg|jpeg))$',
view_surveys.surveyscansingle, name="surveyscansingle"),
url(r'^tunneldata/$', view_surveys.tunneldata, name="tunneldata"),