forked from expo/troggle
Compare commits
99 Commits
Comparing branches: `django-upg` … `RW_rebuild`
| Author | SHA1 | Date | |
|---|---|---|---|
| 9a7a1728a4 | |||
| 240c7eff10 | |||
| 6b59e3a689 | |||
| ce268ec306 | |||
|
|
7e1aa80551 | ||
| 0afb21a093 | |||
| a4c0b1129c | |||
| 0a170c8ed5 | |||
| 29de363cdc | |||
| e9922fb97d | |||
| c5025ad51d | |||
| 2b118a53a9 | |||
| 611ab346d0 | |||
| 46ab084f1d | |||
| d7c7466f71 | |||
| aa3061adaf | |||
| ffaf9371b6 | |||
| d269e92380 | |||
| e082d1e122 | |||
| f4da4021f1 | |||
| 4901d82a7d | |||
| 31f390d95e | |||
| 6f92fe7b7c | |||
| e3d652939d | |||
| 60d8139a05 | |||
| f03b6b4319 | |||
| 9d3f37a2ff | |||
| 74f88afb57 | |||
| 3466a46db5 | |||
| 49afebaf97 | |||
| a4f6ad1d9f | |||
| caa7b2c8b2 | |||
| 533446098f | |||
| 04a7e770c5 | |||
| ec548db8a9 | |||
| d6de8a3c34 | |||
| 0da8fa0d96 | |||
| d714325eb2 | |||
| 2a23c72ee1 | |||
| fea9d1095b | |||
| a54a70749a | |||
| 52f5423743 | |||
| 55f8538413 | |||
| e8ce3e7140 | |||
| 44e6fcac33 | |||
| 46830e903b | |||
| 656460e0ab | |||
| 6c94027a26 | |||
| 64954fa3e4 | |||
| 8c145d88ce | |||
| e55b533504 | |||
| 74779788e0 | |||
| f20bd3842a | |||
| 1370317813 | |||
| af210768af | |||
| df3a8744d6 | |||
| 503a9cddc5 | |||
| a61ad6e7b8 | |||
| 83e489c425 | |||
| 0d2ac756e5 | |||
| da55e1519e | |||
| b6ad46a37f | |||
| 9bc3abbc79 | |||
| ccc347eddc | |||
| a013f5bef2 | |||
| 75acd74d5b | |||
| 0c63156428 | |||
| 8173c3c45d | |||
| f23700b1b7 | |||
| 41e11c6c2e | |||
|
|
0eb5e560d2 | ||
|
|
a61e66bb47 | ||
|
|
f9dc4500d9 | ||
| d3f633e41d | |||
| 61bd6e81f1 | |||
| edddfb7fc6 | |||
| 71d1719850 | |||
| 7c2d336bd7 | |||
|
|
bebbad2448 | ||
|
|
b43bd58f22 | ||
|
|
e59f8308ce | ||
|
|
f6d4ce8d0b | ||
|
|
af22385c68 | ||
|
|
8fd23008e3 | ||
|
|
8f66837f6f | ||
|
|
670559ec87 | ||
|
|
7f92a7280d | ||
|
|
019f8c0550 | ||
|
|
952af7adc5 | ||
|
|
e3e75a40bf | ||
|
|
b4d3cb514c | ||
|
|
01f17dc1cc | ||
|
|
c3300f7c96 | ||
|
|
94c232c775 | ||
|
|
4f665070d7 | ||
|
|
bfc867826d | ||
|
|
af13e84c74 | ||
|
|
bcaa4b27d2 | ||
|
|
d0e0eee15a |
@@ -7,3 +7,10 @@ localsettings.py
|
||||
*~
|
||||
parsing_log.txt
|
||||
troggle
|
||||
troggle_log.txt
|
||||
.idea/*
|
||||
*.orig
|
||||
media/images/*
|
||||
.vscode/*
|
||||
.swp
|
||||
imagekit-off/
|
||||
|
||||
@@ -18,41 +18,50 @@ class TroggleModelAdmin(admin.ModelAdmin):
|
||||
class Media:
|
||||
js = ('jquery/jquery.min.js','js/QM_helper.js')
|
||||
|
||||
|
||||
class RoleInline(admin.TabularInline):
    """Inline editor for SurvexPersonRole rows on the parent admin page."""
    model = SurvexPersonRole
    extra = 4  # blank rows offered by default
|
||||
|
||||
|
||||
class SurvexBlockAdmin(TroggleModelAdmin):
    """Admin for survex blocks; edits the people/roles inline."""
    inlines = (RoleInline,)
|
||||
|
||||
|
||||
class ScannedImageInline(admin.TabularInline):
    """Inline editor for ScannedImage rows on the parent admin page."""
    model = ScannedImage
    extra = 4  # blank rows offered by default
|
||||
|
||||
|
||||
class OtherCaveInline(admin.TabularInline):
    """Inline editor for a cave's alternative names."""
    model = OtherCaveName
    extra = 1
|
||||
|
||||
|
||||
class SurveyAdmin(TroggleModelAdmin):
    """Admin for surveys: scanned images inline, searchable by expo year
    and wallet number."""
    inlines = (ScannedImageInline,)
    search_fields = ('expedition__year', 'wallet_number')
|
||||
|
||||
|
||||
class QMsFoundInline(admin.TabularInline):
    """Inline list of QMs found by the parent logbook entry."""
    model = QM
    fk_name = 'found_by'
    # need to add foreignkey to cave part
    fields = ('number', 'grade', 'location_description', 'comment')
    extra = 1
|
||||
|
||||
|
||||
class PhotoInline(admin.TabularInline):
    """Inline editor for DPhoto rows; the mugshot flag is hidden here."""
    model = DPhoto
    exclude = ['is_mugshot' ]
    extra = 1
|
||||
|
||||
|
||||
class PersonTripInline(admin.TabularInline):
    """Inline editor for trips; personexpedition uses a raw-id widget to
    avoid a huge dropdown."""
    model = PersonTrip
    raw_id_fields = ('personexpedition',)
    extra = 1
|
||||
|
||||
|
||||
#class LogbookEntryAdmin(VersionAdmin):
|
||||
class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
prepopulated_fields = {'slug':("title",)}
|
||||
@@ -72,17 +81,18 @@ class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
def export_logbook_entries_as_txt(modeladmin, request, queryset):
    """Admin action: download the selected logbook entries as plain text."""
    return downloadLogbook(request=request, queryset=queryset, extension='txt')
|
||||
|
||||
|
||||
|
||||
|
||||
class PersonExpeditionInline(admin.TabularInline):
    """Inline editor for a person's expedition attendances."""
    model = PersonExpedition
    extra = 1
|
||||
|
||||
|
||||
class PersonAdmin(TroggleModelAdmin):
    """Admin for people, searchable by name, with expeditions inline."""
    search_fields = ('first_name', 'last_name')
    inlines = (PersonExpeditionInline,)
|
||||
|
||||
|
||||
class QMAdmin(TroggleModelAdmin):
|
||||
search_fields = ('found_by__cave__kataster_number','number','found_by__date')
|
||||
list_display = ('__unicode__','grade','found_by','ticked_off_by')
|
||||
@@ -91,17 +101,21 @@ class QMAdmin(TroggleModelAdmin):
|
||||
list_per_page = 20
|
||||
raw_id_fields=('found_by','ticked_off_by')
|
||||
|
||||
|
||||
class PersonExpeditionAdmin(TroggleModelAdmin):
    """Admin for person-expedition links, searchable by person and year."""
    search_fields = ('person__first_name', 'expedition__year')
|
||||
|
||||
|
||||
class CaveAdmin(TroggleModelAdmin):
    """Admin for caves, searchable by official name and kataster numbers."""
    search_fields = ('official_name', 'kataster_number', 'unofficial_number')
    inlines = (OtherCaveInline,)
    # NOTE(review): ``extra`` is an InlineModelAdmin option and has no effect
    # on a ModelAdmin; kept for byte-equivalence, probably dead config.
    extra = 4
|
||||
|
||||
|
||||
class EntranceAdmin(TroggleModelAdmin):
    """Admin for entrances, searchable via the owning cave's kataster number."""
    search_fields = ('caveandentrance__cave__kataster_number',)
|
||||
|
||||
|
||||
admin.site.register(DPhoto)
|
||||
admin.site.register(Cave, CaveAdmin)
|
||||
admin.site.register(Area)
|
||||
@@ -125,17 +139,20 @@ admin.site.register(SurvexStation)
|
||||
admin.site.register(SurvexScansFolder)
|
||||
admin.site.register(SurvexScanSingle)
|
||||
|
||||
|
||||
def export_as_json(modeladmin, request, queryset):
    """Admin action: download the selected objects serialized as JSON.

    Returns an HttpResponse with a Content-Disposition attachment header.
    """
    # BUGFIX (django upgrade): HttpResponse's ``mimetype`` keyword was removed
    # in Django 1.7; the supported keyword is ``content_type``.
    response = HttpResponse(content_type="text/json")
    response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
    serializers.serialize("json", queryset, stream=response)
    return response
|
||||
|
||||
|
||||
def export_as_xml(modeladmin, request, queryset):
    """Admin action: download the selected objects serialized as XML.

    Returns an HttpResponse with a Content-Disposition attachment header.
    """
    # BUGFIX (django upgrade): ``mimetype`` was removed in Django 1.7;
    # use ``content_type`` instead.
    response = HttpResponse(content_type="text/xml")
    response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
    serializers.serialize("xml", queryset, stream=response)
    return response
|
||||
|
||||
|
||||
#admin.site.add_action(export_as_xml)
|
||||
#admin.site.add_action(export_as_json)
|
||||
|
||||
@@ -26,7 +26,7 @@ def listdir(*path):
|
||||
else:
|
||||
c = ""
|
||||
c = c.replace("#", "%23")
|
||||
print "FILE: ", settings.FILES + "listdir/" + c
|
||||
print("FILE: ", settings.FILES + "listdir/" + c)
|
||||
return urllib.urlopen(settings.FILES + "listdir/" + c).read()
|
||||
|
||||
def dirsAsList(*path):
|
||||
|
||||
@@ -148,32 +148,32 @@ def get_name(pe):
|
||||
else:
|
||||
return pe.person.first_name
|
||||
|
||||
class UploadFileForm(forms.Form):
|
||||
title = forms.CharField(max_length=50)
|
||||
file = forms.FileField()
|
||||
html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
lon_utm = forms.FloatField(required=False)
|
||||
lat_utm = forms.FloatField(required=False)
|
||||
slug = forms.CharField(max_length=50)
|
||||
date = forms.DateField(required=False)
|
||||
#class UploadFileForm(forms.Form):
|
||||
# title = forms.CharField(max_length=50)
|
||||
# file = forms.FileField()
|
||||
# html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
# lon_utm = forms.FloatField(required=False)
|
||||
# lat_utm = forms.FloatField(required=False)
|
||||
# slug = forms.CharField(max_length=50)
|
||||
# date = forms.DateField(required=False)
|
||||
|
||||
caves = [cave.slug for cave in Cave.objects.all()]
|
||||
caves.sort()
|
||||
caves = ["-----"] + caves
|
||||
cave = forms.ChoiceField([(c, c) for c in caves], required=False)
|
||||
# caves = [cave.slug for cave in Cave.objects.all()]
|
||||
# caves.sort()
|
||||
# caves = ["-----"] + caves
|
||||
# cave = forms.ChoiceField([(c, c) for c in caves], required=False)
|
||||
|
||||
entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
# entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
# qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
|
||||
expeditions = [e.year for e in Expedition.objects.all()]
|
||||
expeditions.sort()
|
||||
expeditions = ["-----"] + expeditions
|
||||
expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)
|
||||
# expeditions = [e.year for e in Expedition.objects.all()]
|
||||
# expeditions.sort()
|
||||
# expeditions = ["-----"] + expeditions
|
||||
# expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)
|
||||
|
||||
logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
# logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
|
||||
person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
# person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
|
||||
survey_point = forms.CharField()
|
||||
# survey_point = forms.CharField()
|
||||
|
||||
|
||||
|
||||
0
core/management/__init__.py
Normal file
0
core/management/__init__.py
Normal file
0
core/management/commands/__init__.py
Normal file
0
core/management/commands/__init__.py
Normal file
182
core/management/commands/reset_db.py
Normal file
182
core/management/commands/reset_db.py
Normal file
@@ -0,0 +1,182 @@
|
||||
"""Management command: wipe the troggle database and re-import legacy data.

Usage: ``manage.py reset_db <command>`` -- see ``usage()`` for the commands.
"""
import os
import time

from django.core.management.base import BaseCommand, CommandError
from django.core import management
from django.db import connection
from django.contrib.auth.models import User
# BUGFIX: ``reverse`` was used by pageredirects() but never imported.
try:
    from django.urls import reverse                 # Django >= 2.0
except ImportError:
    from django.core.urlresolvers import reverse    # Django < 2.0

# BUGFIX: Entrance is used by writeCaves() but was not imported; flatpages
# models are used by pageredirects().
from troggle.core.models import Cave, Entrance
import troggle.flatpages.models
import settings

databasename = settings.DATABASES['default']['NAME']
expouser = settings.EXPOUSER
expouserpass = settings.EXPOUSERPASS
expouseremail = settings.EXPOUSER_EMAIL


class Command(BaseCommand):
    help = 'This is normal usage, clear database and reread everything'

    # NOTE(review): the original also declared the pre-Django-1.8
    # ``option_list`` tuple with the same --foo flag; BaseCommand.option_list
    # was removed in Django 1.10, so argparse-based add_arguments() below is
    # now the single source of option declarations.

    def add_arguments(self, parser):
        # BUGFIX: since Django 1.8 positional arguments must be declared,
        # otherwise ``args`` in handle() is always empty and none of the
        # command words ("reset", "caves", ...) ever match.
        parser.add_argument('args', nargs='*')
        parser.add_argument(
            '--foo',
            action='store_true',
            dest='foo',
            help='Help text',
        )

    def handle(self, *args, **options):
        """Dispatch on the first recognised command word in *args*."""
        print(args)
        print(options)
        if "desc" in args:
            # NOTE(review): resetdesc() is not defined anywhere in this file
            # -- this branch will raise AttributeError; confirm intent.
            self.resetdesc()
        elif "scans" in args:
            self.import_surveyscans()
        elif "caves" in args:
            self.reload_db()
            self.make_dirs()
            self.pageredirects()
            self.import_caves()
        elif "people" in args:
            self.import_people()
        elif "QMs" in args:
            self.import_QMs()
        elif "tunnel" in args:
            self.import_tunnelfiles()
        elif "reset" in args:
            self.reset()
        elif "survex" in args:
            self.import_survex()
        elif "survexpos" in args:
            import parsers.survex
            parsers.survex.LoadPos()
        elif "logbooks" in args:
            self.import_logbooks()
        elif "autologbooks" in args:
            # NOTE(review): import_auto_logbooks() is not defined in this file.
            self.import_auto_logbooks()
        elif "dumplogbooks" in args:
            # NOTE(review): dumplogbooks() is not defined in this file.
            self.dumplogbooks()
        elif "foo" in args:
            self.stdout.write('Testing....')  # BUGFIX: typo "Tesing"
        else:
            # BUGFIX: ``"%s" % args`` raises TypeError when args has more than
            # one element; wrap the tuple in a 1-tuple.
            self.stdout.write("%s not recognised" % (args,))
            self.usage(options)

    # BUGFIX: all the helpers below were defined without ``self`` although
    # handle() invokes them as bound methods (self.reload_db() etc.).

    def reload_db(self):
        """Drop and recreate the database, run migrations, recreate the
        expo superuser."""
        if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
            try:
                os.remove(databasename)
            except OSError:
                pass  # no existing sqlite file -- nothing to remove
        else:
            cursor = connection.cursor()
            cursor.execute("DROP DATABASE %s" % databasename)
            cursor.execute("CREATE DATABASE %s" % databasename)
            cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
            cursor.execute("USE %s" % databasename)
        management.call_command('migrate', interactive=False)
        # management.call_command('syncdb', interactive=False)
        user = User.objects.create_user(expouser, expouseremail, expouserpass)
        user.is_staff = True
        user.is_superuser = True
        user.save()

    def make_dirs(self):
        """Make directories that troggle requires"""
        # should also deal with permissions here.
        if not os.path.isdir(settings.PHOTOS_ROOT):
            os.mkdir(settings.PHOTOS_ROOT)

    def import_caves(self):
        import parsers.caves
        print("importing caves")
        parsers.caves.readcaves()

    def import_people(self):
        import parsers.people
        parsers.people.LoadPersonsExpos()

    def import_logbooks(self):
        # The below line was causing errors I didn't understand (it said
        # LOGFILE was a string), and I couldn't be bothered to figure out
        # what was going on so I just catch the error with a try. - AC 21 May
        try:
            settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() + '\n' + '-' * 60)
        except Exception:
            pass

        import parsers.logbooks
        parsers.logbooks.LoadLogbooks()

    def import_survex(self):
        import parsers.survex
        parsers.survex.LoadAllSurvexBlocks()
        parsers.survex.LoadPos()

    def import_QMs(self):
        # importing the module runs the parser as a side effect
        import parsers.QMs

    def import_surveys(self):
        import parsers.surveys
        parsers.surveys.parseSurveys(logfile=settings.LOGFILE)

    def import_surveyscans(self):
        import parsers.surveys
        parsers.surveys.LoadListScans()

    def import_tunnelfiles(self):
        import parsers.surveys
        parsers.surveys.LoadTunnelFiles()

    def reset(self):
        """ Wipe the troggle database and import everything from legacy data
        """
        # BUGFIX: these were called unqualified although they are methods.
        self.reload_db()
        self.make_dirs()
        self.pageredirects()
        self.import_caves()
        self.import_people()
        self.import_surveyscans()
        self.import_survex()
        self.import_logbooks()
        self.import_QMs()
        try:
            self.import_tunnelfiles()
        except Exception:
            print("Tunnel files parser broken.")

        self.import_surveys()

    def pageredirects(self):
        """Create the flatpage redirects for legacy URLs."""
        for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
            f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
            f.save()

    def writeCaves(self):
        """Write each cave and entrance back out to its data file."""
        for cave in Cave.objects.all():
            cave.writeDataFile()
        for entrance in Entrance.objects.all():
            entrance.writeDataFile()

    def usage(self, options):
        print("""Usage is 'manage.py reset_db <command>'
              where command is:
              reset - this is normal usage, clear database and reread everything
              desc
              caves - read in the caves
              logbooks - read in the logbooks
              autologbooks
              dumplogbooks
              people
              QMs - read in the QM files
              resetend
              scans - read in the scanned surveynotes
              survex - read in the survex files
              survexpos
              tunnel - read in the Tunnel files
              writeCaves
              """)
|
||||
24
core/methods_millenial.py
Normal file
24
core/methods_millenial.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import utm
|
||||
import math
|
||||
from django.conf import settings
|
||||
|
||||
def lat_lon_entrance(utmstring):
    """Convert a "easting northing" UTM string (zone 33, band 'U' hard-coded)
    to a "lat lon" string with 5 decimal places.

    Returns 'Not found' when the string cannot be parsed or converted.
    """
    try:
        fields = utmstring.split()
        easting = float(fields[0])
        northing = float(fields[1])
        lat, lon = utm.to_latlon(easting, northing, 33, 'U')
        return "{:.5f} {:.5f}".format(lat, lon)
    # BUGFIX: was a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; Exception keeps the same best-effort behaviour
    # for parse/conversion failures only.
    except Exception:
        return 'Not found'
||||
|
||||
def top_camp_distance(utmstring):
    """Straight-line distance from a "easting northing" UTM string to top
    camp (settings.TOPCAMPX/TOPCAMPY), formatted with 1 decimal place.

    Returns 'Not found' when the string cannot be parsed.
    """
    try:
        fields = utmstring.split()
        x = float(fields[0])
        y = float(fields[1])
        tx = settings.TOPCAMPX
        ty = settings.TOPCAMPY
        # math.hypot is the stdlib Euclidean distance (same formula as the
        # hand-rolled sqrt of squares, with better numeric behaviour).
        dist = math.hypot(tx - x, ty - y)
        return "{:.1f}".format(dist)
    # BUGFIX: was a bare ``except:`` -- narrow to Exception so SystemExit
    # and KeyboardInterrupt are no longer swallowed.
    except Exception:
        return 'Not found'
|
||||
845
core/models.py
845
core/models.py
@@ -15,847 +15,8 @@ from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import *
|
||||
from troggle.core.models_survex import * #ancient models for both survex and other things
|
||||
from troggle.core.models_old import *
|
||||
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
    """Find QM wikilinks (settings.QM_PATTERN) in *wiki_text* and return the
    list of matching QM objects; unresolvable links are reported and skipped.
    """
    found = re.findall(settings.QM_PATTERN, wiki_text)
    res = []
    for wikilink in found:
        qmdict = {'urlroot': settings.URL_ROOT, 'cave': wikilink[2],
                  'year': wikilink[1], 'number': wikilink[3]}
        try:
            cave_slugs = CaveSlug.objects.filter(cave__kataster_number=qmdict['cave'])
            qm = QM.objects.get(found_by__cave_slug__in=cave_slugs,
                                found_by__date__year=qmdict['year'],
                                number=qmdict['number'])
            res.append(qm)
        except QM.DoesNotExist:
            # BUGFIX (py3 upgrade): was a Python 2 print *statement*, a
            # syntax error under Python 3; the rest of the codebase already
            # uses print().
            print('fail on ' + str(wikilink))

    return res
|
||||
|
||||
# Configure root logging to settings.LOGFILE.  If the log file cannot be
# opened (typically a permissions problem on the server), run the site's
# fix-permissions helper once and retry.
try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
# BUGFIX: was a bare ``except:``; only file-opening failures are expected
# here, so catch IO/OS errors instead of swallowing everything.
except (IOError, OSError):
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
class TroggleModel(models.Model):
    """Abstract base for all troggle models: adds the parser-tracking flag
    and the non-public (privacy) flag, plus admin-URL helpers."""
    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    def object_name(self):
        """Model class name as known to Django's meta machinery."""
        return self._meta.object_name

    def get_admin_url(self):
        """Absolute URL of this object's admin change page."""
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
class TroggleImageModel(ImageModel):
    """Abstract base for image-bearing models: parser-tracking flag plus the
    same admin-URL helpers as TroggleModel."""
    new_since_parsing = models.BooleanField(default=False, editable=False)

    def object_name(self):
        """Model class name as known to Django's meta machinery."""
        return self._meta.object_name

    def get_admin_url(self):
        """Absolute URL of this object's admin change page."""
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
    """A single expedition, usually identified by its year string."""
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def __unicode__(self):
        return self.year

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    def get_expedition_day(self, date):
        """Return the ExpeditionDay for *date*, creating it if absent.

        Construction function; should be moved out of the model.
        """
        existing = self.expeditionday_set.filter(date=date)
        if existing:
            assert len(existing) == 1
            return existing[0]
        day = ExpeditionDay(expedition=self, date=date)
        day.save()
        return day

    def day_min(self):
        """Earliest ExpeditionDay (by Meta ordering), or None when empty."""
        days = self.expeditionday_set.all()
        return days[0] if days else None

    def day_max(self):
        """Latest ExpeditionDay (by Meta ordering), or None when empty."""
        days = self.expeditionday_set.all()
        return days[len(days) - 1] if days else None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
    """A single calendar day belonging to an Expedition."""
    expedition = models.ForeignKey("Expedition")
    date = models.DateField()

    class Meta:
        ordering = ('date',)

    def GetPersonTrip(self, personexpedition):
        # NOTE(review): the ``personexpedition`` argument is never used --
        # the filter matches only this day; confirm whether the query should
        # also filter on personexpedition.
        trips = self.persontrip_set.filter(expeditionday=self)
        return trips[0] if trips else None
|
||||
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
class Person(TroggleModel):
    """A caver; may attend many expeditions (see PersonExpedition)."""
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    mug_shot = models.CharField(max_length=100, blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)

    orderref = models.CharField(max_length=200)  # for alphabetic ordering

    # notability/bisnotable used to be stored fields; they are methods below.
    user = models.OneToOneField(User, null=True, blank=True)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('person', kwargs={'first_name': self.first_name, 'last_name': self.last_name}))

    class Meta:
        # BUGFIX: the original declared ``class Meta`` twice; the second
        # definition silently replaced the first, discarding
        # verbose_name_plural.  Both options are merged here.
        verbose_name_plural = "People"
        ordering = ('orderref',)  # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __unicode__(self):
        if self.last_name:
            return "%s %s" % (self.first_name, self.last_name)
        return self.first_name

    def notability(self):
        """Notability score: non-guest attendance, weighted towards recent
        years (hard-coded 2012 baseline)."""
        notability = Decimal(0)
        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
        return notability

    def bisnotable(self):
        """True when notability exceeds 1/3."""
        return self.notability() > Decimal(1) / Decimal(3)

    def surveyedleglength(self):
        """Total surveyed leg length across all this person's expeditions."""
        return sum([pe.surveyedleglength() for pe in self.personexpedition_set.all()])

    def first(self):
        # NOTE(review): ordering mirrors the original; '-expedition' here and
        # 'expedition' in last() look swapped given Expedition's ('-year',)
        # Meta ordering -- confirm against callers before changing.
        return self.personexpedition_set.order_by('-expedition')[0]

    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
#
|
||||
class PersonExpedition(TroggleModel):
    """One Person's attendance at one Expedition."""
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50, blank=True, null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader', 'Expo leader'),
        ('medical', 'Expo medical officer'),
        ('treasurer', 'Expo treasurer'),
        ('sponsorship', 'Expo sponsorship coordinator'),
        ('research', 'Expo research coordinator'),
    )
    expo_committee_position = models.CharField(blank=True, null=True, choices=COMMITTEE_CHOICES, max_length=200)
    nickname = models.CharField(max_length=100, blank=True, null=True)

    class Meta:
        ordering = ('-expedition',)
        # order_with_respect_to = 'expedition'

    def GetPersonroles(self):
        """Roles grouped by survex block: consecutive roles sharing a survex
        path are merged into one comma-separated entry."""
        grouped = []
        for personrole in self.personrole_set.order_by('survexblock'):
            if grouped and grouped[-1]['survexpath'] == personrole.survexblock.survexpath:
                grouped[-1]['roles'] += ", " + str(personrole.role)
            else:
                grouped.append({'date': personrole.survexblock.date,
                                'survexpath': personrole.survexblock.survexpath,
                                'roles': str(personrole.role)})
        return grouped

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)

    # why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        """Display name, preferring 'First (nick) Last' when a nickname exists."""
        if self.nickname:
            return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
        if self.person.last_name:
            return "%s %s" % (self.person.first_name, self.person.last_name)
        return self.person.first_name

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition', kwargs={'first_name': self.person.first_name, 'last_name': self.person.last_name, 'year': self.expedition.year}))

    def surveyedleglength(self):
        """Total leg length over the distinct survex blocks this person had
        a role in during this expedition."""
        distinct_blocks = set(pr.survexblock for pr in self.personrole_set.all())
        return sum(block.totalleglength for block in distinct_blocks)

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        return self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))["day_min"]

    def day_max(self):
        return self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))["day_max"]
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
    """A single parsed logbook write-up for one date/trip."""
    date = models.DateField()  # MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)  # MJG wants to KILL THIS (redundant information)
    expedition = models.ForeignKey(Expedition, blank=True, null=True)  # yes this is double-
    # The removed ``author`` FK was typically redundant with
    # PersonTrip.is_logbook_entry_author. - AC 15 jun 09 / MJG
    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
    cave_slug = models.SlugField(max_length=50)
    place = models.CharField(max_length=100, blank=True, null=True, help_text="Only use this if you haven't chosen a cave")
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    filename = models.CharField(max_length=200, null=True)

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)

    def __getattribute__(self, item):
        # Allow a logbookentry's cave to be accessed as ``.cave`` despite
        # there being no real foreign key (only cave_slug).
        if item == "cave":
            return CaveSlug.objects.get(slug=self.cave_slug).cave
        return super(LogbookEntry, self).__getattribute__(item)

    def __init__(self, *args, **kwargs):
        # Accept ``cave=`` at construction time and translate it to cave_slug.
        if "cave" in kwargs.keys():
            if kwargs["cave"] is not None:
                kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
            kwargs.pop("cave")
        return super(LogbookEntry, self).__init__(*args, **kwargs)

    def isLogbookEntry(self):  # Function used in templates
        return True

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry', kwargs={'date': self.date, 'slug': self.slug}))

    def __unicode__(self):
        return "%s: (%s)" % (self.date, self.title)

    def get_next_by_id(self):
        # BUGFIX: the original performed the lookup but dropped the result
        # (implicitly returning None).
        return LogbookEntry.objects.get(id=self.id + 1)

    def get_previous_by_id(self):
        # BUGFIX: same missing ``return`` as get_next_by_id.
        return LogbookEntry.objects.get(id=self.id - 1)

    def new_QM_number(self):
        """Next QM number for this entry's cave and year, or None when the
        entry has no cave."""
        if self.cave:
            return self.cave.new_QM_number(self.date.year)
        # BUGFIX: was ``return none`` (lower-case), a NameError at runtime.
        return None

    def new_QM_found_link(self):
        """Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
        return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) + '&number=' + str(self.new_QM_number())

    def DayIndex(self):
        return list(self.expeditionday.logbookentry_set.all()).index(self)
|
||||
|
||||
#
|
||||
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
|
||||
#
|
||||
class PersonTrip(TroggleModel):
    """One person's participation in a logbook trip -- accounts for different
    time-underground values for people on the same logbook entry."""
    personexpedition = models.ForeignKey("PersonExpedition", null=True)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)

    # sequencing by person is computed on demand (difficult to solve locally)

    def persontrip_next(self):
        """Next trip by the same person-expedition in logbook-date order,
        or None when this is the last one."""
        later = PersonTrip.objects.filter(
            personexpedition=self.personexpedition,
            logbook_entry__date__gt=self.logbook_entry.date).order_by('logbook_entry__date').all()
        return later[0] if len(later) > 0 else None

    def persontrip_prev(self):
        """Previous trip by the same person-expedition, or None."""
        earlier = PersonTrip.objects.filter(
            personexpedition=self.personexpedition,
            logbook_entry__date__lt=self.logbook_entry.date).order_by('-logbook_entry__date').all()
        return earlier[0] if len(earlier) > 0 else None

    def place(self):
        """The trip's cave when the logbook entry has one, else its free-text place."""
        return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
|
||||
|
||||
|
||||
|
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
    """Hierarchical cave area (kataster area or CUCC-defined sub-area)."""
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    parent = models.ForeignKey('Area', blank=True, null=True)

    def __unicode__(self):
        if self.parent:
            return unicode(self.parent) + u" - " + unicode(self.short_name)
        return unicode(self.short_name)

    def kat_area(self):
        """Walk up the parent chain to the enclosing kataster area
        ("1623" or "1626"); implicitly returns None when neither is found."""
        if self.short_name in ["1623", "1626"]:
            return self.short_name
        if self.parent:
            return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(models.Model):
    """Join table linking a Cave to an Entrance, with the entrance letter."""
    cave = models.ForeignKey('Cave')
    entrance = models.ForeignKey('Entrance')
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    def __unicode__(self):
        return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class CaveSlug(models.Model):
    """A URL slug for a Cave; one per cave may be flagged primary."""
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Cave(TroggleModel):
    """A cave in the expedition dataset.

    Identified by either an official kataster number or an expedition
    "unofficial" number; most descriptive fields are free HTML text
    parsed from the cave description files.
    """
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True, null=True)
    kataster_code = models.CharField(max_length=20,blank=True,null=True)
    kataster_number = models.CharField(max_length=10,blank=True, null=True)
    unofficial_number = models.CharField(max_length=60,blank=True, null=True)
    entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
    explorers = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    equipment = models.TextField(blank=True,null=True)
    references = models.TextField(blank=True,null=True)
    survey = models.TextField(blank=True,null=True)
    kataster_status = models.TextField(blank=True,null=True)
    underground_centre_line = models.TextField(blank=True,null=True)
    notes = models.TextField(blank=True,null=True)
    length = models.CharField(max_length=100,blank=True,null=True)
    depth = models.CharField(max_length=100,blank=True,null=True)
    extent = models.CharField(max_length=100,blank=True,null=True)
    # Path to the survex data file, relative to the survex data root.
    survex_file = models.CharField(max_length=100,blank=True,null=True)
    description_file = models.CharField(max_length=200,blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    # Name of the XML cave-description file this record round-trips to
    # (see writeDataFile).
    filename = models.CharField(max_length=200)

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating

    #href = models.CharField(max_length=100)

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')
|
||||
|
||||
def hassurvey(self):
    """Report drawn-survey status for this cave.

    Returns "No" when there is no surveyed centre line, "Yes" when the
    survey HTML contains an image or link tag, otherwise "Missing"
    (surveyed but no drawn survey published).
    """
    if not self.underground_centre_line:
        return "No"
    # survey is a nullable TextField; the original called .find() on it
    # unguarded, raising AttributeError when survey was None.
    if self.survey and (self.survey.find("<img") > -1 or self.survey.find("<a") > -1
                        or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
        return "Yes"
    return "Missing"
|
||||
|
||||
def hassurveydata(self):
    """Report survex-data status: "No" (no centre line), "Yes" (a survex
    file is recorded), or "Missing" (surveyed but no file)."""
    if not self.underground_centre_line:
        return "No"
    return "Yes" if self.survex_file else "Missing"
|
||||
|
||||
def slug(self):
    """Return this cave's primary slug string, falling back to any slug;
    None when the cave has no slugs at all."""
    # Prefer the primary slug; otherwise take whichever slug exists.
    for candidates in (self.caveslug_set.filter(primary = True),
                       self.caveslug_set.filter()):
        if candidates:
            return candidates[0].slug
    return None
|
||||
|
||||
def ours(self):
    """True when this is a CUCC cave, i.e. "CUCC" appears in the
    explorers text.

    explorers is a nullable TextField; the original passed None straight
    to re.search, raising TypeError for caves with no explorers recorded.
    Such caves now simply count as not ours.
    """
    return bool(self.explorers and re.search(r'CUCC', self.explorers))
|
||||
|
||||
def reference(self):
    """Kataster-style reference string, e.g. "1623-204"; falls back to
    the unofficial number when no kataster number is assigned."""
    number = self.kataster_number if self.kataster_number else self.unofficial_number
    return "%s-%s" % (self.kat_area(), number)
|
||||
|
||||
def get_absolute_url(self):
    """Canonical URL for this cave, keyed on its kataster number,
    unofficial number, or (as a last resort) lower-cased official name.
    """
    if self.kataster_number:
        href = self.kataster_number
    elif self.unofficial_number:
        href = self.unofficial_number
    else:
        # BUG FIX: the original read the bare name `official_name`,
        # raising NameError for caves with neither number assigned.
        href = self.official_name.lower()
    #return settings.URL_ROOT + '/cave/' + href + '/'
    return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))
|
||||
|
||||
def __unicode__(self, sep = u": "):
    # Represent the cave by its slug.
    # NOTE(review): `sep` is never used in the body — looks vestigial.
    return unicode(self.slug())
|
||||
|
||||
def get_QMs(self):
    """QMs whose finding logbook entry is attributed to this cave.

    NOTE(review): a whole queryset is passed where a single slug value
    is expected (`found_by__cave_slug=self.caveslug_set.all()`);
    presumably `__in` was intended — verify against the QM/LogbookEntry
    field definitions before relying on this.
    """
    return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())
|
||||
|
||||
def new_QM_number(self, year=None):
    """Given a cave and a year, return the next unused QM number
    (1 when no QMs exist for that year).

    `year` defaults to the current year.  The original signature used
    `year=datetime.date.today().year`, which Python evaluates once at
    import time — a long-running server would keep handing out numbers
    for the year it was started in.
    """
    if year is None:
        year = datetime.date.today().year
    try:
        # Highest-numbered QM found in this cave during `year`.
        res = QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
    except IndexError:
        return 1
    return res.number + 1
|
||||
|
||||
def kat_area(self):
    """First kataster area found among this cave's areas, or None."""
    for candidate in self.area.all():
        ka = candidate.kat_area()
        if ka:
            return ka
    return None
|
||||
|
||||
def entrances(self):
    # All cave/entrance join rows for this cave (carries entrance_letter).
    # NOTE(review): shadows the `entrances` ManyToManyField of the same
    # name defined on the class — verify that is intentional.
    return CaveAndEntrance.objects.filter(cave=self)
|
||||
|
||||
def singleentrance(self):
    """True when exactly one entrance is linked to this cave.

    Uses a COUNT query; the original `len(...filter(...))` fetched and
    materialised every row just to count them.
    """
    return CaveAndEntrance.objects.filter(cave=self).count() == 1
|
||||
|
||||
def entrancelist(self):
    """Compressed listing of this cave's entrance letters.

    Consecutive letters are collapsed into en-dash ranges and separate
    runs are comma-joined, e.g. letters a,b,c,e give "a–c, e".

    The original implementation was broken: a lone letter rendered as
    "a, a", and after a gap the following letters were dropped entirely
    (a,b,d gave "a–b–b").  This version emits each run correctly.
    """
    letters = sorted(e.entrance_letter
                     for e in CaveAndEntrance.objects.filter(cave=self))
    if not letters:
        return ""
    res = letters[0]
    run_start = run_end = letters[0]
    for letter in letters[1:]:
        if chr(ord(run_end) + 1) == letter:
            # Extends the current consecutive run.
            run_end = letter
        else:
            # Close the current run (range form only when it spans more
            # than one letter) and start a new one.
            if run_end != run_start:
                res += "–" + run_end
            res += ", " + letter
            run_start = run_end = letter
    if run_end != run_start:
        res += "–" + run_end
    return res
|
||||
|
||||
def writeDataFile(self):
    """Render this cave back to its XML description file
    (settings.CAVEDESCRIPTIONS/<filename>) as UTF-8.
    """
    try:
        f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
    except:
        # Deliberate best-effort: on permission failure run the expo
        # permission-fixing script once, then retry the open.
        subprocess.call(settings.FIX_PERMISSIONS)
        f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
    t = loader.get_template('dataformat/cave.xml')
    c = Context({'cave': self})
    u = t.render(c)
    u8 = u.encode("utf-8")
    f.write(u8)
    f.close()
|
||||
|
||||
def getArea(self):
    """Most specific Area of this cave: any area that is the parent of
    another of the cave's areas is discarded, and the first survivor
    returned."""
    areas = self.area.all()
    lowest = list(areas)
    # Collect parents that are themselves among this cave's areas ...
    redundant = [a.parent for a in areas if a.parent in areas]
    # ... and drop each of them from the candidate list.
    for parent in redundant:
        if parent in lowest:
            lowest.remove(parent)
    return lowest[0]
|
||||
|
||||
def getCaveByReference(reference):
    """Look up a cave from an "area-code" reference such as "1623-204".

    The code part is matched against both kataster_number and
    unofficial_number; exactly one cave must match or the assertion
    fails.  The bare prints are Python 2 debug output.
    """
    areaname, code = reference.split("-", 1)
    print areaname, code
    area = Area.objects.get(short_name = areaname)
    print area
    foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
    print list(foundCaves)
    assert len(foundCaves) == 1
    return foundCaves[0]
|
||||
|
||||
class OtherCaveName(TroggleModel):
    """An alternative (historical or colloquial) name for a cave."""
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)

    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class EntranceSlug(models.Model):
    """URL slug for an Entrance; mirrors CaveSlug (at most one primary)."""
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique = True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
class Entrance(TroggleModel):
    """A cave entrance: location, marking and findability metadata.

    Station fields (tag/exact/other) name survex stations in decreasing
    order of reliability; see best_station() and find_location().
    """
    name = models.CharField(max_length=100, blank=True,null=True)
    entrance_description = models.TextField(blank=True,null=True)
    explorers = models.TextField(blank=True,null=True)
    map_description = models.TextField(blank=True,null=True)
    location_description = models.TextField(blank=True,null=True)
    approach = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    photo = models.TextField(blank=True,null=True)
    # How (or whether) the entrance is physically marked on the surface.
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True,null=True)
    # Whether the entrance can be relocated on the surface.
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True,null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    # Survex station names, most reliable first.
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True,null=True)
    bearings = models.TextField(blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    # XML entrance-description file this record round-trips to.
    filename = models.CharField(max_length=200)
    # Denormalised copy of the primary slug, filled lazily by slug().
    cached_primary_slug = models.CharField(max_length=200,blank=True,null=True)
|
||||
|
||||
def __unicode__(self):
    # Represent the entrance by its (cached) primary slug.
    return unicode(self.slug())
|
||||
|
||||
def exact_location(self):
    # SurvexStation for the exact-position station name (manager lookup;
    # see SurvexStation.objects.lookup for resolution semantics).
    return SurvexStation.objects.lookup(self.exact_station)

def other_location(self):
    # SurvexStation for the "other" (least reliable) station name.
    return SurvexStation.objects.lookup(self.other_station)
|
||||
|
||||
|
||||
def find_location(self):
|
||||
r = {'': 'To be entered ',
|
||||
'?': 'To be confirmed:',
|
||||
'S': '',
|
||||
'L': 'Lost:',
|
||||
'R': 'Refindable:'}[self.findability]
|
||||
if self.tag_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.tag_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
|
||||
except:
|
||||
return r + "%s Tag Station not in dataset" % self.tag_station
|
||||
if self.exact_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.exact_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
|
||||
except:
|
||||
return r + "%s Exact Station not in dataset" % self.tag_station
|
||||
if self.other_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.other_station)
|
||||
return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
|
||||
except:
|
||||
return r + "%s Other Station not in dataset" % self.tag_station
|
||||
if self.FINDABLE_CHOICES == "S":
|
||||
r += "ERROR, Entrance has been surveyed but has no survex point"
|
||||
if self.bearings:
|
||||
return r + self.bearings
|
||||
return r
|
||||
|
||||
def best_station(self):
    """Most reliable known station name for this entrance (tag, then
    exact, then other), or None when no station is recorded."""
    for station in (self.tag_station, self.exact_station, self.other_station):
        if station:
            return station
    return None
|
||||
|
||||
def has_photo(self):
    """Classify the photo field: "No" (empty), "Yes" (contains an image
    or link tag, either case), or "Missing" (text without one)."""
    if not self.photo:
        return "No"
    tags = ("<img", "<a", "<IMG", "<A")
    return "Yes" if any(self.photo.find(t) > -1 for t in tags) else "Missing"
|
||||
|
||||
def marking_val(self):
    """Display label for the marking code, or None if unrecognised."""
    return dict(self.MARKING_CHOICES).get(self.marking)

def findability_val(self):
    """Display label for the findability code, or None if unrecognised."""
    return dict(self.FINDABLE_CHOICES).get(self.findability)
|
||||
|
||||
def tag(self):
    # SurvexStation for the tag station (most reliable marker).
    return SurvexStation.objects.lookup(self.tag_station)
|
||||
|
||||
def needs_surface_work(self):
    """True unless the entrance is fully documented on the surface:
    surveyed coordinates, a usable photo, and a tag marking.

    BUG FIX: the original wrote `not self.has_photo`, testing the bound
    method object (always truthy, so the term was always False) instead
    of calling it.  has_photo() returns "Yes"/"Missing"/"No", so only
    "Yes" counts as having a photo.
    """
    return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"
|
||||
|
||||
def get_absolute_url(self):
    """Build this entrance's URL beneath its root cave's URL.

    NOTE(review): `get_ancestors()`, `get_root()` and `self.title` are
    not defined on Entrance anywhere in this file — this body looks
    copied from a tree-structured model (cf. subcave-style classes) and
    would raise AttributeError if called.  Verify before use.
    """
    ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
    if ancestor_titles:
        res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
    else:
        res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
    return res
|
||||
|
||||
def slug(self):
    """Primary slug for this entrance, computed lazily.

    On first call the result is memoised into cached_primary_slug and
    the row is saved (a write side effect); later calls return the
    cache.  Falls back to any slug when none is primary; returns None
    (leaving the cache empty) when the entrance has no slugs.
    """
    if not self.cached_primary_slug:
        primarySlugs = self.entranceslug_set.filter(primary = True)
        if primarySlugs:
            self.cached_primary_slug = primarySlugs[0].slug
            self.save()
        else:
            slugs = self.entranceslug_set.filter()
            if slugs:
                self.cached_primary_slug = slugs[0].slug
                self.save()
    return self.cached_primary_slug
|
||||
|
||||
def writeDataFile(self):
    """Render this entrance back to its XML description file
    (settings.ENTRANCEDESCRIPTIONS/<filename>) as UTF-8.
    """
    try:
        f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
    except:
        # Best-effort: fix expo file permissions and retry once.
        subprocess.call(settings.FIX_PERMISSIONS)
        f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
    t = loader.get_template('dataformat/entrance.xml')
    c = Context({'entrance': self})
    u = t.render(c)
    u8 = u.encode("utf-8")
    f.write(u8)
    f.close()
|
||||
|
||||
class CaveDescription(TroggleModel):
    """A prose description page that can reference subcaves, entrances
    and QMs; QM references are harvested from wikilinks on save."""
    short_name = models.CharField(max_length=50, unique = True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
    linked_qms = models.ManyToManyField("QM", blank=True,null=True)

    def __unicode__(self):
        # Prefer the long name; fall back to the mandatory short name.
        if self.long_name:
            return unicode(self.long_name)
        else:
            return unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.

        First save assigns a pk so M2M links can be added; the second
        save persists again after linking.
        NOTE(review): signature omits *args/**kwargs, so Django calls
        like save(force_insert=True) would fail — confirm no caller
        passes arguments.
        """
        super(CaveDescription, self).save()
        qm_list=get_related_by_wikilinks(self.description)
        for qm in qm_list:
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()
|
||||
|
||||
class NewSubCave(TroggleModel):
    """A named sub-area within a cave (referenced by surveys and
    cave descriptions)."""
    name = models.CharField(max_length=200, unique = True)
    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class QM(TroggleModel):
    """A "question mark": an open lead recorded during surveying.

    Based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    "Number","Grade","Area","Description","Page reference",
    "Nearest station","Completion description","Comment"
    """
    # Logbook entries for the trips that found / ticked off this QM.
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
    #cave = models.ForeignKey(Cave)
    #expedition = models.ForeignKey(Expedition)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES=(
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
        )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    #should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
    nearest_station = models.CharField(max_length=200,blank=True,null=True)
    area = models.CharField(max_length=100,blank=True,null=True)
    completion_description = models.TextField(blank=True,null=True)
    comment=models.TextField(blank=True,null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        # "<cave>-<year>-<number>"; the [6:] slice drops a fixed-width
        # leading prefix from the cave representation — presumably the
        # "1623-"-style area code; TODO confirm against Cave.__unicode__.
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))

    def get_next_by_id(self):
        # NOTE(review): assumes contiguous ids; raises DoesNotExist at
        # gaps or the last row.
        return QM.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        return QM.objects.get(id=self.id-1)

    def wiki_link(self):
        # Wiki markup form, e.g. u"[[QM:204-1999-1 A]]"-style code.
        return u"%s%s%s" % ('[[QM:',self.code(),']]')
|
||||
|
||||
# Storage for expedition photos, rooted at PHOTOS_ROOT and served
# from PHOTOS_URL.
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
class DPhoto(TroggleImageModel):
    """An expedition photograph with optional links to the people, cave,
    entrance, logbook entry and QM it depicts."""
    caption = models.CharField(max_length=1000,blank=True,null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
    contains_person = models.ManyToManyField(Person,blank=True,null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.',)
    # True for portraits used on people pages.
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave,blank=True,null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
    #nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
    nearest_QM = models.ForeignKey(QM,blank=True,null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    # django-imagekit thumbnail configuration.
    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    def __unicode__(self):
        # NOTE(review): caption is nullable — this returns None for an
        # uncaptioned photo, which unicode() rendering would reject;
        # verify captions are always set.
        return self.caption
|
||||
|
||||
# Storage for survey scans, rooted at SURVEY_SCANS and served from
# SURVEYS_URL.
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
|
||||
def get_scan_path(instance, filename):
    """upload_to callback for ScannedImage.file.

    Builds ./<year>/<year>#<wallet>/<contents><n>.jpg, where the wallet
    id concatenates wallet number and letter per the expo convention
    (2009#01 or 2009#X01).  `filename` (the uploaded name) is ignored.
    """
    year = instance.survey.expedition.year
    # Two values string-formatted together because the convention mixes
    # a number with an optional letter suffix.
    wallet = "%s%s" % (instance.survey.wallet_number, instance.survey.wallet_letter)
    basename = "%s%s.jpg" % (instance.contents, instance.number_in_wallet)
    return os.path.join('./', year, year + r'#' + wallet, basename)
|
||||
|
||||
class ScannedImage(TroggleImageModel):
    """One scanned page (notes / plan / elevation) from a survey wallet."""
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person,blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    # What the page contains; filtered on by Survey.notes()/plans()/elevations().
    contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    # django-imagekit thumbnail configuration.
    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'
    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    #This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
    def correctURL(self):
        # Percent-encode the '#' so browsers don't treat it as a fragment.
        return string.replace(self.file.url,r'#',r'%23')

    def __unicode__(self):
        # Reuse the upload-path builder as the display name.
        return get_scan_path(self,'')
|
||||
|
||||
class Survey(TroggleModel):
    """A survey wallet: the paperwork and drawings for one surveyed trip."""
    expedition = models.ForeignKey('Expedition') #REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True,null=True)
    wallet_letter = models.CharField(max_length=1,blank=True,null=True)
    comments = models.TextField(blank=True,null=True)
    location = models.CharField(max_length=400,blank=True,null=True) #REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock',blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
    #sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)
    def __unicode__(self):
        # Wallet id, e.g. "2009#01".
        # NOTE(review): wallet_number is nullable; int(None) raises here
        # — verify surveys always carry a number.
        return self.expedition.year+"#"+"%02d" % int(self.wallet_number)

    def notes(self):
        # All scanned note pages in this wallet.
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        return self.scannedimage_set.filter(contents='elevation')
|
||||
from troggle.core.models_millenial import * #updated models are here
|
||||
|
||||
864
core/models.py.old
Normal file
864
core/models.py.old
Normal file
@@ -0,0 +1,864 @@
|
||||
import urllib, urlparse, string, os, datetime, logging, re
|
||||
import subprocess
|
||||
from django.forms import ModelForm
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import *
|
||||
|
||||
from troggle.core.models_millenial import *
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
    """Resolve the QM wikilinks in wiki_text (per settings.QM_PATTERN)
    to QM model instances; links matching no QM are reported and
    skipped."""
    matches = re.findall(settings.QM_PATTERN, wiki_text)
    related = []
    for match in matches:
        # Pattern groups: [1]=year, [2]=cave kataster number, [3]=number.
        qmdict = {'urlroot': settings.URL_ROOT, 'cave': match[2],
                  'year': match[1], 'number': match[3]}
        try:
            cave_slugs = CaveSlug.objects.filter(cave__kataster_number = qmdict['cave'])
            related.append(QM.objects.get(found_by__cave_slug__in = cave_slugs,
                                          found_by__date__year = qmdict['year'],
                                          number = qmdict['number']))
        except QM.DoesNotExist:
            print('fail on '+str(match))
    return related
|
||||
|
||||
# Configure debug logging to settings.LOGFILE at import time; on
# permission failure run the expo permission-fixing script and retry.
try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
class TroggleModel(models.Model):
    """Abstract base: flags shared by all troggle models plus an admin
    URL helper."""
    # Set by the parsers for objects edited since the last parse run.
    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)
    def object_name(self):
        # The model's class name, used to build admin URLs.
        return self._meta.object_name

    def get_admin_url(self):
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
class TroggleImageModel(ImageModel):
    """Abstract base for image-bearing models; mirrors TroggleModel but
    extends imagekit's ImageModel."""
    new_since_parsing = models.BooleanField(default=False, editable=False)

    def object_name(self):
        return self._meta.object_name

    def get_admin_url(self):
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
    """A single expedition, identified by its year (stored as text)."""
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.year

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    # construction function. should be moved out
    def get_expedition_day(self, date):
        """Return the ExpeditionDay for `date`, creating it on demand
        (a write side effect)."""
        expeditiondays = self.expeditionday_set.filter(date=date)
        if expeditiondays:
            assert len(expeditiondays) == 1
            return expeditiondays[0]
        res = ExpeditionDay(expedition=self, date=date)
        res.save()
        return res

    def day_min(self):
        # First day of the expedition, or None if no days recorded
        # (ExpeditionDay.Meta orders by date).
        res = self.expeditionday_set.all()
        return res and res[0] or None

    def day_max(self):
        # Last day of the expedition, or None.
        res = self.expeditionday_set.all()
        return res and res[len(res) - 1] or None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
    """One calendar day of an expedition."""
    expedition = models.ForeignKey("Expedition")
    date = models.DateField()

    class Meta:
        ordering = ('date',)

    def GetPersonTrip(self, personexpedition):
        # First of this day's person-trips, or None.
        # NOTE(review): filters on expeditionday=self but ignores the
        # personexpedition argument — presumably it should also filter
        # on it; verify callers.
        personexpeditions = self.persontrip_set.filter(expeditionday=self)
        return personexpeditions and personexpeditions[0] or None
|
||||
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
class Person(TroggleModel):
    """A single person, who may attend many expedition years."""
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    mug_shot = models.CharField(max_length=100, blank=True,null=True)
    blurb = models.TextField(blank=True,null=True)

    #href = models.CharField(max_length=200)
    orderref = models.CharField(max_length=200) # for alphabetic

    #the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
    #notability = models.FloatField() # for listing the top 20 people
    #bisnotable = models.BooleanField(default=False)
    user = models.OneToOneField(User, null=True, blank=True)
    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))

    class Meta:
        verbose_name_plural = "People"
        ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __unicode__(self):
        if self.last_name:
            return "%s %s" % (self.first_name, self.last_name)
        return self.first_name

    def notability(self):
        """Score weighting recent non-guest attendance more heavily.

        NOTE(review): hard-coded reference year 2012 — divides by zero
        for a 2012 expedition and misweights later years; needs updating.
        """
        notability = Decimal(0)
        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
        return notability

    def bisnotable(self):
        # Notable enough for the top-people listing.
        return self.notability() > Decimal(1)/Decimal(3)

    def surveyedleglength(self):
        # Total surveyed length over all this person's expeditions.
        return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

    def first(self):
        # NOTE(review): '-expedition' ordering makes this the MOST
        # recent expedition, and last() the earliest — the names look
        # swapped; verify against template usage before changing.
        return self.personexpedition_set.order_by('-expedition')[0]
    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]

    #def Sethref(self):
        #if self.last_name:
            #self.href = self.first_name.lower() + "_" + self.last_name.lower()
            #self.orderref = self.last_name + " " + self.first_name
        #else:
        #    self.href = self.first_name.lower()
            #self.orderref = self.first_name
        #self.notability = 0.0 # set temporarily
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
#
|
||||
class PersonExpedition(TroggleModel):
    """One person's attendance at one expedition year."""
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50,blank=True,null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader','Expo leader'),
        ('medical','Expo medical officer'),
        ('treasurer','Expo treasurer'),
        ('sponsorship','Expo sponsorship coordinator'),
        ('research','Expo research coordinator'),
        )
    expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
    nickname = models.CharField(max_length=100,blank=True,null=True)

    def GetPersonroles(self):
        """Roles per survex block, merging consecutive roles on the same
        survex path into one entry ("roles" becomes comma-joined)."""
        res = [ ]
        for personrole in self.personrole_set.order_by('survexblock'):
            if res and res[-1]['survexpath'] == personrole.survexblock.survexpath:
                res[-1]['roles'] += ", " + str(personrole.role)
            else:
                res.append({'date':personrole.survexblock.date, 'survexpath':personrole.survexblock.survexpath, 'roles':str(personrole.role)})
        return res

    class Meta:
        ordering = ('-expedition',)
        #order_with_respect_to = 'expedition'

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)

    #why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        # Full display name, inserting the nickname when present.
        if self.nickname:
            return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
        if self.person.last_name:
            return "%s %s" % (self.person.first_name, self.person.last_name)
        return self.person.first_name

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))

    def surveyedleglength(self):
        # Sum of leg lengths over the distinct survex blocks this person
        # had a role in during this expedition.
        survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
        return sum([survexblock.totalleglength for survexblock in set(survexblocks)])

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        # Earliest trip date this expedition, or None.
        res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
        return res["day_min"]

    def day_max(self):
        # Latest trip date this expedition, or None.
        res = self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))
        return res["day_max"]
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
    """Single parsed entry from an expedition logbook."""
    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
    expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
    #author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
    # Re: the above- so this field should be "typist" or something, not "author". - AC 15 jun 09
    #MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author, in the rare it was not redundanty and of actually interest it could be added to the text.
    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
    # Slug of the cave visited; resolved to a Cave via __getattribute__.
    cave_slug = models.SlugField(max_length=50)
    place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    filename = models.CharField(max_length=200,null=True)

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)
|
||||
|
||||
def __getattribute__(self, item):
    # Allow a logbookentry's cave to be accessed as `.cave` despite not
    # having a proper foreign key: resolve via the stored cave_slug.
    # Raises CaveSlug.DoesNotExist for an unknown slug.
    if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
        return CaveSlug.objects.get(slug = self.cave_slug).cave
    return super(LogbookEntry, self).__getattribute__(item)
|
||||
|
||||
def __init__(self, *args, **kwargs):
    # Accept a `cave=` keyword for convenience: translate it to the
    # stored cave_slug (the cave's primary slug) before normal init.
    if "cave" in kwargs.keys():
        if kwargs["cave"] is not None:
            kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
        kwargs.pop("cave")
    return super(LogbookEntry, self).__init__(*args, **kwargs)
|
||||
|
||||
def isLogbookEntry(self): # Function used in templates
    # Templates can't use isinstance; this marks the object type.
    return True
|
||||
|
||||
def get_absolute_url(self):
    # Canonical URL keyed on date + slug.
    return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))
|
||||
|
||||
def __unicode__(self):
    # e.g. "2009-07-20: (Rigging 204)".
    return "%s: (%s)" % (self.date, self.title)
|
||||
|
||||
def get_next_by_id(self):
    """Logbook entry with the next database id.

    BUG FIX: the original performed the query but omitted `return`,
    so both of these navigation helpers always yielded None.
    Assumes contiguous ids; raises DoesNotExist at gaps or the ends.
    """
    return LogbookEntry.objects.get(id=self.id+1)

def get_previous_by_id(self):
    """Logbook entry with the previous database id (same fix as
    get_next_by_id)."""
    return LogbookEntry.objects.get(id=self.id-1)
|
||||
|
||||
def new_QM_number(self):
    """Next QM number for this entry's cave in this entry's year, or
    None when the entry has no cave.

    BUG FIX: the original wrote `return none` (lowercase), a NameError
    on the no-cave path.
    """
    if self.cave:
        nextQMnumber = self.cave.new_QM_number(self.date.year)
    else:
        return None
    return nextQMnumber
|
||||
|
||||
def new_QM_found_link(self):
    """Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
    # Admin add-form URL pre-populated via query-string parameters.
    return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) +'&number=' + str(self.new_QM_number())
|
||||
|
||||
def DayIndex(self):
    # Position of this entry among the day's entries (0-based), used to
    # order several write-ups for the same day.
    return list(self.expeditionday.logbookentry_set.all()).index(self)
|
||||
|
||||
#
|
||||
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
|
||||
#
|
||||
class PersonTrip(TroggleModel):
    """One person's participation in one logbook-entry trip (accounts
    for differing time-underground per person on the same trip)."""
    personexpedition = models.ForeignKey("PersonExpedition",null=True)

    #expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
    #date = models.DateField() #MJG wants to KILL THIS (redundant information)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)

    # sequencing by person (difficult to solve locally)
    #persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use funstion persontrip_next_auto)
    #persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS(and use funstion persontrip_prev_auto)

    def persontrip_next(self):
        # Chronologically next trip by the same person-expedition, or
        # None (strictly later date, so same-day trips are skipped).
        futurePTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
        if len(futurePTs) > 0:
            return futurePTs[0]
        else:
            return None

    def persontrip_prev(self):
        # Chronologically previous trip by the same person-expedition,
        # or None.
        pastPTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__lt = self.logbook_entry.date).order_by('-logbook_entry__date').all()
        if len(pastPTs) > 0:
            return pastPTs[0]
        else:
            return None

    def place(self):
        # The trip's cave when one is linked, else the free-text place.
        return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
|
||||
|
||||
|
||||
|
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
    """Hierarchical cave area, e.g. kataster areas 1623/1626 and their sub-areas."""
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    parent = models.ForeignKey('Area', blank=True, null=True)

    def __unicode__(self):
        # Prefix the parent chain when this is a sub-area.
        if not self.parent:
            return unicode(self.short_name)
        return unicode(self.parent) + u" - " + unicode(self.short_name)

    def kat_area(self):
        """Walk up the parent chain to the enclosing kataster area; None if outside both."""
        if self.short_name in ["1623", "1626"]:
            return self.short_name
        if self.parent:
            return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(models.Model):
    # Through-table linking a Cave to one of its Entrances, with the
    # conventional entrance letter (e.g. the 'b' in '40b').
    cave = models.ForeignKey('Cave')
    entrance = models.ForeignKey('Entrance')
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    def __unicode__(self):
        # Cave name immediately followed by the entrance letter.
        return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class CaveSlug(models.Model):
    # URL-safe alternative identifier for a Cave; a cave may have several
    # slugs, exactly one of which should be flagged primary.
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Cave(TroggleModel):
    """A cave: kataster bookkeeping, descriptive HTML blobs and survey links."""
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True, null=True)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    # NOTE(review): this field shares its name with the entrances() method
    # below; the later def wins in the class namespace, so the M2M is shadowed.
    # Left untouched because renaming either would change the public interface.
    entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
    explorers = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    equipment = models.TextField(blank=True, null=True)
    references = models.TextField(blank=True, null=True)
    survey = models.TextField(blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    depth = models.CharField(max_length=100, blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need separating

    #href = models.CharField(max_length=100)

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')

    def hassurvey(self):
        """'Yes'/'Missing'/'No': is there centre line data, and does the survey HTML embed or link an image?"""
        if not self.underground_centre_line:
            return "No"
        if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        """'Yes'/'Missing'/'No': is there centre line data backed by a survex file?"""
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        """The primary slug for this cave, falling back to any slug, else None."""
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        slugs = self.caveslug_set.filter()
        if slugs:
            return slugs[0].slug

    def ours(self):
        """True when 'CUCC' appears in the explorers text."""
        return bool(re.search(r'CUCC', self.explorers))

    def reference(self):
        """Kataster-style reference like '1623-204', preferring the kataster number."""
        if self.kataster_number:
            return "%s-%s" % (self.kat_area(), self.kataster_number)
        return "%s-%s" % (self.kat_area(), self.unofficial_number)

    def get_absolute_url(self):
        """URL for this cave keyed by kataster number, unofficial number or name.

        Fix: the final fallback referenced a bare ``official_name`` (NameError
        at runtime); it now reads the instance attribute.
        """
        if self.kataster_number:
            href = self.kataster_number
        elif self.unofficial_number:
            href = self.unofficial_number
        else:
            href = self.official_name.lower()
        #return settings.URL_ROOT + '/cave/' + href + '/'
        return urlparse.urljoin(settings.URL_ROOT, reverse('cave', kwargs={'cave_id': href,}))

    def __unicode__(self, sep=u": "):
        return unicode(self.slug())

    def get_QMs(self):
        """All QMs found on trips recorded against any of this cave's slugs.

        Fix: the original compared ``found_by__cave_slug`` for equality
        against a whole queryset; use an ``__in`` lookup, matching the
        pattern already used by get_related_by_wikilinks.
        """
        return QM.objects.filter(found_by__cave_slug__in=self.caveslug_set.all())

    def new_QM_number(self, year=None):
        """Given the year (default: the current year), return the next QM number.

        Fix: the default was previously ``year=datetime.date.today().year``,
        evaluated once at import time and therefore stale across New Year in
        long-running processes; it is now computed per call.
        """
        if year is None:
            year = datetime.date.today().year
        try:
            res = QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
        except IndexError:
            return 1
        return res.number + 1

    def kat_area(self):
        """The kataster area (via the Area hierarchy) this cave belongs to, or None."""
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        """All CaveAndEntrance links for this cave (shadows the M2M field above)."""
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        """True when the cave has exactly one entrance."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compressed entrance-letter list, collapsing consecutive letters into a range.

        NOTE(review): the run-compression bookkeeping looks fragile (prevR is
        not reset after a break, and the trailing append can duplicate a lone
        letter); behaviour kept as-is pending confirmation against real data.
        """
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            rs.append(e.entrance_letter)
        rs.sort()
        prevR = None
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            res += ", " + prevR
        else:
            res += "–" + prevR
        return res

    def writeDataFile(self):
        """Render this cave to its XML data file, fixing permissions on first failure."""
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            # Deliberate best-effort: run the site permission-fix script and retry once.
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        """The most specific Area this cave belongs to (one with no child also assigned)."""
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.parent in areas:
                try:
                    lowestareas.remove(area.parent)
                except:
                    pass
        return lowestareas[0]
|
||||
|
||||
def getCaveByReference(reference):
    """Resolve a reference like '1623-204' to its unique Cave.

    Splits the area short-name from the kataster/unofficial code, then matches
    the code against either numbering scheme within that area.  Raises
    Area.DoesNotExist for an unknown area and AssertionError when the match
    is not unique.

    Fix: dropped the stray debugging print() calls that polluted stdout on
    every lookup.
    """
    areaname, code = reference.split("-", 1)
    area = Area.objects.get(short_name=areaname)
    foundCaves = (list(Cave.objects.filter(area=area, kataster_number=code).all()) +
                  list(Cave.objects.filter(area=area, unofficial_number=code).all()))
    assert len(foundCaves) == 1
    return foundCaves[0]
|
||||
|
||||
class OtherCaveName(TroggleModel):
    # An alternative (unofficial) name by which a cave is known.
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)

    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class EntranceSlug(models.Model):
    # URL-safe alternative identifier for an Entrance; one slug per entrance
    # should be flagged primary (cf. CaveSlug).
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
class Entrance(TroggleModel):
    """A cave entrance: location, findability, physical marking and descriptions."""
    name = models.CharField(max_length=100, blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    # How (or whether) the entrance is physically marked on the mountain.
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    # Whether we can still find the entrance on the surface.
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)
    cached_primary_slug = models.CharField(max_length=200, blank=True, null=True)

    def __unicode__(self):
        return unicode(self.slug())

    def exact_location(self):
        """SurvexStation for the exact-position station."""
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        """SurvexStation for the 'other' station."""
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        """Human-readable position: best station coordinates, else bearings.

        Fixes: the exact-station and other-station error messages previously
        interpolated ``self.tag_station`` (copy-paste bug), and the
        surveyed-but-no-station check compared the FINDABLE_CHOICES tuple to
        "S" (never true) instead of ``self.findability``.
        """
        r = {'': 'To be entered ',
             '?': 'To be confirmed:',
             'S': '',
             'L': 'Lost:',
             'R': 'Refindable:'}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Tag Station not in dataset" % self.tag_station
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Exact Station not in dataset" % self.exact_station
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
            except:
                return r + "%s Other Station not in dataset" % self.other_station
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Most authoritative station name available: tag, then exact, then other."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        """'Yes'/'Missing'/'No': does the photo field embed or link an image?"""
        if self.photo:
            if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        """Display label for the current marking code."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        """Display label for the current findability code."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        # NOTE(review): "not self.has_photo" tests the bound method object,
        # which is always truthy, so that clause is always False; probably
        # meant self.has_photo() != "Yes".  Left unchanged pending confirmation.
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_ancestors()/get_root()/.title are not defined on
        # Entrance anywhere in this file -- this looks copied from a
        # tree-structured subcave model and appears broken/unreachable here.
        # Left as-is pending confirmation.
        ancestor_titles = '/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        """Primary slug for this entrance, cached on the model (and saved) on first use."""
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary=True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def writeDataFile(self):
        """Render this entrance to its XML data file, fixing permissions on first failure."""
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            # Deliberate best-effort: run the site permission-fix script and retry once.
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()
|
||||
|
||||
class CaveDescription(TroggleModel):
    """A named chunk of cave description, cross-linked to subcaves, entrances and QMs."""
    short_name = models.CharField(max_length=50, unique=True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True, null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True, null=True)
    linked_qms = models.ManyToManyField("QM", blank=True, null=True)

    def __unicode__(self):
        if self.long_name:
            return unicode(self.long_name)
        else:
            return unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self, *args, **kwargs):
        """Save, then store QM wikilinks found in the description as M2M links.

        Fix: accept and forward *args/**kwargs so Django's standard save
        options (force_insert, using, ...) are no longer silently discarded.
        The second save is intentionally plain: options like force_insert
        must not be re-applied to a row that now exists.
        """
        super(CaveDescription, self).save(*args, **kwargs)
        qm_list = get_related_by_wikilinks(self.description)
        for qm in qm_list:
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()
|
||||
|
||||
class NewSubCave(TroggleModel):
    # A named sub-part of a cave, identified only by its unique name.
    name = models.CharField(max_length=200, unique=True)

    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class QM(TroggleModel):
    """A Question Mark: a noted but unexplored lead.

    Based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    "Number","Grade","Area","Description","Page reference","Nearest station",
    "Completion description","Comment"
    """
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found', blank=True, null=True)
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off', null=True, blank=True)
    #cave = models.ForeignKey(Cave)
    #expedition = models.ForeignKey(Expedition)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES = (
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
    )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    # should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400, null=True, blank=True)
    nearest_station = models.CharField(max_length=200, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    completion_description = models.TextField(blank=True, null=True)
    comment = models.TextField(blank=True, null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        """Canonical id 'cave-year-number'.

        The [6:] slice strips a fixed-length prefix from the cave's unicode
        form -- presumably an area/slug prefix such as '1623-'; confirm
        against real slug data before relying on it.
        """
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm', kwargs={'cave_id': self.found_by.cave.kataster_number, 'year': self.found_by.date.year, 'qm_id': self.number, 'grade': self.grade}))

    def get_next_by_id(self):
        # QM with the next sequential primary key (assumes contiguous ids).
        return QM.objects.get(id=self.id + 1)

    def get_previous_by_id(self):
        # QM with the previous sequential primary key (assumes contiguous ids).
        return QM.objects.get(id=self.id - 1)

    def wiki_link(self):
        """Wiki markup reference to this QM, e.g. [[QM:<code>]]."""
        return u"%s%s%s" % ('[[QM:', self.code(), ']]')
|
||||
|
||||
# Storage backend for expedition photographs, served from PHOTOS_URL.
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
|
||||
class DPhoto(TroggleImageModel):
    """A photograph, optionally linked to people, a cave, an entrance, a QM or a logbook entry."""
    caption = models.CharField(max_length=1000, blank=True, null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry, blank=True, null=True)
    contains_person = models.ManyToManyField(Person, blank=True, null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.',)
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave, blank=True, null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file", blank=True, null=True)
    #nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
    nearest_QM = models.ForeignKey(QM, blank=True, null=True)
    lon_utm = models.FloatField(blank=True, null=True)
    lat_utm = models.FloatField(blank=True, null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    def __unicode__(self):
        # Fix: caption is nullable; returning None from __unicode__ breaks
        # Django's string coercion, so fall back to the empty string.
        return self.caption or u""
|
||||
|
||||
# Storage backend for scanned survey wallet pages, served from SURVEYS_URL.
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
|
||||
def get_scan_path(instance, filename):
    """Build the wallet-relative path for a scanned page, e.g. ./2009/2009#X01/notes3.jpg.

    The upload filename is ignored; the path is derived entirely from the
    owning Survey (expedition year, wallet number and optional letter) plus
    the scan's contents type and its position in the wallet.
    """
    year = instance.survey.expedition.year
    number = str(instance.survey.wallet_number)
    # Convention is 2009#01 or 2009#X01: prefix the wallet letter when present.
    if str(instance.survey.wallet_letter) != "None":
        number = str(instance.survey.wallet_letter) + number
    wallet = year + r'#' + number
    page = str(instance.contents) + str(instance.number_in_wallet) + r'.jpg'
    return os.path.join('./', year, wallet, page)
|
||||
|
||||
class ScannedImage(TroggleImageModel):
    """One scanned page (notes / plan sketch / elevation sketch) filed in a survey wallet."""
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person, blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    contents = models.CharField(max_length=20, choices=(('notes', 'notes'), ('plan', 'plan_sketch'), ('elevation', 'elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True, null=True)
    lat_utm = models.FloatField(blank=True, null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    # Ugly hack to cope with the #s in survey scan paths; the proper fix is a
    # custom storage backend that urlencodes the name for .url but not .path.
    def correctURL(self):
        """URL of the scan with '#' escaped so browsers don't treat it as a fragment.

        Fix: use the str method ``.replace`` instead of the long-deprecated
        ``string.replace`` module function (removed entirely in Python 3).
        """
        return self.file.url.replace(r'#', r'%23')

    def __unicode__(self):
        return get_scan_path(self, '')
|
||||
|
||||
class Survey(TroggleModel):
    # One survey trip / wallet (e.g. 2009#11) and its paperwork status.
    expedition = models.ForeignKey('Expedition')  #REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True, null=True)
    wallet_letter = models.CharField(max_length=1, blank=True, null=True)
    comments = models.TextField(blank=True, null=True)
    location = models.CharField(max_length=400, blank=True, null=True)  #REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock', blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person', related_name='centreline_printed_by', blank=True, null=True)
    #sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles', blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey', blank=True, null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True, null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person', related_name='integrated_into_main_sketch_by', blank=True, null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys', blank=True, null=True)

    def __unicode__(self):
        # NOTE(review): wallet_number is nullable, and int(None) would raise
        # here -- presumably every real Survey row has a wallet number; confirm.
        return self.expedition.year + "#" + "%02d" % int(self.wallet_number)

    def notes(self):
        # Scanned notes pages belonging to this wallet.
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        # Scanned plan sketches belonging to this wallet.
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        # Scanned elevation sketches belonging to this wallet.
        return self.scannedimage_set.filter(contents='elevation')
|
||||
83
core/models_millenial.py
Normal file
83
core/models_millenial.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
|
||||
import troggle.core.methods_millenial as methods_millenial
|
||||
|
||||
#
|
||||
# This file was created in 2019
|
||||
# It's a result of massive frustration with cluttered database of troggle
|
||||
# Maximal clarity of code was primary goal (previous code had very little comments)
|
||||
# Maximal speed of database rebuild was secondary goal
|
||||
#
|
||||
|
||||
#
|
||||
# The following file will tell you what fields and methods are available inside this database
# but be careful — you might miss some! ManyToMany fields can be used from the far end as well
|
||||
#
|
||||
|
||||
|
||||
#
|
||||
# Naming conventions:
|
||||
# (Upper/lower convention)
|
||||
# Class names are written Udddd_ddd_dddM - they finish with M for backwards compatibility
|
||||
# Fields/methods are written lower_lower_lower
|
||||
#
|
||||
|
||||
class PersonM(models.Model):
    """One physical person, with links to their surveys, expos and logbook entries."""
    # Just the full name as one string; talk to wookey if you disagree.
    name = models.CharField(max_length=100)
    # Surveys this person made ("made" := the survex file says so).
    surveys_made = models.ManyToManyField('SurveyM', related_name='people_surveyed')
    # Expos attended by this person ("attended" := folk.csv says so).
    expos_attended = models.ManyToManyField('ExpeditionM', related_name='people_attended')
    # Logbook chunks written by this person.
    logbook_entries_written = models.ManyToManyField('Logbook_entryM', related_name='people_wrote')
|
||||
|
||||
class CaveM(models.Model):
    """One 'thing' that people call a cave (2019 rebuild schema)."""
    # UTM string describing ONE(!) entrance. Purpose = findability.
    entrance = models.CharField(max_length=100)
    # Title given to the topmost survey in survex, numeric name otherwise; cf. name (e.g. 'Fishface').
    title = models.TextField()
    # Name given to the topmost survey in survex (e.g. '2017-cucc-28').
    name = models.TextField()
    # Survey objects that this cave contains.
    surveys = models.ManyToManyField('SurveyM', related_name='cave_parent')
    # Path to the top-level survex file.
    survex_file = models.TextField()
    # Total length of this cave (as given by cavern).
    total_length = models.FloatField()
    # Total depth of this cave (as given by cavern).
    total_depth = models.FloatField()
    # Link to the description.
    description = models.TextField()
    # Date of last visit.
    date = models.TextField()

    def top_camp_distance(self):
        """Distance of this cave's entrance from top camp."""
        return methods_millenial.top_camp_distance(self.entrance)

    def top_camp_bearing(self):
        """Bearing to this cave from top camp as a float north-based azimuth, e.g. 235.5."""
        return methods_millenial.top_camp_bearing(self.entrance)

    def top_camp_bearing_letter(self):
        """Bearing to this cave from top camp as a compass letter, e.g. 'NE'."""
        return methods_millenial.top_camp_bearing_letter(self.entrance)

    def lat_lon_entrance(self):
        """Entrance location as lat/lon."""
        return methods_millenial.lat_lon_entrance(self.entrance)
|
||||
|
||||
|
||||
class Cave_descriptionM(models.Model):
    """One .html description file; each field holds the contents of one XML element."""
    slug = models.TextField()
    explorers = models.TextField()
    underground_description = models.TextField()
    equipment = models.TextField()
    references = models.TextField()
    survey = models.TextField()
    kataster_status = models.TextField()
    underground_centre_line = models.TextField()
    # As given in the .html file.
    survex_file = models.TextField()
    notes = models.TextField()
|
||||
|
||||
|
||||
|
||||
class ExpeditionM(models.Model):
    """One expedition, usually one year."""
    # Date in format YYYY.MM.DD-YYYY.MM.DD.
    date = models.CharField(max_length=100)
|
||||
|
||||
|
||||
class SurveyM(models.Model):
    """One .svx file == one surveying trip."""
    # Date of the trip in format YYYY.MM.DD ("dated" := the date given by the .svx file).
    date = models.CharField(max_length=100)
    survex_file = models.TextField()
|
||||
|
||||
class Logbook_entryM(models.Model):
    """One chunk of logbook (cf. expo.survex.com/years/2015/logbook.html or similar)."""
    # Date as typed into the logbook.
    date = models.CharField(max_length=100)
    # Contents of the logbook chunk.
    contents = models.TextField()
|
||||
|
||||
class Parser_messageM(models.Model):
    """One error or warning message produced by any of the parsers."""
    # Name of the parser that produced the message.
    parsername = models.CharField(max_length = 20)
    # Content of the message.
    content = models.TextField()
    # [Error, Info] or similar.
    message_type = models.CharField(max_length = 10)
|
||||
|
||||
862
core/models_old.py
Normal file
862
core/models_old.py
Normal file
@@ -0,0 +1,862 @@
|
||||
import urllib, urlparse, string, os, datetime, logging, re
|
||||
import subprocess
|
||||
from django.forms import ModelForm
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
    """Return the QM objects referenced by QM wikilinks in ``wiki_text``.

    Matches settings.QM_PATTERN; links that do not resolve to an existing QM
    are reported to stdout and skipped rather than raising.
    """
    matches = re.findall(settings.QM_PATTERN, wiki_text)
    res = []
    for wikilink in matches:
        qmdict = {'urlroot': settings.URL_ROOT,
                  'cave': wikilink[2],
                  'year': wikilink[1],
                  'number': wikilink[3]}
        try:
            cave_slugs = CaveSlug.objects.filter(cave__kataster_number=qmdict['cave'])
            qm = QM.objects.get(found_by__cave_slug__in=cave_slugs,
                                found_by__date__year=qmdict['year'],
                                number=qmdict['number'])
            res.append(qm)
        except QM.DoesNotExist:
            print('fail on ' + str(wikilink))
    return res
|
||||
|
||||
# Configure debug-level logging to the expo logfile.  If the logfile is not
# writable (common after a bad deploy), run the site's permission-fixing
# script once and retry; the bare except is a deliberate best-effort catch-all.
try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
# This class is for adding fields and methods which all of our models will have.
class TroggleModel(models.Model):
    # Set by the parsers on re-import; hidden from the admin UI.
    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    def object_name(self):
        """Return the model's class name (used when building admin URLs)."""
        return self._meta.object_name

    def get_admin_url(self):
        """Absolute URL of this object's page in the Django admin."""
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
# Imagekit-based counterpart of TroggleModel for image-bearing models.
class TroggleImageModel(ImageModel):
    # Set by the parsers on re-import; cf. TroggleModel.
    new_since_parsing = models.BooleanField(default=False, editable=False)

    def object_name(self):
        """Return the model's class name (used when building admin URLs)."""
        return self._meta.object_name

    def get_admin_url(self):
        """Absolute URL of this object's page in the Django admin."""
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
#
# single Expedition, usually seen by year
#
class Expedition(TroggleModel):
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.year

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def get_absolute_url(self):
        """Absolute URL of this expedition's page."""
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    # construction function. should be moved out
    def get_expedition_day(self, date):
        """Return the ExpeditionDay for ``date``, creating and saving it if absent."""
        expeditiondays = self.expeditionday_set.filter(date=date)
        if expeditiondays:
            assert len(expeditiondays) == 1
            return expeditiondays[0]
        res = ExpeditionDay(expedition=self, date=date)
        res.save()
        return res

    def day_min(self):
        """Earliest ExpeditionDay of this expedition, or None if it has none."""
        res = self.expeditionday_set.all()
        return res and res[0] or None

    def day_max(self):
        """Latest ExpeditionDay of this expedition, or None if it has none."""
        res = self.expeditionday_set.all()
        return res and res[len(res) - 1] or None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
    """A single calendar day belonging to one Expedition."""
    expedition = models.ForeignKey("Expedition")
    date = models.DateField()

    class Meta:
        ordering = ('date',)

    def GetPersonTrip(self, personexpedition):
        """Return the first PersonTrip recorded on this day, or None.

        NOTE(review): the *personexpedition* argument is unused and the filter
        on expeditionday=self is a no-op on this related set — confirm intent.
        """
        trips = self.persontrip_set.filter(expeditionday=self)
        if trips:
            return trips[0]
        return None
||||
#
# single Person, can go on many years
#
class Person(TroggleModel):
    """A person who has attended one or more expeditions."""
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    # Path/name of the mugshot image, not an ImageField.
    mug_shot = models.CharField(max_length=100, blank=True,null=True)
    blurb = models.TextField(blank=True,null=True)

    #href = models.CharField(max_length=200)
    orderref = models.CharField(max_length=200) # for alphabetic

    #the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
    #notability = models.FloatField() # for listing the top 20 people
    #bisnotable = models.BooleanField(default=False)
    # Optional link to a Django auth user account.
    user = models.OneToOneField(User, null=True, blank=True)

    def get_absolute_url(self):
        """Return the public URL of this person's page."""
        return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))

    class Meta:
        verbose_name_plural = "People"
        ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __unicode__(self):
        if self.last_name:
            return "%s %s" % (self.first_name, self.last_name)
        return self.first_name

    def notability(self):
        """Score weighting recent non-guest attendance more heavily.

        NOTE(review): the hard-coded 2012 freezes the weighting in time, and a
        2012 expedition would divide by zero — confirm whether this is intended.
        """
        notability = Decimal(0)
        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
        return notability

    def bisnotable(self):
        # Threshold of 1/3 selects the "notable" people.
        return self.notability() > Decimal(1)/Decimal(3)

    def surveyedleglength(self):
        # Total surveyed leg length across all this person's expeditions.
        return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

    def first(self):
        # NOTE(review): orders by '-expedition' yet is named first() while
        # last() orders ascending — the two look swapped; verify before relying on them.
        return self.personexpedition_set.order_by('-expedition')[0]

    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]

    #def Sethref(self):
    #    #if self.last_name:
    #        #self.href = self.first_name.lower() + "_" + self.last_name.lower()
    #        #self.orderref = self.last_name + " " + self.first_name
    #    #else:
    #    # self.href = self.first_name.lower()
    #        #self.orderref = self.first_name
    #    #self.notability = 0.0 # set temporarily
||||
#
# Person's attenance to one Expo
#
class PersonExpedition(TroggleModel):
    """Joins a Person to an Expedition: one person's attendance in one year."""
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50,blank=True,null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader','Expo leader'),
        ('medical','Expo medical officer'),
        ('treasurer','Expo treasurer'),
        ('sponsorship','Expo sponsorship coordinator'),
        ('research','Expo research coordinator'),
        )
    expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
    nickname = models.CharField(max_length=100,blank=True,null=True)

    def GetPersonroles(self):
        """Return a list of {'date', 'survexpath', 'roles'} dicts, one per survex
        block, with roles for the same block joined into a comma-separated string."""
        res = [ ]
        for personrole in self.personrole_set.order_by('survexblock'):
            if res and res[-1]['survexpath'] == personrole.survexblock.survexpath:
                res[-1]['roles'] += ", " + str(personrole.role)
            else:
                res.append({'date':personrole.survexblock.date, 'survexpath':personrole.survexblock.survexpath, 'roles':str(personrole.role)})
        return res

    class Meta:
        ordering = ('-expedition',)
        #order_with_respect_to = 'expedition'

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)

    #why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        """Display name, inserting the nickname when one is set."""
        if self.nickname:
            return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
        if self.person.last_name:
            return "%s %s" % (self.person.first_name, self.person.last_name)
        return self.person.first_name

    def get_absolute_url(self):
        """Public URL of this person's page for this expedition year."""
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))

    def surveyedleglength(self):
        # Sum leg lengths over the distinct survex blocks this person worked on.
        survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
        return sum([survexblock.totalleglength for survexblock in set(survexblocks)])

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        """Earliest trip date for this person this year, or None if no trips."""
        res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
        return res["day_min"]

    def day_max(self):
        """Latest trip date for this person this year, or None if no trips."""
        res = self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))
        return res["day_max"]
||||
#
# Single parsed entry from Logbook
#
class LogbookEntry(TroggleModel):
    """One trip write-up parsed from an expedition logbook."""
    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
    expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
    #author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
    # Re: the above- so this field should be "typist" or something, not "author". - AC 15 jun 09
    #MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author, in the rare it was not redundanty and of actually interest it could be added to the text.
    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
    cave_slug = models.SlugField(max_length=50)
    place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    filename = models.CharField(max_length=200,null=True)

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)

    def __getattribute__(self, item):
        if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
            return CaveSlug.objects.get(slug = self.cave_slug).cave
        return super(LogbookEntry, self).__getattribute__(item)

    def __init__(self, *args, **kwargs):
        # Accept a "cave" keyword and translate it to the stored cave_slug.
        if "cave" in kwargs.keys():
            if kwargs["cave"] is not None:
                kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
            kwargs.pop("cave")
        return super(LogbookEntry, self).__init__(*args, **kwargs)

    def isLogbookEntry(self): # Function used in templates
        return True

    def get_absolute_url(self):
        """Public URL of this logbook entry."""
        return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))

    def __unicode__(self):
        return "%s: (%s)" % (self.date, self.title)

    def get_next_by_id(self):
        # Fixed: the looked-up entry was previously discarded (missing return),
        # matching the behaviour of QM.get_next_by_id.
        return LogbookEntry.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        # Fixed: missing return, as above.
        return LogbookEntry.objects.get(id=self.id-1)

    def new_QM_number(self):
        """Return the next QM number for this entry's cave in its year, or None
        when no cave is associated."""
        if self.cave:
            nextQMnumber=self.cave.new_QM_number(self.date.year)
        else:
            # Fixed: was `return none` (lowercase), a NameError at runtime.
            return None
        return nextQMnumber

    def new_QM_found_link(self):
        """Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
        return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) +'&number=' + str(self.new_QM_number())

    def DayIndex(self):
        # Position of this entry among all entries on the same ExpeditionDay.
        return list(self.expeditionday.logbookentry_set.all()).index(self)
||||
#
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
#
class PersonTrip(TroggleModel):
    """One person's participation in one logbook-entry trip."""
    personexpedition = models.ForeignKey("PersonExpedition",null=True)

    #expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
    #date = models.DateField() #MJG wants to KILL THIS (redundant information)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)

    # sequencing by person (difficult to solve locally)
    #persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use funstion persontrip_next_auto)
    #persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS(and use funstion persontrip_prev_auto)

    def persontrip_next(self):
        """This person's next trip (by logbook date) in the same expedition, or None."""
        later_trips = PersonTrip.objects.filter(
            personexpedition = self.personexpedition,
            logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
        if len(later_trips) > 0:
            return later_trips[0]
        return None

    def persontrip_prev(self):
        """This person's previous trip (by logbook date) in the same expedition, or None."""
        earlier_trips = PersonTrip.objects.filter(
            personexpedition = self.personexpedition,
            logbook_entry__date__lt = self.logbook_entry.date).order_by('-logbook_entry__date').all()
        if len(earlier_trips) > 0:
            return earlier_trips[0]
        return None

    def place(self):
        """Prefer the logbook entry's cave; fall back to its free-text place."""
        if self.logbook_entry.cave:
            return self.logbook_entry.cave
        return self.logbook_entry.place

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
    """A kataster area or CUCC-defined sub-area; areas nest via *parent*."""
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    parent = models.ForeignKey('Area', blank=True, null=True)

    def __unicode__(self):
        # Prefix with the parent's name when this is a sub-area.
        if not self.parent:
            return unicode(self.short_name)
        return unicode(self.parent) + u" - " + unicode(self.short_name)

    def kat_area(self):
        """Walk up the parent chain to the kataster area ("1623" or "1626");
        returns None implicitly when neither is found."""
        if self.short_name in ["1623", "1626"]:
            return self.short_name
        if self.parent:
            return self.parent.kat_area()
||||
class CaveAndEntrance(models.Model):
    """Through-table linking a Cave to an Entrance, with the entrance letter."""
    cave = models.ForeignKey('Cave')
    entrance = models.ForeignKey('Entrance')
    # Letter distinguishing multiple entrances of one cave, e.g. "a", "b".
    entrance_letter = models.CharField(max_length=20,blank=True,null=True)

    def __unicode__(self):
        return unicode(self.cave) + unicode(self.entrance_letter)
||||
class CaveSlug(models.Model):
    """A URL slug for a Cave; a cave may have several, one marked primary."""
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique = True)
    # True for the canonical slug used when rendering URLs.
    primary = models.BooleanField(default=False)
||||
class Cave(TroggleModel):
    """A cave, with its kataster identifiers, descriptions and survey metadata."""
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True, null=True)
    kataster_code = models.CharField(max_length=20,blank=True,null=True)
    kataster_number = models.CharField(max_length=10,blank=True, null=True)
    unofficial_number = models.CharField(max_length=60,blank=True, null=True)
    entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
    explorers = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    equipment = models.TextField(blank=True,null=True)
    references = models.TextField(blank=True,null=True)
    survey = models.TextField(blank=True,null=True)
    kataster_status = models.TextField(blank=True,null=True)
    underground_centre_line = models.TextField(blank=True,null=True)
    notes = models.TextField(blank=True,null=True)
    length = models.CharField(max_length=100,blank=True,null=True)
    depth = models.CharField(max_length=100,blank=True,null=True)
    extent = models.CharField(max_length=100,blank=True,null=True)
    survex_file = models.CharField(max_length=100,blank=True,null=True)
    description_file = models.CharField(max_length=200,blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    filename = models.CharField(max_length=200)

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating

    #href = models.CharField(max_length=100)

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')

    def hassurvey(self):
        """Return "Yes"/"Missing"/"No" depending on whether a drawn-up survey is linked."""
        if not self.underground_centre_line:
            return "No"
        if not self.survey:
            # Guard added: self.survey is nullable and .find() on None raised
            # AttributeError here; treat a missing survey text as "Missing".
            return "Missing"
        if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        """Return "Yes"/"Missing"/"No" depending on whether a survex file is recorded."""
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        """Return the primary slug, else any slug, else None."""
        primarySlugs = self.caveslug_set.filter(primary = True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        # True when CUCC appears in the explorers text.
        return bool(re.search(r'CUCC', self.explorers))

    def reference(self):
        """Return "area-number", preferring the kataster number over the unofficial one."""
        if self.kataster_number:
            return "%s-%s" % (self.kat_area(), self.kataster_number)
        else:
            return "%s-%s" % (self.kat_area(), self.unofficial_number)

    def get_absolute_url(self):
        """Public URL of this cave's page."""
        if self.kataster_number:
            href = self.kataster_number
        elif self.unofficial_number:
            href = self.unofficial_number
        else:
            # Fixed: was bare `official_name`, a NameError at runtime.
            href = self.official_name.lower()
        #return settings.URL_ROOT + '/cave/' + href + '/'
        return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))

    def __unicode__(self, sep = u": "):
        return unicode(self.slug())

    def get_QMs(self):
        # NOTE(review): comparing cave_slug against a whole queryset looks like
        # it should be found_by__cave_slug__in=... — confirm against callers.
        return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())

    def new_QM_number(self, year=datetime.date.today().year):
        """Given a cave and the current year, returns the next QM number."""
        # NOTE(review): the default is evaluated once at import time, so it is
        # the server start year, not "today" — callers should pass year explicitly.
        try:
            res=QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
        except IndexError:
            return 1
        return res.number+1

    def kat_area(self):
        """First of this cave's areas that resolves to a kataster area, else None."""
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        # NOTE(review): shadows the `entrances` ManyToMany field defined above.
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Render entrance letters compactly, collapsing runs, e.g. "a, c" or "a–d".

        NOTE(review): crashes when there are no entrances (prevR stays None) and
        does not reset the run after emitting a range — behaviour kept as-is.
        """
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            rs.append(e.entrance_letter)
        rs.sort()
        prevR = None
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1 ) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            res += ", " + prevR
        else:
            res += "–" + prevR
        return res

    def writeDataFile(self):
        """Render this cave to its XML data file; retries once after fixing permissions."""
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        """Return this cave's most specific area (one whose parent is not also listed)."""
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.parent in areas:
                try:
                    lowestareas.remove(area.parent)
                except:
                    pass
        return lowestareas[0]
|
||||
def getCaveByReference(reference):
    """Resolve an "area-code" reference (e.g. "1623-204") to the unique Cave.

    Matches on kataster_number first, then unofficial_number; asserts exactly
    one cave matches. The prints are existing debug output, kept as-is.
    """
    areaname, code = reference.split("-", 1)
    print(areaname, code)
    area = Area.objects.get(short_name = areaname)
    print(area)
    by_kataster = list(Cave.objects.filter(area = area, kataster_number = code).all())
    by_unofficial = list(Cave.objects.filter(area = area, unofficial_number = code).all())
    foundCaves = by_kataster + by_unofficial
    print(list(foundCaves))
    assert len(foundCaves) == 1
    return foundCaves[0]
||||
class OtherCaveName(TroggleModel):
    """An alternative name by which a Cave is known."""
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)

    def __unicode__(self):
        return unicode(self.name)
|
||||
class EntranceSlug(models.Model):
    """A URL slug for an Entrance; an entrance may have several, one primary."""
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique = True)
    # True for the canonical slug used when rendering URLs.
    primary = models.BooleanField(default=False)
|
||||
class Entrance(TroggleModel):
    """A cave entrance: location, marking, findability and descriptive text."""
    name = models.CharField(max_length=100, blank=True,null=True)
    entrance_description = models.TextField(blank=True,null=True)
    explorers = models.TextField(blank=True,null=True)
    map_description = models.TextField(blank=True,null=True)
    location_description = models.TextField(blank=True,null=True)
    approach = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    photo = models.TextField(blank=True,null=True)
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True,null=True)
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True,null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True,null=True)
    bearings = models.TextField(blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    filename = models.CharField(max_length=200)
    cached_primary_slug = models.CharField(max_length=200,blank=True,null=True)

    def __unicode__(self):
        return unicode(self.slug())

    def exact_location(self):
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        """Return a human-readable location string from the best available station.

        NOTE(review): raises KeyError when findability is NULL — confirm whether
        that can occur in the data before guarding.
        """
        r = {'': 'To be entered ',
             '?': 'To be confirmed:',
             'S': '',
             'L': 'Lost:',
             'R': 'Refindable:'}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Tag Station not in dataset" % self.tag_station
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                # Fixed: error message previously reported self.tag_station.
                return r + "%s Exact Station not in dataset" % self.exact_station
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
            except:
                # Fixed: error message previously reported self.tag_station.
                return r + "%s Other Station not in dataset" % self.other_station
        # Fixed: was `self.FINDABLE_CHOICES == "S"`, comparing the choices tuple
        # to a string (always False); the intent is clearly the findability value.
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Most authoritative station name available: tag, then exact, then other."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        """Return "Yes"/"Missing"/"No" depending on whether the photo text embeds an image or link."""
        if self.photo:
            if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        # Human-readable label for the marking code.
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        # Human-readable label for the findability code.
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        """True when the entrance still needs surveying, photographing or tagging."""
        # Fixed: `not self.has_photo` tested the bound method object (always
        # truthy, so the term was always False); compare the returned status.
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_ancestors/get_root/title are not defined on Entrance
        # in this file — this looks copied from a subcave model; confirm it is
        # ever called before relying on it.
        ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        """Return (and lazily cache) the primary slug, else any slug, else the cached value."""
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary = True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def writeDataFile(self):
        """Render this entrance to its XML data file; retries once after fixing permissions."""
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()
||||
class CaveDescription(TroggleModel):
    """A named block of cave description text, cross-linked to subcaves,
    entrances and QMs referenced by wikilinks in the text."""
    short_name = models.CharField(max_length=50, unique = True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
    linked_qms = models.ManyToManyField("QM", blank=True,null=True)

    def __unicode__(self):
        # Prefer the long name when one is set.
        return unicode(self.long_name) if self.long_name else unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.
        """
        # Save first so the row exists for the M2M adds, then save again.
        super(CaveDescription, self).save()
        for qm in get_related_by_wikilinks(self.description):
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()
||||
class NewSubCave(TroggleModel):
    """A named sub-part of a cave."""
    name = models.CharField(max_length=200, unique = True)

    def __unicode__(self):
        return unicode(self.name)
||||
class QM(TroggleModel):
    """A Question Mark: an unexplored lead recorded during surveying."""
    #based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    #"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
    #cave = models.ForeignKey(Cave)
    #expedition = models.ForeignKey(Expedition)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES=(
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
        )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    #should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
    nearest_station = models.CharField(max_length=200,blank=True,null=True)
    area = models.CharField(max_length=100,blank=True,null=True)
    completion_description = models.TextField(blank=True,null=True)
    comment=models.TextField(blank=True,null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        """Canonical "cave-year-number" code; the cave name is truncated by 6
        characters (presumably to strip an area prefix — TODO confirm)."""
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        """Public URL of this QM's page."""
        #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))

    def get_next_by_id(self):
        # Neighbouring QM by primary key (raises DoesNotExist at the end).
        return QM.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        return QM.objects.get(id=self.id-1)

    def wiki_link(self):
        # Wikilink form used inside description text, e.g. [[QM:204-1999-1 A]].
        return u"%s%s%s" % ('[[QM:',self.code(),']]')
||||
# Storage backend for expedition photos, served under PHOTOS_URL.
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)

class DPhoto(TroggleImageModel):
    """A photograph, optionally linked to the people/cave/entrance/QM it shows."""
    caption = models.CharField(max_length=1000,blank=True,null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
    contains_person = models.ManyToManyField(Person,blank=True,null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.',)
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave,blank=True,null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
    #nearest_survey_point = models.ForeignKey(SurveyStation,blank=True,null=True)
    nearest_QM = models.ForeignKey(QM,blank=True,null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        # django-imagekit thumbnail configuration.
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    def __unicode__(self):
        # NOTE(review): caption is nullable, so this can return None — confirm.
        return self.caption
||||
# Storage backend for survey scans, served under SURVEYS_URL.
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)

def get_scan_path(instance, filename):
    """Build the upload path for a ScannedImage inside its wallet directory,
    e.g. ./2009/2009#X01/notes3.jpg (wallet convention is 2009#01 or 2009#X01)."""
    year = instance.survey.expedition.year
    #print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
    wallet = str(instance.survey.wallet_number)
    letter = str(instance.survey.wallet_letter)
    if letter != "None":
        wallet = letter + wallet
    leaf = str(instance.contents) + str(instance.number_in_wallet) + r'.jpg'
    return os.path.join('./', year, year + r'#' + wallet, leaf)
||||
class ScannedImage(TroggleImageModel):
    """One scanned page (notes/plan/elevation) belonging to a Survey wallet."""
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person,blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        # django-imagekit thumbnail configuration.
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'
    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    #This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
    def correctURL(self):
        """Return file.url with '#' percent-encoded so browsers don't treat it as a fragment."""
        return string.replace(self.file.url,r'#',r'%23')

    def __unicode__(self):
        return get_scan_path(self,'')
||||
class Survey(TroggleModel):
    """A survey wallet: the bundle of notes/sketches for one surveying trip."""
    expedition = models.ForeignKey('Expedition') #REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True,null=True)
    wallet_letter = models.CharField(max_length=1,blank=True,null=True)
    comments = models.TextField(blank=True,null=True)
    location = models.CharField(max_length=400,blank=True,null=True) #REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock',blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
    #sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)

    def __unicode__(self):
        # Wallet code, e.g. "2009#01".
        # NOTE(review): wallet_number is nullable and int(None) raises here — confirm.
        return self.expedition.year+"#"+"%02d" % int(self.wallet_number)

    def notes(self):
        """Scans of the survey notes in this wallet."""
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        """Scans of the plan sketches in this wallet."""
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        """Scans of the elevation sketches in this wallet."""
        return self.scannedimage_set.filter(contents='elevation')
||||
@@ -6,6 +6,7 @@ import os, stat
|
||||
import re
|
||||
from troggle.core.models import SurvexScansFolder, SurvexScanSingle, SurvexBlock, TunnelFile
|
||||
import parsers.surveys
|
||||
import urllib
|
||||
|
||||
# inline fileabstraction into here if it's not going to be useful anywhere else
|
||||
# keep things simple and ignore exceptions everywhere for now
|
||||
@@ -17,13 +18,13 @@ def getMimeType(extension):
|
||||
"html": "text/html",
|
||||
}[extension]
|
||||
except:
|
||||
print "unknown file type"
|
||||
print("unknown file type")
|
||||
return "text/plain"
|
||||
|
||||
|
||||
def listdir(request, path):
|
||||
#try:
|
||||
return HttpResponse(fileAbstraction.listdir(path), mimetype = "text/plain")
|
||||
return HttpResponse(fileAbstraction.listdir(path), content_type="text/plain")
|
||||
#except:
|
||||
# raise Http404
|
||||
|
||||
@@ -33,7 +34,7 @@ def upload(request, path):
|
||||
def download(request, path):
|
||||
#try:
|
||||
|
||||
return HttpResponse(fileAbstraction.readFile(path), mimetype=getMimeType(path.split(".")[-1]))
|
||||
return HttpResponse(fileAbstraction.readFile(path), content_type=getMimeType(path.split(".")[-1]))
|
||||
#except:
|
||||
# raise Http404
|
||||
|
||||
@@ -46,6 +47,7 @@ extmimetypes = {".txt": "text/plain",
|
||||
".html": "text/html",
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
}
|
||||
|
||||
# dead
|
||||
@@ -93,9 +95,9 @@ def jgtfile(request, f):
|
||||
fin = open(fp)
|
||||
ftext = fin.read()
|
||||
fin.close()
|
||||
return HttpResponse(ftext, mimetype=mimetype)
|
||||
return HttpResponse(ftext, content_type=mimetype)
|
||||
|
||||
return HttpResponse("unknown file::%s::" % f, mimetype = "text/plain")
|
||||
return HttpResponse("unknown file::%s::" % f, content_type = "text/plain")
|
||||
|
||||
|
||||
def UniqueFile(fname):
|
||||
@@ -165,13 +167,13 @@ def jgtuploadfile(request):
|
||||
|
||||
def surveyscansfolder(request, path):
|
||||
#print [ s.walletname for s in SurvexScansFolder.objects.all() ]
|
||||
survexscansfolder = SurvexScansFolder.objects.get(walletname=path)
|
||||
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
|
||||
return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
|
||||
|
||||
def surveyscansingle(request, path, file):
|
||||
survexscansfolder = SurvexScansFolder.objects.get(walletname=path)
|
||||
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
|
||||
survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
|
||||
return HttpResponse(content=open(survexscansingle.ffile), mimetype="image/png")
|
||||
return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
|
||||
#return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
|
||||
|
||||
def surveyscansfolders(request):
|
||||
@@ -185,12 +187,12 @@ def tunneldata(request):
|
||||
|
||||
|
||||
def tunnelfile(request, path):
|
||||
tunnelfile = TunnelFile.objects.get(tunnelpath=path)
|
||||
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
|
||||
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
|
||||
return HttpResponse(content=open(tfile), mimetype="text/plain")
|
||||
return HttpResponse(content=open(tfile), content_type="text/plain")
|
||||
|
||||
def tunnelfileupload(request, path):
|
||||
tunnelfile = TunnelFile.objects.get(tunnelpath=path)
|
||||
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
|
||||
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
|
||||
|
||||
project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
|
||||
@@ -202,13 +204,13 @@ def tunnelfileupload(request, path):
|
||||
uploadedfile = request.FILES.values()[0]
|
||||
|
||||
if uploadedfile.field_name != "sketch":
|
||||
return HttpResponse(content="Error: non-sketch file uploaded", mimetype="text/plain")
|
||||
return HttpResponse(content="Error: non-sketch file uploaded", content_type="text/plain")
|
||||
if uploadedfile.content_type != "text/plain":
|
||||
return HttpResponse(content="Error: non-plain content type", mimetype="text/plain")
|
||||
return HttpResponse(content="Error: non-plain content type", content_type="text/plain")
|
||||
|
||||
# could use this to add new files
|
||||
if os.path.split(path)[1] != uploadedfile.name:
|
||||
return HttpResponse(content="Error: name disagrees", mimetype="text/plain")
|
||||
return HttpResponse(content="Error: name disagrees", content_type="text/plain")
|
||||
|
||||
orgsize = tunnelfile.filesize # = os.stat(tfile)[stat.ST_SIZE]
|
||||
|
||||
@@ -226,7 +228,7 @@ def tunnelfileupload(request, path):
|
||||
|
||||
uploadedfile.close()
|
||||
message = "File size %d overwritten with size %d" % (orgsize, tunnelfile.filesize)
|
||||
return HttpResponse(content=message, mimetype="text/plain")
|
||||
return HttpResponse(content=message, content_type="text/plain")
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -17,8 +17,37 @@ import re, urlparse
|
||||
from django.shortcuts import get_object_or_404
|
||||
import settings
|
||||
|
||||
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import string, os, sys
|
||||
import string, os, sys, subprocess
|
||||
|
||||
#
|
||||
# NEW CONTENT
|
||||
#
|
||||
|
||||
|
||||
from troggle.core.models import CaveM, Cave_descriptionM, ExpeditionM
|
||||
|
||||
def millenialcaves(request):
|
||||
#RW messing around area
|
||||
caves = CaveM.objects.all()
|
||||
descr = Cave_descriptionM.objects.all()
|
||||
return render_with_context(request,'millenialcaves.html',{'caves': caves,'descriptions' : descr})
|
||||
|
||||
def millenialdescription(request, slug):
|
||||
desc = Cave_descriptionM.objects.get(slug=slug)
|
||||
return render_with_context(request,'cave_uground_description.html', {'cave': desc})
|
||||
|
||||
def millenialpeople(request):
|
||||
expos = ExpeditionM.objects.all()
|
||||
return render_with_context(request,'peoplemillenial.html' , {'expos': expos})
|
||||
|
||||
|
||||
#
|
||||
# END NEW CONTENT
|
||||
#
|
||||
|
||||
|
||||
|
||||
def getCave(cave_id):
|
||||
"""Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm."""
|
||||
@@ -53,18 +82,38 @@ def caveindex(request):
|
||||
caves = Cave.objects.all()
|
||||
notablecavehrefs = settings.NOTABLECAVESHREFS
|
||||
notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs ]
|
||||
caves1623 = list(Cave.objects.filter(area__short_name = "1623"))
|
||||
#caves1623 = list(Cave.objects.filter(area__short_name = "1623"))
|
||||
caves1623 = list(Cave.objects.all())
|
||||
caves1626 = list(Cave.objects.filter(area__short_name = "1626"))
|
||||
caves1623.sort(caveCmp)
|
||||
caves1626.sort(caveCmp)
|
||||
return render_with_context(request,'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves':notablecaves, 'cavepage': True})
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def cave3d(request, cave_id=''):
|
||||
cave = getCave(cave_id)
|
||||
survexfilename = settings.SURVEX_DATA + cave.survex_file
|
||||
threedfilename = settings.THREEDCACHEDIR + '%s.3d' % cave_id
|
||||
if True or os.path.getmtime(survexfilename) > os.path.getmtime(threedfilename):
|
||||
subprocess.call(["cavern", "--output=%s" % threedfilename, survexfilename])
|
||||
test_file = open(threedfilename, 'rb')
|
||||
response = HttpResponse(content=test_file, content_type='application/3d')#mimetype is replaced by content_type for django 1.7
|
||||
response['Content-Disposition'] = 'attachment; filename=%s.3d' % cave_id
|
||||
# response['X-Sendfile'] = "%s.3d" % cave_id
|
||||
# It's usually a good idea to set the 'Content-Length' header too.
|
||||
# You can also set any other required headers: Cache-Control, etc.
|
||||
return response
|
||||
|
||||
def cave(request, cave_id='', offical_name=''):
|
||||
cave=getCave(cave_id)
|
||||
if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
|
||||
return render_with_context(request,'nonpublic.html', {'instance': cave, 'cavepage': True})
|
||||
return render_with_context(request,'nonpublic.html', {'instance': cave, 'cavepage': True, 'cave_id': cave_id})
|
||||
else:
|
||||
return render_with_context(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True})
|
||||
return render_with_context(request,'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True, 'cave_id': cave_id})
|
||||
|
||||
def caveEntrance(request, slug):
|
||||
cave = Cave.objects.get(caveslug__slug = slug)
|
||||
@@ -203,7 +252,7 @@ def qm(request,cave_id,qm_id,year,grade=None):
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
|
||||
|
||||
|
||||
def ent(request, cave_id, ent_letter):
|
||||
cave = Cave.objects.filter(kataster_number = cave_id)[0]
|
||||
cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]
|
||||
@@ -320,8 +369,8 @@ for n in maps.keys():
|
||||
W = (R-L)/2
|
||||
H = (T-B)/2
|
||||
for i in range(2):
|
||||
for j in range(2):
|
||||
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
|
||||
for j in range(2):
|
||||
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
|
||||
# Keys in the order in which we want the maps output
|
||||
mapcodes = ["all", "grieß","40", "76", "204", "tc"]
|
||||
# Field codes
|
||||
|
||||
@@ -16,9 +16,18 @@ from django.template.defaultfilters import slugify
|
||||
from troggle.helper import login_required_if_public
|
||||
import datetime
|
||||
|
||||
from django.views.generic.list import ListView
|
||||
from django.utils import timezone
|
||||
|
||||
# Django uses Context, not RequestContext when you call render_to_response. We always want to use RequestContext, so that django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get necessary settings variables passed to each template. So we use a custom method, render_response instead of render_to_response. Hopefully future Django releases will make this unnecessary.
|
||||
#from troggle.alwaysUseRequestContext import render_response
|
||||
|
||||
# Django uses Context, not RequestContext when you call render
|
||||
# to_response. We always want to use RequestContext, so that
|
||||
# django adds the context from settings.TEMPLATE_CONTEXT_PROCESSORS.
|
||||
# This way we automatically get necessary settings variables passed
|
||||
# to each template. So we use a custom method, render_response
|
||||
# instead of render_to_response. Hopefully future Django releases
|
||||
# will make this unnecessary.
|
||||
# from troggle.alwaysUseRequestContext import render_response
|
||||
|
||||
import re
|
||||
|
||||
@@ -50,13 +59,13 @@ def personindex(request):
|
||||
|
||||
|
||||
def expedition(request, expeditionname):
|
||||
expedition = Expedition.objects.get(year=int(expeditionname))
|
||||
this_expedition = Expedition.objects.get(year=int(expeditionname))
|
||||
expeditions = Expedition.objects.all()
|
||||
personexpeditiondays = [ ]
|
||||
dateditems = list(expedition.logbookentry_set.all()) + list(expedition.survexblock_set.all())
|
||||
dateditems = list(this_expedition.logbookentry_set.all()) + list(this_expedition.survexblock_set.all())
|
||||
dates = list(set([item.date for item in dateditems]))
|
||||
dates.sort()
|
||||
for personexpedition in expedition.personexpedition_set.all():
|
||||
for personexpedition in this_expedition.personexpedition_set.all():
|
||||
prow = [ ]
|
||||
for date in dates:
|
||||
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
|
||||
@@ -68,24 +77,33 @@ def expedition(request, expeditionname):
|
||||
|
||||
message = ""
|
||||
if "reload" in request.GET:
|
||||
message = LoadLogbookForExpedition(expedition)
|
||||
return render_with_context(request,'expedition.html', {'expedition': expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
|
||||
message = LoadLogbookForExpedition(this_expedition)
|
||||
return render_with_context(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'message':message, 'settings':settings, 'dateditems': dateditems })
|
||||
|
||||
def get_absolute_url(self):
|
||||
return ('expedition', (expedition.year))
|
||||
def get_absolute_url(self):
|
||||
return ('expedition', (expedition.year))
|
||||
|
||||
class ExpeditionListView(ListView):
|
||||
|
||||
model = Expedition
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super(ExpeditionListView, self).get_context_data(**kwargs)
|
||||
context['now'] = timezone.now()
|
||||
return context
|
||||
|
||||
|
||||
def person(request, first_name='', last_name='', ):
|
||||
person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||
this_person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||
|
||||
#This is for removing the reference to the user's profile, in case they set it to the wrong person
|
||||
# This is for removing the reference to the user's profile, in case they set it to the wrong person
|
||||
if request.method == 'GET':
|
||||
if request.GET.get('clear_profile')=='True':
|
||||
person.user=None
|
||||
person.save()
|
||||
this_person.user=None
|
||||
this_person.save()
|
||||
return HttpResponseRedirect(reverse('profiles_select_profile'))
|
||||
|
||||
return render_with_context(request,'person.html', {'person': person, })
|
||||
return render_with_context(request,'person.html', {'person': this_person, })
|
||||
|
||||
|
||||
def GetPersonChronology(personexpedition):
|
||||
@@ -115,20 +133,20 @@ def GetPersonChronology(personexpedition):
|
||||
|
||||
def personexpedition(request, first_name='', last_name='', year=''):
|
||||
person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||
expedition = Expedition.objects.get(year=year)
|
||||
personexpedition = person.personexpedition_set.get(expedition=expedition)
|
||||
this_expedition = Expedition.objects.get(year=year)
|
||||
personexpedition = person.personexpedition_set.get(expedition=this_expedition)
|
||||
personchronology = GetPersonChronology(personexpedition)
|
||||
return render_with_context(request,'personexpedition.html', {'personexpedition': personexpedition, 'personchronology':personchronology})
|
||||
|
||||
|
||||
def logbookentry(request, date, slug):
|
||||
logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
|
||||
this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)
|
||||
|
||||
if len(logbookentry)>1:
|
||||
return render_with_context(request, 'object_list.html',{'object_list':logbookentry})
|
||||
if len(this_logbookentry)>1:
|
||||
return render_with_context(request, 'object_list.html',{'object_list':this_logbookentry})
|
||||
else:
|
||||
logbookentry=logbookentry[0]
|
||||
return render_with_context(request, 'logbookentry.html', {'logbookentry': logbookentry})
|
||||
this_logbookentry=this_logbookentry[0]
|
||||
return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})
|
||||
|
||||
|
||||
def logbookSearch(request, extra):
|
||||
@@ -196,7 +214,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
|
||||
'expeditionyear': expeditionyear})
|
||||
f.write(template.render(context))
|
||||
f.close()
|
||||
print logbookparsers.parseAutoLogBookEntry(filename)
|
||||
print(logbookparsers.parseAutoLogBookEntry(filename))
|
||||
return HttpResponseRedirect(reverse('expedition', args=[expedition.year])) # Redirect after POST
|
||||
else:
|
||||
if pslug and pdate:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from troggle.core.models import Cave, Expedition, Person, LogbookEntry, PersonExpedition, PersonTrip, DPhoto, QM
|
||||
from troggle.core.forms import UploadFileForm
|
||||
#from troggle.core.forms import UploadFileForm
|
||||
from django.conf import settings
|
||||
from django import forms
|
||||
from django.template import loader, Context
|
||||
@@ -87,8 +87,8 @@ def downloadSurveys(request):
|
||||
def downloadLogbook(request,year=None,extension=None,queryset=None):
|
||||
|
||||
if year:
|
||||
expedition=Expedition.objects.get(year=year)
|
||||
logbook_entries=LogbookEntry.objects.filter(expedition=expedition)
|
||||
current_expedition=Expedition.objects.get(year=year)
|
||||
logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)
|
||||
filename='logbook'+year
|
||||
elif queryset:
|
||||
logbook_entries=queryset
|
||||
@@ -259,8 +259,8 @@ def newFile(request, pslug = None):
|
||||
# "TU": py.time_underground,
|
||||
# "author": py.is_logbook_entry_author}
|
||||
# for py in previouslbe.persontrip_set.all()])
|
||||
else:
|
||||
fileform = UploadFileForm() # An unbound form
|
||||
# else:
|
||||
# fileform = UploadFileForm() # An unbound form
|
||||
|
||||
return render_with_context(request, 'editfile.html', {
|
||||
'fileForm': fileform,
|
||||
|
||||
@@ -40,6 +40,62 @@ survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau
|
||||
|
||||
*end [surveyname]"""
|
||||
|
||||
|
||||
def millenialcaves(request):
|
||||
cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
|
||||
#cavesdircontents = { }
|
||||
|
||||
onefilecaves = [ ]
|
||||
multifilecaves = [ ]
|
||||
subdircaves = [ ]
|
||||
|
||||
millenialcaves = [ ]
|
||||
|
||||
|
||||
# go through the list and identify the contents of each cave directory
|
||||
for cavedir in os.listdir(cavesdir):
|
||||
if cavedir in ["144", "40"]: #????? RW
|
||||
continue
|
||||
|
||||
gcavedir = os.path.join(cavesdir, cavedir) #directory od 'large' cave
|
||||
|
||||
if os.path.isdir(gcavedir) and cavedir[0] != ".":
|
||||
subdirs, subsvx = identifycavedircontents(gcavedir)
|
||||
survdirobj = [ ]
|
||||
|
||||
for lsubsvx in subsvx:
|
||||
survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
|
||||
|
||||
# caves with subdirectories
|
||||
if subdirs:
|
||||
subsurvdirs = [ ]
|
||||
for subdir in subdirs:
|
||||
dsubdirs, dsubsvx = identifycavedircontents(os.path.join(gcavedir, subdir))
|
||||
assert not dsubdirs
|
||||
lsurvdirobj = [ ]
|
||||
for lsubsvx in dsubsvx:
|
||||
lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
|
||||
subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
|
||||
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
|
||||
|
||||
# multifile caves
|
||||
elif len(survdirobj) > 1:
|
||||
multifilecaves.append((survdirobj[0], survdirobj[1:]))
|
||||
# single file caves
|
||||
else:
|
||||
#print("survdirobj = ")
|
||||
#print(survdirobj)
|
||||
onefilecaves.append(survdirobj[0])
|
||||
|
||||
caves = Cave.objects.all()
|
||||
|
||||
return render_to_response('millenialcaves.html', {'settings': settings , 'caves':caves , "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def ReplaceTabs(stext):
|
||||
res = [ ]
|
||||
@@ -77,16 +133,16 @@ class SvxForm(forms.Form):
|
||||
def DiffCode(self, rcode):
|
||||
code = self.GetDiscCode()
|
||||
difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
|
||||
difflist = [ diffline.strip() for diffline in difftext if not re.match("\s*$", diffline) ]
|
||||
difflist = [ diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline) ]
|
||||
return difflist
|
||||
|
||||
def SaveCode(self, rcode):
|
||||
fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
|
||||
if not os.path.isfile(fname):
|
||||
# only save if appears valid
|
||||
if re.search("\[|\]", rcode):
|
||||
if re.search(r"\[|\]", rcode):
|
||||
return "Error: clean up all []s from the text"
|
||||
mbeginend = re.search("(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
|
||||
mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
|
||||
if not mbeginend:
|
||||
return "Error: no begin/end block here"
|
||||
if mbeginend.group(1) != mbeginend.group(2):
|
||||
@@ -98,7 +154,7 @@ class SvxForm(forms.Form):
|
||||
return "SAVED"
|
||||
|
||||
def Process(self):
|
||||
print "....\n\n\n....Processing\n\n\n"
|
||||
print("....\n\n\n....Processing\n\n\n")
|
||||
cwd = os.getcwd()
|
||||
os.chdir(os.path.split(settings.SURVEX_DATA + self.data['filename'])[0])
|
||||
os.system(settings.CAVERN + " --log " + settings.SURVEX_DATA + self.data['filename'] + ".svx")
|
||||
@@ -137,13 +193,13 @@ def svx(request, survex_file):
|
||||
if not difflist:
|
||||
message = "OUTPUT FROM PROCESSING"
|
||||
logmessage = form.Process()
|
||||
print logmessage
|
||||
print(logmessage)
|
||||
else:
|
||||
message = "SAVE FILE FIRST"
|
||||
form.data['code'] = rcode
|
||||
if "save" in rform.data:
|
||||
if request.user.is_authenticated():
|
||||
#print "sssavvving"
|
||||
#print("sssavvving")
|
||||
message = form.SaveCode(rcode)
|
||||
else:
|
||||
message = "You do not have authority to save this file"
|
||||
@@ -163,7 +219,7 @@ def svx(request, survex_file):
|
||||
difflist.insert(0, message)
|
||||
|
||||
#print [ form.data['code'] ]
|
||||
svxincludes = re.findall('\*include\s+(\S+)(?i)', form.data['code'] or "")
|
||||
svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")
|
||||
|
||||
vmap = {'settings': settings,
|
||||
'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
|
||||
@@ -256,7 +312,7 @@ def identifycavedircontents(gcavedir):
|
||||
# direct local non-database browsing through the svx file repositories
|
||||
# perhaps should use the database and have a reload button for it
|
||||
def survexcaveslist(request):
|
||||
cavesdir = os.path.join(settings.SURVEX_DATA, "caves")
|
||||
cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
|
||||
#cavesdircontents = { }
|
||||
|
||||
onefilecaves = [ ]
|
||||
@@ -264,9 +320,11 @@ def survexcaveslist(request):
|
||||
subdircaves = [ ]
|
||||
|
||||
# first sort the file list
|
||||
fnumlist = [ (-int(re.match("\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
|
||||
fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
|
||||
fnumlist.sort()
|
||||
|
||||
print(fnumlist)
|
||||
|
||||
# go through the list and identify the contents of each cave directory
|
||||
for num, cavedir in fnumlist:
|
||||
if cavedir in ["144", "40"]:
|
||||
@@ -278,7 +336,7 @@ def survexcaveslist(request):
|
||||
survdirobj = [ ]
|
||||
|
||||
for lsubsvx in subsvx:
|
||||
survdirobj.append(("caves/"+cavedir+"/"+lsubsvx, lsubsvx))
|
||||
survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
|
||||
|
||||
# caves with subdirectories
|
||||
if subdirs:
|
||||
@@ -288,7 +346,7 @@ def survexcaveslist(request):
|
||||
assert not dsubdirs
|
||||
lsurvdirobj = [ ]
|
||||
for lsubsvx in dsubsvx:
|
||||
lsurvdirobj.append(("caves/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
|
||||
lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
|
||||
subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
|
||||
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
|
||||
|
||||
@@ -297,6 +355,8 @@ def survexcaveslist(request):
|
||||
multifilecaves.append((survdirobj[0], survdirobj[1:]))
|
||||
# single file caves
|
||||
else:
|
||||
#print("survdirobj = ")
|
||||
#print(survdirobj)
|
||||
onefilecaves.append(survdirobj[0])
|
||||
|
||||
return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
|
||||
|
||||
@@ -2,12 +2,12 @@ import os
|
||||
import time
|
||||
import settings
|
||||
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
|
||||
from django.core import management
|
||||
from django.db import connection
|
||||
from django.contrib.auth.models import User
|
||||
from django.http import HttpResponse
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.core.urlresolvers import reverse
|
||||
from troggle.core.models import Cave, Entrance
|
||||
import troggle.flatpages.models
|
||||
|
||||
@@ -22,7 +22,7 @@ def reload_db():
|
||||
os.remove(databasename)
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
else:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("DROP DATABASE %s" % databasename)
|
||||
cursor.execute("CREATE DATABASE %s" % databasename)
|
||||
@@ -42,7 +42,7 @@ def make_dirs():
|
||||
|
||||
def import_caves():
|
||||
import parsers.caves
|
||||
print "importing caves"
|
||||
print("importing caves")
|
||||
parsers.caves.readcaves()
|
||||
|
||||
def import_people():
|
||||
@@ -96,7 +96,7 @@ def reset():
|
||||
try:
|
||||
import_tunnelfiles()
|
||||
except:
|
||||
print "Tunnel files parser broken."
|
||||
print("Tunnel files parser broken.")
|
||||
|
||||
import_surveys()
|
||||
|
||||
@@ -104,18 +104,18 @@ def reset():
|
||||
def import_auto_logbooks():
|
||||
import parsers.logbooks
|
||||
import os
|
||||
for pt in core.models.PersonTrip.objects.all():
|
||||
for pt in troggle.core.models.PersonTrip.objects.all():
|
||||
pt.delete()
|
||||
for lbe in core.models.LogbookEntry.objects.all():
|
||||
for lbe in troggle.core.models.LogbookEntry.objects.all():
|
||||
lbe.delete()
|
||||
for expedition in core.models.Expedition.objects.all():
|
||||
for expedition in troggle.core.models.Expedition.objects.all():
|
||||
directory = os.path.join(settings.EXPOWEB,
|
||||
"years",
|
||||
expedition.year,
|
||||
"autologbook")
|
||||
for root, dirs, filenames in os.walk(directory):
|
||||
for filename in filenames:
|
||||
print os.path.join(root, filename)
|
||||
print(os.path.join(root, filename))
|
||||
parsers.logbooks.parseAutoLogBookEntry(os.path.join(root, filename))
|
||||
|
||||
#Temporary function until definative source of data transfered.
|
||||
@@ -127,10 +127,10 @@ def dumplogbooks():
|
||||
return pe.nickname
|
||||
else:
|
||||
return pe.person.first_name
|
||||
for lbe in core.models.LogbookEntry.objects.all():
|
||||
for lbe in troggle.core.models.LogbookEntry.objects.all():
|
||||
dateStr = lbe.date.strftime("%Y-%m-%d")
|
||||
directory = os.path.join(settings.EXPOWEB,
|
||||
"years",
|
||||
"years",
|
||||
lbe.expedition.year,
|
||||
"autologbook")
|
||||
if not os.path.isdir(directory):
|
||||
@@ -138,7 +138,7 @@ def dumplogbooks():
|
||||
filename = os.path.join(directory,
|
||||
dateStr + "." + slugify(lbe.title)[:50] + ".html")
|
||||
if lbe.cave:
|
||||
print lbe.cave.reference()
|
||||
print(lbe.cave.reference())
|
||||
trip = {"title": lbe.title, "html":lbe.text, "cave": lbe.cave.reference(), "caveOrLocation": "cave"}
|
||||
else:
|
||||
trip = {"title": lbe.title, "html":lbe.text, "location":lbe.place, "caveOrLocation": "location"}
|
||||
@@ -156,7 +156,7 @@ def dumplogbooks():
|
||||
|
||||
def pageredirects():
|
||||
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
|
||||
f = flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
|
||||
f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
|
||||
f.save()
|
||||
|
||||
def writeCaves():
|
||||
@@ -166,7 +166,7 @@ def writeCaves():
|
||||
entrance.writeDataFile()
|
||||
|
||||
def usage():
|
||||
print """Usage is 'python databaseReset.py <command>'
|
||||
print("""Usage is 'python databaseReset.py <command>'
|
||||
where command is:
|
||||
reset - this is normal usage, clear database and reread everything
|
||||
desc
|
||||
@@ -180,13 +180,16 @@ def usage():
|
||||
scans - read in the scanned surveynotes
|
||||
survex - read in the survex files
|
||||
survexpos
|
||||
surveys
|
||||
tunnel - read in the Tunnel files
|
||||
writeCaves
|
||||
"""
|
||||
""")
|
||||
|
||||
if __name__ == "__main__":
|
||||
import core.models
|
||||
import troggle.core.models
|
||||
import sys
|
||||
import django
|
||||
django.setup()
|
||||
if "desc" in sys.argv:
|
||||
resetdesc()
|
||||
elif "scans" in sys.argv:
|
||||
@@ -210,7 +213,7 @@ if __name__ == "__main__":
|
||||
try:
|
||||
import_tunnelfiles()
|
||||
except:
|
||||
print "Tunnel files parser broken."
|
||||
print("Tunnel files parser broken.")
|
||||
import_surveys()
|
||||
import_descriptions()
|
||||
parse_descriptions()
|
||||
@@ -230,10 +233,12 @@ if __name__ == "__main__":
|
||||
dumplogbooks()
|
||||
elif "writeCaves" in sys.argv:
|
||||
writeCaves()
|
||||
elif "surveys" in sys.argv:
|
||||
import_surveys()
|
||||
elif "help" in sys.argv:
|
||||
usage()
|
||||
else:
|
||||
print "%s not recognised" % sys.argv
|
||||
print("%s not recognised" % sys.argv)
|
||||
usage()
|
||||
|
||||
|
||||
|
||||
127
databaseResetM.py
Normal file
127
databaseResetM.py
Normal file
@@ -0,0 +1,127 @@
|
||||
import os
|
||||
import time
|
||||
import settings
|
||||
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
|
||||
from django.core import management
|
||||
from django.db import connection
|
||||
from django.contrib.auth.models import User
|
||||
from django.http import HttpResponse
|
||||
from django.core.urlresolvers import reverse
|
||||
from troggle.core.models import Cave, Entrance
|
||||
from troggle.core.models import PersonM, SurveyM, CaveM, ExpeditionM, Logbook_entryM, Cave_descriptionM
|
||||
import troggle.flatpages.models
|
||||
|
||||
databasename=settings.DATABASES['default']['NAME']
|
||||
expouser=settings.EXPOUSER
|
||||
expouserpass=settings.EXPOUSERPASS
|
||||
expouseremail=settings.EXPOUSER_EMAIL
|
||||
|
||||
def destroy():
|
||||
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
|
||||
try:
|
||||
os.remove(databasename)
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("DROP DATABASE %s" % databasename)
|
||||
cursor.execute("CREATE DATABASE %s" % databasename)
|
||||
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
|
||||
cursor.execute("USE %s" % databasename)
|
||||
management.call_command('syncdb', interactive=False)
|
||||
user = User.objects.create_user(expouser, expouseremail, expouserpass)
|
||||
user.is_staff = True
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
print('Nuked the database and rebuilt it. You savage monster')
|
||||
|
||||
def gracefull_flush():
|
||||
CaveM.objects.all().delete()
|
||||
PersonM.objects.all().delete()
|
||||
SurveyM.objects.all().delete()
|
||||
ExpeditionM.objects.all().delete()
|
||||
Logbook_entryM.objects.all().delete()
|
||||
Cave_descriptionM.objects.all().delete()
|
||||
print('Deleted contents of the database, ready to load new stuff :)')
|
||||
|
||||
def load_redirects():
|
||||
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
|
||||
f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
|
||||
f.save()
|
||||
|
||||
def load_surveys():
|
||||
SurveyM.objects.all().delete()
|
||||
import troggle.parsers.surveysM
|
||||
troggle.parsers.surveysM.load()
|
||||
|
||||
def load_caves():
    """Import all cave records via the M-style caves parser."""
    from troggle.parsers import cavesM
    cavesM.load()
|
||||
|
||||
def load_people():
    """Import all people/expedition records via the M-style people parser."""
    from troggle.parsers import peopleM
    peopleM.load()
|
||||
|
||||
def load_all():
    """Run every loader in dependency order: data tables first, links last."""
    load_caves()
    load_surveys()
    load_people()
    load_redirects()
    # NOTE(review): load_links is called here (and offered as a command below)
    # but is not defined anywhere visible in this file -- confirm it exists,
    # otherwise this line raises NameError.
    load_links()
    print('Loaded everything. Your database is ready to go :)')
|
||||
|
||||
|
||||
def help():
    """Print the command-line usage summary for this script.

    NOTE(review): deliberately shadows the builtin help(); it is only reached
    through the command dispatch in the __main__ block.
    """
    print("""Usage is 'python databaseResetM.py <command>'
where command is:
UNLOADERS:
gracefull_flush - flushes new (M-style) databases contents but keeps tables existing
destroy - destroys entire database and builds empty tables

LOADERS:
load_all - loads all tables and links
load_caves - loads all caves
load_surveys - loads all surveys (corresponds to .svx files)
load_people - loads all people
load_redirects - load page redirects
load_links - loads links between classes (run last! can't link non-existent things)

OTHER:
help - displays this page
----------------
This is a new version of database management written by RW 2019
----------------
""")
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: the first recognised command found anywhere in
    # sys.argv wins, checked in the order below.
    import troggle.core.models
    import sys
    import django

    django.setup()

    if "destroy" in sys.argv:
        destroy()
    elif "gracefull_flush" in sys.argv:
        gracefull_flush()

    elif "load_all" in sys.argv:
        load_all()
    elif "load_caves" in sys.argv:
        load_caves()
    elif "load_surveys" in sys.argv:
        load_surveys()
    elif "load_people" in sys.argv:
        load_people()
    elif "load_redirects" in sys.argv:
        load_redirects()
    elif "load_links" in sys.argv:
        # NOTE(review): load_links is not defined in this file -- this branch
        # raises NameError unless it is provided elsewhere. Confirm.
        load_links()
    elif "help" in sys.argv:
        help()
    else:
        # Prints the whole argv list (including the script name), then usage.
        print("%s not recognised" % sys.argv)
        help()
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -23,4 +23,6 @@ EXPOSE 8000
|
||||
|
||||
WORKDIR /expo/troggle
|
||||
|
||||
#CMD ["python","manage.py","runserver","0.0.0.0:8000"]
|
||||
#CMD ["python","manage.py","migrate"]
|
||||
|
||||
ENTRYPOINT ["python","manage.py","runserver","0.0.0.0:8000"]
|
||||
|
||||
81
docker/README.md
Normal file
81
docker/README.md
Normal file
@@ -0,0 +1,81 @@
|
||||
# Running troggle on Docker
|
||||
|
||||
## Install
|
||||
First you need to install
|
||||
- [docker-ce](https://docs.docker.com/install/)
|
||||
- [docker-compose](https://docs.docker.com/compose/install/)
|
||||
|
||||
If you don't want to type sudo for every docker command (you don't) you need to add your user to the docker group.
|
||||
|
||||
To do this
|
||||
|
||||
- Create the docker group.
|
||||
```bash
|
||||
$ sudo groupadd docker
|
||||
```
|
||||
|
||||
- Add your user to the docker group.
|
||||
```bash
|
||||
$ sudo usermod -aG docker $USER
|
||||
```
|
||||
## Setup
|
||||
|
||||
Checkout all 4 of the expo repos into one folder ([see here for repo cloning instructions](http://expo.survex.com/handbook/manual.html#quickstart)) eg.
|
||||
```
|
||||
$ ~/expo/loser
|
||||
/troggle
|
||||
/expoweb
|
||||
/tunnel
|
||||
```
|
||||
In the troggle dir copy localsettingsdocker.py to localsettings.py
|
||||
|
||||
In the folder you checked out all the repos into (expo in this example) create a folder called `expofiles` and in that a folder called `surveyscans` eg
|
||||
|
||||
```bash
|
||||
cd ~/expo
|
||||
mkdir -p expofiles/surveyscans
|
||||
```
|
||||
|
||||
## Starting the containers
|
||||
|
||||
To start the containers run
|
||||
```bash
|
||||
$ docker-compose up
|
||||
```
|
||||
You will now have a working troggle but with no data. To import the data you need to access the container run
|
||||
```bash
|
||||
$ docker exec -it docker_troggle_1 /bin/bash
|
||||
```
|
||||
This will give you a shell inside the troggle container
|
||||
|
||||
(You may get a warning like `bash: warning: setlocale: LC_ALL: cannot change locale (en_GB.UTF-8)` this can be ignored)
|
||||
|
||||
To import the data into troggle now run
|
||||
```bash
|
||||
$ python databaseReset.py reset
|
||||
```
|
||||
and wait .... this takes a while.
|
||||
The MySQL database is stored in a docker volume so once run through once the database will remain full of expo data even if you restart the containers.
|
||||
|
||||
## Using your new dev setup
|
||||
Even whilst the import is running you can browse to <http://localhost:8000>
|
||||
|
||||
Any changes made to files in your checkouts will be automatically loaded in the container and served. Sometimes changes to the Python files will require the system to reload, so refresh a couple of times before declaring you have broken something.
|
||||
|
||||
If you edit files from within the container they will have their ownership changed to root and may become uneditable by your normal user (you will have to become root to fix this), so don't do this!
|
||||
|
||||
## Stopping the containers
|
||||
To stop the running containers press Ctrl-c
|
||||
|
||||
## Killing it all
|
||||
If you get it in some state you want to start again run
|
||||
```bash
|
||||
$ docker-compose down
|
||||
```
|
||||
to destroy the containers
|
||||
and
|
||||
```bash
|
||||
$ docker volume ls
|
||||
$ docker volume rm docker_expo-mysqldb
|
||||
```
|
||||
to remove the database volume
|
||||
5
docker/compose/mysql.env
Normal file
5
docker/compose/mysql.env
Normal file
@@ -0,0 +1,5 @@
|
||||
MYSQL_ROOT_PASSWORD=expo123
|
||||
MYSQL_DATABASE=troggle
|
||||
MYSQL_USER=troggleuser
|
||||
MYSQL_PASSWORD=expo123
|
||||
|
||||
21
docker/docker-compose.yml
Normal file
21
docker/docker-compose.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
version: '3'
|
||||
services:
|
||||
troggle:
|
||||
restart: always
|
||||
build: .
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
- ../..:/expo
|
||||
links:
|
||||
- expo-mysql
|
||||
expo-mysql:
|
||||
restart: always
|
||||
image: "mariadb"
|
||||
env_file:
|
||||
- compose/mysql.env
|
||||
volumes:
|
||||
- expo-mysqldb:/var/lib/mysql
|
||||
|
||||
volumes:
|
||||
expo-mysqldb:
|
||||
5
docker/mysql.env
Normal file
5
docker/mysql.env
Normal file
@@ -0,0 +1,5 @@
|
||||
MYSQL_ROOT_PASSWORD=expo123
|
||||
MYSQL_DATABASE=troggle
|
||||
MYSQL_USER=troggleuser
|
||||
MYSQL_PASSWORD=expo123
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
Django==1.7.11
|
||||
django-registration==2.1.2
|
||||
mysql
|
||||
imagekit
|
||||
Image
|
||||
django-tinymce==2.7.0
|
||||
smartencoding
|
||||
1
docker/requirements.txt
Symbolic link
1
docker/requirements.txt
Symbolic link
@@ -0,0 +1 @@
|
||||
requirements.txt.dj-1.7.11
|
||||
8
docker/requirements.txt.dj-1.7.11
Normal file
8
docker/requirements.txt.dj-1.7.11
Normal file
@@ -0,0 +1,8 @@
|
||||
Django==1.7.11
|
||||
django-registration==2.1.2
|
||||
mysql
|
||||
#imagekit
|
||||
django-imagekit
|
||||
Image
|
||||
django-tinymce==2.7.0
|
||||
smartencoding
|
||||
7
docker/requirements.txt.dj-1.8.19
Normal file
7
docker/requirements.txt.dj-1.8.19
Normal file
@@ -0,0 +1,7 @@
|
||||
Django==1.8.19
|
||||
django-registration==2.1.2
|
||||
mysql
|
||||
django-imagekit
|
||||
Image
|
||||
django-tinymce==2.7.0
|
||||
smartencoding
|
||||
@@ -35,7 +35,7 @@ def flatpage(request, path):
|
||||
|
||||
|
||||
if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated():
|
||||
print "flat path noinfo", path
|
||||
print("flat path noinfo", path)
|
||||
return HttpResponseRedirect(reverse("auth_login") + '?next=%s' % request.path)
|
||||
|
||||
if path.endswith("/") or path == "":
|
||||
@@ -67,13 +67,15 @@ def flatpage(request, path):
|
||||
title, = m.groups()
|
||||
else:
|
||||
title = ""
|
||||
linksmatch = re.match('(.*)<ul id="links">', body, re.DOTALL + re.IGNORECASE)
|
||||
if linksmatch:
|
||||
body, = linksmatch.groups()
|
||||
has_menu = False
|
||||
menumatch = re.match('(.*)<div id="menu">', body, re.DOTALL + re.IGNORECASE)
|
||||
if menumatch:
|
||||
has_menu = True
|
||||
#body, = menumatch.groups()
|
||||
if re.search(r"iso-8859-1", html):
|
||||
body = unicode(body, "iso-8859-1")
|
||||
body.strip
|
||||
return render_with_context(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm")})
|
||||
return render_with_context(request, 'flatpage.html', {'editable': True, 'path': path, 'title': title, 'body': body, 'homepage': (path == "index.htm"), 'has_menu': has_menu})
|
||||
else:
|
||||
return HttpResponse(o.read(), content_type=getmimetype(path))
|
||||
|
||||
@@ -114,7 +116,7 @@ def editflatpage(request, path):
|
||||
if m:
|
||||
filefound = True
|
||||
preheader, headerargs, head, postheader, bodyargs, body, postbody = m.groups()
|
||||
linksmatch = re.match('(.*)(<ul\s+id="links">.*)', body, re.DOTALL + re.IGNORECASE)
|
||||
linksmatch = re.match(r'(.*)(<ul\s+id="links">.*)', body, re.DOTALL + re.IGNORECASE)
|
||||
if linksmatch:
|
||||
body, links = linksmatch.groups()
|
||||
if re.search(r"iso-8859-1", html):
|
||||
|
||||
65
localsettingsdocker.py
Normal file
65
localsettingsdocker.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import sys
|
||||
# link localsettings to this file for use on expo computer in austria
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'troggleuser', # Not used with sqlite3.
|
||||
'PASSWORD' : 'expo123', # Not used with sqlite3.
|
||||
'HOST' : 'expo-mysql', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = 'somepasshere'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
|
||||
PUBLIC_SITE = False
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = '3dtopos'
|
||||
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
|
||||
CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
|
||||
THREEDCACHEDIR = CACHEDIR + '3d/'
|
||||
THUMBNAILCACHE = CACHEDIR + 'thumbs'
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
URL_ROOT = 'http://127.0.0.1:8000/'
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
|
||||
MEDIA_URL = URL_ROOT + DIR_ROOT + 'site_media/'
|
||||
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
|
||||
MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'
|
||||
|
||||
STATIC_URL = URL_ROOT
|
||||
STATIC_ROOT = DIR_ROOT
|
||||
|
||||
JSLIB_URL = URL_ROOT + 'javascript/'
|
||||
|
||||
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
|
||||
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'
|
||||
|
||||
TEMPLATE_DIRS = (
|
||||
PYTHON_PATH + "templates",
|
||||
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
|
||||
# Always use forward slashes, even on Windows.
|
||||
# Don't forget to use absolute paths, not relative paths.
|
||||
)
|
||||
|
||||
LOGFILE = PYTHON_PATH + 'troggle_log.txt'
|
||||
@@ -24,6 +24,7 @@ FIX_PERMISSIONS = ["sudo", "/usr/local/bin/fix_permissions"]
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = '3dtopos'
|
||||
|
||||
@@ -26,6 +26,7 @@ PUBLIC_SITE = True
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = '3dtopos'
|
||||
|
||||
@@ -25,6 +25,7 @@ PUBLIC_SITE = False
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = '3dtopos'
|
||||
|
||||
52
media/css/cavetables.css
Normal file
52
media/css/cavetables.css
Normal file
@@ -0,0 +1,52 @@
|
||||
body {
|
||||
all: initial;
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
div#inputf {
|
||||
display: inline-block;
|
||||
width: 300px;
|
||||
text-align: justify;
|
||||
margin-top: 0px;
|
||||
margin-bottom: 5px
|
||||
}
|
||||
|
||||
.menu, ul#links{
|
||||
display: none;
|
||||
}
|
||||
|
||||
table {
|
||||
border-spacing: 0;
|
||||
width: 100%;
|
||||
border: 1px solid #ddd;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
th {
|
||||
cursor: pointer;
|
||||
background-color: #bbb
|
||||
}
|
||||
|
||||
th, td {
|
||||
padding: 16px;
|
||||
max-height: 40px;
|
||||
}
|
||||
|
||||
tr:nth-child(even) {
|
||||
background-color: #f2f2f2
|
||||
}
|
||||
|
||||
p {
|
||||
|
||||
margin-right: 80px;
|
||||
margin-left: 80px;
|
||||
}
|
||||
|
||||
button {
|
||||
width: 300px
|
||||
}
|
||||
span#mono {
|
||||
font-family: monospace;
|
||||
background-color: #eee;
|
||||
font-size: 120%;
|
||||
}
|
||||
159
media/scripts/TableSort.js
Normal file
159
media/scripts/TableSort.js
Normal file
@@ -0,0 +1,159 @@
|
||||
function filterTable(tablename)
{
	// Hide every data row of the given table that fails any of the filter
	// widgets (depth/length ranges, last-visit date, visitor, cave name,
	// "incomplete" checkbox). Row 0 is the header row and is never touched.
	var table = document.getElementById(tablename);

	var mindepth = document.getElementById("CaveDepthMin").value;
	var maxdepth = document.getElementById("CaveDepthMax").value;
	if(mindepth==0)mindepth=-999999;	// 0 means "no lower bound"
	if(maxdepth==0)maxdepth= 999999;	// 0 means "no upper bound"

	var minlength = document.getElementById("CaveLengthMin").value;
	var maxlength = document.getElementById("CaveLengthMax").value;
	if(minlength==0)minlength=-999999;
	if(maxlength==0)maxlength= 999999;

	var visitdate = document.getElementById("VisitDate").value;
	var visitor = document.getElementById("Visitor").value;
	var cavename = document.getElementById("CaveName").value.toLowerCase();
	var incomplete = document.getElementById("Incomplete").checked;

	// A visitor filter wrapped in slashes (/.../) is treated as a regex.
	var regexmode = false;
	if(visitor[0]=='/' && visitor[visitor.length-1]=='/')
	{
		regexmode = true;
		visitor = new RegExp(visitor.substr(1,visitor.length-2));
	}
	else
	{
		// BUG FIX: the result of toLowerCase() was previously discarded
		// (bare "visitor.toLowerCase();"), leaving the plain-text visitor
		// comparison case-sensitive. Assign the lowered value back.
		visitor = visitor.toLowerCase();
	}

	var rows = table.rows;
	for(var i=1; i< rows.length; i++)
	{
		var cells = rows[i].getElementsByTagName("TD");
		var name = cells[1].innerHTML.toLowerCase();

		// Strip everything but digits and dots so "123m" parses as 123.
		var depth = Number(cells[2].innerHTML.toLowerCase().replace(/[^0-9.]/g,''));
		var length = Number(cells[3].innerHTML.toLowerCase().replace(/[^0-9.]/g,''));

		var date = cells[4].innerHTML.toLowerCase();

		// NOTE(review): the recent-visitor column is not wired up yet
		// (original commented-out lookup) -- the visitor filter currently
		// compares against an empty string. Confirm intended column.
		var recentvisitor = "";

		if(cavename != "" && !name.includes(cavename))
		{
			rows[i].style.visibility = "collapse";
		}
		if(depth<mindepth || depth>maxdepth)
		{
			rows[i].style.visibility = "collapse";
		}
		if(length<minlength || length>maxlength)
		{
			rows[i].style.visibility = "collapse";
		}
		if(date < visitdate)
		{
			rows[i].style.visibility = "collapse";
		}
		if(visitor != "" && regexmode && !visitor.test(recentvisitor))
		{
			rows[i].style.visibility = "collapse";
		}
		if(visitor != "" && !regexmode && !recentvisitor.includes(visitor))
		{
			rows[i].style.visibility = "collapse";
		}

		// When "incomplete" is ticked, hide rows that have any empty cell.
		for(var j=0; j<cells.length; j++)
		{
			if(cells[j].innerHTML == "" && incomplete)
			{
				rows[i].style.visibility = "collapse";
				break;
			}
		}
	}
}
|
||||
|
||||
function filterTableReset(tablename)
{
	// Undo filterTable(): re-show every data row (row 0 is the header).
	table = document.getElementById(tablename);
	rows = table.rows;
	i = 1;
	while(i < rows.length)
	{
		rows[i].style.visibility = "visible";
		i++;
	}
}
|
||||
|
||||
|
||||
function isOrdered(kvarray,numeric)
{
	// Report whether the keys (element [0] of each pair) are already in
	// non-decreasing order -- numerically when numeric==1, otherwise by
	// ordinary comparison.
	var asNumber = (numeric==1);
	for(var idx=0; idx+1<kvarray.length; idx++)
	{
		var a = kvarray[idx][0];
		var b = kvarray[idx+1][0];
		if(asNumber ? (Number(a) > Number(b)) : (a > b))
		{
			return false;
		}
	}
	return true;
}
|
||||
|
||||
// Sort the table's data rows by column n (numeric when numeric==1).
// If the column is already sorted ascending, the order is reversed, so
// repeated clicks on a header toggle the direction. Works by cloning the
// rows in sorted order onto the end of the table, then deleting the
// original unsorted rows.
function sortTable(n, tablename, numeric) {
	table = document.getElementById(tablename);
	rows = table.rows;
	var ordering = [];
	var i;

	// Build [key, rowIndex] pairs; row 0 is the header and is skipped.
	for(i = 1; i < rows.length; i++)
	{
		key = rows[i].getElementsByTagName("TD")[n];
		key = key.innerHTML.toLowerCase();
		if(numeric==1)
		{
			// keep only digits and dots so values like "123m" compare numerically
			key=key.replace(/[^0-9.]/g,'')
		}
		ordering.push([key,i]);
	}

	// Remember whether the column was already ascending (to toggle below).
	var ascending = isOrdered(ordering,numeric);

	// Sort either numerically or alphabetically.
	if(numeric==1)
	{
		ordering.sort((x,y) => Number(x[0])-Number(y[0]));
	}
	else
	{
		ordering.sort(); //sorts alphabetically
	}

	if(ascending) ordering.reverse();

	// Append clones of the rows in sorted order at the end of the table...
	for(i = 0; i < ordering.length; i++)
	{
		var keyval = ordering[i];
		id = keyval[1]; //get rownumber of n^th sorted value
		cln = rows[id].cloneNode(true); //deep clone of current node
		table.insertBefore(cln,null); //add n^th row at the end
	}
	// ...then delete the original unsorted rows (index 1 shifts each time).
	for(i = 1; i < ordering.length+1; i++)
	{
		table.deleteRow(1);// 0 -> header; 1 -> first row
	}

}
|
||||
@@ -10,14 +10,15 @@ def readcaves():
|
||||
newArea.save()
|
||||
newArea = models.Area(short_name = "1626", parent = None)
|
||||
newArea.save()
|
||||
print "Reading Entrances"
|
||||
print("Reading Entrances")
|
||||
#print "list of <Slug> <Filename>"
|
||||
for filename in os.walk(settings.ENTRANCEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
|
||||
if filename.endswith('.html'):
|
||||
readentrance(filename)
|
||||
print "Reading Caves"
|
||||
readentrance(filename)
|
||||
print ("Reading Caves")
|
||||
for filename in os.walk(settings.CAVEDESCRIPTIONS).next()[2]: #Should be a better way of getting a list of files
|
||||
readcave(filename)
|
||||
if filename.endswith('.html'):
|
||||
readcave(filename)
|
||||
|
||||
def readentrance(filename):
|
||||
with open(os.path.join(settings.ENTRANCEDESCRIPTIONS, filename)) as f:
|
||||
@@ -153,7 +154,7 @@ def readcave(filename):
|
||||
primary = primary)
|
||||
cs.save()
|
||||
except:
|
||||
print "Can't find text (slug): %s, skipping %s" % (slug, context)
|
||||
print("Can't find text (slug): %s, skipping %s" % (slug, context))
|
||||
|
||||
primary = False
|
||||
for entrance in entrances:
|
||||
@@ -164,17 +165,17 @@ def readcave(filename):
|
||||
ce = models.CaveAndEntrance(cave = c, entrance_letter = letter, entrance = entrance)
|
||||
ce.save()
|
||||
except:
|
||||
print "Entrance text (slug) %s missing %s" % (slug, context)
|
||||
print ("Entrance text (slug) %s missing %s" % (slug, context))
|
||||
|
||||
|
||||
def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
|
||||
items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
|
||||
if len(items) < minItems and printwarnings:
|
||||
print "%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
|
||||
print("%(count)i %(itemname)s found, at least %(min)i expected" % {"count": len(items),
|
||||
"itemname": itemname,
|
||||
"min": minItems} + context
|
||||
"min": minItems} + context)
|
||||
if maxItems is not None and len(items) > maxItems and printwarnings:
|
||||
print "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
|
||||
print("%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
|
||||
"itemname": itemname,
|
||||
"max": maxItems} + context
|
||||
"max": maxItems} + context)
|
||||
return items
|
||||
|
||||
213
parsers/cavesM.py
Normal file
213
parsers/cavesM.py
Normal file
@@ -0,0 +1,213 @@
|
||||
|
||||
import os
import re
import subprocess
import xml.etree.ElementTree as ET #this is used to parse XML's

from django.conf import settings

import troggle.core.models as models #import models for various objects
|
||||
|
||||
#
|
||||
# This parser has to find several things:
|
||||
# There are files of .html format in expoweb area - they contain some of the important information
|
||||
# There is a similar number of .svx files in loser are - they contain all the measurements
|
||||
#
|
||||
# Previous version was incredibly slow due to various shitty ideas about finding things
|
||||
# and over-reliance on python when handling regular expressions; the new version delegates heavy lifting to the shell
|
||||
# and handles more sophisticated bits only
|
||||
#
|
||||
|
||||
def load():
    """Entry point: parse and store caves for both kataster areas."""
    print('Hi! I\'m caves parser. Ready to work')
    for area in ('1623', '1626'):
        print('Loading caves of ' + area + ' area')
        loadarea(area)
||||
|
||||
def loadarea(areacode):
    """Parse all caves of one kataster area ('1623' or '1626').

    Phase 1: run cavern/dump3d over the survex data, walk every cave
    directory, and create one CaveM record per cave (depth, length,
    entrance location, last survey date).
    Phase 2: read the cave-description HTML files and attach descriptions
    (Cave_descriptionM) to the matching CaveM records.
    Problems are recorded as Parser_messageM rows rather than raised.
    """
    # Build (or reuse) the master .3d file covering both areas.
    if not file_exists(settings.SURVEX_DATA+'1623-and-1626.3d'):
        print('Computing master .3d file')
        bash('cavern -o'+settings.SURVEX_DATA+' '+settings.SURVEX_DATA+'1623-and-1626.svx')
    else:
        print('Loading from existing master .3d file')

    master3d = bash('dump3d -d '+settings.SURVEX_DATA+'1623-and-1626.3d').splitlines()
    master3dN = [x for x in master3d if ('NODE' in x)] #list of nodes of master survex file
    master3dL = [x for x in master3d if ('LINE' in x)] #list of legs of master survex file

    print('Searching all cave dirs files')
    basedir = settings.SURVEX_DATA+'caves-'+areacode+'/'

    cavedirs = bash("find "+basedir+" -maxdepth 1 -type d").splitlines() #this command finds all directories
    print('Obtained list of directories! (#dirs='+str(len(cavedirs))+')')
    ndirs = len(cavedirs) #remember number of dirs for nice debug output
    # NOTE(review): ndirs is never used after this point.

    for cavedir in cavedirs:
        if cavedir==basedir:
            continue #skip the basedir - a non-proper subdirectory
        # Take the last path component as the cave name.
        cavename = bash('echo '+cavedir+' | rev | cut -f1 -d \'/\' | rev').splitlines()[0]

        # NOTE(review): `test` is computed but never used -- the real check is
        # the file_exists() call just below.
        test = bash('if [ ! -f '+cavedir+'/'+cavename+'.svx ] ; then echo MISSING; fi')#test for file exisence
        if not file_exists(cavedir+'/'+cavename+'.svx'):
            msg = models.Parser_messageM(parsername='caves',content=cavedir+'/'+cavename+' MISSING!',message_type='warn')
            print('Cave missing'+cavename+' :(')
            msg.save()
            continue
        fullname=cavedir+'/'+cavename+'.svx'
        print('Found cave:'+cavename)
        cavernout = bash('cavern -o '+cavedir+' '+fullname) #make cavern process the thing
        if 'cavern: error:' in cavernout:
            msg = models.Parser_messageM(parsername='caves',content=cavedir+'/'+cavename+' Survex file messed up!',message_type='warn')
            print('Fucked svx'+cavename+' :(')
            msg.save()
            continue

        # Pull the totals out of cavern's textual report, e.g.
        # "Total length ... 123.45m" / "Total vertical length ... 67.89m".
        cavernout = cavernout.splitlines()
        depth = float(([x for x in cavernout if ('Total vertical length' in x)][0].split()[-1])[:-2])
        length = float(([x for x in cavernout if ('Total length' in x)][0].split()[6])[:-1])
        # NOTE(review): cavefile is never closed -- consider a with-block.
        cavefile = open(fullname,'r')
        cavefilecontents = cavefile.read().splitlines()
        surveyname = [x for x in cavefilecontents if ('*begin ') in x][0].split()[1].lower()
        try:
            title = [x for x in cavefilecontents if ('*title ') in x][0].split()[1]
        except:
            # NOTE(review): probable typo -- assigns to unused `syrveyname`
            # instead of `title`; `title` is also never used afterwards.
            syrveyname = "Untitled"

        # Nodes belonging to this survey, preferring tagged ENTRANCE nodes,
        # then SURFACE nodes, then anything in the survey.
        relevant_nodes = [x for x in master3dN if (('['+areacode+'.'+surveyname+'.' in x) or ('['+areacode+'.'+surveyname+']' in x))]
        entrance_nodes = [x for x in relevant_nodes if 'ENTRANCE' in x]
        surface_nodes = [x for x in relevant_nodes if 'SURFACE' in x]
        location_nodes = []
        print('rel_nodes'+str(len(relevant_nodes)))
        if len(entrance_nodes) > 0:
            location_nodes = entrance_nodes
        elif len(surface_nodes) > 0:
            location_nodes = surface_nodes
        elif len(relevant_nodes) > 0:
            location_nodes = relevant_nodes

        try:
            # Highest node (field 3 of the dump3d NODE line) wins.
            location = sorted(location_nodes, key = lambda y : float(y.split()[3])).pop()
        except:
            print(location_nodes)
            location = 'Not found'

        # Most recent leg date for this survey (LINE records sort by their
        # trailing date field).
        relevant_lines = [x for x in master3dL if (('['+areacode+'.'+surveyname+'.' in x) or ('['+areacode+'.'+surveyname+']' in x))]
        try:
            lastleg = sorted(relevant_lines, key = lambda y : y.split().pop()).pop()
        except:
            lastleg = ['LINE 1900.01.01']
        try:
            lastdate = lastleg.split().pop()
            if 'STYLE' in lastdate:
                # NOTE(review): str has no .pop() -- this line would raise and
                # fall into the except below, yielding the default date. Confirm.
                lastdate = lastleg.split().pop().pop()
        except:
            lastdate = '1900.01.01'

        # Fields 1-2 of the chosen node line are the easting/northing pair.
        entrance = ' '.join(location.split()[1:3])
        print((('depth','length','surv name','entr','date'),(depth,length,surveyname,entrance,lastdate))) #sanity check print

        newcave = models.CaveM(
            survex_file = fullname,
            total_length = length,
            name=areacode+'.'+surveyname,
            total_depth = depth,
            date = lastdate,
            entrance = entrance)
        newcave.save()
    #end of reading survex masterfiles

    print ("Reading cave descriptions")
    cavefiles = bash('find '+settings.CAVEDESCRIPTIONS+' -name \'*.html\'').splitlines()
    for fn in cavefiles:
        # NOTE(review): f is never closed -- consider a with-block.
        f = open(fn, "r")
        print(fn)
        contents = f.read()

        # Slug format is assumed to be "<4-digit area>-<survex name>",
        # e.g. "1623-123" -- TODO confirm.
        slug = re.sub(r"\s+", "", extractXML(contents,'caveslug'))
        desc = extractXML(contents,'underground_description')
        name = slug[5:] #get survex compatible name
        area = slug[0:4]

        print([area,name])

        if desc==None or name==None:
            msg = models.Parser_messageM(parsername='caves',content=fn+' Description meesed up!',message_type='warn')
            print('Fucked description '+fn+' :(')
            msg.save()
            continue

        print(area+'/'+name+'/'+name+'.svx')

        # Match this description to the CaveM created in phase 1.
        updatecave = models.CaveM.objects.filter(survex_file__icontains=area+'/'+name+'/'+name+'.svx')
        if len(updatecave)>1:
            # NOTE(review): says "skipping" but does NOT continue -- the
            # description record below is still created. Confirm intent.
            print('Non unique solution - skipping. Name:'+name)
        elif len(updatecave)==0:
            print('Cave with no survex data:'+name)
            continue
        else: #exaclty one match
            print('Adding desc:'+name)
            updatecave = updatecave[0]
            updatecave.description = '/cave/descriptionM/'+slug #area-name
            updatecave.title=name
            updatecave.save()

        # Extract every description section and store it verbatim.
        slugS = slug
        explorersS = extractXML(contents,'explorers')
        underground_descriptionS = extractXML(contents,'underground_description')
        equipmentS = extractXML(contents,'equipment')
        referencesS = extractXML(contents,'references')
        surveyS = extractXML(contents,'survey')
        kataster_statusS = extractXML(contents,'kataster_status')
        underground_centre_lineS = extractXML(contents,'underground_centre_line')
        survex_fileS = extractXML(contents,'survex_file')
        notesS = extractXML(contents,'notes')

        newcavedesc = models.Cave_descriptionM(
            slug = slugS,
            explorers = explorersS,
            underground_description = underground_descriptionS,
            equipment = equipmentS,
            references = referencesS,
            survey = surveyS,
            kataster_status = kataster_statusS,
            underground_centre_line = underground_centre_lineS,
            survex_file = survex_fileS,
            notes = notesS)
        newcavedesc.save()

    #end of reading cave descriptions
||||
def file_exists(filename):
    """Return True if *filename* names an existing regular file.

    Previously this spawned a shell (`[ -f ... ]` via bash()) per call, which
    was slow and broke on filenames containing spaces or shell metacharacters.
    os.path.isfile gives the same regular-file answer safely and cheaply.
    """
    return os.path.isfile(filename)
|
||||
|
||||
def extractXML(contents, tag):
    """Return the text between the first <tag>...</tag> pair, or None.

    Matching is line-based: the first line containing '<tag>' and the first
    line containing '</tag>' delimit the segment that is then searched.
    """
    open_mark = '<' + tag + '>'
    close_mark = '</' + tag + '>'
    all_lines = contents.splitlines()
    start = next((i for i, ln in enumerate(all_lines) if open_mark in ln), None)
    stop = next((i for i, ln in enumerate(all_lines) if close_mark in ln), None)
    if start is None or stop is None:
        return None  # tag not present (or not closed) in this document
    if start == stop:
        segment = all_lines[start]
    else:
        segment = '\n'.join(all_lines[start:stop + 1])
    # re.S lets '.*' span the newlines inside a multi-line segment.
    return re.findall(open_mark + '(.*)' + close_mark, segment, re.S)[0]
|
||||
|
||||
def bash(cmd):
    """Run *cmd* through the shell and return whatever it wrote to stdout."""
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    stdout_data = proc.communicate()[0]  # also waits for the process to exit
    return stdout_data
|
||||
@@ -62,8 +62,6 @@ def LoadPersonsExpos():
|
||||
|
||||
# make persons
|
||||
print "Loading personexpeditions"
|
||||
#expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
|
||||
#expomissing = set(expoers2008)
|
||||
|
||||
for personline in personreader:
|
||||
name = personline[header["Name"]]
|
||||
@@ -85,36 +83,7 @@ def LoadPersonsExpos():
|
||||
nonLookupAttribs = {'nickname':nickname, 'is_guest':(personline[header["Guest"]] == "1")}
|
||||
save_carefully(models.PersonExpedition, lookupAttribs, nonLookupAttribs)
|
||||
|
||||
|
||||
# this fills in those people for whom 2008 was their first expo
|
||||
#print "Loading personexpeditions 2008"
|
||||
#for name in expomissing:
|
||||
# firstname, lastname = name.split()
|
||||
# is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
|
||||
# print "2008:", name
|
||||
# persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
|
||||
# if not persons:
|
||||
# person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
|
||||
# #person.Sethref()
|
||||
# person.save()
|
||||
# else:
|
||||
# person = persons[0]
|
||||
# expedition = models.Expedition.objects.get(year="2008")
|
||||
# personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
|
||||
# personexpedition.save()
|
||||
|
||||
#Notability is now a method of person. Makes no sense to store it in the database; it would need to be recalculated every time something changes. - AC 16 Feb 09
|
||||
# could rank according to surveying as well
|
||||
#print "Setting person notability"
|
||||
#for person in models.Person.objects.all():
|
||||
#person.notability = 0.0
|
||||
#for personexpedition in person.personexpedition_set.all():
|
||||
#if not personexpedition.is_guest:
|
||||
#person.notability += 1.0 / (2012 - int(personexpedition.expedition.year))
|
||||
#person.bisnotable = person.notability > 0.3 # I don't know how to filter by this
|
||||
#person.save()
|
||||
|
||||
|
||||
|
||||
# used in other referencing parser functions
|
||||
# expedition name lookup cached for speed (it's a very big list)
|
||||
Gpersonexpeditionnamelookup = { }
|
||||
|
||||
27
parsers/peopleM.py
Normal file
27
parsers/peopleM.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from django.conf import settings
|
||||
import troggle.core.models as models
|
||||
|
||||
def load():
    """Populate the PersonM and ExpeditionM tables from the folk.csv register.

    CSV layout: columns 0-4 hold per-person metadata (name in column 0),
    and columns 5+ are one column per expedition year, containing '1'
    when that person attended.  Creates one ExpeditionM per year column,
    one PersonM per data row, and links them via the expos_attended M2M.
    """
    # 'with' guarantees the handle is closed (the original leaked it)
    with open(settings.EXPOWEB + "noinfo/folk.csv") as folkfile:
        personlines = folkfile.read().splitlines()
    persontable = [line.split(',') for line in personlines]

    # header row: expedition-year columns start at index 5
    years = [persontable[0][i] for i in range(5, len(persontable[0]))]
    for year in years:
        newexpedition = models.ExpeditionM(date=year)
        newexpedition.save()

    for row in persontable[1:]:  # skip header row
        # column indices carrying a '1' attendance marker
        attendedid = [i for i, cell in enumerate(row) if '1' in cell]
        attendedyears = [persontable[0][i] for i in attendedid if i >= 5]
        name = row[0]
        print(name + ' has attended: ' + ', '.join(attendedyears))
        newperson = models.PersonM(
            name = name)
        newperson.save()
        for year in attendedyears:
            target = models.ExpeditionM.objects.get(date=year)
            newperson.expos_attended.add(target)
    print('Person -> Expo table created!')
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,6 @@ import re
|
||||
import os
|
||||
|
||||
|
||||
|
||||
def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
|
||||
ls = sline.lower().split()
|
||||
ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
|
||||
@@ -20,23 +19,23 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
|
||||
try:
|
||||
survexleg.tape = float(ls[stardata["tape"]])
|
||||
except ValueError:
|
||||
print "Tape misread in", survexblock.survexfile.path
|
||||
print "Stardata:", stardata
|
||||
print "Line:", ls
|
||||
print("Tape misread in", survexblock.survexfile.path)
|
||||
print("Stardata:", stardata)
|
||||
print("Line:", ls)
|
||||
survexleg.tape = 1000
|
||||
try:
|
||||
lclino = ls[stardata["clino"]]
|
||||
except:
|
||||
print "Clino misread in", survexblock.survexfile.path
|
||||
print "Stardata:", stardata
|
||||
print "Line:", ls
|
||||
print("Clino misread in", survexblock.survexfile.path)
|
||||
print("Stardata:", stardata)
|
||||
print("Line:", ls)
|
||||
lclino = error
|
||||
try:
|
||||
lcompass = ls[stardata["compass"]]
|
||||
except:
|
||||
print "Compass misread in", survexblock.survexfile.path
|
||||
print "Stardata:", stardata
|
||||
print "Line:", ls
|
||||
print("Compass misread in", survexblock.survexfile.path)
|
||||
print("Stardata:", stardata)
|
||||
print("Line:", ls)
|
||||
lcompass = error
|
||||
if lclino == "up":
|
||||
survexleg.compass = 0.0
|
||||
@@ -48,14 +47,14 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
|
||||
try:
|
||||
survexleg.compass = float(lcompass)
|
||||
except ValueError:
|
||||
print "Compass misread in", survexblock.survexfile.path
|
||||
print "Stardata:", stardata
|
||||
print "Line:", ls
|
||||
print("Compass misread in", survexblock.survexfile.path)
|
||||
print("Stardata:", stardata)
|
||||
print("Line:", ls)
|
||||
survexleg.compass = 1000
|
||||
survexleg.clino = -90.0
|
||||
else:
|
||||
assert re.match("[\d\-+.]+$", lcompass), ls
|
||||
assert re.match("[\d\-+.]+$", lclino) and lclino != "-", ls
|
||||
assert re.match(r"[\d\-+.]+$", lcompass), ls
|
||||
assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
|
||||
survexleg.compass = float(lcompass)
|
||||
survexleg.clino = float(lclino)
|
||||
|
||||
@@ -67,9 +66,10 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
|
||||
try:
|
||||
survexblock.totalleglength += float(ls[itape])
|
||||
except ValueError:
|
||||
print "Length not added"
|
||||
print("Length not added")
|
||||
survexblock.save()
|
||||
|
||||
|
||||
|
||||
def LoadSurvexEquate(survexblock, sline):
|
||||
#print sline #
|
||||
stations = sline.split()
|
||||
@@ -77,12 +77,13 @@ def LoadSurvexEquate(survexblock, sline):
|
||||
for station in stations:
|
||||
survexblock.MakeSurvexStation(station)
|
||||
|
||||
|
||||
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
|
||||
pass
|
||||
|
||||
|
||||
stardatadefault = { "type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4 }
|
||||
stardataparamconvert = { "length":"tape", "bearing":"compass", "gradient":"clino" }
|
||||
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
|
||||
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
|
||||
|
||||
def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
iblankbegins = 0
|
||||
@@ -91,7 +92,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
teammembers = [ ]
|
||||
|
||||
# uncomment to print out all files during parsing
|
||||
# print "Reading file:", survexblock.survexfile.path
|
||||
print("Reading file:", survexblock.survexfile.path)
|
||||
while True:
|
||||
svxline = fin.readline().decode("latin1")
|
||||
if not svxline:
|
||||
@@ -99,10 +100,10 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
textlines.append(svxline)
|
||||
|
||||
# break the line at the comment
|
||||
sline, comment = re.match("([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
|
||||
sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
|
||||
|
||||
# detect ref line pointing to the scans directory
|
||||
mref = comment and re.match('.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
|
||||
mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
|
||||
if mref:
|
||||
refscan = "%s#%s" % (mref.group(1), mref.group(2))
|
||||
survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
|
||||
@@ -116,7 +117,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
continue
|
||||
|
||||
# detect the star command
|
||||
mstar = re.match('\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
|
||||
mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
|
||||
if not mstar:
|
||||
if "from" in stardata:
|
||||
LoadSurvexLineLeg(survexblock, stardata, sline, comment)
|
||||
@@ -129,7 +130,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
cmd, line = mstar.groups()
|
||||
cmd = cmd.lower()
|
||||
if re.match("include$(?i)", cmd):
|
||||
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub("\.svx$", "", line))
|
||||
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
|
||||
includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
|
||||
includesurvexfile.save()
|
||||
includesurvexfile.SetDirectory()
|
||||
@@ -157,7 +158,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
|
||||
elif re.match("date$(?i)", cmd):
|
||||
if len(line) == 10:
|
||||
survexblock.date = re.sub("\.", "-", line)
|
||||
survexblock.date = re.sub(r"\.", "-", line)
|
||||
expeditions = models.Expedition.objects.filter(year=line[:4])
|
||||
if expeditions:
|
||||
assert len(expeditions) == 1
|
||||
@@ -166,9 +167,9 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
survexblock.save()
|
||||
|
||||
elif re.match("team$(?i)", cmd):
|
||||
mteammember = re.match("(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
|
||||
mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
|
||||
if mteammember:
|
||||
for tm in re.split(" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
|
||||
for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
|
||||
if tm:
|
||||
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
|
||||
if (personexpedition, tm) not in teammembers:
|
||||
@@ -206,22 +207,25 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
survexblock.MakeSurvexStation(line.split()[0])
|
||||
|
||||
else:
|
||||
if not cmd in [ "sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument", "calibrate", "set", "infer", "alias", "ref" ]:
|
||||
print ("Unrecognised command in line:", cmd, line, survexblock)
|
||||
|
||||
|
||||
if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
|
||||
"calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
|
||||
print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
|
||||
|
||||
def ReloadSurvexCave(survex_cave):
|
||||
cave = models.Cave.objects.get(kataster_number=survex_cave)
|
||||
|
||||
def ReloadSurvexCave(survex_cave, area):
|
||||
print(survex_cave, area)
|
||||
cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
|
||||
print(cave)
|
||||
#cave = models.Cave.objects.get(kataster_number=survex_cave)
|
||||
cave.survexblock_set.all().delete()
|
||||
cave.survexfile_set.all().delete()
|
||||
cave.survexdirectory_set.all().delete()
|
||||
|
||||
survexfile = models.SurvexFile(path="caves/" + survex_cave + "/" + survex_cave, cave=cave)
|
||||
survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
|
||||
survexfile.save()
|
||||
survexfile.SetDirectory()
|
||||
|
||||
survexblockroot = models.SurvexBlock(name="root", survexpath="caves", begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
|
||||
survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
|
||||
survexblockroot.save()
|
||||
fin = survexfile.OpenFile()
|
||||
textlines = [ ]
|
||||
@@ -232,7 +236,7 @@ def ReloadSurvexCave(survex_cave):
|
||||
|
||||
def LoadAllSurvexBlocks():
|
||||
|
||||
print 'Loading All Survex Blocks...'
|
||||
print('Loading All Survex Blocks...')
|
||||
|
||||
models.SurvexBlock.objects.all().delete()
|
||||
models.SurvexFile.objects.all().delete()
|
||||
@@ -243,6 +247,8 @@ def LoadAllSurvexBlocks():
|
||||
models.SurvexPersonRole.objects.all().delete()
|
||||
models.SurvexStation.objects.all().delete()
|
||||
|
||||
print(" - Data flushed")
|
||||
|
||||
survexfile = models.SurvexFile(path="all", cave=None)
|
||||
survexfile.save()
|
||||
survexfile.SetDirectory()
|
||||
@@ -259,22 +265,26 @@ def LoadAllSurvexBlocks():
|
||||
|
||||
#Load each cave,
|
||||
#FIXME this should be dealt with load all above
|
||||
print(" - Reloading all caves")
|
||||
caves = models.Cave.objects.all()
|
||||
for cave in caves:
|
||||
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves", cave.kataster_number)):
|
||||
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
|
||||
if cave.kataster_number not in ['40']:
|
||||
print "loading", cave
|
||||
ReloadSurvexCave(cave.kataster_number)
|
||||
|
||||
poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
|
||||
print("loading", cave, cave.kat_area())
|
||||
ReloadSurvexCave(cave.kataster_number, cave.kat_area())
|
||||
|
||||
|
||||
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
|
||||
|
||||
|
||||
def LoadPos():
|
||||
|
||||
print 'Loading Pos....'
|
||||
print('Loading Pos....')
|
||||
|
||||
call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
|
||||
call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
|
||||
posfile = open("%sall.pos" % settings.SURVEX_DATA)
|
||||
posfile.readline()#Drop header
|
||||
posfile.readline() #Drop header
|
||||
for line in posfile.readlines():
|
||||
r = poslineregex.match(line)
|
||||
if r:
|
||||
@@ -286,4 +296,4 @@ def LoadPos():
|
||||
ss.z = float(z)
|
||||
ss.save()
|
||||
except:
|
||||
print "%s not parsed in survex" % name
|
||||
print("%s not parsed in survex" % name)
|
||||
|
||||
@@ -39,7 +39,7 @@ def readSurveysFromCSV():
|
||||
|
||||
# test if the expeditions have been added yet
|
||||
if Expedition.objects.count()==0:
|
||||
print "There are no expeditions in the database. Please run the logbook parser."
|
||||
print("There are no expeditions in the database. Please run the logbook parser.")
|
||||
sys.exit()
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@ def readSurveysFromCSV():
|
||||
for survey in surveyreader:
|
||||
#I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
|
||||
walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
|
||||
# print walletNumberLetter.groups()
|
||||
# print(walletNumberLetter.groups())
|
||||
year=survey[header['Year']]
|
||||
|
||||
|
||||
@@ -89,63 +89,73 @@ def listdir(*directories):
|
||||
# add survey scans
|
||||
def parseSurveyScans(expedition, logfile=None):
|
||||
# yearFileList = listdir(expedition.year)
|
||||
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
|
||||
yearFileList=os.listdir(yearPath)
|
||||
print yearFileList
|
||||
for surveyFolder in yearFileList:
|
||||
try:
|
||||
surveyNumber=re.match(r'\d\d\d\d#0*(\d+)',surveyFolder).groups()
|
||||
# scanList = listdir(expedition.year, surveyFolder)
|
||||
scanList=os.listdir(os.path.join(yearPath,surveyFolder))
|
||||
except AttributeError:
|
||||
print surveyFolder + " ignored",
|
||||
continue
|
||||
|
||||
for scan in scanList:
|
||||
try:
|
||||
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
|
||||
yearFileList=os.listdir(yearPath)
|
||||
print(yearFileList)
|
||||
for surveyFolder in yearFileList:
|
||||
try:
|
||||
scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
|
||||
scanType,scanNumber,scanFormat=scanChopped
|
||||
surveyNumber=re.match(r'\d\d\d\d#(X?)0*(\d+)',surveyFolder).groups()
|
||||
#scanList = listdir(expedition.year, surveyFolder)
|
||||
scanList=os.listdir(os.path.join(yearPath,surveyFolder))
|
||||
except AttributeError:
|
||||
print scan + " ignored \r",
|
||||
print(surveyFolder + " ignored\r",)
|
||||
continue
|
||||
if scanType == 'elev' or scanType == 'extend':
|
||||
scanType = 'elevation'
|
||||
|
||||
if scanNumber=='':
|
||||
scanNumber=1
|
||||
for scan in scanList:
|
||||
try:
|
||||
scanChopped=re.match(r'(?i).*(notes|elev|plan|elevation|extend)(\d*)\.(png|jpg|jpeg)',scan).groups()
|
||||
scanType,scanNumber,scanFormat=scanChopped
|
||||
except AttributeError:
|
||||
print(scan + " ignored\r",)
|
||||
continue
|
||||
if scanType == 'elev' or scanType == 'extend':
|
||||
scanType = 'elevation'
|
||||
|
||||
if type(surveyNumber)==types.TupleType:
|
||||
surveyNumber=surveyNumber[0]
|
||||
try:
|
||||
placeholder=get_or_create_placeholder(year=int(expedition.year))
|
||||
survey=Survey.objects.get_or_create(wallet_number=surveyNumber, expedition=expedition, defaults={'logbook_entry':placeholder})[0]
|
||||
except Survey.MultipleObjectsReturned:
|
||||
survey=Survey.objects.filter(wallet_number=surveyNumber, expedition=expedition)[0]
|
||||
file_=os.path.join(yearPath, surveyFolder, scan)
|
||||
scanObj = ScannedImage(
|
||||
file=file_,
|
||||
contents=scanType,
|
||||
number_in_wallet=scanNumber,
|
||||
survey=survey,
|
||||
new_since_parsing=False,
|
||||
)
|
||||
print "Added scanned image at " + str(scanObj)
|
||||
#if scanFormat=="png":
|
||||
#if isInterlacedPNG(os.path.join(settings.SURVEY_SCANS, "surveyscans", file_)):
|
||||
# print file_+ " is an interlaced PNG. No can do."
|
||||
#continue
|
||||
scanObj.save()
|
||||
if scanNumber=='':
|
||||
scanNumber=1
|
||||
|
||||
if type(surveyNumber)==types.TupleType:
|
||||
surveyLetter=surveyNumber[0]
|
||||
surveyNumber=surveyNumber[1]
|
||||
try:
|
||||
placeholder=get_or_create_placeholder(year=int(expedition.year))
|
||||
survey=Survey.objects.get_or_create(wallet_number=surveyNumber, wallet_letter=surveyLetter, expedition=expedition, defaults={'logbook_entry':placeholder})[0]
|
||||
except Survey.MultipleObjectsReturned:
|
||||
survey=Survey.objects.filter(wallet_number=surveyNumber, wallet_letter=surveyLetter, expedition=expedition)[0]
|
||||
file_=os.path.join(yearPath, surveyFolder, scan)
|
||||
scanObj = ScannedImage(
|
||||
file=file_,
|
||||
contents=scanType,
|
||||
number_in_wallet=scanNumber,
|
||||
survey=survey,
|
||||
new_since_parsing=False,
|
||||
)
|
||||
print("Added scanned image at " + str(scanObj))
|
||||
#if scanFormat=="png":
|
||||
#if isInterlacedPNG(os.path.join(settings.SURVEY_SCANS, "surveyscans", file_)):
|
||||
# print file_+ " is an interlaced PNG. No can do."
|
||||
#continue
|
||||
scanObj.save()
|
||||
except (IOError, OSError):
|
||||
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
|
||||
print("No folder found for " + expedition.year + " at:- " + yearPath)
|
||||
|
||||
# dead
|
||||
def parseSurveys(logfile=None):
|
||||
readSurveysFromCSV()
|
||||
try:
|
||||
readSurveysFromCSV()
|
||||
except (IOError, OSError):
|
||||
print("Survey CSV not found..")
|
||||
pass
|
||||
|
||||
for expedition in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
|
||||
parseSurveyScans(expedition)
|
||||
|
||||
# dead
|
||||
def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
|
||||
file=Image.open(filePath)
|
||||
print filePath
|
||||
print(filePath)
|
||||
if 'interlace' in file.info:
|
||||
return file.info['interlace']
|
||||
else:
|
||||
@@ -180,8 +190,8 @@ def LoadListScansFile(survexscansfolder):
|
||||
gld.append((fyf, ffyf, fisdiryf))
|
||||
|
||||
for (fyf, ffyf, fisdiryf) in gld:
|
||||
assert not fisdiryf, ffyf
|
||||
if re.search("\.(?:png|jpg|jpeg)(?i)$", fyf):
|
||||
#assert not fisdiryf, ffyf
|
||||
if re.search(r"\.(?:png|jpg|jpeg)(?i)$", fyf):
|
||||
survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
|
||||
survexscansingle.save()
|
||||
|
||||
@@ -190,7 +200,7 @@ def LoadListScansFile(survexscansfolder):
|
||||
# and builds up the models we can access later
|
||||
def LoadListScans():
|
||||
|
||||
print 'Loading Survey Scans...'
|
||||
print('Loading Survey Scans...')
|
||||
|
||||
SurvexScanSingle.objects.all().delete()
|
||||
SurvexScansFolder.objects.all().delete()
|
||||
@@ -208,7 +218,7 @@ def LoadListScans():
|
||||
continue
|
||||
|
||||
# do the year folders
|
||||
if re.match("\d\d\d\d$", f):
|
||||
if re.match(r"\d\d\d\d$", f):
|
||||
for fy, ffy, fisdiry in GetListDir(ff):
|
||||
if fisdiry:
|
||||
assert fisdiry, ffy
|
||||
@@ -225,7 +235,7 @@ def LoadListScans():
|
||||
|
||||
def FindTunnelScan(tunnelfile, path):
|
||||
scansfolder, scansfile = None, None
|
||||
mscansdir = re.search("(\d\d\d\d#\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
|
||||
mscansdir = re.search(r"(\d\d\d\d#X?\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
|
||||
if mscansdir:
|
||||
scansfolderl = SurvexScansFolder.objects.filter(walletname=mscansdir.group(1))
|
||||
if len(scansfolderl):
|
||||
@@ -234,6 +244,7 @@ def FindTunnelScan(tunnelfile, path):
|
||||
if scansfolder:
|
||||
scansfilel = scansfolder.survexscansingle_set.filter(name=mscansdir.group(2))
|
||||
if len(scansfilel):
|
||||
print(scansfilel, len(scansfilel))
|
||||
assert len(scansfilel) == 1
|
||||
scansfile = scansfilel[0]
|
||||
|
||||
@@ -242,9 +253,9 @@ def FindTunnelScan(tunnelfile, path):
|
||||
if scansfile:
|
||||
tunnelfile.survexscans.add(scansfile)
|
||||
|
||||
elif path and not re.search("\.(?:png|jpg)$(?i)", path):
|
||||
elif path and not re.search(r"\.(?:png|jpg|jpeg)$(?i)", path):
|
||||
name = os.path.split(path)[1]
|
||||
print "ttt", tunnelfile.tunnelpath, path, name
|
||||
print("ttt", tunnelfile.tunnelpath, path, name)
|
||||
rtunnelfilel = TunnelFile.objects.filter(tunnelname=name)
|
||||
if len(rtunnelfilel):
|
||||
assert len(rtunnelfilel) == 1, ("two paths with name of", path, "need more discrimination coded")
|
||||
|
||||
65
parsers/surveysM.py
Normal file
65
parsers/surveysM.py
Normal file
@@ -0,0 +1,65 @@
|
||||
from django.conf import settings
|
||||
import subprocess, re
|
||||
import troggle.core.models as models
|
||||
|
||||
def load():
    """Entry point: build survex file/relation records for all survey areas.

    Currently only the 1623 kataster area is processed.
    """
    print('Load survex files and relations')
    load_area('1623')
||||
|
||||
def load_area(areacode):
    """Scan SURVEX_DATA/caves-<areacode>/ and create SurveyM records.

    For each cave directory directly under the area directory, finds all
    *.svx files, extracts the first '*date' line (yyyy.mm.dd) and creates
    a SurveyM linked to the matching CaveM (matched by survex_file path).
    Caves with zero or multiple CaveM matches are reported and skipped.
    """
    print('Searching all cave dirs files')
    basedir = settings.SURVEX_DATA + 'caves-' + areacode + '/'

    # this command finds all immediate cave subdirectories
    cavedirs = bash("find " + basedir + " -maxdepth 1 -type d").splitlines()
    print('Obtained list of directories! (#dirs=' + str(len(cavedirs)) + ')')

    for cavedir in cavedirs:
        if cavedir == basedir:
            continue  # skip the basedir - a non-proper subdirectory
        # final path component of the directory = cave name
        parentname = bash('echo ' + cavedir + ' | rev | cut -f1 -d \'/\' | rev').splitlines()[0]
        parentcave = models.CaveM.objects.filter(survex_file__icontains=cavedir)
        if len(parentcave) > 1:
            print('Non unique parent - skipping. Name:' + parentname)
            # bug fix: the original fell through here with parentcave still a
            # queryset, which would crash on parentcave.surveys.add() below
            continue
        elif len(parentcave) == 0:
            print('Error! parent not created:' + parentname)
            continue
        else:  # exactly one match
            print('Adding relations of:' + parentname)
            parentcave = parentcave[0]

        surveyfiles = bash('find ' + cavedir + ' -name \'*.svx\'').splitlines()
        for fn in surveyfiles:
            print(fn)
            # 'with' closes the handle (the original leaked one per file)
            with open(fn, 'r') as svxfile:
                svxcontents = svxfile.read().splitlines()
            datelines = [x for x in svxcontents if '*date' in x]
            if not datelines:
                continue  # skip dateless files
            try:
                # narrow except: only a malformed date reaches the fallback
                date = re.findall(r'\d\d\d\d\.\d\d\.\d\d', datelines[0], re.S)[0]
            except IndexError:
                print('Date format error in ' + fn)
                print('Dateline = ' + '"'.join(datelines))
                date = '1900.01.01'  # sentinel for unparseable dates

            newsurvex = models.SurveyM(survex_file=fn, date=date)
            newsurvex.save()
            parentcave.surveys.add(newsurvex)
        parentcave.save()
||||
|
||||
|
||||
def file_exists(filename):
    """Return True if *filename* exists and is a regular file.

    Replaces the original shell round-trip ('if [ ! -f ... ]'), which was
    both slow and unsafe for filenames containing spaces or shell
    metacharacters (injection risk).
    """
    import os  # local import: this module's header does not import os
    return os.path.isfile(filename)
|
||||
|
||||
def bash(cmd):  # calls command in bash shell, returns output
    """Run *cmd* in a shell and return its stdout as text.

    Under Python 3, Popen.communicate() yields bytes; every caller in this
    module compares the result against str ('MISSING' in test,
    cavedir == basedir), so the output is decoded here.

    WARNING: cmd is passed to the shell unquoted (shell=True); never feed
    it untrusted input.
    """
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output, error = process.communicate()
    # 'replace' keeps best-effort output even for non-UTF-8 filenames
    return output.decode('utf-8', 'replace')
|
||||
|
||||
|
||||
@@ -71,6 +71,9 @@ if django.VERSION[0] == 1 and django.VERSION[1] < 4:
|
||||
else:
|
||||
authmodule = 'django.contrib.auth.context_processors.auth'
|
||||
|
||||
TOPCAMPX=411571.00
|
||||
TOPCAMPY=5282639.00
|
||||
|
||||
TEMPLATE_CONTEXT_PROCESSORS = ( authmodule, "core.context.troggle_context", )
|
||||
|
||||
LOGIN_REDIRECT_URL = '/'
|
||||
@@ -130,4 +133,6 @@ TINYMCE_COMPRESSOR = True
|
||||
|
||||
MAX_LOGBOOK_ENTRY_TITLE_LENGTH = 200
|
||||
|
||||
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
|
||||
|
||||
from localsettings import * #localsettings needs to take precedence. Call it to override any existing vars.
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
<body onLoad="contentHeight();">
|
||||
|
||||
<div id="header">
|
||||
<h1>CUCC Expeditions to Austria: 1976 - 2016</h1>
|
||||
<h1>CUCC Expeditions to Austria: 1976 - 2018</h1>
|
||||
<div id="editLinks"> {% block loginInfo %}
|
||||
<a href="{{settings.EXPOWEB_URL}}">Website home</a> |
|
||||
{% if user.username %}
|
||||
@@ -40,9 +40,9 @@
|
||||
<a href="{% url "survexcavessingle" 204 %}">204</a> |
|
||||
<a href="{% url "survexcavessingle" 258 %}">258</a> |
|
||||
<a href="{% url "survexcavessingle" 264 %}">264</a> |
|
||||
<a href="{% url "expedition" 2014 %}">Expo2014</a> |
|
||||
<a href="{% url "expedition" 2015 %}">Expo2015</a> |
|
||||
<a href="{% url "expedition" 2016 %}">Expo2016</a> |
|
||||
<a href="{% url "expedition" 2017 %}">Expo2017</a> |
|
||||
<a href="{% url "expedition" 2018 %}">Expo2018</a> |
|
||||
|
||||
<a href="/admin/">Django admin</a>
|
||||
</div>
|
||||
@@ -90,7 +90,7 @@
|
||||
<li><a id="caversLink" href="{% url "personindex" %}">cavers</a></li>
|
||||
<li><a href="#">expeditions</a>
|
||||
<ul class="sub_menu">
|
||||
<li><a id="expeditionsLink" href="{{ Expedition.objects.latest.get_absolute_url }}">newest</a></li>
|
||||
<li><a id="expeditionsLink" href="{{ expedition.objects.latest.get_absolute_url }}">newest</a></li>
|
||||
<li><a id="expeditionsLink" href="{% url "expeditions" %}">list all</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
|
||||
@@ -1,5 +1,438 @@
|
||||
{% extends "cavebase.html" %}
|
||||
|
||||
{% block extraheaders %}
|
||||
{% if cave.survex_file %}
|
||||
<style>
|
||||
|
||||
|
||||
div.cv-panel {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: 100;
|
||||
background-color: rgba(50,50,50,0.5);
|
||||
color: yellowgreen;
|
||||
border: 1px solid black;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
div.cv-compass, div.cv-ahi {
|
||||
position: absolute;
|
||||
bottom: 95px;
|
||||
right: 5px;
|
||||
margin: 0;
|
||||
padding-top: 2px;
|
||||
/* border: 1px solid white; */
|
||||
text-align: center;
|
||||
width: 78px;
|
||||
height: 19px;
|
||||
z-index: 50;
|
||||
background-color: rgba(50,50,50,0.5);
|
||||
background-color: black;
|
||||
color: white;
|
||||
}
|
||||
|
||||
div.cv-ahi {
|
||||
right: 95px;
|
||||
}
|
||||
|
||||
div.scale-legend {
|
||||
position: absolute;
|
||||
color: white;
|
||||
background-color: black;
|
||||
bottom: 30px;
|
||||
}
|
||||
|
||||
div.linear-scale {
|
||||
position: absolute;
|
||||
color: white;
|
||||
background-color: black;
|
||||
right: 30px;
|
||||
width: 40px;
|
||||
padding: 2px 0;
|
||||
text-align: right;
|
||||
border: 1px solid black;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
div.linear-scale-caption {
|
||||
position: absolute;
|
||||
color: white;
|
||||
background-color: black;
|
||||
right: 5px;
|
||||
width: 65px;
|
||||
padding: 2px 0 5px 0;
|
||||
text-align: left;
|
||||
border: 1px solid black;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
#min-div {
|
||||
border-bottom: 1px solid white;
|
||||
}
|
||||
|
||||
#max-div {
|
||||
border-top: 1px solid white;
|
||||
}
|
||||
|
||||
#angle-legend {
|
||||
position: absolute;
|
||||
width: 80px;
|
||||
right: 5px;
|
||||
bottom: 180px;
|
||||
color: white;
|
||||
background-color: black;
|
||||
font-size: 14px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#scene {
|
||||
width: 100%;
|
||||
height: 700px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#progress-bar {
|
||||
position: absolute;
|
||||
top: 55%;
|
||||
height: 20px;
|
||||
border: 1px solid white;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
#status-text {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
height: 20px;
|
||||
padding-left: 4px;
|
||||
background-color: black;
|
||||
color: white;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
#frame div.page ul {
|
||||
list-style-type: none;
|
||||
margin: 8px 0 0 0;
|
||||
padding: 0;
|
||||
width: 200px;
|
||||
height: 100%;
|
||||
cursor: default;
|
||||
font-size: 12px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
#frame div.page li {
|
||||
position: relative;
|
||||
margin-left: 16px;
|
||||
border-bottom: 1px solid #444444;
|
||||
}
|
||||
|
||||
#frame div.page li.selected {
|
||||
color: #1ab4e5;
|
||||
}
|
||||
|
||||
#frame div.page li:hover {
|
||||
color: yellow;
|
||||
}
|
||||
|
||||
#frame div.page div#ui-path {
|
||||
font-size: 12px;
|
||||
border-top: 1px solid grey;
|
||||
border-bottom: 1px solid grey;
|
||||
margin-top: 8px;
|
||||
padding: 2px 0 2px 12px;
|
||||
}
|
||||
|
||||
#frame div.page div#ui-path span {
|
||||
color: #1ab4e5;
|
||||
}
|
||||
|
||||
#frame div.page div.slide {
|
||||
position: absolute;
|
||||
top: 64px;
|
||||
left: 0px;
|
||||
height: auto;
|
||||
margin-top:0;
|
||||
bottom: 44px;
|
||||
background-color: #222222;
|
||||
transition: transform 0.25s ease-in;
|
||||
}
|
||||
|
||||
#frame div.slide-out {
|
||||
border-right: 1px grey solid;
|
||||
transform: translateX(-100%);
|
||||
}
|
||||
|
||||
#frame div.page div.descend-tree {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
right: 0px;
|
||||
margin: 0;
|
||||
color: #1ab4e5;
|
||||
z-index: 110;
|
||||
}
|
||||
|
||||
#frame {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
left: 0px;
|
||||
width: 240px;
|
||||
height: 100%;
|
||||
background-color: transparent;
|
||||
transform: translateX(-200px);
|
||||
transition: transform 0.25s ease-in;
|
||||
}
|
||||
|
||||
#frame.onscreen {
|
||||
transform: none;
|
||||
transition: transform 0.25s ease-out;
|
||||
}
|
||||
|
||||
#frame a.download {
|
||||
border: 1px solid green;
|
||||
display: block;
|
||||
width: 180px;
|
||||
box-sizing: border-box;
|
||||
margin-top: 6px;
|
||||
margin-bottom: 4px;
|
||||
margin-left: 8px;
|
||||
border: none;
|
||||
border-bottom: 4px solid #1ab4e5;
|
||||
color: #dddddd;
|
||||
background-color: black;
|
||||
padding-bottom: 4px;
|
||||
box-shadow: 1px 1px 8px 0px #888888;
|
||||
outline: none;
|
||||
text-decoration: none;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#frame a.download:hover {
|
||||
color: white;
|
||||
}
|
||||
|
||||
#frame a.download:active {
|
||||
color: #dddddd;
|
||||
border-bottom: 4px solid #0c536a;
|
||||
box-shadow: none;
|
||||
box-shadow: inset 1px 1px 8px 0px #888888;
|
||||
}
|
||||
#frame .tab {
|
||||
position: absolute;
|
||||
right: 0px;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
box-sizing: border-box;
|
||||
background-color: #444444;
|
||||
border-left: 1px solid black;
|
||||
background-position: center;
|
||||
border-top: 1px solid black;
|
||||
}
|
||||
|
||||
#frame #close {
|
||||
position: absolute;
|
||||
right: 40px;
|
||||
bottom: 0px;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
box-sizing: border-box;
|
||||
z-index: 150;
|
||||
background-image: url(../images/ic_remove.png);
|
||||
background-position: center;
|
||||
}
|
||||
|
||||
#icon_settings {
|
||||
background-image: url(../images/ic_settings.png);
|
||||
}
|
||||
|
||||
#icon_terrain {
|
||||
background-image: url(../images/ic_terrain.png);
|
||||
}
|
||||
|
||||
#icon_explore {
|
||||
background-image: url(../images/ic_explore.png);
|
||||
}
|
||||
|
||||
#icon_info {
|
||||
background-image: url(../images/ic_info.png);
|
||||
}
|
||||
|
||||
#icon_route {
|
||||
background-image: url(../images/ic_route.png);
|
||||
}
|
||||
|
||||
#icon_help {
|
||||
background-image: url(../images/ic_help.png);
|
||||
}
|
||||
|
||||
#frame div.toptab {
|
||||
background-color: #222222;
|
||||
border-left: none;
|
||||
border-right: 1px solid grey;
|
||||
border-top: 1px solid grey;
|
||||
}
|
||||
|
||||
#frame div.page {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
bottom: 40px;
|
||||
left: 0px;
|
||||
width: 200px;
|
||||
height: 100%;
|
||||
color: white;
|
||||
background-color: #222222;
|
||||
padding: 0 4px;
|
||||
box-sizing: border-box;
|
||||
cursor: default;
|
||||
padding-bottom: 40px;
|
||||
}
|
||||
|
||||
#frame div.page div.header {
|
||||
margin: 16px 0px 8px 0px;
|
||||
font-weight: bold;
|
||||
height: 16px;
|
||||
box-sizing: border-box;
|
||||
padding-left: 2px;
|
||||
}
|
||||
|
||||
#frame div.page div.control {
|
||||
margin: 2px 0 2px 0;
|
||||
padding-top: 2px;
|
||||
}
|
||||
|
||||
#frame div.page label {
|
||||
display: block;
|
||||
border-top: 1px solid grey;
|
||||
padding: 2px 0 2px 8px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
#frame div.page select {
|
||||
display: block;
|
||||
width: 180px;
|
||||
box-sizing: border-box;
|
||||
padding-top: 2px;
|
||||
margin: 2px 0 4px 8px;
|
||||
}
|
||||
|
||||
#frame div.page select:empty {
|
||||
background-color: #888888;
|
||||
}
|
||||
|
||||
#frame div.page button {
|
||||
display: block;
|
||||
width: 180px;
|
||||
box-sizing: border-box;
|
||||
margin-top: 4px;
|
||||
margin-bottom: 4px;
|
||||
margin-left: 8px;
|
||||
border: none;
|
||||
border-bottom: 4px solid #1ab4e5;
|
||||
color: #dddddd;
|
||||
background-color: black;
|
||||
padding-bottom: 4px;
|
||||
box-shadow: 1px 1px 8px 0px #888888;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
#frame div.page button:hover {
|
||||
color: white;
|
||||
}
|
||||
|
||||
#frame div.page button:active {
|
||||
color: #dddddd;
|
||||
border-bottom: 4px solid #0c536a;
|
||||
box-shadow: none;
|
||||
box-shadow: inset 1px 1px 8px 0px #888888;
|
||||
}
|
||||
|
||||
#frame div.page input[type="text"] {
|
||||
display: block;
|
||||
width: 180px;
|
||||
box-sizing: border-box;
|
||||
margin-top: 2px;
|
||||
margin-left: 8px;
|
||||
}
|
||||
|
||||
#frame div.page input[type="checkbox"] {
|
||||
position: absolute;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
#frame div.page input[type="range"] {
|
||||
display: block;
|
||||
width: 180px;
|
||||
margin-left: 8px;
|
||||
}
|
||||
|
||||
#frame dt, #frame dd {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
#frame dt {
|
||||
clear: both;
|
||||
float: left;
|
||||
padding-left: 16px;
|
||||
}
|
||||
|
||||
#frame dd {
|
||||
margin-left: 40px;
|
||||
}
|
||||
|
||||
#frame p {
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
}
|
||||
|
||||
div.station-info {
|
||||
position: absolute;
|
||||
border: 1px solid white;
|
||||
background-color: #222222;
|
||||
color: white;
|
||||
padding: 4px;
|
||||
z-index: 200;
|
||||
}
|
||||
|
||||
|
||||
.overlay-branding {
|
||||
color: white;
|
||||
margin: 4px;
|
||||
position: absolute;
|
||||
right: 0;
|
||||
top: 0;
|
||||
}
|
||||
div#scene {
|
||||
width: 100%;
|
||||
height: 90%; }
|
||||
|
||||
</style>
|
||||
|
||||
<script type="text/javascript" src="/CaveView/js/CaveView.js" ></script>
|
||||
<script type="text/javascript" src="/CaveView/lib/proj4.js" ></script>
|
||||
|
||||
|
||||
<script type="text/javascript" >
|
||||
|
||||
function onLoad () {
|
||||
|
||||
// display the user interface - and a blank canvas
|
||||
// the configuration object specifies the location of CaveView, surveys and terrain files
|
||||
CV.UI.init( 'scene', {
|
||||
home: '/javascript/CaveView/',
|
||||
surveyDirectory: '/cave/3d/',
|
||||
terrainDirectory: '/loser/surface/terrain/'
|
||||
} );
|
||||
|
||||
// load a single survey to display
|
||||
CV.UI.loadCave( '{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{ cave.unofficial_number }}{% endif %}.3d' );
|
||||
}
|
||||
window.onload = onLoad;
|
||||
</script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
{% load wiki_markup %}
|
||||
{% block content %}
|
||||
{% block contentheader %}
|
||||
@@ -131,7 +564,8 @@
|
||||
{% endif %}
|
||||
{% if cave.survex_file %}
|
||||
<h2>Survex File</h2>
|
||||
{{ cave.survex_file|safe }}
|
||||
{{ cave.survex_file|safe }} <a href="{% if cave.kataster_number %}{% url "cave3d" cave.kataster_number %}{% else %}{% url "cave3d" cave.unofficial_number %}{% endif %}">3d file</a>
|
||||
<div id='scene'></div>
|
||||
{% endif %}
|
||||
{% if cave.notes %}
|
||||
<h2>Notes</h2>
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
|
||||
<h1>Cave Index</h1>
|
||||
|
||||
|
||||
<h3>Notable caves</h3>
|
||||
<ul>
|
||||
{% for cave in notablecaves %}
|
||||
@@ -16,13 +17,13 @@
|
||||
|
||||
<h3>1623</h3>
|
||||
|
||||
<ul class="searchable">
|
||||
<table class="searchable">
|
||||
{% for cave in caves1623 %}
|
||||
|
||||
<li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>
|
||||
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> {{ cave.slug }}</td></tr>
|
||||
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</table>
|
||||
|
||||
<h3>1626</h3>
|
||||
|
||||
|
||||
32
templates/cavemillenial.html
Normal file
32
templates/cavemillenial.html
Normal file
@@ -0,0 +1,32 @@
|
||||
{% extends "cavebase.html" %}
|
||||
{% load wiki_markup %}
|
||||
|
||||
{% block title %}Cave Index{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<h1>Cave Index</h1>
|
||||
|
||||
<h3>1623</h3>
|
||||
|
||||
<table class="searchable">
|
||||
{% for cave in caves1623 %}
|
||||
|
||||
<tr><td> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </td></tr>
|
||||
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
<h3>1626</h3>
|
||||
|
||||
<ul class="searchable">
|
||||
{% for cave in caves1626 %}
|
||||
|
||||
<li> <a href="{{ cave.url }}">{% if cave.kataster_number %}{{ cave.kataster_number }}{% else %}{{cave.unofficial_number }}{%endif %} {{cave.official_name|safe}}</a> </li>
|
||||
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
||||
<a href="{% url "newcave" %}">New Cave</a>
|
||||
|
||||
{% endblock %}
|
||||
14
templates/core/expedition_list.html
Normal file
14
templates/core/expedition_list.html
Normal file
@@ -0,0 +1,14 @@
|
||||
{% extends "base.html" %}
|
||||
{% load wiki_markup %}
|
||||
{% load link %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Expeditions</h1>
|
||||
<ul>
|
||||
{% for expedition in object_list %}
|
||||
<li>{{ expedition.year }} - <a href="{{ expedition.get_absolute_url }}">{{ expedition.name }}</a></li>
|
||||
{% empty %}
|
||||
<li>No articles yet.</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endblock %}
|
||||
@@ -3,5 +3,13 @@
|
||||
{% block bodyattrs %}{% if homepage %} id="homepage"{% endif %}{% endblock %}
|
||||
{% block body %}
|
||||
{{ body|safe }}
|
||||
{% if homepage %}{% if editable %}<a href="{% url "editflatpage" path %}">Edit</a>{% endif %}{%else %}{% include "menu.html" %}{% endif %}
|
||||
{% if homepage %}
|
||||
{% if editable %}
|
||||
<a href="{% url "editflatpage" path %}">Edit</a>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% if not has_menu %}
|
||||
{% include "menu.html" %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
@@ -42,7 +42,7 @@ This is Troggle, the information portal for Cambridge University Caving Club's E
|
||||
</p>
|
||||
|
||||
<p class="indent">
|
||||
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{photo.objects.count}} photos and {{logbookentry.objects.count}} logbook entries.
|
||||
Here you will find information about the {{expedition.objects.count}} expeditions the club has undertaken since 1976. Browse survey information, photos, and description wikis for {{Cave.objects.count}} caves, {{subcave.objects.count}} areas within those caves, and {{extantqms.count}} going leads yet to be explored. We have {{Photo.objects.count}} photos and {{Logbookentry.objects.count}} logbook entries.
|
||||
</p>
|
||||
|
||||
<p class="indent">
|
||||
|
||||
69
templates/millenialcaves.html
Normal file
69
templates/millenialcaves.html
Normal file
@@ -0,0 +1,69 @@
|
||||
<html>
|
||||
<head>
|
||||
|
||||
|
||||
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}/css/cavetables.css">
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<h2>Caves of loserplateau (locations acording to all.3d)</h2>
|
||||
<span style="font-size:70%">
|
||||
Name contains:<br>
|
||||
<div id="inputf"><input type="text" name="CaveName" id="CaveName" style="width:100%"></div><br>
|
||||
Depth between (min, max) in meters (0 disables filter):<br>
|
||||
<div id="inputf"><input type="number" name="CaveDepthMin" id="CaveDepthMin" style="width:45%"> - <input type="number" name="CaveDepthMax" id="CaveDepthMax" style="width:45%"></div><br>
|
||||
Length between (min, max) in meters (0 disables filter):<br>
|
||||
<div id="inputf"><input type="number" name="CaveLengthMin" id="CaveLengthMin" style="width:45%"> - <input type="number" name="CaveLengthMax" id="CaveLengthMax" style="width:45%"></div><br>
|
||||
Last visit after (date in YYYY.MM.DD format works best):<br>
|
||||
<div id="inputf"><input type="text" name="VisitDate" id="VisitDate" style="width:100%"></div><br>
|
||||
Last visited by (single word or regular expression, search is not case sensitive):<br>
|
||||
(e.g. <span id="mono">/da.e/</span> matches both Dave and Dane, <span id="mono">/w..k|ol{2}y/</span> matches either Wook and Olly)<br>
|
||||
<div id="inputf"><input type="text" name="Visitor" id="Visitor" style="width:100%"></div><br>
|
||||
Hide incomplete entries:<br>
|
||||
<div id="inputf"><input type="checkbox" name="Incomplete" id="Incomplete" style="width:100%"></div><br><br>
|
||||
|
||||
<button onclick="filterTable('caves_table')">Filter</button><br>
|
||||
<button onclick="filterTableReset('caves_table')">Reset filters</button><br>
|
||||
|
||||
Click on column headers to sort/reverse sort<br><br><br>
|
||||
</span>
|
||||
|
||||
|
||||
|
||||
|
||||
<table id="caves_table">
|
||||
<tr>
|
||||
<th onclick="sortTable(0,'caves_table',0)">Cave survex id</th>
|
||||
<th onclick="sortTable(1,'caves_table',0)">Cave name</th>
|
||||
<th onclick="sortTable(2,'caves_table',1)">Cave depth</th>
|
||||
<th onclick="sortTable(3,'caves_table',1)">Cave length</th>
|
||||
<th onclick="sortTable(4,'caves_table',0)">Last leg date</th>
|
||||
<th onclick="sortTable(5,'caves_table',0)">Cave location (UTM)</th>
|
||||
<th onclick="sortTable(6,'caves_table',0)">Cave location (lat/lon)</th>
|
||||
<th onclick="sortTable(7,'caves_table',1)">Top camp distance [m]</th>
|
||||
</tr>
|
||||
|
||||
{% for cave in caves %}
|
||||
<tr>
|
||||
<td><a href={{cave.description}}>{{ cave.name }}</a></td>
|
||||
<td>{{ cave.title }}</td>
|
||||
<td>{{ cave.total_depth }}</td>
|
||||
<td>{{ cave.total_length }}</td>
|
||||
<td>{{ cave.date }}</td>
|
||||
<td>33U {{ cave.entrance }}</td>
|
||||
<td>{{ cave.lat_lon_entrance }}</td>
|
||||
<td>{{ cave.top_camp_distance}}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
||||
</table>
|
||||
|
||||
<script type="text/javascript" src="{{ settings.MEDIA_URL }}/scripts/TableSort.js"></script>
|
||||
|
||||
</body>
|
||||
|
||||
|
||||
</html>
|
||||
|
||||
18
urls.py
18
urls.py
@@ -15,12 +15,23 @@ admin.autodiscover()
|
||||
|
||||
# type url probably means it's used.
|
||||
|
||||
# HOW DOES THIS WORK:
|
||||
# url( <regular expression that matches the thing in the web browser>,
|
||||
# <reference to python function in 'core' folder>,
|
||||
# <name optional argument for URL reversing (doesn't do much)>)
|
||||
|
||||
actualurlpatterns = patterns('',
|
||||
|
||||
|
||||
url(r'^millenialcaves/?$', views_caves.millenialcaves, name="millenialcaves"),
|
||||
url(r'^millenialpeople/?$', views_caves.millenialpeople, name="millenialpeople"),
|
||||
url(r'^cave/descriptionM/([^/]+)/?$', views_caves.millenialdescription),
|
||||
#url(r'^cave/description/([^/]+)/?$', views_caves.caveDescription),
|
||||
|
||||
url(r'^troggle$', views_other.frontpage, name="frontpage"),
|
||||
url(r'^todo/$', views_other.todo, name="todo"),
|
||||
|
||||
url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
|
||||
url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
|
||||
url(r'^people/?$', views_logbooks.personindex, name="personindex"),
|
||||
|
||||
url(r'^newqmnumber/?$', views_other.ajax_QM_number, ),
|
||||
@@ -30,7 +41,7 @@ actualurlpatterns = patterns('',
|
||||
#url(r'^person/(\w+_\w+)$', views_logbooks.person, name="person"),
|
||||
|
||||
url(r'^expedition/(\d+)$', views_logbooks.expedition, name="expedition"),
|
||||
url(r'^expeditions/?$', ListView, {'queryset':Expedition.objects.all(),'template_name':'object_list.html'},name="expeditions"),
|
||||
url(r'^expeditions/?$', views_logbooks.ExpeditionListView.as_view(), name="expeditions"),
|
||||
url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-z]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
|
||||
url(r'^logbookentry/(?P<date>.*)/(?P<slug>.*)/?$', views_logbooks.logbookentry,name="logbookentry"),
|
||||
url(r'^newlogbookentry/(?P<expeditionyear>.*)$', views_logbooks.newLogbookEntry, name="newLogBookEntry"),
|
||||
@@ -56,6 +67,7 @@ actualurlpatterns = patterns('',
|
||||
#url(r'^cavedescription/(?P<cavedescription_name>[^/]+)/?$', views_caves.cave_description, name="cavedescription"),
|
||||
#url(r'^cavedescription/?$', object_list, {'queryset':CaveDescription.objects.all(),'template_name':'object_list.html'}, name="cavedescriptions"),
|
||||
#url(r'^cavehref/(.+)$', views_caves.cave, name="cave"),url(r'cave'),
|
||||
url(r'^cave/3d/(?P<cave_id>[^/]+).3d$', views_caves.cave3d, name="cave3d"),
|
||||
|
||||
# url(r'^jgtfile/(.*)$', view_surveys.jgtfile, name="jgtfile"),
|
||||
# url(r'^jgtuploadfile$', view_surveys.jgtuploadfile, name="jgtuploadfile"),
|
||||
@@ -128,7 +140,7 @@ actualurlpatterns = patterns('',
|
||||
#(r'^survey_scans/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.SURVEY_SCANS, 'show_indexes':True}),
|
||||
url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
|
||||
url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
|
||||
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg))$',
|
||||
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg|jpeg))$',
|
||||
view_surveys.surveyscansingle, name="surveyscansingle"),
|
||||
|
||||
url(r'^tunneldata/$', view_surveys.tunneldata, name="tunneldata"),
|
||||
|
||||
Reference in New Issue
Block a user