Mirror of https://expo.survex.com/repositories/troggle/.git (synced 2025-12-14 05:55:06 +00:00)

Compare commits: old-master...django-1.1 (9 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 43dfe946b6 |  |
|  | 656ddcfe93 |  |
|  | 505bc48331 |  |
|  | 92b273e45f |  |
|  | 978270b152 |  |
|  | 291e3baabf |  |
|  | eb5406f325 |  |
|  | de22b071b0 |  |
|  | 08a41941f9 |  |
@@ -7,7 +7,7 @@ Troggle setup

 Python, Django, and Database setup
 -----------------------------------
-Troggle requires Django 1.4 or greater, and any version of Python that works with it.
+Troggle requires Django 1.10, and Python 2.7.
 Install Django with the following command:

 apt-get install python-django (on debian/ubuntu)
@@ -20,10 +20,14 @@ Troggle itself
 -------------
 Choose a directory where you will keep troggle, and git clone Troggle into it using the following command:

-git clone git://expo.survex.com/troggle
+git clone git://expo.survex.com/~/troggle
+or more reliably
+git clone ssh://expo@expo.survex.com/home/expo/troggle

 Running in development
 ----------------------
 The simplest way to run Troggle in development is through the docker-compose setup
 See the docker folder in the repo for details

 If you want to work on the source code and be able to commit, your account will need to be added to the troggle project members list. Contact wookey at wookware dot org to get this set up.
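For a newcomer, the compose workflow is typically just the following (a sketch only, assuming the compose file sits in the repo's docker folder as the README says; the service names and exposed port are defined in that folder, not reproduced here):

    cd troggle/docker
    docker-compose up

then point a browser at the port the web service exposes.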
@@ -9,12 +9,12 @@ from troggle.core.views_other import downloadLogbook


 class TroggleModelAdmin(admin.ModelAdmin):

     def save_model(self, request, obj, form, change):
         """overriding admin save to fill the new_since parsing_field"""
         obj.new_since_parsing=True
         obj.save()

     class Media:
         js = ('jquery/jquery.min.js','js/QM_helper.js')

@@ -28,6 +28,10 @@ class SurvexBlockAdmin(TroggleModelAdmin):
     inlines = (RoleInline,)


+class SurvexStationAdmin(TroggleModelAdmin):
+    search_fields = ('name', 'block__name')
+
+
 class ScannedImageInline(admin.TabularInline):
     model = ScannedImage
     extra = 4

@@ -40,7 +44,7 @@ class OtherCaveInline(admin.TabularInline):

 class SurveyAdmin(TroggleModelAdmin):
     inlines = (ScannedImageInline,)
     search_fields = ('expedition__year','wallet_number')


 class QMsFoundInline(admin.TabularInline):

@@ -48,7 +52,7 @@ class QMsFoundInline(admin.TabularInline):
     fk_name='found_by'
     fields=('number','grade','location_description','comment')#need to add foreignkey to cave part
     extra=1


 class PhotoInline(admin.TabularInline):
     model = DPhoto

@@ -64,7 +68,7 @@ class PersonTripInline(admin.TabularInline):

 #class LogbookEntryAdmin(VersionAdmin):
 class LogbookEntryAdmin(TroggleModelAdmin):
     prepopulated_fields = {'slug':("title",)}
     search_fields = ('title','expedition__year')
     date_heirarchy = ('date')
     inlines = (PersonTripInline, PhotoInline, QMsFoundInline)

@@ -73,11 +77,11 @@ class LogbookEntryAdmin(TroggleModelAdmin):
         "all": ("css/troggleadmin.css",)
     }
     actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')

     def export_logbook_entries_as_html(self, modeladmin, request, queryset):
         response=downloadLogbook(request=request, queryset=queryset, extension='html')
         return response

     def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
         response=downloadLogbook(request=request, queryset=queryset, extension='txt')
         return response

@@ -95,11 +99,11 @@ class PersonAdmin(TroggleModelAdmin):

 class QMAdmin(TroggleModelAdmin):
     search_fields = ('found_by__cave__kataster_number','number','found_by__date')
-    list_display = ('__unicode__','grade','found_by','ticked_off_by')
+    list_display = ('__unicode__','grade','found_by','ticked_off_by','nearest_station')
     list_display_links = ('__unicode__',)
-    list_editable = ('found_by','ticked_off_by','grade')
+    list_editable = ('found_by','ticked_off_by','grade','nearest_station')
     list_per_page = 20
-    raw_id_fields=('found_by','ticked_off_by')
+    raw_id_fields=('found_by','ticked_off_by','nearest_station')


 class PersonExpeditionAdmin(TroggleModelAdmin):

@@ -118,24 +122,27 @@ class EntranceAdmin(TroggleModelAdmin):

 admin.site.register(DPhoto)
 admin.site.register(Cave, CaveAdmin)
 admin.site.register(CaveSlug)
 admin.site.register(Area)
 #admin.site.register(OtherCaveName)
 admin.site.register(CaveAndEntrance)
 admin.site.register(NewSubCave)
 admin.site.register(CaveDescription)
 admin.site.register(Entrance, EntranceAdmin)
 admin.site.register(SurvexBlock, SurvexBlockAdmin)
 admin.site.register(Expedition)
 admin.site.register(Person,PersonAdmin)
 admin.site.register(SurvexPersonRole)
 admin.site.register(PersonExpedition,PersonExpeditionAdmin)
 admin.site.register(LogbookEntry, LogbookEntryAdmin)
 #admin.site.register(PersonTrip)
 admin.site.register(QM, QMAdmin)
 admin.site.register(Survey, SurveyAdmin)
 admin.site.register(ScannedImage)
-admin.site.register(SurvexStation)
+admin.site.register(SurvexDirectory)
+admin.site.register(SurvexFile)
+admin.site.register(SurvexStation, SurvexStationAdmin)
+admin.site.register(SurvexBlock)
+admin.site.register(SurvexPersonRole)
+admin.site.register(SurvexScansFolder)
+admin.site.register(SurvexScanSingle)
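These hunks all use standard Django ModelAdmin options. One detail worth flagging when reading the diff: Django spells the drill-down option date_hierarchy, so the misspelled date_heirarchy attribute above is just an unused class attribute and produces no date navigation. A corrected minimal sketch (the class name here is hypothetical, not from the repo):

    class ExampleLogbookEntryAdmin(TroggleModelAdmin):
        search_fields = ('title', 'expedition__year')
        date_hierarchy = 'date'   # the spelling Django actually expects
        prepopulated_fields = {'slug': ('title',)}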
@@ -15,7 +15,7 @@ def listdir(*path):
         for p in os.listdir(root):
             if os.path.isdir(os.path.join(root, p)):
                 l += p + "/\n"

             elif os.path.isfile(os.path.join(root, p)):
                 l += p + "\n"
             #Ignore non-files and non-directories

@@ -28,7 +28,7 @@ def listdir(*path):
         c = c.replace("#", "%23")
         print("FILE: ", settings.FILES + "listdir/" + c)
         return urllib.urlopen(settings.FILES + "listdir/" + c).read()


 def dirsAsList(*path):
     return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] == "/"]
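The contract here is that listdir() returns one name per line, with a trailing slash marking directories, which is exactly what dirsAsList() filters on. A hypothetical call, for illustration only (the path arguments are made up):

    listing = listdir("surveyscans", "2019")   # e.g. "notes/\nplan.png\n"
    dirs = dirsAsList("surveyscans", "2019")   # e.g. ["notes/"]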
@@ -16,7 +16,7 @@ class CaveForm(ModelForm):
     underground_centre_line = forms.CharField(required = False, widget=forms.Textarea())
     notes = forms.CharField(required = False, widget=forms.Textarea())
     references = forms.CharField(required = False, widget=forms.Textarea())
     url = forms.CharField(required = True)
     class Meta:
         model = Cave
         exclude = ("filename",)

@@ -24,9 +24,9 @@ class CaveForm(ModelForm):

     def clean(self):
         if self.cleaned_data.get("kataster_number") == "" and self.cleaned_data.get("unofficial_number") == "":
             self._errors["unofficial_number"] = self.error_class(["Either the kataster or unoffical number is required."])
         if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
             self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
         if self.cleaned_data.get("area") == []:
             self._errors["area"] = self.error_class(["This field is required."])
         if self.cleaned_data.get("url") and self.cleaned_data.get("url").startswith("/"):

@@ -82,11 +82,11 @@ class EntranceLetterForm(ModelForm):
# This function returns html-formatted paragraphs for each of the
# wikilink types that are related to this logbookentry. Each paragraph
# contains a list of all of the related wikilinks.
#
# Perhaps an admin javascript solution would be better.
# """
#     res = ["Please use the following wikilinks, which are related to this logbook entry:"]
#
#     res.append(r'</p><p style="float: left;"><b>QMs found:</b>')
#     for QM in LogbookEntry.instance.QMs_found.all():
#         res.append(QM.wiki_link())

@@ -94,12 +94,12 @@ class EntranceLetterForm(ModelForm):
#     res.append(r'</p><p style="float: left;"><b>QMs ticked off:</b>')
#     for QM in LogbookEntry.instance.QMs_ticked_off.all():
#         res.append(QM.wiki_link())

#     res.append(r'</p><p style="float: left; "><b>People</b>')
#     for persontrip in LogbookEntry.instance.persontrip_set.all():
#         res.append(persontrip.wiki_link())
#     res.append(r'</p>')

#     return string.join(res, r'<br />')

# def __init__(self, *args, **kwargs):

@@ -107,7 +107,7 @@ class EntranceLetterForm(ModelForm):
#     self.fields['text'].help_text=self.wikiLinkHints()#

#class CaveForm(forms.Form):
#    html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

 def getTripForm(expedition):

@@ -118,18 +118,18 @@ def getTripForm(expedition):
         caves.sort()
         caves = ["-----"] + caves
         cave = forms.ChoiceField([(c, c) for c in caves], required=False)
         location = forms.CharField(max_length=200, required=False)
         caveOrLocation = forms.ChoiceField([("cave", "Cave"), ("location", "Location")], widget = forms.widgets.RadioSelect())
         html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

         def clean(self):
             print(dir(self))
             if self.cleaned_data.get("caveOrLocation") == "cave" and not self.cleaned_data.get("cave"):
                 self._errors["cave"] = self.error_class(["This field is required"])
             if self.cleaned_data.get("caveOrLocation") == "location" and not self.cleaned_data.get("location"):
                 self._errors["location"] = self.error_class(["This field is required"])
             return self.cleaned_data

     class PersonTripForm(forms.Form):
         names = [get_name(pe) for pe in PersonExpedition.objects.filter(expedition = expedition)]
         names.sort()

@@ -141,7 +141,7 @@ def getTripForm(expedition):
     PersonTripFormSet = formset_factory(PersonTripForm, extra=1)

     return PersonTripFormSet, TripForm


 def get_name(pe):
     if pe.nickname:
         return pe.nickname

@@ -162,18 +162,18 @@ def get_name(pe):
#     caves = ["-----"] + caves
#     cave = forms.ChoiceField([(c, c) for c in caves], required=False)

#     entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
#     qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)

#     expeditions = [e.year for e in Expedition.objects.all()]
#     expeditions.sort()
#     expeditions = ["-----"] + expeditions
#     expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)

#     logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

#     person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

#     survey_point = forms.CharField()
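getTripForm() builds its classes at call time so the cave and person choices reflect the chosen expedition, and formset_factory() turns PersonTripForm into a FormSet class. A hypothetical view-side use, for illustration only (the request-handling names are made up):

    PersonTripFormSet, TripForm = getTripForm(expedition)
    tripform = TripForm(request.POST or None)
    persontrips = PersonTripFormSet(request.POST or None, prefix="persontrips")
    if tripform.is_valid() and persontrips.is_valid():
        ...  # create the LogbookEntry and one PersonTrip per sub-form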
@@ -1,21 +1,21 @@
 from imagekit.specs import ImageSpec
 from imagekit import processors

 class ResizeThumb(processors.Resize):
     width = 100
     crop = False

 class ResizeDisplay(processors.Resize):
     width = 600

 #class EnhanceThumb(processors.Adjustment):
     #contrast = 1.2
     #sharpness = 2

 class Thumbnail(ImageSpec):
     access_as = 'thumbnail_image'
     pre_cache = True
     processors = [ResizeThumb]

 class Display(ImageSpec):
     increment_count = True
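These spec classes are the old (pre-1.0) django-imagekit API, where an ImageModel picks up attributes such as thumbnail_image from access_as. The core/models.py hunk further down swaps the ImageModel import for ProcessedImageField, the newer django-imagekit style. A minimal sketch of that style, under the assumption that the newer library is in use (the model and field names here are illustrative, not from the repo):

    from django.db import models
    from imagekit.models import ProcessedImageField
    from imagekit.processors import ResizeToFit

    class ExamplePhoto(models.Model):
        # roughly equivalent to ResizeDisplay above: fit within 600px wide
        image = ProcessedImageField(upload_to='photos',
                                    processors=[ResizeToFit(600)],
                                    format='JPEG',
                                    options={'quality': 80})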
575  core/migrations/0001_initial.py  Normal file
@@ -0,0 +1,575 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-02-18 16:01
from __future__ import unicode_literals

from django.conf import settings
import django.core.files.storage
from django.db import migrations, models
import django.db.models.deletion
import troggle.core.models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Area',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('short_name', models.CharField(max_length=100)),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Area')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Cave',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('official_name', models.CharField(max_length=160)),
                ('kataster_code', models.CharField(blank=True, max_length=20, null=True)),
                ('kataster_number', models.CharField(blank=True, max_length=10, null=True)),
                ('unofficial_number', models.CharField(blank=True, max_length=60, null=True)),
                ('explorers', models.TextField(blank=True, null=True)),
                ('underground_description', models.TextField(blank=True, null=True)),
                ('equipment', models.TextField(blank=True, null=True)),
                ('references', models.TextField(blank=True, null=True)),
                ('survey', models.TextField(blank=True, null=True)),
                ('kataster_status', models.TextField(blank=True, null=True)),
                ('underground_centre_line', models.TextField(blank=True, null=True)),
                ('notes', models.TextField(blank=True, null=True)),
                ('length', models.CharField(blank=True, max_length=100, null=True)),
                ('depth', models.CharField(blank=True, max_length=100, null=True)),
                ('extent', models.CharField(blank=True, max_length=100, null=True)),
                ('survex_file', models.CharField(blank=True, max_length=100, null=True)),
                ('description_file', models.CharField(blank=True, max_length=200, null=True)),
                ('url', models.CharField(blank=True, max_length=200, null=True)),
                ('filename', models.CharField(max_length=200)),
                ('area', models.ManyToManyField(blank=True, to='core.Area')),
            ],
            options={
                'ordering': ('kataster_code', 'unofficial_number'),
            },
        ),
        migrations.CreateModel(
            name='CaveAndEntrance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('entrance_letter', models.CharField(blank=True, max_length=20, null=True)),
                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
        ),
        migrations.CreateModel(
            name='CaveDescription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('short_name', models.CharField(max_length=50, unique=True)),
                ('long_name', models.CharField(blank=True, max_length=200, null=True)),
                ('description', models.TextField(blank=True, null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='CaveSlug',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(unique=True)),
                ('primary', models.BooleanField(default=False)),
                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
        ),
        migrations.CreateModel(
            name='DataIssue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('parser', models.CharField(blank=True, max_length=50, null=True)),
                ('message', models.CharField(blank=True, max_length=400, null=True)),
            ],
            options={
                'ordering': ['date'],
            },
        ),
        migrations.CreateModel(
            name='DPhoto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('caption', models.CharField(blank=True, max_length=1000, null=True)),
                ('file', models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url=b'http://127.0.0.1:8000/photos/', location=b'/expo/expoweb/photos'), upload_to=b'.')),
                ('is_mugshot', models.BooleanField(default=False)),
                ('lon_utm', models.FloatField(blank=True, null=True)),
                ('lat_utm', models.FloatField(blank=True, null=True)),
                ('contains_cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Entrance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('name', models.CharField(blank=True, max_length=100, null=True)),
                ('entrance_description', models.TextField(blank=True, null=True)),
                ('explorers', models.TextField(blank=True, null=True)),
                ('map_description', models.TextField(blank=True, null=True)),
                ('location_description', models.TextField(blank=True, null=True)),
                ('approach', models.TextField(blank=True, null=True)),
                ('underground_description', models.TextField(blank=True, null=True)),
                ('photo', models.TextField(blank=True, null=True)),
                ('marking', models.CharField(choices=[(b'P', b'Paint'), (b'P?', b'Paint (?)'), (b'T', b'Tag'), (b'T?', b'Tag (?)'), (b'R', b'Needs Retag'), (b'S', b'Spit'), (b'S?', b'Spit (?)'), (b'U', b'Unmarked'), (b'?', b'Unknown')], max_length=2)),
                ('marking_comment', models.TextField(blank=True, null=True)),
                ('findability', models.CharField(blank=True, choices=[(b'?', b'To be confirmed ...'), (b'S', b'Coordinates'), (b'L', b'Lost'), (b'R', b'Refindable')], max_length=1, null=True)),
                ('findability_description', models.TextField(blank=True, null=True)),
                ('alt', models.TextField(blank=True, null=True)),
                ('northing', models.TextField(blank=True, null=True)),
                ('easting', models.TextField(blank=True, null=True)),
                ('tag_station', models.TextField(blank=True, null=True)),
                ('exact_station', models.TextField(blank=True, null=True)),
                ('other_station', models.TextField(blank=True, null=True)),
                ('other_description', models.TextField(blank=True, null=True)),
                ('bearings', models.TextField(blank=True, null=True)),
                ('url', models.CharField(blank=True, max_length=200, null=True)),
                ('filename', models.CharField(max_length=200)),
                ('cached_primary_slug', models.CharField(blank=True, max_length=200, null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='EntranceSlug',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(unique=True)),
                ('primary', models.BooleanField(default=False)),
                ('entrance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance')),
            ],
        ),
        migrations.CreateModel(
            name='Expedition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('year', models.CharField(max_length=20, unique=True)),
                ('name', models.CharField(max_length=100)),
            ],
            options={
                'ordering': ('-year',),
                'get_latest_by': 'year',
            },
        ),
        migrations.CreateModel(
            name='ExpeditionDay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('date', models.DateField()),
                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
            ],
            options={
                'ordering': ('date',),
            },
        ),
        migrations.CreateModel(
            name='LogbookEntry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('date', models.DateTimeField()),
                ('title', models.CharField(max_length=200)),
                ('cave_slug', models.SlugField()),
                ('place', models.CharField(blank=True, help_text=b"Only use this if you haven't chosen a cave", max_length=100, null=True)),
                ('text', models.TextField()),
                ('slug', models.SlugField()),
                ('filename', models.CharField(max_length=200, null=True)),
                ('entry_type', models.CharField(choices=[(b'wiki', b'Wiki style logbook'), (b'html', b'Html style logbook')], default=b'wiki', max_length=50, null=True)),
                ('expedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
            ],
            options={
                'ordering': ('-date',),
                'verbose_name_plural': 'Logbook Entries',
            },
        ),
        migrations.CreateModel(
            name='NewSubCave',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=200, unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='OtherCaveName',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=160)),
                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('fullname', models.CharField(max_length=200)),
                ('is_vfho', models.BooleanField(default=False, help_text=b'VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.')),
                ('mug_shot', models.CharField(blank=True, max_length=100, null=True)),
                ('blurb', models.TextField(blank=True, null=True)),
                ('orderref', models.CharField(max_length=200)),
                ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('orderref',),
                'verbose_name_plural': 'People',
            },
        ),
        migrations.CreateModel(
            name='PersonExpedition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('slugfield', models.SlugField(blank=True, null=True)),
                ('is_guest', models.BooleanField(default=False)),
                ('expo_committee_position', models.CharField(blank=True, choices=[(b'leader', b'Expo leader'), (b'medical', b'Expo medical officer'), (b'treasurer', b'Expo treasurer'), (b'sponsorship', b'Expo sponsorship coordinator'), (b'research', b'Expo research coordinator')], max_length=200, null=True)),
                ('nickname', models.CharField(blank=True, max_length=100, null=True)),
                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
            ],
            options={
                'ordering': ('-expedition',),
            },
        ),
        migrations.CreateModel(
            name='PersonTrip',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('time_underground', models.FloatField(help_text=b'In decimal hours')),
                ('is_logbook_entry_author', models.BooleanField(default=False)),
                ('logbook_entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry')),
                ('personexpedition', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonExpedition')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='QM',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('number', models.IntegerField(help_text=b'this is the sequential number in the year')),
                ('grade', models.CharField(choices=[(b'A', b'A: Large obvious lead'), (b'B', b'B: Average lead'), (b'C', b'C: Tight unpromising lead'), (b'D', b'D: Dig'), (b'X', b'X: Unclimbable aven')], max_length=1)),
                ('location_description', models.TextField(blank=True)),
                ('nearest_station_description', models.CharField(blank=True, max_length=400, null=True)),
                ('nearest_station_name', models.CharField(blank=True, max_length=200, null=True)),
                ('area', models.CharField(blank=True, max_length=100, null=True)),
                ('completion_description', models.TextField(blank=True, null=True)),
                ('comment', models.TextField(blank=True, null=True)),
                ('found_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='QMs_found', to='core.LogbookEntry')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ScannedImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('file', models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url=b'/survey_scans/', location=b'/expo/expofiles/'), upload_to=troggle.core.models.get_scan_path)),
                ('scanned_on', models.DateField(null=True)),
                ('contents', models.CharField(choices=[(b'notes', b'notes'), (b'plan', b'plan_sketch'), (b'elevation', b'elevation_sketch')], max_length=20)),
                ('number_in_wallet', models.IntegerField(null=True)),
                ('lon_utm', models.FloatField(blank=True, null=True)),
                ('lat_utm', models.FloatField(blank=True, null=True)),
                ('scanned_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='SurvexBlock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('text', models.TextField()),
                ('date', models.DateTimeField(blank=True, null=True)),
                ('begin_char', models.IntegerField()),
                ('survexpath', models.CharField(max_length=200)),
                ('totalleglength', models.FloatField()),
                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
                ('expedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
            ],
            options={
                'ordering': ('id',),
            },
        ),
        migrations.CreateModel(
            name='SurvexDirectory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(max_length=200)),
                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
            options={
                'ordering': ('path',),
            },
        ),
        migrations.CreateModel(
            name='SurvexEquate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
            ],
        ),
        migrations.CreateModel(
            name='SurvexFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(max_length=200)),
                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
                ('survexdirectory', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexDirectory')),
            ],
            options={
                'ordering': ('id',),
            },
        ),
        migrations.CreateModel(
            name='SurvexLeg',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tape', models.FloatField()),
                ('compass', models.FloatField()),
                ('clino', models.FloatField()),
                ('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
            ],
        ),
        migrations.CreateModel(
            name='SurvexPersonRole',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nrole', models.CharField(blank=True, choices=[(b'insts', b'Instruments'), (b'dog', b'Other'), (b'notes', b'Notes'), (b'pics', b'Pictures'), (b'tape', b'Tape measure'), (b'useless', b'Useless'), (b'helper', b'Helper'), (b'disto', b'Disto'), (b'consultant', b'Consultant')], max_length=200, null=True)),
                ('personname', models.CharField(max_length=100)),
                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
                ('person', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
                ('personexpedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonExpedition')),
                ('persontrip', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonTrip')),
                ('survexblock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
            ],
        ),
        migrations.CreateModel(
            name='SurvexScansFolder',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fpath', models.CharField(max_length=200)),
                ('walletname', models.CharField(max_length=200)),
            ],
            options={
                'ordering': ('walletname',),
            },
        ),
        migrations.CreateModel(
            name='SurvexScanSingle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ffile', models.CharField(max_length=200)),
                ('name', models.CharField(max_length=200)),
                ('survexscansfolder', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexScansFolder')),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='SurvexStation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('x', models.FloatField(blank=True, null=True)),
                ('y', models.FloatField(blank=True, null=True)),
                ('z', models.FloatField(blank=True, null=True)),
                ('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
                ('equate', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexEquate')),
            ],
        ),
        migrations.CreateModel(
            name='SurvexTitle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
                ('survexblock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
            ],
        ),
        migrations.CreateModel(
            name='Survey',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
                ('non_public', models.BooleanField(default=False)),
                ('wallet_number', models.IntegerField(blank=True, null=True)),
                ('wallet_letter', models.CharField(blank=True, max_length=1, null=True)),
                ('comments', models.TextField(blank=True, null=True)),
                ('location', models.CharField(blank=True, max_length=400, null=True)),
                ('centreline_printed_on', models.DateField(blank=True, null=True)),
                ('tunnel_file', models.FileField(blank=True, null=True, upload_to=b'surveyXMLfiles')),
                ('integrated_into_main_sketch_on', models.DateField(blank=True, null=True)),
                ('rendered_image', models.ImageField(blank=True, null=True, upload_to=b'renderedSurveys')),
                ('centreline_printed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='centreline_printed_by', to='core.Person')),
                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
                ('integrated_into_main_sketch_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='integrated_into_main_sketch_by', to='core.Person')),
                ('logbook_entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry')),
                ('subcave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.NewSubCave')),
                ('survex_block', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
                ('tunnel_main_sketch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Survey')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='TunnelFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tunnelpath', models.CharField(max_length=200)),
                ('tunnelname', models.CharField(max_length=200)),
                ('bfontcolours', models.BooleanField(default=False)),
                ('filesize', models.IntegerField(default=0)),
                ('npaths', models.IntegerField(default=0)),
                ('survexblocks', models.ManyToManyField(to='core.SurvexBlock')),
                ('survexscans', models.ManyToManyField(to='core.SurvexScanSingle')),
                ('survexscansfolders', models.ManyToManyField(to='core.SurvexScansFolder')),
                ('survextitles', models.ManyToManyField(to='core.SurvexTitle')),
                ('tunnelcontains', models.ManyToManyField(to='core.TunnelFile')),
            ],
            options={
                'ordering': ('tunnelpath',),
            },
        ),
        migrations.AddField(
            model_name='survexleg',
            name='stationfrom',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stationfrom', to='core.SurvexStation'),
        ),
        migrations.AddField(
            model_name='survexleg',
            name='stationto',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stationto', to='core.SurvexStation'),
        ),
        migrations.AddField(
            model_name='survexdirectory',
            name='primarysurvexfile',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='primarysurvexfile', to='core.SurvexFile'),
        ),
        migrations.AddField(
            model_name='survexblock',
            name='survexfile',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexFile'),
        ),
        migrations.AddField(
            model_name='survexblock',
            name='survexscansfolder',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexScansFolder'),
        ),
        migrations.AddField(
            model_name='scannedimage',
            name='survey',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Survey'),
        ),
        migrations.AddField(
            model_name='qm',
            name='nearest_station',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexStation'),
        ),
        migrations.AddField(
            model_name='qm',
            name='ticked_off_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='QMs_ticked_off', to='core.LogbookEntry'),
        ),
        migrations.AddField(
            model_name='dphoto',
            name='contains_entrance',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='photo_file', to='core.Entrance'),
        ),
        migrations.AddField(
            model_name='dphoto',
            name='contains_logbookentry',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry'),
        ),
        migrations.AddField(
            model_name='dphoto',
            name='contains_person',
            field=models.ManyToManyField(blank=True, to='core.Person'),
        ),
        migrations.AddField(
            model_name='dphoto',
            name='nearest_QM',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.QM'),
        ),
        migrations.AddField(
            model_name='cavedescription',
            name='linked_entrances',
            field=models.ManyToManyField(blank=True, to='core.Entrance'),
        ),
        migrations.AddField(
            model_name='cavedescription',
            name='linked_qms',
            field=models.ManyToManyField(blank=True, to='core.QM'),
        ),
        migrations.AddField(
            model_name='cavedescription',
            name='linked_subcaves',
            field=models.ManyToManyField(blank=True, to='core.NewSubCave'),
        ),
        migrations.AddField(
            model_name='caveandentrance',
            name='entrance',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance'),
        ),
    ]
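Once this initial migration exists, the schema is created with Django's standard command (a typical invocation; which database it targets comes from settings.py):

    python manage.py migrate core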
0  core/migrations/__init__.py  Normal file
208  core/models.py
@@ -10,14 +10,13 @@ from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from imagekit.models import ProcessedImageField #ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import *
|
||||
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
|
||||
found=re.findall(settings.QM_PATTERN,wiki_text)
|
||||
res=[]
|
||||
@@ -28,10 +27,10 @@ def get_related_by_wikilinks(wiki_text):
|
||||
qm=QM.objects.get(found_by__cave_slug__in = cave_slugs,
|
||||
found_by__date__year = qmdict['year'],
|
||||
number = qmdict['number'])
|
||||
res.append(qm)
|
||||
res.append(qm)
|
||||
except QM.DoesNotExist:
|
||||
print('fail on '+str(wikilink))
|
||||
|
||||
|
||||
return res
|
||||
|
||||
try:
|
||||
@@ -39,7 +38,7 @@ try:
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
except:
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
subprocess.call(settings.FIX_PERMISSIONS)
|
||||
logging.basicConfig(level=logging.DEBUG,
|
||||
filename=settings.LOGFILE,
|
||||
filemode='w')
|
||||
@@ -59,7 +58,7 @@ class TroggleModel(models.Model):
|
||||
|
||||
class TroggleImageModel(models.Model):
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
|
||||
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
@@ -70,23 +69,23 @@ class TroggleImageModel(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
#
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
|
||||
def __unicode__(self):
|
||||
return self.year
|
||||
|
||||
class Meta:
|
||||
ordering = ('-year',)
|
||||
get_latest_by = 'year'
|
||||
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))
|
||||
|
||||
|
||||
# construction function. should be moved out
|
||||
def get_expedition_day(self, date):
|
||||
expeditiondays = self.expeditionday_set.filter(date=date)
|
||||
@@ -96,13 +95,13 @@ class Expedition(TroggleModel):
|
||||
res = ExpeditionDay(expedition=self, date=date)
|
||||
res.save()
|
||||
return res
|
||||
|
||||
|
||||
def day_min(self):
|
||||
res = self.expeditionday_set.all()
|
||||
res = self.Expeditionday_set.all()
|
||||
return res and res[0] or None
|
||||
|
||||
|
||||
def day_max(self):
|
||||
res = self.expeditionday_set.all()
|
||||
res = self.Expeditionday_set.all()
|
||||
return res and res[len(res) - 1] or None
|
||||
|
||||
|
||||
@@ -114,9 +113,12 @@ class ExpeditionDay(TroggleModel):
|
||||
ordering = ('date',)
|
||||
|
||||
def GetPersonTrip(self, personexpedition):
|
||||
personexpeditions = self.persontrip_set.filter(expeditionday=self)
|
||||
personexpeditions = self.Persontrip_set.filter(expeditionday=self)
|
||||
return personexpeditions and personexpeditions[0] or None
|
||||
|
||||
def __unicode__(self):
|
||||
return str(self.expedition) + ' ' + str(self.date)
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
@@ -127,23 +129,23 @@ class Person(TroggleModel):
|
||||
is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
|
||||
mug_shot = models.CharField(max_length=100, blank=True,null=True)
|
||||
blurb = models.TextField(blank=True,null=True)
|
||||
|
||||
|
||||
#href = models.CharField(max_length=200)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
user = models.OneToOneField(User, null=True, blank=True)
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "People"
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
def __unicode__(self):
|
||||
if self.last_name:
|
||||
return "%s %s" % (self.first_name, self.last_name)
|
||||
return self.first_name
|
||||
|
||||
|
||||
|
||||
def notability(self):
|
||||
notability = Decimal(0)
|
||||
max_expo_val = 0
|
||||
@@ -153,21 +155,21 @@ class Person(TroggleModel):
|
||||
|
||||
for personexpedition in self.personexpedition_set.all():
|
||||
if not personexpedition.is_guest:
|
||||
print(personexpedition.expedition.year)
|
||||
# print(personexpedition.expedition.year)
|
||||
notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
|
||||
return notability
|
||||
|
||||
def bisnotable(self):
|
||||
return self.notability() > Decimal(1)/Decimal(3)
|
||||
|
||||
|
||||
def surveyedleglength(self):
|
||||
return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])
|
||||
|
||||
|
||||
def first(self):
|
||||
return self.personexpedition_set.order_by('-expedition')[0]
|
||||
def last(self):
|
||||
return self.personexpedition_set.order_by('expedition')[0]
|
||||
|
||||
|
||||
#def Sethref(self):
|
||||
#if self.last_name:
|
||||
#self.href = self.first_name.lower() + "_" + self.last_name.lower()
|
||||
@@ -176,7 +178,7 @@ class Person(TroggleModel):
|
||||
# self.href = self.first_name.lower()
|
||||
#self.orderref = self.first_name
|
||||
#self.notability = 0.0 # set temporarily
|
||||
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
@@ -185,8 +187,8 @@ class PersonExpedition(TroggleModel):
|
||||
expedition = models.ForeignKey(Expedition)
|
||||
person = models.ForeignKey(Person)
|
||||
slugfield = models.SlugField(max_length=50,blank=True,null=True)
|
||||
|
||||
is_guest = models.BooleanField(default=False)
|
||||
|
||||
is_guest = models.BooleanField(default=False)
|
||||
COMMITTEE_CHOICES = (
|
||||
('leader','Expo leader'),
|
||||
('medical','Expo medical officer'),
|
||||
@@ -196,7 +198,7 @@ class PersonExpedition(TroggleModel):
|
||||
)
|
||||
expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
|
||||
nickname = models.CharField(max_length=100,blank=True,null=True)
|
||||
|
||||
|
||||
def GetPersonroles(self):
|
||||
res = [ ]
|
||||
for personrole in self.personrole_set.order_by('survexblock'):
|
||||
@@ -212,8 +214,8 @@ class PersonExpedition(TroggleModel):
|
||||
|
||||
def __unicode__(self):
|
||||
return "%s: (%s)" % (self.person, self.expedition)
|
||||
|
||||
|
||||
|
||||
|
||||
#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
|
||||
def name(self):
|
||||
if self.nickname:
|
||||
@@ -224,11 +226,11 @@ class PersonExpedition(TroggleModel):
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))
|
||||
|
||||
|
||||
def surveyedleglength(self):
|
||||
survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
|
||||
return sum([survexblock.totalleglength for survexblock in set(survexblocks)])
|
||||
|
||||
|
||||
# would prefer to return actual person trips so we could link to first and last ones
|
||||
def day_min(self):
|
||||
res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
|
||||
@@ -240,7 +242,7 @@ class PersonExpedition(TroggleModel):
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
|
||||
|
||||
LOGBOOK_ENTRY_TYPES = (
|
||||
@@ -248,7 +250,7 @@ class LogbookEntry(TroggleModel):
|
||||
("html", "Html style logbook")
|
||||
)
|
||||
|
||||
date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
|
||||
date = models.DateTimeField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
|
||||
expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
|
||||
expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
|
||||
title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
|
||||
@@ -263,7 +265,7 @@ class LogbookEntry(TroggleModel):
|
||||
verbose_name_plural = "Logbook Entries"
|
||||
# several PersonTrips point in to this object
|
||||
ordering = ('-date',)
|
||||
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
|
||||
return CaveSlug.objects.get(slug = self.cave_slug).cave
|
||||
@@ -312,18 +314,18 @@ class LogbookEntry(TroggleModel):
|
||||
#
|
||||
class PersonTrip(TroggleModel):
|
||||
personexpedition = models.ForeignKey("PersonExpedition",null=True)
|
||||
|
||||
|
||||
#expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
|
||||
#date = models.DateField() #MJG wants to KILL THIS (redundant information)
|
||||
time_underground = models.FloatField(help_text="In decimal hours")
|
||||
logbook_entry = models.ForeignKey(LogbookEntry)
|
||||
is_logbook_entry_author = models.BooleanField(default=False)
|
||||
|
||||
|
# sequencing by person (difficult to solve locally)
#persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use function persontrip_next_auto)
#persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS (and use function persontrip_prev_auto)

def persontrip_next(self):
futurePTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
if len(futurePTs) > 0:

@@ -343,7 +345,7 @@ class PersonTrip(TroggleModel):
def __unicode__(self):
return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)

##########################################

@@ -370,19 +372,22 @@ class CaveAndEntrance(models.Model):
cave = models.ForeignKey('Cave')
entrance = models.ForeignKey('Entrance')
entrance_letter = models.CharField(max_length=20,blank=True,null=True)

def __unicode__(self):
return unicode(self.cave) + unicode(self.entrance_letter)

class CaveSlug(models.Model):
cave = models.ForeignKey('Cave')
slug = models.SlugField(max_length=50, unique = True)
primary = models.BooleanField(default=False)

def __unicode__(self):
return self.slug

class Cave(TroggleModel):
# too much here perhaps,
official_name = models.CharField(max_length=160)
area = models.ManyToManyField(Area, blank=True, null=True)
area = models.ManyToManyField(Area, blank=True)
kataster_code = models.CharField(max_length=20,blank=True,null=True)
kataster_number = models.CharField(max_length=10,blank=True, null=True)
unofficial_number = models.CharField(max_length=60,blank=True, null=True)

@@ -406,13 +411,13 @@ class Cave(TroggleModel):
#class Meta:
#    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
# FIXME Kataster Areas and CUCC defined sub areas need separating

#href = models.CharField(max_length=100)

class Meta:
ordering = ('kataster_code', 'unofficial_number')

def hassurvey(self):
if not self.underground_centre_line:

@@ -427,7 +432,7 @@ class Cave(TroggleModel):
if self.survex_file:
return "Yes"
return "Missing"

def slug(self):
primarySlugs = self.caveslug_set.filter(primary = True)
if primarySlugs:

@@ -445,7 +450,7 @@ class Cave(TroggleModel):
return "%s-%s" % (self.kat_area(), self.kataster_number)
else:
return "%s-%s" % (self.kat_area(), self.unofficial_number)

def get_absolute_url(self):
if self.kataster_number:
href = self.kataster_number

@@ -460,7 +465,7 @@ class Cave(TroggleModel):
return unicode(self.slug())

def get_QMs(self):
return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())
return QM.objects.filter(nearest_station__block__cave__caveslug=self.caveslug_set.all())

def new_QM_number(self, year=datetime.date.today().year):
"""Given a cave and the current year, returns the next QM number."""

@@ -474,13 +479,13 @@ class Cave(TroggleModel):
for a in self.area.all():
if a.kat_area():
return a.kat_area()

def entrances(self):
return CaveAndEntrance.objects.filter(cave=self)

def singleentrance(self):
return len(CaveAndEntrance.objects.filter(cave=self)) == 1

def entrancelist(self):
rs = []
res = ""

@@ -508,12 +513,12 @@ class Cave(TroggleModel):
else:
res += "–" + prevR
return res

def writeDataFile(self):
try:
f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
except:
subprocess.call(settings.FIX_PERMISSIONS)
f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
t = loader.get_template('dataformat/cave.xml')
c = Context({'cave': self})

@@ -521,7 +526,7 @@ class Cave(TroggleModel):
u8 = u.encode("utf-8")
f.write(u8)
f.close()

def getArea(self):
areas = self.area.all()
lowestareas = list(areas)

@@ -535,20 +540,22 @@
def getCaveByReference(reference):
areaname, code = reference.split("-", 1)
print(areaname, code)
#print(areaname, code)
area = Area.objects.get(short_name = areaname)
print(area)
#print(area)
foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
print(list(foundCaves))
assert len(foundCaves) == 1
return foundCaves[0]
if len(foundCaves) == 1:
return foundCaves[0]
else:
return False
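
The rewritten lookup above trades the hard assert for a soft failure: an ambiguous or unknown reference now returns False instead of raising. A minimal sketch of how a caller has to treat the result (the reference string is a hypothetical example):

    cave = getCaveByReference("1623-204")  # hypothetical reference
    if cave:
        print(cave.official_name)
    else:
        print("no unique cave matched that reference")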

class OtherCaveName(TroggleModel):
name = models.CharField(max_length=160)
cave = models.ForeignKey(Cave)
def __unicode__(self):
return unicode(self.name)

class EntranceSlug(models.Model):
entrance = models.ForeignKey('Entrance')
slug = models.SlugField(max_length=50, unique = True)

@@ -599,31 +606,35 @@ class Entrance(TroggleModel):
def exact_location(self):
return SurvexStation.objects.lookup(self.exact_station)

def other_location(self):
return SurvexStation.objects.lookup(self.other_station)

def find_location(self):
r = {'': 'To be entered ',
'?': 'To be confirmed:',
'S': '',
'L': 'Lost:',
'R': 'Refindable:'}[self.findability]
if self.tag_station:
try:
s = SurvexStation.objects.lookup(self.tag_station)
s = SurvexStation.objects.filter(name=self.tag_station)[:1]
s = s[0]
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
except:
return r + "%s Tag Station not in dataset" % self.tag_station
if self.exact_station:
try:
s = SurvexStation.objects.lookup(self.exact_station)
s = SurvexStation.objects.filter(name=self.exact_station)[:1]
s = s[0]
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
except:
return r + "%s Exact Station not in dataset" % self.tag_station
if self.other_station:
try:
s = SurvexStation.objects.lookup(self.other_station)
s = SurvexStation.objects.filter(name=self.other_station)[:1]
s = s[0]
return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
except:
return r + "%s Other Station not in dataset" % self.tag_station

@@ -658,28 +669,28 @@ class Entrance(TroggleModel):
for f in self.FINDABLE_CHOICES:
if f[0] == self.findability:
return f[1]

def tag(self):
return SurvexStation.objects.lookup(self.tag_station)

def needs_surface_work(self):
return self.findability != "S" or not self.has_photo or self.marking != "T"

def get_absolute_url(self):
ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
if ancestor_titles:
res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
else:
res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
return res

def slug(self):
if not self.cached_primary_slug:
primarySlugs = self.entranceslug_set.filter(primary = True)
if primarySlugs:
self.cached_primary_slug = primarySlugs[0].slug
self.save()
else:

@@ -693,7 +704,7 @@ class Entrance(TroggleModel):
try:
f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
except:
subprocess.call(settings.FIX_PERMISSIONS)
f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
t = loader.get_template('dataformat/entrance.xml')
c = Context({'entrance': self})

@@ -706,19 +717,19 @@ class CaveDescription(TroggleModel):
short_name = models.CharField(max_length=50, unique = True)
long_name = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True,null=True)
linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
linked_qms = models.ManyToManyField("QM", blank=True,null=True)
linked_subcaves = models.ManyToManyField("NewSubCave", blank=True)
linked_entrances = models.ManyToManyField("Entrance", blank=True)
linked_qms = models.ManyToManyField("QM", blank=True)

def __unicode__(self):
if self.long_name:
return unicode(self.long_name)
else:
return unicode(self.short_name)

def get_absolute_url(self):
return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

def save(self):
"""
Overridden save method which stores wikilinks in text as links in database.

@@ -735,20 +746,20 @@ class NewSubCave(TroggleModel):
return unicode(self.name)

class QM(TroggleModel):
#based on qm.csv in trunk/expoweb/1623/204 which has the fields:
#"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
# based on qm.csv in trunk/expoweb/1623/204 which has the fields:
# "Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
#cave = models.ForeignKey(Cave)
#expedition = models.ForeignKey(Expedition)
# cave = models.ForeignKey(Cave)
# expedition = models.ForeignKey(Expedition)

number = models.IntegerField(help_text="this is the sequential number in the year", )
GRADE_CHOICES=(
('A', 'A: Large obvious lead'),
('B', 'B: Average lead'),
('C', 'C: Tight unpromising lead'),
('D', 'D: Dig'),
('X', 'X: Unclimbable aven')
)
grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
location_description = models.TextField(blank=True)

@@ -763,11 +774,19 @@ class QM(TroggleModel):
return u"%s %s" % (self.code(), self.grade)

def code(self):
return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)
if self.found_by:
# Old style QMs where found_by is a logbook entry
return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)
elif self.nearest_station:
# New style QMs where QMs are stored in SVX files and nearest station is a foreign key
return u"%s-%s-%s" % (self.nearest_station.block.name, self.nearest_station.name, self.number)
else:
# Just give up!!
return u"%s" % (self.number)

def get_absolute_url(self):
#return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))
return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'qm_id':self.id}))

def get_next_by_id(self):
return QM.objects.get(id=self.id+1)
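
The reworked code() falls through three cases: a logbook-entry reference (old-style QMs), a survex nearest-station reference (new-style), and a bare number as the last resort. A hedged sketch of what each branch produces (all example values hypothetical):

    qm = QM(number=12, grade='B')
    qm.code()  # neither reference set -> u"12"

    # with found_by set: cave-year-number, e.g. u"204-2004-12" (old style)
    # with nearest_station set: block-station-number, e.g. u"gloomy-7-12" (new style)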

@@ -779,10 +798,10 @@ class QM(TroggleModel):
return u"%s%s%s" % ('[[QM:',self.code(),']]')

photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)

class DPhoto(TroggleImageModel):
caption = models.CharField(max_length=1000,blank=True,null=True)
contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
contains_person = models.ManyToManyField(Person,blank=True,null=True)
contains_person = models.ManyToManyField(Person,blank=True)
file = models.ImageField(storage=photoFileStorage, upload_to='.',)
is_mugshot = models.BooleanField(default=False)
contains_cave = models.ForeignKey(Cave,blank=True,null=True)

@@ -791,12 +810,12 @@ class DPhoto(TroggleImageModel):
nearest_QM = models.ForeignKey(QM,blank=True,null=True)
lon_utm = models.FloatField(blank=True,null=True)
lat_utm = models.FloatField(blank=True,null=True)

class IKOptions:
spec_module = 'core.imagekit_specs'
cache_dir = 'thumbs'
image_field = 'file'

#content_type = models.ForeignKey(ContentType)
#object_id = models.PositiveIntegerField()
#location = generic.GenericForeignKey('content_type', 'object_id')

@@ -813,7 +832,7 @@ def get_scan_path(instance, filename):
number=str(instance.survey.wallet_letter) + number #two strings formatting because convention is 2009#01 or 2009#X01
return os.path.join('./',year,year+r'#'+number,str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')
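
A worked example of the wallet path convention that comment describes (all values hypothetical): for year "2009", wallet_letter "X", number "01", contents "notes" and number_in_wallet 3, the pieces join up as

    os.path.join('./', '2009', '2009#X01', 'notes3.jpg')
    # -> './2009/2009#X01/notes3.jpg'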

class ScannedImage(TroggleImageModel):
file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
scanned_by = models.ForeignKey(Person,blank=True, null=True)
scanned_on = models.DateField(null=True)

@@ -856,8 +875,9 @@ class Survey(TroggleModel):
integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)

def __unicode__(self):
return self.expedition.year+"#"+"%02d" % int(self.wallet_number)
return self.expedition.year+"#" + "%s%02d" % (self.wallet_letter, int(self.wallet_number))

def notes(self):
return self.scannedimage_set.filter(contents='notes')

@@ -9,7 +9,7 @@ from django.core.urlresolvers import reverse
###########################################################
# These will allow browsing and editing of the survex data
###########################################################
# Needs to add:
#   Equates
#   reloading

@@ -18,29 +18,37 @@ class SurvexDirectory(models.Model):
cave = models.ForeignKey('Cave', blank=True, null=True)
primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
# could also include files in directory but not referenced

def __unicode__(self):
return self.path

class Meta:
ordering = ('id',)
ordering = ('path',)

class SurvexFile(models.Model):
path = models.CharField(max_length=200)
survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True)
cave = models.ForeignKey('Cave', blank=True, null=True)

class Meta:
ordering = ('id',)

def __unicode__(self):
return self.path + '.svx' or 'no file'

def exists(self):
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
return os.path.isfile(fname)

def OpenFile(self):
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
return open(fname)

def SetDirectory(self):
dirpath = os.path.split(self.path)[0]
survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
# if self.cave is '' or self.cave is None:
#     print('No cave set for survex dir %s' % self.path)
if survexdirectorylist:
self.survexdirectory = survexdirectorylist[0]
else:

@@ -59,14 +67,20 @@ class SurvexStationLookUpManager(models.Manager):
name__iexact = stationname)

class SurvexStation(models.Model):
name = models.CharField(max_length=100)
block = models.ForeignKey('SurvexBlock')
equate = models.ForeignKey('SurvexEquate', blank=True, null=True)
objects = SurvexStationLookUpManager()
x = models.FloatField(blank=True, null=True)
y = models.FloatField(blank=True, null=True)
z = models.FloatField(blank=True, null=True)

def __unicode__(self):
if self.block.cave:
# If we haven't got a cave we can't have a slug, saves a nonetype return
return self.block.cave.slug() + '/' + self.block.name + '/' + self.name or 'No station name'
else:
return str(self.block.cave) + '/' + self.block.name + '/' + self.name or 'No station name'

def path(self):
r = self.name
b = self.block

@@ -89,8 +103,8 @@ class SurvexLeg(models.Model):
#
# Single SurvexBlock
#
class SurvexBlockLookUpManager(models.Manager):
def lookup(self, name):
if name == "":

@@ -108,20 +122,20 @@ class SurvexBlock(models.Model):
parent = models.ForeignKey('SurvexBlock', blank=True, null=True)
text = models.TextField()
cave = models.ForeignKey('Cave', blank=True, null=True)

date = models.DateField(blank=True, null=True)
date = models.DateTimeField(blank=True, null=True)
expeditionday = models.ForeignKey("ExpeditionDay", null=True)
expedition = models.ForeignKey('Expedition', blank=True, null=True)

survexfile = models.ForeignKey("SurvexFile", blank=True, null=True)
begin_char = models.IntegerField() # code for where in the survex data files this block sits
survexpath = models.CharField(max_length=200) # the path for the survex stations

survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)
#refscandir = models.CharField(max_length=100)

totalleglength = models.FloatField()

class Meta:
ordering = ('id',)

@@ -130,7 +144,7 @@ class SurvexBlock(models.Model):
def __unicode__(self):
return self.name and unicode(self.name) or 'no name'

def GetPersonroles(self):
res = [ ]
for personrole in self.personrole_set.order_by('personexpedition'):

@@ -149,10 +163,10 @@ class SurvexBlock(models.Model):
ss = SurvexStation(name=name, block=self)
ss.save()
return ss

def DayIndex(self):
return list(self.expeditionday.survexblock_set.all()).index(self)

class SurvexTitle(models.Model):
survexblock = models.ForeignKey('SurvexBlock')

@@ -177,39 +191,45 @@ ROLE_CHOICES = (
class SurvexPersonRole(models.Model):
survexblock = models.ForeignKey('SurvexBlock')
nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
# increasing levels of precision
personname = models.CharField(max_length=100)
person = models.ForeignKey('Person', blank=True, null=True)
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
persontrip = models.ForeignKey('PersonTrip', blank=True, null=True)
expeditionday = models.ForeignKey("ExpeditionDay", null=True)

def __unicode__(self):
return unicode(self.person) + " - " + unicode(self.survexblock) + " - " + unicode(self.nrole)

class SurvexScansFolder(models.Model):
fpath = models.CharField(max_length=200)
walletname = models.CharField(max_length=200)

class Meta:
ordering = ('walletname',)

def __unicode__(self):
return self.walletname or 'no wallet'

def get_absolute_url(self):
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))

class SurvexScanSingle(models.Model):
ffile = models.CharField(max_length=200)
name = models.CharField(max_length=200)
survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)

class Meta:
ordering = ('name',)

def __unicode__(self):
return self.survexscansfolder.walletname + '/' + self.name

def get_absolute_url(self):
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))

class TunnelFile(models.Model):
tunnelpath = models.CharField(max_length=200)
tunnelname = models.CharField(max_length=200)

@@ -221,8 +241,8 @@ class TunnelFile(models.Model):
filesize = models.IntegerField(default=0)
npaths = models.IntegerField(default=0)
survextitles = models.ManyToManyField("SurvexTitle")

class Meta:
ordering = ('tunnelpath',)

@@ -1,6 +1,6 @@
from django.conf import settings
import fileAbstraction
from django.shortcuts import render_to_response
from django.shortcuts import render
from django.http import HttpResponse, Http404
import os, stat
import re
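
This import swap is the recurring theme of the whole changeset: render_to_response was eventually deprecated (and later removed) in favour of render(request, template, context), which also runs the context processors against the request. A minimal sketch of the migration pattern applied throughout these views (view and template names hypothetical):

    from django.shortcuts import render

    def someview(request):
        # old: return render_to_response('page.html', {'k': v})
        return render(request, 'page.html', {'k': v})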

@@ -8,7 +8,7 @@ from troggle.core.models import SurvexScansFolder, SurvexScanSingle, SurvexBlock
import parsers.surveys
import urllib

# inline fileabstraction into here if it's not going to be useful anywhere else
# keep things simple and ignore exceptions everywhere for now

@@ -33,7 +33,7 @@ def upload(request, path):
def download(request, path):
#try:
return HttpResponse(fileAbstraction.readFile(path), content_type=getMimeType(path.split(".")[-1]))
#except:
#    raise Http404

@@ -49,32 +49,32 @@ extmimetypes = {".txt": "text/plain",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
}

# dead
def jgtfile(request, f):
fp = os.path.join(settings.SURVEYS, f)
# could also surf through SURVEX_DATA

# directory listing
if os.path.isdir(fp):
listdirfiles = [ ]
listdirdirs = [ ]

for lf in sorted(os.listdir(fp)):
hpath = os.path.join(f, lf)  # not absolute path
if lf[0] == "." or lf[-1] == "~":
continue

hpath = hpath.replace("\\", "/")  # for windows users
href = hpath.replace("#", "%23")  # '#' in file name annoyance

flf = os.path.join(fp, lf)
if os.path.isdir(flf):
nfiles = len([sf for sf in os.listdir(flf) if sf[0] != "."])
listdirdirs.append((href, hpath + "/", nfiles))
else:
listdirfiles.append((href, hpath, os.path.getsize(flf)))

upperdirs = [ ]
lf = f
while lf:

@@ -85,9 +85,9 @@ def jgtfile(request, f):
lf = os.path.split(lf)[0]
upperdirs.append((href, hpath))
upperdirs.append(("", "/"))

return render_to_response('listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})
return render(request, 'listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})

# flat output of file when loaded
if os.path.isfile(fp):
ext = os.path.splitext(fp)[1].lower()

@@ -123,16 +123,16 @@ def SaveImageInDir(name, imgdir, project, fdata, bbinary):
print "*** Making directory", fprojdir
os.path.mkdir(fprojdir)
print "hhh"

fname = os.path.join(fprojdir, name)
print fname, "fff"
fname = UniqueFile(fname)

p2, p1 = os.path.split(fname)
p3, p2 = os.path.split(p2)
p4, p3 = os.path.split(p3)
res = os.path.join(p3, p2, p1)

print "saving file", fname
fout = open(fname, (bbinary and "wb" or "w"))
fout.write(fdata.read())

@@ -163,73 +163,73 @@ def jgtuploadfile(request):
#print ("FFF", request.FILES.values())
message = ""
print "gothere"
return render_to_response('fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})
return render(request, 'fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})

def surveyscansfolder(request, path):
#print [ s.walletname for s in SurvexScansFolder.objects.all() ]
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

def surveyscansingle(request, path, file):
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
#return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
#return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

def surveyscansfolders(request):
survexscansfolders = SurvexScansFolder.objects.all()
return render_to_response('survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })
return render(request, 'survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })

def tunneldata(request):
tunnelfiles = TunnelFile.objects.all()
return render_to_response('tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })
return render(request, 'tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })

def tunnelfile(request, path):
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
return HttpResponse(content=open(tfile), content_type="text/plain")

def tunnelfileupload(request, path):
tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)

project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
print (project, user, tunnelversion)

assert len(request.FILES.values()) == 1, "only one file to upload"

uploadedfile = request.FILES.values()[0]

if uploadedfile.field_name != "sketch":
return HttpResponse(content="Error: non-sketch file uploaded", content_type="text/plain")
if uploadedfile.content_type != "text/plain":
return HttpResponse(content="Error: non-plain content type", content_type="text/plain")

# could use this to add new files
if os.path.split(path)[1] != uploadedfile.name:
return HttpResponse(content="Error: name disagrees", content_type="text/plain")

orgsize = tunnelfile.filesize  # = os.stat(tfile)[stat.ST_SIZE]

ttext = uploadedfile.read()

# could check that the user and projects agree here

fout = open(tfile, "w")
fout.write(ttext)
fout.close()

# redo its settings of
parsers.surveys.SetTunnelfileInfo(tunnelfile)
tunnelfile.save()

uploadedfile.close()
message = "File size %d overwritten with size %d" % (orgsize, tunnelfile.filesize)
return HttpResponse(content=message, content_type="text/plain")

@@ -29,7 +29,7 @@ def getCave(cave_id):
return cave

def pad5(x):
return "0" * (5 - len(x.group(0))) + x.group(0)
def padnumber(x):
return re.sub("\d+", pad5, x)
def numericalcmp(x, y):

@@ -37,7 +37,7 @@ def numericalcmp(x, y):
def caveCmp(x, y):
if x.kataster_number:
if y.kataster_number:
return numericalcmp(x.kataster_number, y.kataster_number) # Note that cave kataster numbers are not generally integers.

@@ -46,11 +46,11 @@ def caveCmp(x, y):
else:
if y.kataster_number:
return 1
else:
return numericalcmp(x.unofficial_number, y.unofficial_number)
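
pad5 and padnumber implement a simple natural sort: every run of digits is zero-padded to five characters, after which plain string comparison orders numerically. A quick illustration of the same trick with a key function (equivalent in effect to the cmp-based numericalcmp; the values are hypothetical kataster numbers):

    padnumber("40b")   # -> '00040b'
    sorted(["9", "40b", "115"], key=padnumber)
    # -> ['9', '40b', '115'], not the lexicographic ['115', '40b', '9']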

def caveindex(request):
caves = Cave.objects.all()
#caves = Cave.objects.all()
notablecavehrefs = settings.NOTABLECAVESHREFS
notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs ]
caves1623 = list(Cave.objects.filter(area__short_name = "1623"))

@@ -62,7 +62,6 @@ def caveindex(request):
def millenialcaves(request):
#RW messing around area
return HttpResponse("Test text", content_type="text/plain")

def cave3d(request, cave_id=''):

@@ -106,6 +105,7 @@ def caveQMs(request, slug):
return render(request,'nonpublic.html', {'instance': cave})
else:
return render(request,'cave_qms.html', {'cave': cave})

def caveLogbook(request, slug):
cave = Cave.objects.get(caveslug__slug = slug)
if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():

@@ -153,14 +153,14 @@ def edit_cave(request, slug=None):
ceinst.cave = cave
ceinst.save()
cave.writeDataFile()
return HttpResponseRedirect("/" + cave.url)
else:
form = CaveForm(instance=cave)
ceFormSet = CaveAndEntranceFormSet(queryset=cave.caveandentrance_set.all())
versionControlForm = VersionControlCommentForm()

return render(request,
'editcave2.html',
{'form': form,
'caveAndEntranceFormSet': ceFormSet,
'versionControlForm': versionControlForm

@@ -168,7 +168,7 @@ def edit_cave(request, slug=None):
@login_required_if_public
def editEntrance(request, caveslug, slug=None):
cave = Cave.objects.get(caveslug__slug = caveslug)
if slug is not None:
entrance = Entrance.objects.get(entranceslug__slug = slug)
else:

@@ -195,7 +195,7 @@ def editEntrance(request, caveslug, slug=None):
el.entrance = entrance
el.save()
entrance.writeDataFile()
return HttpResponseRedirect("/" + cave.url)
else:
form = EntranceForm(instance = entrance)
versionControlForm = VersionControlCommentForm()

@@ -203,17 +203,16 @@ def editEntrance(request, caveslug, slug=None):
entletter = EntranceLetterForm(request.POST)
else:
entletter = None
return render(request,
'editentrance.html',
{'form': form,
'versionControlForm': versionControlForm,
'entletter': entletter
})

def qm(request,cave_id,qm_id,year,grade=None):
year=int(year)
def qm(request,qm_id):
try:
qm=getCave(cave_id).get_QMs().get(number=qm_id,found_by__date__year=year)
qm=QM.objects.get(id=qm_id)
return render(request,'qm.html',locals())

except QM.DoesNotExist:

@@ -221,9 +220,8 @@ def qm(request,cave_id,qm_id,year,grade=None):
if grade:
url += r'&grade=' + grade
return HttpResponseRedirect(url)
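
The qm view's signature shrinks from (cave_id, qm_id, year, grade) to the primary key alone, matching the get_absolute_url() change on the QM model earlier in this changeset. Resolving the URL then needs only the id; a hedged sketch, assuming a URL pattern named 'qm' that takes qm_id:

    from django.core.urlresolvers import reverse

    url = reverse('qm', kwargs={'qm_id': qm.id})
    # the exact path depends on urls.py, e.g. '/qm/123'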

def ent(request, cave_id, ent_letter):
cave = Cave.objects.filter(kataster_number = cave_id)[0]
cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]

@@ -240,7 +238,7 @@ def entranceSlug(request, slug):
def survexblock(request, survexpath):
survexpath = re.sub("/", ".", survexpath)
print "jjjjjj", survexpath
print("jjjjjj", survexpath)
survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
#ftext = survexblock.filecontents()
ftext = survexblock.text

@@ -255,7 +253,7 @@ def survey(request,year,wallet_number):
surveys=Survey.objects.all()
expeditions=Expedition.objects.order_by("-year")
current_expedition=Expedition.objects.filter(year=year)[0]

if wallet_number!='':
current_survey=Survey.objects.filter(expedition=current_expedition,wallet_number=wallet_number)[0]
notes=current_survey.scannedimage_set.filter(contents='notes')

@@ -277,30 +275,30 @@ def get_qms(request, caveslug):
return render(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})

areanames = [
#('', 'Location unclear'),
('1a', '1a – Plateau: around Top Camp'),
('1b', '1b – Western plateau near 182'),
('1c', '1c – Eastern plateau near 204 walk-in path'),
('1d', '1d – Further plateau around 76'),
('2a', '2a – Southern Schwarzmooskogel near 201 path and the Nipple'),
('2b', '2b – Eishöhle area'),
('2b or 4 (unclear)', '2b or 4 (unclear)'),
('2c', '2c – Kaninchenhöhle area'),
('2d', '2d – Steinbrückenhöhle area'),
('3', '3 – Bräuning Alm'),
('4', '4 – Kratzer valley'),
('5', '5 – Schwarzmoos-Wildensee'),
('6', '6 – Far plateau'),
('1626 or 6 (borderline)', '1626 or 6 (borderline)'),
('7', '7 – Egglgrube'),
('8a', '8a – Loser south face'),
('8b', '8b – Loser below Dimmelwand'),
('8c', '8c – Augst See'),
('8d', '8d – Loser-Hochganger ridge'),
('9', '9 – Gschwandt Alm'),
('10', '10 – Altaussee'),
('11', '11 – Augstbach')
]

def prospecting(request):

@@ -313,26 +311,26 @@ def prospecting(request):
caves.sort(caveCmp)
areas.append((name, a, caves))
return render(request, 'prospecting.html', {"areas": areas})

# Parameters for big map and zoomed subarea maps:
# big map first (zoom factor ignored)
maps = {
#  id      left      top       right     bottom    zoom
#          G&K       G&K       G&K       G&K       factor
"all":  [33810.4, 85436.5, 38192.0, 81048.2, 0.35, "All"],
"40":   [36275.6, 82392.5, 36780.3, 81800.0, 3.0, "Eishöhle"],
"76":   [35440.0, 83220.0, 36090.0, 82670.0, 1.3, "Eislufthöhle"],
"204":  [36354.1, 84154.5, 37047.4, 83300, 3.0, "Steinbrückenhöhle"],
"tc":   [35230.0, 82690.0, 36110.0, 82100.0, 3.0, "Near Top Camp"],
"grieß": [36000.0, 86300.0, 38320.0, 84400.0, 4.0, "Grießkogel Area"],
}

for n in maps.keys():

@@ -343,7 +341,7 @@ for n in maps.keys():
for j in range(2):
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
# Keys in the order in which we want the maps output
mapcodes = ["all", "grieß", "40", "76", "204", "tc"]
# Field codes
L = 0
T = 1

@@ -353,54 +351,54 @@ ZOOM = 4
DESC = 5

areacolours = {
'1a' : '#00ffff',
'1b' : '#ff00ff',
'1c' : '#ffff00',
'1d' : '#ffffff',
'2a' : '#ff0000',
'2b' : '#00ff00',
'2c' : '#008800',
'2d' : '#ff9900',
'3'  : '#880000',
'4'  : '#0000ff',
'6'  : '#000000', # doubles for surface fixed pts, and anything else
'7'  : '#808080'
}

for FONT in [
"/usr/share/fonts/truetype/freefont/FreeSans.ttf",
"/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
"C:\WINNT\Fonts\ARIAL.TTF"
]:
if os.path.isfile(FONT): break
TEXTSIZE = 16
CIRCLESIZE = 8
LINEWIDTH = 2
myFont = ImageFont.truetype(FONT, TEXTSIZE)

def mungecoord(x, y, mapcode, img):
# Top of Zinken is 73 1201 = dataset 34542 81967
# Top of Hinter is 1073 562 = dataset 36670 83317
# image is 1417 by 2201
# FACTOR1 = 1000.0 / (36670.0-34542.0)
# FACTOR2 = (1201.0-562.0) / (83317 - 81967)
# FACTOR = (FACTOR1 + FACTOR2)/2
# The factors aren't the same as the scanned map's at a slight angle. I
# can't be bothered to fix this. Since we zero on the Hinter it makes
# very little difference for caves in the areas round 76 or 204.
# xoffset = (x - 36670)*FACTOR
# yoffset = (y - 83317)*FACTOR
# return (1073 + xoffset, 562 - yoffset)
m = maps[mapcode]
factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
return ((x - m[L]) * factorX, (m[T] - y) * factorY)
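
mungecoord is a plain linear map from G&K dataset coordinates to image pixels. A worked example against the "all" bounds in the maps table above (left=33810.4, top=85436.5, right=38192.0, bottom=81048.2), using the Hinterer trig point from the comments:

    # x = 36670, y = 83317
    # px = (36670 - 33810.4) / (38192.0 - 33810.4) * width   # ~0.65 * width
    # py = (85436.5 - 83317) / (85436.5 - 81048.2) * height  # ~0.48 * height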

COL_TYPES = {True: "red",
False: "#dddddd",
"Reference": "#dddddd"}

def plot(surveypoint, number, point_type, label, mapcode, draw, img):
try:
ss = SurvexStation.objects.lookup(surveypoint)

@@ -422,40 +420,40 @@ def prospecting_image(request, name):
m = maps[name]
#imgmaps = []
if name == "all":
img = mainImage
else:
M = maps['all']
W, H = mainImage.size
l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
img = mainImage.crop((l, t, r, b))
w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
img = img.resize((w, h), Image.BICUBIC)
draw = ImageDraw.Draw(img)
draw.setfont(myFont)
if name == "all":
for maparea in maps.keys():
if maparea == "all":
continue
localm = maps[maparea]
l,t = mungecoord(localm[L], localm[T], "all", img)
r,b = mungecoord(localm[R], localm[B], "all", img)
text = maparea + " map"
textlen = draw.textsize(text)[0] + 3
draw.rectangle([l, t, l+textlen, t+TEXTSIZE+2], fill='#ffffff')
draw.text((l+2, t+1), text, fill="#000000")
#imgmaps.append( [l, t, l+textlen, t+SIZE+2, "submap" + maparea, maparea + " subarea map"] )
draw.line([l, t, r, t], fill='#777777', width=LINEWIDTH)
draw.line([l, b, r, b], fill='#777777', width=LINEWIDTH)
draw.line([l, t, l, b], fill='#777777', width=LINEWIDTH)
draw.line([r, t, r, b], fill='#777777', width=LINEWIDTH)
draw.line([l, t, l+textlen, t], fill='#777777', width=LINEWIDTH)
draw.line([l, t+TEXTSIZE+2, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
draw.line([l, t, l, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
draw.line([l+textlen, t, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
#imgmaps[maparea] = []
# Draw scale bar
m100 = int(100 / (m[R] - m[L]) * img.size[0])

@@ -467,7 +465,7 @@ def prospecting_image(request, name):
plot("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point", name, draw, img)
plot("226-96", "BZkn", "Reference", "Bräuning Zinken trig point", name, draw, img)
plot("vd1","VD1","Reference", "VD1 survey point", name, draw, img)
plot("laser.kt114_96","HSK","Reference", "Hinterer Schwarzmooskogel trig point", name, draw, img)
plot("2000","Nipple","Reference", "Nipple (Weiße Warze)", name, draw, img)
plot("3000","VSK","Reference", "Vorderer Schwarzmooskogel summit", name, draw, img)
plot("topcamp", "TC", "Reference", "Top Camp", name, draw, img)

@@ -477,14 +475,15 @@ def prospecting_image(request, name):
plot("laser.0_5", "LSR5", "Reference", "Laser Point 0/5", name, draw, img)
plot("225-96", "BAlm", "Reference", "Bräuning Alm trig point", name, draw, img)
for entrance in Entrance.objects.all():
station = entrance.best_station()
if station:
#try:
areaName = entrance.caveandentrance_set.all()[0].cave.getArea().short_name
plot(station, "%s-%s" % (areaName, str(entrance)[5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
#except:
#    pass

for (N, E, D, num) in [(35975.37, 83018.21, 100,"177"), # Calculated from bearings
(35350.00, 81630.00, 50, "71"), # From Auer map
(36025.00, 82475.00, 50, "146"), # From mystery map

@@ -508,8 +507,8 @@ def prospecting_image(request, name):
del draw
img.save(response, "PNG")
return response

STATIONS = {}
poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
def LoadPos():
call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])

@@ -517,7 +516,7 @@ def LoadPos():
posfile = open("%sall.pos" % settings.SURVEX_DATA)
posfile.readline()#Drop header
for line in posfile.readlines():
r = poslineregex.match(line)
if r:
x, y, z, name = r.groups()
STATIONS[name] = (x, y, z)
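
LoadPos runs cavern over the whole dataset and then scrapes the .pos listing with that regex; each line of survex's .pos output pairs a coordinate triple with a station name. A small sketch of the parse on a single line (the station and values are hypothetical):

    line = "(36670.00, 83317.00, 1903.00) 1623.p204.1"
    r = poslineregex.match(line)
    if r:
        # all four groups come back as strings:
        # ('36670.00', '83317.00', '1903.00', '1623.p204.1')
        x, y, z, name = r.groups()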

@@ -37,7 +37,7 @@ def getNotablePersons():
for person in Person.objects.all():
if person.bisnotable():
notablepersons.append(person)
return notablepersons

def personindex(request):

@@ -48,7 +48,7 @@ def personindex(request):
nc = (len(persons) + ncols - 1) / ncols
for i in range(ncols):
personss.append(persons[i * nc: (i + 1) * nc])

notablepersons = []
for person in Person.objects.all():
if person.bisnotable():

@@ -67,16 +67,20 @@ def expedition(request, expeditionname):
for personexpedition in this_expedition.personexpedition_set.all():
prow = [ ]
for date in dates:
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition, logbook_entry__date=date) }
pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition, date = date))
pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition, date=date))
prow.append(pcell)
personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})

if "reload" in request.GET:
LoadLogbookForExpedition(this_expedition)
return render(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'settings':settings, 'dateditems': dateditems })
return render(request,'expedition.html', {'this_expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'settings':settings, 'dateditems': dateditems })

def get_absolute_url(self):
return ('expedition', (expedition.year))

@@ -93,14 +97,14 @@ class ExpeditionListView(ListView):

def person(request, first_name='', last_name='', ):
this_person = Person.objects.get(first_name = first_name, last_name = last_name)

# This is for removing the reference to the user's profile, in case they set it to the wrong person
if request.method == 'GET':
if request.GET.get('clear_profile')=='True':
this_person.user=None
this_person.save()
return HttpResponseRedirect(reverse('profiles_select_profile'))

return render(request,'person.html', {'person': this_person, })

@@ -113,19 +117,19 @@ def GetPersonChronology(personexpedition):
for personrole in personexpedition.survexpersonrole_set.all():
a = res.setdefault(personrole.survexblock.date, { })
a.setdefault("personroles", [ ]).append(personrole.survexblock)

# build up the tables
rdates = res.keys()
rdates.sort()

res2 = [ ]
for rdate in rdates:
persontrips = res[rdate].get("persontrips", [])
personroles = res[rdate].get("personroles", [])
for n in range(max(len(persontrips), len(personroles))):
res2.append(((n == 0 and rdate or "--"), (n < len(persontrips) and persontrips[n]), (n < len(personroles) and personroles[n])))

return res2
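
The inner loop pads the shorter of the two per-date lists so every row has both a trip column and a role column. The same shape can be had from the standard library; a sketch using Python 2's izip_longest (which fills with None, where the original yields False):

    from itertools import izip_longest

    for n, (trip, role) in enumerate(izip_longest(persontrips, personroles)):
        row = ((n == 0 and rdate or "--"), trip, role)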
|
||||
|
||||
|
||||
@@ -176,7 +180,7 @@ def experimental(request):
|
||||
survexleglength += survexblock.totalleglength
|
||||
legsbyexpo.append((expedition, {"nsurvexlegs":len(survexlegs), "survexleglength":survexleglength}))
|
||||
legsbyexpo.reverse()
|
||||
|
||||
|
||||
survexlegs = models.SurvexLeg.objects.all()
|
||||
totalsurvexlength = sum([survexleg.tape for survexleg in survexlegs])
|
||||
return render(request, 'experimental.html', { "nsurvexlegs":len(survexlegs), "totalsurvexlength":totalsurvexlength, "legsbyexpo":legsbyexpo })
|
||||
@@ -194,11 +198,11 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
|
||||
personTripFormSet = PersonTripFormSet(request.POST)
|
||||
if tripForm.is_valid() and personTripFormSet.is_valid(): # All validation rules pass
|
||||
dateStr = tripForm.cleaned_data["date"].strftime("%Y-%m-%d")
|
||||
directory = os.path.join(settings.EXPOWEB,
|
||||
"years",
|
||||
expedition.year,
|
||||
directory = os.path.join(settings.EXPOWEB,
|
||||
"years",
|
||||
expedition.year,
|
||||
"autologbook")
|
||||
filename = os.path.join(directory,
|
||||
filename = os.path.join(directory,
|
||||
dateStr + "." + slugify(tripForm.cleaned_data["title"])[:50] + ".html")
|
||||
if not os.path.isdir(directory):
|
||||
os.mkdir(directory)
|
||||
@@ -206,7 +210,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
|
||||
delLogbookEntry(previouslbe)
|
||||
f = open(filename, "w")
|
||||
template = loader.get_template('dataformat/logbookentry.html')
|
||||
context = Context({'trip': tripForm.cleaned_data,
|
||||
context = Context({'trip': tripForm.cleaned_data,
|
||||
'persons': personTripFormSet.cleaned_data,
|
||||
'date': dateStr,
|
||||
'expeditionyear': expeditionyear})
|
||||
@@ -230,11 +234,11 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
|
||||
"location": previouslbe.place,
|
||||
"caveOrLocation": "location",
|
||||
"html": previouslbe.text})
|
||||
personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
|
||||
"TU": py.time_underground,
|
||||
personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
|
||||
"TU": py.time_underground,
|
||||
"author": py.is_logbook_entry_author}
|
||||
for py in previouslbe.persontrip_set.all()])
|
||||
else:
|
||||
else:
|
||||
tripForm = TripForm() # An unbound form
|
||||
personTripFormSet = PersonTripFormSet()
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ def frontpage(request):
|
||||
return render(request,'frontpage.html', locals())
|
||||
|
||||
def todo(request):
|
||||
message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
|
||||
message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
|
||||
if "reloadexpos" in request.GET:
|
||||
message = LoadPersonsExpos()
|
||||
message = "Reloaded personexpos"
|
||||
@@ -52,7 +52,7 @@ def controlPanel(request):
|
||||
jobs_completed=[]
|
||||
if request.method=='POST':
|
||||
if request.user.is_superuser:
|
||||
|
||||
|
||||
#importlist is mostly here so that things happen in the correct order.
|
||||
#http post data seems to come in an unpredictable order, so we do it this way.
|
||||
importlist=['reload_db', 'import_people', 'import_cavetab', 'import_logbooks', 'import_surveys', 'import_QMs']
|
||||
@@ -85,7 +85,7 @@ def downloadSurveys(request):
|
||||
return response
|
||||
|
||||
def downloadLogbook(request,year=None,extension=None,queryset=None):
|
||||
|
||||
|
||||
if year:
|
||||
current_expedition=Expedition.objects.get(year=year)
|
||||
logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)
|
||||
@@ -96,7 +96,7 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
|
||||
else:
|
||||
response = HttpResponse(content_type='text/plain')
|
||||
return response(r"Error: Logbook downloader doesn't know what year you want")
|
||||
|
||||
|
||||
if 'year' in request.GET:
|
||||
year=request.GET['year']
|
||||
if 'extension' in request.GET:
|
||||
@@ -108,14 +108,14 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
|
||||
elif extension == 'html':
|
||||
response = HttpResponse(content_type='text/html')
|
||||
style='2005'
|
||||
|
||||
|
||||
template='logbook'+style+'style.'+extension
|
||||
response['Content-Disposition'] = 'attachment; filename='+filename+'.'+extension
|
||||
response['Content-Disposition'] = 'attachment; filename='+filename+'.'+extension
|
||||
t=loader.get_template(template)
|
||||
c=Context({'logbook_entries':logbook_entries})
|
||||
response.write(t.render(c))
|
||||
return response
|
||||
|
||||
|
||||
|
||||
def downloadQMs(request):
|
||||
# Note to self: use get_cave method for the below
|
||||
@@ -131,14 +131,14 @@ def downloadQMs(request):
|
||||
response['Content-Disposition'] = 'attachment; filename=qm.csv'
|
||||
toqms.writeQmTable(response,cave)
|
||||
return response
|
||||
|
||||
|
||||
def ajax_test(request):
|
||||
post_text = request.POST['post_data']
|
||||
return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
|
||||
return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
|
||||
content_type="application/json")
|
||||
|
||||
|
||||
def eyecandy(request):
|
||||
return
|
||||
return
|
||||
|
||||
def ajax_QM_number(request):
|
||||
if request.method=='POST':
|
||||
@@ -158,14 +158,14 @@ def logbook_entry_suggestions(request):
|
||||
unwiki_QM_pattern=r"(?P<whole>(?P<explorer_code>[ABC]?)(?P<cave>\d*)-?(?P<year>\d\d\d?\d?)-(?P<number>\d\d)(?P<grade>[ABCDXV]?))"
|
||||
unwiki_QM_pattern=re.compile(unwiki_QM_pattern)
|
||||
#wikilink_QM_pattern=settings.QM_PATTERN
|
||||
|
||||
|
||||
slug=request.POST['slug']
|
||||
date=request.POST['date']
|
||||
lbo=LogbookEntry.objects.get(slug=slug, date=date)
|
||||
|
||||
|
||||
#unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
|
||||
unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]
|
||||
|
||||
|
||||
print(unwiki_QMs)
|
||||
for qm in unwiki_QMs:
|
||||
#try:
|
||||
@@ -180,7 +180,7 @@ def logbook_entry_suggestions(request):
|
||||
lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
|
||||
except:
|
||||
print("failed to get placeholder for year "+str(qm['year']))
|
||||
|
||||
|
||||
temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
|
||||
temp_QM.grade=qm['grade']
|
||||
qm['wikilink']=temp_QM.wiki_link()
|
||||
@@ -188,16 +188,16 @@ def logbook_entry_suggestions(request):
|
||||
#print 'failed'
|
||||
|
||||
print(unwiki_QMs)
|
||||
|
||||
|
||||
|
||||
|
||||
#wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
|
||||
attached_QMs=lbo.QMs_found.all()
|
||||
unmentioned_attached_QMs=''#not implemented, fill this in by subtracting wiklink_QMs from attached_QMs
|
||||
|
||||
|
||||
#Find unattached_QMs. We only look at the QMs with a proper wiki link.
|
||||
#for qm in wikilink_QMs:
|
||||
#Try to look up the QM.
|
||||
|
||||
#Try to look up the QM.
|
||||
|
||||
print('got 208')
|
||||
any_suggestions=True
|
||||
print('got 210')
|
||||
@@ -217,11 +217,11 @@ def newFile(request, pslug = None):
#        personTripFormSet = PersonTripFormSet(request.POST)
#        if tripForm.is_valid() and personTripFormSet.is_valid(): # All validation rules pass
#            dateStr = tripForm.cleaned_data["date"].strftime("%Y-%m-%d")
#            directory = os.path.join(settings.EXPOWEB,
#                                     "years",
#                                     expedition.year,
#                                     "autologbook")
#            filename = os.path.join(directory,
#                                    dateStr + "." + slugify(tripForm.cleaned_data["title"])[:50] + ".html")
#            if not os.path.isdir(directory):
#                os.mkdir(directory)
@@ -229,7 +229,7 @@ def newFile(request, pslug = None):
#                delLogbookEntry(previouslbe)
#            f = open(filename, "w")
#            template = loader.get_template('dataformat/logbookentry.html')
#            context = Context({'trip': tripForm.cleaned_data,
#                               'persons': personTripFormSet.cleaned_data,
#                               'date': dateStr,
#                               'expeditionyear': expeditionyear})
@@ -254,11 +254,11 @@ def newFile(request, pslug = None):
#                                             "location": previouslbe.place,
#                                             "caveOrLocation": "location",
#                                             "html": previouslbe.text})
#            personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
#                                                            "TU": py.time_underground,
#                                                            "author": py.is_logbook_entry_author}
#                                                           for py in previouslbe.persontrip_set.all()])
#        else:
#            fileform = UploadFileForm() # An unbound form

    return render(request, 'editfile.html', {
@@ -1,7 +1,8 @@
from django import forms
from django.http import HttpResponseRedirect, HttpResponse
-from django.shortcuts import render_to_response, render
-from django.core.context_processors import csrf
+from django.shortcuts import render
+from django.views.decorators import csrf
+from django.views.decorators.csrf import csrf_protect
from django.http import HttpResponse, Http404
import re
import os
@@ -16,7 +17,7 @@ import troggle.settings as settings
import parsers.survex

survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau (kataster group 1623)
; Cave:

*begin [surveyname]

@@ -39,9 +40,9 @@ survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau
*data passage station left right up down ignoreall
1 [L] [R] [U] [D] comment

*end [surveyname]"""


def ReplaceTabs(stext):
    res = [ ]
    nsl = 0
@@ -64,7 +65,7 @@ class SvxForm(forms.Form):
    datetime = forms.DateTimeField(widget=forms.TextInput(attrs={"readonly":True}))
    outputtype = forms.CharField(widget=forms.TextInput(attrs={"readonly":True}))
    code = forms.CharField(widget=forms.Textarea(attrs={"cols":150, "rows":18}))

    def GetDiscCode(self):
        fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
        if not os.path.isfile(fname):
@@ -74,7 +75,7 @@ class SvxForm(forms.Form):
        svxtext = ReplaceTabs(svxtext).strip()
        fin.close()
        return svxtext

    def DiffCode(self, rcode):
        code = self.GetDiscCode()
        difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
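For context, difflib.unified_diff (Python standard library), which DiffCode relies on, takes two lists of lines and yields unified-diff strings. A minimal illustration with made-up survex lines:

    import difflib

    old = ["*begin demo", "1 2 3.40 010 -05", "*end demo"]
    new = ["*begin demo", "1 2 3.45 010 -05", "*end demo"]
    # yields header lines plus "-1 2 3.40 010 -05" and "+1 2 3.45 010 -05"
    for line in difflib.unified_diff(old, new):
        print(line)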
@@ -85,14 +86,14 @@ class SvxForm(forms.Form):
        fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
        if not os.path.isfile(fname):
        # only save if appears valid
        if re.search(r"\[|\]", rcode):
            return "Error: clean up all []s from the text"
        mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
        if not mbeginend:
            return "Error: no begin/end block here"
        if mbeginend.group(1) != mbeginend.group(2):
            return "Error: mismatching beginend"
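The begin/end validation above is easy to exercise by hand; a quick sketch, assuming a well-formed block (the survey name here is illustrative):

    import re

    rcode = "*begin demo\n1 2 3.40 010 -05\n*end demo\n"
    m = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
    # m.group(1) == m.group(2) == 'demo', so SaveCode accepts the block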

        fout = open(fname, "w")
        res = fout.write(rcode.encode("latin1"))
        fout.close()
@@ -110,28 +111,28 @@ class SvxForm(forms.Form):
        log = re.sub("(?s).*?(Survey contains)", "\\1", log)
        return log


+@csrf_protect
def svx(request, survex_file):
    # get the basic data from the file given in the URL
    dirname = os.path.split(survex_file)[0]
    dirname += "/"
    nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    outputtype = "normal"
    form = SvxForm({'filename':survex_file, 'dirname':dirname, 'datetime':nowtime, 'outputtype':outputtype})

    # if the form has been returned
    difflist = [ ]
    logmessage = ""
    message = ""

    if request.method == 'POST': # If the form has been submitted...
        rform = SvxForm(request.POST) #
        if rform.is_valid(): # All validation rules pass (how do we check it against the filename and users?)
            rcode = rform.cleaned_data['code']
            outputtype = rform.cleaned_data['outputtype']
            difflist = form.DiffCode(rcode)
            #print "ssss", rform.data

            if "revert" in rform.data:
                pass
            if "process" in rform.data:
@@ -152,20 +153,20 @@ def svx(request, survex_file):
                form.data['code'] = rcode
            if "diff" in rform.data:
                form.data['code'] = rcode

    #process(survex_file)
    if 'code' not in form.data:
        form.data['code'] = form.GetDiscCode()

    if not difflist:
        difflist.append("none")
    if message:
        difflist.insert(0, message)

    #print [ form.data['code'] ]
    svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")

    vmap = {'settings': settings,
            'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
            'title': survex_file,
@@ -173,10 +174,10 @@ def svx(request, survex_file):
            'difflist': difflist,
            'logmessage':logmessage,
            'form':form}
-    vmap.update(csrf(request))
+    # vmap.update(csrf(request))
    if outputtype == "ajax":
-        return render_to_response('svxfiledifflistonly.html', vmap)
-    return render_to_response('svxfile.html', vmap)
+        return render(request, 'svxfiledifflistonly.html', vmap)
+    return render(request, 'svxfile.html', vmap)

def svxraw(request, survex_file):
    svx = open(os.path.join(settings.SURVEX_DATA, survex_file+".svx"), "rb")
@@ -200,19 +201,19 @@ def threed(request, survex_file):
    log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
    return HttpResponse(log, content_type="text")


def log(request, survex_file):
    process(survex_file)
    log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
    return HttpResponse(log, content_type="text")


def err(request, survex_file):
    process(survex_file)
    err = open(settings.SURVEX_DATA + survex_file + ".err", "rb")
    return HttpResponse(err, content_type="text")




def identifycavedircontents(gcavedir):
    # find the primary survex file in each cave directory
    name = os.path.split(gcavedir)[1]
@@ -226,13 +227,13 @@ def identifycavedircontents(gcavedir):
            pass
        elif name == "115" and (f in ["115cufix.svx", "115fix.svx"]):
            pass

        elif os.path.isdir(os.path.join(gcavedir, f)):
            if f[0] != ".":
                subdirs.append(f)
        elif f[-4:] == ".svx":
            nf = f[:-4]

            if nf.lower() == name.lower() or nf[:3] == "all" or (name, nf) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]:
                if primesvx:
                    if nf[:3] == "all":
@@ -252,38 +253,50 @@ def identifycavedircontents(gcavedir):
    if primesvx:
        subsvx.insert(0, primesvx)
    return subdirs, subsvx



# direct local non-database browsing through the svx file repositories
# perhaps should use the database and have a reload button for it
def survexcaveslist(request):
-    cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
-    #cavesdircontents = { }
+    kat_areas = settings.KAT_AREAS
+
+    fnumlist = []
+
+    kat_areas = ['1623']
+
+    for area in kat_areas:
+        print(area)
+        cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+        print(cavesdir)
+        #cavesdircontents = { }
+        fnumlist += [ (-int(re.match(r"\d*", f).group(0) or "0"), f, area) for f in os.listdir(cavesdir) ]
+        print(fnumlist)
+        print(len(fnumlist))
+
+    # first sort the file list
+    fnumlist.sort()

    onefilecaves = [ ]
    multifilecaves = [ ]
    subdircaves = [ ]

-    # first sort the file list
-    fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
-    fnumlist.sort()

    print(fnumlist)

    # go through the list and identify the contents of each cave directory
-    for num, cavedir in fnumlist:
+    for num, cavedir, area in fnumlist:
        if cavedir in ["144", "40"]:
            continue

+        cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+
        gcavedir = os.path.join(cavesdir, cavedir)
        if os.path.isdir(gcavedir) and cavedir[0] != ".":
            subdirs, subsvx = identifycavedircontents(gcavedir)
            survdirobj = [ ]

            for lsubsvx in subsvx:
-                survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
+                survdirobj.append(("caves-" + area + "/"+cavedir+"/"+lsubsvx, lsubsvx))

            # caves with subdirectories
            if subdirs:
                subsurvdirs = [ ]
@@ -292,10 +305,10 @@ def survexcaveslist(request):
                assert not dsubdirs
                lsurvdirobj = [ ]
                for lsubsvx in dsubsvx:
-                    lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
+                    lsurvdirobj.append(("caves-" + area + "/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
                subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
                subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))

            # multifile caves
            elif len(survdirobj) > 1:
                multifilecaves.append((survdirobj[0], survdirobj[1:]))
@@ -304,24 +317,22 @@ def survexcaveslist(request):
            #print("survdirobj = ")
            #print(survdirobj)
            onefilecaves.append(survdirobj[0])

-    return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
+    return render(request, 'svxfilecavelist.html', {"onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })


# parsing all the survex files of a single cave and showing that it's consistent and can find all the files and people
# doesn't use recursion. just writes it twice
def survexcavesingle(request, survex_cave):
    breload = False
-    cave = Cave.objects.get(kataster_number=survex_cave)
+    cave = Cave.objects.filter(kataster_number=survex_cave)
+    if len(cave) < 1:
+        cave = Cave.objects.filter(unofficial_number=survex_cave)

    if breload:
        parsers.survex.ReloadSurvexCave(survex_cave)
-    return render_to_response('svxcavesingle.html', {'settings': settings, "cave":cave })
+    if len(cave) > 0:
+        return render(request, 'svxcavesingle.html', {"cave":cave[0] })
+    else:
+        return render(request, 'svxcavesingle.html', {"cave":cave })
@@ -3,6 +3,11 @@ import time
import settings
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

+if __name__ == '__main__':
+    import django
+    django.setup()
+
from django.core import management
from django.db import connection
from django.contrib.auth.models import User
@@ -28,7 +33,7 @@ def reload_db():
        cursor.execute("CREATE DATABASE %s" % databasename)
        cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
        cursor.execute("USE %s" % databasename)
-    management.call_command('syncdb', interactive=False)
+    management.call_command('migrate', interactive=False)
    user = User.objects.create_user(expouser, expouseremail, expouserpass)
    user.is_staff = True
    user.is_superuser = True
@@ -56,7 +61,7 @@ def import_logbooks():
        settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() +'\n'+'-'*60)
    except:
        pass

    import parsers.logbooks
    parsers.logbooks.LoadLogbooks()

@@ -90,14 +95,16 @@ def reset():
    import_caves()
    import_people()
    import_surveyscans()
-    import_survex()

    import_logbooks()
    import_QMs()

+    import_survex()
    try:
        import_tunnelfiles()
    except:
        print("Tunnel files parser broken.")

    import_surveys()

@@ -129,13 +136,13 @@ def dumplogbooks():
        return pe.person.first_name
    for lbe in troggle.core.models.LogbookEntry.objects.all():
        dateStr = lbe.date.strftime("%Y-%m-%d")
        directory = os.path.join(settings.EXPOWEB,
                                 "years",
                                 lbe.expedition.year,
                                 "autologbook")
        if not os.path.isdir(directory):
            os.mkdir(directory)
        filename = os.path.join(directory,
                                dateStr + "." + slugify(lbe.title)[:50] + ".html")
        if lbe.cave:
            print(lbe.cave.reference())
@@ -146,7 +153,7 @@ def dumplogbooks():
        persons = [{"name": get_name(pt.personexpedition), "TU": pt.time_underground, "author": pt.is_logbook_entry_author} for pt in pts]
        f = open(filename, "wb")
        template = loader.get_template('dataformat/logbookentry.html')
        context = Context({'trip': trip,
                           'persons': persons,
                           'date': dateStr,
                           'expeditionyear': lbe.expedition.year})
@@ -220,7 +227,7 @@ if __name__ == "__main__":
    elif "survexpos" in sys.argv:
        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
        import parsers.survex
        parsers.survex.LoadPos()
    elif "logbooks" in sys.argv:
        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
        import_logbooks()
@@ -239,7 +246,3 @@ if __name__ == "__main__":
    else:
        print("%s not recognised" % sys.argv)
        usage()
@@ -2,15 +2,17 @@ FROM python:2.7-stretch

#COPY backports.list /etc/apt/sources.list.d/

-RUN apt-get -y update && apt-get install -y mercurial fonts-freefont-ttf locales survex
+RUN apt-get -y update && apt-get install -y mercurial \
+    fonts-freefont-ttf locales survex python-levenshtein \
+    python-pygraphviz

#RUN apt-get -y -t -backports install survex

# Set the locale
RUN locale-gen en_GB.UTF-8
ENV LANG en_GB.UTF-8
ENV LANGUAGE en_GB:en
ENV LC_ALL en_GB.UTF-8

WORKDIR /opt/expo/troggle
COPY requirements.txt .

@@ -1 +1 @@
-requirements.txt.dj-1.7.11
+requirements.txt.dj-1.10
docker/requirements.txt.dj-1.10 (new file, 13 lines)
@@ -0,0 +1,13 @@
Django==1.10.8
django-registration==2.1.2
mysql
django-imagekit
Image
django-tinymce
smartencoding
fuzzywuzzy
GitPython
unidecode
django-extensions
pygraphviz
python-Levenshtein
@@ -6,3 +6,7 @@ django-imagekit
Image
django-tinymce==2.7.0
smartencoding
+fuzzywuzzy
+GitPython
+unidecode
+django-extensions
@@ -33,4 +33,3 @@ def writeQmTable(outfile,cave):
    cavewriter.writerow(headers)
    for qm in cave.get_QMs():
        cavewriter.writerow(qmRow(qm))
flatpages/migrations/0001_initial.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-02-18 16:01
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='EntranceRedirect',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('originalURL', models.CharField(max_length=200)),
                ('entrance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance')),
            ],
        ),
        migrations.CreateModel(
            name='Redirect',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('originalURL', models.CharField(max_length=200, unique=True)),
                ('newURL', models.CharField(max_length=200)),
            ],
        ),
    ]
flatpages/migrations/__init__.py (new file, empty)
@@ -33,7 +33,6 @@ def flatpage(request, path):
    except EntranceRedirect.DoesNotExist:
        pass

    if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated():
        print("flat path noinfo", path)
        return HttpResponseRedirect(reverse("auth_login") + '?next=%s' % request.path)
@@ -48,7 +47,7 @@ def flatpage(request, path):
            path = path + "index.htm"
        except IOError:
            return render(request, 'pagenotfound.html', {'path': path})
    else:
        try:
            filetobeopened = os.path.normpath(settings.EXPOWEB + path)
            o = open(filetobeopened, "rb")
@@ -56,7 +55,7 @@ def flatpage(request, path):
            return render(request, 'pagenotfound.html', {'path': path})
    if path.endswith(".htm") or path.endswith(".html"):
        html = o.read()

        m = re.search(r"(.*)<\s*head([^>]*)>(.*)<\s*/head\s*>(.*)<\s*body([^>]*)>(.*)<\s*/body\s*>(.*)", html, re.DOTALL + re.IGNORECASE)
        if m:
            preheader, headerattrs, head, postheader, bodyattrs, body, postbody = m.groups()
@@ -125,7 +124,7 @@ def editflatpage(request, path):
                return HttpResponse("Page could not be split into header and body")
    except IOError:
        filefound = False

    if request.method == 'POST': # If the form has been submitted...
        flatpageForm = FlatPageForm(request.POST) # A form bound to the POST data
@@ -142,7 +141,7 @@ def editflatpage(request, path):
                headerargs = ""
                postheader = ""
                bodyargs = ""
                postbody = "</html>"
        body = flatpageForm.cleaned_data["html"]
        body = body.replace("\r", "")
        result = u"%s<head%s>%s</head>%s<body%s>\n%s</body>%s" % (preheader, headerargs, head, postheader, bodyargs, body, postbody)
@@ -153,7 +152,7 @@ def editflatpage(request, path):
    else:
        if filefound:
            m = re.search(r"<title>(.*)</title>", head, re.DOTALL + re.IGNORECASE)
            if m:
                title, = m.groups()
            else:
                title = ""
@@ -1,13 +0,0 @@
"""

Django ImageKit

Author: Justin Driscoll <justin.driscoll@gmail.com>
Version: 0.2

"""
VERSION = "0.2"


@@ -1,21 +0,0 @@
""" Default ImageKit configuration """

from imagekit.specs import ImageSpec
from imagekit import processors

class ResizeThumbnail(processors.Resize):
    width = 100
    height = 50
    crop = True

class EnhanceSmall(processors.Adjustment):
    contrast = 1.2
    sharpness = 1.1

class SampleReflection(processors.Reflection):
    size = 0.5
    background_color = "#000000"

class DjangoAdminThumbnail(ImageSpec):
    access_as = 'admin_thumbnail'
    processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
@@ -1,17 +0,0 @@
# Required PIL classes may or may not be available from the root namespace
# depending on the installation method used.
try:
    import Image
    import ImageFile
    import ImageFilter
    import ImageEnhance
    import ImageColor
except ImportError:
    try:
        from PIL import Image
        from PIL import ImageFile
        from PIL import ImageFilter
        from PIL import ImageEnhance
        from PIL import ImageColor
    except ImportError:
        raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it`s installed and available on your current Python path.')
@@ -1 +0,0 @@

@@ -1 +0,0 @@

@@ -1,38 +0,0 @@
from django.db.models.loading import cache
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec


class Command(BaseCommand):
    help = ('Clears all ImageKit cached files.')
    args = '[apps]'
    requires_model_validation = True
    can_import_settings = True

    def handle(self, *args, **options):
        return flush_cache(args, options)

def flush_cache(apps, options):
    """ Clears the image cache

    """
    apps = [a.strip(',') for a in apps]
    if apps:
        print 'Flushing cache for %s...' % ', '.join(apps)
    else:
        print 'Flushing caches...'

    for app_label in apps:
        app = cache.get_app(app_label)
        models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]

        for model in models:
            for obj in model.objects.all():
                for spec in model._ik.specs:
                    prop = getattr(obj, spec.name(), None)
                    if prop is not None:
                        prop._delete()
                    if spec.pre_cache:
                        prop._create()
@@ -1,136 +0,0 @@
import os
from datetime import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.db.models.base import ModelBase
from django.utils.translation import ugettext_lazy as _

from imagekit import specs
from imagekit.lib import *
from imagekit.options import Options
from imagekit.utils import img_to_fobj

# Modify image file buffer size.
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)

# Choice tuples for specifying the crop origin.
# These are provided for convenience.
CROP_HORZ_CHOICES = (
    (0, _('left')),
    (1, _('center')),
    (2, _('right')),
)

CROP_VERT_CHOICES = (
    (0, _('top')),
    (1, _('center')),
    (2, _('bottom')),
)


class ImageModelBase(ModelBase):
    """ ImageModel metaclass

    This metaclass parses IKOptions and loads the specified specification
    module.

    """
    def __init__(cls, name, bases, attrs):
        parents = [b for b in bases if isinstance(b, ImageModelBase)]
        if not parents:
            return
        user_opts = getattr(cls, 'IKOptions', None)
        opts = Options(user_opts)
        try:
            module = __import__(opts.spec_module, {}, {}, [''])
        except ImportError:
            raise ImportError('Unable to load imagekit config module: %s' % \
                opts.spec_module)
        for spec in [spec for spec in module.__dict__.values() \
                if isinstance(spec, type) \
                and issubclass(spec, specs.ImageSpec) \
                and spec != specs.ImageSpec]:
            setattr(cls, spec.name(), specs.Descriptor(spec))
            opts.specs.append(spec)
        setattr(cls, '_ik', opts)


class ImageModel(models.Model):
    """ Abstract base class implementing all core ImageKit functionality

    Subclasses of ImageModel are augmented with accessors for each defined
    image specification and can override the inner IKOptions class to customize
    storage locations and other options.

    """
    __metaclass__ = ImageModelBase

    class Meta:
        abstract = True

    class IKOptions:
        pass

    def admin_thumbnail_view(self):
        if not self._imgfield:
            return None
        prop = getattr(self, self._ik.admin_thumbnail_spec, None)
        if prop is None:
            return 'An "%s" image spec has not been defined.' % \
                self._ik.admin_thumbnail_spec
        else:
            if hasattr(self, 'get_absolute_url'):
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self.get_absolute_url(), prop.url)
            else:
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self._imgfield.url, prop.url)
    admin_thumbnail_view.short_description = _('Thumbnail')
    admin_thumbnail_view.allow_tags = True

    @property
    def _imgfield(self):
        return getattr(self, self._ik.image_field)

    def _clear_cache(self):
        for spec in self._ik.specs:
            prop = getattr(self, spec.name())
            prop._delete()

    def _pre_cache(self):
        for spec in self._ik.specs:
            if spec.pre_cache:
                prop = getattr(self, spec.name())
                prop._create()

    def save(self, clear_cache=True, *args, **kwargs):
        is_new_object = self._get_pk_val is None
        super(ImageModel, self).save(*args, **kwargs)
        if is_new_object:
            clear_cache = False
            spec = self._ik.preprocessor_spec
            if spec is not None:
                newfile = self._imgfield.storage.open(str(self._imgfield))
                img = Image.open(newfile)
                img = spec.process(img, None)
                format = img.format or 'JPEG'
                if format != 'JPEG':
                    imgfile = img_to_fobj(img, format)
                else:
                    imgfile = img_to_fobj(img, format,
                                          quality=int(spec.quality),
                                          optimize=True)
                content = ContentFile(imgfile.read())
                newfile.close()
                name = str(self._imgfield)
                self._imgfield.storage.delete(name)
                self._imgfield.storage.save(name, content)
        if clear_cache and self._imgfield != '':
            self._clear_cache()
        self._pre_cache()

    def delete(self):
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
        self._clear_cache()
        models.Model.delete(self)
@@ -1,23 +0,0 @@
# Imagekit options
from imagekit import processors
from imagekit.specs import ImageSpec


class Options(object):
    """ Class handling per-model imagekit options

    """
    image_field = 'image'
    crop_horz_field = 'crop_horz'
    crop_vert_field = 'crop_vert'
    preprocessor_spec = None
    cache_dir = 'cache'
    save_count_as = None
    cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
    admin_thumbnail_spec = 'admin_thumbnail'
    spec_module = 'imagekit.defaults'

    def __init__(self, opts):
        for key, value in opts.__dict__.iteritems():
            setattr(self, key, value)
        self.specs = []
@@ -1,134 +0,0 @@
""" Imagekit Image "ImageProcessors"

A processor defines a set of class variables (optional) and a
class method named "process" which processes the supplied image using
the class properties as settings. The process method can be overridden as well allowing user to define their
own effects/processes entirely.

"""
from imagekit.lib import *

class ImageProcessor(object):
    """ Base image processor class """
    @classmethod
    def process(cls, image, obj=None):
        return image


class Adjustment(ImageProcessor):
    color = 1.0
    brightness = 1.0
    contrast = 1.0
    sharpness = 1.0

    @classmethod
    def process(cls, image, obj=None):
        for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
            factor = getattr(cls, name.lower())
            if factor != 1.0:
                image = getattr(ImageEnhance, name)(image).enhance(factor)
        return image


class Reflection(ImageProcessor):
    background_color = '#FFFFFF'
    size = 0.0
    opacity = 0.6

    @classmethod
    def process(cls, image, obj=None):
        # convert bgcolor string to rgb value
        background_color = ImageColor.getrgb(cls.background_color)
        # copy orignial image and flip the orientation
        reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
        # create a new image filled with the bgcolor the same size
        background = Image.new("RGB", image.size, background_color)
        # calculate our alpha mask
        start = int(255 - (255 * cls.opacity)) # The start of our gradient
        steps = int(255 * cls.size) # the number of intermedite values
        increment = (255 - start) / float(steps)
        mask = Image.new('L', (1, 255))
        for y in range(255):
            if y < steps:
                val = int(y * increment + start)
            else:
                val = 255
            mask.putpixel((0, y), val)
        alpha_mask = mask.resize(image.size)
        # merge the reflection onto our background color using the alpha mask
        reflection = Image.composite(background, reflection, alpha_mask)
        # crop the reflection
        reflection_height = int(image.size[1] * cls.size)
        reflection = reflection.crop((0, 0, image.size[0], reflection_height))
        # create new image sized to hold both the original image and the reflection
        composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
        # paste the orignal image and the reflection into the composite image
        composite.paste(image, (0, 0))
        composite.paste(reflection, (0, image.size[1]))
        # return the image complete with reflection effect
        return composite


class Resize(ImageProcessor):
    width = None
    height = None
    crop = False
    upscale = False

    @classmethod
    def process(cls, image, obj=None):
        cur_width, cur_height = image.size
        if cls.crop:
            crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
            crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
            ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
            resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
            crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
            x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
            box_left, box_right = {
                0: (0, cls.width),
                1: (int(x_diff), int(x_diff + cls.width)),
                2: (int(crop_x), int(resize_x)),
            }[crop_horz]
            box_upper, box_lower = {
                0: (0, cls.height),
                1: (int(y_diff), int(y_diff + cls.height)),
                2: (int(crop_y), int(resize_y)),
            }[crop_vert]
            box = (box_left, box_upper, box_right, box_lower)
            image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
        else:
            if not cls.width is None and not cls.height is None:
                ratio = min(float(cls.width)/cur_width,
                            float(cls.height)/cur_height)
            else:
                if cls.width is None:
                    ratio = float(cls.height)/cur_height
                else:
                    ratio = float(cls.width)/cur_width
            new_dimensions = (int(round(cur_width*ratio)),
                              int(round(cur_height*ratio)))
            if new_dimensions[0] > cur_width or \
               new_dimensions[1] > cur_height:
                if not cls.upscale:
                    return image
            image = image.resize(new_dimensions, Image.ANTIALIAS)
        return image


class Transpose(ImageProcessor):
    """ Rotates or flips the image

    Method should be one of the following strings:
    - FLIP_LEFT RIGHT
    - FLIP_TOP_BOTTOM
    - ROTATE_90
    - ROTATE_270
    - ROTATE_180

    """
    method = 'FLIP_LEFT_RIGHT'

    @classmethod
    def process(cls, image, obj=None):
        return image.transpose(getattr(Image, cls.method))
@@ -1,119 +0,0 @@
""" ImageKit image specifications

All imagekit specifications must inherit from the ImageSpec class. Models
inheriting from ImageModel will be modified with a descriptor/accessor for each
spec found.

"""
import os
from StringIO import StringIO
from imagekit.lib import *
from imagekit.utils import img_to_fobj
from django.core.files.base import ContentFile

class ImageSpec(object):
    pre_cache = False
    quality = 70
    increment_count = False
    processors = []

    @classmethod
    def name(cls):
        return getattr(cls, 'access_as', cls.__name__.lower())

    @classmethod
    def process(cls, image, obj):
        processed_image = image.copy()
        for proc in cls.processors:
            processed_image = proc.process(processed_image, obj)
        return processed_image


class Accessor(object):
    def __init__(self, obj, spec):
        self._img = None
        self._obj = obj
        self.spec = spec

    def _get_imgfile(self):
        format = self._img.format or 'JPEG'
        if format != 'JPEG':
            imgfile = img_to_fobj(self._img, format)
        else:
            imgfile = img_to_fobj(self._img, format,
                                  quality=int(self.spec.quality),
                                  optimize=True)
        return imgfile

    def _create(self):
        if self._exists():
            return
        # process the original image file
        fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
        fp.seek(0)
        fp = StringIO(fp.read())
        try:
            self._img = self.spec.process(Image.open(fp), self._obj)
            # save the new image to the cache
            content = ContentFile(self._get_imgfile().read())
            self._obj._imgfield.storage.save(self.name, content)
        except IOError:
            pass

    def _delete(self):
        self._obj._imgfield.storage.delete(self.name)

    def _exists(self):
        return self._obj._imgfield.storage.exists(self.name)

    def _basename(self):
        filename, extension = \
            os.path.splitext(os.path.basename(self._obj._imgfield.name))
        return self._obj._ik.cache_filename_format % \
            {'filename': filename,
             'specname': self.spec.name(),
             'extension': extension.lstrip('.')}

    @property
    def name(self):
        return os.path.join(self._obj._ik.cache_dir, self._basename())

    @property
    def url(self):
        self._create()
        if self.spec.increment_count:
            fieldname = self._obj._ik.save_count_as
            if fieldname is not None:
                current_count = getattr(self._obj, fieldname)
                setattr(self._obj, fieldname, current_count + 1)
                self._obj.save(clear_cache=False)
        return self._obj._imgfield.storage.url(self.name)

    @property
    def file(self):
        self._create()
        return self._obj._imgfield.storage.open(self.name)

    @property
    def image(self):
        if self._img is None:
            self._create()
            if self._img is None:
                self._img = Image.open(self.file)
        return self._img

    @property
    def width(self):
        return self.image.size[0]

    @property
    def height(self):
        return self.image.size[1]


class Descriptor(object):
    def __init__(self, spec):
        self._spec = spec

    def __get__(self, obj, type=None):
        return Accessor(obj, self._spec)
@@ -1,86 +0,0 @@
import os
import tempfile
import unittest
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase

from imagekit import processors
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec
from imagekit.lib import Image


class ResizeToWidth(processors.Resize):
    width = 100

class ResizeToHeight(processors.Resize):
    height = 100

class ResizeToFit(processors.Resize):
    width = 100
    height = 100

class ResizeCropped(ResizeToFit):
    crop = ('center', 'center')

class TestResizeToWidth(ImageSpec):
    access_as = 'to_width'
    processors = [ResizeToWidth]

class TestResizeToHeight(ImageSpec):
    access_as = 'to_height'
    processors = [ResizeToHeight]

class TestResizeCropped(ImageSpec):
    access_as = 'cropped'
    processors = [ResizeCropped]

class TestPhoto(ImageModel):
    """ Minimal ImageModel class for testing """
    image = models.ImageField(upload_to='images')

    class IKOptions:
        spec_module = 'imagekit.tests'


class IKTest(TestCase):
    """ Base TestCase class """
    def setUp(self):
        # create a test image using tempfile and PIL
        self.tmp = tempfile.TemporaryFile()
        Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
        self.tmp.seek(0)
        self.p = TestPhoto()
        self.p.image.save(os.path.basename('test.jpg'),
                          ContentFile(self.tmp.read()))
        self.p.save()
        # destroy temp file
        self.tmp.close()

    def test_setup(self):
        self.assertEqual(self.p.image.width, 800)
        self.assertEqual(self.p.image.height, 600)

    def test_to_width(self):
        self.assertEqual(self.p.to_width.width, 100)
        self.assertEqual(self.p.to_width.height, 75)

    def test_to_height(self):
        self.assertEqual(self.p.to_height.width, 133)
        self.assertEqual(self.p.to_height.height, 100)

    def test_crop(self):
        self.assertEqual(self.p.cropped.width, 100)
        self.assertEqual(self.p.cropped.height, 100)

    def test_url(self):
        tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
        self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)

    def tearDown(self):
        # make sure image file is deleted
        path = self.p.image.path
        self.p.delete()
        self.failIf(os.path.isfile(path))
@@ -1,15 +0,0 @@
""" ImageKit utility functions """

import tempfile

def img_to_fobj(img, format, **kwargs):
    tmp = tempfile.TemporaryFile()
    if format != 'JPEG':
        try:
            img.save(tmp, format, **kwargs)
            return
        except KeyError:
            pass
    img.save(tmp, format, **kwargs)
    tmp.seek(0)
    return tmp
@@ -2,7 +2,7 @@ import sys
# This is the local settings for use with the docker compose dev setup. It is imported automatically

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME' : 'troggle',      # Or path to database file if using sqlite3.
        'USER' : 'troggleuser',  # Not used with sqlite3.
@@ -12,6 +12,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'somepasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -55,11 +57,4 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = PYTHON_PATH + 'troggle_log.txt'

@@ -15,6 +15,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
@@ -53,13 +55,6 @@ JSLIB_PATH = '/usr/share/javascript/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + 'tinymce_media/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = '/home/expo/troggle/troggle_log.txt'

FEINCMS_ADMIN_MEDIA='/site_media/feincms/'

@@ -1,6 +1,6 @@
import sys
# This is an example file. Copy it to localsettings.py, set the
# password and _don't_ check that file back to the repo as it exposes
# your/our password to the world!

DATABASES = {
@@ -14,6 +14,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -55,13 +57,6 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = '/home/expo/troggle/troggle_log.txt'

FEINCMS_ADMIN_MEDIA='/site_media/feincms/'

@@ -2,7 +2,7 @@ import sys
# link localsettings to this file for use on expo computer in austria

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME' : 'troggle',  # Or path to database file if using sqlite3.
        'USER' : 'expo',     # Not used with sqlite3.
@@ -12,6 +12,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -57,11 +59,4 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = PYTHON_PATH + 'troggle_log.txt'

@@ -9,6 +9,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -30,7 +32,7 @@ URL_ROOT = 'http://127.0.0.1:8000'
DIR_ROOT = '' #this should end in / if a value is given
PUBLIC_SITE = False

TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + 'tinymce_media/'

PYTHON_PATH = 'C:\\expoweb\\troggle\\'
@@ -56,14 +58,3 @@ EMAIL_USE_TLS = True
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"

-TEMPLATE_DIRS = (
-    "C:/Expo/expoweb/troggle/templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
@@ -29,12 +29,12 @@
    }

    function redirectSurvey(){
-        window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
+        window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
        document.getElementById("progressTableContent").style.display='hidden'
    }

    function redirectYear(){
-        window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
+        window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
    }


@@ -46,4 +46,4 @@ def _resolves(url):
        return True
    except http.Http404:
        return False
@@ -30,7 +30,7 @@ def parseCaveQMs(cave,inputFile):
            kh=Cave.objects.get(official_name="Kaninchenhöhle")
        except Cave.DoesNotExist:
            print("KH is not in the database. Please run parsers.cavetab first.")
        parse_KH_QMs(kh, inputFile=inputFile)
        return

    qmPath = settings.EXPOWEB+inputFile
@@ -46,7 +46,7 @@ def parseCaveQMs(cave,inputFile):
                if cave=='stein':
                    placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 204", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":steinBr})
                elif cave=='hauch':
                    placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 234", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":hauchHl})
                if hadToCreate:
                    print(cave + " placeholder logbook entry for " + str(year) + " added to database")
                QMnum=re.match(r".*?-\d*?-X?(?P<numb>\d*)",line[0]).group("numb")
@@ -59,7 +59,7 @@ def parseCaveQMs(cave,inputFile):
                newQM.grade=line[1]
                newQM.area=line[2]
                newQM.location_description=line[3]

                newQM.completion_description=line[4]
                newQM.nearest_station_description=line[5]
                if newQM.completion_description: # Troggle checks if QMs are completed by checking if they have a ticked_off_by trip. In the table, completion is indicated by the presence of a completion description.
@@ -74,11 +74,11 @@ def parseCaveQMs(cave,inputFile):
                        print("overwriting " + str(preexistingQM) +"\r")
                    else: # otherwise, print that it was ignored
                        print("preserving " + str(preexistingQM) + ", which was edited in admin \r")

                except QM.DoesNotExist: #if there is no pre-existing QM, save the new one
                    newQM.save()
                    print("QM "+str(newQM) + ' added to database\r')

            except KeyError: #check on this one
                continue
            except IndexError:
@@ -106,9 +106,9 @@ def parse_KH_QMs(kh, inputFile):
            'nearest_station_name':res['nearest_station'],
            'location_description':res['description']
        }

        save_carefully(QM,lookupArgs,nonLookupArgs)


parseCaveQMs(cave='stein',inputFile=r"1623/204/qm.csv")
parseCaveQMs(cave='hauch',inputFile=r"1623/234/qm.csv")
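save_carefully is troggle's own helper (imported from utils in the logbooks parser below); judging purely from the call sites in this changeset, it looks an object up by the lookup attributes and fills in the non-lookup attributes, preserving records that were hand-edited in the admin. A rough sketch of that behaviour using Django's standard manager API; this is an approximation for orientation, not the real implementation:

    def save_carefully_sketch(model, lookupAttribs, nonLookupAttribs):
        # find-or-create keyed on the identifying attributes
        obj, created = model.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
        if not created:
            # the real helper also skips objects edited in the admin
            # (the "preserving ... edited in admin" branch above); omitted here
            for key, value in nonLookupAttribs.items():
                setattr(obj, key, value)
            obj.save()
        return obj, created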

@@ -155,7 +155,7 @@ def readcave(filename):
            message = "Can't find text (slug): %s, skipping %s" % (slug, context)
            models.DataIssue.objects.create(parser='caves', message=message)
            print(message)

        primary = False
        for entrance in entrances:
            slug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]
@@ -167,7 +167,7 @@ def readcave(filename):
                message = "Entrance text (slug) %s missing %s" % (slug, context)
                models.DataIssue.objects.create(parser='caves', message=message)
                print(message)


def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
@@ -177,7 +177,7 @@ def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True,
                       "min": minItems} + context
        models.DataIssue.objects.create(parser='caves', message=message)
        print(message)

    if maxItems is not None and len(items) > maxItems and printwarnings:
        message = "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
                                                                                   "itemname": itemname,
@@ -14,10 +14,11 @@ import csv
import re
import datetime
import os
+from fuzzywuzzy import fuzz

from utils import save_carefully

#
# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
# it can be checked up later from the hard-copy if necessary; or it's not possible to determine (name, trip place, etc)
#
@@ -31,6 +32,7 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
    round_bracket_regex = re.compile(r"[\(\[].*?[\)\]]")
    for tripperson in re.split(r",|\+|&amp;|&(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
+        tripperson = tripperson.strip('.')
        mul = re.match(r"<u>(.*?)</u>$(?i)", tripperson)
        if mul:
            tripperson = mul.group(1).strip()
@@ -42,6 +44,15 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
                print(" - No name match for: '%s'" % tripperson)
                message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
                models.DataIssue.objects.create(parser='logbooks', message=message)
+                print(' - Lets try something fuzzy')
+                fuzzy_matches = {}
+                for person in GetPersonExpeditionNameLookup(expedition):
+                    fuzz_num = fuzz.ratio(tripperson.lower(), person)
+                    if fuzz_num > 50:
+                        #print(" - %s -> %s = %d" % (tripperson.lower(), person, fuzz_num))
+                        fuzzy_matches[person] = fuzz_num
+                for i in sorted(fuzzy_matches.items(), key = lambda kv:(kv[1]), reverse=True):
+                    print(' - %s -> %s' % (i[0], i[1]))
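For context, fuzzywuzzy's fuzz.ratio (newly added to the requirements files in this changeset) returns an integer similarity score from 0 to 100, which is why the new code keeps only candidates scoring above 50. The names here are illustrative:

    from fuzzywuzzy import fuzz

    fuzz.ratio("wookey", "wookee")  # high score: one letter differs, a plausible match
    fuzz.ratio("wookey", "becka")   # low score: discarded by the > 50 cut-off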
res.append((personyear, logtime_underground))
|
||||
if mul:
|
||||
author = personyear
|
||||
@@ -100,7 +111,7 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
|
||||
lookupAttribs={'date':date, 'title':title}
|
||||
nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50], 'entry_type':entry_type}
|
||||
lbo, created=save_carefully(models.LogbookEntry, lookupAttribs, nonLookupAttribs)
|
||||
|
||||
|
||||
for tripperson, time_underground in trippersons:
|
||||
lookupAttribs={'personexpedition':tripperson, 'logbook_entry':lbo}
|
||||
nonLookupAttribs={'time_underground':time_underground, 'is_logbook_entry_author':(tripperson == author)}
|
||||
@@ -121,7 +132,7 @@ def ParseDate(tripdate, year):
|
||||
day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
|
||||
else:
|
||||
assert False, tripdate
|
||||
return datetime.date(year, month, day)
|
||||
return make_aware(datetime.datetime(year, month, day), get_current_timezone())
|
||||
|
||||
# 2006, 2008 - 2010
|
||||
def Parselogwikitxt(year, expedition, txt):
|
||||
@@ -156,7 +167,7 @@ def Parseloghtmltxt(year, expedition, txt):
|
||||
for trippara in tripparas:
|
||||
#print(" - HR detected - maybe a trip?")
|
||||
logbook_entry_count += 1
|
||||
|
||||
|
||||
s = re.match(r'''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)? # second date
|
||||
\s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
|
||||
\s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
|
||||
@@ -173,9 +184,6 @@ def Parseloghtmltxt(year, expedition, txt):
|
||||
continue
|
||||
tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
#assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
|
||||
#trippeople = re.sub(r"Ol(?!l)", "Olly", trippeople)
|
||||
#trippeople = re.sub(r"Wook(?!e)", "Wookey", trippeople)
|
||||
triptitles = triptitle.split(" - ")
|
||||
if len(triptitles) >= 2:
|
||||
tripcave = triptitles[0]
|
||||
@@ -208,7 +216,7 @@ def Parseloghtml01(year, expedition, txt):
|
||||
|
||||
tripdate, triptitle, trippeople = tripheader.split("|")
|
||||
ldate = ParseDate(tripdate.strip(), year)
|
||||
|
||||
|
||||
mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
|
||||
if mtu:
|
||||
tu = mtu.group(1)
|
||||
@@ -220,7 +228,7 @@ def Parseloghtml01(year, expedition, txt):
|
||||
tripcave = triptitles[0].strip()
|
||||
|
||||
ltriptext = triptext
|
||||
|
||||
|
||||
mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
|
||||
if mtail:
|
||||
#print mtail.group(0)
|
||||
@@ -232,7 +240,6 @@ def Parseloghtml01(year, expedition, txt):
|
||||
ltriptext = re.sub(r"</?u>", "_", ltriptext)
|
||||
ltriptext = re.sub(r"</?i>", "''", ltriptext)
|
||||
ltriptext = re.sub(r"</?b>", "'''", ltriptext)
|
||||
|
||||
|
||||
#print ldate, trippeople.strip()
|
||||
# could includ the tripid (url link for cross referencing)
|
||||
@@ -293,7 +300,7 @@ def SetDatesFromLogbookEntries(expedition):
|
||||
|
||||
def LoadLogbookForExpedition(expedition):
|
||||
""" Parses all logbook entries for one expedition """
|
||||
|
||||
|
||||
expowebbase = os.path.join(settings.EXPOWEB, "years")
|
||||
yearlinks = settings.LOGBOOK_PARSER_SETTINGS
|
||||
|
||||
@@ -336,7 +343,7 @@ def LoadLogbooks():
|
||||
expos = models.Expedition.objects.all()
|
||||
for expo in expos:
|
||||
print("\nLoading Logbook for: " + expo.year)
|
||||
|
||||
|
||||
# Load logbook for expo
|
||||
LoadLogbookForExpedition(expo)
|
||||
|
||||
@@ -370,17 +377,17 @@ def parseAutoLogBookEntry(filename):
            expedition = models.Expedition.objects.get(year = expeditionYearMatch.groups()[0])
            personExpeditionNameLookup = GetPersonExpeditionNameLookup(expedition)
        except models.Expedition.DoesNotExist:
            errors.append("Expedition not in database")
    else:
        errors.append("Expedition Year could not be parsed")

    titleMatch = titleRegex.search(contents)
    if titleMatch:
        title, = titleMatch.groups()
        if len(title) > settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH:
            errors.append("Title too long")
    else:
        errors.append("Title could not be found")

    caveMatch = caveRegex.search(contents)
    if caveMatch:
@@ -389,24 +396,24 @@ def parseAutoLogBookEntry(filename):
            cave = models.getCaveByReference(caveRef)
        except AssertionError:
            cave = None
            errors.append("Cave not found in database")
    else:
        cave = None

    locationMatch = locationRegex.search(contents)
    if locationMatch:
        location, = locationMatch.groups()
    else:
        location = None

    if cave is None and location is None:
        errors.append("Neither location nor cave could be found")

    reportMatch = reportRegex.search(contents)
    if reportMatch:
        report, = reportMatch.groups()
    else:
        errors.append("Contents could not be found")
    if errors:
        return errors # Easiest to bail out at this point as we need to make sure that we know which expedition to look for people from.
    people = []
@@ -421,7 +428,7 @@ def parseAutoLogBookEntry(filename):
            author = bool(author)
        else:
            errors.append("Person's name could not be found")

    TUMatch = TURegex.search(contents)
    if TUMatch:
        TU, = TUMatch.groups()
@@ -431,15 +438,15 @@ def parseAutoLogBookEntry(filename):
        people.append((name, author, TU))
    if errors:
        return errors # Bail out before committing to the database
    logbookEntry = models.LogbookEntry(date = date,
                                       expedition = expedition,
                                       title = title, cave = cave, place = location,
                                       text = report, slug = slugify(title)[:50],
                                       filename = filename)
    logbookEntry.save()
    for name, author, TU in people:
        models.PersonTrip(personexpedition = personExpo,
                          time_underground = TU,
                          logbook_entry = logbookEntry,
                          is_logbook_entry_author = author).save()
    print(logbookEntry)
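A minimal usage sketch for the function above (the driver loop and filename are
assumptions, not repo code): parseAutoLogBookEntry returns a list of error
strings when parsing fails, and saves the LogbookEntry and PersonTrips itself
on success.

    # hypothetical caller; the path is made up for illustration
    errors = parseAutoLogBookEntry("1999/autologbook/entry1.html")
    if errors:
        for e in errors:
            print("could not import:", e)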
@@ -12,22 +12,22 @@ def saveMugShot(mugShotPath, mugShotFilename, person):
        mugShotFilename=mugShotFilename[2:]
    else:
        mugShotFilename=mugShotFilename # just in case one doesn't

    dummyObj=models.DPhoto(file=mugShotFilename)

    #Put a copy of the file in the right place. mugShotObj.file.path is determined by the django filesystemstorage specified in models.py
    if not os.path.exists(dummyObj.file.path):
        shutil.copy(mugShotPath, dummyObj.file.path)

    mugShotObj, created = save_carefully(
        models.DPhoto,
        lookupAttribs={'is_mugshot':True, 'file':mugShotFilename},
        nonLookupAttribs={'caption':"Mugshot for "+person.first_name+" "+person.last_name}
    )

    if created:
        mugShotObj.contains_person.add(person)
        mugShotObj.save()

def parseMugShotAndBlurb(personline, header, person):
    """create mugshot Photo instance"""
@@ -45,20 +45,20 @@ def parseMugShotAndBlurb(personline, header, person):
    person.save()

def LoadPersonsExpos():

    persontab = open(os.path.join(settings.EXPOWEB, "folk", "folk.csv"))
    personreader = csv.reader(persontab)
    headers = personreader.next()
    header = dict(zip(headers, range(len(headers))))

    # make expeditions
    print("Loading expeditions")
    years = headers[5:]

    for year in years:
        lookupAttribs = {'year':year}
        nonLookupAttribs = {'name':"CUCC expo %s" % year}

        save_carefully(models.Expedition, lookupAttribs, nonLookupAttribs)

    # make persons
@@ -87,11 +87,11 @@ def LoadPersonsExpos():
            lastname = ""

        lookupAttribs={'first_name':firstname, 'last_name':(lastname or "")}
        nonLookupAttribs={'is_vfho':personline[header["VfHO member"]], 'fullname':fullname}
        nonLookupAttribs={'is_vfho':bool(personline[header["VfHO member"]]), 'fullname':fullname}
        person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)

        parseMugShotAndBlurb(personline=personline, header=header, person=person)

        # make person expedition from table
        for year, attended in zip(headers, personline)[5:]:
            expedition = models.Expedition.objects.get(year=year)
@@ -100,26 +100,6 @@ def LoadPersonsExpos():
            nonLookupAttribs = {'nickname':nickname, 'is_guest':(personline[header["Guest"]] == "1")}
            save_carefully(models.PersonExpedition, lookupAttribs, nonLookupAttribs)

    # this fills in those people for whom 2008 was their first expo
    #print "Loading personexpeditions 2008"
    #expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
    #expomissing = set(expoers2008)
    #for name in expomissing:
    #    firstname, lastname = name.split()
    #    is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
    #    print "2008:", name
    #    persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
    #    if not persons:
    #        person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
    #        #person.Sethref()
    #        person.save()
    #    else:
    #        person = persons[0]
    #    expedition = models.Expedition.objects.get(year="2008")
    #    personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
    #    personexpedition.save()
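# Worked illustration of the attendance loop in LoadPersonsExpos above
# (hypothetical row, not from folk.csv): with headers[5:] == ["1991", "1992"]
# and personline[5:] == ["1", ""], zip(headers, personline)[5:] yields
# [("1991", "1"), ("1992", "")] - each year paired with that person's
# attendance mark.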
# used in other referencing parser functions
# expedition name lookup cached for speed (it's a very big list)
Gpersonexpeditionnamelookup = { }

@@ -128,10 +108,10 @@ def GetPersonExpeditionNameLookup(expedition):
    res = Gpersonexpeditionnamelookup.get(expedition.name)
    if res:
        return res

    res = { }
    duplicates = set()

    print("Calculating GetPersonExpeditionNameLookup for " + expedition.year)
    personexpeditions = models.PersonExpedition.objects.filter(expedition=expedition)
    htmlparser = HTMLParser()
@@ -159,16 +139,16 @@ def GetPersonExpeditionNameLookup(expedition):
            possnames.append(personexpedition.nickname.lower() + " " + l[0])
        if str(personexpedition.nickname.lower() + l[0]) not in possnames:
            possnames.append(personexpedition.nickname.lower() + l[0])

        for possname in possnames:
            if possname in res:
                duplicates.add(possname)
            else:
                res[possname] = personexpedition

    for possname in duplicates:
        del res[possname]

    Gpersonexpeditionnamelookup[expedition.name] = res
    return res
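A short usage sketch (the caller code is an assumption; the survex and logbook
parsers use the lookup in this way): ambiguous names are deleted from the
cache, so a failed lookup simply returns None.

    lookup = GetPersonExpeditionNameLookup(expedition)
    personexpedition = lookup.get("wookey")   # None if unknown or ambiguous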
@@ -1,5 +1,7 @@
'''
This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html files and saves them in the troggle database as instances of the model Subcave. Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html
files and saves them in the troggle database as instances of the model Subcave.
Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
'''

import sys, os
@@ -29,12 +31,12 @@ def importSubcaves(cave):
                link[0])
            subcaveFile=open(subcaveFilePath,'r')
            description=subcaveFile.read().decode('iso-8859-1').encode('utf-8')

            lookupAttribs={'title':link[1], 'cave':cave}
            nonLookupAttribs={'description':description}
            newSubcave=save_carefully(Subcave,lookupAttribs=lookupAttribs,nonLookupAttribs=nonLookupAttribs)

            logging.info("Added " + unicode(newSubcave) + " to " + unicode(cave))
        except IOError:
            logging.info("Subcave import couldn't open "+subcaveFilePath)
@@ -5,20 +5,26 @@ import troggle.settings as settings
from subprocess import call, Popen, PIPE

from troggle.parsers.people import GetPersonExpeditionNameLookup
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import re
import os
from datetime import datetime

line_leg_regex = re.compile(r"[\d\-+.]+$")

def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()
    ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    if stardata["type"] == "normal":
        try:
            survexleg.tape = float(ls[stardata["tape"]])
        except ValueError:
            print("Tape misread in", survexblock.survexfile.path)
            print("Stardata:", stardata)
            print("Line:", ls)
@@ -53,14 +59,17 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
            survexleg.compass = 1000
            survexleg.clino = -90.0
        else:
            assert re.match(r"[\d\-+.]+$", lcompass), ls
            assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
            assert line_leg_regex.match(lcompass), ls
            assert line_leg_regex.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave

    # only save proper legs
    survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
@@ -80,96 +89,212 @@ def LoadSurvexEquate(survexblock, sline):

def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    pass

stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
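# Illustrative trace (assumed *data line, not repo code): for
# "*data normal from to length bearing gradient", the loop in the *data
# handler below builds
#     {"type": "normal", "normal": -1, "from": 0, "to": 1,
#      "tape": 2, "compass": 3, "clino": 4}
# i.e. each reading is mapped to its column index in subsequent leg lines,
# with survex's length/bearing/gradient aliased to tape/compass/clino.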
regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
regex_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)")
regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$(?i)")
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')

def RecursiveLoad(survexblock, survexfile, fin, textlines):
    iblankbegins = 0
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]

    # uncomment to print out all files during parsing
    print("Reading file: " + survexblock.survexfile.path)
    while True:
        svxline = fin.readline().decode("latin1")
        if not svxline:
            return
        textlines.append(svxline)

    # uncomment to print out all files during parsing
    print(" - Reading file: " + survexblock.survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        # print('Match')
        # print(pos_cave)
        cave = models.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
    svxlines = ''
    svxlines = fin.read().splitlines()
    # print('Cave - preloop ' + str(survexfile.cave))
    # print(survexblock)
    for svxline in svxlines:
        # print(survexblock)
        # print(svxline)
        # if not svxline:
        #     print(' - Not survex')
        #     return
        # textlines.append(svxline)

        lineno += 1
        # print(' - Line: %d' % lineno)

        # break the line at the comment
        sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
        sline, comment = regex_comment.match(svxline.strip()).groups()
        # detect ref line pointing to the scans directory
        mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
        mref = comment and regex_ref.match(comment)
        if mref:
            refscan = "%s#%s" % (mref.group(1), mref.group(2))
            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
                survexblock.save()
            continue
        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match regex_qm
        # ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and regex_qm.match(comment)
        if qmline:
            print(qmline.groups())
            #(u'1', u'B', u'miraclemaze', u'1.17', u'-', None, u'\tcontinuation of rift')
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            print('Cave - %s' % survexfile.cave)
            print('QM no %d' % int(qm_no))
            print('QM grade %s' % qm_grade)
            print('QM section %s' % qm_from_section)
            print('QM station %s' % qm_from_station)
            print('QM res section %s' % qm_resolve_section)
            print('QM res station %s' % qm_resolve_station)
            print('QM notes %s' % qm_notes)

            # If the QM isn't resolved (has no resolving station) then load it
            if not qm_resolve_section or qm_resolve_section != '-' or qm_resolve_section != 'None':
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunk, named)
                if len(from_section) > 0:
                    print(from_section[0])
                    from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        print(from_station[0])
                        qm = models.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                print('QM found but resolved')
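            # Quick sanity check of regex_qm against the spec comment above
            # (worked by hand, not executed in the repo):
            #   regex_qm.match("QM1 a hobnob_hallway_2.42 - junction of keyhole passage").groups()
            #   -> ('1', 'a', 'hobnob_hallway_2', '42', '-', None, None,
            #       'junction of keyhole passage')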
        #print('Cave -sline ' + str(cave))
        if not sline:
            continue

        # detect the star command
        mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
        mstar = regex_star.match(sline)
        if not mstar:
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment)
                # print('Cave ' + str(survexfile.cave))
                # print(survexblock)
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
                # print(' - From: ')
                #print(stardata)
                pass
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
                # print(' - Passage: ')
            #Missing "station" in stardata.
            continue
        # detect the star command
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if re.match("include$(?i)", cmd):
            includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
            includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
            print(' - Include file found including - ' + includepath)
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(pos_cave)
                cave = models.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print('No match for %s' % includepath)
            includesurvexfile = models.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)
elif re.match("begin$(?i)", cmd):
|
||||
if line:
|
||||
if line:
|
||||
newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
|
||||
# Try to find the cave in the DB if not use the string as before
|
||||
path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
|
||||
if path_match:
|
||||
pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
|
||||
print(pos_cave)
|
||||
cave = models.getCaveByReference(pos_cave)
|
||||
if cave:
|
||||
survexfile.cave = cave
|
||||
else:
|
||||
print('No match for %s' % newsvxpath)
|
||||
|
||||
name = line.lower()
|
||||
survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexblock.cave, survexfile=survexfile, totalleglength=0.0)
|
||||
print(' - Begin found for: ' + name)
|
||||
# print('Block cave: ' + str(survexfile.cave))
|
||||
survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
|
||||
survexblockdown.save()
|
||||
survexblock.save()
|
||||
survexblock = survexblockdown
|
||||
# print(survexblockdown)
|
||||
textlinesdown = [ ]
|
||||
RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
|
||||
else:
|
||||
iblankbegins += 1
|
||||
|
||||
|
||||
elif re.match("end$(?i)", cmd):
|
||||
if iblankbegins:
|
||||
iblankbegins -= 1
|
||||
else:
|
||||
survexblock.text = "".join(textlines)
|
||||
survexblock.save()
|
||||
# print(' - End found: ')
|
||||
endstamp = datetime.now()
|
||||
timetaken = endstamp - stamp
|
||||
# print(' - Time to process: ' + str(timetaken))
|
||||
return
|
||||
|
||||
|
||||
elif re.match("date$(?i)", cmd):
|
||||
if len(line) == 10:
|
||||
survexblock.date = re.sub(r"\.", "-", line)
|
||||
#print(' - Date found: ' + line)
|
||||
survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
|
||||
expeditions = models.Expedition.objects.filter(year=line[:4])
|
||||
if expeditions:
|
||||
assert len(expeditions) == 1
|
||||
survexblock.expedition = expeditions[0]
|
||||
survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
|
||||
survexblock.save()
|
||||
|
||||
|
||||
elif re.match("team$(?i)", cmd):
|
||||
mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
|
||||
pass
|
||||
# print(' - Team found: ')
|
||||
mteammember = regex_team.match(line)
|
||||
if mteammember:
|
||||
for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
|
||||
for tm in regex_team_member.split(mteammember.group(2)):
|
||||
if tm:
|
||||
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
|
||||
if (personexpedition, tm) not in teammembers:
|
||||
@@ -179,18 +304,23 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
|
||||
if personexpedition:
|
||||
personrole.person=personexpedition.person
|
||||
personrole.save()
|
||||
|
||||
|
||||
elif cmd == "title":
|
||||
survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexblock.cave)
|
||||
#print(' - Title found: ')
|
||||
survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
|
||||
survextitle.save()
|
||||
|
||||
pass
|
||||
|
||||
elif cmd == "require":
|
||||
# should we check survex version available for processing?
|
||||
pass
|
||||
|
||||
elif cmd == "data":
|
||||
#print(' - Data found: ')
|
||||
ls = line.lower().split()
|
||||
stardata = { "type":ls[0] }
|
||||
#print(' - Star data: ', stardata)
|
||||
#print(ls)
|
||||
for i in range(0, len(ls)):
|
||||
stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
|
||||
if ls[0] in ["normal", "cartesian", "nosurvey"]:
|
||||
@@ -199,40 +329,23 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line

        elif cmd == "equate":
            #print(' - Equate found: ')
            LoadSurvexEquate(survexblock, line)

        elif cmd == "fix":
            #print(' - Fix found: ')
            survexblock.MakeSurvexStation(line.split()[0])

        else:
            #print(' - Stuff')
            if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
                           "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
                print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
def ReloadSurvexCave(survex_cave, area):
    print(survex_cave, area)
    cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
    print(cave)
    #cave = models.Cave.objects.get(kataster_number=survex_cave)
    cave.survexblock_set.all().delete()
    cave.survexfile_set.all().delete()
    cave.survexdirectory_set.all().delete()

    stamp = datetime.now()   # start timing; timetaken below needs this to be defined
    survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
    survexfile.save()
    survexfile.SetDirectory()

    survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()

    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(' - Time to process: ' + str(timetaken))

def LoadAllSurvexBlocks():
@@ -258,22 +371,13 @@ def LoadAllSurvexBlocks():
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.text = "".join(textlines)
    survexblockroot.save()

    #Load each cave,
    #FIXME this should be dealt with load all above
    print(" - Reloading all caves")
    caves = models.Cave.objects.all()
    for cave in caves:
        if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
            if cave.kataster_number not in ['40']:
                print("loading", cave, cave.kat_area())
                ReloadSurvexCave(cave.kataster_number, cave.kat_area())

poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
@@ -286,7 +390,7 @@ def LoadPos():
    posfile = open("%s%s.pos" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME))
    posfile.readline() #Drop header
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, name = r.groups()
            try:
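    # Illustrative .pos line (made-up coordinates, hand-checked against
    # poslineregex above):
    #   "(  12345.67,  81234.56,  1567.89 ) 1623.290.entrance"
    #   -> x="12345.67", y="81234.56", z="1567.89", name="1623.290.entrance"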
@@ -1,12 +1,7 @@
import sys, os, types, logging, stat
#sys.path.append('C:\\Expo\\expoweb')
#from troggle import *
#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
import settings
from troggle.core.models import *
from PIL import Image
#import settings
#import core.models as models
import csv
import re
import datetime
@@ -29,7 +24,7 @@ def readSurveysFromCSV():
    try: # could probably combine these two
        surveytab = open(os.path.join(settings.SURVEY_SCANS, "Surveys.csv"))
    except IOError:
        import cStringIO, urllib
        surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "/Surveys.csv").read())
    dialect=csv.Sniffer().sniff(surveytab.read())
    surveytab.seek(0,0)
@@ -42,24 +37,21 @@ def readSurveysFromCSV():
        print("There are no expeditions in the database. Please run the logbook parser.")
        sys.exit()

    logging.info("Deleting all scanned images")
    ScannedImage.objects.all().delete()

    logging.info("Deleting all survey objects")
    Survey.objects.all().delete()

    logging.info("Beginning to import surveys from "+str(os.path.join(settings.SURVEYS, "Surveys.csv"))+"\n"+"-"*60+"\n")

    for survey in surveyreader:
        # I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
        walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
        # print(walletNumberLetter.groups())
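        # Worked example of the wallet-number split (hand-checked, not repo
        # output): for a survey number of "34a",
        #   walletNumberLetter.groupdict() -> {'number': '34', 'letter': 'a'}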
        year=survey[header['Year']]

        surveyobj = Survey(
            expedition = Expedition.objects.filter(year=year)[0],
            wallet_number = walletNumberLetter.group('number'),
@@ -73,7 +65,6 @@ def readSurveysFromCSV():
            pass
        surveyobj.save()

        logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")

# dead
@@ -141,14 +132,14 @@ def parseSurveyScans(expedition, logfile=None):
        yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
        print("No folder found for " + expedition.year + " at:- " + yearPath)

# dead
def parseSurveys(logfile=None):
    try:
        readSurveysFromCSV()
    except (IOError, OSError):
        print("Survey CSV not found..")
        pass

    for expedition in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
        parseSurveyScans(expedition)
@@ -178,21 +169,21 @@ def GetListDir(sdir):

def LoadListScansFile(survexscansfolder):
    gld = [ ]

    # flatten out any directories in these book files
    for (fyf, ffyf, fisdiryf) in GetListDir(survexscansfolder.fpath):
        if fisdiryf:
            gld.extend(GetListDir(ffyf))
        else:
            gld.append((fyf, ffyf, fisdiryf))

    for (fyf, ffyf, fisdiryf) in gld:
        #assert not fisdiryf, ffyf
        if re.search(r"\.(?:png|jpg|jpeg)(?i)$", fyf):
            survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
            survexscansingle.save()

# this iterates through the scans directories (either here or on the remote server)
# and builds up the models we can access later
def LoadListScans():
@@ -203,17 +194,17 @@ def LoadListScans():
    SurvexScansFolder.objects.all().delete()

    # first do the smkhs (large kh survey scans) directory
    survexscansfoldersmkhs = SurvexScansFolder(fpath=os.path.join(settings.SURVEY_SCANS, "smkhs"), walletname="smkhs")
    if os.path.isdir(survexscansfoldersmkhs.fpath):
        survexscansfoldersmkhs.save()
        LoadListScansFile(survexscansfoldersmkhs)

    # iterate into the surveyscans directory
    for f, ff, fisdir in GetListDir(os.path.join(settings.SURVEY_SCANS, "surveyscans")):
        if not fisdir:
            continue

        # do the year folders
        if re.match(r"\d\d\d\d$", f):
            for fy, ffy, fisdiry in GetListDir(ff):
@@ -222,13 +213,13 @@ def LoadListScans():
                survexscansfolder = SurvexScansFolder(fpath=ffy, walletname=fy)
                survexscansfolder.save()
                LoadListScansFile(survexscansfolder)

        # do the non-year, non-thumbnail folders
        elif f != "thumbs":
            survexscansfolder = SurvexScansFolder(fpath=ff, walletname=f)
            survexscansfolder.save()
            LoadListScansFile(survexscansfolder)
def FindTunnelScan(tunnelfile, path):
    scansfolder, scansfile = None, None
@@ -244,12 +235,12 @@ def FindTunnelScan(tunnelfile, path):
            print(scansfilel, len(scansfilel))
            assert len(scansfilel) == 1
        scansfile = scansfilel[0]

    if scansfolder:
        tunnelfile.survexscansfolders.add(scansfolder)
    if scansfile:
        tunnelfile.survexscans.add(scansfile)

    elif path and not re.search(r"\.(?:png|jpg|jpeg)$(?i)", path):
        name = os.path.split(path)[1]
        print("ttt", tunnelfile.tunnelpath, path, name)
@@ -269,21 +260,22 @@ def SetTunnelfileInfo(tunnelfile):
    fin = open(ff)
    ttext = fin.read()
    fin.close()

    mtype = re.search("<(fontcolours|sketch)", ttext)
    assert mtype, ff
    tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
    #assert mtype, ff
    if mtype:
        tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
    tunnelfile.npaths = len(re.findall("<skpath", ttext))
    tunnelfile.save()

    # <tunnelxml tunnelversion="version2009-06-21 Matienzo" tunnelproject="ireby" tunneluser="goatchurch" tunneldate="2009-06-29 23:22:17">
    # <pcarea area_signal="frame" sfscaledown="12.282584" sfrotatedeg="-90.76982" sfxtrans="11.676667377221136" sfytrans="-15.677173422877454" sfsketch="204description/scans/plan(38).png" sfstyle="" nodeconnzsetrelative="0.0">
    for path, style in re.findall('<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"', ttext):
        FindTunnelScan(tunnelfile, path)

    # should also scan and look for survex blocks that might have been included
    # and also survex titles as well.

    tunnelfile.save()

@@ -303,6 +295,6 @@ def LoadTunnelFiles():
    elif f[-4:] == ".xml":
        tunnelfile = TunnelFile(tunnelpath=lf, tunnelname=os.path.split(f[:-4])[1])
        tunnelfile.save()

    for tunnelfile in TunnelFile.objects.all():
        SetTunnelfileInfo(tunnelfile)
@@ -27,7 +27,7 @@ from django.conf.urls import *
from profiles import views

urlpatterns = patterns('',
urlpatterns = [
    url(r'^select/$',
        views.select_profile,
        name='profiles_select_profile'),
@@ -43,4 +43,4 @@ urlpatterns = patterns('',
    url(r'^$',
        views.profile_list,
        name='profiles_profile_list'),
)
]
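# Background for the change above (general Django 1.10 migration pattern, not
# repo-specific code): django.conf.urls.patterns() was removed in Django 1.10,
# so urlpatterns must now be a plain list of url() instances:
#
#     urlpatterns = [
#         url(r'^select/$', views.select_profile, name='profiles_select_profile'),
#     ]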
@@ -14,8 +14,7 @@ try:
except ImportError: # django >= 1.7
    SiteProfileNotAvailable = type('SiteProfileNotAvailable', (Exception,), {})

from django.db.models import get_model

from django.apps import apps

def get_profile_model():
    """
@@ -28,7 +27,7 @@ def get_profile_model():
    if (not hasattr(settings, 'AUTH_PROFILE_MODULE')) or \
       (not settings.AUTH_PROFILE_MODULE):
        raise SiteProfileNotAvailable
    profile_mod = get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
    profile_mod = apps.get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
    if profile_mod is None:
        raise SiteProfileNotAvailable
    return profile_mod
settings.py
@@ -8,7 +8,6 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Django settings for troggle project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ALLOWED_HOSTS = [u'expo.survex.com']

@@ -56,6 +55,8 @@ SVX_URL = urlparse.urljoin(URL_ROOT , '/survex/')
# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623"

KAT_AREAS = ['1623', '1624', '1626', '1627']

DEFAULT_LOGBOOK_PARSER = "Parseloghtmltxt"
DEFAULT_LOGBOOK_FILE = "logbook.html"
@@ -64,30 +65,30 @@ LOGBOOK_PARSER_SETTINGS = {
    "2017": ("2017/logbook.html", "Parseloghtmltxt"),
    "2016": ("2016/logbook.html", "Parseloghtmltxt"),
    "2015": ("2015/logbook.html", "Parseloghtmltxt"),
    "2014": ("2014/logbook.html", "Parseloghtmltxt"),
    "2013": ("2013/logbook.html", "Parseloghtmltxt"),
    "2012": ("2012/logbook.html", "Parseloghtmltxt"),
    "2011": ("2011/logbook.html", "Parseloghtmltxt"),
    "2010": ("2010/logbook.html", "Parselogwikitxt"),
    "2009": ("2009/2009logbook.txt", "Parselogwikitxt"),
    "2008": ("2008/2008logbook.txt", "Parselogwikitxt"),
    "2007": ("2007/logbook.html", "Parseloghtmltxt"),
    "2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
    "2005": ("2005/logbook.html", "Parseloghtmltxt"),
    "2004": ("2004/logbook.html", "Parseloghtmltxt"),
    "2003": ("2003/logbook.html", "Parseloghtml03"),
    "2002": ("2002/logbook.html", "Parseloghtmltxt"),
    "2001": ("2001/log.htm", "Parseloghtml01"),
    "2000": ("2000/log.htm", "Parseloghtml01"),
    "1999": ("1999/log.htm", "Parseloghtml01"),
    "1998": ("1998/log.htm", "Parseloghtml01"),
    "1997": ("1997/log.htm", "Parseloghtml01"),
    "1996": ("1996/log.htm", "Parseloghtml01"),
    "1995": ("1995/log.htm", "Parseloghtml01"),
    "1994": ("1994/log.htm", "Parseloghtml01"),
    "1993": ("1993/log.htm", "Parseloghtml01"),
    "1992": ("1992/log.htm", "Parseloghtml01"),
    "1991": ("1991/log.htm", "Parseloghtml01"),
}
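# Illustrative lookup of the mapping above (the driver code here is an
# assumption; the real caller lives in the logbook parser): each year maps to
# a (logbook file, parser function name) pair, falling back to the defaults.
#
#     path, parsername = settings.LOGBOOK_PARSER_SETTINGS.get(
#         year, (settings.DEFAULT_LOGBOOK_FILE, settings.DEFAULT_LOGBOOK_PARSER))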
APPEND_SLASH = False
@@ -96,20 +97,34 @@ SMART_APPEND_SLASH = True
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'a#vaeozn0)uz_9t_%v5n#tj)m+%ace6b_0(^fj!355qki*v)j2'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.load_template_source',
)
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(PYTHON_PATH, 'templates')
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.request',
                #'core.context.troggle_context'
            ]
        },
    },
]
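# Note on the change above (standard Django >= 1.8 behaviour, stated as
# background): once TEMPLATES is defined, the legacy TEMPLATE_LOADERS and
# TEMPLATE_CONTEXT_PROCESSORS settings are ignored; loaders and context
# processors are configured inside TEMPLATES['OPTIONS'] instead.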
if django.VERSION[0] == 1 and django.VERSION[1] < 4:
    authmodule = 'django.core.context_processors.auth'
else:
    authmodule = 'django.contrib.auth.context_processors.auth'

TEMPLATE_CONTEXT_PROCESSORS = ( authmodule, "core.context.troggle_context", )

LOGIN_REDIRECT_URL = '/'

INSTALLED_APPS = (
@@ -122,14 +137,13 @@ INSTALLED_APPS = (
    'django.contrib.messages',
    'django.contrib.staticfiles',
    #'troggle.photologue',
    #'troggle.reversion',
    #'django_evolution',
    'tinymce',
    'registration',
    'troggle.profiles',
    'troggle.core',
    'troggle.flatpages',
    'troggle.imagekit',
    'imagekit',
    'django_extensions',
)

MIDDLEWARE_CLASSES = (
@@ -2,14 +2,14 @@
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"/>
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/main3.css" title="eyeCandy"/>
<link rel="alternate stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/mainplain.css" title="plain"/>
<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/dropdownNavStyle.css" />
<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/main3.css" title="eyeCandy"/>
<link rel="alternate stylesheet" type="text/css" href="{{ MEDIA_URL }}css/mainplain.css" title="plain"/>
<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/dropdownNavStyle.css" />
<title>{% block title %}Troggle{% endblock %}</title>
<!-- <script src="{{ settings.JSLIB_URL }}jquery/jquery.min.js" type="text/javascript"></script> -->
<script src="{{ settings.MEDIA_URL }}js/jquery.quicksearch.js" type="text/javascript"></script>
<script src="{{ settings.MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script src="{{ settings.MEDIA_URL }}js/jquery.dropdownPlain.js" type="text/javascript"></script>
<script src="{{ MEDIA_URL }}js/jquery.quicksearch.js" type="text/javascript"></script>
<script src="{{ MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script src="{{ MEDIA_URL }}js/jquery.dropdownPlain.js" type="text/javascript"></script>

{% block head %}{% endblock %}
</head>
@@ -64,8 +64,8 @@
<div id="related">
{% block related %}
<script language="javascript">
$('#related').remove()
/*This is a hack to stop a line appearing because of the empty div border*/
</script>
{% endblock %}
</div>
@@ -17,7 +17,7 @@ div.cv-panel {
}

div.cv-compass, div.cv-ahi {
    position: absolute;
    bottom: 95px;
    right: 5px;
    margin: 0;
@@ -31,7 +31,7 @@ div.cv-compass, div.cv-ahi {
    background-color: black;
    color: white;
}

div.cv-ahi {
    right: 95px;
}
@@ -152,7 +152,7 @@ div.linear-scale-caption {
    position: absolute;
    top: 64px;
    left: 0px;
    height: auto;
    margin-top:0;
    bottom: 44px;
    background-color: #222222;
@@ -220,7 +220,7 @@ div.linear-scale-caption {
}
#frame .tab {
    position: absolute;
    right: 0px;lass="cavedisplay"
    right: 0px;
    width: 40px;
    height: 40px;
    box-sizing: border-box;
@@ -421,7 +421,7 @@ div#scene {
    CV.UI.init( 'scene', {
        home: '/javascript/CaveView/',
        surveyDirectory: '/cave/3d/',
        terrainDirectory: '/loser/surface/terrain/'
    } );
    // load a single survey to display
@@ -516,14 +516,17 @@ div#scene {
{% if ent.entrance.exact_station %}
    <dt>Exact Station</dt><dd>{{ ent.entrance.exact_station|safe }} {{ ent.entrance.exact_location.y|safe }}, {{ ent.entrance.exact_location.x|safe }}, {{ ent.entrance.exact_location.z|safe }}m</dd>
{% endif %}
{% if ent.entrance.other_station %}
{% if ent.entrance.find_location %}
    <dt>Coordinates</dt><dd>{{ ent.entrance.find_location|safe }}</dd>
{% endif %}
{% if ent.entrance.other_station %}
    <dt>Other Station</dt><dd>{{ ent.entrance.other_station|safe }}
    {% if ent.entrance.other_description %}
        - {{ ent.entrance.other_description|safe }}
    {% endif %} {{ ent.entrance.other_location.y|safe }}, {{ ent.entrance.other_location.x|safe }}, {{ ent.entrance.other_location.z|safe }}m
    </dd>
{% endif %}
</dl>
</li>
{% endfor %}
</ul>
@@ -2,15 +2,15 @@
{% load wiki_markup %}
{% load link %}

{% block title %}Expedition {{expedition.name}}{% endblock %}
{% block editLink %}<a href={{expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}
{% block title %}Expedition {{this_expedition.name}}{% endblock %}
{% block editLink %}<a href={{this_expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}

{% block related %}
{% endblock %}

{% block content %}

<h2>{{expedition.name}}</h2>
<h2>{{this_expedition.name}}</h2>

<p><b>Other years:</b>
{% for otherexpedition in expeditions %}
@@ -29,7 +29,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<table class="expeditionpersonlist">
<tr>
<th>Caver</th>
{% for expeditionday in expedition.expeditionday_set.all %}
{% for expeditionday in this_expedition.expeditionday_set.all %}
<th>
{{expeditionday.date.day}}
</th>
@@ -63,7 +63,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<form action="" method="GET"><input type="submit" name="reload" value="Reload"></form>

<h3>Logbooks and survey trips per day</h3>
<a href="{% url "newLogBookEntry" expeditionyear=expedition.year %}">New logbook entry</a>
<a href="{% url "newLogBookEntry" expeditionyear=this_expedition.year %}">New logbook entry</a>
<table class="expeditionlogbooks">
<tr><th>Date</th><th>Logged trips</th><th>Surveys</th></tr>
{% regroup dateditems|dictsort:"date" by date as dates %}
@@ -18,8 +18,8 @@
{% if pic.is_mugshot %}
<div class="figure">
<p> <img src="{{ pic.thumbnail_image.url }}" class="thumbnail" />
<p> {{ pic.caption }}</p>
<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a> </>
<p> {{ pic.caption }} </p>
<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a>
</p>
</div>
@@ -32,7 +32,7 @@
<ul>
{% for personexpedition in person.personexpedition_set.all %}
<li> <a href="{{ personexpedition.get_absolute_url }}">{{personexpedition.expedition.year}}</a>
<span style="padding-left:{{personexpedition.persontrip_set.all|length}}0px; background-color:red"></span>
<span style="padding-left:{{ personexpedition.persontrip_set.all|length }}0px; background-color:red"></span>
{{personexpedition.persontrip_set.all|length}} trips
</li>
{% endfor %}
@@ -4,9 +4,7 @@

{% block title %} QM: {{qm|wiki_to_html_short}} {% endblock %}

{% block editLink %}| <a href={{qm.get_admin_url}}>Edit QM {{qm|wiki_to_html_short}}</a>{% endblock %}

{% block editLink %}| <a href="{{qm.get_admin_url}}/">Edit QM {{qm|wiki_to_html_short}}</a>{% endblock %}

{% block contentheader %}
<table id="cavepage">
@@ -5,7 +5,7 @@
{% block title %}CUCC Virtual Survey Binder: {{ current_expedition }}{{ current_survey }}{%endblock%}
{% block head %}

<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/nav.css" />
<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/nav.css" />

<script language="javascript">
blankColor = "rgb(153, 153, 153)"
@@ -164,7 +164,7 @@
</p>
</div>
{% endfor %}
<div class="figure"> <a href="{{ settings.URL_ROOT }}/admin/expo/scannedimage/add/"> <img src="{{ settings.URL_ROOT }}{{ settings.ADMIN_MEDIA_PREFIX }}img/admin/icon_addlink.gif" /> Add a new scanned notes page. </a> </div>
<div class="figure"> <a href="{{ URL_ROOT }}/admin/expo/scannedimage/add/"> <img src="{{ URL_ROOT }}{{ ADMIN_MEDIA_PREFIX }}img/admin/icon_addlink.gif" /> Add a new scanned notes page. </a> </div>
</div>
<br class="clearfloat" />
<div id="survexFileContent" class="behind"> survex file editor, keeping file in original structure <br />
@@ -4,7 +4,7 @@
{% block title %}{{ title }}{% endblock %}

{% block head %}
<script src="{{ settings.MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script src="{{ MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script type="text/javascript" src="{{settings.JSLIB_URL}}jquery-form/jquery.form.min.js"></script>
<script type="text/javascript" src="{{settings.JSLIB_URL}}codemirror/codemirror.min.js"></script>

@@ -34,6 +34,6 @@ add wikilinks
{% endblock content %}

{% block margins %}
<img class="leftMargin eyeCandy fadeIn" src="{{ settings.MEDIA_URL }}eieshole.jpg">
<img class="rightMargin eyeCandy fadeIn" src="{{ settings.MEDIA_URL }}goesser.jpg">
<img class="leftMargin eyeCandy fadeIn" src="{{ MEDIA_URL }}eieshole.jpg">
<img class="rightMargin eyeCandy fadeIn" src="{{ MEDIA_URL }}goesser.jpg">
{% endblock margins %}
urls.py
@@ -1,17 +1,19 @@
from django.conf.urls import *
from django.conf import settings
from django.conf.urls.static import static
from django.views.static import serve

from core.views import * # flat import
from core.views_other import *
from core.views_caves import *
from core.views_survex import *
from core.models import *
from flatpages.views import *
from django.views.generic.edit import UpdateView
from django.contrib import admin
from django.views.generic.list import ListView
admin.autodiscover()

#admin.autodiscover()

# type url probably means it's used.
@@ -20,24 +22,24 @@ admin.autodiscover()
#    <reference to python function in 'core' folder>,
#    <name optional argument for URL reversing (doesn't do much)>)

actualurlpatterns = patterns('',
actualurlpatterns = [

    url(r'^testingurl/?$' , views_caves.millenialcaves, name="testing"),

    url(r'^millenialcaves/?$', views_caves.millenialcaves, name="millenialcaves"),

    url(r'^troggle$', views_other.frontpage, name="frontpage"),
    url(r'^todo/$', views_other.todo, name="todo"),

    url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
    url(r'^people/?$', views_logbooks.personindex, name="personindex"),

    url(r'^newqmnumber/?$', views_other.ajax_QM_number, ),
    url(r'^lbo_suggestions/?$', logbook_entry_suggestions),
    #(r'^person/(?P<person_id>\d*)/?$', views_logbooks.person),
    url(r'^person/(?P<first_name>[A-Z]*[a-z\-\'&;]*)[^a-zA-Z]*(?P<last_name>[a-z\-\']*[^a-zA-Z]*[A-Z]*[a-z\-&;]*)/?', views_logbooks.person, name="person"),
    #url(r'^person/(\w+_\w+)$', views_logbooks.person, name="person"),

    url(r'^expedition/(\d+)$', views_logbooks.expedition, name="expedition"),
    url(r'^expeditions/?$', views_logbooks.ExpeditionListView.as_view(), name="expeditions"),
    url(r'^personexpedition/(?P<first_name>[A-Z]*[a-z&;]*)[^a-zA-Z]*(?P<last_name>[A-Z]*[a-zA-Z&;]*)/(?P<year>\d+)/?$', views_logbooks.personexpedition, name="personexpedition"),
@@ -52,7 +54,7 @@ actualurlpatterns = patterns('',
    url(r'^getPeople/(?P<expeditionslug>.*)', views_logbooks.get_people, name = "get_people"),
    url(r'^getLogBookEntries/(?P<expeditionslug>.*)', views_logbooks.get_logbook_entries, name = "get_logbook_entries"),

    url(r'^cave/new/$', views_caves.edit_cave, name="newcave"),
    url(r'^cave/(?P<cave_id>[^/]+)/?$', views_caves.cave, name="cave"),
    url(r'^caveslug/([^/]+)/?$', views_caves.caveSlug, name="caveSlug"),
@@ -74,97 +76,88 @@ actualurlpatterns = patterns('',
|
||||
url(r'^cave/(?P<slug>[^/]+)/edit/$', views_caves.edit_cave, name="edit_cave"),
|
||||
#(r'^cavesearch', caveSearch),
|
||||
|
||||
|
||||
url(r'^cave/(?P<cave_id>[^/]+)/(?P<year>\d\d\d\d)-(?P<qm_id>\d*)(?P<grade>[ABCDX]?)?$', views_caves.qm, name="qm"),
|
||||
|
||||
url(r'^prospecting_guide/$', views_caves.prospecting),
|
||||
# url(r'^cave/(?P<cave_id>[^/]+)/(?P<year>\d\d\d\d)-(?P<qm_id>\d*)(?P<grade>[ABCDX]?)?$', views_caves.qm, name="qm"),
|
||||
url(r'^cave/qm/(?P<qm_id>[^/]+)?$', views_caves.qm, name="qm"),
|
||||
|
||||
url(r'^prospecting_guide/$', views_caves.prospecting),
|
||||
|
||||
url(r'^logbooksearch/(.*)/?$', views_logbooks.logbookSearch),
|
||||
|
||||
|
||||
url(r'^statistics/?$', views_other.stats, name="stats"),
|
||||
|
||||
|
||||
url(r'^survey/?$', surveyindex, name="survey"),
|
||||
url(r'^survey/(?P<year>\d\d\d\d)\#(?P<wallet_number>\d*)$', survey, name="survey"),
|
||||
|
||||
url(r'^controlpanel/?$', views_other.controlPanel, name="controlpanel"),
|
||||
url(r'^CAVETAB2\.CSV/?$', views_other.downloadCavetab, name="downloadcavetab"),
|
||||
url(r'^CAVETAB2\.CSV/?$', views_other.downloadCavetab, name="downloadcavetab"),
|
||||
url(r'^Surveys\.csv/?$', views_other.downloadSurveys, name="downloadsurveys"),
|
||||
url(r'^logbook(?P<year>\d\d\d\d)\.(?P<extension>.*)/?$',views_other.downloadLogbook),
|
||||
url(r'^logbook/?$',views_other.downloadLogbook, name="downloadlogbook"),
|
||||
url(r'^cave/(?P<cave_id>[^/]+)/qm\.csv/?$', views_other.downloadQMs, name="downloadqms"),
|
||||
(r'^downloadqms$', views_other.downloadQMs),
|
||||
|
||||
url(r'^cave/(?P<cave_id>[^/]+)/qm\.csv/?$', views_other.downloadQMs, name="downloadqms"),
|
||||
url(r'^downloadqms$', views_other.downloadQMs),
|
||||
|
||||
url(r'^eyecandy$', views_other.eyecandy),
|
||||
|
||||
(r'^admin/doc/?', include('django.contrib.admindocs.urls')),
|
||||
url(r'^admin/doc/?', include('django.contrib.admindocs.urls')),
|
||||
#url(r'^admin/(.*)', admin.site.get_urls, name="admin"),
|
||||
(r'^admin/', include(admin.site.urls)),
|
||||
|
||||
url(r'^admin/', include(admin.site.urls)),
|
||||
|
||||
# don't know why this needs troggle/ in here. nice to get it out
|
||||
url(r'^troggle/media-admin/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
|
||||
# url(r'^troggle/media-admin/(?P<path>.*)$', static, {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
|
||||
|
||||
|
||||
(r'^accounts/', include('registration.backends.default.urls')),
|
||||
(r'^profiles/', include('profiles.urls')),
|
||||
url(r'^accounts/', include('registration.backends.default.urls')),
|
||||
url(r'^profiles/', include('profiles.urls')),
|
||||
|
||||
|
||||
|
||||
# (r'^personform/(.*)$', personForm),
|
||||
|
||||
(r'^site_media/(?P<path>.*)$', 'django.views.static.serve',
|
||||
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
|
||||
(r'^tinymce_media/(?P<path>.*)$', 'django.views.static.serve',
|
||||
{'document_root': settings.TINY_MCE_MEDIA_ROOT, 'show_indexes': True}),
|
||||
|
||||
|
||||
url(r'^site_media/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
|
||||
|
||||
url(r'^survexblock/(.+)$', views_caves.survexblock, name="survexblock"),
|
||||
url(r'^survexfile/(?P<survex_file>.*?)\.svx$', views_survex.svx, name="svx"),
|
||||
url(r'^survexfile/(?P<survex_file>.*?)\.3d$', views_survex.threed, name="threed"),
|
||||
url(r'^survexfile/(?P<survex_file>.*?)\.log$', views_survex.svxraw),
|
||||
url(r'^survexfile/(?P<survex_file>.*?)\.err$', views_survex.err),
|
||||
|
||||
|
||||
|
||||
|
||||
url(r'^survexfile/caves/$', views_survex.survexcaveslist, name="survexcaveslist"),
|
||||
url(r'^survexfile/caves/(?P<survex_cave>.*)$', views_survex.survexcavesingle, name="survexcavessingle"),
|
||||
url(r'^survexfileraw/(?P<survex_file>.*?)\.svx$', views_survex.svxraw, name="svxraw"),
|
||||
|
||||
|
||||
(r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
|
||||
(r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
|
||||
|
||||
|
||||
url(r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
|
||||
url(r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
|
||||
#(r'^survey_files/upload/(?P<path>.*)$', view_surveys.upload),
|
||||
|
||||
|
||||
|
||||
#(r'^survey_scans/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.SURVEY_SCANS, 'show_indexes':True}),
|
||||
url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
|
||||
url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
|
||||
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg|jpeg))$',
|
||||
view_surveys.surveyscansingle, name="surveyscansingle"),
|
||||
|
||||
url(r'^tunneldata/$', view_surveys.tunneldata, name="tunneldata"),
|
||||
url(r'^tunneldataraw/(?P<path>.+?\.xml)$', view_surveys.tunnelfile, name="tunnelfile"),
|
||||
url(r'^tunneldataraw/(?P<path>.+?\.xml)/upload$',view_surveys.tunnelfileupload, name="tunnelfileupload"),
|
||||
|
||||
#url(r'^tunneldatainfo/(?P<path>.+?\.xml)$', view_surveys.tunnelfileinfo, name="tunnelfileinfo"),
|
||||
|
||||
(r'^photos/(?P<path>.*)$', 'django.views.static.serve',
|
||||
{'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
|
||||
|
||||
url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg|jpeg))$',
    view_surveys.surveyscansingle, name="surveyscansingle"),

url(r'^tunneldata/$', view_surveys.tunneldata, name="tunneldata"),
url(r'^tunneldataraw/(?P<path>.+?\.xml)$', view_surveys.tunnelfile, name="tunnelfile"),
url(r'^tunneldataraw/(?P<path>.+?\.xml)/upload$', view_surveys.tunnelfileupload, name="tunnelfileupload"),

#url(r'^tunneldatainfo/(?P<path>.+?\.xml)$', view_surveys.tunnelfileinfo, name="tunnelfileinfo"),

# url(r'^photos/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),

url(r'^prospecting/(?P<name>[^.]+).png$', prospecting_image, name="prospecting_image"),

# (r'^gallery/(?P<path>.*)$', 'django.views.static.serve',
#    {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
# (r'^gallery/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
# for those silly ideas
url(r'^experimental.*$', views_logbooks.experimental, name="experimental"),

#url(r'^trip_report/?$', views_other.tripreport, name="trip_report")

url(r'^(.*)_edit$', 'flatpages.views.editflatpage', name="editflatpage"),
url(r'^(.*)$', 'flatpages.views.flatpage', name="flatpage"),
)
url(r'^(.*)_edit$', editflatpage, name="editflatpage"),
url(r'^(.*)$', flatpage, name="flatpage"),
]
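The two flatpage patterns are catch-alls, so they need to stay last in the
list: anything placed after url(r'^(.*)$', flatpage) would be unreachable.
Because most patterns above carry a name, other code can resolve them with
reverse(); a small illustrative call (the path value here is made up):

    from django.urls import reverse

    # resolves to /tunneldataraw/somecave/plan.xml
    reverse("tunnelfile", kwargs={"path": "somecave/plan.xml"})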
# Allow prefix to all urls
urlpatterns = patterns('',
    ('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
)
urlpatterns = [
    url('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
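Taken together, these urls.py changes are the standard Django 1.10 URLconf
migration: the old patterns('', ...) call and dotted-string view references
are replaced by a plain list of url() entries pointing at imported view
callables, with static files served via django.conf.urls.static.static().
A minimal sketch of the same pattern, using a hypothetical app and view
rather than troggle's real ones:

    from django.conf import settings
    from django.conf.urls import include, url
    from django.conf.urls.static import static
    from django.views.static import serve

    from myapp import views  # hypothetical app

    actualurlpatterns = [
        # serve user-uploaded media in development
        url(r'^site_media/(?P<path>.*)$', serve,
            {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
        url(r'^experimental$', views.experimental, name="experimental"),
    ]

    urlpatterns = [
        url('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
    ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)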
24 utils.py
@@ -23,12 +23,12 @@ def randomLogbookSentence():
    #Choose again if there are no sentences (this happens if it is a placeholder entry)
    while len(re.findall('[A-Z].*?\.',randSent['entry'].text))==0:
        randSent['entry']=LogbookEntry.objects.order_by('?')[0]

    #Choose a random sentence from that entry. Store the sentence as randSent['sentence'], and the number of that sentence in the entry as randSent['number']
    sentenceList=re.findall('[A-Z].*?\.',randSent['entry'].text)
    randSent['number']=random.randrange(0,len(sentenceList))
    randSent['sentence']=sentenceList[randSent['number']]

    return randSent
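For context, randomLogbookSentence() hands back a dict with 'entry', 'number'
and 'sentence' keys; a hypothetical caller (not part of this diff) might do:

    rand = randomLogbookSentence()
    print(rand['sentence'])        # the chosen sentence
    print(rand['entry'].title)     # the LogbookEntry it was drawn from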
@@ -37,29 +37,29 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
    -if instance does not exist in DB: add instance to DB, return (new instance, True)
    -if instance exists in DB and was modified using Troggle: do nothing, return (existing instance, False)
    -if instance exists in DB and was not modified using Troggle: overwrite instance, return (instance, False)

    The checking is accomplished using Django's get_or_create and the new_since_parsing boolean field
    defined in core.models.TroggleModel.
    """

    instance, created=objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)

    if not created and not instance.new_since_parsing:
        for k, v in nonLookupAttribs.items(): #overwrite the existing attributes from the logbook text (except date and title)
        for k, v in list(nonLookupAttribs.items()): #overwrite the existing attributes from the logbook text (except date and title)
            setattr(instance, k, v)
        instance.save()

    if created:
        logging.info(str(instance) + ' was just added to the database for the first time. \n')

    if not created and instance.new_since_parsing:
        logging.info(str(instance) + " has been modified using Troggle, so the current script left it as is. \n")

    if not created and not instance.new_since_parsing:
        logging.info(str(instance) + " existed in the database unchanged since last parse. It was overwritten by the current script. \n")
    return (instance, created)
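A minimal usage sketch for save_carefully(), as a parser might call it (the
model fields shown are assumptions for illustration):

    lookupAttribs = {'date': date, 'title': title}           # identify the entry
    nonLookupAttribs = {'text': text, 'expedition': expo}    # parsed, overwritable data
    entry, created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)
    if not created and entry.new_since_parsing:
        pass  # hand-edited via Troggle's admin, so the parser left it alone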
re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)
@@ -80,7 +80,7 @@ def get_single_match(regex, text):
def href_to_wikilinks(matchobj):
    """
    Given an html link, checks for possible valid wikilinks.

    Returns the first valid wikilink. Valid means the target
    object actually exists.
    """
@@ -91,7 +91,7 @@ def href_to_wikilinks(matchobj):
    return matchobj.group()
    #except:
        #print 'fail'
re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
           (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),
@@ -107,12 +107,12 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"), #assumes that all links with target ids are cave descriptions. Not great.
           (re.compile(r"\[\<a\s+href=['\"][^'\"]*['\"]\s+id=['\"][^'\"]*['\"]\s*\>([^\s]*).*?\</a\>\]", re.DOTALL), r"[[qm:\1]]"),
           (re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>'), href_to_wikilinks),
          ]
def html_to_wiki(text, codec = "utf-8"):
|
||||
if type(text) == str:
|
||||
text = unicode(text, codec)
|
||||
text = str(text, codec)
|
||||
text = re.sub("</p>", r"", text)
|
||||
text = re.sub("<p>$", r"", text)
|
||||
text = re.sub("<p>", r"\n\n", text)