Mirror of https://expo.survex.com/repositories/troggle/.git, synced 2025-12-16 22:47:03 +00:00

Compare commits: Faster-sur...django-1.1 (9 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 43dfe946b6 |  |
|  | 656ddcfe93 |  |
|  | 505bc48331 |  |
|  | 92b273e45f |  |
|  | 978270b152 |  |
|  | 291e3baabf |  |
|  | eb5406f325 |  |
|  | de22b071b0 |  |
|  | 08a41941f9 |  |
.hgignore (Normal file, 16 lines)
@@ -0,0 +1,16 @@
+# use glob syntax
+syntax: glob
+
+*.pyc
+db*
+localsettings.py
+*~
+parsing_log.txt
+troggle
+troggle_log.txt
+.idea/*
+*.orig
+media/images/*
+.vscode/*
+.swp
+imagekit-off/
@@ -7,7 +7,7 @@ Troggle setup
 
 Python, Django, and Database setup
 -----------------------------------
-Troggle requires Django 1.4 or greater, and any version of Python that works with it.
+Troggle requires Django 1.10, and Python 2.7.
 Install Django with the following command:
 
 apt-get install python-django (on debian/ubuntu)
@@ -20,10 +20,14 @@ Troggle itself
 -------------
 Choose a directory where you will keep troggle, and git clone Troggle into it using the following command:
 
-git clone git://expo.survex.com/troggle
+git clone git://expo.survex.com/~/troggle
 or more reliably
 git clone ssh://expo@expo.survex.com/home/expo/troggle
 
+Running in development
+----------------------
+The simplest way to run Troggle in development is through the docker-compose setup
+See the docker folder in the repo for details
 
 If you want to work on the source code and be able to commit, your account will need to be added to the troggle project members list. Contact wookey at wookware dot org to get this set up.
 
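The added "Running in development" note is terse, so here is a minimal sketch of the docker-compose workflow it points to, assuming the docker folder introduced on this branch contains a standard docker-compose.yml (the path and flags below are illustrative, not taken from this diff):

    cd troggle/docker
    docker-compose up --build

docker-compose builds the images on the first run and then starts the containers; stop them again with docker-compose down. Check the compose file itself for the exact service names and the port the development server is published on.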
@@ -28,6 +28,10 @@ class SurvexBlockAdmin(TroggleModelAdmin):
     inlines = (RoleInline,)
 
 
+class SurvexStationAdmin(TroggleModelAdmin):
+    search_fields = ('name', 'block__name')
+
+
 class ScannedImageInline(admin.TabularInline):
     model = ScannedImage
     extra = 4
@@ -95,11 +99,11 @@ class PersonAdmin(TroggleModelAdmin):
 
 class QMAdmin(TroggleModelAdmin):
     search_fields = ('found_by__cave__kataster_number','number','found_by__date')
-    list_display = ('__unicode__','grade','found_by','ticked_off_by')
+    list_display = ('__unicode__','grade','found_by','ticked_off_by','nearest_station')
     list_display_links = ('__unicode__',)
-    list_editable = ('found_by','ticked_off_by','grade')
+    list_editable = ('found_by','ticked_off_by','grade','nearest_station')
     list_per_page = 20
-    raw_id_fields=('found_by','ticked_off_by')
+    raw_id_fields=('found_by','ticked_off_by','nearest_station')
 
 
 class PersonExpeditionAdmin(TroggleModelAdmin):
@@ -118,24 +122,27 @@ class EntranceAdmin(TroggleModelAdmin):
 
 admin.site.register(DPhoto)
 admin.site.register(Cave, CaveAdmin)
+admin.site.register(CaveSlug)
 admin.site.register(Area)
 #admin.site.register(OtherCaveName)
 admin.site.register(CaveAndEntrance)
 admin.site.register(NewSubCave)
 admin.site.register(CaveDescription)
 admin.site.register(Entrance, EntranceAdmin)
-admin.site.register(SurvexBlock, SurvexBlockAdmin)
 admin.site.register(Expedition)
 admin.site.register(Person,PersonAdmin)
-admin.site.register(SurvexPersonRole)
 admin.site.register(PersonExpedition,PersonExpeditionAdmin)
 admin.site.register(LogbookEntry, LogbookEntryAdmin)
 #admin.site.register(PersonTrip)
 admin.site.register(QM, QMAdmin)
 admin.site.register(Survey, SurveyAdmin)
 admin.site.register(ScannedImage)
-admin.site.register(SurvexStation)
 
+admin.site.register(SurvexDirectory)
+admin.site.register(SurvexFile)
+admin.site.register(SurvexStation, SurvexStationAdmin)
+admin.site.register(SurvexBlock)
+admin.site.register(SurvexPersonRole)
 admin.site.register(SurvexScansFolder)
 admin.site.register(SurvexScanSingle)
 
core/migrations/0001_initial.py (Normal file, 575 lines)
@@ -0,0 +1,575 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.8 on 2020-02-18 16:01
+from __future__ import unicode_literals
+
+from django.conf import settings
+import django.core.files.storage
+from django.db import migrations, models
+import django.db.models.deletion
+import troggle.core.models
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Area',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('short_name', models.CharField(max_length=100)),
+                ('name', models.CharField(blank=True, max_length=200, null=True)),
+                ('description', models.TextField(blank=True, null=True)),
+                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Area')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Cave',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('official_name', models.CharField(max_length=160)),
+                ('kataster_code', models.CharField(blank=True, max_length=20, null=True)),
+                ('kataster_number', models.CharField(blank=True, max_length=10, null=True)),
+                ('unofficial_number', models.CharField(blank=True, max_length=60, null=True)),
+                ('explorers', models.TextField(blank=True, null=True)),
+                ('underground_description', models.TextField(blank=True, null=True)),
+                ('equipment', models.TextField(blank=True, null=True)),
+                ('references', models.TextField(blank=True, null=True)),
+                ('survey', models.TextField(blank=True, null=True)),
+                ('kataster_status', models.TextField(blank=True, null=True)),
+                ('underground_centre_line', models.TextField(blank=True, null=True)),
+                ('notes', models.TextField(blank=True, null=True)),
+                ('length', models.CharField(blank=True, max_length=100, null=True)),
+                ('depth', models.CharField(blank=True, max_length=100, null=True)),
+                ('extent', models.CharField(blank=True, max_length=100, null=True)),
+                ('survex_file', models.CharField(blank=True, max_length=100, null=True)),
+                ('description_file', models.CharField(blank=True, max_length=200, null=True)),
+                ('url', models.CharField(blank=True, max_length=200, null=True)),
+                ('filename', models.CharField(max_length=200)),
+                ('area', models.ManyToManyField(blank=True, to='core.Area')),
+            ],
+            options={
+                'ordering': ('kataster_code', 'unofficial_number'),
+            },
+        ),
+        migrations.CreateModel(
+            name='CaveAndEntrance',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('entrance_letter', models.CharField(blank=True, max_length=20, null=True)),
+                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='CaveDescription',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('short_name', models.CharField(max_length=50, unique=True)),
+                ('long_name', models.CharField(blank=True, max_length=200, null=True)),
+                ('description', models.TextField(blank=True, null=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='CaveSlug',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('slug', models.SlugField(unique=True)),
+                ('primary', models.BooleanField(default=False)),
+                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='DataIssue',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('date', models.DateTimeField(auto_now_add=True)),
+                ('parser', models.CharField(blank=True, max_length=50, null=True)),
+                ('message', models.CharField(blank=True, max_length=400, null=True)),
+            ],
+            options={
+                'ordering': ['date'],
+            },
+        ),
+        migrations.CreateModel(
+            name='DPhoto',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('caption', models.CharField(blank=True, max_length=1000, null=True)),
+                ('file', models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url=b'http://127.0.0.1:8000/photos/', location=b'/expo/expoweb/photos'), upload_to=b'.')),
+                ('is_mugshot', models.BooleanField(default=False)),
+                ('lon_utm', models.FloatField(blank=True, null=True)),
+                ('lat_utm', models.FloatField(blank=True, null=True)),
+                ('contains_cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Entrance',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('name', models.CharField(blank=True, max_length=100, null=True)),
+                ('entrance_description', models.TextField(blank=True, null=True)),
+                ('explorers', models.TextField(blank=True, null=True)),
+                ('map_description', models.TextField(blank=True, null=True)),
+                ('location_description', models.TextField(blank=True, null=True)),
+                ('approach', models.TextField(blank=True, null=True)),
+                ('underground_description', models.TextField(blank=True, null=True)),
+                ('photo', models.TextField(blank=True, null=True)),
+                ('marking', models.CharField(choices=[(b'P', b'Paint'), (b'P?', b'Paint (?)'), (b'T', b'Tag'), (b'T?', b'Tag (?)'), (b'R', b'Needs Retag'), (b'S', b'Spit'), (b'S?', b'Spit (?)'), (b'U', b'Unmarked'), (b'?', b'Unknown')], max_length=2)),
+                ('marking_comment', models.TextField(blank=True, null=True)),
+                ('findability', models.CharField(blank=True, choices=[(b'?', b'To be confirmed ...'), (b'S', b'Coordinates'), (b'L', b'Lost'), (b'R', b'Refindable')], max_length=1, null=True)),
+                ('findability_description', models.TextField(blank=True, null=True)),
+                ('alt', models.TextField(blank=True, null=True)),
+                ('northing', models.TextField(blank=True, null=True)),
+                ('easting', models.TextField(blank=True, null=True)),
+                ('tag_station', models.TextField(blank=True, null=True)),
+                ('exact_station', models.TextField(blank=True, null=True)),
+                ('other_station', models.TextField(blank=True, null=True)),
+                ('other_description', models.TextField(blank=True, null=True)),
+                ('bearings', models.TextField(blank=True, null=True)),
+                ('url', models.CharField(blank=True, max_length=200, null=True)),
+                ('filename', models.CharField(max_length=200)),
+                ('cached_primary_slug', models.CharField(blank=True, max_length=200, null=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='EntranceSlug',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('slug', models.SlugField(unique=True)),
+                ('primary', models.BooleanField(default=False)),
+                ('entrance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='Expedition',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('year', models.CharField(max_length=20, unique=True)),
+                ('name', models.CharField(max_length=100)),
+            ],
+            options={
+                'ordering': ('-year',),
+                'get_latest_by': 'year',
+            },
+        ),
+        migrations.CreateModel(
+            name='ExpeditionDay',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('date', models.DateField()),
+                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
+            ],
+            options={
+                'ordering': ('date',),
+            },
+        ),
+        migrations.CreateModel(
+            name='LogbookEntry',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('date', models.DateTimeField()),
+                ('title', models.CharField(max_length=200)),
+                ('cave_slug', models.SlugField()),
+                ('place', models.CharField(blank=True, help_text=b"Only use this if you haven't chosen a cave", max_length=100, null=True)),
+                ('text', models.TextField()),
+                ('slug', models.SlugField()),
+                ('filename', models.CharField(max_length=200, null=True)),
+                ('entry_type', models.CharField(choices=[(b'wiki', b'Wiki style logbook'), (b'html', b'Html style logbook')], default=b'wiki', max_length=50, null=True)),
+                ('expedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
+                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
+            ],
+            options={
+                'ordering': ('-date',),
+                'verbose_name_plural': 'Logbook Entries',
+            },
+        ),
+        migrations.CreateModel(
+            name='NewSubCave',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('name', models.CharField(max_length=200, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='OtherCaveName',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('name', models.CharField(max_length=160)),
+                ('cave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Person',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('first_name', models.CharField(max_length=100)),
+                ('last_name', models.CharField(max_length=100)),
+                ('fullname', models.CharField(max_length=200)),
+                ('is_vfho', models.BooleanField(default=False, help_text=b'VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.')),
+                ('mug_shot', models.CharField(blank=True, max_length=100, null=True)),
+                ('blurb', models.TextField(blank=True, null=True)),
+                ('orderref', models.CharField(max_length=200)),
+                ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+            ],
+            options={
+                'ordering': ('orderref',),
+                'verbose_name_plural': 'People',
+            },
+        ),
+        migrations.CreateModel(
+            name='PersonExpedition',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('slugfield', models.SlugField(blank=True, null=True)),
+                ('is_guest', models.BooleanField(default=False)),
+                ('expo_committee_position', models.CharField(blank=True, choices=[(b'leader', b'Expo leader'), (b'medical', b'Expo medical officer'), (b'treasurer', b'Expo treasurer'), (b'sponsorship', b'Expo sponsorship coordinator'), (b'research', b'Expo research coordinator')], max_length=200, null=True)),
+                ('nickname', models.CharField(blank=True, max_length=100, null=True)),
+                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
+                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
+            ],
+            options={
+                'ordering': ('-expedition',),
+            },
+        ),
+        migrations.CreateModel(
+            name='PersonTrip',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('time_underground', models.FloatField(help_text=b'In decimal hours')),
+                ('is_logbook_entry_author', models.BooleanField(default=False)),
+                ('logbook_entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry')),
+                ('personexpedition', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonExpedition')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='QM',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('number', models.IntegerField(help_text=b'this is the sequential number in the year')),
+                ('grade', models.CharField(choices=[(b'A', b'A: Large obvious lead'), (b'B', b'B: Average lead'), (b'C', b'C: Tight unpromising lead'), (b'D', b'D: Dig'), (b'X', b'X: Unclimbable aven')], max_length=1)),
+                ('location_description', models.TextField(blank=True)),
+                ('nearest_station_description', models.CharField(blank=True, max_length=400, null=True)),
+                ('nearest_station_name', models.CharField(blank=True, max_length=200, null=True)),
+                ('area', models.CharField(blank=True, max_length=100, null=True)),
+                ('completion_description', models.TextField(blank=True, null=True)),
+                ('comment', models.TextField(blank=True, null=True)),
+                ('found_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='QMs_found', to='core.LogbookEntry')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='ScannedImage',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('file', models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url=b'/survey_scans/', location=b'/expo/expofiles/'), upload_to=troggle.core.models.get_scan_path)),
+                ('scanned_on', models.DateField(null=True)),
+                ('contents', models.CharField(choices=[(b'notes', b'notes'), (b'plan', b'plan_sketch'), (b'elevation', b'elevation_sketch')], max_length=20)),
+                ('number_in_wallet', models.IntegerField(null=True)),
+                ('lon_utm', models.FloatField(blank=True, null=True)),
+                ('lat_utm', models.FloatField(blank=True, null=True)),
+                ('scanned_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexBlock',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=100)),
+                ('text', models.TextField()),
+                ('date', models.DateTimeField(blank=True, null=True)),
+                ('begin_char', models.IntegerField()),
+                ('survexpath', models.CharField(max_length=200)),
+                ('totalleglength', models.FloatField()),
+                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+                ('expedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
+                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
+                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+            ],
+            options={
+                'ordering': ('id',),
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexDirectory',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('path', models.CharField(max_length=200)),
+                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+            options={
+                'ordering': ('path',),
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexEquate',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='SurvexFile',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('path', models.CharField(max_length=200)),
+                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+                ('survexdirectory', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexDirectory')),
+            ],
+            options={
+                'ordering': ('id',),
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexLeg',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tape', models.FloatField()),
+                ('compass', models.FloatField()),
+                ('clino', models.FloatField()),
+                ('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='SurvexPersonRole',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('nrole', models.CharField(blank=True, choices=[(b'insts', b'Instruments'), (b'dog', b'Other'), (b'notes', b'Notes'), (b'pics', b'Pictures'), (b'tape', b'Tape measure'), (b'useless', b'Useless'), (b'helper', b'Helper'), (b'disto', b'Disto'), (b'consultant', b'Consultant')], max_length=200, null=True)),
+                ('personname', models.CharField(max_length=100)),
+                ('expeditionday', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ExpeditionDay')),
+                ('person', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Person')),
+                ('personexpedition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonExpedition')),
+                ('persontrip', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PersonTrip')),
+                ('survexblock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='SurvexScansFolder',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('fpath', models.CharField(max_length=200)),
+                ('walletname', models.CharField(max_length=200)),
+            ],
+            options={
+                'ordering': ('walletname',),
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexScanSingle',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('ffile', models.CharField(max_length=200)),
+                ('name', models.CharField(max_length=200)),
+                ('survexscansfolder', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexScansFolder')),
+            ],
+            options={
+                'ordering': ('name',),
+            },
+        ),
+        migrations.CreateModel(
+            name='SurvexStation',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=100)),
+                ('x', models.FloatField(blank=True, null=True)),
+                ('y', models.FloatField(blank=True, null=True)),
+                ('z', models.FloatField(blank=True, null=True)),
+                ('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+                ('equate', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexEquate')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='SurvexTitle',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('title', models.CharField(max_length=200)),
+                ('cave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Cave')),
+                ('survexblock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+            ],
+        ),
+        migrations.CreateModel(
+            name='Survey',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('new_since_parsing', models.BooleanField(default=False, editable=False)),
+                ('non_public', models.BooleanField(default=False)),
+                ('wallet_number', models.IntegerField(blank=True, null=True)),
+                ('wallet_letter', models.CharField(blank=True, max_length=1, null=True)),
+                ('comments', models.TextField(blank=True, null=True)),
+                ('location', models.CharField(blank=True, max_length=400, null=True)),
+                ('centreline_printed_on', models.DateField(blank=True, null=True)),
+                ('tunnel_file', models.FileField(blank=True, null=True, upload_to=b'surveyXMLfiles')),
+                ('integrated_into_main_sketch_on', models.DateField(blank=True, null=True)),
+                ('rendered_image', models.ImageField(blank=True, null=True, upload_to=b'renderedSurveys')),
+                ('centreline_printed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='centreline_printed_by', to='core.Person')),
+                ('expedition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Expedition')),
+                ('integrated_into_main_sketch_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='integrated_into_main_sketch_by', to='core.Person')),
+                ('logbook_entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry')),
+                ('subcave', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.NewSubCave')),
+                ('survex_block', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexBlock')),
+                ('tunnel_main_sketch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Survey')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='TunnelFile',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tunnelpath', models.CharField(max_length=200)),
+                ('tunnelname', models.CharField(max_length=200)),
+                ('bfontcolours', models.BooleanField(default=False)),
+                ('filesize', models.IntegerField(default=0)),
+                ('npaths', models.IntegerField(default=0)),
+                ('survexblocks', models.ManyToManyField(to='core.SurvexBlock')),
+                ('survexscans', models.ManyToManyField(to='core.SurvexScanSingle')),
+                ('survexscansfolders', models.ManyToManyField(to='core.SurvexScansFolder')),
+                ('survextitles', models.ManyToManyField(to='core.SurvexTitle')),
+                ('tunnelcontains', models.ManyToManyField(to='core.TunnelFile')),
+            ],
+            options={
+                'ordering': ('tunnelpath',),
+            },
+        ),
+        migrations.AddField(
+            model_name='survexleg',
+            name='stationfrom',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stationfrom', to='core.SurvexStation'),
+        ),
+        migrations.AddField(
+            model_name='survexleg',
+            name='stationto',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stationto', to='core.SurvexStation'),
+        ),
+        migrations.AddField(
+            model_name='survexdirectory',
+            name='primarysurvexfile',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='primarysurvexfile', to='core.SurvexFile'),
+        ),
+        migrations.AddField(
+            model_name='survexblock',
+            name='survexfile',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexFile'),
+        ),
+        migrations.AddField(
+            model_name='survexblock',
+            name='survexscansfolder',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexScansFolder'),
+        ),
+        migrations.AddField(
+            model_name='scannedimage',
+            name='survey',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Survey'),
+        ),
+        migrations.AddField(
+            model_name='qm',
+            name='nearest_station',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.SurvexStation'),
+        ),
+        migrations.AddField(
+            model_name='qm',
+            name='ticked_off_by',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='QMs_ticked_off', to='core.LogbookEntry'),
+        ),
+        migrations.AddField(
+            model_name='dphoto',
+            name='contains_entrance',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='photo_file', to='core.Entrance'),
+        ),
+        migrations.AddField(
+            model_name='dphoto',
+            name='contains_logbookentry',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.LogbookEntry'),
+        ),
+        migrations.AddField(
+            model_name='dphoto',
+            name='contains_person',
+            field=models.ManyToManyField(blank=True, to='core.Person'),
+        ),
+        migrations.AddField(
+            model_name='dphoto',
+            name='nearest_QM',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.QM'),
+        ),
+        migrations.AddField(
+            model_name='cavedescription',
+            name='linked_entrances',
+            field=models.ManyToManyField(blank=True, to='core.Entrance'),
+        ),
+        migrations.AddField(
+            model_name='cavedescription',
+            name='linked_qms',
+            field=models.ManyToManyField(blank=True, to='core.QM'),
+        ),
+        migrations.AddField(
+            model_name='cavedescription',
+            name='linked_subcaves',
+            field=models.ManyToManyField(blank=True, to='core.NewSubCave'),
+        ),
+        migrations.AddField(
+            model_name='caveandentrance',
+            name='entrance',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance'),
+        ),
+    ]
core/migrations/__init__.py (Normal file, 0 lines)
@@ -10,14 +10,13 @@ from django.db.models import Min, Max
 from django.conf import settings
 from decimal import Decimal, getcontext
 from django.core.urlresolvers import reverse
-from imagekit.models import ImageModel
+from imagekit.models import ProcessedImageField #ImageModel
 from django.template import Context, loader
 import settings
 getcontext().prec=2 #use 2 significant figures for decimal calculations
 
 from troggle.core.models_survex import *
 
-
 def get_related_by_wikilinks(wiki_text):
     found=re.findall(settings.QM_PATTERN,wiki_text)
     res=[]
@@ -98,11 +97,11 @@ class Expedition(TroggleModel):
         return res
 
     def day_min(self):
-        res = self.expeditionday_set.all()
+        res = self.Expeditionday_set.all()
         return res and res[0] or None
 
     def day_max(self):
-        res = self.expeditionday_set.all()
+        res = self.Expeditionday_set.all()
         return res and res[len(res) - 1] or None
 
 
@@ -114,9 +113,12 @@ class ExpeditionDay(TroggleModel):
         ordering = ('date',)
 
     def GetPersonTrip(self, personexpedition):
-        personexpeditions = self.persontrip_set.filter(expeditionday=self)
+        personexpeditions = self.Persontrip_set.filter(expeditionday=self)
         return personexpeditions and personexpeditions[0] or None
 
+    def __unicode__(self):
+        return str(self.expedition) + ' ' + str(self.date)
+
 #
 # single Person, can go on many years
 #
@@ -153,7 +155,7 @@ class Person(TroggleModel):
 
         for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
-                print(personexpedition.expedition.year)
+                # print(personexpedition.expedition.year)
                 notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
         return notability
 
@@ -248,7 +250,7 @@ class LogbookEntry(TroggleModel):
         ("html", "Html style logbook")
     )
 
-    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
+    date = models.DateTimeField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
     expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
     expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
     title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
@@ -370,6 +372,7 @@ class CaveAndEntrance(models.Model):
     cave = models.ForeignKey('Cave')
     entrance = models.ForeignKey('Entrance')
     entrance_letter = models.CharField(max_length=20,blank=True,null=True)
+
     def __unicode__(self):
         return unicode(self.cave) + unicode(self.entrance_letter)
 
@@ -378,11 +381,13 @@ class CaveSlug(models.Model):
     slug = models.SlugField(max_length=50, unique = True)
     primary = models.BooleanField(default=False)
 
+    def __unicode__(self):
+        return self.slug
 
 class Cave(TroggleModel):
     # too much here perhaps,
     official_name = models.CharField(max_length=160)
-    area = models.ManyToManyField(Area, blank=True, null=True)
+    area = models.ManyToManyField(Area, blank=True)
     kataster_code = models.CharField(max_length=20,blank=True,null=True)
     kataster_number = models.CharField(max_length=10,blank=True, null=True)
     unofficial_number = models.CharField(max_length=60,blank=True, null=True)
@@ -460,7 +465,7 @@ class Cave(TroggleModel):
         return unicode(self.slug())
 
     def get_QMs(self):
-        return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())
+        return QM.objects.filter(nearest_station__block__cave__caveslug=self.caveslug_set.all())
 
     def new_QM_number(self, year=datetime.date.today().year):
         """Given a cave and the current year, returns the next QM number."""
@@ -601,31 +606,35 @@ class Entrance(TroggleModel):
 
     def exact_location(self):
         return SurvexStation.objects.lookup(self.exact_station)
 
     def other_location(self):
         return SurvexStation.objects.lookup(self.other_station)
 
+
     def find_location(self):
         r = {'': 'To be entered ',
             '?': 'To be confirmed:',
            'S': '',
            'L': 'Lost:',
            'R': 'Refindable:'}[self.findability]
         if self.tag_station:
             try:
-                s = SurvexStation.objects.lookup(self.tag_station)
+                s = SurvexStation.objects.filter(name=self.tag_station)[:1]
+                s = s[0]
                 return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
             except:
                 return r + "%s Tag Station not in dataset" % self.tag_station
         if self.exact_station:
             try:
-                s = SurvexStation.objects.lookup(self.exact_station)
+                s = SurvexStation.objects.filter(name=self.exact_station)[:1]
+                s = s[0]
                 return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
             except:
                 return r + "%s Exact Station not in dataset" % self.tag_station
         if self.other_station:
             try:
-                s = SurvexStation.objects.lookup(self.other_station)
+                s = SurvexStation.objects.filter(name=self.other_station)[:1]
+                s = s[0]
                 return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
             except:
                 return r + "%s Other Station not in dataset" % self.tag_station
@@ -708,9 +717,9 @@ class CaveDescription(TroggleModel):
     short_name = models.CharField(max_length=50, unique = True)
     long_name = models.CharField(max_length=200, blank=True, null=True)
     description = models.TextField(blank=True,null=True)
-    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
-    linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
-    linked_qms = models.ManyToManyField("QM", blank=True,null=True)
+    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True)
+    linked_entrances = models.ManyToManyField("Entrance", blank=True)
+    linked_qms = models.ManyToManyField("QM", blank=True)
 
     def __unicode__(self):
         if self.long_name:
@@ -737,20 +746,20 @@ class NewSubCave(TroggleModel):
         return unicode(self.name)
 
 class QM(TroggleModel):
-    #based on qm.csv in trunk/expoweb/1623/204 which has the fields:
-    #"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
+    # based on qm.csv in trunk/expoweb/1623/204 which has the fields:
+    # "Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
     found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
     ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)
-    #cave = models.ForeignKey(Cave)
-    #expedition = models.ForeignKey(Expedition)
+    # cave = models.ForeignKey(Cave)
+    # expedition = models.ForeignKey(Expedition)
 
     number = models.IntegerField(help_text="this is the sequential number in the year", )
     GRADE_CHOICES=(
         ('A', 'A: Large obvious lead'),
         ('B', 'B: Average lead'),
         ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
    )
     grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
     location_description = models.TextField(blank=True)
@@ -765,11 +774,19 @@ class QM(TroggleModel):
         return u"%s %s" % (self.code(), self.grade)
 
     def code(self):
-        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)
+        if self.found_by:
+            # Old style QMs where found_by is a logbook entry
+            return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)
+        elif self.nearest_station:
+            # New style QMs where QMs are stored in SVX files and nearest station is a forigin key
+            return u"%s-%s-%s" % (self.nearest_station.block.name, self.nearest_station.name, self.number)
+        else:
+            # Just give up!!
+            return u"%s" % (self.number)
 
     def get_absolute_url(self):
         #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' %self.number
-        return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.found_by.cave.kataster_number,'year':self.found_by.date.year,'qm_id':self.number,'grade':self.grade}))
+        return urlparse.urljoin(settings.URL_ROOT, reverse('qm',kwargs={'qm_id':self.id}))
 
     def get_next_by_id(self):
         return QM.objects.get(id=self.id+1)
@@ -784,7 +801,7 @@ photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=set
 class DPhoto(TroggleImageModel):
     caption = models.CharField(max_length=1000,blank=True,null=True)
     contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
-    contains_person = models.ManyToManyField(Person,blank=True,null=True)
+    contains_person = models.ManyToManyField(Person,blank=True)
     file = models.ImageField(storage=photoFileStorage, upload_to='.',)
     is_mugshot = models.BooleanField(default=False)
     contains_cave = models.ForeignKey(Cave,blank=True,null=True)
@@ -858,8 +875,9 @@ class Survey(TroggleModel):
     integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
     integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
     rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)
+
     def __unicode__(self):
-        return self.expedition.year+"#"+"%02d" % int(self.wallet_number)
+        return self.expedition.year+"#" + "%s%02d" % (self.wallet_letter, int(self.wallet_number))
 
     def notes(self):
         return self.scannedimage_set.filter(contents='notes')
@@ -19,8 +19,11 @@ class SurvexDirectory(models.Model):
|
|||||||
primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
|
primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
|
||||||
# could also include files in directory but not referenced
|
# could also include files in directory but not referenced
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.path
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('id',)
|
ordering = ('path',)
|
||||||
|
|
||||||
class SurvexFile(models.Model):
|
class SurvexFile(models.Model):
|
||||||
path = models.CharField(max_length=200)
|
path = models.CharField(max_length=200)
|
||||||
@@ -30,6 +33,9 @@ class SurvexFile(models.Model):
|
|||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('id',)
|
ordering = ('id',)
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.path + '.svx' or 'no file'
|
||||||
|
|
||||||
def exists(self):
|
def exists(self):
|
||||||
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
|
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
|
||||||
return os.path.isfile(fname)
|
return os.path.isfile(fname)
|
||||||
@@ -41,6 +47,8 @@ class SurvexFile(models.Model):
|
|||||||
def SetDirectory(self):
|
def SetDirectory(self):
|
||||||
dirpath = os.path.split(self.path)[0]
|
dirpath = os.path.split(self.path)[0]
|
||||||
survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
|
survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
|
||||||
|
# if self.cave is '' or self.cave is None:
|
||||||
|
# print('No cave set for survex dir %s' % self.path)
|
||||||
if survexdirectorylist:
|
if survexdirectorylist:
|
||||||
self.survexdirectory = survexdirectorylist[0]
|
self.survexdirectory = survexdirectorylist[0]
|
||||||
else:
|
else:
|
||||||
@@ -67,6 +75,12 @@ class SurvexStation(models.Model):
|
|||||||
y = models.FloatField(blank=True, null=True)
|
y = models.FloatField(blank=True, null=True)
|
||||||
z = models.FloatField(blank=True, null=True)
|
z = models.FloatField(blank=True, null=True)
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
if self.block.cave:
|
||||||
|
# If we haven't got a cave we can't have a slug, saves a nonetype return
|
||||||
|
return self.block.cave.slug() + '/' + self.block.name + '/' + self.name or 'No station name'
|
||||||
|
else:
|
||||||
|
return str(self.block.cave) + '/' + self.block.name + '/' + self.name or 'No station name'
|
||||||
def path(self):
|
def path(self):
|
||||||
r = self.name
|
r = self.name
|
||||||
b = self.block
|
b = self.block
|
||||||
@@ -109,7 +123,7 @@ class SurvexBlock(models.Model):
|
|||||||
text = models.TextField()
|
text = models.TextField()
|
||||||
cave = models.ForeignKey('Cave', blank=True, null=True)
|
cave = models.ForeignKey('Cave', blank=True, null=True)
|
||||||
|
|
||||||
date = models.DateField(blank=True, null=True)
|
date = models.DateTimeField(blank=True, null=True)
|
||||||
expeditionday = models.ForeignKey("ExpeditionDay", null=True)
|
expeditionday = models.ForeignKey("ExpeditionDay", null=True)
|
||||||
expedition = models.ForeignKey('Expedition', blank=True, null=True)
|
expedition = models.ForeignKey('Expedition', blank=True, null=True)
|
||||||
|
|
||||||
@@ -177,7 +191,7 @@ ROLE_CHOICES = (
|
|||||||
class SurvexPersonRole(models.Model):
|
class SurvexPersonRole(models.Model):
|
||||||
survexblock = models.ForeignKey('SurvexBlock')
|
survexblock = models.ForeignKey('SurvexBlock')
|
||||||
nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
|
nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
|
||||||
# increasing levels of precision
|
# increasing levels of precision
|
||||||
personname = models.CharField(max_length=100)
|
personname = models.CharField(max_length=100)
|
||||||
person = models.ForeignKey('Person', blank=True, null=True)
|
person = models.ForeignKey('Person', blank=True, null=True)
|
||||||
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
|
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
|
||||||
@@ -195,6 +209,9 @@ class SurvexScansFolder(models.Model):
|
|||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('walletname',)
|
ordering = ('walletname',)
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.walletname or 'no wallet'
|
||||||
|
|
||||||
def get_absolute_url(self):
|
def get_absolute_url(self):
|
||||||
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))
|
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))
|
||||||
|
|
||||||
@@ -206,6 +223,9 @@ class SurvexScanSingle(models.Model):
|
|||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.survexscansfolder.walletname + '/' + self.name
|
||||||
|
|
||||||
def get_absolute_url(self):
|
def get_absolute_url(self):
|
||||||
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))
|
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))
|
||||||
|
|
||||||
|
|||||||
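Note on the SurvexBlock.date change above: switching from DateField to DateTimeField alters the underlying database column, so under Django 1.10 the schema has to be updated with a migration rather than the old syncdb. A minimal sketch, assuming the model lives in the "core" app as in this repo's layout:

    python manage.py makemigrations core
    python manage.py migrate core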
@@ -1,6 +1,6 @@
 from django.conf import settings
 import fileAbstraction
-from django.shortcuts import render_to_response
+from django.shortcuts import render
 from django.http import HttpResponse, Http404
 import os, stat
 import re

@@ -86,7 +86,7 @@ def jgtfile(request, f):
             upperdirs.append((href, hpath))
         upperdirs.append(("", "/"))

-        return render_to_response('listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})
+        return render(request, 'listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})

     # flat output of file when loaded
     if os.path.isfile(fp):

@@ -163,27 +163,27 @@ def jgtuploadfile(request):
     #print ("FFF", request.FILES.values())
     message = ""
     print "gothere"
-    return render_to_response('fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})
+    return render(request, 'fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})

 def surveyscansfolder(request, path):
     #print [ s.walletname for s in SurvexScansFolder.objects.all() ]
     survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
-    return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
+    return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

 def surveyscansingle(request, path, file):
     survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
     survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
     return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
-    #return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
+    #return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

 def surveyscansfolders(request):
     survexscansfolders = SurvexScansFolder.objects.all()
-    return render_to_response('survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })
+    return render(request, 'survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })


 def tunneldata(request):
     tunnelfiles = TunnelFile.objects.all()
-    return render_to_response('tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })
+    return render(request, 'tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })


 def tunnelfile(request, path):
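The render_to_response() to render() conversions above follow the Django 1.10+ idiom: render() takes the request as its first argument and evaluates the template with a RequestContext, which the newer template machinery and CSRF handling expect. A minimal sketch of the pattern (template name and context keys here are illustrative only, not taken from this commit):

    from django.shortcuts import render

    def example_view(request):
        context = {'settings': settings}
        return render(request, 'listdir.html', context)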
@@ -50,7 +50,7 @@ def caveCmp(x, y):
     return numericalcmp(x.unofficial_number, y.unofficial_number)

 def caveindex(request):
-    caves = Cave.objects.all()
+    #caves = Cave.objects.all()
     notablecavehrefs = settings.NOTABLECAVESHREFS
     notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs ]
     caves1623 = list(Cave.objects.filter(area__short_name = "1623"))

@@ -64,7 +64,6 @@ def millenialcaves(request):
     return HttpResponse("Test text", content_type="text/plain")


-
 def cave3d(request, cave_id=''):
     cave = getCave(cave_id)
     survexfilename = settings.SURVEX_DATA + cave.survex_file

@@ -106,6 +105,7 @@ def caveQMs(request, slug):
         return render(request,'nonpublic.html', {'instance': cave})
     else:
         return render(request,'cave_qms.html', {'cave': cave})
+

 def caveLogbook(request, slug):
     cave = Cave.objects.get(caveslug__slug = slug)
     if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():

@@ -210,10 +210,9 @@ def editEntrance(request, caveslug, slug=None):
         'entletter': entletter
         })

-def qm(request,cave_id,qm_id,year,grade=None):
-    year=int(year)
+def qm(request,qm_id):
     try:
-        qm=getCave(cave_id).get_QMs().get(number=qm_id,found_by__date__year=year)
+        qm=QM.objects.get(id=qm_id)
         return render(request,'qm.html',locals())

     except QM.DoesNotExist:

@@ -223,7 +222,6 @@ def qm(request,cave_id,qm_id,year,grade=None):
         return HttpResponseRedirect(url)


-
 def ent(request, cave_id, ent_letter):
     cave = Cave.objects.filter(kataster_number = cave_id)[0]
     cave_and_ent = CaveAndEntrance.objects.filter(cave = cave).filter(entrance_letter = ent_letter)[0]

@@ -240,7 +238,7 @@ def entranceSlug(request, slug):

 def survexblock(request, survexpath):
     survexpath = re.sub("/", ".", survexpath)
-    print "jjjjjj", survexpath
+    print("jjjjjj", survexpath)
     survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
     #ftext = survexblock.filecontents()
     ftext = survexblock.text

@@ -277,30 +275,30 @@ def get_qms(request, caveslug):
     return render(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})

 areanames = [
     #('', 'Location unclear'),
     ('1a', '1a – Plateau: around Top Camp'),
     ('1b', '1b – Western plateau near 182'),
     ('1c', '1c – Eastern plateau near 204 walk-in path'),
     ('1d', '1d – Further plateau around 76'),
     ('2a', '2a – Southern Schwarzmooskogel near 201 path and the Nipple'),
     ('2b', '2b – Eishöhle area'),
     ('2b or 4 (unclear)', '2b or 4 (unclear)'),
     ('2c', '2c – Kaninchenhöhle area'),
     ('2d', '2d – Steinbrückenhöhle area'),
     ('3', '3 – Bräuning Alm'),
     ('4', '4 – Kratzer valley'),
     ('5', '5 – Schwarzmoos-Wildensee'),
     ('6', '6 – Far plateau'),
     ('1626 or 6 (borderline)', '1626 or 6 (borderline)'),
     ('7', '7 – Egglgrube'),
     ('8a', '8a – Loser south face'),
     ('8b', '8b – Loser below Dimmelwand'),
     ('8c', '8c – Augst See'),
     ('8d', '8d – Loser-Hochganger ridge'),
     ('9', '9 – Gschwandt Alm'),
     ('10', '10 – Altaussee'),
     ('11', '11 – Augstbach')
 ]


 def prospecting(request):
@@ -318,21 +316,21 @@ def prospecting(request):
     # big map first (zoom factor ignored)

     maps = {
         # id    left      top       right     bottom    zoom
         #       G&K       G&K       G&K       G&K       factor
         "all": [33810.4, 85436.5, 38192.0, 81048.2, 0.35,
                 "All"],
         "40": [36275.6, 82392.5, 36780.3, 81800.0, 3.0,
                "Eishöhle"],
         "76": [35440.0, 83220.0, 36090.0, 82670.0, 1.3,
                "Eislufthöhle"],
         "204": [36354.1, 84154.5, 37047.4, 83300, 3.0,
                 "Steinbrückenhöhle"],
         "tc": [35230.0, 82690.0, 36110.0, 82100.0, 3.0,
                "Near Top Camp"],
         "grieß":
             [36000.0, 86300.0, 38320.0, 84400.0, 4.0,
              "Grießkogel Area"],
     }

     for n in maps.keys():

@@ -353,49 +351,49 @@ ZOOM = 4
 DESC = 5

 areacolours = {
     '1a' : '#00ffff',
     '1b' : '#ff00ff',
     '1c' : '#ffff00',
     '1d' : '#ffffff',
     '2a' : '#ff0000',
     '2b' : '#00ff00',
     '2c' : '#008800',
     '2d' : '#ff9900',
     '3' : '#880000',
     '4' : '#0000ff',
     '6' : '#000000', # doubles for surface fixed pts, and anything else
     '7' : '#808080'
 }


 for FONT in [
     "/usr/share/fonts/truetype/freefont/FreeSans.ttf",
     "/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
     "C:\WINNT\Fonts\ARIAL.TTF"
 ]:
     if os.path.isfile(FONT): break
 TEXTSIZE = 16
 CIRCLESIZE =8
 LINEWIDTH = 2
 myFont = ImageFont.truetype(FONT, TEXTSIZE)

 def mungecoord(x, y, mapcode, img):
     # Top of Zinken is 73 1201 = dataset 34542 81967
     # Top of Hinter is 1073 562 = dataset 36670 83317
     # image is 1417 by 2201
     # FACTOR1 = 1000.0 / (36670.0-34542.0)
     # FACTOR2 = (1201.0-562.0) / (83317 - 81967)
     # FACTOR = (FACTOR1 + FACTOR2)/2
     # The factors aren't the same as the scanned map's at a slight angle. I
     # can't be bothered to fix this. Since we zero on the Hinter it makes
     # very little difference for caves in the areas round 76 or 204.
     # xoffset = (x - 36670)*FACTOR
     # yoffset = (y - 83317)*FACTOR
     # return (1073 + xoffset, 562 - yoffset)

     m = maps[mapcode]
     factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
     return ((x - m[L]) * factorX, (m[T] - y) * factorY)

 COL_TYPES = {True: "red",
              False: "#dddddd",
@@ -422,40 +420,40 @@ def prospecting_image(request, name):
     m = maps[name]
     #imgmaps = []
     if name == "all":
         img = mainImage
     else:
         M = maps['all']
         W, H = mainImage.size
         l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
         t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
         r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
         b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
         img = mainImage.crop((l, t, r, b))
         w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
         h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
         img = img.resize((w, h), Image.BICUBIC)
     draw = ImageDraw.Draw(img)
     draw.setfont(myFont)
     if name == "all":
         for maparea in maps.keys():
             if maparea == "all":
                 continue
             localm = maps[maparea]
             l,t = mungecoord(localm[L], localm[T], "all", img)
             r,b = mungecoord(localm[R], localm[B], "all", img)
             text = maparea + " map"
             textlen = draw.textsize(text)[0] + 3
             draw.rectangle([l, t, l+textlen, t+TEXTSIZE+2], fill='#ffffff')
             draw.text((l+2, t+1), text, fill="#000000")
             #imgmaps.append( [l, t, l+textlen, t+SIZE+2, "submap" + maparea, maparea + " subarea map"] )
             draw.line([l, t, r, t], fill='#777777', width=LINEWIDTH)
             draw.line([l, b, r, b], fill='#777777', width=LINEWIDTH)
             draw.line([l, t, l, b], fill='#777777', width=LINEWIDTH)
             draw.line([r, t, r, b], fill='#777777', width=LINEWIDTH)
             draw.line([l, t, l+textlen, t], fill='#777777', width=LINEWIDTH)
             draw.line([l, t+TEXTSIZE+2, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
             draw.line([l, t, l, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
             draw.line([l+textlen, t, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
     #imgmaps[maparea] = []
     # Draw scale bar
     m100 = int(100 / (m[R] - m[L]) * img.size[0])

@@ -477,13 +475,14 @@ def prospecting_image(request, name):
     plot("laser.0_5", "LSR5", "Reference", "Laser Point 0/5", name, draw, img)
     plot("225-96", "BAlm", "Reference", "Bräuning Alm trig point", name, draw, img)
     for entrance in Entrance.objects.all():
         station = entrance.best_station()
         if station:
             #try:
             areaName = entrance.caveandentrance_set.all()[0].cave.getArea().short_name
-            plot(station, "%s-%s" % (areaName, str(entrance)[5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
+            plot(station, "%s-%s" % (areaName, str(entrance)
+                 [5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
             #except:
             #    pass

     for (N, E, D, num) in [(35975.37, 83018.21, 100,"177"), # Calculated from bearings
                            (35350.00, 81630.00, 50, "71"), # From Auer map
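The qm() view in the hunks above now looks a QM up by primary key, so whichever URL pattern routes to it only has to capture qm_id rather than cave, number and year. A hedged sketch of what the matching urls.py entry could look like under Django 1.10 (the regex and the import path are assumptions, not taken from this commit):

    from django.conf.urls import url
    from core.views_caves import qm

    urlpatterns = [
        url(r'^qm/(?P<qm_id>\d+)/?$', qm, name='qm'),
    ]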
@@ -70,13 +70,17 @@ def expedition(request, expeditionname):
             pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
                                                                logbook_entry__date=date) }
             pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition,
-                                                                   date = date))
+                                                                   date=date))
             prow.append(pcell)
         personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})

     if "reload" in request.GET:
         LoadLogbookForExpedition(this_expedition)
-    return render(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'settings':settings, 'dateditems': dateditems })
+    return render(request,'expedition.html', {'this_expedition': this_expedition,
+                                              'expeditions':expeditions,
+                                              'personexpeditiondays':personexpeditiondays,
+                                              'settings':settings,
+                                              'dateditems': dateditems })

 def get_absolute_url(self):
     return ('expedition', (expedition.year))
@@ -1,7 +1,8 @@
 from django import forms
 from django.http import HttpResponseRedirect, HttpResponse
-from django.shortcuts import render_to_response, render
-from django.core.context_processors import csrf
+from django.shortcuts import render
+from django.views.decorators import csrf
+from django.views.decorators.csrf import csrf_protect
 from django.http import HttpResponse, Http404
 import re
 import os

@@ -110,7 +111,7 @@ class SvxForm(forms.Form):
         log = re.sub("(?s).*?(Survey contains)", "\\1", log)
         return log

+@csrf_protect
 def svx(request, survex_file):
     # get the basic data from the file given in the URL
     dirname = os.path.split(survex_file)[0]

@@ -173,10 +174,10 @@ def svx(request, survex_file):
             'difflist': difflist,
             'logmessage':logmessage,
             'form':form}
-    vmap.update(csrf(request))
+    # vmap.update(csrf(request))
     if outputtype == "ajax":
-        return render_to_response('svxfiledifflistonly.html', vmap)
-    return render_to_response('svxfile.html', vmap)
+        return render(request, 'svxfiledifflistonly.html', vmap)
+    return render(request, 'svxfile.html', vmap)

 def svxraw(request, survex_file):
     svx = open(os.path.join(settings.SURVEX_DATA, survex_file+".svx"), "rb")
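Background for the CSRF changes above: django.core.context_processors was retired by Django 1.10 (its processors moved to django.template.context_processors back in 1.8), and render() already exposes the CSRF token to templates, which is why the manual vmap.update(csrf(request)) call can be commented out; csrf_protect from django.views.decorators.csrf keeps the POST-time check on the view. A small sketch of the decorator usage:

    from django.views.decorators.csrf import csrf_protect

    @csrf_protect
    def my_view(request):
        ...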
@@ -200,19 +201,19 @@ def threed(request, survex_file):
     log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
     return HttpResponse(log, content_type="text")


 def log(request, survex_file):
     process(survex_file)
     log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
     return HttpResponse(log, content_type="text")


 def err(request, survex_file):
     process(survex_file)
     err = open(settings.SURVEX_DATA + survex_file + ".err", "rb")
     return HttpResponse(err, content_type="text")




 def identifycavedircontents(gcavedir):
     # find the primary survex file in each cave directory
     name = os.path.split(gcavedir)[1]
@@ -254,35 +255,47 @@ def identifycavedircontents(gcavedir):
     return subdirs, subsvx



 # direct local non-database browsing through the svx file repositories
 # perhaps should use the database and have a reload button for it
 def survexcaveslist(request):
-    cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
-    #cavesdircontents = { }
+    kat_areas = settings.KAT_AREAS
+
+    fnumlist = []
+
+    kat_areas = ['1623']
+
+    for area in kat_areas:
+        print(area)
+        cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+        print(cavesdir)
+        #cavesdircontents = { }
+        fnumlist += [ (-int(re.match(r"\d*", f).group(0) or "0"), f, area) for f in os.listdir(cavesdir) ]
+        print(fnumlist)
+        print(len(fnumlist))
+
+    # first sort the file list
+    fnumlist.sort()
+
     onefilecaves = [ ]
     multifilecaves = [ ]
     subdircaves = [ ]

-    # first sort the file list
-    fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
-    fnumlist.sort()
-
     print(fnumlist)

     # go through the list and identify the contents of each cave directory
-    for num, cavedir in fnumlist:
+    for num, cavedir, area in fnumlist:
         if cavedir in ["144", "40"]:
             continue

+        cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+
         gcavedir = os.path.join(cavesdir, cavedir)
         if os.path.isdir(gcavedir) and cavedir[0] != ".":
             subdirs, subsvx = identifycavedircontents(gcavedir)
             survdirobj = [ ]

             for lsubsvx in subsvx:
-                survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
+                survdirobj.append(("caves-" + area + "/"+cavedir+"/"+lsubsvx, lsubsvx))

             # caves with subdirectories
             if subdirs:
@@ -292,7 +305,7 @@ def survexcaveslist(request):
                 assert not dsubdirs
                 lsurvdirobj = [ ]
                 for lsubsvx in dsubsvx:
-                    lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
+                    lsurvdirobj.append(("caves-" + area + "/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
                 subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
             subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))


@@ -305,23 +318,21 @@ def survexcaveslist(request):
             #print(survdirobj)
             onefilecaves.append(survdirobj[0])

-    return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
+    return render(request, 'svxfilecavelist.html', {"onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })




 # parsing all the survex files of a single cave and showing that it's consistent and can find all the files and people
 # doesn't use recursion. just writes it twice
 def survexcavesingle(request, survex_cave):
     breload = False
-    cave = Cave.objects.get(kataster_number=survex_cave)
+    cave = Cave.objects.filter(kataster_number=survex_cave)
+    if len(cave) < 1:
+        cave = Cave.objects.filter(unofficial_number=survex_cave)
+
     if breload:
         parsers.survex.ReloadSurvexCave(survex_cave)
-    return render_to_response('svxcavesingle.html', {'settings': settings, "cave":cave })
+    if len(cave) > 0:
+        return render(request, 'svxcavesingle.html', {"cave":cave[0] })
+    else:
+        return render(request, 'svxcavesingle.html', {"cave":cave })

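The survexcavesingle() rewrite above swaps Cave.objects.get() for filter(): filter() returns a QuerySet rather than raising DoesNotExist when nothing matches, which is why the new code checks len(cave) and indexes cave[0]. Roughly, the behavioural difference is:

    cave_qs = Cave.objects.filter(kataster_number=survex_cave)   # may be empty, never raises
    cave = Cave.objects.get(kataster_number=survex_cave)         # raises Cave.DoesNotExist if missing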
@@ -3,6 +3,11 @@ import time
 import settings
 os.environ['PYTHONPATH'] = settings.PYTHON_PATH
 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

+if __name__ == '__main__':
+    import django
+    django.setup()
+
 from django.core import management
 from django.db import connection
 from django.contrib.auth.models import User

@@ -28,7 +33,7 @@ def reload_db():
         cursor.execute("CREATE DATABASE %s" % databasename)
         cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
         cursor.execute("USE %s" % databasename)
-    management.call_command('syncdb', interactive=False)
+    management.call_command('migrate', interactive=False)
     user = User.objects.create_user(expouser, expouseremail, expouserpass)
     user.is_staff = True
     user.is_superuser = True
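Two Django upgrade points sit in the hunks above: a standalone script such as databaseReset.py must call django.setup() before touching the ORM (required since the apps registry arrived in Django 1.7), and the syncdb management command is gone in Django 1.9+, so the schema is now built with migrate. Both can be exercised the same way from a script:

    import django
    django.setup()
    from django.core import management
    management.call_command('migrate', interactive=False)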
@@ -81,15 +86,6 @@ def import_tunnelfiles():
     parsers.surveys.LoadTunnelFiles()


-def rebuild():
-    """ Wipe the troggle database and sets up structure but imports nothing
-    """
-    reload_db()
-    make_dirs()
-    pageredirects()
-

 def reset():
     """ Wipe the troggle database and import everything from legacy data
     """

@@ -99,8 +95,10 @@ def reset():
     import_caves()
     import_people()
     import_surveyscans()

     import_logbooks()
     import_QMs()

     import_survex()
     try:
         import_tunnelfiles()
@@ -168,17 +166,22 @@ def pageredirects():
         f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
         f.save()

+def writeCaves():
+    for cave in Cave.objects.all():
+        cave.writeDataFile()
+    for entrance in Entrance.objects.all():
+        entrance.writeDataFile()
+
 def usage():
     print("""Usage is 'python databaseReset.py <command>'
           where command is:
-          rebuild - this reloads database and set up directories & redirects only
-          reset - this is normal usage, clear database and reread everything from files - time-consuming
-          desc - NOT WORKING: function resetdesc() missing
-          caves - read in the caves
-          logbooks - read in the logbooks, but read in people first
-          autologbooks - read in autologbooks
-          dumplogbooks - write out autologbooks (not working?)
-          people - read in the people from folk.csv
+          reset - this is normal usage, clear database and reread everything
+          desc
+          caves - read in the caves
+          logbooks - read in the logbooks
+          autologbooks
+          dumplogbooks
+          people
          QMs - read in the QM files
          resetend
          scans - read in the scanned surveynotes

@@ -186,6 +189,7 @@ def usage():
          survexpos
          surveys
          tunnel - read in the Tunnel files
+         writeCaves
          """)

 if __name__ == "__main__":
@@ -239,8 +243,6 @@ if __name__ == "__main__":
         usage()
     elif "reload_db" in sys.argv:
         reload_db()
-    elif "rebuild" in sys.argv:
-        rebuild()
     else:
         print("%s not recognised" % sys.argv)
         usage()

@@ -2,7 +2,9 @@ FROM python:2.7-stretch

 #COPY backports.list /etc/apt/sources.list.d/

-RUN apt-get -y update && apt-get install -y mercurial fonts-freefont-ttf locales survex
+RUN apt-get -y update && apt-get install -y mercurial \
+    fonts-freefont-ttf locales survex python-levenshtein \
+    python-pygraphviz

 #RUN apt-get -y -t -backports install survex

@@ -1 +1 @@
-requirements.txt.dj-1.7.11
+requirements.txt.dj-1.10

13  docker/requirements.txt.dj-1.10  Normal file
@@ -0,0 +1,13 @@
Django==1.10.8
django-registration==2.1.2
mysql
django-imagekit
Image
django-tinymce
smartencoding
fuzzywuzzy
GitPython
unidecode
django-extensions
pygraphviz
python-Levenshtein
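The new pin list above is what the docker image installs for the Django 1.10 port; installing it by hand would be along the lines of (path taken from the file header above):

    pip install -r docker/requirements.txt.dj-1.10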
@@ -6,4 +6,7 @@ django-imagekit
 Image
 django-tinymce==2.7.0
 smartencoding
+fuzzywuzzy
+GitPython
 unidecode
+django-extensions

@@ -33,4 +33,3 @@ def writeQmTable(outfile,cave):
     cavewriter.writerow(headers)
     for qm in cave.get_QMs():
         cavewriter.writerow(qmRow(qm))
-
34  flatpages/migrations/0001_initial.py  Normal file
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-02-18 16:01
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='EntranceRedirect',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('originalURL', models.CharField(max_length=200)),
                ('entrance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance')),
            ],
        ),
        migrations.CreateModel(
            name='Redirect',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('originalURL', models.CharField(max_length=200, unique=True)),
                ('newURL', models.CharField(max_length=200)),
            ],
        ),
    ]

0  flatpages/migrations/__init__.py  Normal file
@@ -33,7 +33,6 @@ def flatpage(request, path):
     except EntranceRedirect.DoesNotExist:
         pass

-
     if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated():
         print("flat path noinfo", path)
         return HttpResponseRedirect(reverse("auth_login") + '?next=%s' % request.path)
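The flatpages app gains its first migration and an empty migrations/__init__.py in this comparison, so Django treats the directory as a migrations package and the Redirect and EntranceRedirect tables are created by running it. Assuming the app label is simply "flatpages", that would be:

    python manage.py migrate flatpages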
@@ -1,13 +0,0 @@
"""

Django ImageKit

Author: Justin Driscoll <justin.driscoll@gmail.com>
Version: 0.2

"""
VERSION = "0.2"
@@ -1,21 +0,0 @@
""" Default ImageKit configuration """

from imagekit.specs import ImageSpec
from imagekit import processors

class ResizeThumbnail(processors.Resize):
    width = 100
    height = 50
    crop = True

class EnhanceSmall(processors.Adjustment):
    contrast = 1.2
    sharpness = 1.1

class SampleReflection(processors.Reflection):
    size = 0.5
    background_color = "#000000"

class DjangoAdminThumbnail(ImageSpec):
    access_as = 'admin_thumbnail'
    processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
@@ -1,17 +0,0 @@
# Required PIL classes may or may not be available from the root namespace
# depending on the installation method used.
try:
    import Image
    import ImageFile
    import ImageFilter
    import ImageEnhance
    import ImageColor
except ImportError:
    try:
        from PIL import Image
        from PIL import ImageFile
        from PIL import ImageFilter
        from PIL import ImageEnhance
        from PIL import ImageColor
    except ImportError:
        raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it`s installed and available on your current Python path.')
@@ -1 +0,0 @@

@@ -1 +0,0 @@

@@ -1,38 +0,0 @@
from django.db.models.loading import cache
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec


class Command(BaseCommand):
    help = ('Clears all ImageKit cached files.')
    args = '[apps]'
    requires_model_validation = True
    can_import_settings = True

    def handle(self, *args, **options):
        return flush_cache(args, options)

def flush_cache(apps, options):
    """ Clears the image cache

    """
    apps = [a.strip(',') for a in apps]
    if apps:
        print 'Flushing cache for %s...' % ', '.join(apps)
    else:
        print 'Flushing caches...'

    for app_label in apps:
        app = cache.get_app(app_label)
        models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]

        for model in models:
            for obj in model.objects.all():
                for spec in model._ik.specs:
                    prop = getattr(obj, spec.name(), None)
                    if prop is not None:
                        prop._delete()
                    if spec.pre_cache:
                        prop._create()
@@ -1,136 +0,0 @@
import os
from datetime import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.db.models.base import ModelBase
from django.utils.translation import ugettext_lazy as _

from imagekit import specs
from imagekit.lib import *
from imagekit.options import Options
from imagekit.utils import img_to_fobj

# Modify image file buffer size.
ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)

# Choice tuples for specifying the crop origin.
# These are provided for convenience.
CROP_HORZ_CHOICES = (
    (0, _('left')),
    (1, _('center')),
    (2, _('right')),
)

CROP_VERT_CHOICES = (
    (0, _('top')),
    (1, _('center')),
    (2, _('bottom')),
)


class ImageModelBase(ModelBase):
    """ ImageModel metaclass

    This metaclass parses IKOptions and loads the specified specification
    module.

    """
    def __init__(cls, name, bases, attrs):
        parents = [b for b in bases if isinstance(b, ImageModelBase)]
        if not parents:
            return
        user_opts = getattr(cls, 'IKOptions', None)
        opts = Options(user_opts)
        try:
            module = __import__(opts.spec_module, {}, {}, [''])
        except ImportError:
            raise ImportError('Unable to load imagekit config module: %s' % \
                opts.spec_module)
        for spec in [spec for spec in module.__dict__.values() \
                     if isinstance(spec, type) \
                     and issubclass(spec, specs.ImageSpec) \
                     and spec != specs.ImageSpec]:
            setattr(cls, spec.name(), specs.Descriptor(spec))
            opts.specs.append(spec)
        setattr(cls, '_ik', opts)


class ImageModel(models.Model):
    """ Abstract base class implementing all core ImageKit functionality

    Subclasses of ImageModel are augmented with accessors for each defined
    image specification and can override the inner IKOptions class to customize
    storage locations and other options.

    """
    __metaclass__ = ImageModelBase

    class Meta:
        abstract = True

    class IKOptions:
        pass

    def admin_thumbnail_view(self):
        if not self._imgfield:
            return None
        prop = getattr(self, self._ik.admin_thumbnail_spec, None)
        if prop is None:
            return 'An "%s" image spec has not been defined.' % \
                self._ik.admin_thumbnail_spec
        else:
            if hasattr(self, 'get_absolute_url'):
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self.get_absolute_url(), prop.url)
            else:
                return u'<a href="%s"><img src="%s"></a>' % \
                    (self._imgfield.url, prop.url)
    admin_thumbnail_view.short_description = _('Thumbnail')
    admin_thumbnail_view.allow_tags = True

    @property
    def _imgfield(self):
        return getattr(self, self._ik.image_field)

    def _clear_cache(self):
        for spec in self._ik.specs:
            prop = getattr(self, spec.name())
            prop._delete()

    def _pre_cache(self):
        for spec in self._ik.specs:
            if spec.pre_cache:
                prop = getattr(self, spec.name())
                prop._create()

    def save(self, clear_cache=True, *args, **kwargs):
        is_new_object = self._get_pk_val is None
        super(ImageModel, self).save(*args, **kwargs)
        if is_new_object:
            clear_cache = False
            spec = self._ik.preprocessor_spec
            if spec is not None:
                newfile = self._imgfield.storage.open(str(self._imgfield))
                img = Image.open(newfile)
                img = spec.process(img, None)
                format = img.format or 'JPEG'
                if format != 'JPEG':
                    imgfile = img_to_fobj(img, format)
                else:
                    imgfile = img_to_fobj(img, format,
                                          quality=int(spec.quality),
                                          optimize=True)
                content = ContentFile(imgfile.read())
                newfile.close()
                name = str(self._imgfield)
                self._imgfield.storage.delete(name)
                self._imgfield.storage.save(name, content)
        if clear_cache and self._imgfield != '':
            self._clear_cache()
            self._pre_cache()

    def delete(self):
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
        self._clear_cache()
        models.Model.delete(self)
@@ -1,23 +0,0 @@
# Imagekit options
from imagekit import processors
from imagekit.specs import ImageSpec


class Options(object):
    """ Class handling per-model imagekit options

    """
    image_field = 'image'
    crop_horz_field = 'crop_horz'
    crop_vert_field = 'crop_vert'
    preprocessor_spec = None
    cache_dir = 'cache'
    save_count_as = None
    cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
    admin_thumbnail_spec = 'admin_thumbnail'
    spec_module = 'imagekit.defaults'

    def __init__(self, opts):
        for key, value in opts.__dict__.iteritems():
            setattr(self, key, value)
        self.specs = []
@@ -1,134 +0,0 @@
""" Imagekit Image "ImageProcessors"

A processor defines a set of class variables (optional) and a
class method named "process" which processes the supplied image using
the class properties as settings. The process method can be overridden as well allowing user to define their
own effects/processes entirely.

"""
from imagekit.lib import *

class ImageProcessor(object):
    """ Base image processor class """
    @classmethod
    def process(cls, image, obj=None):
        return image


class Adjustment(ImageProcessor):
    color = 1.0
    brightness = 1.0
    contrast = 1.0
    sharpness = 1.0

    @classmethod
    def process(cls, image, obj=None):
        for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
            factor = getattr(cls, name.lower())
            if factor != 1.0:
                image = getattr(ImageEnhance, name)(image).enhance(factor)
        return image


class Reflection(ImageProcessor):
    background_color = '#FFFFFF'
    size = 0.0
    opacity = 0.6

    @classmethod
    def process(cls, image, obj=None):
        # convert bgcolor string to rgb value
        background_color = ImageColor.getrgb(cls.background_color)
        # copy orignial image and flip the orientation
        reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
        # create a new image filled with the bgcolor the same size
        background = Image.new("RGB", image.size, background_color)
        # calculate our alpha mask
        start = int(255 - (255 * cls.opacity)) # The start of our gradient
        steps = int(255 * cls.size) # the number of intermedite values
        increment = (255 - start) / float(steps)
        mask = Image.new('L', (1, 255))
        for y in range(255):
            if y < steps:
                val = int(y * increment + start)
            else:
                val = 255
            mask.putpixel((0, y), val)
        alpha_mask = mask.resize(image.size)
        # merge the reflection onto our background color using the alpha mask
        reflection = Image.composite(background, reflection, alpha_mask)
        # crop the reflection
        reflection_height = int(image.size[1] * cls.size)
        reflection = reflection.crop((0, 0, image.size[0], reflection_height))
        # create new image sized to hold both the original image and the reflection
        composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
        # paste the orignal image and the reflection into the composite image
        composite.paste(image, (0, 0))
        composite.paste(reflection, (0, image.size[1]))
        # return the image complete with reflection effect
        return composite


class Resize(ImageProcessor):
    width = None
    height = None
    crop = False
    upscale = False

    @classmethod
    def process(cls, image, obj=None):
        cur_width, cur_height = image.size
        if cls.crop:
            crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
            crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
            ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
            resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
            crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
            x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
            box_left, box_right = {
                0: (0, cls.width),
                1: (int(x_diff), int(x_diff + cls.width)),
                2: (int(crop_x), int(resize_x)),
            }[crop_horz]
            box_upper, box_lower = {
                0: (0, cls.height),
                1: (int(y_diff), int(y_diff + cls.height)),
                2: (int(crop_y), int(resize_y)),
            }[crop_vert]
            box = (box_left, box_upper, box_right, box_lower)
            image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
        else:
            if not cls.width is None and not cls.height is None:
                ratio = min(float(cls.width)/cur_width,
                            float(cls.height)/cur_height)
            else:
                if cls.width is None:
                    ratio = float(cls.height)/cur_height
                else:
                    ratio = float(cls.width)/cur_width
            new_dimensions = (int(round(cur_width*ratio)),
                              int(round(cur_height*ratio)))
            if new_dimensions[0] > cur_width or \
               new_dimensions[1] > cur_height:
                if not cls.upscale:
                    return image
            image = image.resize(new_dimensions, Image.ANTIALIAS)
        return image


class Transpose(ImageProcessor):
    """ Rotates or flips the image

    Method should be one of the following strings:
        - FLIP_LEFT RIGHT
        - FLIP_TOP_BOTTOM
        - ROTATE_90
        - ROTATE_270
        - ROTATE_180

    """
    method = 'FLIP_LEFT_RIGHT'

    @classmethod
    def process(cls, image, obj=None):
        return image.transpose(getattr(Image, cls.method))
@@ -1,119 +0,0 @@
""" ImageKit image specifications

All imagekit specifications must inherit from the ImageSpec class. Models
inheriting from ImageModel will be modified with a descriptor/accessor for each
spec found.

"""
import os
from StringIO import StringIO
from imagekit.lib import *
from imagekit.utils import img_to_fobj
from django.core.files.base import ContentFile

class ImageSpec(object):
    pre_cache = False
    quality = 70
    increment_count = False
    processors = []

    @classmethod
    def name(cls):
        return getattr(cls, 'access_as', cls.__name__.lower())

    @classmethod
    def process(cls, image, obj):
        processed_image = image.copy()
        for proc in cls.processors:
            processed_image = proc.process(processed_image, obj)
        return processed_image


class Accessor(object):
    def __init__(self, obj, spec):
        self._img = None
        self._obj = obj
        self.spec = spec

    def _get_imgfile(self):
        format = self._img.format or 'JPEG'
        if format != 'JPEG':
            imgfile = img_to_fobj(self._img, format)
        else:
            imgfile = img_to_fobj(self._img, format,
                                  quality=int(self.spec.quality),
                                  optimize=True)
        return imgfile

    def _create(self):
        if self._exists():
            return
        # process the original image file
        fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
        fp.seek(0)
        fp = StringIO(fp.read())
        try:
            self._img = self.spec.process(Image.open(fp), self._obj)
            # save the new image to the cache
            content = ContentFile(self._get_imgfile().read())
            self._obj._imgfield.storage.save(self.name, content)
        except IOError:
            pass

    def _delete(self):
        self._obj._imgfield.storage.delete(self.name)

    def _exists(self):
        return self._obj._imgfield.storage.exists(self.name)

    def _basename(self):
        filename, extension = \
            os.path.splitext(os.path.basename(self._obj._imgfield.name))
        return self._obj._ik.cache_filename_format % \
            {'filename': filename,
             'specname': self.spec.name(),
             'extension': extension.lstrip('.')}

    @property
    def name(self):
        return os.path.join(self._obj._ik.cache_dir, self._basename())

    @property
    def url(self):
        self._create()
        if self.spec.increment_count:
            fieldname = self._obj._ik.save_count_as
            if fieldname is not None:
                current_count = getattr(self._obj, fieldname)
                setattr(self._obj, fieldname, current_count + 1)
                self._obj.save(clear_cache=False)
        return self._obj._imgfield.storage.url(self.name)

    @property
    def file(self):
        self._create()
        return self._obj._imgfield.storage.open(self.name)

    @property
    def image(self):
        if self._img is None:
            self._create()
            if self._img is None:
                self._img = Image.open(self.file)
        return self._img

    @property
    def width(self):
        return self.image.size[0]

    @property
    def height(self):
        return self.image.size[1]


class Descriptor(object):
    def __init__(self, spec):
        self._spec = spec

    def __get__(self, obj, type=None):
        return Accessor(obj, self._spec)
@@ -1,86 +0,0 @@
import os
import tempfile
import unittest
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase

from imagekit import processors
from imagekit.models import ImageModel
from imagekit.specs import ImageSpec
from imagekit.lib import Image


class ResizeToWidth(processors.Resize):
    width = 100

class ResizeToHeight(processors.Resize):
    height = 100

class ResizeToFit(processors.Resize):
    width = 100
    height = 100

class ResizeCropped(ResizeToFit):
    crop = ('center', 'center')

class TestResizeToWidth(ImageSpec):
    access_as = 'to_width'
    processors = [ResizeToWidth]

class TestResizeToHeight(ImageSpec):
    access_as = 'to_height'
    processors = [ResizeToHeight]

class TestResizeCropped(ImageSpec):
    access_as = 'cropped'
    processors = [ResizeCropped]

class TestPhoto(ImageModel):
    """ Minimal ImageModel class for testing """
    image = models.ImageField(upload_to='images')

    class IKOptions:
        spec_module = 'imagekit.tests'


class IKTest(TestCase):
    """ Base TestCase class """
    def setUp(self):
        # create a test image using tempfile and PIL
        self.tmp = tempfile.TemporaryFile()
        Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
        self.tmp.seek(0)
        self.p = TestPhoto()
        self.p.image.save(os.path.basename('test.jpg'),
                          ContentFile(self.tmp.read()))
        self.p.save()
        # destroy temp file
        self.tmp.close()

    def test_setup(self):
        self.assertEqual(self.p.image.width, 800)
        self.assertEqual(self.p.image.height, 600)

    def test_to_width(self):
        self.assertEqual(self.p.to_width.width, 100)
        self.assertEqual(self.p.to_width.height, 75)

    def test_to_height(self):
        self.assertEqual(self.p.to_height.width, 133)
        self.assertEqual(self.p.to_height.height, 100)

    def test_crop(self):
        self.assertEqual(self.p.cropped.width, 100)
        self.assertEqual(self.p.cropped.height, 100)

    def test_url(self):
        tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
        self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)

    def tearDown(self):
        # make sure image file is deleted
        path = self.p.image.path
        self.p.delete()
        self.failIf(os.path.isfile(path))
@@ -1,15 +0,0 @@
""" ImageKit utility functions """

import tempfile

def img_to_fobj(img, format, **kwargs):
    tmp = tempfile.TemporaryFile()
    if format != 'JPEG':
        try:
            img.save(tmp, format, **kwargs)
            return
        except KeyError:
            pass
    img.save(tmp, format, **kwargs)
    tmp.seek(0)
    return tmp
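The three hunks above delete troggle's bundled copy of ImageKit (specs, tests and the utility helper); the INSTALLED_APPS change further down swaps 'troggle.imagekit' for the external 'imagekit' package. A minimal sketch of an equivalent cached-image spec written against the external django-imagekit API, assuming that package is what ends up installed; the Photo model here is illustrative, not troggle code:

from django.db import models
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill

class Photo(models.Model):
    image = models.ImageField(upload_to='images')
    # 100x100 rendition, generated and cached on first access,
    # much like the old ImageSpec/Accessor pair above
    thumbnail = ImageSpecField(source='image',
                               processors=[ResizeToFill(100, 100)],
                               format='JPEG',
                               options={'quality': 70})

# photo.thumbnail.url then plays the role of the old spec .url property.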
@@ -12,6 +12,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'somepasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -55,11 +57,4 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = PYTHON_PATH + 'troggle_log.txt'
@@ -15,6 +15,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
@@ -53,13 +55,6 @@ JSLIB_PATH = '/usr/share/javascript/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + 'tinymce_media/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = '/home/expo/troggle/troggle_log.txt'

FEINCMS_ADMIN_MEDIA='/site_media/feincms/'
@@ -14,6 +14,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -55,13 +57,6 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = '/home/expo/troggle/troggle_log.txt'

FEINCMS_ADMIN_MEDIA='/site_media/feincms/'
@@ -12,6 +12,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -57,11 +59,4 @@ JSLIB_URL = URL_ROOT + 'javascript/'
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'

-TEMPLATE_DIRS = (
-    PYTHON_PATH + "templates",
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
-
LOGFILE = PYTHON_PATH + 'troggle_log.txt'
@@ -9,6 +9,8 @@ DATABASES = {
    }
}

+ALLOWED_HOSTS = ['*']
+
EXPOUSER = 'expo'
EXPOUSERPASS = 'realpasshere'
EXPOUSER_EMAIL = 'wookey@wookware.org'
@@ -56,14 +58,3 @@ EMAIL_USE_TLS = True
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
-
-
-
-
-TEMPLATE_DIRS = (
-    "C:/Expo/expoweb/troggle/templates",
-
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-)
@@ -29,12 +29,12 @@
}

function redirectSurvey(){
-    window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
+    window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
    document.getElementById("progressTableContent").style.display='hidden'
}

function redirectYear(){
-    window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
+    window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
}

0  modelviz.py  (Normal file → Executable file)
@@ -14,6 +14,7 @@ import csv
import re
import datetime
import os
+from fuzzywuzzy import fuzz

from utils import save_carefully

@@ -31,6 +32,7 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
    round_bracket_regex = re.compile(r"[\(\[].*?[\)\]]")
    for tripperson in re.split(r",|\+|&|&(?!\w+;)| and ", trippeople):
        tripperson = tripperson.strip()
+        tripperson = tripperson.strip('.')
        mul = re.match(r"<u>(.*?)</u>$(?i)", tripperson)
        if mul:
            tripperson = mul.group(1).strip()
@@ -42,6 +44,15 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
                print(" - No name match for: '%s'" % tripperson)
                message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
                models.DataIssue.objects.create(parser='logbooks', message=message)
+                print(' - Lets try something fuzzy')
+                fuzzy_matches = {}
+                for person in GetPersonExpeditionNameLookup(expedition):
+                    fuzz_num = fuzz.ratio(tripperson.lower(), person)
+                    if fuzz_num > 50:
+                        #print(" - %s -> %s = %d" % (tripperson.lower(), person, fuzz_num))
+                        fuzzy_matches[person] = fuzz_num
+                for i in sorted(fuzzy_matches.items(), key = lambda kv:(kv[1]), reverse=True):
+                    print(' - %s -> %s' % (i[0], i[1]))
            res.append((personyear, logtime_underground))
            if mul:
                author = personyear
@@ -121,7 +132,7 @@ def ParseDate(tripdate, year):
            day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
        else:
            assert False, tripdate
-    return datetime.date(year, month, day)
+    return make_aware(datetime.datetime(year, month, day), get_current_timezone())

# 2006, 2008 - 2010
def Parselogwikitxt(year, expedition, txt):
@@ -173,9 +184,6 @@ def Parseloghtmltxt(year, expedition, txt):
            continue
        tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
        ldate = ParseDate(tripdate.strip(), year)
-        #assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
-        #trippeople = re.sub(r"Ol(?!l)", "Olly", trippeople)
-        #trippeople = re.sub(r"Wook(?!e)", "Wookey", trippeople)
        triptitles = triptitle.split(" - ")
        if len(triptitles) >= 2:
            tripcave = triptitles[0]
@@ -184,7 +192,7 @@ def Parseloghtmltxt(year, expedition, txt):
        #print("\n", tripcave, "--- ppp", trippeople, len(triptext))
        ltriptext = re.sub(r"</p>", "", triptext)
        ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
-        ltriptext = re.sub(r"<p>", "</br></br>", ltriptext).strip()
+        ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
        EnterLogIntoDbase(date = ldate, place = tripcave, title = triptitle, text = ltriptext,
                          trippeople=trippeople, expedition=expedition, logtime_underground=0,
                          entry_type="html")
@@ -233,7 +241,6 @@ def Parseloghtml01(year, expedition, txt):
        ltriptext = re.sub(r"</?i>", "''", ltriptext)
        ltriptext = re.sub(r"</?b>", "'''", ltriptext)

-
        #print ldate, trippeople.strip()
        # could includ the tripid (url link for cross referencing)
        EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext,
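The new fallback in GetTripPersons leans on fuzzywuzzy's fuzz.ratio, which scores the similarity of two strings from 0 to 100; candidates above the 50 cut-off are collected and printed best first, so a misspelt logbook name can be matched by eye. A self-contained sketch of the same idea, with made-up names for illustration:

from fuzzywuzzy import fuzz

known_names = ['wookey', 'becka lawson', 'julian todd']
tripperson = 'beka lawson'            # misspelt logbook entry

fuzzy_matches = {}
for person in known_names:
    score = fuzz.ratio(tripperson.lower(), person)
    if score > 50:                    # same threshold as the parser
        fuzzy_matches[person] = score

# best candidate first, e.g. 'becka lawson' with a score in the 90s
for name, score in sorted(fuzzy_matches.items(), key=lambda kv: kv[1], reverse=True):
    print(' - %s -> %s' % (name, score))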
@@ -87,7 +87,7 @@ def LoadPersonsExpos():
            lastname = ""

        lookupAttribs={'first_name':firstname, 'last_name':(lastname or "")}
-        nonLookupAttribs={'is_vfho':personline[header["VfHO member"]], 'fullname':fullname}
+        nonLookupAttribs={'is_vfho':bool(personline[header["VfHO member"]]), 'fullname':fullname}
        person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)

        parseMugShotAndBlurb(personline=personline, header=header, person=person)
@@ -100,26 +100,6 @@ def LoadPersonsExpos():
            nonLookupAttribs = {'nickname':nickname, 'is_guest':(personline[header["Guest"]] == "1")}
            save_carefully(models.PersonExpedition, lookupAttribs, nonLookupAttribs)

-
-    # this fills in those people for whom 2008 was their first expo
-    #print "Loading personexpeditions 2008"
-    #expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
-    #expomissing = set(expoers2008)
-    #for name in expomissing:
-    #    firstname, lastname = name.split()
-    #    is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
-    #    print "2008:", name
-    #    persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
-    #    if not persons:
-    #        person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
-    #        #person.Sethref()
-    #        person.save()
-    #    else:
-    #        person = persons[0]
-    #    expedition = models.Expedition.objects.get(year="2008")
-    #    personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
-    #    personexpedition.save()
-
# used in other referencing parser functions
# expedition name lookup cached for speed (it's a very big list)
Gpersonexpeditionnamelookup = { }
@@ -1,5 +1,7 @@
'''
-This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html files and saves them in the troggle database as instances of the model Subcave. Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
+This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html
+files and saves them in the troggle database as instances of the model Subcave.
+Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
'''

import sys, os
@@ -178,7 +178,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
            print('QM res station %s' % qm_resolve_station)
            print('QM notes %s' % qm_notes)

-            # If the QM isn't resolved (has a resolving station) thn load it
+            # If the QM isn't resolved (has a resolving station) then load it
            if not qm_resolve_section or qm_resolve_section is not '-' or qm_resolve_section is not 'None':
                from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunck, named)
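One thing this hunk leaves alone: the guard on the line after the comment compares against the string literals '-' and 'None' with "is not", which tests object identity rather than equality, and the or-chain is satisfied by every possible value, so the branch is always taken. A sketch of an equality-based test, on the reading that the intent is "no real resolving station has been recorded yet"; this is an observation, not a change made by the commit:

# hypothetical tightening of the existing condition
if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
    from_section = models.SurvexBlock.objects.filter(name=qm_from_section)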
@@ -1,12 +1,7 @@
import sys, os, types, logging, stat
-#sys.path.append('C:\\Expo\\expoweb')
-#from troggle import *
-#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
import settings
from troggle.core.models import *
from PIL import Image
-#import settings
-#import core.models as models
import csv
import re
import datetime
@@ -46,20 +41,17 @@ def readSurveysFromCSV():
    logging.info("Deleting all scanned images")
    ScannedImage.objects.all().delete()

-
    logging.info("Deleting all survey objects")
    Survey.objects.all().delete()

-
    logging.info("Beginning to import surveys from "+str(os.path.join(settings.SURVEYS, "Surveys.csv"))+"\n"+"-"*60+"\n")

    for survey in surveyreader:
-        #I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
+        # I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
        walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
        # print(walletNumberLetter.groups())
        year=survey[header['Year']]

-
        surveyobj = Survey(
            expedition = Expedition.objects.filter(year=year)[0],
            wallet_number = walletNumberLetter.group('number'),
@@ -73,7 +65,6 @@ def readSurveysFromCSV():
            pass
        surveyobj.save()

-
        logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")

# dead
@@ -141,7 +132,7 @@ def parseSurveyScans(expedition, logfile=None):
        yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
        print("No folder found for " + expedition.year + " at:- " + yearPath)

-# dead
def parseSurveys(logfile=None):
    try:
        readSurveysFromCSV()
@@ -271,8 +262,9 @@ def SetTunnelfileInfo(tunnelfile):
    fin.close()

    mtype = re.search("<(fontcolours|sketch)", ttext)
-    assert mtype, ff
-    tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
+    #assert mtype, ff
+    if mtype:
+        tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
    tunnelfile.npaths = len(re.findall("<skpath", ttext))
    tunnelfile.save()

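The walletNumberLetter regex above splits a wallet reference such as "34a" into its numeric and letter parts, so a survey number with a trailing letter still yields a clean wallet_number. A quick interpreter check with illustrative values:

import re

pattern = r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)'

m = re.match(pattern, '34a')
print(m.group('number'), m.group('letter'))   # prints: 34 a

m = re.match(pattern, '7')
print(m.group('number'), m.group('letter'))   # prints: 7 with an empty letter group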
@@ -27,7 +27,7 @@ from django.conf.urls import *
from profiles import views


-urlpatterns = patterns('',
+urlpatterns = [
    url(r'^select/$',
        views.select_profile,
        name='profiles_select_profile'),
@@ -43,4 +43,4 @@ urlpatterns = patterns('',
    url(r'^$',
        views.profile_list,
        name='profiles_profile_list'),
-)
+]
@@ -14,8 +14,7 @@ try:
except ImportError: # django >= 1.7
    SiteProfileNotAvailable = type('SiteProfileNotAvailable', (Exception,), {})

-from django.db.models import get_model
+from django.apps import apps

-
def get_profile_model():
    """
@@ -28,7 +27,7 @@ def get_profile_model():
    if (not hasattr(settings, 'AUTH_PROFILE_MODULE')) or \
       (not settings.AUTH_PROFILE_MODULE):
        raise SiteProfileNotAvailable
-    profile_mod = get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
+    profile_mod = apps.get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
    if profile_mod is None:
        raise SiteProfileNotAvailable
    return profile_mod
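apps.get_model is the replacement for django.db.models.get_model, which no longer exists in the Django versions this branch targets; it looks the model class up in the app registry by app label and model name. A minimal sketch, where the 'core.Person' label is only an example:

from django.apps import apps

# both calls return the model class, or raise LookupError if it is not installed
Person = apps.get_model('core', 'Person')
Person = apps.get_model(*'core.Person'.split('.'))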
39  settings.py
@@ -8,7 +8,6 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Django settings for troggle project.
DEBUG = True
-TEMPLATE_DEBUG = DEBUG

ALLOWED_HOSTS = [u'expo.survex.com']

@@ -56,11 +55,12 @@ SVX_URL = urlparse.urljoin(URL_ROOT , '/survex/')
# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623"

+KAT_AREAS = ['1623', '1624', '1626', '1627']
+
DEFAULT_LOGBOOK_PARSER = "Parseloghtmltxt"
DEFAULT_LOGBOOK_FILE = "logbook.html"

LOGBOOK_PARSER_SETTINGS = {
-    "2019": ("2019/logbook.html", "Parseloghtmltxt"),
    "2018": ("2018/logbook.html", "Parseloghtmltxt"),
    "2017": ("2017/logbook.html", "Parseloghtmltxt"),
    "2016": ("2016/logbook.html", "Parseloghtmltxt"),
@@ -97,20 +97,34 @@ SMART_APPEND_SLASH = True
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'a#vaeozn0)uz_9t_%v5n#tj)m+%ace6b_0(^fj!355qki*v)j2'

-# List of callables that know how to import templates from various sources.
-TEMPLATE_LOADERS = (
-    'django.template.loaders.filesystem.Loader',
-    'django.template.loaders.app_directories.Loader',
-    # 'django.template.loaders.eggs.load_template_source',
-)
+TEMPLATES = [
+    {
+        'BACKEND': 'django.template.backends.django.DjangoTemplates',
+        'DIRS': [
+            os.path.join(PYTHON_PATH, 'templates')
+        ],
+        'APP_DIRS': True,
+        'OPTIONS': {
+            'context_processors': [
+                'django.contrib.auth.context_processors.auth',
+                'django.template.context_processors.debug',
+                'django.template.context_processors.i18n',
+                'django.template.context_processors.media',
+                'django.template.context_processors.static',
+                'django.template.context_processors.tz',
+                'django.contrib.messages.context_processors.messages',
+                'django.template.context_processors.request',
+                #'core.context.troggle_context'
+            ]
+        },
+    },
+]

if django.VERSION[0] == 1 and django.VERSION[1] < 4:
    authmodule = 'django.core.context_processors.auth'
else:
    authmodule = 'django.contrib.auth.context_processors.auth'

-TEMPLATE_CONTEXT_PROCESSORS = ( authmodule, "core.context.troggle_context", )

LOGIN_REDIRECT_URL = '/'

INSTALLED_APPS = (
@@ -123,14 +137,13 @@ INSTALLED_APPS = (
    'django.contrib.messages',
    'django.contrib.staticfiles',
    #'troggle.photologue',
-    #'troggle.reversion',
-    #'django_evolution',
    'tinymce',
    'registration',
    'troggle.profiles',
    'troggle.core',
    'troggle.flatpages',
-    'troggle.imagekit',
+    'imagekit',
+    'django_extensions',
)

MIDDLEWARE_CLASSES = (
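With the TEMPLATES setting above, the old TEMPLATE_CONTEXT_PROCESSORS tuple is gone and extra template variables have to come from the 'context_processors' list; the commented-out 'core.context.troggle_context' entry would be re-enabled there once ported. A minimal sketch of what such a processor looks like, with an illustrative name and body rather than the real troggle_context code:

from django.conf import settings

def settings_vars(request):
    # whatever this returns is merged into the context of every template render
    return {
        'URL_ROOT': settings.URL_ROOT,
        'EXPOWEB_URL': settings.EXPOWEB_URL,
    }

# listed in TEMPLATES[0]['OPTIONS']['context_processors'] as e.g. 'core.context.settings_vars'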
@@ -1,22 +1,22 @@
-<!DOCTYPE html>
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html lang="en">
<head>
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"/>
-<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/main3.css" title="eyeCandy"/>
+<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/main3.css" title="eyeCandy"/>
-<link rel="alternate stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/mainplain.css" title="plain"/>
+<link rel="alternate stylesheet" type="text/css" href="{{ MEDIA_URL }}css/mainplain.css" title="plain"/>
-<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/dropdownNavStyle.css" />
+<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/dropdownNavStyle.css" />
<title>{% block title %}Troggle{% endblock %}</title>
<!-- <script src="{{ settings.JSLIB_URL }}jquery/jquery.min.js" type="text/javascript"></script> -->
-<script src="{{ settings.MEDIA_URL }}js/jquery.quicksearch.js" type="text/javascript"></script>
+<script src="{{ MEDIA_URL }}js/jquery.quicksearch.js" type="text/javascript"></script>
-<script src="{{ settings.MEDIA_URL }}js/base.js" type="text/javascript"></script>
+<script src="{{ MEDIA_URL }}js/base.js" type="text/javascript"></script>
-<script src="{{ settings.MEDIA_URL }}js/jquery.dropdownPlain.js" type="text/javascript"></script>
+<script src="{{ MEDIA_URL }}js/jquery.dropdownPlain.js" type="text/javascript"></script>

{% block head %}{% endblock %}
</head>
<body onLoad="contentHeight();">

<div id="header">
-<h1>CUCC Expeditions to Austria: 1976 - 2020</h1>
+<h1>CUCC Expeditions to Austria: 1976 - 2018</h1>
<div id="editLinks"> {% block loginInfo %}
<a href="{{settings.EXPOWEB_URL}}">Website home</a> |
{% if user.username %}
@@ -43,8 +43,6 @@
<a href="{% url "expedition" 2016 %}">Expo2016</a> |
<a href="{% url "expedition" 2017 %}">Expo2017</a> |
<a href="{% url "expedition" 2018 %}">Expo2018</a> |
-<a href="{% url "expedition" 2019 %}">Expo2019</a> |
-<a href="{% url "expedition" 2020 %}">Expo2020</a> |

<a href="/admin/">Django admin</a>
</div>
@@ -66,8 +64,8 @@
<div id="related">
{% block related %}
<script language="javascript">
$('#related').remove()
/*This is a hack to stop a line appearing because of the empty div border*/
</script>
{% endblock %}
</div>
@@ -83,7 +81,7 @@

<li><a href="#">External links</a>
<ul class="sub_menu">
-<li><a id="cuccLink" href="https://camcaving.uk">CUCC website</a></li>
+<li><a id="cuccLink" href="http://www.srcf.ucam.org/caving/wiki/Main_Page">CUCC website</a></li>
<li><a id="expoWebsiteLink" href="http://expo.survex.com">Expedition website</a></li>
</ul>
</li>
@@ -220,7 +220,7 @@ div.linear-scale-caption {
}
#frame .tab {
    position: absolute;
-    right: 0px;lass="cavedisplay"
+    right: 0px;
    width: 40px;
    height: 40px;
    box-sizing: border-box;
@@ -408,8 +408,8 @@ div#scene {

</style>

-<script type="text/javascript" src="/javascript/CaveView/js/CaveView.js" ></script>
+<script type="text/javascript" src="/CaveView/js/CaveView.js" ></script>
-<script type="text/javascript" src="/javascript/CaveView/lib/proj4.js" ></script>
+<script type="text/javascript" src="/CaveView/lib/proj4.js" ></script>


<script type="text/javascript" >
@@ -421,7 +421,7 @@ div#scene {
CV.UI.init( 'scene', {
    home: '/javascript/CaveView/',
    surveyDirectory: '/cave/3d/',
    terrainDirectory: '/loser/surface/terrain/'
} );

// load a single survey to display
@@ -516,7 +516,10 @@ div#scene {
{% if ent.entrance.exact_station %}
<dt>Exact Station</dt><dd>{{ ent.entrance.exact_station|safe }} {{ ent.entrance.exact_location.y|safe }}, {{ ent.entrance.exact_location.x|safe }}, {{ ent.entrance.exact_location.z|safe }}m</dd>
{% endif %}
-{% if ent.entrance.other_station %}
+{% if ent.entrance.find_location %}
+<dt>Coordinates</dt><dd>{{ ent.entrance.find_location|safe }}</dd>
+{% endif %}
+{% if ent.entrance.other_station %}
<dt>Other Station</dt><dd>{{ ent.entrance.other_station|safe }}
{% if ent.entrance.other_description %}
- {{ ent.entrance.other_description|safe }}
@@ -1,4 +1,4 @@
-<!DOCTYPE html>
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<!-- Only put one cave in this file -->
<!-- If you edit this file, make sure you update the websites database -->
<html lang="en">
@@ -1,4 +1,4 @@
-<!DOCTYPE html>
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<!-- Only put one entrance in this file -->
<!-- If you edit this file, make sure you update the websites database -->
<html lang="en">
@@ -1,5 +1,4 @@
{% autoescape off %}
-<!DOCTYPE html>
<html>
<head>
<style type="text/css">.author {text-decoration:underline}</style>
@@ -2,15 +2,15 @@
{% load wiki_markup %}
{% load link %}

-{% block title %}Expedition {{expedition.name}}{% endblock %}
+{% block title %}Expedition {{this_expedition.name}}{% endblock %}
-{% block editLink %}<a href={{expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}
+{% block editLink %}<a href={{this_expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}

{% block related %}
{% endblock %}

{% block content %}

-<h2>{{expedition.name}}</h2>
+<h2>{{this_expedition.name}}</h2>

<p><b>Other years:</b>
{% for otherexpedition in expeditions %}
@@ -29,7 +29,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<table class="expeditionpersonlist">
<tr>
<th>Caver</th>
-{% for expeditionday in expedition.expeditionday_set.all %}
+{% for expeditionday in this_expedition.expeditionday_set.all %}
<th>
{{expeditionday.date.day}}
</th>
@@ -63,7 +63,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<form action="" method="GET"><input type="submit" name="reload" value="Reload"></form>

<h3>Logbooks and survey trips per day</h3>
-<a href="{% url "newLogBookEntry" expeditionyear=expedition.year %}">New logbook entry</a>
+<a href="{% url "newLogBookEntry" expeditionyear=this_expedition.year %}">New logbook entry</a>
<table class="expeditionlogbooks">
<tr><th>Date</th><th>Logged trips</th><th>Surveys</th></tr>
{% regroup dateditems|dictsort:"date" by date as dates %}
@@ -18,8 +18,8 @@
{% if pic.is_mugshot %}
<div class="figure">
<p> <img src="{{ pic.thumbnail_image.url }}" class="thumbnail" />
-<p> {{ pic.caption }}</p>
+<p> {{ pic.caption }} </p>
-<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a> </>
+<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a>
</p>
</p>
</div>
@@ -32,7 +32,7 @@
<ul>
{% for personexpedition in person.personexpedition_set.all %}
<li> <a href="{{ personexpedition.get_absolute_url }}">{{personexpedition.expedition.year}}</a>
-<span style="padding-left:{{personexpedition.persontrip_set.all|length}}0px; background-color:red"></span>
+<span style="padding-left:{{ personexpedition.persontrip_set.all|length }}0px; background-color:red"></span>
{{personexpedition.persontrip_set.all|length}} trips
</li>
{% endfor %}
@@ -4,9 +4,7 @@

{% block title %} QM: {{qm|wiki_to_html_short}} {% endblock %}

-{% block editLink %}| <a href={{qm.get_admin_url}}>Edit QM {{qm|wiki_to_html_short}}</a>{% endblock %}
+{% block editLink %}| <a href="{{qm.get_admin_url}}/">Edit QM {{qm|wiki_to_html_short}}</a>{% endblock %}

-
-
{% block contentheader %}
<table id="cavepage">
@@ -5,7 +5,7 @@
{% block title %}CUCC Virtual Survey Binder: {{ current_expedition }}{{ current_survey }}{%endblock%}
{% block head %}

-<link rel="stylesheet" type="text/css" href="{{ settings.MEDIA_URL }}css/nav.css" />
+<link rel="stylesheet" type="text/css" href="{{ MEDIA_URL }}css/nav.css" />

<script language="javascript">
blankColor = "rgb(153, 153, 153)"
@@ -164,7 +164,7 @@
</p>
</div>
{% endfor %}
-<div class="figure"> <a href="{{ settings.URL_ROOT }}/admin/expo/scannedimage/add/"> <img src="{{ settings.URL_ROOT }}{{ settings.ADMIN_MEDIA_PREFIX }}img/admin/icon_addlink.gif" /> Add a new scanned notes page. </a> </div>
+<div class="figure"> <a href="{{ URL_ROOT }}/admin/expo/scannedimage/add/"> <img src="{{ URL_ROOT }}{{ ADMIN_MEDIA_PREFIX }}img/admin/icon_addlink.gif" /> Add a new scanned notes page. </a> </div>
</div>
<br class="clearfloat" />
<div id="survexFileContent" class="behind"> survex file editor, keeping file in original structure <br />
@@ -4,7 +4,7 @@
{% block title %}{{ title }}{% endblock %}

{% block head %}
-<script src="{{ settings.MEDIA_URL }}js/base.js" type="text/javascript"></script>
+<script src="{{ MEDIA_URL }}js/base.js" type="text/javascript"></script>
<script type="text/javascript" src="{{settings.JSLIB_URL}}jquery-form/jquery.form.min.js"></script>
<script type="text/javascript" src="{{settings.JSLIB_URL}}codemirror/codemirror.min.js"></script>

@@ -34,6 +34,6 @@ add wikilinks
{% endblock content %}

{% block margins %}
-<img class="leftMargin eyeCandy fadeIn" src="{{ settings.MEDIA_URL }}eieshole.jpg">
+<img class="leftMargin eyeCandy fadeIn" src="{{ MEDIA_URL }}eieshole.jpg">
-<img class="rightMargin eyeCandy fadeIn" src="{{ settings.MEDIA_URL }}goesser.jpg">
+<img class="rightMargin eyeCandy fadeIn" src="{{ MEDIA_URL }}goesser.jpg">
{% endblock margins %}
55  urls.py
@@ -1,17 +1,19 @@
from django.conf.urls import *
from django.conf import settings
+from django.conf.urls.static import static
+from django.views.static import serve

from core.views import * # flat import
from core.views_other import *
from core.views_caves import *
from core.views_survex import *
from core.models import *
+from flatpages.views import *
from django.views.generic.edit import UpdateView
from django.contrib import admin
from django.views.generic.list import ListView
from django.contrib import admin
-admin.autodiscover()
+#admin.autodiscover()


# type url probably means it's used.

@@ -20,7 +22,7 @@ admin.autodiscover()
# <reference to python function in 'core' folder>,
# <name optional argument for URL reversing (doesn't do much)>)

-actualurlpatterns = patterns('',
+actualurlpatterns = [

    url(r'^testingurl/?$' , views_caves.millenialcaves, name="testing"),

@@ -74,14 +76,13 @@ actualurlpatterns = patterns('',
    url(r'^cave/(?P<slug>[^/]+)/edit/$', views_caves.edit_cave, name="edit_cave"),
    #(r'^cavesearch', caveSearch),

+    # url(r'^cave/(?P<cave_id>[^/]+)/(?P<year>\d\d\d\d)-(?P<qm_id>\d*)(?P<grade>[ABCDX]?)?$', views_caves.qm, name="qm"),
-    url(r'^cave/(?P<cave_id>[^/]+)/(?P<year>\d\d\d\d)-(?P<qm_id>\d*)(?P<grade>[ABCDX]?)?$', views_caves.qm, name="qm"),
+    url(r'^cave/qm/(?P<qm_id>[^/]+)?$', views_caves.qm, name="qm"),

    url(r'^prospecting_guide/$', views_caves.prospecting),

    url(r'^logbooksearch/(.*)/?$', views_logbooks.logbookSearch),

    url(r'^statistics/?$', views_other.stats, name="stats"),

    url(r'^survey/?$', surveyindex, name="survey"),
@@ -93,29 +94,25 @@ actualurlpatterns = patterns('',
    url(r'^logbook(?P<year>\d\d\d\d)\.(?P<extension>.*)/?$',views_other.downloadLogbook),
    url(r'^logbook/?$',views_other.downloadLogbook, name="downloadlogbook"),
    url(r'^cave/(?P<cave_id>[^/]+)/qm\.csv/?$', views_other.downloadQMs, name="downloadqms"),
-    (r'^downloadqms$', views_other.downloadQMs),
+    url(r'^downloadqms$', views_other.downloadQMs),

    url(r'^eyecandy$', views_other.eyecandy),

-    (r'^admin/doc/?', include('django.contrib.admindocs.urls')),
+    url(r'^admin/doc/?', include('django.contrib.admindocs.urls')),
    #url(r'^admin/(.*)', admin.site.get_urls, name="admin"),
-    (r'^admin/', include(admin.site.urls)),
+    url(r'^admin/', include(admin.site.urls)),

    # don't know why this needs troggle/ in here. nice to get it out
-    url(r'^troggle/media-admin/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
+    # url(r'^troggle/media-admin/(?P<path>.*)$', static, {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),


-    (r'^accounts/', include('registration.backends.default.urls')),
+    url(r'^accounts/', include('registration.backends.default.urls')),
-    (r'^profiles/', include('profiles.urls')),
+    url(r'^profiles/', include('profiles.urls')),


    # (r'^personform/(.*)$', personForm),

-    (r'^site_media/(?P<path>.*)$', 'django.views.static.serve',
-        {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
-    (r'^tinymce_media/(?P<path>.*)$', 'django.views.static.serve',
-        {'document_root': settings.TINY_MCE_MEDIA_ROOT, 'show_indexes': True}),
+    url(r'^site_media/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),


    url(r'^survexblock/(.+)$', views_caves.survexblock, name="survexblock"),
    url(r'^survexfile/(?P<survex_file>.*?)\.svx$', views_survex.svx, name="svx"),
@@ -129,12 +126,10 @@ actualurlpatterns = patterns('',
    url(r'^survexfileraw/(?P<survex_file>.*?)\.svx$', views_survex.svxraw, name="svxraw"),


-    (r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
+    url(r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
-    (r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
+    url(r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
    #(r'^survey_files/upload/(?P<path>.*)$', view_surveys.upload),



    #(r'^survey_scans/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.SURVEY_SCANS, 'show_indexes':True}),
    url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
    url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
@@ -147,24 +142,22 @@ actualurlpatterns = patterns('',

    #url(r'^tunneldatainfo/(?P<path>.+?\.xml)$', view_surveys.tunnelfileinfo, name="tunnelfileinfo"),

-    (r'^photos/(?P<path>.*)$', 'django.views.static.serve',
-        {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
+    # url(r'^photos/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),

    url(r'^prospecting/(?P<name>[^.]+).png$', prospecting_image, name="prospecting_image"),

-    # (r'^gallery/(?P<path>.*)$', 'django.views.static.serve',
-    #     {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
+    # (r'^gallery/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),

    # for those silly ideas
    url(r'^experimental.*$', views_logbooks.experimental, name="experimental"),

    #url(r'^trip_report/?$',views_other.tripreport,name="trip_report")

-    url(r'^(.*)_edit$', 'flatpages.views.editflatpage', name="editflatpage"),
+    url(r'^(.*)_edit$', editflatpage, name="editflatpage"),
-    url(r'^(.*)$', 'flatpages.views.flatpage', name="flatpage"),
+    url(r'^(.*)$', flatpage, name="flatpage"),
-)
+]

#Allow prefix to all urls
-urlpatterns = patterns ('',
+urlpatterns = [
-    ('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
+    url('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
-)
+] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
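Taken together these urls.py hunks are the standard Django 1.10 migration: patterns() no longer exists, so urlpatterns becomes a plain list of url() entries, string view references become imported callables (serve, editflatpage, flatpage), and media/static serving is appended with static(). A stripped-down sketch of the same shape, with an illustrative view rather than the troggle ones:

from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from django.views.static import serve

from myapp.views import frontpage      # hypothetical view

urlpatterns = [
    url(r'^$', frontpage, name='frontpage'),
    url(r'^site_media/(?P<path>.*)$', serve,
        {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)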
4  utils.py
@@ -46,7 +46,7 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
    instance, created=objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)

    if not created and not instance.new_since_parsing:
-        for k, v in nonLookupAttribs.items(): #overwrite the existing attributes from the logbook text (except date and title)
+        for k, v in list(nonLookupAttribs.items()): #overwrite the existing attributes from the logbook text (except date and title)
            setattr(instance, k, v)
        instance.save()

@@ -112,7 +112,7 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),

def html_to_wiki(text, codec = "utf-8"):
    if type(text) == str:
-        text = unicode(text, codec)
+        text = str(text, codec)
    text = re.sub("</p>", r"", text)
    text = re.sub("<p>$", r"", text)
    text = re.sub("<p>", r"\n\n", text)