mirror of https://expo.survex.com/repositories/troggle/.git, synced 2025-12-16 13:57:09 +00:00
Just removing dud whitespace
@@ -9,12 +9,12 @@ from troggle.core.views_other import downloadLogbook


class TroggleModelAdmin(admin.ModelAdmin):
    def save_model(self, request, obj, form, change):
        """overriding admin save to fill the new_since parsing_field"""
        obj.new_since_parsing=True
        obj.save()

    class Media:
        js = ('jquery/jquery.min.js','js/QM_helper.js')

@@ -44,7 +44,7 @@ class OtherCaveInline(admin.TabularInline):

class SurveyAdmin(TroggleModelAdmin):
    inlines = (ScannedImageInline,)
    search_fields = ('expedition__year','wallet_number')


class QMsFoundInline(admin.TabularInline):
@@ -52,7 +52,7 @@ class QMsFoundInline(admin.TabularInline):
    fk_name='found_by'
    fields=('number','grade','location_description','comment')#need to add foreignkey to cave part
    extra=1


class PhotoInline(admin.TabularInline):
    model = DPhoto
@@ -68,7 +68,7 @@ class PersonTripInline(admin.TabularInline):

#class LogbookEntryAdmin(VersionAdmin):
class LogbookEntryAdmin(TroggleModelAdmin):
    prepopulated_fields = {'slug':("title",)}
    search_fields = ('title','expedition__year')
    date_heirarchy = ('date')
    inlines = (PersonTripInline, PhotoInline, QMsFoundInline)
@@ -77,11 +77,11 @@ class LogbookEntryAdmin(TroggleModelAdmin):
        "all": ("css/troggleadmin.css",)
        }
    actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')

    def export_logbook_entries_as_html(self, modeladmin, request, queryset):
        response=downloadLogbook(request=request, queryset=queryset, extension='html')
        return response

    def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
        response=downloadLogbook(request=request, queryset=queryset, extension='txt')
        return response

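For context on the action hooks wired up above: in stock Django, a method named in `actions` is called with the request and the selected queryset, and any HttpResponse it returns (such as the one from downloadLogbook) is served straight back to the admin user. A minimal self-contained sketch of that contract; the model and action names here are hypothetical, not troggle code:

    # Sketch only: illustrates the standard Django admin-action signature.
    # "Article" and "make_published" are hypothetical names.
    from django.contrib import admin
    from myapp.models import Article  # hypothetical model

    class ArticleAdmin(admin.ModelAdmin):
        actions = ('make_published',)

        def make_published(self, request, queryset):
            # queryset holds exactly the rows ticked in the change list
            queryset.update(status='published')
        make_published.short_description = "Mark selected articles as published"
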
@@ -15,7 +15,7 @@ def listdir(*path):
    for p in os.listdir(root):
        if os.path.isdir(os.path.join(root, p)):
            l += p + "/\n"

        elif os.path.isfile(os.path.join(root, p)):
            l += p + "\n"
        #Ignore non-files and non-directories
@@ -28,7 +28,7 @@ def listdir(*path):
        c = c.replace("#", "%23")
        print("FILE: ", settings.FILES + "listdir/" + c)
        return urllib.urlopen(settings.FILES + "listdir/" + c).read()

def dirsAsList(*path):
    return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] == "/"]

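listdir returns one entry per line with directories marked by a trailing slash, which is all dirsAsList relies on. A worked example of that filter on a hypothetical listing string:

    # Worked example (hypothetical listing string) of the dirsAsList filter:
    listing = "2009#01/\nnotes.txt\n2009#02/\nplan.jpg\n"
    dirs = [d for d in listing.split("\n") if len(d) > 0 and d[-1] == "/"]
    print(dirs)  # ['2009#01/', '2009#02/']
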
@@ -16,7 +16,7 @@ class CaveForm(ModelForm):
    underground_centre_line = forms.CharField(required = False, widget=forms.Textarea())
    notes = forms.CharField(required = False, widget=forms.Textarea())
    references = forms.CharField(required = False, widget=forms.Textarea())
    url = forms.CharField(required = True)
    class Meta:
        model = Cave
        exclude = ("filename",)
@@ -24,9 +24,9 @@ class CaveForm(ModelForm):

    def clean(self):
        if self.cleaned_data.get("kataster_number") == "" and self.cleaned_data.get("unofficial_number") == "":
            self._errors["unofficial_number"] = self.error_class(["Either the kataster or unoffical number is required."])
        if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
            self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
        if self.cleaned_data.get("area") == []:
            self._errors["area"] = self.error_class(["This field is required."])
        if self.cleaned_data.get("url") and self.cleaned_data.get("url").startswith("/"):

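The clean() above reports cross-field errors by writing into self._errors directly, an idiom from older Django (before add_error existed). The same either/or rule in a standalone form would read as sketched here; the field names are hypothetical:

    # Sketch only: the same cross-field rule with plain django.forms;
    # "number_a" and "number_b" are hypothetical field names.
    from django import forms

    class EitherOrForm(forms.Form):
        number_a = forms.CharField(required=False)
        number_b = forms.CharField(required=False)

        def clean(self):
            cleaned = super(EitherOrForm, self).clean()
            if not cleaned.get("number_a") and not cleaned.get("number_b"):
                raise forms.ValidationError("Either number_a or number_b is required.")
            return cleaned
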
@@ -82,11 +82,11 @@ class EntranceLetterForm(ModelForm):
# This function returns html-formatted paragraphs for each of the
# wikilink types that are related to this logbookentry. Each paragraph
# contains a list of all of the related wikilinks.
#
# Perhaps an admin javascript solution would be better.
# """
# res = ["Please use the following wikilinks, which are related to this logbook entry:"]
#
# res.append(r'</p><p style="float: left;"><b>QMs found:</b>')
# for QM in LogbookEntry.instance.QMs_found.all():
# res.append(QM.wiki_link())
@@ -94,12 +94,12 @@ class EntranceLetterForm(ModelForm):
# res.append(r'</p><p style="float: left;"><b>QMs ticked off:</b>')
# for QM in LogbookEntry.instance.QMs_ticked_off.all():
# res.append(QM.wiki_link())

# res.append(r'</p><p style="float: left; "><b>People</b>')
# for persontrip in LogbookEntry.instance.persontrip_set.all():
# res.append(persontrip.wiki_link())
# res.append(r'</p>')

# return string.join(res, r'<br />')

# def __init__(self, *args, **kwargs):
@@ -107,7 +107,7 @@ class EntranceLetterForm(ModelForm):
# self.fields['text'].help_text=self.wikiLinkHints()#

#class CaveForm(forms.Form):
#    html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

def getTripForm(expedition):

@@ -118,18 +118,18 @@ def getTripForm(expedition):
        caves.sort()
        caves = ["-----"] + caves
        cave = forms.ChoiceField([(c, c) for c in caves], required=False)
        location = forms.CharField(max_length=200, required=False)
        caveOrLocation = forms.ChoiceField([("cave", "Cave"), ("location", "Location")], widget = forms.widgets.RadioSelect())
        html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

        def clean(self):
            print(dir(self))
            if self.cleaned_data.get("caveOrLocation") == "cave" and not self.cleaned_data.get("cave"):
                self._errors["cave"] = self.error_class(["This field is required"])
            if self.cleaned_data.get("caveOrLocation") == "location" and not self.cleaned_data.get("location"):
                self._errors["location"] = self.error_class(["This field is required"])
            return self.cleaned_data

    class PersonTripForm(forms.Form):
        names = [get_name(pe) for pe in PersonExpedition.objects.filter(expedition = expedition)]
        names.sort()
@@ -141,7 +141,7 @@ def getTripForm(expedition):
    PersonTripFormSet = formset_factory(PersonTripForm, extra=1)

    return PersonTripFormSet, TripForm

def get_name(pe):
    if pe.nickname:
        return pe.nickname

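getTripForm builds a formset class and a form class per expedition and hands both back, so a caller validates them together. A hedged sketch of a consuming view; the view name, template, and save step are assumptions, not troggle code:

    # Sketch of a view consuming the (PersonTripFormSet, TripForm) pair.
    from django.shortcuts import render

    def newtrip(request, expedition):  # hypothetical view
        PersonTripFormSet, TripForm = getTripForm(expedition)
        if request.method == 'POST':
            tripform = TripForm(request.POST)
            formset = PersonTripFormSet(request.POST)
            if tripform.is_valid() and formset.is_valid():
                pass  # save the trip and each person-trip row here
        else:
            tripform = TripForm()
            formset = PersonTripFormSet()
        return render(request, 'newtrip.html',
                      {'tripform': tripform, 'formset': formset})
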
@@ -162,18 +162,18 @@ def get_name(pe):
# caves = ["-----"] + caves
# cave = forms.ChoiceField([(c, c) for c in caves], required=False)

# entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
# qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)

# expeditions = [e.year for e in Expedition.objects.all()]
# expeditions.sort()
# expeditions = ["-----"] + expeditions
# expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)

# logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

# person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

# survey_point = forms.CharField()

@@ -1,21 +1,21 @@
from imagekit.specs import ImageSpec
from imagekit import processors

class ResizeThumb(processors.Resize):
    width = 100
    crop = False

class ResizeDisplay(processors.Resize):
    width = 600

#class EnhanceThumb(processors.Adjustment):
    #contrast = 1.2
    #sharpness = 2

class Thumbnail(ImageSpec):
    access_as = 'thumbnail_image'
    pre_cache = True
    processors = [ResizeThumb]

class Display(ImageSpec):
    increment_count = True

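These specs pair with the IKOptions block on DPhoto in core/models.py below; under the old imagekit 0.x API used here, a spec's processed image is exposed on the model instance via its access_as name. A hedged usage sketch, with a hypothetical pk:

    # Sketch (old imagekit 0.x API): the Thumbnail spec above surfaces as
    # photo.thumbnail_image on DPhoto instances; the pk value is hypothetical.
    photo = DPhoto.objects.get(pk=1)
    print(photo.thumbnail_image.url)  # 100px-wide cached thumbnail
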
core/models.py (120 changed lines)
@@ -27,10 +27,10 @@ def get_related_by_wikilinks(wiki_text):
            qm=QM.objects.get(found_by__cave_slug__in = cave_slugs,
                              found_by__date__year = qmdict['year'],
                              number = qmdict['number'])
            res.append(qm)
        except QM.DoesNotExist:
            print('fail on '+str(wikilink))

    return res

try:
@@ -38,7 +38,7 @@ try:
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
@@ -58,7 +58,7 @@ class TroggleModel(models.Model):

class TroggleImageModel(models.Model):
    new_since_parsing = models.BooleanField(default=False, editable=False)

    def object_name(self):
        return self._meta.object_name

@@ -69,23 +69,23 @@ class TroggleImageModel(models.Model):
    class Meta:
        abstract = True

#
# single Expedition, usually seen by year
#
class Expedition(TroggleModel):
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.year

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    # construction function. should be moved out
    def get_expedition_day(self, date):
        expeditiondays = self.expeditionday_set.filter(date=date)
@@ -95,11 +95,11 @@ class Expedition(TroggleModel):
            res = ExpeditionDay(expedition=self, date=date)
            res.save()
        return res

    def day_min(self):
        res = self.Expeditionday_set.all()
        return res and res[0] or None

    def day_max(self):
        res = self.Expeditionday_set.all()
        return res and res[len(res) - 1] or None

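get_expedition_day above is a hand-rolled get-or-create; the same filter-then-create step can be written with Django's built-in helper, sketched here (equivalent as long as each expedition has at most one ExpeditionDay per date):

    # Sketch: get_expedition_day expressed with Django's get_or_create helper.
    def get_expedition_day(self, date):
        res, created = self.expeditionday_set.get_or_create(date=date)
        return res
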
@@ -129,23 +129,23 @@ class Person(TroggleModel):
    is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    mug_shot = models.CharField(max_length=100, blank=True,null=True)
    blurb = models.TextField(blank=True,null=True)

    #href = models.CharField(max_length=200)
    orderref = models.CharField(max_length=200) # for alphabetic
    user = models.OneToOneField(User, null=True, blank=True)
    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))

    class Meta:
        verbose_name_plural = "People"
        ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __unicode__(self):
        if self.last_name:
            return "%s %s" % (self.first_name, self.last_name)
        return self.first_name


    def notability(self):
        notability = Decimal(0)
        max_expo_val = 0
@@ -161,15 +161,15 @@ class Person(TroggleModel):

    def bisnotable(self):
        return self.notability() > Decimal(1)/Decimal(3)

    def surveyedleglength(self):
        return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

    def first(self):
        return self.personexpedition_set.order_by('-expedition')[0]
    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]

    #def Sethref(self):
        #if self.last_name:
            #self.href = self.first_name.lower() + "_" + self.last_name.lower()
@@ -178,7 +178,7 @@ class Person(TroggleModel):
        #    self.href = self.first_name.lower()
        #self.orderref = self.first_name
        #self.notability = 0.0 # set temporarily


#
# Person's attenance to one Expo
@@ -187,8 +187,8 @@ class PersonExpedition(TroggleModel):
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50,blank=True,null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader','Expo leader'),
        ('medical','Expo medical officer'),
@@ -198,7 +198,7 @@ class PersonExpedition(TroggleModel):
        )
    expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
    nickname = models.CharField(max_length=100,blank=True,null=True)

    def GetPersonroles(self):
        res = [ ]
        for personrole in self.personrole_set.order_by('survexblock'):
@@ -214,8 +214,8 @@ class PersonExpedition(TroggleModel):

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)


    #why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        if self.nickname:
@@ -226,11 +226,11 @@ class PersonExpedition(TroggleModel):

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))

    def surveyedleglength(self):
        survexblocks = [personrole.survexblock for personrole in self.personrole_set.all() ]
        return sum([survexblock.totalleglength for survexblock in set(survexblocks)])

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
@@ -242,7 +242,7 @@ class PersonExpedition(TroggleModel):

#
# Single parsed entry from Logbook
#
class LogbookEntry(TroggleModel):

    LOGBOOK_ENTRY_TYPES = (
@@ -265,7 +265,7 @@ class LogbookEntry(TroggleModel):
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)

    def __getattribute__(self, item):
        if item == "cave": #Allow a logbookentries cave to be directly accessed despite not having a proper foreignkey
            return CaveSlug.objects.get(slug = self.cave_slug).cave
@@ -314,18 +314,18 @@ class LogbookEntry(TroggleModel):
#
class PersonTrip(TroggleModel):
    personexpedition = models.ForeignKey("PersonExpedition",null=True)

    #expeditionday = models.ForeignKey("ExpeditionDay")#MJG wants to KILL THIS (redundant information)
    #date = models.DateField() #MJG wants to KILL THIS (redundant information)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)


    # sequencing by person (difficult to solve locally)
    #persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True)#MJG wants to KILL THIS (and use funstion persontrip_next_auto)
    #persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True)#MJG wants to KILL THIS(and use funstion persontrip_prev_auto)

    def persontrip_next(self):
        futurePTs = PersonTrip.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
        if len(futurePTs) > 0:
@@ -345,7 +345,7 @@ class PersonTrip(TroggleModel):

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)



##########################################
@@ -375,7 +375,7 @@ class CaveAndEntrance(models.Model):

    def __unicode__(self):
        return unicode(self.cave) + unicode(self.entrance_letter)

class CaveSlug(models.Model):
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique = True)
@@ -385,7 +385,7 @@ class CaveSlug(models.Model):
        return self.slug

class Cave(TroggleModel):
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True)
    kataster_code = models.CharField(max_length=20,blank=True,null=True)
@@ -411,13 +411,13 @@ class Cave(TroggleModel):

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating


    #href = models.CharField(max_length=100)

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')

    def hassurvey(self):
        if not self.underground_centre_line:
@@ -432,7 +432,7 @@ class Cave(TroggleModel):
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        primarySlugs = self.caveslug_set.filter(primary = True)
        if primarySlugs:
@@ -450,7 +450,7 @@ class Cave(TroggleModel):
            return "%s-%s" % (self.kat_area(), self.kataster_number)
        else:
            return "%s-%s" % (self.kat_area(), self.unofficial_number)

    def get_absolute_url(self):
        if self.kataster_number:
            href = self.kataster_number
@@ -479,13 +479,13 @@ class Cave(TroggleModel):
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        rs = []
        res = ""
@@ -513,12 +513,12 @@ class Cave(TroggleModel):
            else:
                res += "–" + prevR
        return res

    def writeDataFile(self):
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
@@ -526,7 +526,7 @@ class Cave(TroggleModel):
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        areas = self.area.all()
        lowestareas = list(areas)
@@ -543,7 +543,7 @@ def getCaveByReference(reference):
    #print(areaname, code)
    area = Area.objects.get(short_name = areaname)
    #print(area)
    foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
    print(list(foundCaves))
    if len(foundCaves) == 1:
        return foundCaves[0]
@@ -555,7 +555,7 @@ class OtherCaveName(TroggleModel):
    cave = models.ForeignKey(Cave)
    def __unicode__(self):
        return unicode(self.name)

class EntranceSlug(models.Model):
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique = True)
@@ -669,28 +669,28 @@ class Entrance(TroggleModel):
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):

        ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))

        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))

        return res

    def slug(self):
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary = True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
@@ -704,7 +704,7 @@ class Entrance(TroggleModel):
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
@@ -726,10 +726,10 @@ class CaveDescription(TroggleModel):
            return unicode(self.long_name)
        else:
            return unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.
@@ -798,7 +798,7 @@ class QM(TroggleModel):
        return u"%s%s%s" % ('[[QM:',self.code(),']]')

photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)
class DPhoto(TroggleImageModel):
    caption = models.CharField(max_length=1000,blank=True,null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
    contains_person = models.ManyToManyField(Person,blank=True)
@@ -810,12 +810,12 @@ class DPhoto(TroggleImageModel):
    nearest_QM = models.ForeignKey(QM,blank=True,null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')
@@ -832,7 +832,7 @@ def get_scan_path(instance, filename):
        number=str(instance.survey.wallet_letter) + number #two strings formatting because convention is 2009#01 or 2009#X01
    return os.path.join('./',year,year+r'#'+number,str(instance.contents)+str(instance.number_in_wallet)+r'.jpg')

class ScannedImage(TroggleImageModel):
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person,blank=True, null=True)
    scanned_on = models.DateField(null=True)

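The wallet path convention that get_scan_path encodes is 2009#01 (or 2009#X01 when there is a wallet letter). A worked example of the path construction, using hypothetical values:

    # Worked example (hypothetical values) of the path built by get_scan_path:
    import os
    year, number, contents, number_in_wallet = '2009', '01', 'notes', 3
    path = os.path.join('./', year, year + r'#' + number,
                        str(contents) + str(number_in_wallet) + r'.jpg')
    print(path)  # ./2009/2009#01/notes3.jpg
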
@@ -9,7 +9,7 @@ from django.core.urlresolvers import reverse
###########################################################
# These will allow browsing and editing of the survex data
###########################################################
# Needs to add:
# Equates
# reloading

@@ -29,21 +29,21 @@ class SurvexFile(models.Model):
    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True)
    cave = models.ForeignKey('Cave', blank=True, null=True)

    class Meta:
        ordering = ('id',)

    def __unicode__(self):
        return self.path + '.svx' or 'no file'

    def exists(self):
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return os.path.isfile(fname)

    def OpenFile(self):
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return open(fname)

    def SetDirectory(self):
        dirpath = os.path.split(self.path)[0]
        survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
@@ -67,7 +67,7 @@ class SurvexStationLookUpManager(models.Manager):
                        name__iexact = stationname)

class SurvexStation(models.Model):
    name = models.CharField(max_length=100)
    block = models.ForeignKey('SurvexBlock')
    equate = models.ForeignKey('SurvexEquate', blank=True, null=True)
    objects = SurvexStationLookUpManager()

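SurvexStation swaps its default manager for the lookup manager so callers (for example Entrance.tag in core/models.py above) can resolve a dotted station reference in one call. A minimal sketch of that custom-manager pattern; the lookup body here is illustrative only, not the real parsing logic:

    # Sketch of the custom-manager pattern; real lookup handles more cases.
    from django.db import models

    class StationManager(models.Manager):  # hypothetical name
        def lookup(self, name):
            # split "block.path.station" into block path and station name
            blockpath, stationname = name.rsplit(".", 1)
            return self.get(block__name__iexact=blockpath,
                            name__iexact=stationname)
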
@@ -103,8 +103,8 @@ class SurvexLeg(models.Model):


#
# Single SurvexBlock
#
class SurvexBlockLookUpManager(models.Manager):
    def lookup(self, name):
        if name == "":
@@ -122,20 +122,20 @@ class SurvexBlock(models.Model):
    parent = models.ForeignKey('SurvexBlock', blank=True, null=True)
    text = models.TextField()
    cave = models.ForeignKey('Cave', blank=True, null=True)

    date = models.DateTimeField(blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)
    expedition = models.ForeignKey('Expedition', blank=True, null=True)

    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True)
    begin_char = models.IntegerField() # code for where in the survex data files this block sits
    survexpath = models.CharField(max_length=200) # the path for the survex stations

    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)
    #refscandir = models.CharField(max_length=100)

    totalleglength = models.FloatField()

    class Meta:
        ordering = ('id',)

@@ -144,7 +144,7 @@ class SurvexBlock(models.Model):

    def __unicode__(self):
        return self.name and unicode(self.name) or 'no name'

    def GetPersonroles(self):
        res = [ ]
        for personrole in self.personrole_set.order_by('personexpedition'):
@@ -163,10 +163,10 @@ class SurvexBlock(models.Model):
            ss = SurvexStation(name=name, block=self)
            ss.save()
        return ss

    def DayIndex(self):
        return list(self.expeditionday.survexblock_set.all()).index(self)


class SurvexTitle(models.Model):
    survexblock = models.ForeignKey('SurvexBlock')
@@ -195,41 +195,41 @@ class SurvexPersonRole(models.Model):
    personname = models.CharField(max_length=100)
    person = models.ForeignKey('Person', blank=True, null=True)
    personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
    persontrip = models.ForeignKey('PersonTrip', blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)

    def __unicode__(self):
        return unicode(self.person) + " - " + unicode(self.survexblock) + " - " + unicode(self.nrole)


class SurvexScansFolder(models.Model):
    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)

    class Meta:
        ordering = ('walletname',)

    def __unicode__(self):
        return self.walletname or 'no wallet'

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))

class SurvexScanSingle(models.Model):
    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)

    class Meta:
        ordering = ('name',)

    def __unicode__(self):
        return self.survexscansfolder.walletname + '/' + self.name

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))


class TunnelFile(models.Model):
    tunnelpath = models.CharField(max_length=200)
    tunnelname = models.CharField(max_length=200)
@@ -241,8 +241,8 @@ class TunnelFile(models.Model):
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survextitles = models.ManyToManyField("SurvexTitle")


    class Meta:
        ordering = ('tunnelpath',)

@@ -8,7 +8,7 @@ from troggle.core.models import SurvexScansFolder, SurvexScanSingle, SurvexBlock
import parsers.surveys
import urllib

# inline fileabstraction into here if it's not going to be useful anywhere else
# keep things simple and ignore exceptions everywhere for now


@@ -33,7 +33,7 @@ def upload(request, path):

def download(request, path):
    #try:

    return HttpResponse(fileAbstraction.readFile(path), content_type=getMimeType(path.split(".")[-1]))
    #except:
    #    raise Http404
@@ -49,32 +49,32 @@ extmimetypes = {".txt": "text/plain",
                ".jpg": "image/jpeg",
                ".jpeg": "image/jpeg",
               }

# dead
def jgtfile(request, f):
    fp = os.path.join(settings.SURVEYS, f)
    # could also surf through SURVEX_DATA

    # directory listing
    if os.path.isdir(fp):
        listdirfiles = [ ]
        listdirdirs = [ ]

        for lf in sorted(os.listdir(fp)):
            hpath = os.path.join(f, lf) # not absolute path
            if lf[0] == "." or lf[-1] == "~":
                continue

            hpath = hpath.replace("\\", "/") # for windows users
            href = hpath.replace("#", "%23") # '#' in file name annoyance

            flf = os.path.join(fp, lf)
            if os.path.isdir(flf):
                nfiles = len([sf for sf in os.listdir(flf) if sf[0] != "."])
                listdirdirs.append((href, hpath + "/", nfiles))
            else:
                listdirfiles.append((href, hpath, os.path.getsize(flf)))

        upperdirs = [ ]
        lf = f
        while lf:
@@ -85,9 +85,9 @@ def jgtfile(request, f):
            lf = os.path.split(lf)[0]
            upperdirs.append((href, hpath))
        upperdirs.append(("", "/"))

        return render(request, 'listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})

    # flat output of file when loaded
    if os.path.isfile(fp):
        ext = os.path.splitext(fp)[1].lower()
@@ -123,16 +123,16 @@ def SaveImageInDir(name, imgdir, project, fdata, bbinary):
        print "*** Making directory", fprojdir
        os.path.mkdir(fprojdir)
        print "hhh"

    fname = os.path.join(fprojdir, name)
    print fname, "fff"
    fname = UniqueFile(fname)

    p2, p1 = os.path.split(fname)
    p3, p2 = os.path.split(p2)
    p4, p3 = os.path.split(p3)
    res = os.path.join(p3, p2, p1)

    print "saving file", fname
    fout = open(fname, (bbinary and "wb" or "w"))
    fout.write(fdata.read())
@@ -175,61 +175,61 @@ def surveyscansingle(request, path, file):
    survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
    return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
    #return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

def surveyscansfolders(request):
    survexscansfolders = SurvexScansFolder.objects.all()
    return render(request, 'survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })


def tunneldata(request):
    tunnelfiles = TunnelFile.objects.all()
    return render(request, 'tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })


def tunnelfile(request, path):
    tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
    tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
    return HttpResponse(content=open(tfile), content_type="text/plain")

def tunnelfileupload(request, path):
    tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
    tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)

    project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
    print (project, user, tunnelversion)


    assert len(request.FILES.values()) == 1, "only one file to upload"

    uploadedfile = request.FILES.values()[0]

    if uploadedfile.field_name != "sketch":
        return HttpResponse(content="Error: non-sketch file uploaded", content_type="text/plain")
    if uploadedfile.content_type != "text/plain":
        return HttpResponse(content="Error: non-plain content type", content_type="text/plain")

    # could use this to add new files
    if os.path.split(path)[1] != uploadedfile.name:
        return HttpResponse(content="Error: name disagrees", content_type="text/plain")

    orgsize = tunnelfile.filesize # = os.stat(tfile)[stat.ST_SIZE]

    ttext = uploadedfile.read()

    # could check that the user and projects agree here

    fout = open(tfile, "w")
    fout.write(ttext)
    fout.close()

    # redo its settings of
    parsers.surveys.SetTunnelfileInfo(tunnelfile)
    tunnelfile.save()

    uploadedfile.close()
    message = "File size %d overwritten with size %d" % (orgsize, tunnelfile.filesize)
    return HttpResponse(content=message, content_type="text/plain")
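
tunnelfileupload above is a chain of guard clauses ending in an in-place overwrite. A compact sketch of that shape (hypothetical helper, not the troggle view; note that request.FILES.values()[0] is Python 2 indexing, so a list() wrapper is needed on Python 3):

    def handle_upload(request, expected_name, target_path):
        # One guard per failure mode, each returning early with a plain error.
        files = list(request.FILES.values())
        if len(files) != 1:
            return "Error: exactly one file expected"
        upload = files[0]
        if upload.name != expected_name:
            return "Error: name disagrees"
        data = upload.read()
        with open(target_path, "wb") as fout:   # 'with' closes even on error
            fout.write(data)
        return "File overwritten with %d bytes" % len(data)
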
@@ -383,7 +383,7 @@ def mungecoord(x, y, mapcode, img):
    # image is 1417 by 2201
    # FACTOR1 = 1000.0 / (36670.0-34542.0)
    # FACTOR2 = (1201.0-562.0) / (83317 - 81967)
    # FACTOR = (FACTOR1 + FACTOR2)/2
    # The factors aren't the same as the scanned map's at a slight angle. I
    # can't be bothered to fix this. Since we zero on the Hinter it makes
    # very little difference for caves in the areas round 76 or 204.
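
Filling in the arithmetic behind the commented-out factors above (units are presumably map pixels per metre of survey coordinate; the two estimates come from the two axes):

    FACTOR1 = 1000.0 / (36670.0 - 34542.0)        # = 1000 / 2128  = 0.46992
    FACTOR2 = (1201.0 - 562.0) / (83317 - 81967)  # =  639 / 1350  = 0.47333
    FACTOR = (FACTOR1 + FACTOR2) / 2              # = 0.47163

The two estimates agree to within about 0.7%, which is why, as the comment says, averaging them makes very little difference near the zero point.
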
@@ -465,7 +465,7 @@ def prospecting_image(request, name):
    plot("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point", name, draw, img)
    plot("226-96", "BZkn", "Reference", "Bräuning Zinken trig point", name, draw, img)
    plot("vd1","VD1","Reference", "VD1 survey point", name, draw, img)
    plot("laser.kt114_96","HSK","Reference", "Hinterer Schwarzmooskogel trig point", name, draw, img)
    plot("2000","Nipple","Reference", "Nipple (Weiße Warze)", name, draw, img)
    plot("3000","VSK","Reference", "Vorderer Schwarzmooskogel summit", name, draw, img)
    plot("topcamp", "TC", "Reference", "Top Camp", name, draw, img)
@@ -37,7 +37,7 @@ def getNotablePersons():
|
|||||||
for person in Person.objects.all():
|
for person in Person.objects.all():
|
||||||
if person.bisnotable():
|
if person.bisnotable():
|
||||||
notablepersons.append(person)
|
notablepersons.append(person)
|
||||||
return notablepersons
|
return notablepersons
|
||||||
|
|
||||||
|
|
||||||
def personindex(request):
|
def personindex(request):
|
||||||
@@ -48,7 +48,7 @@ def personindex(request):
|
|||||||
nc = (len(persons) + ncols - 1) / ncols
|
nc = (len(persons) + ncols - 1) / ncols
|
||||||
for i in range(ncols):
|
for i in range(ncols):
|
||||||
personss.append(persons[i * nc: (i + 1) * nc])
|
personss.append(persons[i * nc: (i + 1) * nc])
|
||||||
|
|
||||||
notablepersons = []
|
notablepersons = []
|
||||||
for person in Person.objects.all():
|
for person in Person.objects.all():
|
||||||
if person.bisnotable():
|
if person.bisnotable():
|
||||||
@@ -67,13 +67,13 @@ def expedition(request, expeditionname):
|
|||||||
for personexpedition in this_expedition.personexpedition_set.all():
|
for personexpedition in this_expedition.personexpedition_set.all():
|
||||||
prow = [ ]
|
prow = [ ]
|
||||||
for date in dates:
|
for date in dates:
|
||||||
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
|
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
|
||||||
logbook_entry__date=date) }
|
logbook_entry__date=date) }
|
||||||
pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition,
|
pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition,
|
||||||
date=date))
|
date=date))
|
||||||
prow.append(pcell)
|
prow.append(pcell)
|
||||||
personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})
|
personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})
|
||||||
|
|
||||||
if "reload" in request.GET:
|
if "reload" in request.GET:
|
||||||
LoadLogbookForExpedition(this_expedition)
|
LoadLogbookForExpedition(this_expedition)
|
||||||
return render(request,'expedition.html', {'this_expedition': this_expedition,
|
return render(request,'expedition.html', {'this_expedition': this_expedition,
|
||||||
@@ -97,14 +97,14 @@ class ExpeditionListView(ListView):
|
|||||||
|
|
||||||
def person(request, first_name='', last_name='', ):
|
def person(request, first_name='', last_name='', ):
|
||||||
this_person = Person.objects.get(first_name = first_name, last_name = last_name)
|
this_person = Person.objects.get(first_name = first_name, last_name = last_name)
|
||||||
|
|
||||||
# This is for removing the reference to the user's profile, in case they set it to the wrong person
|
# This is for removing the reference to the user's profile, in case they set it to the wrong person
|
||||||
if request.method == 'GET':
|
if request.method == 'GET':
|
||||||
if request.GET.get('clear_profile')=='True':
|
if request.GET.get('clear_profile')=='True':
|
||||||
this_person.user=None
|
this_person.user=None
|
||||||
this_person.save()
|
this_person.save()
|
||||||
return HttpResponseRedirect(reverse('profiles_select_profile'))
|
return HttpResponseRedirect(reverse('profiles_select_profile'))
|
||||||
|
|
||||||
return render(request,'person.html', {'person': this_person, })
|
return render(request,'person.html', {'person': this_person, })
|
||||||
|
|
||||||
|
|
||||||
@@ -117,19 +117,19 @@ def GetPersonChronology(personexpedition):
|
|||||||
for personrole in personexpedition.survexpersonrole_set.all():
|
for personrole in personexpedition.survexpersonrole_set.all():
|
||||||
a = res.setdefault(personrole.survexblock.date, { })
|
a = res.setdefault(personrole.survexblock.date, { })
|
||||||
a.setdefault("personroles", [ ]).append(personrole.survexblock)
|
a.setdefault("personroles", [ ]).append(personrole.survexblock)
|
||||||
|
|
||||||
# build up the tables
|
# build up the tables
|
||||||
rdates = res.keys()
|
rdates = res.keys()
|
||||||
rdates.sort()
|
rdates.sort()
|
||||||
|
|
||||||
|
|
||||||
res2 = [ ]
|
res2 = [ ]
|
||||||
for rdate in rdates:
|
for rdate in rdates:
|
||||||
persontrips = res[rdate].get("persontrips", [])
|
persontrips = res[rdate].get("persontrips", [])
|
||||||
personroles = res[rdate].get("personroles", [])
|
personroles = res[rdate].get("personroles", [])
|
||||||
for n in range(max(len(persontrips), len(personroles))):
|
for n in range(max(len(persontrips), len(personroles))):
|
||||||
res2.append(((n == 0 and rdate or "--"), (n < len(persontrips) and persontrips[n]), (n < len(personroles) and personroles[n])))
|
res2.append(((n == 0 and rdate or "--"), (n < len(persontrips) and persontrips[n]), (n < len(personroles) and personroles[n])))
|
||||||
|
|
||||||
return res2
|
return res2
|
||||||
|
|
||||||
|
|
||||||
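
The res2 loop in GetPersonChronology above pads two lists of unequal length into table rows, dating only the first row of each group; rdates = res.keys() followed by rdates.sort() also relies on Python 2's list-returning keys(). An equivalent sketch:

    # Assuming res maps date -> {"persontrips": [...], "personroles": [...]}
    res2 = []
    for rdate in sorted(res.keys()):   # Python 3 spelling of keys() + sort()
        trips = res[rdate].get("persontrips", [])
        roles = res[rdate].get("personroles", [])
        for n in range(max(len(trips), len(roles))):
            res2.append((rdate if n == 0 else "--",          # date on first row only
                         trips[n] if n < len(trips) else False,
                         roles[n] if n < len(roles) else False))
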
@@ -180,7 +180,7 @@ def experimental(request):
            survexleglength += survexblock.totalleglength
        legsbyexpo.append((expedition, {"nsurvexlegs":len(survexlegs), "survexleglength":survexleglength}))
    legsbyexpo.reverse()

    survexlegs = models.SurvexLeg.objects.all()
    totalsurvexlength = sum([survexleg.tape for survexleg in survexlegs])
    return render(request, 'experimental.html', { "nsurvexlegs":len(survexlegs), "totalsurvexlength":totalsurvexlength, "legsbyexpo":legsbyexpo })

@@ -198,11 +198,11 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
        personTripFormSet = PersonTripFormSet(request.POST)
        if tripForm.is_valid() and personTripFormSet.is_valid(): # All validation rules pass
            dateStr = tripForm.cleaned_data["date"].strftime("%Y-%m-%d")
            directory = os.path.join(settings.EXPOWEB,
                                     "years",
                                     expedition.year,
                                     "autologbook")
            filename = os.path.join(directory,
                                    dateStr + "." + slugify(tripForm.cleaned_data["title"])[:50] + ".html")
            if not os.path.isdir(directory):
                os.mkdir(directory)

@@ -210,7 +210,7 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
                delLogbookEntry(previouslbe)
            f = open(filename, "w")
            template = loader.get_template('dataformat/logbookentry.html')
            context = Context({'trip': tripForm.cleaned_data,
                               'persons': personTripFormSet.cleaned_data,
                               'date': dateStr,
                               'expeditionyear': expeditionyear})

@@ -234,11 +234,11 @@ def newLogbookEntry(request, expeditionyear, pdate = None, pslug = None):
                                     "location": previouslbe.place,
                                     "caveOrLocation": "location",
                                     "html": previouslbe.text})
            personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
                                                            "TU": py.time_underground,
                                                            "author": py.is_logbook_entry_author}
                                                           for py in previouslbe.persontrip_set.all()])
        else:
            tripForm = TripForm() # An unbound form
            personTripFormSet = PersonTripFormSet()


@@ -35,7 +35,7 @@ def frontpage(request):
    return render(request,'frontpage.html', locals())

def todo(request):
    message = "no test message" #reverse('personn', kwargs={"name":"hkjhjh"})
    if "reloadexpos" in request.GET:
        message = LoadPersonsExpos()
        message = "Reloaded personexpos"

@@ -52,7 +52,7 @@ def controlPanel(request):
    jobs_completed=[]
    if request.method=='POST':
        if request.user.is_superuser:

            #importlist is mostly here so that things happen in the correct order.
            #http post data seems to come in an unpredictable order, so we do it this way.
            importlist=['reload_db', 'import_people', 'import_cavetab', 'import_logbooks', 'import_surveys', 'import_QMs']

@@ -85,7 +85,7 @@ def downloadSurveys(request):
    return response

def downloadLogbook(request,year=None,extension=None,queryset=None):

    if year:
        current_expedition=Expedition.objects.get(year=year)
        logbook_entries=LogbookEntry.objects.filter(expedition=current_expedition)

@@ -96,7 +96,7 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
    else:
        response = HttpResponse(content_type='text/plain')
        return response(r"Error: Logbook downloader doesn't know what year you want")

    if 'year' in request.GET:
        year=request.GET['year']
    if 'extension' in request.GET:

@@ -108,14 +108,14 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
    elif extension == 'html':
        response = HttpResponse(content_type='text/html')
        style='2005'

    template='logbook'+style+'style.'+extension
    response['Content-Disposition'] = 'attachment; filename='+filename+'.'+extension
    t=loader.get_template(template)
    c=Context({'logbook_entries':logbook_entries})
    response.write(t.render(c))
    return response


def downloadQMs(request):
    # Note to self: use get_cave method for the below

@@ -131,14 +131,14 @@ def downloadQMs(request):
    response['Content-Disposition'] = 'attachment; filename=qm.csv'
    toqms.writeQmTable(response,cave)
    return response

def ajax_test(request):
    post_text = request.POST['post_data']
    return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
                        content_type="application/json")

def eyecandy(request):
    return

def ajax_QM_number(request):
    if request.method=='POST':

@@ -158,14 +158,14 @@ def logbook_entry_suggestions(request):
    unwiki_QM_pattern=r"(?P<whole>(?P<explorer_code>[ABC]?)(?P<cave>\d*)-?(?P<year>\d\d\d?\d?)-(?P<number>\d\d)(?P<grade>[ABCDXV]?))"
    unwiki_QM_pattern=re.compile(unwiki_QM_pattern)
    #wikilink_QM_pattern=settings.QM_PATTERN

    slug=request.POST['slug']
    date=request.POST['date']
    lbo=LogbookEntry.objects.get(slug=slug, date=date)

    #unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
    unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]

    print(unwiki_QMs)
    for qm in unwiki_QMs:
        #try:
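
The unwiki_QM_pattern above is easiest to read against an example; a quick check of what the named groups capture (illustrative input, not taken from the logbooks):

    import re

    pat = re.compile(r"(?P<whole>(?P<explorer_code>[ABC]?)(?P<cave>\d*)-?"
                     r"(?P<year>\d\d\d?\d?)-(?P<number>\d\d)(?P<grade>[ABCDXV]?))")
    m = pat.search("saw C204-2005-07A near the rift")
    # m.groupdict() -> {'whole': 'C204-2005-07A', 'explorer_code': 'C',
    #                   'cave': '204', 'year': '2005', 'number': '07', 'grade': 'A'}
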
@@ -180,7 +180,7 @@ def logbook_entry_suggestions(request):
            lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
        except:
            print("failed to get placeholder for year "+str(qm['year']))

        temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
        temp_QM.grade=qm['grade']
        qm['wikilink']=temp_QM.wiki_link()

@@ -188,16 +188,16 @@ def logbook_entry_suggestions(request):
            #print 'failed'

    print(unwiki_QMs)


    #wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
    attached_QMs=lbo.QMs_found.all()
    unmentioned_attached_QMs=''#not implemented, fill this in by subtracting wiklink_QMs from attached_QMs

    #Find unattached_QMs. We only look at the QMs with a proper wiki link.
    #for qm in wikilink_QMs:
        #Try to look up the QM.

    print('got 208')
    any_suggestions=True
    print('got 210')

@@ -217,11 +217,11 @@ def newFile(request, pslug = None):
# personTripFormSet = PersonTripFormSet(request.POST)
# if tripForm.is_valid() and personTripFormSet.is_valid(): # All validation rules pass
# dateStr = tripForm.cleaned_data["date"].strftime("%Y-%m-%d")
# directory = os.path.join(settings.EXPOWEB,
# "years",
# expedition.year,
# "autologbook")
# filename = os.path.join(directory,
# dateStr + "." + slugify(tripForm.cleaned_data["title"])[:50] + ".html")
# if not os.path.isdir(directory):
# os.mkdir(directory)

@@ -229,7 +229,7 @@ def newFile(request, pslug = None):
# delLogbookEntry(previouslbe)
# f = open(filename, "w")
# template = loader.get_template('dataformat/logbookentry.html')
# context = Context({'trip': tripForm.cleaned_data,
# 'persons': personTripFormSet.cleaned_data,
# 'date': dateStr,
# 'expeditionyear': expeditionyear})

@@ -254,11 +254,11 @@ def newFile(request, pslug = None):
# "location": previouslbe.place,
# "caveOrLocation": "location",
# "html": previouslbe.text})
# personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
# "TU": py.time_underground,
# "author": py.is_logbook_entry_author}
# for py in previouslbe.persontrip_set.all()])
# else:
# fileform = UploadFileForm() # An unbound form

    return render(request, 'editfile.html', {

@@ -17,7 +17,7 @@ import troggle.settings as settings
import parsers.survex

survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau (kataster group 1623)
; Cave:

*begin [surveyname]


@@ -65,7 +65,7 @@ class SvxForm(forms.Form):
    datetime = forms.DateTimeField(widget=forms.TextInput(attrs={"readonly":True}))
    outputtype = forms.CharField(widget=forms.TextInput(attrs={"readonly":True}))
    code = forms.CharField(widget=forms.Textarea(attrs={"cols":150, "rows":18}))

    def GetDiscCode(self):
        fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
        if not os.path.isfile(fname):

@@ -75,7 +75,7 @@ class SvxForm(forms.Form):
        svxtext = ReplaceTabs(svxtext).strip()
        fin.close()
        return svxtext

    def DiffCode(self, rcode):
        code = self.GetDiscCode()
        difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())

@@ -86,14 +86,14 @@ class SvxForm(forms.Form):
        fname = settings.SURVEX_DATA + self.data['filename'] + ".svx"
        if not os.path.isfile(fname):
            # only save if appears valid
            if re.search(r"\[|\]", rcode):
                return "Error: clean up all []s from the text"
            mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
            if not mbeginend:
                return "Error: no begin/end block here"
            if mbeginend.group(1) != mbeginend.group(2):
                return "Error: mismatching beginend"

        fout = open(fname, "w")
        res = fout.write(rcode.encode("latin1"))
        fout.close()
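
DiffCode above leans on difflib.unified_diff, which returns a lazy generator of diff lines rather than a string. A small standalone demonstration of that call (not the form class itself):

    import difflib

    old = "A\nB\nC".splitlines()
    new = "A\nB2\nC".splitlines()
    for line in difflib.unified_diff(old, new, lineterm=""):
        print(line)
    # ---
    # +++
    # @@ -1,3 +1,3 @@
    #  A
    # -B
    # +B2
    #  C
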
@@ -118,21 +118,21 @@ def svx(request, survex_file):
    dirname += "/"
    nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    outputtype = "normal"
    form = SvxForm({'filename':survex_file, 'dirname':dirname, 'datetime':nowtime, 'outputtype':outputtype})

    # if the form has been returned
    difflist = [ ]
    logmessage = ""
    message = ""

    if request.method == 'POST': # If the form has been submitted...
        rform = SvxForm(request.POST) #
        if rform.is_valid(): # All validation rules pass (how do we check it against the filename and users?)
            rcode = rform.cleaned_data['code']
            outputtype = rform.cleaned_data['outputtype']
            difflist = form.DiffCode(rcode)
            #print "ssss", rform.data

            if "revert" in rform.data:
                pass
            if "process" in rform.data:

@@ -153,20 +153,20 @@ def svx(request, survex_file):
                form.data['code'] = rcode
            if "diff" in rform.data:
                form.data['code'] = rcode


    #process(survex_file)
    if 'code' not in form.data:
        form.data['code'] = form.GetDiscCode()

    if not difflist:
        difflist.append("none")
    if message:
        difflist.insert(0, message)

    #print [ form.data['code'] ]
    svxincludes = re.findall(r'\*include\s+(\S+)(?i)', form.data['code'] or "")

    vmap = {'settings': settings,
            'has_3d': os.path.isfile(settings.SURVEX_DATA + survex_file + ".3d"),
            'title': survex_file,

@@ -227,13 +227,13 @@ def identifycavedircontents(gcavedir):
            pass
        elif name == "115" and (f in ["115cufix.svx", "115fix.svx"]):
            pass

        elif os.path.isdir(os.path.join(gcavedir, f)):
            if f[0] != ".":
                subdirs.append(f)
        elif f[-4:] == ".svx":
            nf = f[:-4]

            if nf.lower() == name.lower() or nf[:3] == "all" or (name, nf) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]:
                if primesvx:
                    if nf[:3] == "all":

@@ -272,16 +272,16 @@ def survexcaveslist(request):
        fnumlist += [ (-int(re.match(r"\d*", f).group(0) or "0"), f, area) for f in os.listdir(cavesdir) ]
    print(fnumlist)
    print(len(fnumlist))

    # first sort the file list
    fnumlist.sort()

    onefilecaves = [ ]
    multifilecaves = [ ]
    subdircaves = [ ]

    print(fnumlist)

    # go through the list and identify the contents of each cave directory
    for num, cavedir, area in fnumlist:
        if cavedir in ["144", "40"]:

@@ -293,10 +293,10 @@ def survexcaveslist(request):
        if os.path.isdir(gcavedir) and cavedir[0] != ".":
            subdirs, subsvx = identifycavedircontents(gcavedir)
            survdirobj = [ ]

            for lsubsvx in subsvx:
                survdirobj.append(("caves-" + area + "/"+cavedir+"/"+lsubsvx, lsubsvx))

            # caves with subdirectories
            if subdirs:
                subsurvdirs = [ ]

@@ -308,7 +308,7 @@ def survexcaveslist(request):
                    lsurvdirobj.append(("caves-" + area + "/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
                subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
                subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))

            # multifile caves
            elif len(survdirobj) > 1:
                multifilecaves.append((survdirobj[0], survdirobj[1:]))
@@ -61,7 +61,7 @@ def import_logbooks():
        settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() +'\n'+'-'*60)
    except:
        pass

    import parsers.logbooks
    parsers.logbooks.LoadLogbooks()

@@ -95,7 +95,7 @@ def reset():
    import_caves()
    import_people()
    import_surveyscans()

    import_logbooks()
    import_QMs()

@@ -136,13 +136,13 @@ def dumplogbooks():
        return pe.person.first_name
    for lbe in troggle.core.models.LogbookEntry.objects.all():
        dateStr = lbe.date.strftime("%Y-%m-%d")
        directory = os.path.join(settings.EXPOWEB,
                                 "years",
                                 lbe.expedition.year,
                                 "autologbook")
        if not os.path.isdir(directory):
            os.mkdir(directory)
        filename = os.path.join(directory,
                                dateStr + "." + slugify(lbe.title)[:50] + ".html")
        if lbe.cave:
            print(lbe.cave.reference())

@@ -227,7 +227,7 @@ if __name__ == "__main__":
    elif "survexpos" in sys.argv:
        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
        import parsers.survex
        parsers.survex.LoadPos()
    elif "logbooks" in sys.argv:
        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
        import_logbooks()
@@ -33,4 +33,3 @@ def writeQmTable(outfile,cave):
    cavewriter.writerow(headers)
    for qm in cave.get_QMs():
        cavewriter.writerow(qmRow(qm))

@@ -46,4 +46,4 @@ def _resolves(url):
        return True
    except http.Http404:
        return False

@@ -30,7 +30,7 @@ def parseCaveQMs(cave,inputFile):
            kh=Cave.objects.get(official_name="Kaninchenhöhle")
        except Cave.DoesNotExist:
            print("KH is not in the database. Please run parsers.cavetab first.")
        parse_KH_QMs(kh, inputFile=inputFile)
        return

    qmPath = settings.EXPOWEB+inputFile

@@ -46,7 +46,7 @@ def parseCaveQMs(cave,inputFile):
            if cave=='stein':
                placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 204", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":steinBr})
            elif cave=='hauch':
                placeholder, hadToCreate = LogbookEntry.objects.get_or_create(date__year=year, title="placeholder for QMs in 234", text="QMs temporarily attached to this should be re-attached to their actual trips", defaults={"date": date(year, 1, 1),"cave":hauchHl})
            if hadToCreate:
                print(cave + " placeholder logbook entry for " + str(year) + " added to database")
            QMnum=re.match(r".*?-\d*?-X?(?P<numb>\d*)",line[0]).group("numb")

@@ -59,7 +59,7 @@ def parseCaveQMs(cave,inputFile):
            newQM.grade=line[1]
            newQM.area=line[2]
            newQM.location_description=line[3]

            newQM.completion_description=line[4]
            newQM.nearest_station_description=line[5]
            if newQM.completion_description: # Troggle checks if QMs are completed by checking if they have a ticked_off_by trip. In the table, completion is indicated by the presence of a completion discription.

@@ -74,11 +74,11 @@ def parseCaveQMs(cave,inputFile):
                    print("overwriting " + str(preexistingQM) +"\r")
                else: # otherwise, print that it was ignored
                    print("preserving " + str(preexistingQM) + ", which was edited in admin \r")

            except QM.DoesNotExist: #if there is no pre-existing QM, save the new one
                newQM.save()
                print("QM "+str(newQM) + ' added to database\r')

        except KeyError: #check on this one
            continue
        except IndexError:

@@ -106,9 +106,9 @@ def parse_KH_QMs(kh, inputFile):
                'nearest_station_name':res['nearest_station'],
                'location_description':res['description']
                }

        save_carefully(QM,lookupArgs,nonLookupArgs)


parseCaveQMs(cave='stein',inputFile=r"1623/204/qm.csv")
parseCaveQMs(cave='hauch',inputFile=r"1623/234/qm.csv")
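
Both QM parsers above funnel writes through save_carefully(model, lookupArgs, nonLookupArgs). Its actual definition lives in utils.py, outside this diff, so the following is only an approximation of the shape: lookup attributes identify the row, the rest are applied on top, and (as the "preserving ... which was edited in admin" message above suggests) the real function also declines to overwrite objects flagged as edited in the admin:

    # Approximate sketch only, not troggle's utils.save_carefully.
    def save_carefully_sketch(model, lookupAttribs, nonLookupAttribs):
        obj, created = model.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
        if not created:
            for k, v in nonLookupAttribs.items():   # refresh the non-identity fields
                setattr(obj, k, v)
            obj.save()
        return obj, created
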
@@ -155,7 +155,7 @@ def readcave(filename):
        message = "Can't find text (slug): %s, skipping %s" % (slug, context)
        models.DataIssue.objects.create(parser='caves', message=message)
        print(message)

    primary = False
    for entrance in entrances:
        slug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]

@@ -167,7 +167,7 @@ def readcave(filename):
            message = "Entrance text (slug) %s missing %s" % (slug, context)
            models.DataIssue.objects.create(parser='caves', message=message)
            print(message)


def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)

@@ -177,7 +177,7 @@ def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True,
                                                             "min": minItems} + context
        models.DataIssue.objects.create(parser='caves', message=message)
        print(message)

    if maxItems is not None and len(items) > maxItems and printwarnings:
        message = "%(count)i %(itemname)s found, no more than %(max)i expected" % {"count": len(items),
                                                                                   "itemname": itemname,
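
getXML above pulls child elements with a plain regex rather than an XML parser; the %(itemname)s substitution builds the tag pair dynamically. A quick illustration:

    import re

    text = "<cave><entranceslug>a</entranceslug><entranceslug>b</entranceslug></cave>"
    itemname = "entranceslug"
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
    # items == ['a', 'b']
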
@@ -18,7 +18,7 @@ from fuzzywuzzy import fuzz

from utils import save_carefully

#
# When we edit logbook entries, allow a "?" after any piece of data to say we've frigged it and
# it can be checked up later from the hard-copy if necessary; or it's not possible to determin (name, trip place, etc)
#

@@ -111,7 +111,7 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
    lookupAttribs={'date':date, 'title':title}
    nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50], 'entry_type':entry_type}
    lbo, created=save_carefully(models.LogbookEntry, lookupAttribs, nonLookupAttribs)

    for tripperson, time_underground in trippersons:
        lookupAttribs={'personexpedition':tripperson, 'logbook_entry':lbo}
        nonLookupAttribs={'time_underground':time_underground, 'is_logbook_entry_author':(tripperson == author)}

@@ -216,7 +216,7 @@ def Parseloghtml01(year, expedition, txt):

        tripdate, triptitle, trippeople = tripheader.split("|")
        ldate = ParseDate(tripdate.strip(), year)

        mtu = re.search(r'<p[^>]*>(T/?U.*)', triptext)
        if mtu:
            tu = mtu.group(1)

@@ -228,7 +228,7 @@ def Parseloghtml01(year, expedition, txt):
        tripcave = triptitles[0].strip()

        ltriptext = triptext

        mtail = re.search(r'(?:<a href="[^"]*">[^<]*</a>|\s|/|-|&|</?p>|\((?:same day|\d+)\))*$', ltriptext)
        if mtail:
            #print mtail.group(0)

@@ -240,7 +240,6 @@ def Parseloghtml01(year, expedition, txt):
        ltriptext = re.sub(r"</?u>", "_", ltriptext)
        ltriptext = re.sub(r"</?i>", "''", ltriptext)
        ltriptext = re.sub(r"</?b>", "'''", ltriptext)


        #print ldate, trippeople.strip()
        # could includ the tripid (url link for cross referencing)

@@ -301,7 +300,7 @@ def SetDatesFromLogbookEntries(expedition):

def LoadLogbookForExpedition(expedition):
    """ Parses all logbook entries for one expedition """

    expowebbase = os.path.join(settings.EXPOWEB, "years")
    yearlinks = settings.LOGBOOK_PARSER_SETTINGS


@@ -344,7 +343,7 @@ def LoadLogbooks():
    expos = models.Expedition.objects.all()
    for expo in expos:
        print("\nLoading Logbook for: " + expo.year)

        # Load logbook for expo
        LoadLogbookForExpedition(expo)


@@ -378,17 +377,17 @@ def parseAutoLogBookEntry(filename):
            expedition = models.Expedition.objects.get(year = expeditionYearMatch.groups()[0])
            personExpeditionNameLookup = GetPersonExpeditionNameLookup(expedition)
        except models.Expedition.DoesNotExist:
            errors.append("Expedition not in database")
    else:
        errors.append("Expediton Year could not be parsed")

    titleMatch = titleRegex.search(contents)
    if titleMatch:
        title, = titleMatch.groups()
        if len(title) > settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH:
            errors.append("Title too long")
    else:
        errors.append("Title could not be found")

    caveMatch = caveRegex.search(contents)
    if caveMatch:

@@ -397,24 +396,24 @@ def parseAutoLogBookEntry(filename):
            cave = models.getCaveByReference(caveRef)
        except AssertionError:
            cave = None
            errors.append("Cave not found in database")
    else:
        cave = None

    locationMatch = locationRegex.search(contents)
    if locationMatch:
        location, = locationMatch.groups()
    else:
        location = None

    if cave is None and location is None:
        errors.append("Location nor cave could not be found")

    reportMatch = reportRegex.search(contents)
    if reportMatch:
        report, = reportMatch.groups()
    else:
        errors.append("Contents could not be found")
    if errors:
        return errors # Easiest to bail out at this point as we need to make sure that we know which expedition to look for people from.
    people = []

@@ -429,7 +428,7 @@ def parseAutoLogBookEntry(filename):
            author = bool(author)
        else:
            errors.append("Persons name could not be found")

        TUMatch = TURegex.search(contents)
        if TUMatch:
            TU, = TUMatch.groups()

@@ -439,15 +438,15 @@ def parseAutoLogBookEntry(filename):
        people.append((name, author, TU))
    if errors:
        return errors # Bail out before commiting to the database
    logbookEntry = models.LogbookEntry(date = date,
                                       expedition = expedition,
                                       title = title, cave = cave, place = location,
                                       text = report, slug = slugify(title)[:50],
                                       filename = filename)
    logbookEntry.save()
    for name, author, TU in people:
        models.PersonTrip(personexpedition = personExpo,
                          time_underground = TU,
                          logbook_entry = logbookEntry,
                          is_logbook_entry_author = author).save()
    print(logbookEntry)
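
The three re.sub calls in the Parseloghtml01 hunk above convert inline HTML into wiki-style markup: underline to _..._, italics to ''...'', bold to '''...'''. On a sample string:

    import re

    s = "a <u>u</u> and <i>i</i> and <b>b</b>"
    s = re.sub(r"</?u>", "_", s)
    s = re.sub(r"</?i>", "''", s)
    s = re.sub(r"</?b>", "'''", s)
    # s == "a _u_ and ''i'' and '''b'''"
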
@@ -12,22 +12,22 @@ def saveMugShot(mugShotPath, mugShotFilename, person):
        mugShotFilename=mugShotFilename[2:]
    else:
        mugShotFilename=mugShotFilename # just in case one doesn't

    dummyObj=models.DPhoto(file=mugShotFilename)

    #Put a copy of the file in the right place. mugShotObj.file.path is determined by the django filesystemstorage specified in models.py
    if not os.path.exists(dummyObj.file.path):
        shutil.copy(mugShotPath, dummyObj.file.path)

    mugShotObj, created = save_carefully(
        models.DPhoto,
        lookupAttribs={'is_mugshot':True, 'file':mugShotFilename},
        nonLookupAttribs={'caption':"Mugshot for "+person.first_name+" "+person.last_name}
    )

    if created:
        mugShotObj.contains_person.add(person)
        mugShotObj.save()

def parseMugShotAndBlurb(personline, header, person):
    """create mugshot Photo instance"""

@@ -45,20 +45,20 @@ def parseMugShotAndBlurb(personline, header, person):
    person.save()

def LoadPersonsExpos():

    persontab = open(os.path.join(settings.EXPOWEB, "folk", "folk.csv"))
    personreader = csv.reader(persontab)
    headers = personreader.next()
    header = dict(zip(headers, range(len(headers))))

    # make expeditions
    print("Loading expeditions")
    years = headers[5:]

    for year in years:
        lookupAttribs = {'year':year}
        nonLookupAttribs = {'name':"CUCC expo %s" % year}

        save_carefully(models.Expedition, lookupAttribs, nonLookupAttribs)

    # make persons
@@ -91,7 +91,7 @@ def LoadPersonsExpos():
|
|||||||
person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)
|
person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)
|
||||||
|
|
||||||
parseMugShotAndBlurb(personline=personline, header=header, person=person)
|
parseMugShotAndBlurb(personline=personline, header=header, person=person)
|
||||||
|
|
||||||
# make person expedition from table
|
# make person expedition from table
|
||||||
for year, attended in zip(headers, personline)[5:]:
|
for year, attended in zip(headers, personline)[5:]:
|
||||||
expedition = models.Expedition.objects.get(year=year)
|
expedition = models.Expedition.objects.get(year=year)
|
||||||
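One detail worth noting in that last loop: `zip(headers, personline)[5:]` relies on Python 2, where zip() returns a plain list that can be sliced. A standalone sketch (the header and person rows are invented for illustration) showing the same pairing, plus the list() wrapper it would need under Python 3:

headers    = ["First", "Last", "Guest", "VfHO member", "Mugshot", "1999", "2000"]
personline = ["Anthea", "Caver", "", "", "", "1", ""]

# zip() gives a list in Python 2 but a lazy iterator in Python 3,
# so slicing needs an explicit list() on modern interpreters.
for year, attended in list(zip(headers, personline))[5:]:
    print(year, "attended" if attended else "absent")
# 1999 attended
# 2000 absent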

@@ -108,10 +108,10 @@ def GetPersonExpeditionNameLookup(expedition):

    res = Gpersonexpeditionnamelookup.get(expedition.name)
    if res:
        return res

    res = { }
    duplicates = set()

    print("Calculating GetPersonExpeditionNameLookup for " + expedition.year)
    personexpeditions = models.PersonExpedition.objects.filter(expedition=expedition)
    htmlparser = HTMLParser()

@@ -139,16 +139,16 @@ def GetPersonExpeditionNameLookup(expedition):

    possnames.append(personexpedition.nickname.lower() + " " + l[0])
if str(personexpedition.nickname.lower() + l[0]) not in possnames:
    possnames.append(personexpedition.nickname.lower() + l[0])

for possname in possnames:
    if possname in res:
        duplicates.add(possname)
    else:
        res[possname] = personexpedition

for possname in duplicates:
    del res[possname]

Gpersonexpeditionnamelookup[expedition.name] = res
return res
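The duplicate-pruning idea above is easy to check in isolation. A minimal, self-contained sketch (the sample names are invented): build every lowercase variant a caver might be written as, then delete any variant that two people share, so the name matcher never guesses on an ambiguous string:

def build_name_lookup(people):
    res = {}
    duplicates = set()
    for person, variants in people:
        for variant in variants:
            v = variant.lower()
            if v in res:
                duplicates.add(v)   # same spelling maps to two people
            else:
                res[v] = person
    for v in duplicates:
        del res[v]                  # drop every ambiguous variant
    return res

lookup = build_name_lookup([
    ("Anthea", ["Anthea", "A. Caver"]),
    ("Andrew", ["Andrew", "A. Caver"]),   # "a. caver" is ambiguous
])
assert "a. caver" not in lookup
assert lookup["anthea"] == "Anthea"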

@@ -1,5 +1,7 @@

'''
-This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html files and saves them in the troggle database as instances of the model Subcave. Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
+This module is the part of troggle that parses descriptions of cave parts (subcaves) from the legacy html
+files and saves them in the troggle database as instances of the model Subcave.
+Unfortunately, this parser can not be very flexible because the legacy format is poorly structured.
'''

import sys, os

@@ -29,12 +31,12 @@ def importSubcaves(cave):

    link[0])
subcaveFile=open(subcaveFilePath,'r')
description=subcaveFile.read().decode('iso-8859-1').encode('utf-8')

lookupAttribs={'title':link[1], 'cave':cave}
nonLookupAttribs={'description':description}
newSubcave=save_carefully(Subcave,lookupAttribs=lookupAttribs,nonLookupAttribs=nonLookupAttribs)

logging.info("Added " + unicode(newSubcave) + " to " + unicode(cave))
except IOError:
    logging.info("Subcave import couldn't open "+subcaveFilePath)

@@ -19,12 +19,12 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):

ls = sline.lower().split()
ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
if stardata["type"] == "normal":
    try:
        survexleg.tape = float(ls[stardata["tape"]])
    except ValueError:
        print("Tape misread in", survexblock.survexfile.path)
        print("Stardata:", stardata)
        print("Line:", ls)

@@ -69,7 +69,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):

# only save proper legs
survexleg.save()

itape = stardata.get("tape")
if itape:
    try:

@@ -106,7 +106,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

stardata = stardatadefault
teammembers = [ ]

# uncomment to print out all files during parsing
print(" - Reading file: " + survexblock.survexfile.path)
stamp = datetime.now()
lineno = 0

@@ -198,7 +198,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

#print('Cave -sline ' + str(cave))
if not sline:
    continue

# detect the star command
mstar = regex_star.match(sline)
if not mstar:

@@ -214,7 +214,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

# print(' - Passage: ')
#Missing "station" in stardata.
continue

# detect the star command
cmd, line = mstar.groups()
cmd = cmd.lower()

@@ -238,7 +238,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

survexblock.save()
fininclude = includesurvexfile.OpenFile()
RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)

elif re.match("begin$(?i)", cmd):
    if line:
        newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))

@@ -265,7 +265,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
else:
    iblankbegins += 1

elif re.match("end$(?i)", cmd):
    if iblankbegins:
        iblankbegins -= 1

@@ -277,7 +277,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

timetaken = endstamp - stamp
# print(' - Time to process: ' + str(timetaken))
return

elif re.match("date$(?i)", cmd):
    if len(line) == 10:
        #print(' - Date found: ' + line)

@@ -288,7 +288,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

survexblock.expedition = expeditions[0]
survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
survexblock.save()

elif re.match("team$(?i)", cmd):
    pass
    # print(' - Team found: ')

@@ -304,13 +304,13 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

if personexpedition:
    personrole.person=personexpedition.person
personrole.save()

elif cmd == "title":
    #print(' - Title found: ')
    survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
    survextitle.save()
    pass

elif cmd == "require":
    # should we check survex version available for processing?
    pass

@@ -329,7 +329,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):

stardata = stardatadefault
else:
    assert ls[0] == "passage", line

elif cmd == "equate":
    #print(' - Equate found: ')
    LoadSurvexEquate(survexblock, line)

@@ -24,7 +24,7 @@ def readSurveysFromCSV():

try: # could probably combine these two
    surveytab = open(os.path.join(settings.SURVEY_SCANS, "Surveys.csv"))
except IOError:
    import cStringIO, urllib
    surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "/Surveys.csv").read())
dialect=csv.Sniffer().sniff(surveytab.read())
surveytab.seek(0,0)

@@ -37,7 +37,7 @@ def readSurveysFromCSV():

print("There are no expeditions in the database. Please run the logbook parser.")
sys.exit()

logging.info("Deleting all scanned images")
ScannedImage.objects.all().delete()

@@ -48,7 +48,7 @@ def readSurveysFromCSV():

for survey in surveyreader:
    # I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
    walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
    # print(walletNumberLetter.groups())
    year=survey[header['Year']]
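The wallet-number regex above can be exercised on its own; the sample wallet strings here are invented:

import re

for wallet in ["34", "34a", "#"]:
    m = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)', wallet)
    # both groups may be empty strings, since * allows zero characters
    print(repr(m.group('number')), repr(m.group('letter')))
# '34' ''
# '34' 'a'
# '' ''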

@@ -139,7 +139,7 @@ def parseSurveys(logfile=None):

except (IOError, OSError):
    print("Survey CSV not found..")
    pass

for expedition in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
    parseSurveyScans(expedition)

@@ -169,21 +169,21 @@ def GetListDir(sdir):

def LoadListScansFile(survexscansfolder):
    gld = [ ]

    # flatten out any directories in these book files
    for (fyf, ffyf, fisdiryf) in GetListDir(survexscansfolder.fpath):
        if fisdiryf:
            gld.extend(GetListDir(ffyf))
        else:
            gld.append((fyf, ffyf, fisdiryf))

    for (fyf, ffyf, fisdiryf) in gld:
        #assert not fisdiryf, ffyf
        if re.search(r"\.(?:png|jpg|jpeg)(?i)$", fyf):
            survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
            survexscansingle.save()
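A side note on the image test above: the inline `(?i)` placed at the end of the pattern is a Python 2-era habit, and recent Pythons (3.11+) reject inline flags that are not at the start of the pattern. A small standalone equivalent using the global flag instead, with made-up filenames:

import re

is_image = re.compile(r"\.(?:png|jpg|jpeg)$", re.IGNORECASE)
for name in ["plan(38).PNG", "notes.txt", "elevation.jpeg"]:
    print(name, bool(is_image.search(name)))
# plan(38).PNG True
# notes.txt False
# elevation.jpeg True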
# this iterates through the scans directories (either here or on the remote server)
# and builds up the models we can access later
def LoadListScans():

@@ -194,17 +194,17 @@ def LoadListScans():

SurvexScansFolder.objects.all().delete()

# first do the smkhs (large kh survey scans) directory
survexscansfoldersmkhs = SurvexScansFolder(fpath=os.path.join(settings.SURVEY_SCANS, "smkhs"), walletname="smkhs")
if os.path.isdir(survexscansfoldersmkhs.fpath):
    survexscansfoldersmkhs.save()
    LoadListScansFile(survexscansfoldersmkhs)

# iterate into the surveyscans directory
for f, ff, fisdir in GetListDir(os.path.join(settings.SURVEY_SCANS, "surveyscans")):
    if not fisdir:
        continue

    # do the year folders
    if re.match(r"\d\d\d\d$", f):
        for fy, ffy, fisdiry in GetListDir(ff):

@@ -213,13 +213,13 @@ def LoadListScans():

            survexscansfolder = SurvexScansFolder(fpath=ffy, walletname=fy)
            survexscansfolder.save()
            LoadListScansFile(survexscansfolder)

    # do the
    elif f != "thumbs":
        survexscansfolder = SurvexScansFolder(fpath=ff, walletname=f)
        survexscansfolder.save()
        LoadListScansFile(survexscansfolder)

def FindTunnelScan(tunnelfile, path):
    scansfolder, scansfile = None, None

@@ -235,12 +235,12 @@ def FindTunnelScan(tunnelfile, path):

print(scansfilel, len(scansfilel))
assert len(scansfilel) == 1
scansfile = scansfilel[0]

if scansfolder:
    tunnelfile.survexscansfolders.add(scansfolder)
if scansfile:
    tunnelfile.survexscans.add(scansfile)

elif path and not re.search(r"\.(?:png|jpg|jpeg)$(?i)", path):
    name = os.path.split(path)[1]
    print("ttt", tunnelfile.tunnelpath, path, name)

@@ -260,22 +260,22 @@ def SetTunnelfileInfo(tunnelfile):

fin = open(ff)
ttext = fin.read()
fin.close()

mtype = re.search("<(fontcolours|sketch)", ttext)
#assert mtype, ff
if mtype:
    tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
tunnelfile.npaths = len(re.findall("<skpath", ttext))
tunnelfile.save()

# <tunnelxml tunnelversion="version2009-06-21 Matienzo" tunnelproject="ireby" tunneluser="goatchurch" tunneldate="2009-06-29 23:22:17">
# <pcarea area_signal="frame" sfscaledown="12.282584" sfrotatedeg="-90.76982" sfxtrans="11.676667377221136" sfytrans="-15.677173422877454" sfsketch="204description/scans/plan(38).png" sfstyle="" nodeconnzsetrelative="0.0">
for path, style in re.findall('<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"', ttext):
    FindTunnelScan(tunnelfile, path)

# should also scan and look for survex blocks that might have been included
# and also survex titles as well.

tunnelfile.save()
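The `<pcarea>` scrape above is pure regex work and can be tried against a single line of tunnel XML; this fragment is adapted from the comment above rather than read from a real file:

import re

ttext = ('<pcarea area_signal="frame" sfscaledown="12.282584" '
         'sfsketch="204description/scans/plan(38).png" sfstyle="">')
for path, style in re.findall(
        '<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"',
        ttext):
    print(path, repr(style))
# 204description/scans/plan(38).png ''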

@@ -295,6 +295,6 @@ def LoadTunnelFiles():

elif f[-4:] == ".xml":
    tunnelfile = TunnelFile(tunnelpath=lf, tunnelname=os.path.split(f[:-4])[1])
    tunnelfile.save()

for tunnelfile in TunnelFile.objects.all():
    SetTunnelfileInfo(tunnelfile)

settings.py | 46

@@ -65,30 +65,30 @@ LOGBOOK_PARSER_SETTINGS = {
"2017": ("2017/logbook.html", "Parseloghtmltxt"),
|
"2017": ("2017/logbook.html", "Parseloghtmltxt"),
|
||||||
"2016": ("2016/logbook.html", "Parseloghtmltxt"),
|
"2016": ("2016/logbook.html", "Parseloghtmltxt"),
|
||||||
"2015": ("2015/logbook.html", "Parseloghtmltxt"),
|
"2015": ("2015/logbook.html", "Parseloghtmltxt"),
|
||||||
"2014": ("2014/logbook.html", "Parseloghtmltxt"),
|
"2014": ("2014/logbook.html", "Parseloghtmltxt"),
|
||||||
"2013": ("2013/logbook.html", "Parseloghtmltxt"),
|
"2013": ("2013/logbook.html", "Parseloghtmltxt"),
|
||||||
"2012": ("2012/logbook.html", "Parseloghtmltxt"),
|
"2012": ("2012/logbook.html", "Parseloghtmltxt"),
|
||||||
"2011": ("2011/logbook.html", "Parseloghtmltxt"),
|
"2011": ("2011/logbook.html", "Parseloghtmltxt"),
|
||||||
"2010": ("2010/logbook.html", "Parselogwikitxt"),
|
"2010": ("2010/logbook.html", "Parselogwikitxt"),
|
||||||
"2009": ("2009/2009logbook.txt", "Parselogwikitxt"),
|
"2009": ("2009/2009logbook.txt", "Parselogwikitxt"),
|
||||||
"2008": ("2008/2008logbook.txt", "Parselogwikitxt"),
|
"2008": ("2008/2008logbook.txt", "Parselogwikitxt"),
|
||||||
"2007": ("2007/logbook.html", "Parseloghtmltxt"),
|
"2007": ("2007/logbook.html", "Parseloghtmltxt"),
|
||||||
"2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
|
"2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
|
||||||
"2005": ("2005/logbook.html", "Parseloghtmltxt"),
|
"2005": ("2005/logbook.html", "Parseloghtmltxt"),
|
||||||
"2004": ("2004/logbook.html", "Parseloghtmltxt"),
|
"2004": ("2004/logbook.html", "Parseloghtmltxt"),
|
||||||
"2003": ("2003/logbook.html", "Parseloghtml03"),
|
"2003": ("2003/logbook.html", "Parseloghtml03"),
|
||||||
"2002": ("2002/logbook.html", "Parseloghtmltxt"),
|
"2002": ("2002/logbook.html", "Parseloghtmltxt"),
|
||||||
"2001": ("2001/log.htm", "Parseloghtml01"),
|
"2001": ("2001/log.htm", "Parseloghtml01"),
|
||||||
"2000": ("2000/log.htm", "Parseloghtml01"),
|
"2000": ("2000/log.htm", "Parseloghtml01"),
|
||||||
"1999": ("1999/log.htm", "Parseloghtml01"),
|
"1999": ("1999/log.htm", "Parseloghtml01"),
|
||||||
"1998": ("1998/log.htm", "Parseloghtml01"),
|
"1998": ("1998/log.htm", "Parseloghtml01"),
|
||||||
"1997": ("1997/log.htm", "Parseloghtml01"),
|
"1997": ("1997/log.htm", "Parseloghtml01"),
|
||||||
"1996": ("1996/log.htm", "Parseloghtml01"),
|
"1996": ("1996/log.htm", "Parseloghtml01"),
|
||||||
"1995": ("1995/log.htm", "Parseloghtml01"),
|
"1995": ("1995/log.htm", "Parseloghtml01"),
|
||||||
"1994": ("1994/log.htm", "Parseloghtml01"),
|
"1994": ("1994/log.htm", "Parseloghtml01"),
|
||||||
"1993": ("1993/log.htm", "Parseloghtml01"),
|
"1993": ("1993/log.htm", "Parseloghtml01"),
|
||||||
"1992": ("1992/log.htm", "Parseloghtml01"),
|
"1992": ("1992/log.htm", "Parseloghtml01"),
|
||||||
"1991": ("1991/log.htm", "Parseloghtml01"),
|
"1991": ("1991/log.htm", "Parseloghtml01"),
|
||||||
}
|
}
|
||||||
|
|
||||||
APPEND_SLASH = False
|
APPEND_SLASH = False
|
||||||
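How this table is consumed is not shown in the hunk, so the following is only a plausible sketch: each year maps to (file under the expoweb tree, parser function name), and a loader resolves the parser by name. The loader, the stub parsers and the /expoweb path are all invented for illustration:

import os

LOGBOOK_PARSER_SETTINGS = {
    "2017": ("2017/logbook.html", "Parseloghtmltxt"),
    "2001": ("2001/log.htm", "Parseloghtml01"),
}

def Parseloghtmltxt(path):  # stub standing in for the real html parser
    print("html-style parse of", path)

def Parseloghtml01(path):   # stub standing in for the 2001-format parser
    print("2001-style parse of", path)

def parse_logbook(year, expoweb="/expoweb"):
    filename, parsername = LOGBOOK_PARSER_SETTINGS[year]
    parser = globals()[parsername]   # look the function up by its name
    parser(os.path.join(expoweb, filename))

parse_logbook("2017")   # html-style parse of /expoweb/2017/logbook.html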

@@ -18,8 +18,8 @@

{% if pic.is_mugshot %}
<div class="figure">
<p> <img src="{{ pic.thumbnail_image.url }}" class="thumbnail" />
-<p> {{ pic.caption }}</p>
-<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a> </>
+<p> {{ pic.caption }} </p>
+<p> <a href="{{ pic.get_admin_url }}">edit {{pic}}</a>
</p>
</p>
</div>

@@ -32,7 +32,7 @@

<ul>
{% for personexpedition in person.personexpedition_set.all %}
<li> <a href="{{ personexpedition.get_absolute_url }}">{{personexpedition.expedition.year}}</a>
-<span style="padding-left:{{personexpedition.persontrip_set.all|length}}0px; background-color:red"></span>
+<span style="padding-left:{{ personexpedition.persontrip_set.all|length }}0px; background-color:red"></span>
{{personexpedition.persontrip_set.all|length}} trips
</li>
{% endfor %}

utils.py | 20

@@ -23,12 +23,12 @@ def randomLogbookSentence():

#Choose again if there are no sentances (this happens if it is a placeholder entry)
while len(re.findall('[A-Z].*?\.',randSent['entry'].text))==0:
    randSent['entry']=LogbookEntry.objects.order_by('?')[0]

#Choose a random sentence from that entry. Store the sentence as randSent['sentence'], and the number of that sentence in the entry as randSent['number']
sentenceList=re.findall('[A-Z].*?\.',randSent['entry'].text)
randSent['number']=random.randrange(0,len(sentenceList))
randSent['sentence']=sentenceList[randSent['number']]

return randSent
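The sentence-picking regex is worth seeing on a concrete string (made up here): it grabs runs that start with a capital letter and end at the first full stop, so a placeholder entry with no such run yields an empty list, which is exactly the retry condition in the while loop above:

import re, random

text = "Went caving. found nothing worth noting. Came back tired."
sentences = re.findall(r'[A-Z].*?\.', text)
print(sentences)   # ['Went caving.', 'Came back tired.']
print(random.choice(sentences))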

@@ -37,10 +37,10 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):

    -if instance does not exist in DB: add instance to DB, return (new instance, True)
    -if instance exists in DB and was modified using Troggle: do nothing, return (existing instance, False)
    -if instance exists in DB and was not modified using Troggle: overwrite instance, return (instance, False)

    The checking is accomplished using Django's get_or_create and the new_since_parsing boolean field
    defined in core.models.TroggleModel.
    """

    instance, created=objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)

@@ -49,17 +49,17 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):

    for k, v in list(nonLookupAttribs.items()): #overwrite the existing attributes from the logbook text (except date and title)
        setattr(instance, k, v)
    instance.save()

    if created:
        logging.info(str(instance) + ' was just added to the database for the first time. \n')

    if not created and instance.new_since_parsing:
        logging.info(str(instance) + " has been modified using Troggle, so the current script left it as is. \n")

    if not created and not instance.new_since_parsing:
        logging.info(str(instance) + " existed in the database unchanged since last parse. It was overwritten by the current script. \n")

    return (instance, created)
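The contract in that docstring can be illustrated without Django at all. A toy, dict-backed version (FakeRow, DB and save_carefully_demo are inventions for the demo, not troggle code) reproducing the three outcomes: create, leave alone if hand-edited, overwrite otherwise:

class FakeRow:
    def __init__(self, **attrs):
        self.new_since_parsing = False   # mirrors TroggleModel's flag
        self.__dict__.update(attrs)

DB = {}

def save_carefully_demo(lookupAttribs, nonLookupAttribs):
    key = tuple(sorted(lookupAttribs.items()))
    created = key not in DB
    if created:
        DB[key] = FakeRow(**lookupAttribs, **nonLookupAttribs)
    elif not DB[key].new_since_parsing:
        for k, v in nonLookupAttribs.items():   # overwrite stale parse data
            setattr(DB[key], k, v)
    return DB[key], created

row, created = save_carefully_demo({'year': '1999'}, {'name': 'CUCC expo 1999'})
assert created
row.new_since_parsing = True    # simulate an edit made through the admin
row2, created2 = save_carefully_demo({'year': '1999'}, {'name': 'reparsed name'})
assert row2 is row and not created2
assert row2.name == 'CUCC expo 1999'   # the admin edit survived the re-parse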

re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)

@@ -80,7 +80,7 @@ def get_single_match(regex, text):

def href_to_wikilinks(matchobj):
    """
    Given an html link, checks for possible valid wikilinks.

    Returns the first valid wikilink. Valid means the target
    object actually exists.
    """

@@ -91,7 +91,7 @@ def href_to_wikilinks(matchobj):

    return matchobj.group()
    #except:
    #print 'fail'

re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
           (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),

@@ -107,7 +107,7 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),

           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"), #assumes that all links with target ids are cave descriptions. Not great.
           (re.compile(r"\[\<a\s+href=['\"][^'\"]*['\"]\s+id=['\"][^'\"]*['\"]\s*\>([^\s]*).*?\</a\>\]", re.DOTALL), r"[[qm:\1]]"),
           (re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>'),href_to_wikilinks),
           ]

def html_to_wiki(text, codec = "utf-8"):
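html_to_wiki's body is cut off above, but the substitution table suggests a simple fold over re_subs. A guessed, minimal reimplementation (html_to_wiki_demo is not the real function) using just the bold and italic rules, copied verbatim from the list, to show the direction of travel:

import re

re_subs_demo = [
    (re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
    (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),
]

def html_to_wiki_demo(text):
    for regex, repl in re_subs_demo:
        text = regex.sub(repl, text)
    return text

print(html_to_wiki_demo("<b>Kaninchenhöhle</b> is <i>deep</i>"))
# '''Kaninchenhöhle''' is ''deep''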