[svn] * Make descriptions parser also replace links to descriptions from Cave models' underground_descriptions with wikilinks for valid (existing) links
* Make entrances searchable in admin by cave kataster number
commit b9bbccfe00 (parent 05d262e42b)
@@ -100,6 +100,9 @@ class CaveAdmin(TroggleModelAdmin):
     inlines = (OtherCaveInline,)
     extra = 4

+class EntranceAdmin(TroggleModelAdmin):
+    search_fields = ('caveandentrance__cave__kataster_number',)
+
 admin.site.register(Photo)
 admin.site.register(Cave, CaveAdmin)
 admin.site.register(Area)
@@ -108,7 +111,7 @@ admin.site.register(CaveAndEntrance)
 admin.site.register(SurveyStation)
 admin.site.register(NewSubCave)
 admin.site.register(CaveDescription)
-admin.site.register(Entrance)
+admin.site.register(Entrance, EntranceAdmin)
 admin.site.register(SurvexBlock, SurvexBlockAdmin)
 admin.site.register(Expedition)
 admin.site.register(Person,PersonAdmin)
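The new search_fields entry follows the relation from Entrance through the CaveAndEntrance join to Cave, so the admin search box on the entrance changelist now matches entrances by the kataster number of a cave they belong to. Roughly the queryset this produces for a single search term, shown as a sketch (the import path and helper name are illustrative, not part of the commit):

# Sketch, assuming troggle's Django environment is loaded: Django's admin
# turns each search_fields entry into a case-insensitive icontains lookup;
# with one entry and one search term that reduces to the filter below.
from core.models import Entrance   # assumed model location (cf. core.models in utils.py below)

def entrances_for_kataster(term):
    # Hypothetical helper, not the admin's internals.
    return Entrance.objects.filter(
        caveandentrance__cave__kataster_number__icontains=term
    ).distinct()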
@@ -145,8 +145,8 @@ def wiki_to_html_short(value, autoescape=None):
     #make subcave links
     value = re.sub("\[\[\s*subcave:(.+)\|(.+)\]\]",r'<a href="%s/subcave/\1/">\2</a>' % url_root, value, re.DOTALL)
     #make cavedescription links
-    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%s/cavedescription/\2/">\1</a>' % url_root, value, re.DOTALL)
+    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%s/cavedescription/\2/">\2</a>' % url_root, value, re.DOTALL)


     #Make lists from lines starting with lists of [stars and hashes]
     outValue = ""
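The only change in this hunk is the replacement string for cavedescription wikilinks: the visible link text now comes from the second capture group (the part after the |) rather than the first. A standalone check of what the corrected substitution produces; url_root is left empty purely for illustration, and the pattern is compiled here so the DOTALL flag is applied explicitly (the filter itself passes re.DOTALL as re.sub's fourth positional argument):

# Standalone illustration of the corrected cavedescription substitution.
import re

url_root = ""   # placeholder for the example, not troggle's real setting
pattern = re.compile(r"\[\[\s*cavedescription:(.+)\|(.+)\]\]", re.DOTALL)
value = "See [[cavedescription:204|Main passage of 204]] for details."
print(pattern.sub(r'<a href="%s/cavedescription/\2/">\2</a>' % url_root, value))
# -> See <a href="/cavedescription/Main passage of 204/">Main passage of 204</a> for details.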
@@ -65,6 +65,7 @@ def import_descriptions():
 def parse_descriptions():
     import parsers.descriptions
     parsers.descriptions.parseDescriptions()
+    parsers.descriptions.parseDescriptionsOnCaveObjects()

 def reset():
     """ Wipe the troggle database and import everything from legacy data
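For reference, parse_descriptions() is one step of the reset/import tooling, and the added line runs a second pass over the Cave objects themselves. A minimal sketch of invoking the two passes by hand from a Django shell follows; note that the call added here is spelled parseDescriptionsOnCaveObjects() while the function added in parsers/descriptions.py below is parseDescriptionsInCaveObjects(), so the sketch uses the definition's spelling.

# Sketch: running the description passes from a Django shell with troggle
# settings loaded; assumes CaveDescription rows already exist (getDescriptions()).
import parsers.descriptions

parsers.descriptions.parseDescriptions()                # HTML -> wikicode on CaveDescription rows
parsers.descriptions.parseDescriptionsInCaveObjects()   # same conversion on Cave.underground_description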
@@ -22,6 +22,7 @@ pages = [(["smkridge", "204", "ariston-rigging.html"], "ariston-rigging"),


 def getDescriptions():
+    """Creates objects in the database for each item in the list 'pages' . """
     for filelocation, name in pages:
        f = open(os.path.join(settings.EXPOWEB, *filelocation), "r")
        html = f.read()
@@ -32,7 +33,13 @@ def getDescriptions():
        cd.save()

 def parseDescriptions():
+    """Turns the HTML in each cave description into wikicode"""
     for cd in models.CaveDescription.objects.all():
        cd.description = html_to_wiki(cd.description)

        cd.save()
+
+def parseDescriptionsInCaveObjects():
+    for cave in models.Cave.objects.all():
+        cave.underground_description=html_to_wiki(unicode(cave.underground_description))
+        cave.save()
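The new parseDescriptionsInCaveObjects() pass feeds each cave's underground_description through html_to_wiki(), which, with the utils.py changes below, rewrites plain <a href> links into cavedescription wikilinks when a matching CaveDescription exists. A sketch of the effect on a single record, using hypothetical data and assumed import paths (neither models nor html_to_wiki is imported in this hunk):

# Sketch only: needs a populated troggle database, and CaveDescription
# objects already created by getDescriptions(), so that href_to_wikilinks
# (defined in utils.py below) can find a matching long_name.
from core import models              # assumed location of the Cave model
from utils import html_to_wiki       # assumed location of the helper

cave = models.Cave.objects.get(kataster_number="204")    # hypothetical cave
cave.underground_description = html_to_wiki(unicode(cave.underground_description))
cave.save()
# A link such as <a href="204.html">Underground description of 204</a> comes back
# as [[cavedescription:204|Underground description of 204]] when a CaveDescription
# whose long_name contains that link text exists.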
utils.py (23 changed lines)
@@ -1,6 +1,6 @@
 from django.conf import settings

 import random, re, logging
-
+from core.models import CaveDescription
 def weighted_choice(lst):
     n = random.uniform(0,1)
@@ -76,6 +76,7 @@ def render_with_context(req, *args, **kwargs):

 re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
 re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)
+
 def get_html_body(text):
     return get_single_match(re_body, text)

@@ -90,6 +91,21 @@ def get_single_match(regex, text):
     else:
         return None


+def href_to_wikilinks(matchobj):
+    """
+    Given an html link, checks for possible valid wikilinks.
+
+    Returns the first valid wikilink. Valid means the target
+    object actually exists.
+    """
+    res=CaveDescription.objects.filter(long_name__icontains=matchobj.groupdict()['text'])
+    if res:
+        return r'[[cavedescription:'+res[0].short_name+'|'+res[0].long_name+']]'
+    else:
+        return matchobj
+    #except:
+        #print 'fail'
+
 re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
            (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),
@@ -99,10 +115,11 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
            (re.compile(r"\<h4[^>]*\>(.*?)\</h4\>", re.DOTALL), r"====\1===="),
            (re.compile(r"\<h5[^>]*\>(.*?)\</h5\>", re.DOTALL), r"=====\1====="),
            (re.compile(r"\<h6[^>]*\>(.*?)\</h6\>", re.DOTALL), r"======\1======"),
-           (re.compile(r"\<a\s+id=['\"]([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[subcave:\1|\2]]"),
+           (re.compile(r"\<a\s+id=['\"]([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[subcave:\1|\2]]"), #assumes that all links with id attributes are subcaves. Not great.
            #interpage link needed
-           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"),
+           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"), #assumes that all links with target ids are subcaves. Not great.
            (re.compile(r"\[\<a\s+href=['\"][^'\"]*['\"]\s+id=['\"][^'\"]*['\"]\s*\>([^\s]*).*?\</a\>\]", re.DOTALL), r"[[qm:\1]]"),
+           (re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>'),href_to_wikilinks)

           ]

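Unlike the other re_subs entries, the new last entry pairs its pattern with a callable rather than a template string: re.sub invokes href_to_wikilinks for every <a href=...> match and splices in the string it returns. A self-contained sketch of that mechanism, with a plain dict standing in for the CaveDescription lookup (all names and data below are illustrative, not troggle code); the demo returns the original matched text in the no-match branch, which is the string form re.sub expects from a callable replacement.

# Self-contained analogue of the callable-replacement mechanism used above.
# A dict stands in for CaveDescription.objects.filter(long_name__icontains=...).
import re

known_descriptions = {"Underground description of 204": "204"}   # long_name -> short_name (made up)

link_re = re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>')

def href_to_wikilinks_demo(matchobj):
    text = matchobj.groupdict()['text']
    for long_name, short_name in known_descriptions.items():
        if text.lower() in long_name.lower():
            return '[[cavedescription:' + short_name + '|' + long_name + ']]'
    return matchobj.group(0)   # no hit: keep the original HTML link

html = 'See <a href="204.html">Underground description of 204</a> here.'
print(link_re.sub(href_to_wikilinks_demo, html))
# -> See [[cavedescription:204|Underground description of 204]] here.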