From 5b3a951be3cb06fcf44b5643476cb34d8cf773bd Mon Sep 17 00:00:00 2001
From: substantialnoninfringinguser <substantialnoninfringinguser@gmail.com>
Date: Thu, 16 Jul 2009 05:37:33 +0100
Subject: [PATCH] [svn] * Make the descriptions parser also replace links in
 Cave models' underground_description fields with wikilinks to valid
 (existing) cave descriptions * Make entrances searchable in the admin by
 cave kataster number

---
 core/admin.py                    |  5 ++++-
 core/templatetags/wiki_markup.py |  4 ++--
 databaseReset.py                 |  1 +
 parsers/descriptions.py          |  9 ++++++++-
 utils.py                         | 23 ++++++++++++++++++++---
 5 files changed, 35 insertions(+), 7 deletions(-)

diff --git a/core/admin.py b/core/admin.py
index 438281e..fb3173a 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -100,6 +100,9 @@ class CaveAdmin(TroggleModelAdmin):
     inlines = (OtherCaveInline,)
     extra = 4
 
+class EntranceAdmin(TroggleModelAdmin):
+    search_fields = ('caveandentrance__cave__kataster_number',)
+
 admin.site.register(Photo)
 admin.site.register(Cave, CaveAdmin)
 admin.site.register(Area)
@@ -108,7 +111,7 @@ admin.site.register(CaveAndEntrance)
 admin.site.register(SurveyStation)
 admin.site.register(NewSubCave)
 admin.site.register(CaveDescription)
-admin.site.register(Entrance)
+admin.site.register(Entrance, EntranceAdmin)
 admin.site.register(SurvexBlock, SurvexBlockAdmin)
 admin.site.register(Expedition)
 admin.site.register(Person,PersonAdmin)
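
Note: the new search_fields entry follows the CaveAndEntrance through model out
to the related Cave. A minimal sketch of the equivalent queryset, assuming
troggle's models as referenced in the patch (the helper name and search term
are illustrative, not part of the change):

    from core.models import Entrance

    def entrances_for_kataster(term):
        # search_fields without a prefix does a case-insensitive 'contains'
        # match, traversing CaveAndEntrance to the related Cave's kataster number
        return Entrance.objects.filter(
            caveandentrance__cave__kataster_number__icontains=term)
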
diff --git a/core/templatetags/wiki_markup.py b/core/templatetags/wiki_markup.py
index cf11358..31c5671 100644
--- a/core/templatetags/wiki_markup.py
+++ b/core/templatetags/wiki_markup.py
@@ -145,8 +145,8 @@ def wiki_to_html_short(value, autoescape=None):
     #make subcave links
     value = re.sub("\[\[\s*subcave:(.+)\|(.+)\]\]",r'<a href="%s/subcave/\1/">\2</a>' % url_root, value, re.DOTALL)
     #make cavedescription links
-    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%s/cavedescription/\2/">\1</a>' % url_root, value, re.DOTALL)
-    
+    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]",r'<a href="%s/cavedescription/\2/">\2</a>' % url_root, value, re.DOTALL)
+
 
     #Make lists from lines starting with lists of [stars and hashes]
     outValue = ""
diff --git a/databaseReset.py b/databaseReset.py
index e6bd13c..b555978 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -65,6 +65,7 @@ def import_descriptions():
 def parse_descriptions():
     import parsers.descriptions
     parsers.descriptions.parseDescriptions()
+    parsers.descriptions.parseDescriptionsInCaveObjects()
 
 def reset():
     """ Wipe the troggle database and import everything from legacy data
diff --git a/parsers/descriptions.py b/parsers/descriptions.py
index fe325e7..108a892 100644
--- a/parsers/descriptions.py
+++ b/parsers/descriptions.py
@@ -22,6 +22,7 @@ pages = [(["smkridge", "204", "ariston-rigging.html"], "ariston-rigging"),
 
 
 def getDescriptions():
+    """Creates objects in the database for each item in the list 'pages' . """
     for filelocation, name in pages:
         f = open(os.path.join(settings.EXPOWEB, *filelocation), "r")
         html = f.read()
@@ -32,7 +33,13 @@ def getDescriptions():
         cd.save()
 
 def parseDescriptions():
+    """Turns the HTML in each cave description into wikicode"""
     for cd in models.CaveDescription.objects.all():
         cd.description = html_to_wiki(cd.description)
 
-        cd.save()
\ No newline at end of file
+        cd.save()
+
+def parseDescriptionsInCaveObjects():
+    for cave in models.Cave.objects.all():
+        cave.underground_description = html_to_wiki(unicode(cave.underground_description))
+        cave.save()
\ No newline at end of file
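
Note: a sketch of how the two description passes are meant to run after a
reset, mirroring the databaseReset.py hook above; nothing beyond the function
names comes from the patch:

    import parsers.descriptions

    # Wikify the standalone CaveDescription objects, then the
    # underground_description field on each Cave.
    parsers.descriptions.parseDescriptions()
    parsers.descriptions.parseDescriptionsInCaveObjects()
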
diff --git a/utils.py b/utils.py
index 1b16059..1638cd2 100644
--- a/utils.py
+++ b/utils.py
@@ -1,6 +1,6 @@
 from django.conf import settings
-    
 import random, re, logging
+from core.models import CaveDescription
 
 def weighted_choice(lst):
 	n = random.uniform(0,1)
@@ -76,6 +76,7 @@ def render_with_context(req, *args, **kwargs):
     
 re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
 re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)
+
 def get_html_body(text):
     return get_single_match(re_body, text)
 
@@ -90,6 +91,21 @@ def get_single_match(regex, text):
     else:
         return None
 
+def href_to_wikilinks(matchobj):
+    """
+    Given a match for an html link, look for a valid wikilink target.
+
+    Returns a wikilink to the first CaveDescription whose long_name
+    contains the link text; "valid" means the target object actually
+    exists. If nothing matches, the original html is returned unchanged.
+    """
+    res = CaveDescription.objects.filter(long_name__icontains=matchobj.groupdict()['text'])
+    if res:
+        return r'[[cavedescription:' + res[0].short_name + '|' + res[0].long_name + ']]'
+    else:
+        # re.sub expects a string here, so hand back the original link untouched.
+        return matchobj.group()
+
 
 re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
            (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),
@@ -99,10 +115,11 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
            (re.compile(r"\<h4[^>]*\>(.*?)\</h4\>", re.DOTALL), r"====\1===="),
            (re.compile(r"\<h5[^>]*\>(.*?)\</h5\>", re.DOTALL), r"=====\1====="),
            (re.compile(r"\<h6[^>]*\>(.*?)\</h6\>", re.DOTALL), r"======\1======"),
-           (re.compile(r"\<a\s+id=['\"]([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[subcave:\1|\2]]"),
+           (re.compile(r"\<a\s+id=['\"]([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[subcave:\1|\2]]"), #assumes that all links with id attributes are subcaves. Not great.
            #interpage link needed
-           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"),
+           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"), #assumes that all links with target ids are subcaves. Not great.
            (re.compile(r"\[\<a\s+href=['\"][^'\"]*['\"]\s+id=['\"][^'\"]*['\"]\s*\>([^\s]*).*?\</a\>\]", re.DOTALL), r"[[qm:\1]]"),
+           (re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>'),href_to_wikilinks)
 
            ]
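
Note: a minimal, self-contained sketch of how the new replacement callable
plugs into the substitution pass; the stub class and sample data stand in for
real CaveDescription rows so the snippet runs without a database:

    import re

    class FakeDescription(object):
        # stand-in for a CaveDescription row; troggle queries the database instead
        def __init__(self, short_name, long_name):
            self.short_name = short_name
            self.long_name = long_name

    descriptions = [FakeDescription("entrance-series", "204 Entrance Series")]

    def href_to_wikilinks(matchobj):
        text = matchobj.groupdict()['text']
        res = [d for d in descriptions if text.lower() in d.long_name.lower()]
        if res:
            return '[[cavedescription:' + res[0].short_name + '|' + res[0].long_name + ']]'
        # re.sub needs a string back, so return the original link unchanged
        return matchobj.group()

    pattern = re.compile(r'<a\shref="?(?P<target>[^">]*)"?>(?P<text>.*?)</a>')
    html = 'See the <a href="ent.html">Entrance Series</a> for rigging.'
    print(pattern.sub(href_to_wikilinks, html))
    # -> See the [[cavedescription:entrance-series|204 Entrance Series]] for rigging.
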