forked from expo/troggle
moved clever slash middleware & unused.py
core/middleware.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from django.conf import settings
from django import http
from django.urls import reverse, resolve, Resolver404

"""Non-standard django middleware is loaded from this file.
"""


class SmartAppendSlashMiddleware(object):
    """
    "SmartAppendSlash" middleware for taking care of URL rewriting.

    This middleware appends a missing slash, if:
    * the SMART_APPEND_SLASH setting is True
    * the URL without the slash does not exist
    * the URL with an appended slash does exist.
    Otherwise it won't touch the URL.
    """

    def process_request(self, request):
        '''Called for every url so return as quickly as possible.
        Append a slash if SMART_APPEND_SLASH is set, the URL with the slash resolves,
        and the URL without it does not.
        '''
        if not settings.SMART_APPEND_SLASH:
            return None

        if request.path.endswith('/'):
            return None

        if request.path.endswith('_edit'):
            return None

        host = request.get_host()
        old_url = [host, request.path]
        if _resolves(old_url[1]):
            return None

        # So: it does not resolve according to our criteria, i.e. _edit doesn't count
        new_url = old_url[:]
        new_url[1] = new_url[1] + '/'
        if not _resolves(new_url[1]):
            return None
        else:
            if settings.DEBUG and request.method == 'POST':
                # replace this exception with a redirect to an error page
                raise RuntimeError("You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to %s%s (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings." % (new_url[0], new_url[1]))
            if new_url != old_url:
                # Redirect
                if new_url[0]:
                    newurl = "%s://%s%s" % (request.is_secure() and 'https' or 'http', new_url[0], new_url[1])
                else:
                    newurl = new_url[1]
                if request.GET:
                    newurl += '?' + request.GET.urlencode()
                return http.HttpResponsePermanentRedirect(newurl)

        return None


def _resolves(url):
    try:
        # If the URL does not resolve, resolve() raises a Resolver404 exception (a subclass of Http404)
        match = resolve(url)
        # this will ALWAYS be resolved by expopages because it will produce pagenotfound if not the thing asked for
        # so handle this in expopages, not in middleware
        return True
    except Resolver404:
        return False
    except:
        print(url)
        raise
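For context, a minimal sketch of how this middleware might be enabled in the project settings; only the SMART_APPEND_SLASH flag appears in the file above, so the dotted path and the surrounding MIDDLEWARE entries are assumptions:

# settings.py (hypothetical excerpt)
SMART_APPEND_SLASH = True

MIDDLEWARE = [
    'django.middleware.common.CommonMiddleware',
    # ... other stock middleware ...
    'troggle.core.middleware.SmartAppendSlashMiddleware',  # assumed dotted path
]

Note that a class defining only process_request(), as above, is written in the old middleware style; under Django's new-style MIDDLEWARE setting it would also need to subclass django.utils.deprecation.MiddlewareMixin, or be listed in the legacy MIDDLEWARE_CLASSES setting instead.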
|
||||
core/unused.py (new file, 132 lines)
@@ -0,0 +1,132 @@
import sys
import random
import re
import logging

from django.conf import settings
from django.shortcuts import render

"""Oddball mixture of critical, superfluous and useful functions which should
be re-located more sensibly to other modules:

various HTML/wiki functions presumably for logbooks?

Use unknown:
weighted_choice(lst)
randomLogbookSentence()
"""


def weighted_choice(lst):
    n = random.uniform(0, 1)
    for item, weight in lst:
        if n < weight:
            break
        n = n - weight
    return item
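# Illustrative usage (not in the original file): lst is a sequence of
# (item, weight) pairs whose weights sum to roughly 1, e.g.
#   weighted_choice([("walk", 0.7), ("crawl", 0.2), ("dig", 0.1)])
# returns "walk" about 70% of the time. The example items are hypothetical.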

def randomLogbookSentence():
    from troggle.core.models import LogbookEntry
    randSent = {}

    # needs to handle empty logbooks without crashing

    # Choose a random logbook entry
    randSent['entry'] = LogbookEntry.objects.order_by('?')[0]

    # Choose again if there are no sentences (this happens if it is a placeholder entry)
    while len(re.findall(r'[A-Z].*?\.', randSent['entry'].text)) == 0:
        randSent['entry'] = LogbookEntry.objects.order_by('?')[0]

    # Choose a random sentence from that entry. Store the sentence as randSent['sentence'],
    # and the number of that sentence in the entry as randSent['number']
    sentenceList = re.findall(r'[A-Z].*?\.', randSent['entry'].text)
    randSent['number'] = random.randrange(0, len(sentenceList))
    randSent['sentence'] = sentenceList[randSent['number']]

    return randSent

re_body = re.compile(r"\<body[^>]*\>(.*)\</body\>", re.DOTALL)
re_title = re.compile(r"\<title[^>]*\>(.*)\</title\>", re.DOTALL)


def get_html_body(text):
    return get_single_match(re_body, text)


def get_html_title(text):
    return get_single_match(re_title, text)


def get_single_match(regex, text):
    match = regex.search(text)

    if match:
        return match.groups()[0]
    else:
        return None
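# Illustrative usage (not in the original file):
#   get_html_title("<html><head><title>Cave description index</title></head>...")
# returns "Cave description index"; get_single_match() returns None when the
# regex does not match. The sample HTML string is hypothetical.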

re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
           (re.compile(r"\<i\>(.*?)\</i\>", re.DOTALL), r"''\1''"),
           (re.compile(r"\<h1[^>]*\>(.*?)\</h1\>", re.DOTALL), r"=\1="),
           (re.compile(r"\<h2[^>]*\>(.*?)\</h2\>", re.DOTALL), r"==\1=="),
           (re.compile(r"\<h3[^>]*\>(.*?)\</h3\>", re.DOTALL), r"===\1==="),
           (re.compile(r"\<h4[^>]*\>(.*?)\</h4\>", re.DOTALL), r"====\1===="),
           (re.compile(r"\<h5[^>]*\>(.*?)\</h5\>", re.DOTALL), r"=====\1====="),
           (re.compile(r"\<h6[^>]*\>(.*?)\</h6\>", re.DOTALL), r"======\1======"),
           (re.compile(r'(<a href="?(?P<target>.*)"?>)?<img class="?(?P<class>\w*)"? src="?t/?(?P<source>[\w/\.]*)"?(?P<rest>></img>|\s/>(</a>)?)', re.DOTALL), r'[[display:\g<class> photo:\g<source>]]'),
           (re.compile(r"\<a\s+id=['\"]([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[subcave:\1|\2]]"),  # assumes that all links with id attributes are subcaves. Not great.
           # interpage link needed
           (re.compile(r"\<a\s+href=['\"]#([^'\"]*)['\"]\s*\>(.*?)\</a\>", re.DOTALL), r"[[cavedescription:\1|\2]]"),  # assumes that all links with target ids are cave descriptions. Not great.
           (re.compile(r"\[\<a\s+href=['\"][^'\"]*['\"]\s+id=['\"][^'\"]*['\"]\s*\>([^\s]*).*?\</a\>\]", re.DOTALL), r"[[qm:\1]]"),
           # (re.compile(r'<a\shref="?(?P<target>.*)"?>(?P<text>.*)</a>'), href_to_wikilinks),
           ]
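# Illustrative check (not in the original file): applying the first pair turns
# HTML bold into wiki bold, e.g.
#   re_subs[0][0].sub(re_subs[0][1], "<b>very wet</b>")  ->  "'''very wet'''"
# and the h2 pair turns "<h2>Entrance series</h2>" into "==Entrance series==".
# The sample strings are hypothetical.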

def html_to_wiki(text, codec="utf-8"):
    if isinstance(text, bytes):
        text = text.decode(codec)
    text = re.sub("</p>", r"", text)
    text = re.sub("<p>$", r"", text)
    text = re.sub("<p>", r"\n\n", text)
    out = ""
    lists = ""
    # lists
    while text:
        mstar = re.match(r"^(.*?)<ul[^>]*>\s*<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
        munstar = re.match(r"^(\s*)</ul>(.*)$", text, re.DOTALL)
        mhash = re.match(r"^(.*?)<ol[^>]*>\s*<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
        munhash = re.match(r"^(\s*)</ol>(.*)$", text, re.DOTALL)
        mitem = re.match(r"^(\s*)<li[^>]*>(.*?)</li>(.*)$", text, re.DOTALL)
        ms = [len(m.groups()[0]) for m in [mstar, munstar, mhash, munhash, mitem] if m]

        def min_(i, l):
            try:
                v = i.groups()[0]
                l.remove(len(v))
                return len(v) < min(l, default=1000000000)
            except:
                return False

        if min_(mstar, ms):
            lists += "*"
            pre, val, post = mstar.groups()
            out += pre + "\n" + lists + " " + val
            text = post
        elif min_(mhash, ms):
            lists += "#"
            pre, val, post = mhash.groups()
            out += pre + "\n" + lists + " " + val
            text = post
        elif min_(mitem, ms):
            pre, val, post = mitem.groups()
            out += "\n" + lists + " " + val
            text = post
        elif min_(munstar, ms):
            lists = lists[:-1]
            text = munstar.groups()[1]
        elif min_(munhash, ms):
            lists = lists[:-1]
            text = munhash.groups()[1]
        else:
            out += text
            text = ""
    # substitutions
    for regex, repl in re_subs:
        out = regex.sub(repl, out)
    return out
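A quick, hedged illustration of what html_to_wiki() produces; the input string is hypothetical and the exact whitespace of the output depends on the <p> handling above:

# Hypothetical input, not from the repository
sample = "<h2>Pushing trip</h2><p>We surveyed a <b>wet</b> crawl.</p>"
print(html_to_wiki(sample))
# Expected output (roughly):
#   ==Pushing trip==
#
#   We surveyed a '''wet''' crawl.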
|
||||