mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-28 18:11:52 +00:00

Update new management command for DB reset

Switch to content_type from mimetype
Make DB reset not nuke so much
Tidy logbook parser
Sam Wenham committed 2019-03-30 13:58:38 +00:00
parent 705dd51f30
commit a4532a29da
6 changed files with 85 additions and 86 deletions
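The management-command hunks below keep the old optparse-style option_list and rename its --foo flag to --reset. For comparison, a minimal sketch of how the same flag can be declared with the argparse-based add_arguments() API used by newer Django versions — illustrative only, not the code in this commit:

# Hedged sketch: declaring a --reset flag via add_arguments() instead of
# the optparse-based option_list used in the diff below. Names and help
# text are illustrative.
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Clear the database and reread everything"

    def add_arguments(self, parser):
        # argparse replacement for make_option('--reset', ...)
        parser.add_argument(
            '--reset',
            action='store_true',
            dest='reset',
            default=False,
            help='Reset the entire DB from files',
        )

    def handle(self, *args, **options):
        if options['reset']:
            self.stdout.write(self.style.SUCCESS('Resetting...'))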

View File

@@ -2,6 +2,14 @@ from django.core.management.base import BaseCommand, CommandError
 from optparse import make_option
 from troggle.core.models import Cave
 import settings
+import os
+from django.db import connection
+from django.core import management
+from django.contrib.auth.models import User
+from django.core.urlresolvers import reverse
+from troggle.core.models import Cave, Entrance
+import troggle.flatpages.models
 
 databasename=settings.DATABASES['default']['NAME']
 expouser=settings.EXPOUSER
@@ -12,22 +20,13 @@ class Command(BaseCommand):
     help = 'This is normal usage, clear database and reread everything'
 
     option_list = BaseCommand.option_list + (
-        make_option('--foo',
+        make_option('--reset',
             action='store_true',
-            dest='foo',
+            dest='reset',
             default=False,
-            help='test'),
+            help='Reset the entier DB from files'),
     )
 
-    def add_arguments(self, parser):
-        parser.add_argument(
-            '--foo',
-            action='store_true',
-            dest='foo',
-            help='Help text',
-        )
-
     def handle(self, *args, **options):
         print(args)
         print(options)
@@ -46,8 +45,8 @@ class Command(BaseCommand):
             self.import_QMs()
         elif "tunnel" in args:
             self.import_tunnelfiles()
-        elif "reset" in args:
-            self.reset()
+        elif options['reset']:
+            self.reset(self)
         elif "survex" in args:
             self.import_survex()
         elif "survexpos" in args:
@@ -61,13 +60,15 @@ class Command(BaseCommand):
             self.dumplogbooks()
         elif "writeCaves" in args:
             self.writeCaves()
-        elif "foo" in args:
-            self.stdout.write('Tesing....')
+        elif options['foo']:
+            self.stdout.write(self.style.WARNING('Tesing....'))
         else:
-            self.stdout.write("%s not recognised" % args)
-            self.usage(options)
+            #self.stdout.write("%s not recognised" % args)
+            #self.usage(options)
+            self.stdout.write("poo")
+        #print(args)
 
-    def reload_db():
+    def reload_db(obj):
         if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
             try:
                 os.remove(databasename)
@@ -86,22 +87,22 @@ class Command(BaseCommand):
             user.is_superuser = True
             user.save()
 
-    def make_dirs():
+    def make_dirs(obj):
         """Make directories that troggle requires"""
         # should also deal with permissions here.
         if not os.path.isdir(settings.PHOTOS_ROOT):
             os.mkdir(settings.PHOTOS_ROOT)
 
-    def import_caves():
+    def import_caves(obj):
         import parsers.caves
-        print("importing caves")
+        print("Importing Caves")
         parsers.caves.readcaves()
 
-    def import_people():
+    def import_people(obj):
         import parsers.people
         parsers.people.LoadPersonsExpos()
 
-    def import_logbooks():
+    def import_logbooks(obj):
         # The below line was causing errors I didn't understand (it said LOGFILE was a string), and I couldn't be bothered to figure
         # what was going on so I just catch the error with a try. - AC 21 May
         try:
@@ -112,57 +113,57 @@ class Command(BaseCommand):
         import parsers.logbooks
         parsers.logbooks.LoadLogbooks()
 
-    def import_survex():
+    def import_survex(obj):
         import parsers.survex
         parsers.survex.LoadAllSurvexBlocks()
         parsers.survex.LoadPos()
 
-    def import_QMs():
+    def import_QMs(obj):
         import parsers.QMs
 
-    def import_surveys():
+    def import_surveys(obj):
         import parsers.surveys
         parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
 
-    def import_surveyscans():
+    def import_surveyscans(obj):
         import parsers.surveys
         parsers.surveys.LoadListScans()
 
-    def import_tunnelfiles():
+    def import_tunnelfiles(obj):
         import parsers.surveys
         parsers.surveys.LoadTunnelFiles()
 
-    def reset():
+    def reset(self, mgmt_obj):
         """ Wipe the troggle database and import everything from legacy data
         """
-        reload_db()
-        make_dirs()
-        pageredirects()
-        import_caves()
-        import_people()
-        import_surveyscans()
-        import_survex()
-        import_logbooks()
-        import_QMs()
+        self.reload_db()
+        self.make_dirs()
+        self.pageredirects()
+        self.import_caves()
+        self.import_people()
+        self.import_surveyscans()
+        self.import_survex()
+        self.import_logbooks()
+        self.import_QMs()
         try:
-            import_tunnelfiles()
+            self.import_tunnelfiles()
         except:
             print("Tunnel files parser broken.")
-        import_surveys()
+        self.import_surveys()
 
-    def pageredirects():
+    def pageredirects(obj):
         for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
             f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
             f.save()
 
-    def writeCaves():
+    def writeCaves(obj):
         for cave in Cave.objects.all():
             cave.writeDataFile()
         for entrance in Entrance.objects.all():
             entrance.writeDataFile()
 
-    def usage(self, parser):
+    def troggle_usage(obj):
         print("""Usage is 'manage.py reset_db <command>'
               where command is:
               reset - this is normal usage, clear database and reread everything
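With the flag wired into handle(), the reset can be driven programmatically as well as from the shell. A small hedged sketch, assuming the command is installed under the name reset_db as the usage text above indicates:

# Hedged usage sketch: trigger the reset programmatically.
# Equivalent to running: python manage.py reset_db --reset
from django.core import management

management.call_command('reset_db', reset=True)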

View File

@@ -59,8 +59,8 @@ def controlPanel(request):
             databaseReset.make_dirs()
             for item in importlist:
                 if item in request.POST:
-                    print "running"+ " databaseReset."+item+"()"
-                    exec "databaseReset."+item+"()"
+                    print("running"+ " databaseReset."+item+"()")
+                    exec("databaseReset."+item+"()")
                     jobs_completed.append(item)
     else:
         if request.user.is_authenticated(): #The user is logged in, but is not a superuser.
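The control panel above dispatches imports by building a string and passing it to exec(). A safer equivalent — not what the committed code does — would resolve the function by name on the databaseReset module with getattr(); "posted" below stands in for the view's request.POST:

# Hedged alternative to the exec()-based dispatch above. The databaseReset
# module and the importlist/jobs_completed names come from the view code;
# "posted" is an illustrative stand-in for request.POST.
import databaseReset

def run_requested_imports(importlist, posted):
    jobs_completed = []
    for item in importlist:
        if item in posted:
            func = getattr(databaseReset, item, None)   # e.g. "import_caves"
            if func is not None:
                print("running databaseReset." + item + "()")
                func()
                jobs_completed.append(item)
    return jobs_completed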
@@ -72,14 +72,14 @@ def controlPanel(request):
 
 def downloadCavetab(request):
     from export import tocavetab
-    response = HttpResponse(mimetype='text/csv')
+    response = HttpResponse(content_type='text/csv')
     response['Content-Disposition'] = 'attachment; filename=CAVETAB2.CSV'
     tocavetab.writeCaveTab(response)
     return response
 
 def downloadSurveys(request):
     from export import tosurveys
-    response = HttpResponse(mimetype='text/csv')
+    response = HttpResponse(content_type='text/csv')
     response['Content-Disposition'] = 'attachment; filename=Surveys.csv'
     tosurveys.writeCaveTab(response)
     return response
@@ -94,20 +94,19 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
         logbook_entries=queryset
         filename='logbook'
     else:
+        response = HttpResponse(content_type='text/plain')
         return response(r"Error: Logbook downloader doesn't know what year you want")
 
     if 'year' in request.GET:
         year=request.GET['year']
     if 'extension' in request.GET:
         extension=request.GET['extension']
 
     if extension =='txt':
-        response = HttpResponse(mimetype='text/plain')
+        response = HttpResponse(content_type='text/plain')
         style='2008'
     elif extension == 'html':
-        response = HttpResponse(mimetype='text/html')
+        response = HttpResponse(content_type='text/html')
         style='2005'
 
     template='logbook'+style+'style.'+extension
@@ -124,11 +123,11 @@ def downloadQMs(request):
     try:
         cave=Cave.objects.get(kataster_number=request.GET['cave_id'])
     except Cave.DoesNotExist:
-        cave=Cave.objects.get(name=cave_id)
+        cave=Cave.objects.get(name=request.GET['cave_id'])
 
     from export import toqms
-    response = HttpResponse(mimetype='text/csv')
+    response = HttpResponse(content_type='text/csv')
     response['Content-Disposition'] = 'attachment; filename=qm.csv'
     toqms.writeQmTable(response,cave)
     return response
@@ -136,7 +135,7 @@ def downloadQMs(request):
 def ajax_test(request):
     post_text = request.POST['post_data']
     return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
-                        mimetype="application/json")
+                        content_type="application/json")
 
 def eyecandy(request):
     return
@@ -144,9 +143,9 @@ def eyecandy(request):
 def ajax_QM_number(request):
     if request.method=='POST':
         cave=Cave.objects.get(id=request.POST['cave'])
-        print cave
+        print(cave)
         exp=Expedition.objects.get(pk=request.POST['year'])
-        print exp
+        print(exp)
         res=cave.new_QM_number(exp.year)
         return HttpResponse(res)
@@ -167,7 +166,7 @@ def logbook_entry_suggestions(request):
     #unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
     unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]
-    print unwiki_QMs
+    print(unwiki_QMs)
     for qm in unwiki_QMs:
         #try:
         if len(qm['year'])==2:
@@ -180,7 +179,7 @@ def logbook_entry_suggestions(request):
             try:
                 lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
             except:
-                print "failed to get placeholder for year "+str(qm['year'])
+                print("failed to get placeholder for year "+str(qm['year']))
 
         temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
         temp_QM.grade=qm['grade']
@@ -188,7 +187,7 @@ def logbook_entry_suggestions(request):
         #except:
             #print 'failed'
 
-    print unwiki_QMs
+    print(unwiki_QMs)
 
     #wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
@@ -199,9 +198,9 @@ def logbook_entry_suggestions(request):
     #for qm in wikilink_QMs:
         #Try to look up the QM.
 
-    print 'got 208'
+    print('got 208')
     any_suggestions=True
-    print 'got 210'
+    print('got 210')
     return render_with_context(request,'suggestions.html',
                                {
                                'unwiki_QMs':unwiki_QMs,

View File

@@ -109,10 +109,10 @@ def import_auto_logbooks():
     for lbe in troggle.core.models.LogbookEntry.objects.all():
         lbe.delete()
     for expedition in troggle.core.models.Expedition.objects.all():
         directory = os.path.join(settings.EXPOWEB,
                                  "years",
                                  expedition.year,
                                  "autologbook")
         for root, dirs, filenames in os.walk(directory):
             for filename in filenames:
                 print(os.path.join(root, filename))
@@ -195,9 +195,9 @@ if __name__ == "__main__":
     elif "scans" in sys.argv:
         import_surveyscans()
     elif "caves" in sys.argv:
-        reload_db()
-        make_dirs()
-        pageredirects()
+        # reload_db()
+        # make_dirs()
+        # pageredirects()
         import_caves()
     elif "people" in sys.argv:
         import_people()
@@ -218,14 +218,14 @@ if __name__ == "__main__":
         import_descriptions()
         parse_descriptions()
     elif "survex" in sys.argv:
-        management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import_survex()
     elif "survexpos" in sys.argv:
-        management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import parsers.survex
         parsers.survex.LoadPos()
     elif "logbooks" in sys.argv:
-        management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
+        # management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
         import_logbooks()
     elif "autologbooks" in sys.argv:
         import_auto_logbooks()
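The syncdb calls commented out above were the pre-Django-1.7 way of creating tables. On current Django the equivalent step would be the migrate command; a hedged sketch, not part of this commit:

# Hedged sketch: the modern replacement for the commented-out syncdb calls.
from django.core import management

management.call_command('migrate', interactive=False)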

View File

@@ -115,7 +115,7 @@ def ParseDate(tripdate, year):
         assert False, tripdate
     return datetime.date(year, month, day)
 
-# 2007, 2008, 2006
+# 2006, 2008 - 2010
 def Parselogwikitxt(year, expedition, txt):
     trippara = re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt)
     for triphead, triptext in trippara:
@@ -140,9 +140,9 @@ def Parselogwikitxt(year, expedition, txt):
         #print "\n", tripcave, "--- ppp", trippeople, len(triptext)
         EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
 
-# 2002, 2004, 2005
+# 2002, 2004, 2005, 2007, 2011 - 2018
 def Parseloghtmltxt(year, expedition, txt):
-    print(" - Using log html parser")
+    #print(" - Starting log html parser")
     tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
     logbook_entry_count = 0
     for trippara in tripparas:
@@ -163,7 +163,6 @@ def Parseloghtmltxt(year, expedition, txt):
             print("can't parse: ", trippara)  # this is 2007 which needs editing
             #assert s, trippara
             continue
-
         tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
         ldate = ParseDate(tripdate.strip(), year)
         #assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
@@ -174,7 +173,7 @@ def Parseloghtmltxt(year, expedition, txt):
             tripcave = triptitles[0]
         else:
             tripcave = "UNKNOWN"
-        #print "\n", tripcave, "--- ppp", trippeople, len(triptext)
+        #print("\n", tripcave, "--- ppp", trippeople, len(triptext))
         ltriptext = re.sub(r"</p>", "", triptext)
         ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
         ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
@@ -183,7 +182,7 @@ def Parseloghtmltxt(year, expedition, txt):
         print(" - No trip entrys found in logbook, check the syntax matches htmltxt format")
 
-# main parser for pre-2001. simpler because the data has been hacked so much to fit it
+# main parser for 1991 - 2001. simpler because the data has been hacked so much to fit it
 def Parseloghtml01(year, expedition, txt):
     tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
     for trippara in tripparas:
@@ -229,7 +228,7 @@ def Parseloghtml01(year, expedition, txt):
         # could includ the tripid (url link for cross referencing)
         EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
 
+# parser for 2003
 def Parseloghtml03(year, expedition, txt):
     tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
     for trippara in tripparas:
@@ -281,8 +280,7 @@ def SetDatesFromLogbookEntries(expedition):
 def LoadLogbookForExpedition(expedition):
     """ Parses all logbook entries for one expedition """
     expowebbase = os.path.join(settings.EXPOWEB, "years")
-    #year = str(expedition.year)
     yearlinks = settings.LOGBOOK_PARSER_SETTINGS
 
     logbook_parseable = False
@@ -294,6 +292,7 @@ def LoadLogbookForExpedition(expedition):
         file_in.close()
         parsefunc = year_settings[1]
         logbook_parseable = True
+        print(" - Parsing logbook: " + year_settings[0] + "\n - Using parser: " + year_settings[1])
     else:
         try:
             file_in = open(os.path.join(expowebbase, expedition.year, settings.DEFAULT_LOGBOOK_FILE))
@@ -304,7 +303,7 @@ def LoadLogbookForExpedition(expedition):
             parsefunc = settings.DEFAULT_LOGBOOK_PARSER
         except (IOError):
             logbook_parseable = False
-            print("Couldn't open default logbook file and nothing set for expo " + expedition.year)
+            print("Couldn't open default logbook file and nothing in settings for expo " + expedition.year)
 
     if logbook_parseable:
         parser = globals()[parsefunc]
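The new print line reports which logbook file and parser a year is wired to. From the way the loader indexes year_settings and resolves parsefunc via globals(), the settings it reads are expected to look roughly like the sketch below; the paths, years, and default values here are illustrative assumptions, not the real settings file:

# Hedged sketch of the shape LoadLogbookForExpedition() appears to expect:
# each year maps to a (logbook file, parser function name) pair, and the
# parser name is looked up with globals()[parsefunc].
LOGBOOK_PARSER_SETTINGS = {
    "2003": ("2003/logbook.html", "Parseloghtml03"),
    "2005": ("2005/logbook.html", "Parseloghtmltxt"),
}
DEFAULT_LOGBOOK_FILE = "logbook.html"
DEFAULT_LOGBOOK_PARSER = "Parseloghtmltxt"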

View File

@@ -67,8 +67,8 @@ def LoadPersonsExpos():
     for personline in personreader:
         name = personline[header["Name"]]
-        name = re.sub("<.*?>", "", name)
-        mname = re.match("(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
+        name = re.sub(r"<.*?>", "", name)
+        mname = re.match(r"(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
         nickname = mname.group(3) or ""
 
         lookupAttribs={'first_name':mname.group(1), 'last_name':(mname.group(2) or "")}
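Adding the r prefix here keeps Python from interpreting sequences like \w as string escapes, which raise a DeprecationWarning on Python 3.6+ and would eventually become errors. A quick standalone illustration of the pattern in use (the sample name is made up):

# Hedged illustration of the raw-string regex added above.
import re

name = "Sam Wenham (Sam)"
mname = re.match(r"(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
print(mname.groups())   # ('Sam', 'Wenham', 'Sam')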

View File

@@ -16,7 +16,7 @@
     {% if entry.is_deletion %}
         {{ entry.object_repr }}
     {% else %}
-        <a href="admin/{{ entry.get_admin_url }}">{{ entry.object_repr }}</a>
+        <a href="admin/{{ entry.get_admin_url }}/">{{ entry.object_repr }}</a>
     {% endif %}
     <br/>
     {% if entry.content_type %}