
Update new management command for DB reset

Switch to content_type from mimetype
Make DB reset not nuke so much
Tidy logbook parser
Sam Wenham 2019-03-30 13:58:38 +00:00
parent 705dd51f30
commit a4532a29da
6 changed files with 85 additions and 86 deletions
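The "content_type from mimetype" part of this commit tracks the `HttpResponse` API: the `mimetype` keyword was deprecated and later removed from Django, leaving `content_type` as the supported argument. A minimal sketch of the new-style call (the view name and filename here are illustrative, not taken from the diff):

```python
from django.http import HttpResponse

def download_example_csv(request):
    # 'content_type' replaces the removed 'mimetype' keyword on HttpResponse.
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=example.csv'
    response.write("kataster_number,name\n")
    return response
```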

View File

@@ -2,6 +2,14 @@ from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from troggle.core.models import Cave
import settings
import os
from django.db import connection
from django.core import management
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from troggle.core.models import Cave, Entrance
import troggle.flatpages.models
databasename=settings.DATABASES['default']['NAME']
expouser=settings.EXPOUSER
@@ -12,22 +20,13 @@ class Command(BaseCommand):
help = 'This is normal usage, clear database and reread everything'
option_list = BaseCommand.option_list + (
make_option('--foo',
make_option('--reset',
action='store_true',
dest='foo',
dest='reset',
default=False,
help='test'),
help='Reset the entier DB from files'),
)
def add_arguments(self, parser):
parser.add_argument(
'--foo',
action='store_true',
dest='foo',
help='Help text',
)
def handle(self, *args, **options):
print(args)
print(options)
@@ -46,8 +45,8 @@ class Command(BaseCommand):
self.import_QMs()
elif "tunnel" in args:
self.import_tunnelfiles()
elif "reset" in args:
self.reset()
elif options['reset']:
self.reset(self)
elif "survex" in args:
self.import_survex()
elif "survexpos" in args:
@@ -61,13 +60,15 @@ class Command(BaseCommand):
self.dumplogbooks()
elif "writeCaves" in args:
self.writeCaves()
elif "foo" in args:
self.stdout.write('Tesing....')
elif options['foo']:
self.stdout.write(self.style.WARNING('Tesing....'))
else:
self.stdout.write("%s not recognised" % args)
self.usage(options)
#self.stdout.write("%s not recognised" % args)
#self.usage(options)
self.stdout.write("poo")
#print(args)
def reload_db():
def reload_db(obj):
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
try:
os.remove(databasename)
@@ -86,22 +87,22 @@ class Command(BaseCommand):
user.is_superuser = True
user.save()
def make_dirs():
def make_dirs(obj):
"""Make directories that troggle requires"""
# should also deal with permissions here.
if not os.path.isdir(settings.PHOTOS_ROOT):
os.mkdir(settings.PHOTOS_ROOT)
def import_caves():
def import_caves(obj):
import parsers.caves
print("importing caves")
print("Importing Caves")
parsers.caves.readcaves()
def import_people():
def import_people(obj):
import parsers.people
parsers.people.LoadPersonsExpos()
def import_logbooks():
def import_logbooks(obj):
# The below line was causing errors I didn't understand (it said LOGFILE was a string), and I couldn't be bothered to figure
# what was going on so I just catch the error with a try. - AC 21 May
try:
@@ -112,57 +113,57 @@ class Command(BaseCommand):
import parsers.logbooks
parsers.logbooks.LoadLogbooks()
def import_survex():
def import_survex(obj):
import parsers.survex
parsers.survex.LoadAllSurvexBlocks()
parsers.survex.LoadPos()
def import_QMs():
def import_QMs(obj):
import parsers.QMs
def import_surveys():
def import_surveys(obj):
import parsers.surveys
parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
def import_surveyscans():
def import_surveyscans(obj):
import parsers.surveys
parsers.surveys.LoadListScans()
def import_tunnelfiles():
def import_tunnelfiles(obj):
import parsers.surveys
parsers.surveys.LoadTunnelFiles()
def reset():
def reset(self, mgmt_obj):
""" Wipe the troggle database and import everything from legacy data
"""
reload_db()
make_dirs()
pageredirects()
import_caves()
import_people()
import_surveyscans()
import_survex()
import_logbooks()
import_QMs()
self.reload_db()
self.make_dirs()
self.pageredirects()
self.import_caves()
self.import_people()
self.import_surveyscans()
self.import_survex()
self.import_logbooks()
self.import_QMs()
try:
import_tunnelfiles()
self.import_tunnelfiles()
except:
print("Tunnel files parser broken.")
import_surveys()
self.import_surveys()
def pageredirects():
def pageredirects(obj):
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
f.save()
def writeCaves():
def writeCaves(obj):
for cave in Cave.objects.all():
cave.writeDataFile()
for entrance in Entrance.objects.all():
entrance.writeDataFile()
def usage(self, parser):
def troggle_usage(obj):
print("""Usage is 'manage.py reset_db <command>'
where command is:
reset - this is normal usage, clear database and reread everything

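The first file (the database-reset management command) still declares its options through the old `optparse` `option_list`, while also carrying a stub `add_arguments`. For reference, a hedged sketch of how the new `--reset` flag could be declared with `add_arguments` alone, the argparse mechanism that replaced `make_option` from Django 1.8 onwards; the call inside `handle` is an assumption based on the diff, not copied from it:

```python
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'This is normal usage, clear database and reread everything'

    def add_arguments(self, parser):
        # argparse-style flag; replaces make_option('--reset', ...)
        parser.add_argument(
            '--reset',
            action='store_true',
            dest='reset',
            default=False,
            help='Reset the entire DB from files',
        )

    def handle(self, *args, **options):
        if options['reset']:
            self.stdout.write(self.style.WARNING('Resetting database from files...'))
            # self.reset() would be called here, as in the real command
```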
View File

@@ -59,8 +59,8 @@ def controlPanel(request):
databaseReset.make_dirs()
for item in importlist:
if item in request.POST:
print "running"+ " databaseReset."+item+"()"
exec "databaseReset."+item+"()"
print("running"+ " databaseReset."+item+"()")
exec("databaseReset."+item+"()")
jobs_completed.append(item)
else:
if request.user.is_authenticated(): #The user is logged in, but is not a superuser.
@@ -72,14 +72,14 @@ def controlPanel(request):
def downloadCavetab(request):
from export import tocavetab
response = HttpResponse(mimetype='text/csv')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=CAVETAB2.CSV'
tocavetab.writeCaveTab(response)
return response
def downloadSurveys(request):
from export import tosurveys
response = HttpResponse(mimetype='text/csv')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=Surveys.csv'
tosurveys.writeCaveTab(response)
return response
@@ -94,20 +94,19 @@ def downloadLogbook(request,year=None,extension=None,queryset=None):
logbook_entries=queryset
filename='logbook'
else:
response = HttpResponse(content_type='text/plain')
return response(r"Error: Logbook downloader doesn't know what year you want")
if 'year' in request.GET:
year=request.GET['year']
if 'extension' in request.GET:
extension=request.GET['extension']
if extension =='txt':
response = HttpResponse(mimetype='text/plain')
response = HttpResponse(content_type='text/plain')
style='2008'
elif extension == 'html':
response = HttpResponse(mimetype='text/html')
response = HttpResponse(content_type='text/html')
style='2005'
template='logbook'+style+'style.'+extension
@@ -124,11 +123,11 @@ def downloadQMs(request):
try:
cave=Cave.objects.get(kataster_number=request.GET['cave_id'])
except Cave.DoesNotExist:
cave=Cave.objects.get(name=cave_id)
cave=Cave.objects.get(name=request.GET['cave_id'])
from export import toqms
response = HttpResponse(mimetype='text/csv')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=qm.csv'
toqms.writeQmTable(response,cave)
return response
@@ -136,7 +135,7 @@ def downloadQMs(request):
def ajax_test(request):
post_text = request.POST['post_data']
return HttpResponse("{'response_text': '"+post_text+" recieved.'}",
mimetype="application/json")
content_type="application/json")
def eyecandy(request):
return
@@ -144,9 +143,9 @@ def eyecandy(request):
def ajax_QM_number(request):
if request.method=='POST':
cave=Cave.objects.get(id=request.POST['cave'])
print cave
print(cave)
exp=Expedition.objects.get(pk=request.POST['year'])
print exp
print(exp)
res=cave.new_QM_number(exp.year)
return HttpResponse(res)
@@ -167,7 +166,7 @@ def logbook_entry_suggestions(request):
#unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]
print unwiki_QMs
print(unwiki_QMs)
for qm in unwiki_QMs:
#try:
if len(qm['year'])==2:
@@ -180,7 +179,7 @@ def logbook_entry_suggestions(request):
try:
lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
except:
print "failed to get placeholder for year "+str(qm['year'])
print("failed to get placeholder for year "+str(qm['year']))
temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
temp_QM.grade=qm['grade']
@@ -188,7 +187,7 @@ def logbook_entry_suggestions(request):
#except:
#print 'failed'
print unwiki_QMs
print(unwiki_QMs)
#wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
@@ -199,9 +198,9 @@ def logbook_entry_suggestions(request):
#for qm in wikilink_QMs:
#Try to look up the QM.
print 'got 208'
print('got 208')
any_suggestions=True
print 'got 210'
print('got 210')
return render_with_context(request,'suggestions.html',
{
'unwiki_QMs':unwiki_QMs,

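The `controlPanel` hunk above keeps the `exec("databaseReset."+item+"()")` dispatch, only porting it to Python 3 call syntax. As a design note, the same whitelist-driven dispatch can be done without `exec` by looking the function up with `getattr`; this is an alternative sketch, not what the commit does, and the helper name is hypothetical:

```python
import databaseReset  # module already used by the control panel view

def run_selected_imports(request, importlist):
    """Run each whitelisted import job whose name appears in the POST data."""
    jobs_completed = []
    for item in importlist:
        if item in request.POST:
            func = getattr(databaseReset, item, None)  # lookup by name, no exec
            if callable(func):
                print("running databaseReset.%s()" % item)
                func()
                jobs_completed.append(item)
    return jobs_completed
```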
View File

@@ -109,10 +109,10 @@ def import_auto_logbooks():
for lbe in troggle.core.models.LogbookEntry.objects.all():
lbe.delete()
for expedition in troggle.core.models.Expedition.objects.all():
directory = os.path.join(settings.EXPOWEB,
"years",
expedition.year,
"autologbook")
directory = os.path.join(settings.EXPOWEB,
"years",
expedition.year,
"autologbook")
for root, dirs, filenames in os.walk(directory):
for filename in filenames:
print(os.path.join(root, filename))
@@ -195,9 +195,9 @@ if __name__ == "__main__":
elif "scans" in sys.argv:
import_surveyscans()
elif "caves" in sys.argv:
reload_db()
make_dirs()
pageredirects()
# reload_db()
# make_dirs()
# pageredirects()
import_caves()
elif "people" in sys.argv:
import_people()
@@ -218,14 +218,14 @@ if __name__ == "__main__":
import_descriptions()
parse_descriptions()
elif "survex" in sys.argv:
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
# management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
import_survex()
elif "survexpos" in sys.argv:
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
# management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
import parsers.survex
parsers.survex.LoadPos()
elif "logbooks" in sys.argv:
management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
# management.call_command('syncdb', interactive=False) # this sets the path so that import settings works in import_survex
import_logbooks()
elif "autologbooks" in sys.argv:
import_auto_logbooks()

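The commented-out `management.call_command('syncdb', interactive=False)` lines reflect that `syncdb` was removed from later Django releases. If the standalone script still needs to create tables before importing, the modern equivalent would be `migrate`; this one-liner is an assumption about intent, not something the commit adds:

```python
from django.core import management

# 'migrate' replaces the removed 'syncdb' for creating/updating tables.
management.call_command('migrate', interactive=False)
```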
View File

@@ -115,7 +115,7 @@ def ParseDate(tripdate, year):
assert False, tripdate
return datetime.date(year, month, day)
# 2007, 2008, 2006
# 2006, 2008 - 2010
def Parselogwikitxt(year, expedition, txt):
trippara = re.findall(r"===(.*?)===([\s\S]*?)(?====)", txt)
for triphead, triptext in trippara:
@@ -140,9 +140,9 @@ def Parselogwikitxt(year, expedition, txt):
#print "\n", tripcave, "--- ppp", trippeople, len(triptext)
EnterLogIntoDbase(date = ldate, place = tripcave, title = tripplace, text = triptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
# 2002, 2004, 2005
# 2002, 2004, 2005, 2007, 2011 - 2018
def Parseloghtmltxt(year, expedition, txt):
print(" - Using log html parser")
#print(" - Starting log html parser")
tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
logbook_entry_count = 0
for trippara in tripparas:
@@ -163,7 +163,6 @@ def Parseloghtmltxt(year, expedition, txt):
print("can't parse: ", trippara) # this is 2007 which needs editing
#assert s, trippara
continue
tripid, tripid1, tripdate, trippeople, triptitle, triptext, tu = s.groups()
ldate = ParseDate(tripdate.strip(), year)
#assert tripid[:-1] == "t" + tripdate, (tripid, tripdate)
@@ -174,7 +173,7 @@ def Parseloghtmltxt(year, expedition, txt):
tripcave = triptitles[0]
else:
tripcave = "UNKNOWN"
#print "\n", tripcave, "--- ppp", trippeople, len(triptext)
#print("\n", tripcave, "--- ppp", trippeople, len(triptext))
ltriptext = re.sub(r"</p>", "", triptext)
ltriptext = re.sub(r"\s*?\n\s*", " ", ltriptext)
ltriptext = re.sub(r"<p>", "\n\n", ltriptext).strip()
@@ -183,7 +182,7 @@ def Parseloghtmltxt(year, expedition, txt):
print(" - No trip entrys found in logbook, check the syntax matches htmltxt format")
# main parser for pre-2001. simpler because the data has been hacked so much to fit it
# main parser for 1991 - 2001. simpler because the data has been hacked so much to fit it
def Parseloghtml01(year, expedition, txt):
tripparas = re.findall(r"<hr[\s/]*>([\s\S]*?)(?=<hr)", txt)
for trippara in tripparas:
@@ -229,7 +228,7 @@ def Parseloghtml01(year, expedition, txt):
# could includ the tripid (url link for cross referencing)
EnterLogIntoDbase(date=ldate, place=tripcave, title=triptitle, text=ltriptext, trippeople=trippeople, expedition=expedition, logtime_underground=0)
# parser for 2003
def Parseloghtml03(year, expedition, txt):
tripparas = re.findall(r"<hr\s*/>([\s\S]*?)(?=<hr)", txt)
for trippara in tripparas:
@@ -281,8 +280,7 @@ def SetDatesFromLogbookEntries(expedition):
def LoadLogbookForExpedition(expedition):
""" Parses all logbook entries for one expedition """
expowebbase = os.path.join(settings.EXPOWEB, "years")
#year = str(expedition.year)
expowebbase = os.path.join(settings.EXPOWEB, "years")
yearlinks = settings.LOGBOOK_PARSER_SETTINGS
logbook_parseable = False
@@ -294,6 +292,7 @@ def LoadLogbookForExpedition(expedition):
file_in.close()
parsefunc = year_settings[1]
logbook_parseable = True
print(" - Parsing logbook: " + year_settings[0] + "\n - Using parser: " + year_settings[1])
else:
try:
file_in = open(os.path.join(expowebbase, expedition.year, settings.DEFAULT_LOGBOOK_FILE))
@@ -304,7 +303,7 @@ def LoadLogbookForExpedition(expedition):
parsefunc = settings.DEFAULT_LOGBOOK_PARSER
except (IOError):
logbook_parseable = False
print("Couldn't open default logbook file and nothing set for expo " + expedition.year)
print("Couldn't open default logbook file and nothing in settings for expo " + expedition.year)
if logbook_parseable:
parser = globals()[parsefunc]

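The logbook loader resolves the per-year parser with `parser = globals()[parsefunc]`, i.e. the parser is chosen from the function-name string stored in `settings.LOGBOOK_PARSER_SETTINGS`. A small self-contained sketch of that lookup-by-name pattern (the year and trip text are dummies):

```python
def Parseloghtmltxt(year, expedition, txt):
    print(" - html-style parser stub for %s (%d chars)" % (year, len(txt)))

def Parselogwikitxt(year, expedition, txt):
    print(" - wiki-style parser stub for %s (%d chars)" % (year, len(txt)))

# The settings store the parser as a string; globals() maps it back to the
# function object defined in this module, which is then called normally.
parsefunc = "Parseloghtmltxt"
parser = globals()[parsefunc]
parser("2018", None, "<hr />example trip text")
```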
View File

@@ -67,8 +67,8 @@ def LoadPersonsExpos():
for personline in personreader:
name = personline[header["Name"]]
name = re.sub("<.*?>", "", name)
mname = re.match("(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
name = re.sub(r"<.*?>", "", name)
mname = re.match(r"(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
nickname = mname.group(3) or ""
lookupAttribs={'first_name':mname.group(1), 'last_name':(mname.group(2) or "")}

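The people-parser hunk only adds `r` prefixes to the regex literals; in Python 3, escape sequences such as `\w` inside ordinary strings trigger DeprecationWarnings (and are slated to become errors), so raw strings are the idiomatic form. A tiny sketch with sample input (the example name is just the commit author, used purely as illustrative data):

```python
import re

name = "Sam Wenham (Sam)"  # example "Name (nickname)" cell, as the parser sees it
# Raw string: backslashes reach the regex engine untouched, and no warnings fire.
mname = re.match(r"(\w+)(?:\s((?:van |ten )?\w+))?(?:\s\(([^)]*)\))?", name)
print(mname.group(1), mname.group(2), mname.group(3) or "")
# -> Sam Wenham Sam
```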
View File

@@ -16,7 +16,7 @@
{% if entry.is_deletion %}
{{ entry.object_repr }}
{% else %}
<a href="admin/{{ entry.get_admin_url }}">{{ entry.object_repr }}</a>
<a href="admin/{{ entry.get_admin_url }}/">{{ entry.object_repr }}</a>
{% endif %}
<br/>
{% if entry.content_type %}