2022-04-05 08:37:31 +01:00
|
|
|
import datetime
|
|
|
|
import os.path
|
|
|
|
import re
|
2022-10-09 22:28:57 +01:00
|
|
|
import operator
|
2022-04-05 08:37:31 +01:00
|
|
|
from collections import OrderedDict
|
|
|
|
|
|
|
|
import django.db.models
|
|
|
|
from django.db.models import Min, Max
|
|
|
|
from django.shortcuts import render
|
|
|
|
from django.template import Context, loader
|
|
|
|
from django.template.defaultfilters import slugify
|
|
|
|
from django.utils import timezone
|
|
|
|
#from django.views.generic.list import ListView
|
|
|
|
|
|
|
|
from troggle.core.models.troggle import Expedition, Person, PersonExpedition, DataIssue
|
|
|
|
from troggle.core.models.caves import Cave, LogbookEntry, Entrance
|
|
|
|
from troggle.core.models.survex import SurvexBlock, SurvexStation
|
2022-10-12 21:10:48 +01:00
|
|
|
from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends
|
2022-04-05 08:37:31 +01:00
|
|
|
|
|
|
|
import troggle.settings as settings
|
|
|
|
|
|
|
|
'''Very simple report pages summarizing data about the whole set of expeditions
and the status of data inconsistencies.
'''
|
|
|
|
|
|
|
|
def pathsreport(request):
    """Report page listing the path-like constants configured in troggle/settings.

    Builds three views of the same data for the template:
      - bycodeslist: (name, value, type) sorted by setting name
      - bypathslist: (value, name, type) sorted by path value
      - ncodes: number of settings reported
    """
    # Single authoritative list of the settings names we report on.
    # (The original code spelled this list out twice -- once for values and
    # once for types -- which invited the two copies drifting apart.)
    # Names that were commented out historically (BOGUS, CSSLIB_URL, FILES,
    # SURVEYS, SCANS_URL) are intentionally omitted.
    setting_names = [
        "JSLIB_URL",
        "JSLIB_ROOT",
        "CAVEDESCRIPTIONS",
        "DIR_ROOT",
        "ENTRANCEDESCRIPTIONS",
        "EXPOUSER_EMAIL",
        "EXPOUSERPASS",
        "EXPOUSER",
        "EXPOWEB",
        "EXPOWEB_URL",
        "LIBDIR",
        "LOGFILE",
        "LOGIN_REDIRECT_URL",
        "MEDIA_ROOT",
        "MEDIA_URL",
        "PHOTOS_URL",
        "PYTHON_PATH",
        "REPOS_ROOT_PATH",
        "ROOT_URLCONF",
        "STATIC_URL",
        "SURVEX_DATA",
        "SCANS_ROOT",
        "SURVEXPORT",
        "DRAWINGS_DATA",
        "URL_ROOT",
    ]

    pathsdict = OrderedDict()   # name -> printable value
    pathstype = OrderedDict()   # name -> python type of the value
    try:
        for name in setting_names:
            # AttributeError here means the constant is missing from settings.
            value = getattr(settings, name)
            # Never reveal the password itself, only that it is set.
            pathsdict[name] = "<redacted>" if name == "EXPOUSERPASS" else str(value)
            pathstype[name] = type(value)
    except AttributeError:
        # Was a bare 'except:' (and a typo 'exta'); be specific and consistent.
        pathsdict["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"
        pathstype["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"

    # settings are unique but paths are not
    ncodes = len(pathsdict)
    bycodeslist2 = [(k, p, str(pathstype[k])) for k, p in sorted(pathsdict.items())]

    # List sorted by path value. NB this deliberately consumes pathsdict:
    # each matched key is deleted, so the 'pathsdict' passed to the template
    # below ends up empty (preserving the original behaviour).
    bypathslist = []
    for p in sorted(pathsdict.values()):
        for k in list(pathsdict.keys()):
            if pathsdict[k] == p:
                bypathslist.append((p, k, str(pathstype[k])))
                del pathsdict[k]
                break

    return render(request, 'pathsreport.html', {
        "pathsdict": pathsdict,
        "bycodeslist": bycodeslist2,
        "bypathslist": bypathslist,
        "ncodes": ncodes})
|
|
|
|
|
|
|
|
def stats(request):
    """Render the statistics page: object counts plus per-expedition
    survey-leg counts and surveyed lengths (and their grand totals).
    """
    statsDict = {
        'expoCount': f"{Expedition.objects.count():,}",
        'caveCount': f"{Cave.objects.count():,}",
        'personCount': f"{Person.objects.count():,}",
        'logbookEntryCount': f"{LogbookEntry.objects.count():,}",
    }

    legsbyexpo = []
    addupsurvexlength = 0   # metres, summed across all expeditions
    addupsurvexlegs = 0     # total number of survey legs

    for expedition in Expedition.objects.all():
        survexleglength = 0.0
        legsyear = 0
        for survexblock in expedition.survexblock_set.all():
            survexleglength += survexblock.legslength
            legsyear += int(survexblock.legsall)
        addupsurvexlength += survexleglength
        addupsurvexlegs += legsyear
        legsbyexpo.append((expedition,
                           {"nsurvexlegs": f"{legsyear:,}",
                            "survexleglength": f"{survexleglength:,.0f}"}))
    # Most recent expedition first.
    legsbyexpo.reverse()

    renderDict = {**statsDict,
                  "addupsurvexlength": addupsurvexlength / 1000,  # km
                  "legsbyexpo": legsbyexpo,
                  "nsurvexlegs": addupsurvexlegs}
    return render(request, 'statistics.html', renderDict)
|
|
|
|
|
|
|
|
def dataissues(request):
    '''Each issue has a parser, a message and a url linking to the offending object after loading
    '''
    # Case-insensitive ordering by parser name then message text.
    dilist = sorted(DataIssue.objects.all(),
                    key=lambda di: di.parser.lower() + di.message.lower())
    return render(request, 'dataissues.html', {'didict': dilist})
|
|
|
|
|
|
|
|
def eastings(request):
    '''report each Northing/Easting pair wherever recorded
    '''
    # Only entrances that actually have at least one coordinate recorded.
    ents = [e for e in Entrance.objects.all() if e.easting or e.northing]
    stations = SurvexStation.objects.all()

    return render(request, 'eastings.html', {'ents': ents, 'stations': stations})
|
2022-10-08 22:17:53 +01:00
|
|
|
|
|
|
|
def aliases(request, year):
    '''Page which displays a list of all the person aliases in a specific year
    '''
    if not year:
        year = 1998

    # filter() returns a QuerySet even though a year identifies one expedition.
    expo = Expedition.objects.filter(year=year)[0]
    personexpeditions = PersonExpedition.objects.filter(expedition=expo)
    persons = list(Person.objects.all().order_by('last_name'))

    # 'lookups' rather than 'aliases' to avoid shadowing this view function.
    lookups = GetPersonExpeditionNameLookup(expo)
    aliasdict = {key: lookups[key] for key in sorted(lookups)}
    invert = {}

    return render(request, 'aliases.html',
                  {'year': year,
                   'aliasdict': aliasdict,
                   'foreign_friends': foreign_friends,
                   'invert': invert,
                   'personexpeditions': personexpeditions,
                   'persons': persons})
|