Mirror of https://expo.survex.com/repositories/troggle/.git, synced 2025-12-17 16:27:07 +00:00
ran 'black' to reformat all the core files
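The exact command used is not recorded in the commit; a typical invocation over the core files would be something along the lines of (the target path is an assumption):

    # hypothetical invocation; target path assumed, not recorded in the commit
    black troggle/core

black only rewrites layout (quoting, spacing, line wrapping and trailing commas), so the behaviour of the reformatted code is unchanged.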
@@ -16,96 +16,97 @@ import troggle.settings as settings
 from troggle.core.models.caves import Cave, Entrance
 from troggle.core.models.logbooks import LogbookEntry
 from troggle.core.models.survex import SurvexBlock, SurvexStation
-from troggle.core.models.troggle import (DataIssue, Expedition, Person,
-                                         PersonExpedition)
-from troggle.parsers.people import (GetPersonExpeditionNameLookup,
-                                    foreign_friends)
+from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
+from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends

-#from django.views.generic.list import ListView
-'''Very simple report pages summarizing data about the whole set of expeditions and of
+# from django.views.generic.list import ListView
+"""Very simple report pages summarizing data about the whole set of expeditions and of
 the status of data inconsistencies
-'''
+"""


 def therionissues(request):
     """Page displaying contents of a file produced during data import"""
     logname = "therionrefs.log"
-    logpath = (Path(settings.PYTHON_PATH, logname))
+    logpath = Path(settings.PYTHON_PATH, logname)
     therionlog = []
     newlog = []


     if Path(logpath).is_file:
         with open(logpath, "r") as f:
-            therionlog = f.readlines()
-        print(f"{logpath} has {len(therionlog)} entries")
+            therionlog = f.readlines()
+        print(f"{logpath} has {len(therionlog)} entries")
     else:
         print(f"{logpath} NOT FOUND {len(therionlog)}")


     for line in therionlog:
         line = line.replace("! Un-parsed image filename:", "")
         newlog.append(line)
-    return render(request, 'therionreport.html', {"therionlog":newlog})
+    return render(request, "therionreport.html", {"therionlog": newlog})


 def surveximport(request):
     """Page displaying contents of a file produced during data import"""
     logname = "svxlinear.log"
-    logpath = (Path(settings.PYTHON_PATH, logname))
-
+    logpath = Path(settings.PYTHON_PATH, logname)
+
     if Path(logpath).is_file:
         with open(logpath, "r") as f:
-            contents = f.read()
+            contents = f.read()
     else:
         print(f"{logpath} NOT FOUND {len(contents)}")
-    return render(request, 'survexreport.html', {"log":contents})
+    return render(request, "survexreport.html", {"log": contents})


 def survexdebug(request):
     """Page displaying contents of a file produced during data import"""
     logname = "svxblks.log"
-    logpath = (Path(settings.PYTHON_PATH, logname))
-
+    logpath = Path(settings.PYTHON_PATH, logname)
+
     if Path(logpath).is_file:
         with open(logpath, "r") as f:
-            contents = f.read()
+            contents = f.read()
     else:
         print(f"{logpath} NOT FOUND {len(contents)}")
-    return render(request, 'survexdebug.html', {"log":contents})
+    return render(request, "survexdebug.html", {"log": contents})


 def pathsreport(request):
     """The CONSTANTs declared in the settings and localsettings and how they have
-    been evaluated for this specific installation - live """
+    been evaluated for this specific installation - live"""
     pathsdict = OrderedDict()
     try:
         pathsdict = {
-            # "BOGUS" : str( settings.BOGUS),
-            "JSLIB_URL" : str( settings.JSLIB_URL),
-            "JSLIB_ROOT" : str( settings.JSLIB_ROOT),
-            # "CSSLIB_URL" : str( settings.CSSLIB_URL),
-            "CAVEDESCRIPTIONS" : str( settings.CAVEDESCRIPTIONS),
-            "DIR_ROOT" : str( settings.DIR_ROOT),
-            "ENTRANCEDESCRIPTIONS" : str( settings.ENTRANCEDESCRIPTIONS),
-            "EXPOUSER_EMAIL" : str( settings.EXPOUSER_EMAIL),
-            "EXPOUSERPASS" : str("<redacted>"),
-            "EXPOUSER" : str( settings.EXPOUSER),
-            "EXPOWEB" : str( settings.EXPOWEB),
-            "EXPOWEB_URL" : str( settings.EXPOWEB_URL),
-            # "FILES" : str( settings.FILES),
-            "LIBDIR" : str( settings.LIBDIR),
-            "LOGFILE" : str( settings.LOGFILE),
-            "LOGIN_REDIRECT_URL" : str( settings.LOGIN_REDIRECT_URL),
-            "MEDIA_ROOT" : str( settings.MEDIA_ROOT),
-            "MEDIA_URL" : str( settings.MEDIA_URL),
-            "PHOTOS_URL" : str( settings.PHOTOS_URL),
-            "PYTHON_PATH" : str( settings.PYTHON_PATH),
-            "REPOS_ROOT_PATH" : str( settings.REPOS_ROOT_PATH),
-            "ROOT_URLCONF" : str( settings.ROOT_URLCONF),
-            "STATIC_URL" : str( settings.STATIC_URL),
-            "SURVEX_DATA" : str( settings.SURVEX_DATA),
-            "SCANS_ROOT" : str( settings.SCANS_ROOT),
-            # "SURVEYS" : str( settings.SURVEYS),
-            # "SCANS_URL" : str( settings.SCANS_URL),
-            "SURVEXPORT" : str( settings.SURVEXPORT),
-            "DRAWINGS_DATA" : str( settings.DRAWINGS_DATA),
-            "URL_ROOT" : str( settings.URL_ROOT)
+            # "BOGUS" : str( settings.BOGUS),
+            "JSLIB_URL": str(settings.JSLIB_URL),
+            "JSLIB_ROOT": str(settings.JSLIB_ROOT),
+            # "CSSLIB_URL" : str( settings.CSSLIB_URL),
+            "CAVEDESCRIPTIONS": str(settings.CAVEDESCRIPTIONS),
+            "DIR_ROOT": str(settings.DIR_ROOT),
+            "ENTRANCEDESCRIPTIONS": str(settings.ENTRANCEDESCRIPTIONS),
+            "EXPOUSER_EMAIL": str(settings.EXPOUSER_EMAIL),
+            "EXPOUSERPASS": str("<redacted>"),
+            "EXPOUSER": str(settings.EXPOUSER),
+            "EXPOWEB": str(settings.EXPOWEB),
+            "EXPOWEB_URL": str(settings.EXPOWEB_URL),
+            # "FILES" : str( settings.FILES),
+            "LIBDIR": str(settings.LIBDIR),
+            "LOGFILE": str(settings.LOGFILE),
+            "LOGIN_REDIRECT_URL": str(settings.LOGIN_REDIRECT_URL),
+            "MEDIA_ROOT": str(settings.MEDIA_ROOT),
+            "MEDIA_URL": str(settings.MEDIA_URL),
+            "PHOTOS_URL": str(settings.PHOTOS_URL),
+            "PYTHON_PATH": str(settings.PYTHON_PATH),
+            "REPOS_ROOT_PATH": str(settings.REPOS_ROOT_PATH),
+            "ROOT_URLCONF": str(settings.ROOT_URLCONF),
+            "STATIC_URL": str(settings.STATIC_URL),
+            "SURVEX_DATA": str(settings.SURVEX_DATA),
+            "SCANS_ROOT": str(settings.SCANS_ROOT),
+            # "SURVEYS" : str( settings.SURVEYS),
+            # "SCANS_URL" : str( settings.SCANS_URL),
+            "SURVEXPORT": str(settings.SURVEXPORT),
+            "DRAWINGS_DATA": str(settings.DRAWINGS_DATA),
+            "URL_ROOT": str(settings.URL_ROOT),
         }
     except:
         pathsdict["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
@@ -113,36 +114,36 @@ def pathsreport(request):
     pathstype = OrderedDict()
     try:
         pathstype = {
-            # "BOGUS" : type(settings.BOGUS),
-            "JSLIB_URL" : type(settings.JSLIB_URL),
-            "JSLIB_ROOT" : type( settings.JSLIB_ROOT),
-            # "CSSLIB_URL" : type(settings.CSSLIB_URL),
-            "CAVEDESCRIPTIONS" : type(settings.CAVEDESCRIPTIONS),
-            "DIR_ROOT" : type(settings.DIR_ROOT),
-            "ENTRANCEDESCRIPTIONS" : type(settings.ENTRANCEDESCRIPTIONS),
-            "EXPOUSER_EMAIL" : type(settings.EXPOUSER_EMAIL),
-            "EXPOUSERPASS" : type(settings.EXPOUSERPASS),
-            "EXPOUSER" : type(settings.EXPOUSER),
-            "EXPOWEB" : type(settings.EXPOWEB),
-            "EXPOWEB_URL" : type(settings.EXPOWEB_URL),
-            # "FILES" : type(settings.FILES),
-            "LIBDIR" : type( settings.LIBDIR),
-            "LOGFILE" : type(settings.LOGFILE),
-            "LOGIN_REDIRECT_URL" : type(settings.LOGIN_REDIRECT_URL),
-            "MEDIA_ROOT" : type(settings.MEDIA_ROOT),
-            "MEDIA_URL" : type(settings.MEDIA_URL),
-            "PHOTOS_URL" : type(settings.PHOTOS_URL),
-            "PYTHON_PATH" : type(settings.PYTHON_PATH),
-            "REPOS_ROOT_PATH" : type(settings.REPOS_ROOT_PATH),
-            "ROOT_URLCONF" : type(settings.ROOT_URLCONF),
-            "STATIC_URL" : type(settings.STATIC_URL),
-            "SURVEX_DATA" : type(settings.SURVEX_DATA),
-            "SCANS_ROOT" : type(settings.SCANS_ROOT),
-            # "SURVEYS" : type(settings.SURVEYS),
-            # "SCANS_URL" : type(settings.SCANS_URL),
-            "SURVEXPORT" : type(settings.SURVEXPORT),
-            "DRAWINGS_DATA" : type(settings.DRAWINGS_DATA),
-            "URL_ROOT" : type(settings.URL_ROOT)
+            # "BOGUS" : type(settings.BOGUS),
+            "JSLIB_URL": type(settings.JSLIB_URL),
+            "JSLIB_ROOT": type(settings.JSLIB_ROOT),
+            # "CSSLIB_URL" : type(settings.CSSLIB_URL),
+            "CAVEDESCRIPTIONS": type(settings.CAVEDESCRIPTIONS),
+            "DIR_ROOT": type(settings.DIR_ROOT),
+            "ENTRANCEDESCRIPTIONS": type(settings.ENTRANCEDESCRIPTIONS),
+            "EXPOUSER_EMAIL": type(settings.EXPOUSER_EMAIL),
+            "EXPOUSERPASS": type(settings.EXPOUSERPASS),
+            "EXPOUSER": type(settings.EXPOUSER),
+            "EXPOWEB": type(settings.EXPOWEB),
+            "EXPOWEB_URL": type(settings.EXPOWEB_URL),
+            # "FILES" : type(settings.FILES),
+            "LIBDIR": type(settings.LIBDIR),
+            "LOGFILE": type(settings.LOGFILE),
+            "LOGIN_REDIRECT_URL": type(settings.LOGIN_REDIRECT_URL),
+            "MEDIA_ROOT": type(settings.MEDIA_ROOT),
+            "MEDIA_URL": type(settings.MEDIA_URL),
+            "PHOTOS_URL": type(settings.PHOTOS_URL),
+            "PYTHON_PATH": type(settings.PYTHON_PATH),
+            "REPOS_ROOT_PATH": type(settings.REPOS_ROOT_PATH),
+            "ROOT_URLCONF": type(settings.ROOT_URLCONF),
+            "STATIC_URL": type(settings.STATIC_URL),
+            "SURVEX_DATA": type(settings.SURVEX_DATA),
+            "SCANS_ROOT": type(settings.SCANS_ROOT),
+            # "SURVEYS" : type(settings.SURVEYS),
+            # "SCANS_URL" : type(settings.SCANS_URL),
+            "SURVEXPORT": type(settings.SURVEXPORT),
+            "DRAWINGS_DATA": type(settings.DRAWINGS_DATA),
+            "URL_ROOT": type(settings.URL_ROOT),
         }
     except:
         pathstype["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
@@ -150,15 +151,15 @@ def pathsreport(request):

     # settings are unique by paths are not
     ncodes = len(pathsdict)
-    bycodeslist = sorted(pathsdict.items()) # a list of tuples
+    bycodeslist = sorted(pathsdict.items())  # a list of tuples
     bycodeslist2 = []

     for k, p in bycodeslist:
         bycodeslist2.append((k, p, str(pathstype[k])))

-    bypaths = sorted(pathsdict.values()) # a list
-
+    bypaths = sorted(pathsdict.values())  # a list
     bypathslist = []


     for p in bypaths:
         for k in pathsdict.keys():
             if pathsdict[k] == p:
@@ -166,80 +167,92 @@ def pathsreport(request):
                 del pathsdict[k]
                 break

-    return render(request, 'pathsreport.html', {
-        "pathsdict":pathsdict,
-        "bycodeslist":bycodeslist2,
-        "bypathslist":bypathslist,
-        "ncodes":ncodes})
+    return render(
+        request,
+        "pathsreport.html",
+        {"pathsdict": pathsdict, "bycodeslist": bycodeslist2, "bypathslist": bypathslist, "ncodes": ncodes},
+    )


 def stats(request):
-    statsDict={}
-    statsDict['expoCount'] = f"{Expedition.objects.count():,}"
-    statsDict['caveCount'] = f"{Cave.objects.count():,}"
-    statsDict['personCount'] = f"{Person.objects.count():,}"
-    statsDict['logbookEntryCount'] = f"{LogbookEntry.objects.count():,}"
+    statsDict = {}
+    statsDict["expoCount"] = f"{Expedition.objects.count():,}"
+    statsDict["caveCount"] = f"{Cave.objects.count():,}"
+    statsDict["personCount"] = f"{Person.objects.count():,}"
+    statsDict["logbookEntryCount"] = f"{LogbookEntry.objects.count():,}"

-    legsbyexpo = [ ]
+    legsbyexpo = []
     addupsurvexlength = 0
     addupsurvexlegs = 0
     for expedition in Expedition.objects.all():
         survexblocks = expedition.survexblock_set.all()
-        legsyear=0
+        legsyear = 0
         survexleglength = 0.0
         for survexblock in survexblocks:
             survexleglength += survexblock.legslength
             legsyear += int(survexblock.legsall)
         addupsurvexlength += survexleglength
         addupsurvexlegs += legsyear
-        legsbyexpo.append((expedition, {"nsurvexlegs": f"{legsyear:,}",
-                                        "survexleglength":f"{survexleglength:,.0f}"}))
-    legsbyexpo.reverse()
+        legsbyexpo.append((expedition, {"nsurvexlegs": f"{legsyear:,}", "survexleglength": f"{survexleglength:,.0f}"}))
+    legsbyexpo.reverse()

+    renderDict = {
+        **statsDict,
+        **{"addupsurvexlength": addupsurvexlength / 1000, "legsbyexpo": legsbyexpo, "nsurvexlegs": addupsurvexlegs},
+    }  # new syntax
+    return render(request, "statistics.html", renderDict)

-    renderDict = {**statsDict, **{ "addupsurvexlength":addupsurvexlength/1000, "legsbyexpo":legsbyexpo, "nsurvexlegs":addupsurvexlegs }} # new syntax
-    return render(request,'statistics.html', renderDict)

 def dataissues(request):
-    '''Each issue has a parser, a message and a url linking to the offending object after loading
-    '''
+    """Each issue has a parser, a message and a url linking to the offending object after loading"""

     def myFunc(di):
         return di.parser.lower() + di.message.lower()


     dilist = list(DataIssue.objects.all())
-    dilist.sort(key = myFunc)
-
-    return render(request,'dataissues.html', {'didict': dilist})
+    dilist.sort(key=myFunc)
+
+    return render(request, "dataissues.html", {"didict": dilist})


 def eastings(request):
-    '''report each Northing/Easting pair wherever recorded
-    '''
+    """report each Northing/Easting pair wherever recorded"""
     ents = []
     entrances = Entrance.objects.all()
     for e in entrances:
         if e.easting or e.northing:
             ents.append(e)


     stations = SurvexStation.objects.all()

-    return render(request,'eastings.html', {'ents': ents, 'stations': stations})
-
-
+    return render(request, "eastings.html", {"ents": ents, "stations": stations})


 def aliases(request, year):
-    '''Page which displays a list of all the person aliases in a specific year
-    '''
-
+    """Page which displays a list of all the person aliases in a specific year"""

     if not year:
         year = 1998
-    expo = Expedition.objects.filter(year=year)[0] # returns a set, even though we know there is only one
+    expo = Expedition.objects.filter(year=year)[0]  # returns a set, even though we know there is only one
     personexpeditions = PersonExpedition.objects.filter(expedition=expo)
-    persons = list(Person.objects.all().order_by('last_name'))
-
-
-    aliases = GetPersonExpeditionNameLookup(expo)
-
-    aliasdict={}
-    for i in sorted(aliases):
-        aliasdict[i]=aliases[i]
-    invert ={}
+    persons = list(Person.objects.all().order_by("last_name"))


-    return render(request,'aliases.html', {'year': year, 'aliasdict': aliasdict,
-        'foreign_friends': foreign_friends, 'invert': invert,'personexpeditions': personexpeditions, 'persons': persons})
+    aliases = GetPersonExpeditionNameLookup(expo)
+
+    aliasdict = {}
+    for i in sorted(aliases):
+        aliasdict[i] = aliases[i]
+    invert = {}
+
+    return render(
+        request,
+        "aliases.html",
+        {
+            "year": year,
+            "aliasdict": aliasdict,
+            "foreign_friends": foreign_friends,
+            "invert": invert,
+            "personexpeditions": personexpeditions,
+            "persons": persons,
+        },
+    )