2
0
mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-25 16:51:54 +00:00
troggle/core/views/statistics.py

253 lines
9.3 KiB
Python
Raw Normal View History

from collections import OrderedDict
from pathlib import Path

from django.shortcuts import render

import troggle.settings as settings
from troggle.core.models.caves import Cave, Entrance
from troggle.core.models.logbooks import LogbookEntry
from troggle.core.models.survex import SurvexStation
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends

# from django.views.generic.list import ListView
"""Very simple report pages summarizing data about the whole set of expeditions and of
the status of data inconsistencies
"""
def therionissues(request):
    """Page displaying contents of a file produced during data import.

    Reads therionrefs.log from the import-scripts directory, strips the
    '! Un-parsed image filename:' marker from each line and renders the rest.
    """
    logname = "therionrefs.log"
    logpath = Path(settings.PYTHON_PATH, logname)

    therionlog = []
    newlog = []

    # BUG FIX: is_file is a method; the bare attribute reference is always
    # truthy, so the original code never took the NOT FOUND branch and would
    # crash in open() instead when the log file was missing.
    if logpath.is_file():
        with open(logpath, "r") as f:
            therionlog = f.readlines()
        print(f"{logpath} has {len(therionlog)} entries")
    else:
        print(f"{logpath} NOT FOUND {len(therionlog)}")

    for line in therionlog:
        newlog.append(line.replace("! Un-parsed image filename:", ""))
    return render(request, "therionreport.html", {"therionlog": newlog})
def surveximport(request):
    """Page displaying contents of a file produced during data import."""
    logname = "svxlinear.log"
    logpath = Path(settings.PYTHON_PATH, logname)

    # BUG FIX: contents was unbound when the file was absent, so both the
    # print() and the render() below raised NameError.
    contents = ""
    # BUG FIX: is_file must be called; the bare method reference is always truthy.
    if logpath.is_file():
        with open(logpath, "r") as f:
            contents = f.read()
    else:
        print(f"{logpath} NOT FOUND {len(contents)}")
    return render(request, "survexreport.html", {"log": contents})
def survexdebug(request):
    """Page displaying contents of a file produced during data import."""
    logname = "svxblks.log"
    logpath = Path(settings.PYTHON_PATH, logname)

    # BUG FIX: contents was unbound when the file was absent, so both the
    # print() and the render() below raised NameError.
    contents = ""
    # BUG FIX: is_file must be called; the bare method reference is always truthy.
    if logpath.is_file():
        with open(logpath, "r") as f:
            contents = f.read()
    else:
        print(f"{logpath} NOT FOUND {len(contents)}")
    return render(request, "survexdebug.html", {"log": contents})
def pathsreport(request):
    """The CONSTANTs declared in the settings and localsettings and how they have
    been evaluated for this specific installation - live.

    Renders three views of the same data: by constant name, by path value,
    and the leftover dict (emptied by the by-path pass below).
    """
    pathsdict = OrderedDict()
    try:
        pathsdict = {
            # "BOGUS" : str( settings.BOGUS),
            "JSLIB_URL": str(settings.JSLIB_URL),
            "JSLIB_ROOT": str(settings.JSLIB_ROOT),
            # "CSSLIB_URL" : str( settings.CSSLIB_URL),
            "CAVEDESCRIPTIONS": str(settings.CAVEDESCRIPTIONS),
            "DIR_ROOT": str(settings.DIR_ROOT),
            "ENTRANCEDESCRIPTIONS": str(settings.ENTRANCEDESCRIPTIONS),
            "EXPOUSER_EMAIL": str(settings.EXPOUSER_EMAIL),
            "EXPOUSERPASS": str("<redacted>"),
            "EXPOUSER": str(settings.EXPOUSER),
            "EXPOWEB": str(settings.EXPOWEB),
            "EXPOWEB_URL": str(settings.EXPOWEB_URL),
            # "FILES" : str( settings.FILES),
            "LIBDIR": str(settings.LIBDIR),
            "LOGFILE": str(settings.LOGFILE),
            "LOGIN_REDIRECT_URL": str(settings.LOGIN_REDIRECT_URL),
            "MEDIA_ROOT": str(settings.MEDIA_ROOT),
            "MEDIA_URL": str(settings.MEDIA_URL),
            "PHOTOS_URL": str(settings.PHOTOS_URL),
            "PYTHON_PATH": str(settings.PYTHON_PATH),
            "REPOS_ROOT_PATH": str(settings.REPOS_ROOT_PATH),
            "ROOT_URLCONF": str(settings.ROOT_URLCONF),
            "STATIC_URL": str(settings.STATIC_URL),
            "SURVEX_DATA": str(settings.SURVEX_DATA),
            "SCANS_ROOT": str(settings.SCANS_ROOT),
            # "SURVEYS" : str( settings.SURVEYS),
            # "SCANS_URL" : str( settings.SCANS_URL),
            "SURVEXPORT": str(settings.SURVEXPORT),
            "DRAWINGS_DATA": str(settings.DRAWINGS_DATA),
            "URL_ROOT": str(settings.URL_ROOT),
        }
    # FIX: was a bare `except:` (would swallow KeyboardInterrupt/SystemExit);
    # a missing settings constant raises AttributeError.
    except AttributeError:
        # FIX: typo "exta" -> "extra" in the user-visible message
        pathsdict["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"
    pathstype = OrderedDict()
    try:
        pathstype = {
            # "BOGUS" : type(settings.BOGUS),
            "JSLIB_URL": type(settings.JSLIB_URL),
            "JSLIB_ROOT": type(settings.JSLIB_ROOT),
            # "CSSLIB_URL" : type(settings.CSSLIB_URL),
            "CAVEDESCRIPTIONS": type(settings.CAVEDESCRIPTIONS),
            "DIR_ROOT": type(settings.DIR_ROOT),
            "ENTRANCEDESCRIPTIONS": type(settings.ENTRANCEDESCRIPTIONS),
            "EXPOUSER_EMAIL": type(settings.EXPOUSER_EMAIL),
            "EXPOUSERPASS": type(settings.EXPOUSERPASS),
            "EXPOUSER": type(settings.EXPOUSER),
            "EXPOWEB": type(settings.EXPOWEB),
            "EXPOWEB_URL": type(settings.EXPOWEB_URL),
            # "FILES" : type(settings.FILES),
            "LIBDIR": type(settings.LIBDIR),
            "LOGFILE": type(settings.LOGFILE),
            "LOGIN_REDIRECT_URL": type(settings.LOGIN_REDIRECT_URL),
            "MEDIA_ROOT": type(settings.MEDIA_ROOT),
            "MEDIA_URL": type(settings.MEDIA_URL),
            "PHOTOS_URL": type(settings.PHOTOS_URL),
            "PYTHON_PATH": type(settings.PYTHON_PATH),
            "REPOS_ROOT_PATH": type(settings.REPOS_ROOT_PATH),
            "ROOT_URLCONF": type(settings.ROOT_URLCONF),
            "STATIC_URL": type(settings.STATIC_URL),
            "SURVEX_DATA": type(settings.SURVEX_DATA),
            "SCANS_ROOT": type(settings.SCANS_ROOT),
            # "SURVEYS" : type(settings.SURVEYS),
            # "SCANS_URL" : type(settings.SCANS_URL),
            "SURVEXPORT": type(settings.SURVEXPORT),
            "DRAWINGS_DATA": type(settings.DRAWINGS_DATA),
            "URL_ROOT": type(settings.URL_ROOT),
        }
    except AttributeError:
        pathstype["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"
        raise
    # settings are unique but paths are not
    ncodes = len(pathsdict)
    bycodeslist = sorted(pathsdict.items())  # a list of tuples
    bycodeslist2 = []
    for k, p in bycodeslist:
        bycodeslist2.append((k, p, str(pathstype[k])))
    bypaths = sorted(pathsdict.values())  # a list
    bypathslist = []
    # NOTE: this pass deliberately empties pathsdict (del below) so that
    # duplicate path values are each matched to a distinct constant name.
    for p in bypaths:
        for k in pathsdict.keys():
            if pathsdict[k] == p:
                bypathslist.append((p, k, str(pathstype[k])))
                del pathsdict[k]
                break
    return render(
        request,
        "pathsreport.html",
        {"pathsdict": pathsdict, "bycodeslist": bycodeslist2, "bypathslist": bypathslist, "ncodes": ncodes},
    )
def stats(request):
    """Calculates number of survey blocks, the number of survey legs and the survey length for each year.
    This is only underground survey legs, but includes ARGE as well as Expo survex files.
    """
    statsDict = {}
    statsDict["expoCount"] = f"{Expedition.objects.count():,}"
    statsDict["caveCount"] = f"{Cave.objects.count():,}"
    statsDict["personCount"] = f"{Person.objects.count():,}"
    statsDict["logbookEntryCount"] = f"{LogbookEntry.objects.count():,}"

    legsbyexpo = []
    addupsurvexlength = 0.0  # grand total length, metres
    addupsurvexlegs = 0  # grand total number of legs
    for expedition in Expedition.objects.all():
        # per-expedition accumulation over its survex blocks
        legsyear = 0
        survexleglength = 0.0
        for survexblock in expedition.survexblock_set.all():
            survexleglength += survexblock.legslength
            legsyear += int(survexblock.legsall)
        addupsurvexlength += survexleglength
        addupsurvexlegs += legsyear
        legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
    legsbyexpo.reverse()  # most recent expedition first

    renderDict = {
        **statsDict,
        # template shows km, accumulation is in metres
        **{"addupsurvexlength": addupsurvexlength / 1000, "legsbyexpo": legsbyexpo, "nsurvexlegs": addupsurvexlegs},
    }  # new syntax
    return render(request, "statistics.html", renderDict)
def dataissues(request):
    """Each issue has a parser, a message and a url linking to the offending object after loading"""
    # sort by parser then message, case-insensitively
    dilist = sorted(DataIssue.objects.all(), key=lambda di: di.parser.lower() + di.message.lower())
    return render(request, "dataissues.html", {"didict": dilist})
def eastings(request):
    """report each Northing/Easting pair wherever recorded"""
    # keep only entrances that carry at least one coordinate
    ents = [e for e in Entrance.objects.all() if e.easting or e.northing]
    stations = SurvexStation.objects.all()
    return render(request, "eastings.html", {"ents": ents, "stations": stations})
def aliases(request, year):
    """Page which displays a list of all the person aliases in a specific year"""
    if not year:
        year = 1998

    # filter() returns a queryset even though only one expedition matches
    expo = Expedition.objects.filter(year=year)[0]

    personexpeditions = PersonExpedition.objects.filter(expedition=expo)
    persons = list(Person.objects.all().order_by("last_name"))

    lookup = GetPersonExpeditionNameLookup(expo)
    # alias -> person, in alphabetical order of alias
    aliasdict = {alias: lookup[alias] for alias in sorted(lookup)}
    invert = {}

    context = {
        "year": year,
        "aliasdict": aliasdict,
        "foreign_friends": foreign_friends,
        "invert": invert,
        "personexpeditions": personexpeditions,
        "persons": persons,
    }
    return render(request, "aliases.html", context)