from collections import OrderedDict
from math import sqrt
from pathlib import Path

from django.shortcuts import render

import troggle.settings as settings
from troggle.core.models.caves import Cave, Entrance
from troggle.core.models.logbooks import LogbookEntry
from troggle.core.models.survex import SurvexStation, SurvexPersonRole
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
from troggle.core.models.wallets import Wallet
from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends

# from django.views.generic.list import ListView

"""Very simple report pages summarizing data about the whole set of expeditions
and the status of data inconsistencies.
"""

def svxfilewild(request, year=None):
    """Looks for survexfiles which do not have an associated
    wallet, per year
    """
    legsbyexpo = []
    addupsurvexlength = 0.0
    addupsurvexlegs = 0

    if not year:
        expos = Expedition.objects.all()
    else:
        expos = Expedition.objects.filter(year=year)

    for expedition in expos:
        survexblocks = expedition.survexblock_set.all()
        legsyear = 0
        survexleglength = 0.0
        for survexblock in survexblocks:
            survexleglength += survexblock.legslength
            legsyear += int(survexblock.legsall)
        addupsurvexlength += survexleglength
        addupsurvexlegs += legsyear
        legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
    legsbyexpo.reverse()

    svxwild = []
    wildlength = 0.0
    for expedition in expos:
        survexblocks = expedition.survexblock_set.all()
        for sb in survexblocks:
            # print(f"{sb=}")
            if sb.scanswallet is None:
                if sb.name != "rootblock":
                    svxwild.append(sb)
                    print(f" WILD {sb.survexfile} {sb.date}")
                    wildlength += sb.legslength
                    sb.year = f"{expedition}"
                    people = SurvexPersonRole.objects.filter(survexblock=sb)
                    team = []
                    for p in people:
                        team.append(p.personname)
                    sb.team = team

    walletslength = 0.0
    if year:
        wallets = Wallet.objects.filter(walletyear__year=year)
        for w in wallets:
            for sb in w.survexblock_set.all():
                walletslength += sb.legslength

    return render(request, "survexfilewild.html",
        {"addupsurvexlength": addupsurvexlength / 1000,
         "legsbyexpo": legsbyexpo,
         "nsurvexlegs": addupsurvexlegs,
         "walletslength": walletslength,
         "wildlength": wildlength,
         "svxwild": svxwild}
    )

def therionissues(request):
    """Page displaying contents of a file produced during data import"""
    logname = "therionrefs.log"
    logpath = Path(settings.PYTHON_PATH, logname)

    therionlog = []
    newlog = []

    if logpath.is_file():
        try:
            with open(logpath, "r") as f:
                therionlog = f.readlines()
            print(f"{logpath} has {len(therionlog)} entries")
        except Exception:
            msg = f"{logpath} EXCEPTION opening or reading therion logfile. Run a full reset."
            print(msg)
            return render(request, "therionreport.html", {"therionlog": newlog})
    else:
        print(f"{logpath} NOT FOUND: {len(therionlog)} entries")

    for line in therionlog:
        line = line.replace("! Un-parsed image filename:", "")
        newlog.append(line)
    return render(request, "therionreport.html", {"therionlog": newlog})

def surveximport(request):
    """Page displaying contents of a file produced during data import"""
    logname = "svxlinear.log"
    logpath = Path(settings.PYTHON_PATH, logname)
    contents = ""

    if logpath.is_file():
        with open(logpath, "r") as f:
            contents = f.read()
    else:
        print(f"{logpath} NOT FOUND")
    return render(request, "survexreport.html", {"log": contents})

def survexdebug(request):
    """Page displaying contents of a file produced during data import"""
    logname = "svxblks.log"
    logpath = Path(settings.PYTHON_PATH, logname)
    contents = ""

    if logpath.is_file():
        with open(logpath, "r") as f:
            contents = f.read()
    else:
        print(f"{logpath} NOT FOUND")
    return render(request, "survexdebug.html", {"log": contents})

def pathsreport(request):
    """The CONSTANTs declared in the settings and localsettings and how they have
    been evaluated for this specific installation - live"""
    pathsdict = OrderedDict()
    try:
        pathsdict = {
            # "BOGUS" : str( settings.BOGUS),
            "JSLIB_URL": str(settings.JSLIB_URL),
            "JSLIB_ROOT": str(settings.JSLIB_ROOT),
            # "CSSLIB_URL" : str( settings.CSSLIB_URL),
            "CAVEDESCRIPTIONS": str(settings.CAVEDESCRIPTIONS),
            "DIR_ROOT": str(settings.DIR_ROOT),
            "ENTRANCEDESCRIPTIONS": str(settings.ENTRANCEDESCRIPTIONS),
            "EXPOUSER_EMAIL": str(settings.EXPOUSER_EMAIL),
            "EXPOUSERPASS": str("<redacted>"),
            "EXPOUSER": str(settings.EXPOUSER),
            "EXPOWEB": str(settings.EXPOWEB),
            # "EXPOWEB_URL": str(settings.EXPOWEB_URL),
            # "FILES" : str( settings.FILES),
            "LIBDIR": str(settings.LIBDIR),
            "LOGFILE": str(settings.LOGFILE),
            "LOGIN_REDIRECT_URL": str(settings.LOGIN_REDIRECT_URL),
            "MEDIA_ROOT": str(settings.MEDIA_ROOT),
            "MEDIA_URL": str(settings.MEDIA_URL),
            "PHOTOS_URL": str(settings.PHOTOS_URL),
            "PHOTOS_ROOT": str(settings.PHOTOS_ROOT),
            "PYTHON_PATH": str(settings.PYTHON_PATH),
            "REPOS_ROOT_PATH": str(settings.REPOS_ROOT_PATH),
            "ROOT_URLCONF": str(settings.ROOT_URLCONF),
            "STATIC_URL": str(settings.STATIC_URL),
            "SURVEX_DATA": str(settings.SURVEX_DATA),
            "SCANS_ROOT": str(settings.SCANS_ROOT),
            # "SURVEYS" : str( settings.SURVEYS),
            # "SCANS_URL" : str( settings.SCANS_URL),
            "SURVEXPORT": str(settings.SURVEXPORT),
            "DRAWINGS_DATA": str(settings.DRAWINGS_DATA),
            "URL_ROOT": str(settings.URL_ROOT),
        }
    except Exception:
        pathsdict["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"

    pathstype = OrderedDict()
    try:
        pathstype = {
            # "BOGUS" : type(settings.BOGUS),
            "JSLIB_URL": type(settings.JSLIB_URL),
            "JSLIB_ROOT": type(settings.JSLIB_ROOT),
            # "CSSLIB_URL" : type(settings.CSSLIB_URL),
            "CAVEDESCRIPTIONS": type(settings.CAVEDESCRIPTIONS),
            "DIR_ROOT": type(settings.DIR_ROOT),
            "ENTRANCEDESCRIPTIONS": type(settings.ENTRANCEDESCRIPTIONS),
            "EXPOUSER_EMAIL": type(settings.EXPOUSER_EMAIL),
            "EXPOUSERPASS": type(settings.EXPOUSERPASS),
            "EXPOUSER": type(settings.EXPOUSER),
            "EXPOWEB": type(settings.EXPOWEB),
            # "EXPOWEB_URL": type(settings.EXPOWEB_URL),
            # "FILES" : type(settings.FILES),
            "LIBDIR": type(settings.LIBDIR),
            "LOGFILE": type(settings.LOGFILE),
            "LOGIN_REDIRECT_URL": type(settings.LOGIN_REDIRECT_URL),
            "MEDIA_ROOT": type(settings.MEDIA_ROOT),
            "MEDIA_URL": type(settings.MEDIA_URL),
            "PHOTOS_ROOT": type(settings.PHOTOS_ROOT),
            "PHOTOS_URL": type(settings.PHOTOS_URL),
            "PYTHON_PATH": type(settings.PYTHON_PATH),
            "REPOS_ROOT_PATH": type(settings.REPOS_ROOT_PATH),
            "ROOT_URLCONF": type(settings.ROOT_URLCONF),
            "STATIC_URL": type(settings.STATIC_URL),
            "SURVEX_DATA": type(settings.SURVEX_DATA),
            "SCANS_ROOT": type(settings.SCANS_ROOT),
            # "SURVEYS" : type(settings.SURVEYS),
            # "SCANS_URL" : type(settings.SCANS_URL),
            "SURVEXPORT": type(settings.SURVEXPORT),
            "DRAWINGS_DATA": type(settings.DRAWINGS_DATA),
            "URL_ROOT": type(settings.URL_ROOT),
        }
    except Exception:
        pathstype["! EXCEPTION !"] = "missing or extra string constant in troggle/settings"
        raise

    # settings are unique, but paths are not
    ncodes = len(pathsdict)
    bycodeslist = sorted(pathsdict.items())  # a list of tuples
    bycodeslist2 = []
    for k, p in bycodeslist:
        bycodeslist2.append((k, p, str(pathstype[k])))

    # pair each path with a settings name that maps to it, removing each key as it is matched
    bypaths = sorted(pathsdict.values())  # a list
    bypathslist = []
    for p in bypaths:
        for k in pathsdict.keys():
            if pathsdict[k] == p:
                bypathslist.append((p, k, str(pathstype[k])))
                del pathsdict[k]
                break

    return render(
        request,
        "pathsreport.html",
        {"pathsdict": pathsdict, "bycodeslist": bycodeslist2, "bypathslist": bypathslist, "ncodes": ncodes},
    )

def stats(request):
    """Calculates number of survey blocks, the number of survey legs and the survey length for each year.
    This is only underground survey legs, but includes ARGE as well as Expo survex files.
    """
    statsDict = {}
    statsDict["expoCount"] = f"{Expedition.objects.count():,}"
    statsDict["caveCount"] = f"{Cave.objects.count():,}"
    statsDict["personCount"] = f"{Person.objects.count():,}"
    statsDict["logbookEntryCount"] = f"{LogbookEntry.objects.count():,}"

    legsbyexpo = []
    addupsurvexlength = 0.0
    addupsurvexlegs = 0
    for expedition in Expedition.objects.all():
        survexblocks = expedition.survexblock_set.all()
        legsyear = 0
        survexleglength = 0.0
        for survexblock in survexblocks:
            survexleglength += survexblock.legslength
            legsyear += int(survexblock.legsall)
        addupsurvexlength += survexleglength
        addupsurvexlegs += legsyear
        legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
    legsbyexpo.reverse()

    renderDict = {
        **statsDict,
        **{"addupsurvexlength": addupsurvexlength / 1000, "legsbyexpo": legsbyexpo, "nsurvexlegs": addupsurvexlegs},
    }  # new syntax
    return render(request, "statistics.html", renderDict)

def dataissues(request):
    """Each issue has a parser, a message and a url linking to the offending object after loading"""

    def myFunc(di):
        return di.parser.lower() + di.message.lower()

    dilist = list(DataIssue.objects.all())
    dilist.sort(key=myFunc)
    return render(request, "dataissues.html", {"didict": dilist})

def eastings(request):
    """report each Northing/Easting pair wherever recorded"""
    ents = set()
    gpsents = set()

    def add_stations(e):
        try:
            ts = e.tag_station
            if ts:
                e.tag_ts = SurvexStation.objects.get(name=ts)
                # print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")
            os = e.other_station
            if os:
                e.tag_os = SurvexStation.objects.get(name=os)
                # print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")
        except Exception:
            e.tag_ss = None
            e.tag_es = None
            e.tag_os = None
            # print(f"exception for {e}")

    entrances = Entrance.objects.all()
    for e in entrances:
        if e.other_station:
            ents.add(e)
            add_stations(e)
        # if e.easting or e.northing:
        # ents.add(e)
        # add_stations(e)
        # e.northing = float(e.northing)
        # e.easting = float(e.easting)
        # if e.northing < 5200000:
        # e.bmn = True
        # e.bmn_n = e.northing
        # # e.northing = e.northing + 5200000
        # e.northing = e.northing + 5198919.918
        # e.bmn_e = e.easting
        # #e.easting = e.easting - 36000 + 486000
        # e.easting = e.easting + 374854.63 # linear hack
        # if e.alt:
        # e.alt = e.alt.replace("m","")
        # #print(f"{e.slug} '{e.alt}'")
        # e.bmn_alt = e.alt
        # else:
        # e.bmn_alt = 1000
        # e.pslug = e.slug.replace("1623-","x")
        # try:
        # e.diffx = e.easting - e.best_station_object().x
        # e.diffy = e.northing - e.best_station_object().y
        # e.error = sqrt(e.diffx**2 + e.diffy**2)
        # except:
        # pass

    for e in entrances:
        if e.lat_wgs84 or e.long_wgs84:
            gpsents.add(e)
            add_stations(e)

    stations = SurvexStation.objects.all()  # NB these are NOT all the stations in troggle_import_root.pos
    return render(request, "eastings.html", {"ents": ents, "gpsents": gpsents, "stations": stations})

def aliases(request, year):
    """Page which displays a list of all the person aliases in a specific year"""
    if not year:
        year = 1998
    expo = Expedition.objects.filter(year=year)[0]  # filter() returns a QuerySet, even though we know there is only one match

    personexpeditions = PersonExpedition.objects.filter(expedition=expo)
    persons = list(Person.objects.all().order_by("last_name"))

    aliases = GetPersonExpeditionNameLookup(expo)
    aliasdict = {}
    for i in sorted(aliases):
        aliasdict[i] = aliases[i]
    invert = {}

    return render(
        request,
        "aliases.html",
        {
            "year": year,
            "aliasdict": aliasdict,
            "foreign_friends": foreign_friends,
            "invert": invert,
            "personexpeditions": personexpeditions,
            "persons": persons,
        },
    )