2022-04-05 08:37:31 +01:00
|
|
|
from collections import OrderedDict
|
2023-10-10 23:03:28 +01:00
|
|
|
from math import sqrt
|
2023-01-26 23:36:56 +00:00
|
|
|
from pathlib import Path
|
2022-04-05 08:37:31 +01:00
|
|
|
|
|
|
|
from django.shortcuts import render
|
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
import troggle.settings as settings
|
2023-01-29 16:47:46 +00:00
|
|
|
from troggle.core.models.caves import Cave, Entrance
|
|
|
|
from troggle.core.models.logbooks import LogbookEntry
|
2023-08-05 10:57:38 +01:00
|
|
|
from troggle.core.models.survex import SurvexStation, SurvexPersonRole
|
2023-01-30 19:04:36 +00:00
|
|
|
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
|
2023-08-05 12:15:35 +01:00
|
|
|
from troggle.core.models.wallets import Wallet
|
2023-01-30 19:04:36 +00:00
|
|
|
from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends
|
2023-01-19 18:35:56 +00:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
# from django.views.generic.list import ListView
|
|
|
|
"""Very simple report pages summarizing data about the whole set of expeditions and of
|
2023-01-29 12:11:47 +00:00
|
|
|
the status of data inconsistencies
|
2023-01-30 19:04:36 +00:00
|
|
|
"""
|
|
|
|
|
2023-08-05 10:43:06 +01:00
|
|
|
def svxfilewild(request, year=None):
|
|
|
|
"""Looks for survexfiles which do not have an associated
|
|
|
|
wallet, per year
|
|
|
|
"""
|
|
|
|
legsbyexpo = []
|
|
|
|
addupsurvexlength = 0.0
|
|
|
|
addupsurvexlegs = 0
|
2023-08-05 12:15:35 +01:00
|
|
|
|
2023-08-05 10:43:06 +01:00
|
|
|
if not year:
|
|
|
|
expos = Expedition.objects.all()
|
|
|
|
else:
|
|
|
|
expos = Expedition.objects.filter(year=year)
|
2023-08-05 12:15:35 +01:00
|
|
|
|
2023-08-05 10:43:06 +01:00
|
|
|
for expedition in expos:
|
|
|
|
survexblocks = expedition.survexblock_set.all()
|
|
|
|
legsyear = 0
|
|
|
|
survexleglength = 0.0
|
|
|
|
for survexblock in survexblocks:
|
|
|
|
survexleglength += survexblock.legslength
|
|
|
|
legsyear += int(survexblock.legsall)
|
|
|
|
addupsurvexlength += survexleglength
|
|
|
|
addupsurvexlegs += legsyear
|
|
|
|
legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
|
|
|
|
legsbyexpo.reverse()
|
|
|
|
|
|
|
|
svxwild = []
|
2023-08-05 12:15:35 +01:00
|
|
|
wildlength = 0.0
|
2023-08-05 10:43:06 +01:00
|
|
|
for expedition in expos:
|
|
|
|
survexblocks = expedition.survexblock_set.all()
|
|
|
|
for sb in survexblocks:
|
|
|
|
# print(f"{sb=}")
|
|
|
|
if sb.scanswallet == None:
|
|
|
|
if sb.name != "rootblock":
|
|
|
|
svxwild.append(sb)
|
|
|
|
print(f" WILD {sb.survexfile} {sb.date}")
|
2023-08-05 12:15:35 +01:00
|
|
|
wildlength += sb.legslength
|
2023-08-05 11:14:45 +01:00
|
|
|
sb.year = f"{expedition}"
|
2023-08-05 10:57:38 +01:00
|
|
|
people = SurvexPersonRole.objects.filter(survexblock=sb)
|
|
|
|
team = []
|
|
|
|
for p in people:
|
|
|
|
team.append(p.personname)
|
|
|
|
sb.team = team
|
2023-08-05 12:15:35 +01:00
|
|
|
walletslength = 0.0
|
|
|
|
if year:
|
|
|
|
wallets = Wallet.objects.filter(walletyear__year=year)
|
|
|
|
for w in wallets:
|
|
|
|
for sb in w.survexblock_set.all():
|
|
|
|
walletslength += sb.legslength
|
|
|
|
|
2023-08-05 10:43:06 +01:00
|
|
|
return render(request, "survexfilewild.html",
|
|
|
|
{"addupsurvexlength": addupsurvexlength / 1000,
|
|
|
|
"legsbyexpo": legsbyexpo,
|
|
|
|
"nsurvexlegs": addupsurvexlegs,
|
2023-08-05 12:15:35 +01:00
|
|
|
"walletslength": walletslength,
|
|
|
|
"wildlength": wildlength,
|
2023-08-05 10:43:06 +01:00
|
|
|
"svxwild": svxwild}
|
|
|
|
)
|
|
|
|
|
2023-01-26 22:36:49 +00:00
|
|
|
def therionissues(request):
|
2023-01-29 12:11:47 +00:00
|
|
|
"""Page displaying contents of a file produced during data import"""
|
2023-01-26 23:36:56 +00:00
|
|
|
logname = "therionrefs.log"
|
2023-01-30 19:04:36 +00:00
|
|
|
logpath = Path(settings.PYTHON_PATH, logname)
|
2023-01-26 23:36:56 +00:00
|
|
|
therionlog = []
|
|
|
|
newlog = []
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2023-01-26 23:36:56 +00:00
|
|
|
if Path(logpath).is_file:
|
2023-08-25 19:49:05 +01:00
|
|
|
try:
|
|
|
|
with open(logpath, "r") as f:
|
|
|
|
therionlog = f.readlines()
|
|
|
|
print(f"{logpath} has {len(therionlog)} entries")
|
|
|
|
except:
|
|
|
|
msg = f"{logpath} EXCEPTION opening or reading therion logfile. Run a full reset."
|
|
|
|
print(msg)
|
|
|
|
return render(request, "therionreport.html", {"therionlog": newlog})
|
2023-01-26 23:36:56 +00:00
|
|
|
else:
|
2023-08-25 19:49:05 +01:00
|
|
|
print(f"{logpath} NOT FOUND: {len(therionlog)} entries")
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2023-01-26 23:36:56 +00:00
|
|
|
for line in therionlog:
|
|
|
|
line = line.replace("! Un-parsed image filename:", "")
|
|
|
|
newlog.append(line)
|
2023-01-30 19:04:36 +00:00
|
|
|
return render(request, "therionreport.html", {"therionlog": newlog})
|
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
|
2023-01-29 12:11:47 +00:00
|
|
|
def surveximport(request):
|
|
|
|
"""Page displaying contents of a file produced during data import"""
|
|
|
|
logname = "svxlinear.log"
|
2023-01-30 19:04:36 +00:00
|
|
|
logpath = Path(settings.PYTHON_PATH, logname)
|
|
|
|
|
2023-01-29 12:11:47 +00:00
|
|
|
if Path(logpath).is_file:
|
|
|
|
with open(logpath, "r") as f:
|
2023-01-30 19:04:36 +00:00
|
|
|
contents = f.read()
|
2023-01-29 12:11:47 +00:00
|
|
|
else:
|
|
|
|
print(f"{logpath} NOT FOUND {len(contents)}")
|
2023-01-30 19:04:36 +00:00
|
|
|
return render(request, "survexreport.html", {"log": contents})
|
|
|
|
|
2023-01-29 12:41:50 +00:00
|
|
|
|
|
|
|
def survexdebug(request):
|
|
|
|
"""Page displaying contents of a file produced during data import"""
|
|
|
|
logname = "svxblks.log"
|
2023-01-30 19:04:36 +00:00
|
|
|
logpath = Path(settings.PYTHON_PATH, logname)
|
|
|
|
|
2023-01-29 12:41:50 +00:00
|
|
|
if Path(logpath).is_file:
|
|
|
|
with open(logpath, "r") as f:
|
2023-01-30 19:04:36 +00:00
|
|
|
contents = f.read()
|
2023-01-29 12:41:50 +00:00
|
|
|
else:
|
|
|
|
print(f"{logpath} NOT FOUND {len(contents)}")
|
2023-01-30 19:04:36 +00:00
|
|
|
return render(request, "survexdebug.html", {"log": contents})
|
2023-01-29 12:11:47 +00:00
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
|
|
|
|
def pathsreport(request):
|
2023-01-29 12:11:47 +00:00
|
|
|
"""The CONSTANTs declared in the settings and localsettings and how they have
|
2023-01-30 19:04:36 +00:00
|
|
|
been evaluated for this specific installation - live"""
|
2022-04-05 08:37:31 +01:00
|
|
|
pathsdict = OrderedDict()
|
|
|
|
try:
|
|
|
|
pathsdict = {
|
2023-01-30 19:04:36 +00:00
|
|
|
# "BOGUS" : str( settings.BOGUS),
|
|
|
|
"JSLIB_URL": str(settings.JSLIB_URL),
|
|
|
|
"JSLIB_ROOT": str(settings.JSLIB_ROOT),
|
|
|
|
# "CSSLIB_URL" : str( settings.CSSLIB_URL),
|
|
|
|
"CAVEDESCRIPTIONS": str(settings.CAVEDESCRIPTIONS),
|
|
|
|
"DIR_ROOT": str(settings.DIR_ROOT),
|
|
|
|
"ENTRANCEDESCRIPTIONS": str(settings.ENTRANCEDESCRIPTIONS),
|
|
|
|
"EXPOUSER_EMAIL": str(settings.EXPOUSER_EMAIL),
|
|
|
|
"EXPOUSERPASS": str("<redacted>"),
|
|
|
|
"EXPOUSER": str(settings.EXPOUSER),
|
|
|
|
"EXPOWEB": str(settings.EXPOWEB),
|
2023-04-05 20:42:09 +01:00
|
|
|
# "EXPOWEB_URL": str(settings.EXPOWEB_URL),
|
2023-01-30 19:04:36 +00:00
|
|
|
# "FILES" : str( settings.FILES),
|
|
|
|
"LIBDIR": str(settings.LIBDIR),
|
|
|
|
"LOGFILE": str(settings.LOGFILE),
|
|
|
|
"LOGIN_REDIRECT_URL": str(settings.LOGIN_REDIRECT_URL),
|
|
|
|
"MEDIA_ROOT": str(settings.MEDIA_ROOT),
|
|
|
|
"MEDIA_URL": str(settings.MEDIA_URL),
|
|
|
|
"PHOTOS_URL": str(settings.PHOTOS_URL),
|
2023-04-05 20:42:09 +01:00
|
|
|
"PHOTOS_ROOT": str(settings.PHOTOS_ROOT),
|
2023-01-30 19:04:36 +00:00
|
|
|
"PYTHON_PATH": str(settings.PYTHON_PATH),
|
|
|
|
"REPOS_ROOT_PATH": str(settings.REPOS_ROOT_PATH),
|
|
|
|
"ROOT_URLCONF": str(settings.ROOT_URLCONF),
|
|
|
|
"STATIC_URL": str(settings.STATIC_URL),
|
|
|
|
"SURVEX_DATA": str(settings.SURVEX_DATA),
|
|
|
|
"SCANS_ROOT": str(settings.SCANS_ROOT),
|
|
|
|
# "SURVEYS" : str( settings.SURVEYS),
|
|
|
|
# "SCANS_URL" : str( settings.SCANS_URL),
|
|
|
|
"SURVEXPORT": str(settings.SURVEXPORT),
|
|
|
|
"DRAWINGS_DATA": str(settings.DRAWINGS_DATA),
|
|
|
|
"URL_ROOT": str(settings.URL_ROOT),
|
2022-04-05 08:37:31 +01:00
|
|
|
}
|
|
|
|
except:
|
|
|
|
pathsdict["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
|
|
|
|
|
|
|
|
pathstype = OrderedDict()
|
|
|
|
try:
|
|
|
|
pathstype = {
|
2023-01-30 19:04:36 +00:00
|
|
|
# "BOGUS" : type(settings.BOGUS),
|
|
|
|
"JSLIB_URL": type(settings.JSLIB_URL),
|
|
|
|
"JSLIB_ROOT": type(settings.JSLIB_ROOT),
|
|
|
|
# "CSSLIB_URL" : type(settings.CSSLIB_URL),
|
|
|
|
"CAVEDESCRIPTIONS": type(settings.CAVEDESCRIPTIONS),
|
|
|
|
"DIR_ROOT": type(settings.DIR_ROOT),
|
|
|
|
"ENTRANCEDESCRIPTIONS": type(settings.ENTRANCEDESCRIPTIONS),
|
|
|
|
"EXPOUSER_EMAIL": type(settings.EXPOUSER_EMAIL),
|
|
|
|
"EXPOUSERPASS": type(settings.EXPOUSERPASS),
|
|
|
|
"EXPOUSER": type(settings.EXPOUSER),
|
|
|
|
"EXPOWEB": type(settings.EXPOWEB),
|
2023-04-05 20:42:09 +01:00
|
|
|
# "EXPOWEB_URL": type(settings.EXPOWEB_URL),
|
2023-01-30 19:04:36 +00:00
|
|
|
# "FILES" : type(settings.FILES),
|
|
|
|
"LIBDIR": type(settings.LIBDIR),
|
|
|
|
"LOGFILE": type(settings.LOGFILE),
|
|
|
|
"LOGIN_REDIRECT_URL": type(settings.LOGIN_REDIRECT_URL),
|
|
|
|
"MEDIA_ROOT": type(settings.MEDIA_ROOT),
|
|
|
|
"MEDIA_URL": type(settings.MEDIA_URL),
|
2023-04-05 20:42:09 +01:00
|
|
|
"PHOTOS_ROOT": type(settings.PHOTOS_ROOT),
|
2023-01-30 19:04:36 +00:00
|
|
|
"PHOTOS_URL": type(settings.PHOTOS_URL),
|
|
|
|
"PYTHON_PATH": type(settings.PYTHON_PATH),
|
|
|
|
"REPOS_ROOT_PATH": type(settings.REPOS_ROOT_PATH),
|
|
|
|
"ROOT_URLCONF": type(settings.ROOT_URLCONF),
|
|
|
|
"STATIC_URL": type(settings.STATIC_URL),
|
|
|
|
"SURVEX_DATA": type(settings.SURVEX_DATA),
|
|
|
|
"SCANS_ROOT": type(settings.SCANS_ROOT),
|
|
|
|
# "SURVEYS" : type(settings.SURVEYS),
|
|
|
|
# "SCANS_URL" : type(settings.SCANS_URL),
|
|
|
|
"SURVEXPORT": type(settings.SURVEXPORT),
|
|
|
|
"DRAWINGS_DATA": type(settings.DRAWINGS_DATA),
|
|
|
|
"URL_ROOT": type(settings.URL_ROOT),
|
2022-04-05 08:37:31 +01:00
|
|
|
}
|
|
|
|
except:
|
|
|
|
pathstype["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
|
|
|
|
raise
|
|
|
|
|
|
|
|
# settings are unique by paths are not
|
|
|
|
ncodes = len(pathsdict)
|
2023-01-30 19:04:36 +00:00
|
|
|
bycodeslist = sorted(pathsdict.items()) # a list of tuples
|
2022-04-05 08:37:31 +01:00
|
|
|
bycodeslist2 = []
|
|
|
|
|
|
|
|
for k, p in bycodeslist:
|
|
|
|
bycodeslist2.append((k, p, str(pathstype[k])))
|
2023-01-30 19:04:36 +00:00
|
|
|
|
|
|
|
bypaths = sorted(pathsdict.values()) # a list
|
2022-04-05 08:37:31 +01:00
|
|
|
bypathslist = []
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
for p in bypaths:
|
|
|
|
for k in pathsdict.keys():
|
|
|
|
if pathsdict[k] == p:
|
|
|
|
bypathslist.append((p, k, str(pathstype[k])))
|
|
|
|
del pathsdict[k]
|
|
|
|
break
|
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
return render(
|
|
|
|
request,
|
|
|
|
"pathsreport.html",
|
|
|
|
{"pathsdict": pathsdict, "bycodeslist": bycodeslist2, "bypathslist": bypathslist, "ncodes": ncodes},
|
|
|
|
)
|
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
|
|
|
|
def stats(request):
|
2023-03-12 01:09:17 +00:00
|
|
|
"""Calculates number of survey blocks, the number of survey legs and the survey length for each year.
|
|
|
|
This is only underground survey legs, but includes ARGE as well as Expo survex files.
|
|
|
|
"""
|
2023-01-30 19:04:36 +00:00
|
|
|
statsDict = {}
|
|
|
|
statsDict["expoCount"] = f"{Expedition.objects.count():,}"
|
|
|
|
statsDict["caveCount"] = f"{Cave.objects.count():,}"
|
|
|
|
statsDict["personCount"] = f"{Person.objects.count():,}"
|
|
|
|
statsDict["logbookEntryCount"] = f"{LogbookEntry.objects.count():,}"
|
2022-04-05 08:37:31 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
legsbyexpo = []
|
2023-03-12 01:09:17 +00:00
|
|
|
addupsurvexlength = 0.0
|
2022-04-18 21:33:04 +01:00
|
|
|
addupsurvexlegs = 0
|
2022-04-05 08:37:31 +01:00
|
|
|
for expedition in Expedition.objects.all():
|
|
|
|
survexblocks = expedition.survexblock_set.all()
|
2023-01-30 19:04:36 +00:00
|
|
|
legsyear = 0
|
2022-04-05 08:37:31 +01:00
|
|
|
survexleglength = 0.0
|
|
|
|
for survexblock in survexblocks:
|
|
|
|
survexleglength += survexblock.legslength
|
2022-04-18 21:33:04 +01:00
|
|
|
legsyear += int(survexblock.legsall)
|
2022-04-05 08:37:31 +01:00
|
|
|
addupsurvexlength += survexleglength
|
2022-04-18 21:33:04 +01:00
|
|
|
addupsurvexlegs += legsyear
|
2023-03-12 01:09:17 +00:00
|
|
|
legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
|
2023-01-30 19:04:36 +00:00
|
|
|
legsbyexpo.reverse()
|
|
|
|
|
|
|
|
renderDict = {
|
|
|
|
**statsDict,
|
|
|
|
**{"addupsurvexlength": addupsurvexlength / 1000, "legsbyexpo": legsbyexpo, "nsurvexlegs": addupsurvexlegs},
|
|
|
|
} # new syntax
|
|
|
|
return render(request, "statistics.html", renderDict)
|
2022-04-05 08:37:31 +01:00
|
|
|
|
|
|
|
|
|
|
|
def dataissues(request):
|
2023-01-30 19:04:36 +00:00
|
|
|
"""Each issue has a parser, a message and a url linking to the offending object after loading"""
|
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
def myFunc(di):
|
|
|
|
return di.parser.lower() + di.message.lower()
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
dilist = list(DataIssue.objects.all())
|
2023-01-30 19:04:36 +00:00
|
|
|
dilist.sort(key=myFunc)
|
|
|
|
|
|
|
|
return render(request, "dataissues.html", {"didict": dilist})
|
|
|
|
|
2022-04-05 08:37:31 +01:00
|
|
|
|
2023-10-27 20:28:10 +01:00
|
|
|
def stations(request):
|
2023-01-30 19:04:36 +00:00
|
|
|
"""report each Northing/Easting pair wherever recorded"""
|
2023-10-10 23:03:28 +01:00
|
|
|
ents = set()
|
|
|
|
gpsents = set()
|
|
|
|
|
|
|
|
def add_stations(e):
|
2023-09-16 20:46:17 +01:00
|
|
|
try:
|
|
|
|
ts = e.tag_station
|
|
|
|
if ts:
|
|
|
|
e.tag_ts = SurvexStation.objects.get(name=ts)
|
2023-10-07 00:26:52 +01:00
|
|
|
#print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")
|
2023-10-11 22:58:20 +01:00
|
|
|
|
2023-09-16 20:46:17 +01:00
|
|
|
|
|
|
|
os = e.other_station
|
|
|
|
if os:
|
|
|
|
e.tag_os = SurvexStation.objects.get(name=os)
|
2023-10-07 00:26:52 +01:00
|
|
|
#print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")
|
2023-09-16 20:46:17 +01:00
|
|
|
|
|
|
|
except:
|
|
|
|
e.tag_ss = None
|
|
|
|
e.tag_es = None
|
|
|
|
e.tag_os = None
|
|
|
|
# print(f"exception for {e}")
|
2023-10-10 23:03:28 +01:00
|
|
|
|
|
|
|
entrances = Entrance.objects.all()
|
|
|
|
for e in entrances:
|
2023-10-11 22:58:20 +01:00
|
|
|
if e.other_station:
|
2023-10-10 23:03:28 +01:00
|
|
|
ents.add(e)
|
|
|
|
add_stations(e)
|
2023-10-27 20:28:10 +01:00
|
|
|
|
2023-10-10 23:03:28 +01:00
|
|
|
for e in entrances:
|
|
|
|
if e.lat_wgs84 or e.long_wgs84:
|
|
|
|
gpsents.add(e)
|
|
|
|
add_stations(e)
|
|
|
|
|
|
|
|
|
|
|
|
stations = SurvexStation.objects.all() # NB these are NOT all the stations in troggle_import_root.pos
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2023-10-27 20:28:10 +01:00
|
|
|
return render(request, "stations.html", {"ents": ents, "gpsents": gpsents, "stations": stations})
|
2023-01-30 19:04:36 +00:00
|
|
|
|
|
|
|
|
2022-10-08 22:17:53 +01:00
|
|
|
def aliases(request, year):
|
2023-01-30 19:04:36 +00:00
|
|
|
"""Page which displays a list of all the person aliases in a specific year"""
|
|
|
|
|
2022-10-08 22:17:53 +01:00
|
|
|
if not year:
|
|
|
|
year = 1998
|
2023-01-30 19:04:36 +00:00
|
|
|
expo = Expedition.objects.filter(year=year)[0] # returns a set, even though we know there is only one
|
2022-10-08 22:17:53 +01:00
|
|
|
personexpeditions = PersonExpedition.objects.filter(expedition=expo)
|
2023-01-30 19:04:36 +00:00
|
|
|
persons = list(Person.objects.all().order_by("last_name"))
|
|
|
|
|
2022-10-09 21:50:32 +01:00
|
|
|
aliases = GetPersonExpeditionNameLookup(expo)
|
2023-01-30 19:04:36 +00:00
|
|
|
|
|
|
|
aliasdict = {}
|
2022-10-09 21:50:32 +01:00
|
|
|
for i in sorted(aliases):
|
2023-01-30 19:04:36 +00:00
|
|
|
aliasdict[i] = aliases[i]
|
|
|
|
invert = {}
|
2022-10-09 00:29:53 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
return render(
|
|
|
|
request,
|
|
|
|
"aliases.html",
|
|
|
|
{
|
|
|
|
"year": year,
|
|
|
|
"aliasdict": aliasdict,
|
|
|
|
"foreign_friends": foreign_friends,
|
|
|
|
"invert": invert,
|
|
|
|
"personexpeditions": personexpeditions,
|
|
|
|
"persons": persons,
|
|
|
|
},
|
|
|
|
)
|