import os
import re
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path

from django.db import models
from django.template import loader

import settings
from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexStation, utmToLatLng
from troggle.core.models.troggle import DataIssue, TroggleModel
from troggle.core.utils import TROG, writetrogglefile

# Use the TROG global object to cache the cave lookup list. No good for
# multi-user use, or even multi-page. Pointless in fact.
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

# The TROG-cached values are discarded immediately: GetCaveLookup() below
# rebuilds both lazily on first call.
Gcavelookup = None
Gcave_count = None

"""The model declarations for Areas, Caves and Entrances
"""

todo = """
- Find out why we have separate objects CaveSlug and why
  these are not just a single field on the Model. This was Martin's idea,
  but we are using text aliases now so we only need one slug in the data model
- Can we rewrite things to eliminate the CaveSlug and objects? Surely
  foreign keys work fine ?!
- Why do we have CaveAndEntrance objects ?
- move the aliases list from the code and put into an editable file
- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
  or replace by a unique 'slug' field, better.
"""
class CaveAndEntrance(models.Model):
    """Junction row tying one Cave to one Entrance (with an optional letter).

    This class is ONLY used to create a FormSet for editing the cave and all
    its entrances in one form. CASCADE means that deleting either the cave or
    the entrance also deletes this CaveAndEntrance row.
    NOT NEEDED anymore if we insist that cave:entrances have 1:n multiplicity.
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    entranceletter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entranceletter"]]
        ordering = ["entranceletter"]

    def __str__(self):
        # e.g. "1623-161b" — cave slug immediately followed by the letter
        return f"{self.cave}{self.entranceletter}"
# class CaveSlug(models.Model):
# moved to models/logbooks.py to avoid cyclic import problem. No I don't know why either.
class Cave(TroggleModel):
    """A cave: most free-text fields mirror the cave_data XML file verbatim.

    (far) too much here perhaps,
    """

    areacode = models.CharField(max_length=4, blank=True, null=True)  # could use models.IntegerChoices
    subarea = models.CharField(max_length=25, blank=True, null=True)  # 9, 8c etc.
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)  # if a cave is 'pending' this is not set. Otherwise it is.
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key?
    survey = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique=True)

    class Meta:
        # we do not enforce uniqueness at the db level as that causes confusing errors for users.
        # unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
        ordering = ("kataster_code", "unofficial_number")

    def slug(self):
        """Return the primary CaveSlug's slug, else any slug, else None."""
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        slugs = self.caveslug_set.filter()
        if slugs:
            return slugs[0].slug
        return None

    def ours(self):
        """True if CUCC appears in the explorers text.

        Bug fix: explorers is nullable; guard against None before re.search.
        """
        return bool(re.search(r"CUCC", self.explorers or ""))

    def number(self):
        """Kataster number if assigned, otherwise the unofficial number."""
        if self.kataster_number:
            return self.kataster_number
        return self.unofficial_number

    def get_absolute_url(self):
        # we do not use URL_ROOT any more. The old kataster/unofficial
        # branches here were dead code (no effect) and have been removed.
        return self.url  # not good Django style? NEEDS actual URL

    def url_parent(self):
        """Parent directory of this cave's url, or a placeholder if unset."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        return "NO cave.url"

    def __str__(self, sep=": "):
        # sep is unused but kept for backward compatibility with callers.
        return str(self.slug())

    def get_open_QMs(self):
        """Return all open (unticked) QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        return qms.filter(ticked=False)  # a QuerySet

    def get_ticked_QMs(self):
        """Return all ticked (done) QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by("expoyear", "block__date")
        return qms.filter(ticked=True)  # a QuerySet

    def get_QMs(self):
        """Return all QMs (open and ticked) for this cave as one QuerySet."""
        return self.get_open_QMs() | self.get_ticked_QMs()  # set union operation

    def entrances(self):
        # NOTE(review): this method shadows the 'entrances' ManyToManyField
        # declared above, making that field inaccessible under this name.
        # Kept as-is because callers rely on cave.entrances() returning
        # CaveAndEntrance rows.
        return CaveAndEntrance.objects.filter(cave=self)

    def no_location(self):
        """True if no entrance of this cave has any survex station.

        Bug fix: best_station is a method; the previous code tested the
        bound-method object (always truthy), so this always returned False
        for caves with entrances.
        """
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance.best_station():
                return False
        return True

    def singleentrance(self):
        """True if the cave has exactly one entrance."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compressed listing of entrance letters, e.g. "a–c, e".

        Bug fix: the previous implementation never restarted its run
        tracking after a non-consecutive letter, so output letters were
        duplicated and mis-joined.
        Assumes entrance letters are single characters — TODO confirm
        (the field allows up to 20 chars).
        """
        letters = sorted(
            ce.entranceletter
            for ce in CaveAndEntrance.objects.filter(cave=self)
            if ce.entranceletter
        )
        runs = []  # list of [first, last] for each consecutive run
        for letter in letters:
            if runs and chr(ord(runs[-1][1]) + 1) == letter:
                runs[-1][1] = letter  # extend the current run
            else:
                runs.append([letter, letter])  # start a new run
        return ", ".join(a if a == b else f"{a}–{b}" for a, b in runs)

    def writeDataFile(self):
        """Render and write this cave's cave_data XML file to disk."""
        filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)
        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        print(now)  # debug noise — shows when each file is regenerated
        c = dict({"cave": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def file_output(self):
        """Return (path, content, encoding) for re-saving the cave_data file."""
        filepath = Path(settings.CAVEDESCRIPTIONS, self.filename)
        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")
class Entrance(TroggleModel):
    """An entrance to a cave; locations come from associated survex stations."""

    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)  # apparently? manually entered not calculated
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    northing = models.TextField(blank=True, null=True)  # apparently? manually entered not calculated
    other_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    underground_description = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entranceletter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        """Return the single SurvexStation named `station`, or None.

        If several stations share the name, report them and return the first.
        """
        try:
            single = SurvexStation.objects.get(name=station)
            return single
        except Exception:  # DoesNotExist or MultipleObjectsReturned
            stations = SurvexStation.objects.filter(name=station)
            print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
            if len(stations) > 1:
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}")  # .id is Django internal field, not one of ours
                return stations[0]
            else:
                return None

    def singleletter(self):
        """Return the entrance letter for this entrance on its first cave.

        Used in template/dataformat/cave.xml to write out a replacement
        cave_data file. Returns "Z" on failure, "Y" for a blank letter.
        Bug fix: cavelist is a method — the missing call parentheses meant
        indexing always raised and this always returned "Z".
        """
        cavelist = self.cavelist()
        try:
            first = cavelist[0]
            ce = CaveAndEntrance.objects.get(entrance=self, cave=first)
        except Exception:
            # will fail if no caves in cavelist or if the cave isn't in the db
            return "Z"
        print(f"singleletter() access for first cave in {cavelist=}")
        if ce.entranceletter == "":
            print("### BLANK LETTER")
            return "Y"
        else:
            letter = ce.entranceletter
            print(f"### LETTER {letter}")
            return letter

    def exact_location(self):
        return self.single(self.exact_station)

    def other_location(self):
        return self.single(self.other_station)

    def find_location(self):
        """Human-readable location: first of tag/exact/other station, else bearings.

        Bug fixes: the exact/other failure messages previously reported
        tag_station; the surveyed-but-no-point check previously compared the
        FINDABLE_CHOICES tuple (never "S"); a null findability no longer
        raises KeyError.
        """
        prefixes = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}
        r = prefixes.get(self.findability, "")  # findability may be None
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except Exception:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except Exception:
                return r + f"{self.exact_station} Exact Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except Exception:
                return r + f"{self.other_station} Other Station not in dataset"
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Best available station name: tag, else exact, else other, else None."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station
        return None

    def best_station_object(self):
        # May raise DoesNotExist if the station is not in the dataset.
        bs = self.best_station()
        return SurvexStation.objects.get(name=bs)

    def has_photo(self):
        """Return "Yes" if photo text contains image/link markup, "Missing"
        if there is photo text without markup, "No" if there is none.
        """
        if not self.photo:
            return "No"
        # NOTE(review): the searched-for strings were corrupted in this copy
        # of the file (HTML angle-bracket content lost); assuming the intent
        # is to detect embedded image or link tags — confirm against upstream.
        if any(self.photo.find(tag) > -1 for tag in ("<img", "<a ", "<IMG", "<A ")):
            return "Yes"
        return "Missing"

    def marking_val(self):
        """Display label for the marking code, or None if unrecognised."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]
        return None

    def findability_val(self):
        """Display label for the findability code, or None if unrecognised."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]
        return None

    def tag(self):
        return self.single(self.tag_station)

    def needs_surface_work(self):
        """True unless surveyed, photographed and tagged.

        Bug fix: has_photo is a method — the previous code tested the
        always-truthy bound method, so the photo test never fired.
        """
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_root() and self.title are not defined in this
        # file — presumably legacy; verify this method is still reachable.
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def cavelist(self):
        """All caves this entrance belongs to (usually exactly one)."""
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def get_file_path(self):
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        """Return (path, content, encoding) for re-saving the entrance_data file."""
        filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))
        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render and write this entrance's entrance_data XML file to disk."""
        filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)
        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def url_parent(self):
        """Parent of this entrance's url, falling back to its single cave's."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        cavelist = self.cavelist()
        if len(cavelist) == 1:
            return cavelist[0].url_parent()
        return ""

    def latlong(self):
        """Get (lat, long) from the associated stations, or None.

        Precedence is exact > tag > other: later successful lookups
        overwrite earlier ones.
        """
        # if self.easting and self.northing: # hmm not a good idea I think
        #     return utmToLatLng(33, float(self.easting), float(self.northing), northernHemisphere=True)
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name=self.other_station)
            except Exception:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name=self.tag_station)
            except Exception:
                pass
        if self.exact_station:
            try:
                station = SurvexStation.objects.get(name=self.exact_station)
            except Exception:
                pass
        if station:
            return station.latlong()
        return None

    def lat(self):
        ll = self.latlong()
        return ll[0] if ll else None

    def long(self):
        ll = self.latlong()
        return ll[1] if ll else None
def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any
    string which might serve to identify it.

    Lookup function modelled on GetPersonExpeditionNameLookup.
    Repeated assignment each call, needs refactoring.
    Used when parsing wallets contents.json file too in views/uploads.py.

    Needs to be a proper function that raises an exception if there is a
    duplicate. OR we could set it to return None if there are duplicates, and
    require the caller to fall back on doing the actual database query it
    wants rather than using this cache shortcut.
    """
    duplicates = {}

    def checkcaveid(cave, id):
        """Register `id` -> `cave`; record `id` as ambiguous if it already
        maps to a different cave."""
        global Gcavelookup
        if id not in Gcavelookup:
            Gcavelookup[id] = cave
            Gcave_count[id] += 1
        else:
            if cave == Gcavelookup[id]:
                pass  # same id, same cave
            else:  # same id but different cave, e.g. 122 => 1623-122 and 1626-122
                duplicates[id] = 1

    global Gcavelookup
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}
    global Gcave_count
    Gcave_count = defaultdict(int)  # sets default value to int(0)

    for cave in Cave.objects.all():
        key = cave.official_name.lower()
        if key != "" and key != "unamed" and key != "unnamed":
            if Gcave_count[key] > 0:
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[key] = 1
            else:
                Gcavelookup[key] = cave
                Gcave_count[key] += 1
        if cave.kataster_number:
            # NOTE this will set an alias for "145" not "1623-145"
            checkcaveid(cave, cave.kataster_number)  # we do expect 1623/55 and 1626/55 to cause clash, removed below
        # the rest of these are 'nice to have' but may validly already be set
        if cave.unofficial_number:
            unoffn = cave.unofficial_number.lower()
            checkcaveid(cave, unoffn)
        if cave.filename:
            # this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
            fn = cave.filename.replace(".html", "").lower()
            checkcaveid(cave, fn)
        if cave.slug():
            # also possibly done already
            slug = cave.slug().lower()
            checkcaveid(cave, slug)

    # These might also create more duplicate entries
    # Yes, this should be set in, and imported from, settings.py
    # On reset, these aliases only work if the cave already properly exists with an entry in :expoweb:/cave_data/
    # but as the aliases are recomputed repeatedly, eventually they work on PENDING caves too
    aliases = [
        ("1987-02", "1623-267"),
        ("1990-01", "1623-171"),
        ("1990-02", "1623-172"),
        ("1990-03", "1623-173"),
        ("1990-04", "1623-174"),
        ("1990-05", "1623-175"),
        ("1990-06", "1623-176"),
        ("1990-07", "1623-177"),
        ("1990-08", "1623-178"),
        ("1990-09", "1623-179"),
        ("1990-10", "1623-180"),
        ("1990-11", "1623-181"),
        ("1990-12", "1623-182"),
        ("1990-13", "1623-183"),
        ("1990-14", "1623-184"),
        ("1990-18", "1623-188"),
        ("1990-adam", "1623-225"),
        ("1993-01", "1623-200"),
        ("1996-02", "1623-224"),
        ("1996-03", "1623-223"),
        ("1996-04", "1623-222"),
        ("1996wk2", "1623-207"),
        ("1996wk3", "1623-208"),
        ("1996wk5", "1623-219"),
        ("1996wk6", "1623-218"),
        ("1996wk8", "1623-209"),
        ("1996wk11", "1623-268"),
        ("96wk11", "1623-268"),
        ("1998-01", "1623-201"),
        ("1998-03", "1623-210"),
        ("1999-03", "1623-204"),
        ("1999-04", "1623-230"),
        ("1999-10", "1623-162"),
        ("1999-bo-01", "1623-205"),
        ("1999-ob-03", "1623-226"),
        ("1999-ob-04", "1623-227"),
        ("99ob02", "1999-ob-02"),  # exists? pending
        ("1623-99ob02", "1999-ob-02"),
        ("gassischacht", "1623-259"),
        ("1623-gassischacht", "1623-259"),
        ("2007-gassischacht", "1623-259"),
        ("2000-03", "1623-214"),
        ("2000-04", "1623-220"),
        ("2000-05", "1623-215"),
        ("2000-06", "1623-216"),
        ("2000-07", "1623-217"),
        ("2000-09", "1623-234"),
        ("2000-aa-01", "1623-250"),
        ("2001-04", "1623-239"),
        ("2001-05", "1623-243"),
        ("2002-01", "1623-249"),
        ("2002-02", "1623-234"),
        ("2002-04", "1623-242"),
        ("2002-05", "1623-294"),
        ("quarriesd", "1623-2002-08"),
        ("1623-quarriesd", "1623-2002-08"),
        ("2002-08", "1623-2002-08"),
        ("2003-01", "1623-256"),
        ("2003-02", "1623-248"),
        ("2003-03", "1623-247"),
        ("2003-04", "1623-241"),
        ("2003-05", "1623-246"),
        ("2003-06", "1623-161"),
        ("2003-08", "1623-240"),
        ("2003-09", "1623-245"),
        ("2003-10", "1623-244"),
        ("2004-01", "1623-269"),
        ("2004-03", "1623-270"),
        ("2004-11", "1623-251"),
        ("2004-12", "1623-161"),
        ("2004-15", "1623-253"),
        ("2004-19", "1623-254"),
        ("2004-20", "1623-255"),
        ("2005-04", "1623-204"),
        ("2005-05", "1623-264"),
        ("2005-07", "1623-257"),
        ("2006-08", "1623-285"),
        ("2006-09", "1623-298"),
        ("2007-71", "1623-271"),
        ("2010-01", "1623-263"),
        ("2010-03", "1623-293"),
        ("2012-70", "1623-296"),
        ("1623-2012-70", "1623-296"),
        ("2012-dd-05", "1623-286"),  # duplicate entry removed (was listed twice)
        ("2012-dd-08", "1623-297"),
        # ("2011-01", "1623-292"), seems to be a mistake
        ("2012-0w-01", "2012-ow-01"),  # typo zero for 'O'
        ("2012-ns-13", "1623-292"),
        ("2014-neo-01", "1623-273"),
        ("2014-sd-01", "1623-274"),
        ("2014-ms-14", "1623-287"),
        ("2015-mf-06", "1623-288"),
        ("2016-jb-01", "1623-289"),
        ("2017-pw-01", "1623-277"),
        ("2017_cucc_24", "1623-291"),  # note _ not -
        ("2017_cucc_23", "1623-295"),  # note _ not -
        ("2017_cucc_28", "1623-290"),  # note _ not -
        ("bs17", "1623-283"),
        ("1976/b11", "1623-198"),
        ("1976/b8", "1623-197"),
        ("1976/b9", "1623-190"),
        ("1976-b11", "1623-198"),
        ("1976-b8", "1623-197"),
        ("1976-b9", "1623-190"),
        ("b11", "1976/b11"),
        ("b8", "1976/b8"),
        ("b9", "1976/b9"),
        ("2011-01-bs30", "1623-190"),
        ("bs30", "1623-190"),
        ("2011-01", "1623-190"),
        ("2002-x11", "1623-2005-08"),
        ("2002-x12", "2005-07"),
        ("2002-x13", "1623-2005-06"),
        ("2002-x14", "2005-05"),
        ("kh", "1623-161"),
        ("161-kh", "1623-161"),
        ("204-steinBH", "1623-204"),
        ("stonebridge", "1623-204"),
        ("hauchhole", "1623-234"),
        ("hauch", "1623-234"),
        ("234-hauch", "1623-234"),
        ("tunnocks", "1623-258"),
        ("balcony", "1623-264"),
        ("balkon", "1623-264"),
        ("fgh", "1623-290"),
        ("fishface", "1623-290"),
        ("gsh", "1623-291"),
        ("tempest", "1623-2023-lc-01"),
        ("1623-2023-kt-02", "2023-kt-02"),
        # 1626 aliases
        ("langgustl", "1626-354"),
        ("2018-dm-07", "1626-359"),
        ("homecoming", "2018-dm-07"),
        ("heimkommen", "2018-dm-07"),
        ("Heimkehr", "2018-dm-07"),
        ("hc", "2018-dm-07"),
        ("loveshack", "1626-2018-ad-03"),
        ("crushed-garlic", "1626-2018-ad-03"),
        ("BuzzardHole", "1626-2023-buzzardhole"),
        ("2023-BuzzardHole", "1626-2023-buzzardhole"),
        ("1626-2023-BuzzardHole", "1626-2023-buzzardhole"),
    ]

    for key, alias in aliases:
        if alias in Gcavelookup:
            if key in Gcavelookup:
                # already set by a different method, but is it the same cave?
                if Gcavelookup[key] == Gcavelookup[alias]:
                    pass
                else:
                    # aliases wrong - these are different caves
                    message = f" - Alias list is mis-identifying different caves {key}:{Gcavelookup[key]} != {alias}:{Gcavelookup[alias]} "
                    print(message)
                    DataIssue.objects.create(parser="alias", message=message)
                    # Gcave_count[key] += 1
            Gcavelookup[key] = Gcavelookup[alias]
        else:
            message = f" * Coding or cave existence mistake, cave for id '{alias}' does not exist. Expecting to set alias '{key}' to it"
            print(message)
            DataIssue.objects.update_or_create(parser="aliases", message=message)

    # Register hyphen/underscore and upper/lower-case variants of every id.
    addmore = {}
    for id in Gcavelookup:
        addmore[id.replace("-", "_")] = Gcavelookup[id]
        # Bug fix: this line was a duplicate of the one above; the plain
        # underscore->hyphen variant was missing.
        addmore[id.replace("_", "-")] = Gcavelookup[id]
        addmore[id.replace("-", "_").upper()] = Gcavelookup[id]
        addmore[id.replace("-", "_").lower()] = Gcavelookup[id]
        addmore[id.replace("_", "-").upper()] = Gcavelookup[id]
        addmore[id.replace("_", "-").lower()] = Gcavelookup[id]
    # existing entries win over generated variants
    Gcavelookup = {**addmore, **Gcavelookup}
    addmore = {}

    ldup = []
    for d in duplicates:
        # if an alias resolves to 2 or more caves, remove it as an alias
        # NOTE such an alias is restored, assuming a 1623 area, when parsing Wallets - but only wallets.
        # NOTE(review): case/underscore variants of the removed alias may
        # survive in the merged dict — confirm whether that is intended.
        Gcavelookup.pop(d, None)
        Gcave_count.pop(d, None)  # so should not get a duplicate msg below..
        ldup.append(d)
    if ldup:
        message = f" - Ambiguous aliases removed: {ldup}"
        print(message)
        DataIssue.objects.update_or_create(parser="aliases ok", message=message)

    for c in Gcave_count:
        if Gcave_count[c] > 1:
            # Bug fix: id and cave labels were swapped in this message.
            message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{c}' cave __str__:'{Gcavelookup[c]}'"
            print(message)
            DataIssue.objects.update_or_create(parser="aliases", message=message)
    return Gcavelookup