troggle-unchained/core/models/caves.py

import os
import re
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path

from django.db import models
from django.template import loader

import settings

from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexStation
from troggle.core.models.troggle import DataIssue, TroggleModel
from troggle.core.utils import TROG, writetrogglefile

# Use the TROG global object to cache the cave lookup list. No good for multi-user..
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

Gcavelookup = None
Gcave_count = None

"""The model declarations for Areas, Caves and Entrances
"""

todo = """
- Find out why we have separate CaveSlug objects and why
  these are not just a single field on the Model. Do we ever need more
  than one slug per cave or entrance? Surely that would break everything??

- Can we rewrite things to eliminate the CaveSlug objects? Surely
  foreign keys work fine ?!

- Why do we have CaveAndEntrance objects? Surely entrance_letter belongs on the Entrance object?

- move the aliases list from the code and put it into an editable file

- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
"""


class Area(TroggleModel):
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    super = models.ForeignKey("Area", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        if self.super:
            return str(self.super) + " - " + str(self.short_name)
        else:
            return str(self.short_name)

    def kat_area(self):
        if self.short_name in ["1623", "1626", "1624", "1627"]:
            return self.short_name
        elif self.super:
            return self.super.kat_area()


class CaveAndEntrance(models.Model):
    """This class is ONLY used to create a FormSet for editing the cave and all its
    entrances in one form.
    CASCADE means that if the cave or the entrance is deleted, then this CaveAndEntrance
    is deleted too.
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entrance_letter"]]
        ordering = ["entrance_letter"]

    def __str__(self):
        return str(self.cave) + str(self.entrance_letter)
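
# Illustrative sketch only (not from the original file): a through-model like the one
# above is typically edited with Django's inline formsets, roughly along these lines:
#
#     from django.forms.models import inlineformset_factory
#     CaveAndEntranceFormSet = inlineformset_factory(
#         Cave, CaveAndEntrance, fields=("entrance", "entrance_letter"), extra=1
#     )
#
# The names here are hypothetical; the FormSet actually used by troggle is defined in
# the form/view code, not in this models file.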

# class CaveSlug(models.Model):
# moved to models/logbooks.py to avoid cyclic import problem


class Cave(TroggleModel):
    # too much here perhaps,
    area = models.ManyToManyField(Area, blank=False)
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key
    survey = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique=True)

    # class Meta:
    #     unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need separating
    # href = models.CharField(max_length=100)

    class Meta:
        ordering = ("kataster_code", "unofficial_number")

    def hassurvey(self):
        """This is almost certainly a fossil - needs checking..."""
        if not self.underground_centre_line:
            return "No"
        if (
            self.survey.find("<img") > -1
            or self.survey.find("<a") > -1
            or self.survey.find("<IMG") > -1
            or self.survey.find("<A") > -1
        ):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        return bool(re.search(r"CUCC", self.explorers))

    def number(self):
        if self.kataster_number:
            return self.kataster_number
        else:
            return self.unofficial_number

    def reference(self):
        return f"{self.kat_area()}-{self.number()}"

    def get_absolute_url(self):
        if self.kataster_number:
            pass
        elif self.unofficial_number:
            pass
        else:
            self.official_name.lower()
        return Path(settings.URL_ROOT) / self.url  # not good Django style.. NEEDS actual URL

    def url_parent(self):
        return self.url.rsplit("/", 1)[0]

    def __str__(self, sep=": "):
        return str(self.slug())

    def get_open_QMs(self):
        """Searches for all QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        qmsopen = qms.filter(ticked=False)
        return qmsopen  # a QuerySet

    def get_ticked_QMs(self):
        """Searches for all QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )
        qmticked = qms.filter(ticked=True)
        return qmticked  # a QuerySet

    def get_QMs(self):
        qms = self.get_open_QMs() | self.get_ticked_QMs()  # set union operation
        return qms  # a QuerySet

    def kat_area(self):
        try:
            for a in self.area.all():
                if a.kat_area():
                    return a.kat_area()
        except:
            return ""

    def entrances(self):
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance_letter:
                rs.append(e.entrance_letter)
        rs.sort()
        prevR = ""
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "&ndash;" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            if res:
                res += ", " + prevR
        else:
            res += "&ndash;" + prevR
        return res

    def writeDataFile(self):
        filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)
        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        print(now)
        c = dict({"cave": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def file_output(self):
        filepath = Path(os.path.join(settings.CAVEDESCRIPTIONS, self.filename))
        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def getArea(self):
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.super in areas:
                try:
                    lowestareas.remove(area.super)
                except:
                    pass
        return lowestareas[0]


class Entrance(TroggleModel):
    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))

    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    tag_station = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entrance_letter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        try:
            single = SurvexStation.objects.get(name=station)
            return single
        except:
            stations = SurvexStation.objects.filter(name=station)
            if len(stations) > 1:
                print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}")  # .id is Django internal field, not one of ours
                return stations[0]
            else:
                return None

    def exact_location(self):
        return self.single(self.exact_station)

    def other_location(self):
        return self.single(self.other_station)

    def find_location(self):
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.exact_station} Exact Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except:
                return r + f"{self.other_station} Other Station not in dataset"
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        if self.photo:
            if (
                self.photo.find("<img") > -1
                or self.photo.find("<a") > -1
                or self.photo.find("<IMG") > -1
                or self.photo.find("<A") > -1
            ):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return self.single(self.tag_station)

    def needs_surface_work(self):
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"

    def get_absolute_url(self):
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def cavelist(self):
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def get_file_path(self):
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))
        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)
        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def url_parent(self):
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            cavelist = self.cavelist()
            if len(cavelist) == 1:
                return cavelist[0].url_parent()
            else:
                return ""

    def latlong(self):
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name=self.other_station)
            except:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name=self.tag_station)
            except:
                pass
        if self.exact_station:
            try:
                station = SurvexStation.objects.get(name=self.exact_station)
            except:
                pass
        if station:
            return station.latlong()


def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any string which might serve to identify it.

    Lookup function modelled on GetPersonExpeditionNameLookup.

    Repeated assignment each call, needs refactoring.

    Used when parsing wallets contents.json file too, in views/uploads.py

    Does NOT detect duplicates! Needs fixing.
    Needs to be a proper function that raises an exception if there is a duplicate.
    OR we could set it to return None if there are duplicates, and require the caller to
    fall back on doing the actual database query it wants rather than using this cache shortcut.
    """

    duplicates = {}

    def checkcaveid(cave, id):
        global Gcavelookup
        if id not in Gcavelookup:
            Gcavelookup[id] = cave
            Gcave_count[id] += 1
        else:
            if cave == Gcavelookup[id]:
                pass  # same id, same cave
            else:  # same id but different cave
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[id] = 1

    global Gcavelookup
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}

    global Gcave_count
    Gcave_count = defaultdict(int)  # sets default value to int(0)

    DataIssue.objects.filter(parser="aliases").delete()

    for cave in Cave.objects.all():
        key = cave.official_name.lower()
        if key != "" and key != "unamed" and key != "unnamed":
            if Gcave_count[key] > 0:
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[key] = 1
            else:
                Gcavelookup[key] = cave
                Gcave_count[key] += 1

        if cave.kataster_number:
            checkcaveid(cave, cave.kataster_number)  # we do expect 1623/55 and 1626/55 to cause a warning message

        # the rest of these are 'nice to have' but may validly already be set
        if cave.unofficial_number:
            unoffn = cave.unofficial_number.lower()
            checkcaveid(cave, unoffn)

        if cave.filename:
            # this is the slug - usually.. but usually done as f'{cave.area}-{cave.kataster_number}'
            fn = cave.filename.replace(".html", "").lower()
            checkcaveid(cave, fn)

        if cave.slug():
            # also possibly done already
            slug = cave.slug().lower()
            checkcaveid(cave, slug)

    # These might also create more duplicate entries
    # Yes, this should be set in, and imported from, settings.py
    aliases = [
("1987-02", "267"),
("1990-01", "171"),
("1990-02", "172"),
("1990-03", "173"),
("1990-04", "174"),
("1990-05", "175"),
("1990-06", "176"),
("1990-07", "177"),
("1990-08", "178"),
("1990-09", "179"),
("1990-10", "180"),
("1990-11", "181"),
("1990-12", "182"),
("1990-13", "183"),
("1990-14", "184"),
("1990-18", "188"),
("1990-adam", "225"),
("1993-01", "200"),
("1996-02", "224"),
("1996-03", "223"),
("1996-04", "222"),
("1996wk2", "207"),
("1996wk3", "208"),
("1996wk5", "219"),
("1996wk6", "218"),
("1996wk8", "209"),
("1996wk11", "268"),
("96wk11", "268"),
("1998-01", "201"),
("1998-03", "210"),
("1999-03", "204"),
("1999-04", "230"),
("1999-10", "162"),
("1999-bo-01", "205"),
("1999-ob-03", "226"),
("1999-ob-04", "227"),
("2000-01", "231"),
("2000-03", "214"),
("2000-04", "220"),
("2000-05", "215"),
("2000-06", "216"),
("2000-07", "217"),
("2000-09", "234"),
("2000-aa-01", "250"),
("2001-04", "239"),
("2001-05", "243"),
("2002-01", "249"),
("2002-02", "234"),
("2002-04", "242"),
("2002-05", "294"),
("2003-01", "256"),
("2003-02", "248"),
("2003-03", "247"),
("2003-04", "241"),
("2003-05", "246"),
("2003-06", "161"),
("2003-08", "240"),
("2003-09", "245"),
("2003-10", "244"),
("2004-01", "269"),
("2004-03", "270"),
("2004-11", "251"),
("2004-12", "161"),
("2004-15", "253"),
("2004-19", "254"),
("2004-20", "255"),
("2005-04", "204"),
("2005-05", "264"),
("2005-07", "257"),
("2006-08", "285"),
("2006-09", "298"),
("2007-71", "271"),
("2010-01", "263"),
("2010-03", "293"),
("2011-01", "292"),
("2012-dd-05", "286"),
("2012-ns-13", "292"),
("2014-neo-01", "273"),
("2014-sd-01", "274"),
("2014-ms-14", "287"),
("2015-mf-06", "288"),
("2016-jb-01", "289"),
("2017-pw-01", "277"),
("2018-dm-07", "359"), # NB this is 1626
("2017_cucc_24", "291"), # note _ not -
("2017_cucc_23", "295"), # note _ not -
("2017_cucc_28", "290"), # note _ not -
("bs17", "283"),
("1976/b11", "198"),
("1976/b8", "197"),
("1976/b9", "190"),
("b11", "1976/b11"),
("b8", "1976/b8"),
("b9", "1976/b9"),
("2011-01-bs30", "190"),
("bs30", "190"),
("2011-01", "190"),
("quarriesd", "2002-08"),
("2002-x11", "2005-08"),
("2002-x12", "2005-07"),
("2002-x13", "2005-06"),
("2002-x14", "2005-05"),
("kh", "161"),
("161-kh", "161"),
("204-steinBH", "204"),
("stonebridge", "204"),
("hauchhole", "234"),
("hauch", "234"),
("234-hauch", "234"),
("tunnocks", "258"),
("balcony", "264"),
("balkon", "264"),
("fgh", "290"),
("gsh", "291"),
("homecoming", "2018-dm-07"),
("heimkommen", "2018-dm-07"),
2022-07-25 13:03:58 +01:00
("Heimkehr", "2018-dm-07"),
("99ob02", "1999-ob-02"),
]

    for i in aliases:
        if i[1] in Gcavelookup:
            if i[0] in Gcavelookup:
                # already set by a different method, but is it the same cave?
                if Gcavelookup[i[0]] == Gcavelookup[i[1]]:
                    pass
                else:
                    Gcave_count[i[0]] += 1
            Gcavelookup[i[0]] = Gcavelookup[i[1]]
        else:
            message = f" * Coding or cave existence mistake, cave for id '{i[1]}' does not exist. Expecting to set alias '{i[0]}' to it"
            # print(message)
            DataIssue.objects.create(parser="aliases", message=message)

    addmore = {}
    for id in Gcavelookup:
        addmore[id.replace("-", "_")] = Gcavelookup[id]
        addmore[id.replace("_", "-")] = Gcavelookup[id]
        addmore[id.upper()] = Gcavelookup[id]
    Gcavelookup = {**addmore, **Gcavelookup}
    addmore = {}

    ldup = []
    for d in duplicates:
        Gcavelookup.pop(d)
        Gcave_count.pop(d)
        ldup.append(d)
    if ldup:
        message = f" - Ambiguous aliases removed: {ldup}"
        print(message)
        DataIssue.objects.create(parser="aliases", message=message)

    for c in Gcave_count:
        if Gcave_count[c] > 1:
            message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
            print(message)
            DataIssue.objects.create(parser="aliases", message=message)

    return Gcavelookup