2023-01-19 18:35:56 +00:00
|
|
|
import os
|
|
|
|
import re
|
2021-03-28 23:47:47 +01:00
|
|
|
from collections import defaultdict
|
2023-01-19 18:35:56 +00:00
|
|
|
from datetime import datetime, timezone
|
2021-04-26 02:10:45 +01:00
|
|
|
from pathlib import Path
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2024-12-15 18:54:47 +00:00
|
|
|
from django.db import DataError, models
|
2023-01-30 23:04:11 +00:00
|
|
|
from django.template import loader
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
import settings
|
2023-01-29 18:17:43 +00:00
|
|
|
from troggle.core.models.logbooks import QM
|
2023-09-16 20:46:17 +01:00
|
|
|
from troggle.core.models.survex import SurvexStation, utmToLatLng
|
2023-01-30 23:04:11 +00:00
|
|
|
from troggle.core.models.troggle import DataIssue, TroggleModel
|
2024-12-15 18:54:47 +00:00
|
|
|
from troggle.core.utils import TROG, parse_aliases, writetrogglefile
|
2022-07-27 23:48:22 +01:00
|
|
|
|
2023-10-07 00:26:52 +01:00
|
|
|
# Use the TROG global object to cache the cave lookup list. No good for multi-user.., or even multi-page. Pointless in fact.
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

# The cached TROG values above are discarded immediately: GetCaveLookup()
# rebuilds the lookup lazily on first use instead.
Gcavelookup = None
Gcave_count = None

"""The model declarations for Areas, Caves and Entrances
"""

# Maintainer notes; not used by any code, just a scratchpad string.
todo = """
- Why do we have CaveAndEntrance objects ? These do not need to be explcit for a many:many relationship these days
now only used to create a <form> for entranceletter
TO DO move entranceletter to Entrance

- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
or replace by a unique 'slug' field, better.
"""
|
|
|
|
|
2021-04-26 02:10:45 +01:00
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class CaveAndEntrance(models.Model):
    """This class is ONLY used to create a FormSet for editing the entranceletter.
    CASCADE means that if the cave or the entrance is deleted, then this CaveAndEntrance
    is deleted too
    NOT NEEDED anymore if we insist that cave:entrances have 1:n multiplicity.
    TO DO move entranceletter to Entrance
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    # Letter distinguishing this entrance among the cave's entrances, e.g. "b".
    entranceletter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        # A cave may link each entrance at most once, and use each letter at most once.
        unique_together = [["cave", "entrance"], ["cave", "entranceletter"]]
        ordering = ["entranceletter"]

    def __str__(self):
        # e.g. "1623-115" + "b" -> "1623-115b"
        return str(self.cave) + str(self.entranceletter)
|
2024-07-12 16:18:05 +01:00
|
|
|
|
2023-01-29 21:45:51 +00:00
|
|
|
|
2023-12-23 18:37:20 +00:00
|
|
|
def get_cave_leniently(caveid):
    """Return the Cave for `caveid`, trying the id as given first and then
    retrying with a "1623-" area prefix. Returns None if both lookups fail.

    Fixes over the previous version:
    - the "1623-" fallback is now also tried when the first lookup returns a
      falsy value without raising (previously it was only tried on exception);
    - bare `except:` narrowed to `except Exception:` so KeyboardInterrupt etc.
      are not swallowed.
    """
    try:
        c = getCave(caveid)
        if c:
            return c
    except Exception:
        # print(f"get_cave_leniently FAIL {caveid}")
        pass
    try:
        c = getCave("1623-" + caveid)
        if c:
            return c
    except Exception:
        pass
    return None
|
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class Cave(TroggleModel):
    """A cave, identified by areacode plus kataster number (or unofficial
    number), with any number of entrances linked via CaveAndEntrance.
    """

    # (far) too much here perhaps,
    areacode = models.CharField(max_length=4, blank=True, null=True)  # could use models.IntegerChoices
    subarea = models.CharField(max_length=25, blank=True, null=True)  # 9, 8c etc.
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    # NOTE(review): this M2M field is shadowed by the entrances() method below.
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)  # if a cave is 'pending' this is not set. Otherwise it is.
    fully_explored = models.BooleanField(default=False)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key?
    survey = models.TextField(blank=True, null=True)
    # underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique = True)

    class Meta:
        # we do not enforce uniqueness at the db level as that causes confusing errors for newbie maintainers
        # unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
        ordering = ("kataster_code", "unofficial_number")

    def slug(self):
        """Return the identifier string for this cave, e.g. "1623-115"."""
        return self.newslug()
        # NOTE(review): everything below is unreachable dead code left over from
        # the older caveslug-based implementation; candidate for deletion.
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug
            else:
                return str(self.id)

    def newslug(self):
        # areacode + kataster or unofficial number, e.g. "1623-115"
        return f"{self.areacode}-{self.number()}"

    def ours(self):
        """True if "CUCC" appears in the explorers text.
        NOTE(review): raises TypeError if explorers is None (field is null=True)."""
        return bool(re.search(r"CUCC", self.explorers))

    def number(self):
        """Kataster number if assigned, otherwise the unofficial number."""
        if self.kataster_number:
            return self.kataster_number
        else:
            return self.unofficial_number

    def get_absolute_url(self):
        # we do not use URL_ROOT any more.
        # if self.kataster_number:
        #     pass
        # elif self.unofficial_number:
        #     pass
        # else:
        #     self.official_name.lower()
        return "/"+ self.url  # not good Django style? NEEDS actual URL

    def url_parent(self):
        """Parent path of this cave's url, or a placeholder when url is unset."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            return "NO cave.url"

    def __str__(self, sep=": "):
        # sep is unused; kept for callers that may pass it positionally.
        return str(self.slug())

    def get_open_QMs(self):
        """Searches for all QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        qmsopen = qms.filter(ticked=False)
        return qmsopen  # a QuerySet

    def get_ticked_QMs(self):
        """Searches for all QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )
        qmticked = qms.filter(ticked=True)
        return qmticked  # a QuerySet

    def get_QMs(self):
        """All QMs for this cave, open and ticked."""
        qms = self.get_open_QMs() | self.get_ticked_QMs()  # set union operation
        return qms  # a QuerySet

    def entrances(self):
        """The CaveAndEntrance link rows for this cave.
        NOTE(review): shadows the 'entrances' ManyToManyField declared above."""
        return CaveAndEntrance.objects.filter(cave=self)

    def no_location(self):
        """True when no entrance of this cave has a station with usable x data."""
        no_data = True
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance.best_station() and e.entrance.best_station() != "":
                #print(self, e, e.entrance.best_station())
                try:
                    # merely probing that the station object resolves and has x
                    x = e.entrance.best_station_object().x
                    no_data = False
                except:
                    pass
        return no_data

    def singleentrance(self):
        """True if exactly one entrance is linked to this cave."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compress the sorted entrance letters into a display string,
        using an en-dash for consecutive runs, e.g. "a–c, e".
        NOTE(review): the run-compression bookkeeping looks suspect for some
        inputs (e.g. a single letter); confirm against rendered cave pages
        before changing."""
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entranceletter:
                rs.append(e.entranceletter)
        rs.sort()
        prevR = ""
        n = 0  # length of the current consecutive run
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    # r continues a consecutive run
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                # first letter seen
                prevR = r
                n = 0
                res += r
        # flush the final run
        if n == 0:
            if res:
                res += ", " + prevR
            else:
                res += "–" + prevR
        return res

    def file_output(self):
        """This produces the content which will be re-saved as the cave_data html file.
        Returns (filepath, content, encoding)."""
        if not self.filename:
            # 'pending' caves have no filename yet; derive one from the slug
            self.filename = self.slug() + ".html"
            self.save()

        filepath = Path(settings.CAVEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render and write this cave's cave_data file to disk."""
        filepath, content, coding = self.file_output()
        writetrogglefile(filepath, content)
        return
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class Entrance(TroggleModel):
    """An entrance to one (or more) caves, linked via CaveAndEntrance.
    Location comes from survex stations (tag_station / other_station);
    lat_wgs84/long_wgs84 hold manually entered coordinates.
    """

    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True, default="?")
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    # map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES, default="?")
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    underground_description = models.TextField(blank=True, null=True)

    tag_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entranceletter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        """Return the SurvexStation named `station`; on a non-unique name,
        warn and return the first duplicate; otherwise None."""
        if not station:
            return None
        try:
            single = SurvexStation.objects.get(name = station)
            return single
        except Exception:
            # .get() raised: either no such station, or several with this name
            stations = SurvexStation.objects.filter(name = station)
            print(f" # EXCEPTION looking for '{station}' in all stations. (Entrance {self})")
            if len(stations) > 1:
                print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}")  # .id is Django internal field, not one of ours
                return stations[0]
            else:
                return None

    def singleletter(self):
        """Used in template/dataformat/cave.xml to write out a replacement cave_data file.
        Returns the entranceletter in the first associated cave, "Y" when the
        letter is blank, or "Z" when the lookup fails.
        """
        # BUG FIX: was `cavelist = self.cavelist` — the bound method, never
        # called — so `cavelist[0]` always raised TypeError and this always
        # returned "Z" (the "why is this not working?" of the old docstring).
        cavelist = self.cavelist()
        try:
            first = cavelist[0]
            ce = CaveAndEntrance.objects.get(entrance=self, cave=first)
        except Exception:
            # will fail if no caves in cavelist or if the cave isnt in the db
            return "Z"
        print(f"singleletter() access for first cave in {cavelist=}")
        if ce.entranceletter == "":
            print(f"### BLANK LETTER")
            return "Y"
        else:
            letter = ce.entranceletter
            print(f"### LETTER {letter}")
            return letter

    def other_location(self):
        """SurvexStation for the 'other' (non-tag) station, or None."""
        return self.single(self.other_station)

    def find_location(self):
        """Human-readable location string, prefixed by findability status.
        NOTE(review): raises KeyError if findability is NULL (field allows
        null=True) — presumably never stored as NULL; confirm."""
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except Exception:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except Exception:
                # BUG FIX: the message reported tag_station here, not other_station
                return r + f"{self.other_station} Other Station not in dataset"
        # BUG FIX: was `if self.FINDABLE_CHOICES == "S"` — comparing the whole
        # choices tuple to "S", which is always False, so this error was never shown.
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Prefer the tag station name, fall back to other_station (None if neither)."""
        if self.tag_station:
            return self.tag_station
        if self.other_station:
            return self.other_station

    def best_station_object(self):
        """SurvexStation for best_station(); raises if it does not resolve uniquely."""
        bs = self.best_station()
        return SurvexStation.objects.get(name=bs)

    def has_photo(self):
        """'Yes' if photo contains an image/anchor tag, 'Missing' if it has
        text but no tag, 'No' if empty."""
        if self.photo:
            if (
                self.photo.find("<img") > -1
                or self.photo.find("<a") > -1
                or self.photo.find("<IMG") > -1
                or self.photo.find("<A") > -1
            ):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        """Display label for the marking code (None if the code is unknown)."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        """Display label for the findability code (None if the code is unknown)."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        """SurvexStation for the tag station, or None."""
        return self.single(self.tag_station)

    def other(self):
        """SurvexStation for the other station, or None."""
        return self.single(self.other_station)

    def needs_surface_work(self):
        """True unless the entrance is surveyed, photographed and tagged.
        BUG FIX: was `not self.has_photo` — testing the bound method object,
        which is always truthy, so the photo condition never fired."""
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"

    def get_absolute_url(self):
        # This can't be right..
        # BUG FIX: removed the dead `res = "/".join((self.get_root().cave.get_absolute_url(), self.title))`
        # line — Entrance has no get_root() or title, so it raised AttributeError
        # before ever reaching the return below.
        return self.url_parent()

    def cavelist(self):
        """All caves this entrance belongs to, as a list."""
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def firstcave(self):
        """The first cave linked to this entrance, or None."""
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                return(e.cave)

    def get_file_path(self):
        """Path of this entrance's entrance_data file on disk."""
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        """Render the entrance_data file content; returns (filepath, content, encoding)."""
        if not self.filename:
            self.filename = self.slug + ".html"
            self.save()
        filepath = self.get_file_path()

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render and write this entrance's entrance_data file to disk."""
        filepath, content, coding = self.file_output()
        writetrogglefile(filepath, content)
        return

    def url_parent(self):
        """Parent path of self.url, or of the single owning cave's url, or "".
        NOTE(review): no 'url' field is declared on Entrance here — presumably
        inherited from TroggleModel; confirm."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            cavelist = self.cavelist()
            if len(cavelist) == 1:
                return cavelist[0].url_parent()
            else:
                return ""

    def latlong(self):
        """Gets lat long assuming that it has to get it from the associated stations.
        tag_station takes precedence over other_station; None if neither resolves."""
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name = self.other_station)
            except Exception:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name = self.tag_station)
            except Exception:
                pass
        if station:
            return station.latlong()

    def lat(self):
        """Latitude from the associated station, or None."""
        if self.latlong():
            return self.latlong()[0]
        else:
            return None

    def long(self):
        """Longitude from the associated station, or None."""
        if self.latlong():
            return self.latlong()[1]
        else:
            return None

    def best_alt(self):
        """Altitude (z) of the best station; raises if it does not resolve."""
        return self.best_station_object().z

    def best_srtm_alt(self):
        """SRTM altitude of the best station; raises if it does not resolve."""
        return self.best_station_object().srtm_alt
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any string which might serve to identify it

    lookup function modelled on GetPersonExpeditionNameLookup
    repeated assignment each call, needs refactoring

    Used when parsing wallets contents.json file too in views/uploads.py

    Needs to be a proper function that raises an exception if there is a duplicate.
    OR we could set it to return None if there are duplicates, and require the caller to
    fall back on doing the actual database query it wants rather than using this cache shortcut
    """

    def bad_alias(a,k):
        # this is an error
        # NOTE(review): the assignment below uses `key` from the enclosing
        # loop's scope rather than the parameter `k`; same value at call time,
        # but fragile if this function is ever called from elsewhere.
        if a.lower() in Gcavelookup:
            Gcavelookup[key] = Gcavelookup[a.lower()]
            message = f" - Warning, capitalisation error in alias list. cave for id '{a}' does not exist but {a.lower()} does."
            print(message)
            DataIssue.objects.update_or_create(parser="aliases", message=message)
        else:
            message = f" * Coding or cave existence mistake, cave for id '{a}' does not exist. Expecting to set key alias '{k}' to it"
            DataIssue.objects.update_or_create(parser="aliases", message=message)

    # ids seen under more than one distinct cave; pruned at the end
    duplicates = {}

    def checkcaveid(cave, id):
        """Register `id` as a lookup key for `cave`, recording clashes."""
        global Gcavelookup
        if id not in Gcavelookup:
            Gcavelookup[id] = cave
            Gcave_count[id] += 1
        else:
            if cave == Gcavelookup[id]:
                pass  # same id, same cave
            else:  # same id but different cave, e.g. 122 => 1623-122 and 1626-122
                # We want to keep the 1623- and get rid of the other one
                if cave.areacode == "1623":
                    Gcavelookup[id] = cave
                duplicates[id] = 1

    global Gcavelookup
    if Gcavelookup:
        # already built this process; reuse the cache
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}
    global Gcave_count
    Gcave_count = defaultdict(int)  # sets default value to int(0)

    for cave in Cave.objects.all():  # Note that this collects recently created Caves too
        key = cave.official_name.lower()
        if key != "" and key != "unamed" and key != "unnamed":
            if Gcave_count[key] > 0:
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[key] = 1
            else:
                Gcavelookup[key] = cave
                Gcave_count[key] += 1
        if cave.kataster_number:
            # NOTE this will set an alias for "145" not "1623-145"
            checkcaveid(cave, cave.kataster_number)  # we do expect 1623/55 and 1626/55 to cause clash, removed below

        # the rest of these are 'nice to have' but may validly already be set
        if cave.unofficial_number:
            unoffn = cave.unofficial_number.lower()
            checkcaveid(cave, unoffn)

        if cave.filename:
            # this is the slug - or should be
            fn = cave.filename.replace(".html", "").lower()
            checkcaveid(cave, fn)

        if cave.slug():
            # also possibly done already. checking for weird slug values..
            try:
                slug = cave.slug().lower()
                checkcaveid(cave, slug)
            except:
                print(cave, cave.slug())

    # These might alse create more duplicate entries
    aliases = []
    # read the two files in /cave_data/
    for ca in ["cavealiasesold.txt", "cavealiases.txt"]:
        pairs, report = parse_aliases(ca)
        aliases += pairs

    # print(f"Loaded aliases, {len(aliases)} found\n{report}\n {aliases}")

    # On reset, these aliases only work if the cave already properly exists with an entry in :expoweb:/cave_data/
    # but as the aliases are recomputed repeatedly, eventually they work on PENDING caves too

    for key, alias in aliases:
        if not alias in Gcavelookup:
            bad_alias(alias, key)
        else:
            if key in Gcavelookup:
                # already set by a different method, but is it the same cave?
                if Gcavelookup[key] == Gcavelookup[alias]:
                    pass
                else:
                    # aliases wrong - these are different caves
                    message = f" - Alias list is mis-identifying different caves {key}:{Gcavelookup[key]} != {alias}:{Gcavelookup[alias]} "
                    print(message)
                    DataIssue.objects.create(parser="alias", message=message)
            # Gcave_count[key] += 1
            Gcavelookup[key] = Gcavelookup[alias]

    # add hyphen/underscore and upper/lower-case variants of every key
    addmore = {}
    for id in Gcavelookup:
        addmore[id.replace("-", "_")] = Gcavelookup[id]
        # NOTE(review): the line below duplicates the one above
        addmore[id.replace("-", "_")] = Gcavelookup[id]

        addmore[id.replace("-", "_").upper()] = Gcavelookup[id]
        addmore[id.replace("-", "_").lower()] = Gcavelookup[id]
        addmore[id.replace("_", "-").upper()] = Gcavelookup[id]
        addmore[id.replace("_", "-").lower()] = Gcavelookup[id]
    # existing (explicit) keys win over the generated variants
    Gcavelookup = {**addmore, **Gcavelookup}

    addmore = {}

    ldup = []
    for d in duplicates:
        # if an alias resolves to 2 or more caves, remove it as an alias
        # NOTE such an alisas is restored, assuming a 1623 area, when parsing Wallets - but only wallets.
        #print(f"{Gcavelookup[d]=} {Gcave_count[d]=}")
        if Gcavelookup[d].areacode == "1623":
            # then leave it, treat as OK
            pass
        else:
            Gcavelookup.pop(d)
            Gcave_count.pop(d)  # so should not get a duplicate msg below..
            ldup.append(d)
    if ldup:
        message = f" - Ambiguous aliases being removed: {ldup}"
        print(message)
        update_dataissue("aliases ok", message)

    for c in Gcave_count:
        if Gcave_count[c] > 1:
            message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
            print(message)
            update_dataissue("aliases", message)

    return Gcavelookup
|
2024-02-05 21:18:40 +00:00
|
|
|
|
2024-02-05 21:39:21 +00:00
|
|
|
# @transaction.atomic
def update_dataissue(parsercode, message):
    """Create or refresh a DataIssue record for `parsercode` with `message`.

    A DataError (e.g. MariaDB "(1406, Data too long for column 'message' at
    row 1)") is reported and re-raised; any other failure of update_or_create
    (e.g. multiple matching rows) falls back to creating a possibly-duplicate
    record rather than losing the report.
    """
    try:
        DataIssue.objects.update_or_create(parser=parsercode, message=message)
    except DataError as e:
        # Assuming this is the (1406, "Data too long for column 'message' at row1")
        # fault in the mariaDb/Django setup. Surface it before re-raising
        # (previously the diagnostic string was built but never used).
        print(f"Is this the (1406, Data too long for column 'message' at row1) problem?\nexception:{e}")
        raise
    except Exception:
        # never mind, make a duplicate
        DataIssue.objects.create(parser=parsercode, message=message)
|