2023-01-19 18:35:56 +00:00
|
|
|
import os
|
2023-03-17 20:01:52 +00:00
|
|
|
import os
|
2023-01-19 18:35:56 +00:00
|
|
|
import re
|
2021-03-28 23:47:47 +01:00
|
|
|
from collections import defaultdict
|
2023-01-19 18:35:56 +00:00
|
|
|
from datetime import datetime, timezone
|
2021-04-26 02:10:45 +01:00
|
|
|
from pathlib import Path
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
from django.db import models
|
2023-01-30 23:04:11 +00:00
|
|
|
from django.template import loader
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
import settings
|
2023-01-29 18:17:43 +00:00
|
|
|
from troggle.core.models.logbooks import QM
|
2023-09-16 20:46:17 +01:00
|
|
|
from troggle.core.models.survex import SurvexStation, utmToLatLng
|
2023-01-30 23:04:11 +00:00
|
|
|
from troggle.core.models.troggle import DataIssue, TroggleModel
|
2023-01-19 18:35:56 +00:00
|
|
|
from troggle.core.utils import TROG, writetrogglefile
|
2022-07-27 23:48:22 +01:00
|
|
|
|
2023-10-07 00:26:52 +01:00
|
|
|
# Use the TROG global object to cache the cave lookup list. No good for multi-user.., or even multi-page. Pointless in fact.
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

# NOTE(review): the two assignments below immediately discard the values just
# read from the TROG cache above, so that cache is never actually reused here —
# GetCaveLookup() rebuilds the lookup tables on first call instead. Confirm the
# TROG reads are not kept for some side effect before simplifying.
Gcavelookup = None
Gcave_count = None
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
"""The model declarations for Areas, Caves and Entrances
|
|
|
|
"""
|
2021-04-26 02:10:45 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
todo = """
|
2023-03-28 20:30:00 +01:00
|
|
|
- Find out why we have separate objects CaveSlug and why
|
2023-09-27 19:44:04 +01:00
|
|
|
these are not just a single field on the Model. This was Martin's idea,
|
|
|
|
but we are using text aliases now so we only need one slug in the data model
|
2021-04-26 02:10:45 +01:00
|
|
|
|
2023-03-28 20:30:00 +01:00
|
|
|
- Can we rewrite things to eliminate the CaveSlug and objects? Surely
|
2023-02-08 23:37:00 +00:00
|
|
|
foreign keys work fine ?!
|
|
|
|
|
2023-11-03 14:54:29 +00:00
|
|
|
- Why do we have CaveAndEntrance objects ? These do not need to be explicit for a many:many relationship these days
|
2023-02-26 22:13:37 +00:00
|
|
|
|
|
|
|
- move the aliases list from the code and put into an editable file
|
2023-02-08 23:37:00 +00:00
|
|
|
|
2021-04-26 02:10:45 +01:00
|
|
|
- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
|
2023-09-10 13:42:36 +01:00
|
|
|
or replace by a unique 'slug' field, better.
|
2023-01-30 19:04:36 +00:00
|
|
|
"""
|
|
|
|
|
2021-04-26 02:10:45 +01:00
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class CaveAndEntrance(models.Model):
    """Explicit join row linking one Cave to one Entrance.

    Its real purpose is to let a single FormSet edit a cave together with
    all of its entrances in one form. Because both foreign keys are
    CASCADE, deleting either the cave or the entrance removes this link
    row as well.
    NOT NEEDED anymore if we insist that cave:entrances have 1:n multiplicity.
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    # Letter distinguishing multiple entrances of the same cave, e.g. "a", "b".
    entranceletter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entranceletter"]]
        ordering = ["entranceletter"]

    def __str__(self):
        # Cave identifier immediately followed by this entrance's letter.
        return f"{self.cave}{self.entranceletter}"
|
2020-07-26 02:26:04 +01:00
|
|
|
|
2023-03-28 15:37:25 +01:00
|
|
|
# class CaveSlug(models.Model):
|
2023-08-03 14:11:46 +01:00
|
|
|
# moved to models/logbooks.py to avoid cyclic import problem. No I don't know why either.
|
2023-01-29 21:45:51 +00:00
|
|
|
|
2023-12-23 18:37:20 +00:00
|
|
|
def get_cave_leniently(caveid):
    """Look up a Cave by id, retrying with a '1623-' area prefix on failure.

    First tries getCave() with the id exactly as given; if that raises,
    retries with "1623-" prepended (most cave ids are in kataster area 1623).
    Returns the Cave on success, or None when neither lookup succeeds.
    """
    try:
        c = getCave(caveid)
        if c:
            return c
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        # print(f"get_cave_leniently FAIL {caveid}")
        try:
            c = getCave("1623-" + caveid)
            if c:
                return c
        except Exception:
            return None
    # Explicit fall-through (previously implicit): getCave returned a
    # falsy value without raising, so no cave was found.
    return None
|
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class Cave(TroggleModel):
    """One cave (or pending cave) in the expo dataset.

    Almost every field is a free-text CharField/TextField because the data
    is parsed from hand-edited cave_data description files; file_output()
    performs the reverse operation, re-serialising this record to that file.
    """
    # (far) too much here perhaps,
    areacode = models.CharField(max_length=4, blank=True, null=True)  # could use models.IntegerChoices
    subarea = models.CharField(max_length=25, blank=True, null=True)  # 9, 8c etc.
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    # NOTE(review): a method named `entrances` is also defined further down
    # this class body, which replaces this attribute name in the class
    # namespace — confirm this M2M field is actually usable as intended.
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)  # if a cave is 'pending' this is not set. Otherwise it is.
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key?
    survey = models.TextField(blank=True, null=True)
    # underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique = True)

    class Meta:
        # we do not enforce uniqueness at the db level as that causes confusing errors for newbie maintainers
        # unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
        ordering = ("kataster_code", "unofficial_number")

    def slug(self):
        """Return the primary CaveSlug's slug, else any slug, else None (implicitly)."""
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        """True when the explorers free-text mentions CUCC.

        NOTE(review): re.search raises TypeError if explorers is None —
        confirm callers guarantee it is set.
        """
        return bool(re.search(r"CUCC", self.explorers))

    def number(self):
        """Kataster number when allocated, otherwise the unofficial number."""
        if self.kataster_number:
            return self.kataster_number
        else:
            return self.unofficial_number

    def get_absolute_url(self):
        # we do not use URL_ROOT any more.
        # if self.kataster_number:
        #     pass
        # elif self.unofficial_number:
        #     pass
        # else:
        #     self.official_name.lower()
        return self.url  # not good Django style? NEEDS actual URL

    def url_parent(self):
        """Return the URL with its last path component removed, or a placeholder."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            return "NO cave.url"

    def __str__(self, sep=": "):
        # NOTE(review): `sep` is accepted but never used.
        return str(self.slug())

    def get_open_QMs(self):
        """Searches for all open (un-ticked) QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        qmsopen = qms.filter(ticked=False)
        return qmsopen  # a QuerySet

    def get_ticked_QMs(self):
        """Searches for all ticked (completed) QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )
        qmticked = qms.filter(ticked=True)
        return qmticked  # a QuerySet

    def get_QMs(self):
        """All QMs for this cave, open and ticked combined."""
        qms = self.get_open_QMs() | self.get_ticked_QMs()  # set union operation
        return qms  # a QuerySet

    def entrances(self):
        """QuerySet of the CaveAndEntrance link rows for this cave.

        NOTE(review): this method shadows the `entrances` ManyToManyField
        declared above (same name in the class namespace) — confirm intended.
        """
        return CaveAndEntrance.objects.filter(cave=self)

    def no_location(self):
        """True when no entrance of this cave has a usable station with coordinates."""
        no_data = True
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance.best_station() and e.entrance.best_station() != "":
                #print(self, e, e.entrance.best_station())
                try:
                    # Any entrance whose station resolves to an object with an
                    # x coordinate counts as having location data.
                    x = e.entrance.best_station_object().x
                    no_data = False
                except:
                    pass
        return no_data

    def singleentrance(self):
        """True when this cave has exactly one entrance link."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compact display string of entrance letters, e.g. "a, c" or "a–d".

        Consecutive letters are collapsed into a dash-separated range;
        non-consecutive ones are comma-separated.
        """
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entranceletter:
                rs.append(e.entranceletter)
        rs.sort()
        prevR = ""
        n = 0  # length of the current run of consecutive letters
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    # r continues a consecutive run
                    prevR = r
                    n += 1
                else:
                    # run broken: flush the previous letter or range end
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                # first letter seen
                prevR = r
                n = 0
                res += r
        # flush the final pending letter/range
        if n == 0:
            if res:
                res += ", " + prevR
        else:
            res += "–" + prevR
        return res

    def file_output(self):
        """This produces the content which will be re-saved as the cave_data html file.

        Returns a (filepath, content, encoding) tuple; does not write the file
        itself (see writeDataFile). Side effect: invents and saves a filename
        derived from the slug if none is set yet.
        """
        if not self.filename:
            self.filename = self.slug() + ".html"
            self.save()

        filepath = Path(settings.CAVEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render this cave and write it back to its cave_data file."""
        filepath, content, coding = self.file_output()
        writetrogglefile(filepath, content)
        return
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
class Entrance(TroggleModel):
    """One cave entrance, parsed from a hand-edited entrance_data file.

    Location is not stored directly: it is resolved at runtime from the
    named survex stations in tag_station / other_station.
    """
    # (code, human-readable label) pairs for how the entrance is marked on the surface
    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True, default="?")
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)  # manually entered not calculated
    # map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES, default="?")
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    underground_description = models.TextField(blank=True, null=True)

    # survex station names (not foreign keys) — resolved via single()/best_station_object()
    tag_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entranceletter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        """Resolve a station name to one SurvexStation, or None.

        If the exact get() fails (missing or duplicate names), falls back to
        filter() and returns the first of any duplicates, logging the clash.
        """
        if not station:
            return None
        try:
            single = SurvexStation.objects.get(name = station)
            return single
        except:
            stations = SurvexStation.objects.filter(name = station)
            print(f" # EXCEPTION looking for '{station}' in all stations. (Entrance {self})")
            if len(stations) > 1:
                print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}") # .id is Django internal field, not one of ours
                return stations[0]
            else:
                return None

    def singleletter(self):
        """Used in template/dataformat/cave.xml to write out a replacement cave_data file
        why is this not working?
        """
        # NOTE(review): `self.cavelist` is the bound method, not its result —
        # cavelist[0] on a method always raises, so this path always returns
        # "Z" via the except below. Probably should be `self.cavelist()`;
        # confirm before changing (may explain the docstring's question).
        cavelist = self.cavelist
        try:
            first = cavelist[0]
            ce = CaveAndEntrance.objects.get(entrance=self, cave=first)
        except:
            # will fail if no caves in cavelist or if the cave isnt in the db
            return "Z"
        print(f"singleletter() access for first cave in {cavelist=}")
        if ce.entranceletter == "":
            print(f"### BLANK LETTER")
            return "Y"
        else:
            letter = ce.entranceletter
            print(f"### LETTER {letter}")
            return letter

    def other_location(self):
        """SurvexStation for other_station, or None."""
        return self.single(self.other_station)

    def find_location(self):
        """Human-readable location string prefixed by findability status."""
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except:
                # NOTE(review): message interpolates tag_station although this
                # branch handles other_station — looks like a copy/paste slip.
                return r + f"{self.tag_station} Other Station not in dataset"
        # NOTE(review): this compares the FINDABLE_CHOICES tuple with the
        # string "S", which is always False — presumably self.findability
        # was intended. Confirm before fixing.
        if self.FINDABLE_CHOICES == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Preferred station name: tag_station, else other_station, else None (implicitly)."""
        if self.tag_station:
            return self.tag_station
        if self.other_station:
            return self.other_station

    def best_station_object(self):
        """SurvexStation for best_station(); raises if absent or duplicated."""
        bs = self.best_station()
        return SurvexStation.objects.get(name=bs)

    def has_photo(self):
        """"Yes" if photo text embeds an image/link tag, "Missing" if text only, "No" if empty."""
        if self.photo:
            if (
                self.photo.find("<img") > -1
                or self.photo.find("<a") > -1
                or self.photo.find("<IMG") > -1
                or self.photo.find("<A") > -1
            ):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        """Display label for the current marking code, or None (implicitly)."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        """Display label for the current findability code, or None (implicitly)."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        """SurvexStation for tag_station, or None."""
        return self.single(self.tag_station)

    def other(self):
        """SurvexStation for other_station, or None."""
        return self.single(self.other_station)

    def needs_surface_work(self):
        # NOTE(review): `not self.has_photo` negates the bound method (always
        # truthy), so that term is always False — presumably
        # `self.has_photo() != "Yes"` or similar was intended. Confirm.
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_root() and self.title are not defined on this
        # model as visible here — confirm this method is ever called.
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def cavelist(self):
        """List of Cave objects linked to this entrance."""
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def firstcave(self):
        """First linked Cave, or None (implicitly) when there is none."""
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                return(e.cave)

    def get_file_path(self):
        """Filesystem path of this entrance's entrance_data description file."""
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        """Render this entrance to (filepath, content, encoding) for re-saving.

        Side effect: invents and saves a filename from the slug if unset.
        """
        if not self.filename:
            self.filename = self.slug + ".html"
            self.save()
        filepath = self.get_file_path()

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render this entrance and write it back to its entrance_data file."""
        filepath, content, coding = self.file_output()
        writetrogglefile(filepath, content)
        return

    def url_parent(self):
        """Parent URL path; falls back to the single linked cave's parent, else ""."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            cavelist = self.cavelist()
            if len(self.cavelist()) == 1:
                return cavelist[0].url_parent()
            else:
                return ""

    def latlong(self):
        """Gets lat long assuming that it has to get it from the associated stations

        tag_station takes precedence over other_station when both resolve.
        Returns None (implicitly) when neither station is found.
        """
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name = self.other_station)
            except:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name = self.tag_station)
            except:
                pass
        if station:
            return station.latlong()

    def lat(self):
        """Latitude from latlong(), or None."""
        if self.latlong():
            return self.latlong()[0]
        else:
            return None

    def long(self):
        """Longitude from latlong(), or None."""
        if self.latlong():
            return self.latlong()[1]
        else:
            return None

    def best_alt(self):
        """Altitude (z) of the best station; raises if it cannot be resolved."""
        return self.best_station_object().z

    def best_srtm_alt(self):
        """SRTM-derived altitude of the best station; raises if unresolved."""
        return self.best_station_object().srtm_alt
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
def GetCaveLookup():
|
2022-07-24 19:38:14 +01:00
|
|
|
"""A very relaxed way of finding probably the right cave given almost any string which might serve to identify it
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-07-24 19:38:14 +01:00
|
|
|
lookup function modelled on GetPersonExpeditionNameLookup
|
2020-07-26 02:26:04 +01:00
|
|
|
repeated assignment each call, needs refactoring
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-07-17 13:01:53 +01:00
|
|
|
Used when parsing wallets contents.json file too in views/uploads.py
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2023-09-06 11:20:29 +01:00
|
|
|
Needs to be a proper function that raises an exception if there is a duplicate.
|
2023-01-30 19:04:36 +00:00
|
|
|
OR we could set it to return None if there are duplicates, and require the caller to
|
2023-09-06 11:20:29 +01:00
|
|
|
fall back on doing the actual database query it wants rather than using this cache shortcut
|
2020-07-26 02:26:04 +01:00
|
|
|
"""
|
2023-07-26 22:38:47 +01:00
|
|
|
|
|
|
|
duplicates = {}
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-07-25 00:58:13 +01:00
|
|
|
def checkcaveid(cave, id):
|
|
|
|
global Gcavelookup
|
|
|
|
if id not in Gcavelookup:
|
|
|
|
Gcavelookup[id] = cave
|
|
|
|
Gcave_count[id] += 1
|
|
|
|
else:
|
|
|
|
if cave == Gcavelookup[id]:
|
2023-01-30 19:04:36 +00:00
|
|
|
pass # same id, same cave
|
2023-09-06 11:20:29 +01:00
|
|
|
else: # same id but different cave, e.g. 122 => 1623-122 and 1626-122
|
2023-07-26 22:38:47 +01:00
|
|
|
duplicates[id] = 1
|
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
global Gcavelookup
|
|
|
|
if Gcavelookup:
|
|
|
|
return Gcavelookup
|
2021-03-28 23:47:47 +01:00
|
|
|
Gcavelookup = {"NONEPLACEHOLDER": None}
|
|
|
|
global Gcave_count
|
2023-01-30 19:04:36 +00:00
|
|
|
Gcave_count = defaultdict(int) # sets default value to int(0)
|
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
for cave in Cave.objects.all():
|
2021-03-28 23:47:47 +01:00
|
|
|
key = cave.official_name.lower()
|
2022-07-25 00:58:13 +01:00
|
|
|
if key != "" and key != "unamed" and key != "unnamed":
|
2023-07-26 22:38:47 +01:00
|
|
|
if Gcave_count[key] > 0:
|
|
|
|
# message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
|
|
|
|
# print(message)
|
|
|
|
# DataIssue.objects.create(parser="aliases", message=message)
|
|
|
|
duplicates[key] = 1
|
|
|
|
else:
|
|
|
|
Gcavelookup[key] = cave
|
|
|
|
Gcave_count[key] += 1
|
2020-06-29 21:15:42 +01:00
|
|
|
if cave.kataster_number:
|
2023-09-10 00:06:38 +01:00
|
|
|
# NOTE this will set an alias for "145" not "1623-145"
|
|
|
|
checkcaveid(cave, cave.kataster_number) # we do expect 1623/55 and 1626/55 to cause clash, removed below
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-07-25 00:58:13 +01:00
|
|
|
# the rest of these are 'nice to have' but may validly already be set
|
2020-06-29 21:15:42 +01:00
|
|
|
if cave.unofficial_number:
|
2022-07-25 00:58:13 +01:00
|
|
|
unoffn = cave.unofficial_number.lower()
|
2023-01-30 19:04:36 +00:00
|
|
|
checkcaveid(cave, unoffn)
|
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
if cave.filename:
|
2022-07-25 00:58:13 +01:00
|
|
|
# this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
|
2023-01-30 19:04:36 +00:00
|
|
|
fn = cave.filename.replace(".html", "").lower()
|
|
|
|
checkcaveid(cave, fn)
|
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
if cave.slug():
|
2022-07-25 00:58:13 +01:00
|
|
|
# also possibly done already
|
|
|
|
slug = cave.slug().lower()
|
2023-01-30 19:04:36 +00:00
|
|
|
checkcaveid(cave, slug)
|
2022-07-25 00:58:13 +01:00
|
|
|
|
|
|
|
# These might alse create more duplicate entries
|
2022-07-25 13:03:58 +01:00
|
|
|
# Yes, this should be set in, and imported from, settings.py
|
2023-09-10 00:06:38 +01:00
|
|
|
# On reset, these aliases only work if the cave already properly exists with an entry in :expoweb:/cave_data/
|
|
|
|
# but as the aliases are recomputed repeatedly, eventually they work on PENDING caves too
|
2023-01-30 19:04:36 +00:00
|
|
|
aliases = [
|
2023-09-10 00:06:38 +01:00
|
|
|
("1987-02", "1623-267"),
|
|
|
|
("1990-01", "1623-171"),
|
|
|
|
("1990-02", "1623-172"),
|
|
|
|
("1990-03", "1623-173"),
|
|
|
|
("1990-04", "1623-174"),
|
|
|
|
("1990-05", "1623-175"),
|
|
|
|
("1990-06", "1623-176"),
|
|
|
|
("1990-07", "1623-177"),
|
|
|
|
("1990-08", "1623-178"),
|
|
|
|
("1990-09", "1623-179"),
|
|
|
|
("1990-10", "1623-180"),
|
|
|
|
("1990-11", "1623-181"),
|
|
|
|
("1990-12", "1623-182"),
|
|
|
|
("1990-13", "1623-183"),
|
|
|
|
("1990-14", "1623-184"),
|
|
|
|
("1990-18", "1623-188"),
|
|
|
|
("1990-adam", "1623-225"),
|
|
|
|
("1993-01", "1623-200"),
|
|
|
|
("1996-02", "1623-224"),
|
|
|
|
("1996-03", "1623-223"),
|
|
|
|
("1996-04", "1623-222"),
|
|
|
|
("1996wk2", "1623-207"),
|
|
|
|
("1996wk3", "1623-208"),
|
|
|
|
("1996wk5", "1623-219"),
|
|
|
|
("1996wk6", "1623-218"),
|
|
|
|
("1996wk8", "1623-209"),
|
|
|
|
("1996wk11", "1623-268"),
|
|
|
|
("96wk11", "1623-268"),
|
|
|
|
("1998-01", "1623-201"),
|
|
|
|
("1998-03", "1623-210"),
|
|
|
|
("1999-03", "1623-204"),
|
|
|
|
("1999-04", "1623-230"),
|
|
|
|
("1999-10", "1623-162"),
|
|
|
|
("1999-bo-01", "1623-205"),
|
|
|
|
("1999-ob-03", "1623-226"),
|
|
|
|
("1999-ob-04", "1623-227"),
|
|
|
|
("99ob02", "1999-ob-02"), # exists? pending
|
|
|
|
("1623-99ob02", "1999-ob-02"),
|
|
|
|
("gassischacht", "1623-259"),
|
|
|
|
("1623-gassischacht", "1623-259"),
|
|
|
|
("2007-gassischacht", "1623-259"),
|
|
|
|
("2000-03", "1623-214"),
|
|
|
|
("2000-04", "1623-220"),
|
|
|
|
("2000-05", "1623-215"),
|
|
|
|
("2000-06", "1623-216"),
|
|
|
|
("2000-07", "1623-217"),
|
|
|
|
("2000-09", "1623-234"),
|
|
|
|
("2000-aa-01", "1623-250"),
|
|
|
|
("2001-04", "1623-239"),
|
|
|
|
("2001-05", "1623-243"),
|
|
|
|
("2002-01", "1623-249"),
|
|
|
|
("2002-02", "1623-234"),
|
|
|
|
("2002-04", "1623-242"),
|
|
|
|
("2002-05", "1623-294"),
|
|
|
|
("quarriesd", "1623-2002-08"),
|
|
|
|
("1623-quarriesd", "1623-2002-08"),
|
|
|
|
("2002-08", "1623-2002-08"),
|
|
|
|
("2003-01", "1623-256"),
|
|
|
|
("2003-02", "1623-248"),
|
|
|
|
("2003-03", "1623-247"),
|
|
|
|
("2003-04", "1623-241"),
|
|
|
|
("2003-05", "1623-246"),
|
|
|
|
("2003-06", "1623-161"),
|
|
|
|
("2003-08", "1623-240"),
|
|
|
|
("2003-09", "1623-245"),
|
|
|
|
("2003-10", "1623-244"),
|
|
|
|
("2004-01", "1623-269"),
|
|
|
|
("2004-03", "1623-270"),
|
|
|
|
("2004-11", "1623-251"),
|
|
|
|
("2004-12", "1623-161"),
|
|
|
|
("2004-15", "1623-253"),
|
|
|
|
("2004-19", "1623-254"),
|
|
|
|
("2004-20", "1623-255"),
|
|
|
|
("2005-04", "1623-204"),
|
|
|
|
("2005-05", "1623-264"),
|
|
|
|
("2005-07", "1623-257"),
|
|
|
|
("2006-08", "1623-285"),
|
|
|
|
("2006-09", "1623-298"),
|
|
|
|
("2007-71", "1623-271"),
|
|
|
|
("2010-01", "1623-263"),
|
|
|
|
("2010-03", "1623-293"),
|
2023-10-05 12:11:05 +01:00
|
|
|
("2012-70", "1623-296"),
|
|
|
|
("1623-2012-70", "1623-296"),
|
|
|
|
("2012-dd-05", "1623-286"),
|
|
|
|
("2012-dd-08", "1623-297"),
|
2023-09-10 00:06:38 +01:00
|
|
|
# ("2011-01", "1623-292"), seems to be a mistake
|
|
|
|
("2012-dd-05", "1623-286"),
|
2023-10-18 23:01:28 +01:00
|
|
|
("2012-0w-01", "2012-ow-01"), # catch the typo: zero for 'O'
|
2023-09-10 00:06:38 +01:00
|
|
|
("2012-ns-13", "1623-292"),
|
|
|
|
("2014-neo-01", "1623-273"),
|
|
|
|
("2014-sd-01", "1623-274"),
|
|
|
|
("2014-ms-14", "1623-287"),
|
|
|
|
("2015-mf-06", "1623-288"),
|
|
|
|
("2016-jb-01", "1623-289"),
|
2023-10-16 20:47:28 +01:00
|
|
|
("2016-01", "1623-2012-ns-07"),
|
|
|
|
("2016-03", "1623-2012-ns-12"),
|
|
|
|
("2016-04", "1623-2012-ns-10"),
|
2023-09-10 00:06:38 +01:00
|
|
|
("2017-pw-01", "1623-277"),
|
|
|
|
("2017_cucc_24", "1623-291"), # note _ not -
|
|
|
|
("2017_cucc_23", "1623-295"), # note _ not -
|
|
|
|
("2017_cucc_28", "1623-290"), # note _ not -
|
2023-10-10 12:31:17 +01:00
|
|
|
("2013-cucc-03", "1623-2013-03"),
|
2023-10-10 10:41:32 +01:00
|
|
|
("2018-ps-01", "1623-114"),
|
2023-09-10 00:06:38 +01:00
|
|
|
("bs17", "1623-283"),
|
2023-10-18 23:01:28 +01:00
|
|
|
("1976/b11", "1623-198"), # / in name with crash url resolution, bad idea, fix in original doc
|
|
|
|
("1976/b8", "1623-197"), # / in name with crash url resolution, bad idea, fix in original doc
|
|
|
|
("1976/b9", "1623-190"), # / in name with crash url resolution, bad idea, fix in original doc
|
2023-09-10 00:06:38 +01:00
|
|
|
("1976-b11", "1623-198"),
|
|
|
|
("1976-b8", "1623-197"),
|
|
|
|
("1976-b9", "1623-190"),
|
2023-10-18 23:01:28 +01:00
|
|
|
("b11", "1623-198"),
|
|
|
|
("b8", "1623-197"),
|
|
|
|
("b9", "1623-190"),
|
2023-09-10 00:06:38 +01:00
|
|
|
("2011-01-bs30", "1623-190"),
|
|
|
|
("bs30", "1623-190"),
|
|
|
|
("2011-01", "1623-190"),
|
2023-10-05 12:11:05 +01:00
|
|
|
("2002-x11", "1623-2005-08"),
|
2023-10-05 12:45:46 +01:00
|
|
|
("2002-x12", "2005-07"),
|
2023-09-10 00:06:38 +01:00
|
|
|
("2002-x13", "1623-2005-06"),
|
2022-07-25 00:58:13 +01:00
|
|
|
("2002-x14", "2005-05"),
|
2023-09-10 00:06:38 +01:00
|
|
|
("kh", "1623-161"),
|
|
|
|
("161-kh", "1623-161"),
|
|
|
|
("204-steinBH", "1623-204"),
|
|
|
|
("stonebridge", "1623-204"),
|
|
|
|
("hauchhole", "1623-234"),
|
|
|
|
("hauch", "1623-234"),
|
|
|
|
("234-hauch", "1623-234"),
|
|
|
|
("tunnocks", "1623-258"),
|
|
|
|
("balcony", "1623-264"),
|
|
|
|
("balkon", "1623-264"),
|
|
|
|
("fgh", "1623-290"),
|
|
|
|
("fishface", "1623-290"),
|
|
|
|
("gsh", "1623-291"),
|
|
|
|
("tempest", "1623-2023-lc-01"),
|
|
|
|
|
|
|
|
("1623-2023-kt-02", "2023-kt-02"),
|
2023-09-27 19:44:04 +01:00
|
|
|
|
2023-09-10 00:06:38 +01:00
|
|
|
# 1626 aliases
|
|
|
|
("langgustl", "1626-354"),
|
|
|
|
("2018-dm-07", "1626-359"),
|
2023-10-15 16:39:00 +01:00
|
|
|
("1626-2018-dm-07", "1626-359"),
|
2022-07-25 00:58:13 +01:00
|
|
|
("homecoming", "2018-dm-07"),
|
|
|
|
("heimkommen", "2018-dm-07"),
|
2022-07-25 13:03:58 +01:00
|
|
|
("Heimkehr", "2018-dm-07"),
|
2023-09-01 17:34:05 +01:00
|
|
|
("hc", "2018-dm-07"),
|
2023-09-26 17:44:06 +01:00
|
|
|
("loveshack", "1626-2018-ad-03"),
|
|
|
|
("crushed-garlic", "1626-2018-ad-03"),
|
2023-09-27 23:01:32 +01:00
|
|
|
("BuzzardHole", "1626-2023-buzzardhole"),
|
|
|
|
("2023-BuzzardHole", "1626-2023-buzzardhole"),
|
|
|
|
("1626-2023-BuzzardHole", "1626-2023-buzzardhole"),
|
2023-09-26 17:44:06 +01:00
|
|
|
|
2022-07-25 00:58:13 +01:00
|
|
|
]
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2023-09-10 00:06:38 +01:00
|
|
|
for key, alias in aliases:
|
|
|
|
if alias in Gcavelookup:
|
|
|
|
if key in Gcavelookup:
|
2022-07-25 00:58:13 +01:00
|
|
|
# already set by a different method, but is it the same cave?
|
2023-09-10 00:06:38 +01:00
|
|
|
if Gcavelookup[key] == Gcavelookup[alias]:
|
2022-07-25 00:58:13 +01:00
|
|
|
pass
|
|
|
|
else:
|
2023-09-10 00:06:38 +01:00
|
|
|
# aliases wrong - these are different caves
|
|
|
|
message = f" - Alias list is mis-identifying different caves {key}:{Gcavelookup[key]} != {alias}:{Gcavelookup[alias]} "
|
|
|
|
print(message)
|
|
|
|
DataIssue.objects.create(parser="alias", message=message)
|
|
|
|
# Gcave_count[key] += 1
|
|
|
|
Gcavelookup[key] = Gcavelookup[alias]
|
2022-07-25 00:58:13 +01:00
|
|
|
else:
|
2023-09-10 00:06:38 +01:00
|
|
|
message = f" * Coding or cave existence mistake, cave for id '{alias}' does not exist. Expecting to set alias '{key}' to it"
|
2023-09-06 11:20:29 +01:00
|
|
|
print(message)
|
2023-10-05 12:11:05 +01:00
|
|
|
DataIssue.objects.update_or_create(parser="aliases", message=message)
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
addmore = {}
|
|
|
|
for id in Gcavelookup:
|
2023-01-30 19:04:36 +00:00
|
|
|
addmore[id.replace("-", "_")] = Gcavelookup[id]
|
2023-09-10 00:06:38 +01:00
|
|
|
addmore[id.replace("-", "_")] = Gcavelookup[id]
|
|
|
|
|
|
|
|
addmore[id.replace("-", "_").upper()] = Gcavelookup[id]
|
|
|
|
addmore[id.replace("-", "_").lower()] = Gcavelookup[id]
|
|
|
|
addmore[id.replace("_", "-").upper()] = Gcavelookup[id]
|
|
|
|
addmore[id.replace("_", "-").lower()] = Gcavelookup[id]
|
2020-06-29 21:15:42 +01:00
|
|
|
Gcavelookup = {**addmore, **Gcavelookup}
|
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
addmore = {}
|
|
|
|
|
2023-07-26 22:38:47 +01:00
|
|
|
ldup = []
|
|
|
|
for d in duplicates:
|
2023-09-27 19:44:04 +01:00
|
|
|
# if an alias resolves to 2 or more caves, remove it as an alias
|
|
|
|
            # NOTE such an alias is restored, assuming a 1623 area, when parsing Wallets - but only wallets.
|
2023-07-26 22:38:47 +01:00
|
|
|
Gcavelookup.pop(d)
|
2023-09-10 00:06:38 +01:00
|
|
|
Gcave_count.pop(d) # so should not get a duplicate msg below..
|
2023-07-26 22:38:47 +01:00
|
|
|
ldup.append(d)
|
|
|
|
if ldup:
|
2024-02-05 21:18:40 +00:00
|
|
|
message = f" - Ambiguous aliases being removed: {ldup}"
|
2023-07-26 22:38:47 +01:00
|
|
|
print(message)
|
2024-02-05 21:18:40 +00:00
|
|
|
update_dataissue("aliases ok", message)
|
|
|
|
|
2021-03-28 23:47:47 +01:00
|
|
|
for c in Gcave_count:
|
|
|
|
if Gcave_count[c] > 1:
|
2022-07-25 00:58:13 +01:00
|
|
|
message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
|
|
|
|
print(message)
|
2023-10-04 16:22:54 +01:00
|
|
|
DataIssue.objects.update_or_create(parser="aliases", message=message)
|
2023-07-26 22:38:47 +01:00
|
|
|
|
2020-06-29 21:15:42 +01:00
|
|
|
return Gcavelookup
|
2024-02-05 21:18:40 +00:00
|
|
|
|
|
|
|
def update_dataissue(parsercode, message):
    """Create or refresh a DataIssue record for the given parser code.

    Normally a single update_or_create() is enough, but MariaDB (under
    Django 3.2) sometimes raises when it shouldn't; in that case we fall
    back to finding the already-existing row(s) and rewriting the first
    one with a "#2 " marker so the retry is visible in the issue list.

    parsercode -- short identifier of the parser reporting the issue
    message    -- human-readable description of the issue
    """
    try:
        DataIssue.objects.update_or_create(parser=parsercode, message=message)
    except Exception:
        # Complete bollocks, but MariaDB barfs when it shouldn't : Django 3.2
        issues = DataIssue.objects.filter(parser=parsercode, message=message)
        # FIX: removed stray ')' that was inside the original log string.
        print(f" # EXCEPTION looking for DataIssue '{parsercode}' {message}")
        if len(issues) > 1:
            print(f" # MULTIPLE DataIssues '{parsercode}' {message}")
            for s in issues:
                print(f" # {s.id=} DataIssues '{parsercode}' {message}")  # .id is Django internal field, not one of ours
        if issues:
            # FIX: original code referenced undefined name 'issue' here, which
            # raised NameError whenever this fallback path was taken. Also guard
            # against an empty queryset (the exception may not mean a row exists).
            issues[0].message = "#2 " + message
            issues[0].save()
|