# forked from expo/troggle
import os
|
|
import re
|
|
from collections import defaultdict
|
|
from datetime import datetime, timezone
|
|
from pathlib import Path
|
|
|
|
from django.db import models
|
|
from django.template import loader
|
|
|
|
import settings
|
|
from troggle.core.models.logbooks import QM
|
|
from troggle.core.models.survex import SurvexStation
|
|
from troggle.core.models.troggle import DataIssue, TroggleModel
|
|
from troggle.core.utils import TROG, writetrogglefile
|
|
|
|
# Use the TROG global object to cache the cave lookup list. No good for multi-user..
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

# NOTE(review): the two assignments below immediately discard the cached values
# read from TROG above, so GetCaveLookup() rebuilds the cache on first use in
# every process — confirm whether the TROG reads are still wanted.
Gcavelookup = None
Gcave_count = None

# NOTE: this is NOT the module docstring (code precedes it); it is a bare
# string expression left here as a description of the module.
"""The model declarations for Areas, Caves and Entrances
"""

# Free-form maintainer TODO notes, kept as module-level data.
todo = """
- Find out why we have separate objects CaveSlug and EntranceSlug and why
these are not just a single field on the Model. Do we ever need more
than one slug per cave or entrance? Surely that would break everything??

- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
"""
|
|
|
|
|
|
class Area(TroggleModel):
    """A cave area: either a kataster area such as '1623' or a sub-area
    nested beneath one via the self-referential 'super' link."""

    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    super = models.ForeignKey("Area", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        # Prefix with the parent area chain when there is one.
        if not self.super:
            return str(self.short_name)
        return str(self.super) + " - " + str(self.short_name)

    def kat_area(self):
        """Return the kataster area code ('1623' etc.) for this area, walking
        up the 'super' chain if this area is a sub-area; None if no match."""
        if self.short_name in ("1623", "1626", "1624", "1627"):
            return self.short_name
        if self.super:
            return self.super.kat_area()
|
|
|
|
|
|
class CaveAndEntrance(models.Model):
    """Through-model joining a Cave to one of its Entrances, carrying the
    per-cave entrance letter (the 'b' in e.g. '1623-161b')."""

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entrance_letter"]]
        ordering = ["entrance_letter"]

    def __str__(self):
        # Cave identifier immediately followed by the letter, e.g. '1623-161b'.
        return f"{self.cave}{self.entrance_letter}"
|
|
|
|
|
|
class Cave(TroggleModel):
    """A cave in the expedition data set.

    Identified by a kataster number (official Austrian register) or, before
    registration, by an unofficial number. A cave can have several entrances
    (via CaveAndEntrance) and several slugs (via CaveSlug), and is serialised
    back to an XML description file under settings.CAVEDESCRIPTIONS.
    """

    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    # NOTE(review): this field name is shadowed by the entrances() method
    # defined later in the class body — confirm which one callers rely on.
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    explorers = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    equipment = models.TextField(blank=True, null=True)
    references = models.TextField(blank=True, null=True)
    survey = models.TextField(blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    depth = models.CharField(max_length=100, blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)

    # class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating

    # href = models.CharField(max_length=100)

    class Meta:
        ordering = ("kataster_code", "unofficial_number")

    def hassurvey(self):
        """Return "Yes" if the survey field contains an image or link,
        "Missing" if there is a centre line but no usable survey markup,
        "No" if there is no underground centre line at all."""
        if not self.underground_centre_line:
            return "No"
        # Guard added: survey is nullable, so .find() on None would crash.
        if self.survey and (
            self.survey.find("<img") > -1
            or self.survey.find("<a") > -1
            or self.survey.find("<IMG") > -1
            or self.survey.find("<A") > -1
        ):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        """Return "Yes"/"Missing"/"No" depending on whether a survex file is recorded."""
        if not self.underground_centre_line:
            return "No"
        # BUGFIX: was `self.survex_filcavee`, a typo raising AttributeError.
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        """Return the primary slug if one exists, else any slug, else None."""
        primary_slugs = self.caveslug_set.filter(primary=True)
        if primary_slugs:
            return primary_slugs[0].slug
        slugs = self.caveslug_set.filter()
        if slugs:
            return slugs[0].slug
        return None  # implicit in the original; made explicit

    def ours(self):
        """True when 'CUCC' appears in the explorers text."""
        # Guard added: explorers is nullable; re.search(pattern, None) raises.
        return bool(self.explorers and re.search(r"CUCC", self.explorers))

    def reference(self):
        """Return the '<area>-<number>' identifier, preferring the kataster number."""
        if self.kataster_number:
            return f"{self.kat_area()}-{self.kataster_number}"
        return f"{self.kat_area()}-{self.unofficial_number}"

    def get_absolute_url(self):
        """Return the cave's URL path.

        The original branches on kataster_number / unofficial_number /
        official_name computed nothing (pass / a discarded .lower()) and
        have been removed; behaviour is unchanged.
        """
        # return settings.URL_ROOT + '/cave/' + href + '/'
        # return urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,})) # WRONG. This produces /cave/161 and should be /1623/161
        return Path(settings.URL_ROOT) / self.url  # not good Django style.. NEEDS actual URL

    def url_parent(self):
        """Return the parent path of this cave's URL."""
        return self.url.rsplit("/", 1)[0]

    def __str__(self, sep=": "):
        return str(self.slug())

    def get_QMs(self):
        """Searches for all QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/4.0/ref/models/querysets/#order-by
        return qms  # a QuerySet

    def kat_area(self):
        """Return the kataster area ('1623' etc.) of the first area that has one."""
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()
        return None  # implicit in the original; made explicit

    def entrances(self):
        """Return the CaveAndEntrance rows linking this cave to its entrances."""
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        """True when this cave has exactly one entrance."""
        # count() lets the database count instead of fetching every row.
        return CaveAndEntrance.objects.filter(cave=self).count() == 1

    def entrancelist(self):
        """Compress the entrance letters into a range string, e.g. 'a–c, e'.

        BUGFIX: the original never reset its run tracking after a gap, so it
        emitted output like 'a, a, a', returned '–' for caves with no lettered
        entrances and 'a, a' for a single letter.
        """
        letters = sorted(
            ce.entrance_letter
            for ce in CaveAndEntrance.objects.filter(cave=self)
            if ce.entrance_letter
        )
        if not letters:
            return ""
        # NOTE(review): chr(ord(..)) assumes single-character letters, as the
        # original did, although the field allows up to 20 characters.
        parts = []
        start = prev = letters[0]
        for letter in letters[1:]:
            if chr(ord(prev) + 1) == letter:
                prev = letter  # extends the current consecutive run
                continue
            parts.append(start if start == prev else start + "–" + prev)
            start = prev = letter
        parts.append(start if start == prev else start + "–" + prev)
        return ", ".join(parts)

    def writeDataFile(self):
        """Render the cave XML description template and write it to CAVEDESCRIPTIONS.

        (A leftover debug print of the timestamp has been removed; the sibling
        Entrance.writeDataFile never had one.)
        """
        filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def file_output(self):
        """Return (path, rendered XML content, encoding) without writing anything."""
        filepath = Path(os.path.join(settings.CAVEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def getArea(self):
        """Return the most specific (lowest) area this cave belongs to,
        removing any area that is the parent of another assigned area."""
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.super in areas:
                try:
                    lowestareas.remove(area.super)
                except ValueError:  # narrowed from bare except: already removed
                    pass
        return lowestareas[0]
|
|
|
|
|
|
class EntranceSlug(models.Model):
    """Maps a URL-safe slug to an Entrance.

    An entrance may have several slugs, at most one of which is primary.
    (See the module-level 'todo' note questioning why this is a separate model.)
    """

    # The entrance this slug names; deleting the entrance deletes its slugs.
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    # Globally unique slug string.
    slug = models.SlugField(max_length=50, unique=True)
    # True for the one canonical slug of the entrance.
    primary = models.BooleanField(default=False)
|
|
|
|
|
|
class Entrance(TroggleModel):
    """A physical cave entrance.

    An entrance can belong to several caves (via CaveAndEntrance) and is
    located by survex stations and/or compass bearings. It is serialised
    back to an XML file under settings.ENTRANCEDESCRIPTIONS.
    """

    name = models.CharField(max_length=100, blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    # How the entrance is physically marked on the surface.
    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    # How findable the entrance is; "S" means surveyed coordinates exist.
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)
    # Survex station names locating the entrance, in decreasing exactness.
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)
    # Lazily-filled cache of slug(); persisted on the row.
    cached_primary_slug = models.CharField(max_length=200, blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entrance_letter"]

    def __str__(self):
        return str(self.slug())

    def exact_location(self):
        """SurvexStation for exact_station, via the custom lookup() manager method."""
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        """SurvexStation for other_station, via the custom lookup() manager method."""
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        """Return a human-readable location string prefixed by findability.

        Tries tag, then exact, then other station; falls back to bearings.
        NOTE(review): raises KeyError if findability is None or an unknown
        code — as the original did; confirm callers guarantee a valid value.
        """
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except Exception:  # narrowed from a bare except
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except Exception:
                # BUGFIX: the original reported tag_station here.
                return r + f"{self.exact_station} Exact Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except Exception:
                # BUGFIX: the original reported tag_station here.
                return r + f"{self.other_station} Other Station not in dataset"
        # BUGFIX: the original compared the FINDABLE_CHOICES tuple itself to
        # "S" (always False); the intent is clearly the findability code.
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """Return the most exact station name available, or None."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station
        return None  # implicit in the original; made explicit

    def has_photo(self):
        """Return "Yes" if the photo text contains an image or link,
        "Missing" if there is photo text without one, "No" if none at all."""
        if not self.photo:
            return "No"
        if (
            self.photo.find("<img") > -1
            or self.photo.find("<a") > -1
            or self.photo.find("<IMG") > -1
            or self.photo.find("<A") > -1
        ):
            return "Yes"
        return "Missing"

    def marking_val(self):
        """Human-readable label for the marking code; None if unknown."""
        for code, label in self.MARKING_CHOICES:
            if code == self.marking:
                return label

    def findability_val(self):
        """Human-readable label for the findability code; None if unknown."""
        for code, label in self.FINDABLE_CHOICES:
            if code == self.findability:
                return label

    def tag(self):
        """SurvexStation for tag_station, via the custom lookup() manager method."""
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        """True unless the entrance is surveyed, photographed and tagged.

        BUGFIX: the original tested `not self.has_photo` on the bound method
        object, which is always truthy, so the photo check never fired.
        """
        return self.findability != "S" or self.has_photo() != "Yes" or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): neither get_root() nor .title is defined on this model
        # in this file — this looks broken/legacy; confirm before relying on it.
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        """Return (and lazily cache on the row) the primary slug, else any slug."""
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary=True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def cavelist(self):
        """Return the caves this entrance belongs to."""
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def get_file_path(self):
        """Path of this entrance's XML description file."""
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        """Return (path, rendered XML content, encoding) without writing anything."""
        filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render the entrance XML description template and write it out."""
        filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def url_parent(self):
        """Parent path of this entrance's URL, falling back to its single cave's."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        caves = self.cavelist()  # hoisted: the original ran this query twice
        if len(caves) == 1:
            return caves[0].url_parent()
        return ""
|
|
|
|
|
|
def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any string which might serve to identify it

    lookup function modelled on GetPersonExpeditionNameLookup
    repeated assignment each call, needs refactoring

    Used when parsing wallets contents.json file too in views/uploads.py

    Does NOT detect duplicates! Needs fixing.
    Needs to be a proper function that raises an exception if there is a duplicate.
    OR we could set it to return None if there are duplicates, and require the caller to
    fall back on doing the actual database query it wants rather than using this cache shortcut
    """

    def checkcaveid(cave, ident):
        """Register `ident` as an alias for `cave`. If the same id is already
        claimed by a *different* cave, the ambiguous alias is removed
        entirely and a DataIssue recorded."""
        global Gcavelookup
        if ident not in Gcavelookup:
            Gcavelookup[ident] = cave
            Gcave_count[ident] += 1
        else:
            if cave == Gcavelookup[ident]:
                pass  # same id, same cave
            else:  # same id but different cave
                message = f" - Warning: same alias id '{ident:3}' for two caves '{Gcavelookup[ident]}' and '{cave}'. Removing this shorthand alias entirely."
                Gcavelookup.pop(ident)
                print(message)
                DataIssue.objects.create(parser="aliases", message=message)

    global Gcavelookup
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}
    global Gcave_count
    Gcave_count = defaultdict(int)  # sets default value to int(0)

    DataIssue.objects.filter(parser="aliases").delete()

    for cave in Cave.objects.all():
        key = cave.official_name.lower()
        if key != "" and key != "unamed" and key != "unnamed":
            Gcavelookup[key] = cave
            Gcave_count[key] += 1
        if cave.kataster_number:
            checkcaveid(cave, cave.kataster_number)  # we do expect 1623/55 and 1626/55 to cause a warning message

        # the rest of these are 'nice to have' but may validly already be set
        if cave.unofficial_number:
            unoffn = cave.unofficial_number.lower()
            checkcaveid(cave, unoffn)

        if cave.filename:
            # this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
            fn = cave.filename.replace(".html", "").lower()
            checkcaveid(cave, fn)

        if cave.slug():
            # also possibly done already
            slug = cave.slug().lower()
            checkcaveid(cave, slug)

    # These might also create more duplicate entries
    # Yes, this should be set in, and imported from, settings.py
    aliases = [
        ("1987-02", "267"),
        ("1990-01", "171"),
        ("1990-02", "172"),
        ("1990-03", "173"),
        ("1990-04", "174"),
        ("1990-05", "175"),
        ("1990-06", "176"),
        ("1990-07", "177"),
        ("1990-08", "178"),
        ("1990-09", "179"),
        ("1990-10", "180"),
        ("1990-11", "181"),
        ("1990-12", "182"),
        ("1990-13", "183"),
        ("1990-14", "184"),
        ("1990-18", "188"),
        ("1990-adam", "225"),
        ("1993-01", "200"),
        ("1996-02", "224"),
        ("1996-03", "223"),
        ("1996-04", "222"),
        ("1996wk2", "207"),
        ("1996wk3", "208"),
        ("1996wk5", "219"),
        ("1996wk6", "218"),
        ("1996wk8", "209"),
        ("1996wk11", "268"),
        ("96wk11", "268"),
        ("1998-01", "201"),
        ("1998-03", "210"),
        ("1999-03", "204"),
        ("1999-04", "230"),
        ("1999-10", "162"),
        ("1999-bo-01", "205"),
        ("1999-ob-03", "226"),
        ("1999-ob-04", "227"),
        ("2000-01", "231"),
        ("2000-03", "214"),
        ("2000-04", "220"),
        ("2000-05", "215"),
        ("2000-06", "216"),
        ("2000-07", "217"),
        ("2000-09", "234"),
        ("2000-aa-01", "250"),
        ("2001-04", "239"),
        ("2001-05", "243"),
        ("2002-01", "249"),
        ("2002-02", "234"),
        ("2002-04", "242"),
        ("2002-05", "294"),
        ("2003-01", "256"),
        ("2003-02", "248"),
        ("2003-03", "247"),
        ("2003-04", "241"),
        ("2003-05", "246"),
        ("2003-06", "161"),
        ("2003-08", "240"),
        ("2003-09", "245"),
        ("2003-10", "244"),
        ("2004-01", "269"),
        ("2004-03", "270"),
        ("2004-11", "251"),
        ("2004-12", "161"),
        ("2004-15", "253"),
        ("2004-19", "254"),
        ("2004-20", "255"),
        ("2005-04", "204"),
        ("2005-05", "264"),
        ("2005-07", "257"),
        ("2006-08", "285"),
        ("2006-09", "298"),
        ("2007-71", "271"),
        ("2010-01", "263"),
        ("2010-03", "293"),
        ("2011-01", "292"),
        ("2012-dd-05", "286"),
        ("2012-ns-13", "292"),
        ("2014-neo-01", "273"),
        ("2014-sd-01", "274"),
        ("2014-ms-14", "287"),
        ("2015-mf-06", "288"),
        ("2016-jb-01", "289"),
        ("2017-pw-01", "277"),
        ("2018-dm-07", "359"),  # NB this is 1626
        ("2017_cucc_24", "291"),  # note _ not -
        ("2017_cucc_23", "295"),  # note _ not -
        ("2017_cucc_28", "290"),  # note _ not -
        ("bs17", "283"),
        ("1976/b11", "198"),
        ("1976/b8", "197"),
        ("1976/b9", "190"),
        ("b11", "1976/b11"),
        ("b8", "1976/b8"),
        ("b9", "1976/b9"),
        ("2011-01-bs30", "190"),
        ("bs30", "190"),
        ("2011-01", "190"),
        ("quarriesd", "2002-08"),
        ("2002-x11", "2005-08"),
        ("2002-x12", "2005-07"),
        ("2002-x13", "2005-06"),
        ("2002-x14", "2005-05"),
        ("kh", "161"),
        ("161-kh", "161"),
        ("204-steinBH", "204"),
        ("stonebridge", "204"),
        ("hauchhole", "234"),
        ("hauch", "234"),
        ("234-hauch", "234"),
        ("tunnocks", "258"),
        ("balcony", "264"),
        ("balkon", "264"),
        ("fgh", "290"),
        ("gsh", "291"),
        ("homecoming", "2018-dm-07"),
        ("heimkommen", "2018-dm-07"),
        ("Heimkehr", "2018-dm-07"),
        ("99ob02", "1999-ob-02"),
    ]

    for alias, target in aliases:
        if target in Gcavelookup:
            if alias in Gcavelookup:
                # already set by a different method, but is it the same cave?
                if Gcavelookup[alias] == Gcavelookup[target]:
                    pass
                else:
                    # Conflicting alias: count the duplicate, then overwrite.
                    Gcave_count[alias] += 1
            # Register (or re-point) the alias to the target's cave.
            Gcavelookup[alias] = Gcavelookup[target]
        else:
            message = f" * Coding or cave existence mistake, cave for id '{target}' does not exist. Expecting to set alias '{alias}' to it"
            # print(message)
            DataIssue.objects.create(parser="aliases", message=message)

    # Generate hyphen/underscore and upper-case variants of every key; real
    # entries win over generated ones in the merge below.
    addmore = {}
    for key in Gcavelookup:
        addmore[key.replace("-", "_")] = Gcavelookup[key]
        addmore[key.replace("_", "-")] = Gcavelookup[key]
        addmore[key.upper()] = Gcavelookup[key]
    Gcavelookup = {**addmore, **Gcavelookup}

    for c in Gcave_count:
        if Gcave_count[c] > 1:
            # BUGFIX: the original swapped the id and the cave in this message,
            # and indexed Gcavelookup[c] directly, which can KeyError when the
            # ambiguous alias was removed by checkcaveid(); .get() is safe.
            message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{c}' cave __str__:'{Gcavelookup.get(c)}'"
            print(message)
            DataIssue.objects.create(parser="aliases", message=message)
            # logdataissues[Gcavelookup[c]]=message # pending troggle-wide issues logging system

    return Gcavelookup
|