forked from expo/troggle
ran 'black' to reformat all the core files
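The diff that follows is the output of running the 'black' code formatter over the core model files. The exact command line used for this commit is not recorded here, so the snippet below is only a minimal sketch of the kind of rewrite black performs, using its documented format_str API on two lines taken from the diff; the assumption that black's default Mode() settings match the options used for the real run is mine, not the commit's.

    import black

    # Two lines copied from the diff below, in the pre-commit style:
    # single quotes and no space after a comma.
    src = (
        "Gcavelookup = TROG['caves']['gcavelookup']\n"
        "kataster_code = models.CharField(max_length=20,blank=True, null=True)\n"
    )

    # black's default Mode() (88-column lines, double-quote preference) is
    # assumed here to approximate a plain `black .` run over the repository.
    print(black.format_str(src, mode=black.Mode()), end="")
    # -> Gcavelookup = TROG["caves"]["gcavelookup"]
    # -> kataster_code = models.CharField(max_length=20, blank=True, null=True)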
@@ -23,33 +23,33 @@ from django.urls import reverse
import settings
from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexStation
from troggle.core.models.troggle import (DataIssue, Expedition, Person,
TroggleModel)
from troggle.core.models.troggle import DataIssue, Expedition, Person, TroggleModel
from troggle.core.utils import TROG, writetrogglefile

# Use the TROG global object to cache the cave lookup list. No good for multi-user..
Gcavelookup = TROG['caves']['gcavelookup']
Gcave_count = TROG['caves']['gcavecount']
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

Gcavelookup = None
Gcave_count = None

'''The model declarations for Areas, Caves and Entrances
'''
"""The model declarations for Areas, Caves and Entrances
"""

todo='''
todo = """
- Find out why we have separate objects CaveSlug and EntranceSlug and why
these are not just a single field on the Model. Do we ever need more
than one slug per cave or entrance? Surely that would break everything??

- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
'''
"""


class Area(TroggleModel):
short_name = models.CharField(max_length=100)
name = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
super = models.ForeignKey('Area', blank=True, null=True, on_delete=models.SET_NULL)
super = models.ForeignKey("Area", blank=True, null=True, on_delete=models.SET_NULL)

def __str__(self):
if self.super:
@@ -63,56 +63,62 @@ class Area(TroggleModel):
elif self.super:
return self.super.kat_area()


class CaveAndEntrance(models.Model):
cave = models.ForeignKey('Cave',on_delete=models.CASCADE)
entrance = models.ForeignKey('Entrance',on_delete=models.CASCADE)
entrance_letter = models.CharField(max_length=20,blank=True, null=True)
cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
entrance_letter = models.CharField(max_length=20, blank=True, null=True)

class Meta:
unique_together = [['cave', 'entrance'], ['cave', 'entrance_letter']]
ordering = ['entrance_letter']

unique_together = [["cave", "entrance"], ["cave", "entrance_letter"]]
ordering = ["entrance_letter"]

def __str__(self):
return str(self.cave) + str(self.entrance_letter)



class Cave(TroggleModel):
# too much here perhaps,
# too much here perhaps,
official_name = models.CharField(max_length=160)
area = models.ManyToManyField(Area, blank=True)
kataster_code = models.CharField(max_length=20,blank=True, null=True)
kataster_number = models.CharField(max_length=10,blank=True, null=True)
unofficial_number = models.CharField(max_length=60,blank=True, null=True)
entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
explorers = models.TextField(blank=True,null=True)
underground_description = models.TextField(blank=True,null=True)
equipment = models.TextField(blank=True,null=True)
references = models.TextField(blank=True,null=True)
survey = models.TextField(blank=True,null=True)
kataster_status = models.TextField(blank=True,null=True)
underground_centre_line = models.TextField(blank=True,null=True)
notes = models.TextField(blank=True,null=True)
length = models.CharField(max_length=100,blank=True, null=True)
depth = models.CharField(max_length=100,blank=True, null=True)
extent = models.CharField(max_length=100,blank=True, null=True)
survex_file = models.CharField(max_length=100,blank=True, null=True)
description_file = models.CharField(max_length=200,blank=True, null=True)
url = models.CharField(max_length=200,blank=True, null=True)
kataster_code = models.CharField(max_length=20, blank=True, null=True)
kataster_number = models.CharField(max_length=10, blank=True, null=True)
unofficial_number = models.CharField(max_length=60, blank=True, null=True)
entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
explorers = models.TextField(blank=True, null=True)
underground_description = models.TextField(blank=True, null=True)
equipment = models.TextField(blank=True, null=True)
references = models.TextField(blank=True, null=True)
survey = models.TextField(blank=True, null=True)
kataster_status = models.TextField(blank=True, null=True)
underground_centre_line = models.TextField(blank=True, null=True)
notes = models.TextField(blank=True, null=True)
length = models.CharField(max_length=100, blank=True, null=True)
depth = models.CharField(max_length=100, blank=True, null=True)
extent = models.CharField(max_length=100, blank=True, null=True)
survex_file = models.CharField(max_length=100, blank=True, null=True)
description_file = models.CharField(max_length=200, blank=True, null=True)
url = models.CharField(max_length=200, blank=True, null=True)
filename = models.CharField(max_length=200)

#class Meta:
# class Meta:
# unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
# FIXME Kataster Areas and CUCC defined sub areas need seperating
# FIXME Kataster Areas and CUCC defined sub areas need seperating

# href = models.CharField(max_length=100)

#href = models.CharField(max_length=100)

class Meta:
ordering = ('kataster_code', 'unofficial_number')
ordering = ("kataster_code", "unofficial_number")

def hassurvey(self):
if not self.underground_centre_line:
return "No"
if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
if (
self.survey.find("<img") > -1
or self.survey.find("<a") > -1
or self.survey.find("<IMG") > -1
or self.survey.find("<A") > -1
):
return "Yes"
return "Missing"

@@ -122,9 +128,9 @@ class Cave(TroggleModel):
if self.survex_file:
return "Yes"
return "Missing"
|
||||
|
||||
def slug(self):
|
||||
primarySlugs = self.caveslug_set.filter(primary = True)
|
||||
primarySlugs = self.caveslug_set.filter(primary=True)
|
||||
if primarySlugs:
|
||||
return primarySlugs[0].slug
|
||||
else:
|
||||
@@ -133,14 +139,14 @@ class Cave(TroggleModel):
|
||||
return slugs[0].slug
|
||||
|
||||
def ours(self):
|
||||
return bool(re.search(r'CUCC', self.explorers))
|
||||
return bool(re.search(r"CUCC", self.explorers))
|
||||
|
||||
def reference(self):
|
||||
if self.kataster_number:
|
||||
return f"{self.kat_area()}-{self.kataster_number}"
|
||||
else:
|
||||
return f"{self.kat_area()}-{self.unofficial_number}"
|
||||
|
||||
|
||||
def get_absolute_url(self):
|
||||
if self.kataster_number:
|
||||
href = self.kataster_number
|
||||
@@ -148,34 +154,35 @@ class Cave(TroggleModel):
|
||||
href = self.unofficial_number
|
||||
else:
|
||||
href = self.official_name.lower()
|
||||
#return settings.URL_ROOT + '/cave/' + href + '/'
|
||||
#return urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,})) # WRONG. This produces /cave/161 and should be /1623/161
|
||||
return Path(settings.URL_ROOT) / self.url # not good Django style.. NEEDS actual URL
|
||||
|
||||
# return settings.URL_ROOT + '/cave/' + href + '/'
|
||||
# return urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,})) # WRONG. This produces /cave/161 and should be /1623/161
|
||||
return Path(settings.URL_ROOT) / self.url # not good Django style.. NEEDS actual URL
|
||||
|
||||
def url_parent(self):
|
||||
return self.url.rsplit("/", 1)[0]
|
||||
|
||||
def __str__(self, sep = ": "):
|
||||
def __str__(self, sep=": "):
|
||||
return str(self.slug())
|
||||
|
||||
def get_QMs(self):
|
||||
'''Searches for all QMs that reference this cave.
|
||||
'''
|
||||
#qms = self.qm_set.all().order_by('expoyear', 'block__date')
|
||||
qms = QM.objects.filter(cave=self).order_by('expoyear', 'block__date') # a QuerySet, see https://docs.djangoproject.com/en/4.0/ref/models/querysets/#order-by
|
||||
return qms # a QuerySet
|
||||
"""Searches for all QMs that reference this cave."""
|
||||
# qms = self.qm_set.all().order_by('expoyear', 'block__date')
|
||||
qms = QM.objects.filter(cave=self).order_by(
|
||||
"expoyear", "block__date"
|
||||
) # a QuerySet, see https://docs.djangoproject.com/en/4.0/ref/models/querysets/#order-by
|
||||
return qms # a QuerySet
|
||||
|
||||
def kat_area(self):
|
||||
for a in self.area.all():
|
||||
if a.kat_area():
|
||||
return a.kat_area()
|
||||
|
||||
|
||||
def entrances(self):
|
||||
return CaveAndEntrance.objects.filter(cave=self)
|
||||
|
||||
def singleentrance(self):
|
||||
return len(CaveAndEntrance.objects.filter(cave=self)) == 1
|
||||
|
||||
|
||||
def entrancelist(self):
|
||||
rs = []
|
||||
res = ""
|
||||
@@ -183,11 +190,11 @@ class Cave(TroggleModel):
|
||||
if e.entrance_letter:
|
||||
rs.append(e.entrance_letter)
|
||||
rs.sort()
|
||||
prevR = ''
|
||||
prevR = ""
|
||||
n = 0
|
||||
for r in rs:
|
||||
if prevR:
|
||||
if chr(ord(prevR) + 1 ) == r:
|
||||
if chr(ord(prevR) + 1) == r:
|
||||
prevR = r
|
||||
n += 1
|
||||
else:
|
||||
@@ -205,27 +212,27 @@ class Cave(TroggleModel):
|
||||
else:
|
||||
res += "–" + prevR
|
||||
return res
|
||||
|
||||
|
||||
def writeDataFile(self):
|
||||
filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)
|
||||
|
||||
t = loader.get_template('dataformat/cave.xml')
|
||||
t = loader.get_template("dataformat/cave.xml")
|
||||
now = datetime.now(timezone.utc)
|
||||
print(now)
|
||||
c = dict({'cave': self, 'date': now})
|
||||
c = dict({"cave": self, "date": now})
|
||||
u = t.render(c)
|
||||
writetrogglefile(filepath, u)
|
||||
return
|
||||
|
||||
|
||||
def file_output(self):
|
||||
filepath = Path(os.path.join(settings.CAVEDESCRIPTIONS, self.filename))
|
||||
|
||||
t = loader.get_template('dataformat/cave.xml')
|
||||
t = loader.get_template("dataformat/cave.xml")
|
||||
now = datetime.now(timezone.utc)
|
||||
c = dict({'cave': self, 'date': now})
|
||||
c = dict({"cave": self, "date": now})
|
||||
content = t.render(c)
|
||||
return (filepath, content, "utf8")
|
||||
|
||||
|
||||
def getArea(self):
|
||||
areas = self.area.all()
|
||||
lowestareas = list(areas)
|
||||
@@ -237,40 +244,39 @@ class Cave(TroggleModel):
|
||||
pass
|
||||
return lowestareas[0]
|
||||
|
||||
|
||||
class EntranceSlug(models.Model):
|
||||
entrance = models.ForeignKey('Entrance',on_delete=models.CASCADE)
|
||||
slug = models.SlugField(max_length=50, unique = True)
|
||||
entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
|
||||
slug = models.SlugField(max_length=50, unique=True)
|
||||
primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Entrance(TroggleModel):
|
||||
name = models.CharField(max_length=100, blank=True,null=True)
|
||||
entrance_description = models.TextField(blank=True,null=True)
|
||||
explorers = models.TextField(blank=True,null=True)
|
||||
map_description = models.TextField(blank=True,null=True)
|
||||
location_description = models.TextField(blank=True,null=True)
|
||||
lastvisit = models.TextField(blank=True,null=True)
|
||||
approach = models.TextField(blank=True,null=True)
|
||||
underground_description = models.TextField(blank=True,null=True)
|
||||
photo = models.TextField(blank=True,null=True)
|
||||
name = models.CharField(max_length=100, blank=True, null=True)
|
||||
entrance_description = models.TextField(blank=True, null=True)
|
||||
explorers = models.TextField(blank=True, null=True)
|
||||
map_description = models.TextField(blank=True, null=True)
|
||||
location_description = models.TextField(blank=True, null=True)
|
||||
lastvisit = models.TextField(blank=True, null=True)
|
||||
approach = models.TextField(blank=True, null=True)
|
||||
underground_description = models.TextField(blank=True, null=True)
|
||||
photo = models.TextField(blank=True, null=True)
|
||||
MARKING_CHOICES = (
|
||||
('P', 'Paint'),
|
||||
('P?', 'Paint (?)'),
|
||||
('T', 'Tag'),
|
||||
('T?', 'Tag (?)'),
|
||||
('R', 'Needs Retag'),
|
||||
('S', 'Spit'),
|
||||
('S?', 'Spit (?)'),
|
||||
('U', 'Unmarked'),
|
||||
('?', 'Unknown'))
|
||||
("P", "Paint"),
|
||||
("P?", "Paint (?)"),
|
||||
("T", "Tag"),
|
||||
("T?", "Tag (?)"),
|
||||
("R", "Needs Retag"),
|
||||
("S", "Spit"),
|
||||
("S?", "Spit (?)"),
|
||||
("U", "Unmarked"),
|
||||
("?", "Unknown"),
|
||||
)
|
||||
marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
|
||||
marking_comment = models.TextField(blank=True,null=True)
|
||||
FINDABLE_CHOICES = (
|
||||
('?', 'To be confirmed ...'),
|
||||
('S', 'Coordinates'),
|
||||
('L', 'Lost'),
|
||||
('R', 'Refindable'))
|
||||
marking_comment = models.TextField(blank=True, null=True)
|
||||
FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
|
||||
findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
|
||||
findability_description = models.TextField(blank=True,null=True)
|
||||
findability_description = models.TextField(blank=True, null=True)
|
||||
alt = models.TextField(blank=True, null=True)
|
||||
northing = models.TextField(blank=True, null=True)
|
||||
easting = models.TextField(blank=True, null=True)
|
||||
@@ -279,14 +285,14 @@ class Entrance(TroggleModel):
|
||||
tag_station = models.TextField(blank=True, null=True)
|
||||
exact_station = models.TextField(blank=True, null=True)
|
||||
other_station = models.TextField(blank=True, null=True)
|
||||
other_description = models.TextField(blank=True,null=True)
|
||||
bearings = models.TextField(blank=True,null=True)
|
||||
url = models.CharField(max_length=200,blank=True, null=True)
|
||||
other_description = models.TextField(blank=True, null=True)
|
||||
bearings = models.TextField(blank=True, null=True)
|
||||
url = models.CharField(max_length=200, blank=True, null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
cached_primary_slug = models.CharField(max_length=200,blank=True, null=True)
|
||||
|
||||
cached_primary_slug = models.CharField(max_length=200, blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ['caveandentrance__entrance_letter']
|
||||
ordering = ["caveandentrance__entrance_letter"]
|
||||
|
||||
def __str__(self):
|
||||
return str(self.slug())
|
||||
@@ -298,11 +304,7 @@ class Entrance(TroggleModel):
|
||||
return SurvexStation.objects.lookup(self.other_station)
|
||||
|
||||
def find_location(self):
|
||||
r = {'': 'To be entered ',
|
||||
'?': 'To be confirmed:',
|
||||
'S': '',
|
||||
'L': 'Lost:',
|
||||
'R': 'Refindable:'}[self.findability]
|
||||
r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
|
||||
if self.tag_station:
|
||||
try:
|
||||
s = SurvexStation.objects.lookup(self.tag_station)
|
||||
@@ -337,7 +339,12 @@ class Entrance(TroggleModel):
|
||||
|
||||
def has_photo(self):
|
||||
if self.photo:
|
||||
if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
|
||||
if (
|
||||
self.photo.find("<img") > -1
|
||||
or self.photo.find("<a") > -1
|
||||
or self.photo.find("<IMG") > -1
|
||||
or self.photo.find("<A") > -1
|
||||
):
|
||||
return "Yes"
|
||||
else:
|
||||
return "Missing"
|
||||
@@ -363,17 +370,17 @@ class Entrance(TroggleModel):
|
||||
def get_absolute_url(self):
|
||||
# ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
|
||||
# if ancestor_titles:
|
||||
# res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
|
||||
# res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
|
||||
# else:
|
||||
# res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
# res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
|
||||
# return res
|
||||
res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
|
||||
res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
|
||||
return res
|
||||
|
||||
def slug(self):
|
||||
if not self.cached_primary_slug:
|
||||
primarySlugs = self.entranceslug_set.filter(primary = True)
|
||||
if primarySlugs:
|
||||
primarySlugs = self.entranceslug_set.filter(primary=True)
|
||||
if primarySlugs:
|
||||
self.cached_primary_slug = primarySlugs[0].slug
|
||||
self.save()
|
||||
else:
|
||||
@@ -390,30 +397,29 @@ class Entrance(TroggleModel):
|
||||
if e.cave:
|
||||
rs.append(e.cave)
|
||||
return rs
|
||||
|
||||
|
||||
def get_file_path(self):
|
||||
return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)
|
||||
|
||||
return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)
|
||||
|
||||
def file_output(self):
|
||||
filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))
|
||||
|
||||
t = loader.get_template('dataformat/entrance.xml')
|
||||
t = loader.get_template("dataformat/entrance.xml")
|
||||
now = datetime.now(timezone.utc)
|
||||
c = dict({'entrance': self, 'date': now})
|
||||
c = dict({"entrance": self, "date": now})
|
||||
content = t.render(c)
|
||||
return (filepath, content, "utf8")
|
||||
|
||||
def writeDataFile(self):
|
||||
filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)
|
||||
|
||||
t = loader.get_template('dataformat/entrance.xml')
|
||||
t = loader.get_template("dataformat/entrance.xml")
|
||||
now = datetime.now(timezone.utc)
|
||||
c = dict({'entrance': self, 'date': now})
|
||||
c = dict({"entrance": self, "date": now})
|
||||
u = t.render(c)
|
||||
writetrogglefile(filepath, u)
|
||||
return
|
||||
|
||||
|
||||
def url_parent(self):
|
||||
if self.url:
|
||||
return self.url.rsplit("/", 1)[0]
|
||||
@@ -423,21 +429,22 @@ class Entrance(TroggleModel):
|
||||
return cavelist[0].url_parent()
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
|
||||
def GetCaveLookup():
|
||||
"""A very relaxed way of finding probably the right cave given almost any string which might serve to identify it
|
||||
|
||||
|
||||
lookup function modelled on GetPersonExpeditionNameLookup
|
||||
repeated assignment each call, needs refactoring
|
||||
|
||||
|
||||
Used when parsing wallets contents.json file too in views/uploads.py
|
||||
|
||||
|
||||
Does NOT detect duplicates! Needs fixing.
|
||||
Needs to be a proper funciton that raises an exception if there is a duplicate.
|
||||
OR we could set it to return None if there are duplicates, and require the caller to
|
||||
OR we could set it to return None if there are duplicates, and require the caller to
|
||||
fall back on doing the actual database query it wants rather thna using this cache shortcut
|
||||
"""
|
||||
|
||||
def checkcaveid(cave, id):
|
||||
global Gcavelookup
|
||||
if id not in Gcavelookup:
|
||||
@@ -445,48 +452,48 @@ def GetCaveLookup():
|
||||
Gcave_count[id] += 1
|
||||
else:
|
||||
if cave == Gcavelookup[id]:
|
||||
pass # same id, same cave
|
||||
else: # same id but different cave
|
||||
pass # same id, same cave
|
||||
else: # same id but different cave
|
||||
message = f" - Warning: same alias id '{id:3}' for two caves '{Gcavelookup[id]}' and '{cave}'. Removing this shorthand alias entirely."
|
||||
Gcavelookup.pop(id)
|
||||
print(message)
|
||||
DataIssue.objects.create(parser='aliases', message=message)
|
||||
|
||||
DataIssue.objects.create(parser="aliases", message=message)
|
||||
|
||||
global Gcavelookup
|
||||
if Gcavelookup:
|
||||
return Gcavelookup
|
||||
Gcavelookup = {"NONEPLACEHOLDER": None}
|
||||
global Gcave_count
|
||||
Gcave_count = defaultdict(int) # sets default value to int(0)
|
||||
|
||||
DataIssue.objects.filter(parser='aliases').delete()
|
||||
|
||||
Gcave_count = defaultdict(int) # sets default value to int(0)
|
||||
|
||||
DataIssue.objects.filter(parser="aliases").delete()
|
||||
|
||||
for cave in Cave.objects.all():
|
||||
key = cave.official_name.lower()
|
||||
if key != "" and key != "unamed" and key != "unnamed":
|
||||
Gcavelookup[key] = cave
|
||||
Gcave_count[key] += 1
|
||||
Gcave_count[key] += 1
|
||||
if cave.kataster_number:
|
||||
checkcaveid(cave,cave.kataster_number) # we do expect 1623/55 and 1626/55 to cause a warning message
|
||||
|
||||
checkcaveid(cave, cave.kataster_number) # we do expect 1623/55 and 1626/55 to cause a warning message
|
||||
|
||||
# the rest of these are 'nice to have' but may validly already be set
|
||||
if cave.unofficial_number:
|
||||
unoffn = cave.unofficial_number.lower()
|
||||
checkcaveid(cave,unoffn)
|
||||
|
||||
checkcaveid(cave, unoffn)
|
||||
|
||||
if cave.filename:
|
||||
# this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
|
||||
fn = cave.filename.replace(".html","").lower()
|
||||
checkcaveid(cave,fn)
|
||||
|
||||
fn = cave.filename.replace(".html", "").lower()
|
||||
checkcaveid(cave, fn)
|
||||
|
||||
if cave.slug():
|
||||
# also possibly done already
|
||||
slug = cave.slug().lower()
|
||||
checkcaveid(cave,slug)
|
||||
checkcaveid(cave, slug)
|
||||
|
||||
# These might alse create more duplicate entries
|
||||
# Yes, this should be set in, and imported from, settings.py
|
||||
aliases =[
|
||||
aliases = [
|
||||
("1987-02", "267"),
|
||||
("1990-01", "171"),
|
||||
("1990-02", "172"),
|
||||
@@ -570,29 +577,25 @@ def GetCaveLookup():
|
||||
("2015-mf-06", "288"),
|
||||
("2016-jb-01", "289"),
|
||||
("2017-pw-01", "277"),
|
||||
("2018-dm-07", "359"), # NB this is 1626
|
||||
("2017_cucc_24", "291"), # note _ not -
|
||||
("2017_cucc_23", "295"), # note _ not -
|
||||
("2017_cucc_28", "290"), # note _ not -
|
||||
("2018-dm-07", "359"), # NB this is 1626
|
||||
("2017_cucc_24", "291"), # note _ not -
|
||||
("2017_cucc_23", "295"), # note _ not -
|
||||
("2017_cucc_28", "290"), # note _ not -
|
||||
("bs17", "283"),
|
||||
|
||||
("1976/b11", "198"),
|
||||
("1976/b8", "197"),
|
||||
("1976/b9", "190"),
|
||||
("b11", "1976/b11"),
|
||||
("b8", "1976/b8"),
|
||||
("b9", "1976/b9"),
|
||||
|
||||
("2011-01-bs30", "190"),
|
||||
("bs30", "190"),
|
||||
("2011-01", "190"),
|
||||
|
||||
("quarriesd", "2002-08"),
|
||||
("2002-x11", "2005-08"),
|
||||
("2002-x12", "2005-07"),
|
||||
("2002-x13", "2005-06"),
|
||||
("2002-x14", "2005-05"),
|
||||
|
||||
("kh", "161"),
|
||||
("161-kh", "161"),
|
||||
("204-steinBH", "204"),
|
||||
@@ -605,13 +608,12 @@ def GetCaveLookup():
|
||||
("balkon", "264"),
|
||||
("fgh", "290"),
|
||||
("gsh", "291"),
|
||||
|
||||
("homecoming", "2018-dm-07"),
|
||||
("heimkommen", "2018-dm-07"),
|
||||
("Heimkehr", "2018-dm-07"),
|
||||
("99ob02", "1999-ob-02"),
|
||||
]
|
||||
|
||||
|
||||
for i in aliases:
|
||||
if i[1] in Gcavelookup:
|
||||
if i[0] in Gcavelookup:
|
||||
@@ -623,23 +625,23 @@ def GetCaveLookup():
|
||||
Gcavelookup[i[0]] = Gcavelookup[i[1]]
|
||||
else:
|
||||
message = f" * Coding or cave existence mistake, cave for id '{i[1]}' does not exist. Expecting to set alias '{i[0]}' to it"
|
||||
#print(message)
|
||||
DataIssue.objects.create(parser='aliases', message=message)
|
||||
|
||||
# print(message)
|
||||
DataIssue.objects.create(parser="aliases", message=message)
|
||||
|
||||
addmore = {}
|
||||
for id in Gcavelookup:
|
||||
addmore[id.replace("-","_")] = Gcavelookup[id]
|
||||
addmore[id.replace("_","-")] = Gcavelookup[id]
|
||||
addmore[id.replace("-", "_")] = Gcavelookup[id]
|
||||
addmore[id.replace("_", "-")] = Gcavelookup[id]
|
||||
addmore[id.upper()] = Gcavelookup[id]
|
||||
Gcavelookup = {**addmore, **Gcavelookup}
|
||||
|
||||
addmore ={}
|
||||
|
||||
addmore = {}
|
||||
|
||||
for c in Gcave_count:
|
||||
if Gcave_count[c] > 1:
|
||||
message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
|
||||
print(message)
|
||||
DataIssue.objects.create(parser='aliases', message=message)
|
||||
DataIssue.objects.create(parser="aliases", message=message)
|
||||
# logdataissues[Gcavelookup[c]]=message # pending troggle-wide issues logging system
|
||||
|
||||
return Gcavelookup
|
||||
|
||||
@@ -22,56 +22,62 @@ from django.urls import reverse
|
||||
|
||||
import settings
|
||||
from troggle.core.models.survex import SurvexStation
|
||||
from troggle.core.models.troggle import (DataIssue, Expedition, Person,
|
||||
PersonExpedition, TroggleModel)
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition, TroggleModel
|
||||
|
||||
'''The model declarations LogBookEntry, PersonLogEntry, QM
|
||||
'''
|
||||
"""The model declarations LogBookEntry, PersonLogEntry, QM
|
||||
"""
|
||||
|
||||
todo = """
|
||||
"""
|
||||
|
||||
todo='''
|
||||
'''
|
||||
|
||||
class CaveSlug(models.Model):
|
||||
"""Moved here to avoid nasty cyclic import error"""
|
||||
cave = models.ForeignKey('Cave',on_delete=models.CASCADE)
|
||||
slug = models.SlugField(max_length=50, unique = True)
|
||||
|
||||
cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
|
||||
slug = models.SlugField(max_length=50, unique=True)
|
||||
primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class LogbookEntry(TroggleModel):
|
||||
"""Single parsed entry from Logbook
|
||||
"""
|
||||
date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
|
||||
expedition = models.ForeignKey(Expedition,blank=True, null=True,on_delete=models.SET_NULL) # yes this is double-
|
||||
title = models.CharField(max_length=200)
|
||||
cave_slug = models.SlugField(max_length=50, blank=True, null=True)
|
||||
place = models.CharField(max_length=100,blank=True, null=True,help_text="Only use this if you haven't chosen a cave")
|
||||
text = models.TextField()
|
||||
slug = models.SlugField(max_length=50)
|
||||
time_underground = models.FloatField(null=True,help_text="In decimal hours")
|
||||
"""Single parsed entry from Logbook"""
|
||||
|
||||
date = (
|
||||
models.DateField()
|
||||
) # MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
|
||||
expedition = models.ForeignKey(Expedition, blank=True, null=True, on_delete=models.SET_NULL) # yes this is double-
|
||||
title = models.CharField(max_length=200)
|
||||
cave_slug = models.SlugField(max_length=50, blank=True, null=True)
|
||||
place = models.CharField(
|
||||
max_length=100, blank=True, null=True, help_text="Only use this if you haven't chosen a cave"
|
||||
)
|
||||
text = models.TextField()
|
||||
slug = models.SlugField(max_length=50)
|
||||
time_underground = models.FloatField(null=True, help_text="In decimal hours")
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Logbook Entries"
|
||||
# several PersonLogEntrys point in to this object
|
||||
ordering = ('-date',)
|
||||
ordering = ("-date",)
|
||||
|
||||
def cave(self): # Why didn't he just make this a foreign key to Cave ?
|
||||
def cave(self): # Why didn't he just make this a foreign key to Cave ?
|
||||
c = CaveSlug.objects.get(slug=self.cave_slug, primary=True).cave
|
||||
return c
|
||||
|
||||
def isLogbookEntry(self): # Function used in templates
|
||||
def isLogbookEntry(self): # Function used in templates
|
||||
return True
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))
|
||||
return urljoin(settings.URL_ROOT, reverse("logbookentry", kwargs={"date": self.date, "slug": self.slug}))
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.date}: {self.title}'
|
||||
return f"{self.date}: {self.title}"
|
||||
|
||||
def get_next_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id+1)
|
||||
LogbookEntry.objects.get(id=self.id + 1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
LogbookEntry.objects.get(id=self.id-1)
|
||||
LogbookEntry.objects.get(id=self.id - 1)
|
||||
|
||||
def DayIndex(self):
|
||||
"""This is used to set different colours for the different trips on
|
||||
@@ -81,12 +87,12 @@ class LogbookEntry(TroggleModel):
|
||||
if self in todays:
|
||||
index = todays.index(self)
|
||||
else:
|
||||
print(f"DayIndex: Synchronization error. Restart server. {self}")
|
||||
index = 0
|
||||
|
||||
print(f"DayIndex: Synchronization error. Restart server. {self}")
|
||||
index = 0
|
||||
|
||||
if index not in range(0, mx):
|
||||
print(f"DayIndex: More than {mx-1} LogbookEntry items on one day '{index}' {self}")
|
||||
index = 0
|
||||
print(f"DayIndex: More than {mx-1} LogbookEntry items on one day '{index}' {self}")
|
||||
index = 0
|
||||
return index
|
||||
|
||||
|
||||
@@ -94,24 +100,37 @@ class PersonLogEntry(TroggleModel):
|
||||
"""Single Person going on a trip, which may or may not be written up.
|
||||
It could account for different T/U for people in same logbook entry.
|
||||
"""
|
||||
personexpedition = models.ForeignKey("PersonExpedition",null=True,on_delete=models.CASCADE)
|
||||
|
||||
personexpedition = models.ForeignKey("PersonExpedition", null=True, on_delete=models.CASCADE)
|
||||
time_underground = models.FloatField(help_text="In decimal hours")
|
||||
logbook_entry = models.ForeignKey(LogbookEntry,on_delete=models.CASCADE)
|
||||
logbook_entry = models.ForeignKey(LogbookEntry, on_delete=models.CASCADE)
|
||||
is_logbook_entry_author = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
ordering = ('-personexpedition',)
|
||||
#order_with_respect_to = 'personexpedition'
|
||||
|
||||
ordering = ("-personexpedition",)
|
||||
# order_with_respect_to = 'personexpedition'
|
||||
|
||||
def next_personlog(self):
|
||||
futurePTs = PersonLogEntry.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__gt = self.logbook_entry.date).order_by('logbook_entry__date').all()
|
||||
futurePTs = (
|
||||
PersonLogEntry.objects.filter(
|
||||
personexpedition=self.personexpedition, logbook_entry__date__gt=self.logbook_entry.date
|
||||
)
|
||||
.order_by("logbook_entry__date")
|
||||
.all()
|
||||
)
|
||||
if len(futurePTs) > 0:
|
||||
return futurePTs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def prev_personlog(self):
|
||||
pastPTs = PersonLogEntry.objects.filter(personexpedition = self.personexpedition, logbook_entry__date__lt = self.logbook_entry.date).order_by('-logbook_entry__date').all()
|
||||
pastPTs = (
|
||||
PersonLogEntry.objects.filter(
|
||||
personexpedition=self.personexpedition, logbook_entry__date__lt=self.logbook_entry.date
|
||||
)
|
||||
.order_by("-logbook_entry__date")
|
||||
.all()
|
||||
)
|
||||
if len(pastPTs) > 0:
|
||||
return pastPTs[0]
|
||||
else:
|
||||
@@ -121,38 +140,50 @@ class PersonLogEntry(TroggleModel):
|
||||
return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.personexpedition} ({self.logbook_entry.date})'
|
||||
|
||||
return f"{self.personexpedition} ({self.logbook_entry.date})"
|
||||
|
||||
|
||||
class QM(TroggleModel):
|
||||
"""This is based on qm.csv in trunk/expoweb/1623/204 which has the fields:
|
||||
"Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
|
||||
"""
|
||||
cave = models.ForeignKey('Cave', related_name='QMs',blank=True, null=True,on_delete=models.SET_NULL )
|
||||
block = models.ForeignKey('SurvexBlock', null=True,on_delete=models.SET_NULL) # only for QMs from survex files
|
||||
blockname=models.TextField(blank=True,null=True) # NB truncated copy of survexblock name with last char added
|
||||
expoyear = models.CharField(max_length=4,blank=True, null=True) # could change to datetime if logbooks similarly chnaged
|
||||
found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True,on_delete=models.SET_NULL )
|
||||
ticked = models.BooleanField(default=False) # for ticked QMs not attached to a logbook entry, should imply completion_description has text
|
||||
ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',blank=True, null=True,on_delete=models.SET_NULL) # unused, ever?!
|
||||
number = models.IntegerField(help_text="this is the sequential number in the year, only unique for CSV imports", )
|
||||
GRADE_CHOICES=(
|
||||
('A', 'A: Large obvious lead'),
|
||||
('B', 'B: Average lead'),
|
||||
('C', 'C: Tight unpromising lead'),
|
||||
('D', 'D: Dig'),
|
||||
('X', 'X: Unclimbable aven')
|
||||
) # also seen "?" and "V" in imported data - see urls.py
|
||||
|
||||
cave = models.ForeignKey("Cave", related_name="QMs", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL) # only for QMs from survex files
|
||||
blockname = models.TextField(blank=True, null=True) # NB truncated copy of survexblock name with last char added
|
||||
expoyear = models.CharField(
|
||||
max_length=4, blank=True, null=True
|
||||
) # could change to datetime if logbooks similarly chnaged
|
||||
found_by = models.ForeignKey(
|
||||
LogbookEntry, related_name="QMs_found", blank=True, null=True, on_delete=models.SET_NULL
|
||||
)
|
||||
ticked = models.BooleanField(
|
||||
default=False
|
||||
) # for ticked QMs not attached to a logbook entry, should imply completion_description has text
|
||||
ticked_off_by = models.ForeignKey(
|
||||
LogbookEntry, related_name="QMs_ticked_off", blank=True, null=True, on_delete=models.SET_NULL
|
||||
) # unused, ever?!
|
||||
number = models.IntegerField(
|
||||
help_text="this is the sequential number in the year, only unique for CSV imports",
|
||||
)
|
||||
GRADE_CHOICES = (
|
||||
("A", "A: Large obvious lead"),
|
||||
("B", "B: Average lead"),
|
||||
("C", "C: Tight unpromising lead"),
|
||||
("D", "D: Dig"),
|
||||
("X", "X: Unclimbable aven"),
|
||||
) # also seen "?" and "V" in imported data - see urls.py
|
||||
grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
|
||||
location_description = models.TextField(blank=True)
|
||||
nearest_station_description = models.CharField(max_length=400,blank=True, null=True)
|
||||
nearest_station_name = models.CharField(max_length=200,blank=True, null=True)
|
||||
nearest_station = models.ForeignKey('SurvexStation',blank=True, null=True,on_delete=models.SET_NULL)
|
||||
area = models.CharField(max_length=100,blank=True, null=True)
|
||||
completion_description = models.TextField(blank=True,null=True)
|
||||
comment=models.TextField(blank=True,null=True)
|
||||
nearest_station_description = models.CharField(max_length=400, blank=True, null=True)
|
||||
nearest_station_name = models.CharField(max_length=200, blank=True, null=True)
|
||||
nearest_station = models.ForeignKey("SurvexStation", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
area = models.CharField(max_length=100, blank=True, null=True)
|
||||
completion_description = models.TextField(blank=True, null=True)
|
||||
comment = models.TextField(blank=True, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.code()}'
|
||||
return f"{self.code()}"
|
||||
|
||||
def code(self):
|
||||
if self.cave:
|
||||
@@ -167,11 +198,10 @@ class QM(TroggleModel):
|
||||
blocknamestr = "-" + str(self.blockname)
|
||||
else:
|
||||
blocknamestr = ""
|
||||
return f'{cavestr}-{expoyearstr}-{self.number}{self.grade}{blocknamestr}'
|
||||
return f"{cavestr}-{expoyearstr}-{self.number}{self.grade}{blocknamestr}"
|
||||
|
||||
def get_completion_url(self):
|
||||
'''assumes html file named is in same folder as cave description file
|
||||
'''
|
||||
"""assumes html file named is in same folder as cave description file"""
|
||||
cd = None
|
||||
if self.completion_description:
|
||||
try:
|
||||
@@ -180,18 +210,29 @@ class QM(TroggleModel):
|
||||
except:
|
||||
cd = None
|
||||
return cd
|
||||
|
||||
def newslug(self):
|
||||
qmslug = f'{str(self.cave)}-{self.expoyear}-{self.blockname}{self.number}{self.grade}'
|
||||
|
||||
def newslug(self):
|
||||
qmslug = f"{str(self.cave)}-{self.expoyear}-{self.blockname}{self.number}{self.grade}"
|
||||
return qmslug
|
||||
|
||||
|
||||
def get_absolute_url(self):
|
||||
# This reverse resolution stuff is pure magic. Just change the regex in urls.py and everything changes to suit. Whacky.
|
||||
return urljoin(settings.URL_ROOT, reverse('qm',kwargs={'cave_id':self.cave.slug(),'year':self.expoyear, 'blockname':self.blockname,'qm_id':self.number,'grade':self.grade}))
|
||||
|
||||
return urljoin(
|
||||
settings.URL_ROOT,
|
||||
reverse(
|
||||
"qm",
|
||||
kwargs={
|
||||
"cave_id": self.cave.slug(),
|
||||
"year": self.expoyear,
|
||||
"blockname": self.blockname,
|
||||
"qm_id": self.number,
|
||||
"grade": self.grade,
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
def get_next_by_id(self):
|
||||
return QM.objects.get(id=self.id+1)
|
||||
return QM.objects.get(id=self.id + 1)
|
||||
|
||||
def get_previous_by_id(self):
|
||||
return QM.objects.get(id=self.id-1)
|
||||
|
||||
return QM.objects.get(id=self.id - 1)
|
||||
|
||||
@@ -12,42 +12,46 @@ from django.db import models
|
||||
from django.urls import reverse
|
||||
|
||||
from troggle.core.models.wallets import Wallet
|
||||
|
||||
# from troggle.core.models.troggle import DataIssue # circular import. Hmm
|
||||
|
||||
|
||||
class SurvexDirectory(models.Model):
|
||||
path = models.CharField(max_length=200)
|
||||
cave = models.ForeignKey('Cave', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
primarysurvexfile = models.ForeignKey(
|
||||
"SurvexFile", related_name="primarysurvexfile", blank=True, null=True, on_delete=models.SET_NULL
|
||||
)
|
||||
# could also include files in directory but not referenced
|
||||
|
||||
|
||||
class Meta:
|
||||
ordering = ('id',)
|
||||
ordering = ("id",)
|
||||
verbose_name_plural = "Survex directories"
|
||||
|
||||
def __str__(self):
|
||||
return "[SurvexDirectory:"+str(self.path) + " | Primary svx:" + str(self.primarysurvexfile.path) +".svx ]"
|
||||
return "[SurvexDirectory:" + str(self.path) + " | Primary svx:" + str(self.primarysurvexfile.path) + ".svx ]"
|
||||
|
||||
|
||||
class SurvexFile(models.Model):
|
||||
path = models.CharField(max_length=200)
|
||||
survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True,on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey('Cave', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
|
||||
survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
class Meta:
|
||||
ordering = ('id',)
|
||||
ordering = ("id",)
|
||||
|
||||
# Don't change from the default as that breaks troggle webpages and internal referencing!
|
||||
# def __str__(self):
|
||||
# return "[SurvexFile:"+str(self.path) + "-" + str(self.survexdirectory) + "-" + str(self.cave)+"]"
|
||||
# return "[SurvexFile:"+str(self.path) + "-" + str(self.survexdirectory) + "-" + str(self.cave)+"]"
|
||||
|
||||
def exists(self):
|
||||
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
|
||||
return os.path.isfile(fname)
|
||||
|
||||
|
||||
def OpenFile(self):
|
||||
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
|
||||
return open(fname)
|
||||
|
||||
|
||||
def SetDirectory(self):
|
||||
dirpath = os.path.split(self.path)[0]
|
||||
# pointless search every time we import a survex file if we know there are no duplicates..
|
||||
@@ -60,24 +64,25 @@ class SurvexFile(models.Model):
|
||||
survexdirectory.save()
|
||||
self.survexdirectory = survexdirectory
|
||||
self.save()
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return self.path
|
||||
|
||||
|
||||
class SurvexStationLookUpManager(models.Manager):
|
||||
def lookup(self, name):
|
||||
blocknames, sep, stationname = name.rpartition(".")
|
||||
return self.get(block = SurvexBlock.objects.lookup(blocknames),
|
||||
name__iexact = stationname)
|
||||
return self.get(block=SurvexBlock.objects.lookup(blocknames), name__iexact=stationname)
|
||||
|
||||
|
||||
class SurvexStation(models.Model):
|
||||
name = models.CharField(max_length=100)
|
||||
block = models.ForeignKey('SurvexBlock', null=True,on_delete=models.SET_NULL)
|
||||
name = models.CharField(max_length=100)
|
||||
block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)
|
||||
objects = SurvexStationLookUpManager()
|
||||
x = models.FloatField(blank=True, null=True)
|
||||
y = models.FloatField(blank=True, null=True)
|
||||
z = models.FloatField(blank=True, null=True)
|
||||
|
||||
|
||||
def path(self):
|
||||
r = self.name
|
||||
b = self.block
|
||||
@@ -90,16 +95,19 @@ class SurvexStation(models.Model):
|
||||
return r
|
||||
|
||||
class Meta:
|
||||
ordering = ('id',)
|
||||
ordering = ("id",)
|
||||
|
||||
def __str__(self):
|
||||
return self.name and str(self.name) or 'no name'
|
||||
return self.name and str(self.name) or "no name"
|
||||
|
||||
|
||||
#
|
||||
# Single SurvexBlock
|
||||
#
|
||||
# Single SurvexBlock
|
||||
#
|
||||
class SurvexBlockLookUpManager(models.Manager):
|
||||
"""Don't know what this does, suspect it is part of the Django admin
|
||||
system"""
|
||||
|
||||
def lookup(self, name):
|
||||
if name == "":
|
||||
blocknames = []
|
||||
@@ -110,38 +118,39 @@ class SurvexBlockLookUpManager(models.Manager):
|
||||
block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
|
||||
return block
|
||||
|
||||
|
||||
class SurvexBlock(models.Model):
|
||||
"""One begin..end block within a survex file. The basic element of a survey trip.
|
||||
"""
|
||||
"""One begin..end block within a survex file. The basic element of a survey trip."""
|
||||
|
||||
objects = SurvexBlockLookUpManager()
|
||||
name = models.CharField(max_length=100)
|
||||
title = models.CharField(max_length=200)
|
||||
parent = models.ForeignKey('SurvexBlock', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey('Cave', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
|
||||
date = models.DateField(blank=True, null=True)
|
||||
expedition = models.ForeignKey('Expedition', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
|
||||
survexfile = models.ForeignKey("SurvexFile", blank=True, null=True,on_delete=models.SET_NULL)
|
||||
survexpath = models.CharField(max_length=200) # the path for the survex stations
|
||||
|
||||
scanswallet = models.ForeignKey("Wallet", null=True,on_delete=models.SET_NULL) # only ONE wallet per block. The most recent seen overwites.. ugh.
|
||||
|
||||
legsall = models.IntegerField(null=True) # summary data for this block
|
||||
name = models.CharField(max_length=100)
|
||||
title = models.CharField(max_length=200)
|
||||
parent = models.ForeignKey("SurvexBlock", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
date = models.DateField(blank=True, null=True)
|
||||
expedition = models.ForeignKey("Expedition", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
survexfile = models.ForeignKey("SurvexFile", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
survexpath = models.CharField(max_length=200) # the path for the survex stations
|
||||
|
||||
scanswallet = models.ForeignKey(
|
||||
"Wallet", null=True, on_delete=models.SET_NULL
|
||||
) # only ONE wallet per block. The most recent seen overwites.. ugh.
|
||||
|
||||
legsall = models.IntegerField(null=True) # summary data for this block
|
||||
legslength = models.FloatField(null=True)
|
||||
|
||||
|
||||
class Meta:
|
||||
ordering = ('id',)
|
||||
ordering = ("id",)
|
||||
|
||||
def __str__(self):
|
||||
return "[SurvexBlock:"+ str(self.name) + "-path:" + \
|
||||
str(self.survexpath) + "-cave:" + \
|
||||
str(self.cave) + "]"
|
||||
|
||||
def __str__(self):
|
||||
return self.name and str(self.name) or 'no name'
|
||||
return "[SurvexBlock:" + str(self.name) + "-path:" + str(self.survexpath) + "-cave:" + str(self.cave) + "]"
|
||||
|
||||
def isSurvexBlock(self): # Function used in templates
|
||||
def __str__(self):
|
||||
return self.name and str(self.name) or "no name"
|
||||
|
||||
def isSurvexBlock(self): # Function used in templates
|
||||
return True
|
||||
|
||||
def DayIndex(self):
|
||||
@@ -152,51 +161,56 @@ class SurvexBlock(models.Model):
|
||||
if index not in range(0, mx):
|
||||
print(f"DayIndex: More than {mx-1} SurvexBlock items on one day '{index}' {self}")
|
||||
index = 0
|
||||
#return list(self.survexblock_set.all()).index(self)
|
||||
# return list(self.survexblock_set.all()).index(self)
|
||||
return index
|
||||
|
||||
|
||||
class SurvexPersonRole(models.Model):
|
||||
survexblock = models.ForeignKey('SurvexBlock',on_delete=models.CASCADE)
|
||||
# increasing levels of precision, Surely we only need survexblock and person now that we have no link to a logbook entry?
|
||||
personname = models.CharField(max_length=100)
|
||||
person = models.ForeignKey('Person', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
survexblock = models.ForeignKey("SurvexBlock", on_delete=models.CASCADE)
|
||||
# increasing levels of precision, Surely we only need survexblock and person now that we have no link to a logbook entry?
|
||||
personname = models.CharField(max_length=100)
|
||||
person = models.ForeignKey("Person", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
personexpedition = models.ForeignKey("PersonExpedition", blank=True, null=True, on_delete=models.SET_NULL)
|
||||
# expeditionday = models.ForeignKey("ExpeditionDay", null=True,on_delete=models.SET_NULL)
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return str(self.personname) + " - " + str(self.survexblock)
|
||||
return str(self.personname) + " - " + str(self.survexblock)
|
||||
|
||||
|
||||
class SingleScan(models.Model):
|
||||
"""A single file holding an image. Could be raw notes, an elevation plot or whatever
|
||||
"""
|
||||
ffile = models.CharField(max_length=200)
|
||||
name = models.CharField(max_length=200)
|
||||
wallet = models.ForeignKey("Wallet", null=True,on_delete=models.SET_NULL)
|
||||
|
||||
"""A single file holding an image. Could be raw notes, an elevation plot or whatever"""
|
||||
|
||||
ffile = models.CharField(max_length=200)
|
||||
name = models.CharField(max_length=200)
|
||||
wallet = models.ForeignKey("Wallet", null=True, on_delete=models.SET_NULL)
|
||||
|
||||
class Meta:
|
||||
ordering = ('name',)
|
||||
|
||||
ordering = ("name",)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urljoin(settings.URL_ROOT, reverse('scansingle', kwargs={"path":re.sub("#", "%23", self.wallet.walletname), "file":self.name}))
|
||||
|
||||
return urljoin(
|
||||
settings.URL_ROOT,
|
||||
reverse("scansingle", kwargs={"path": re.sub("#", "%23", self.wallet.walletname), "file": self.name}),
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "Scan Image: " + str(self.name) + " in " + str(self.wallet)
|
||||
|
||||
|
||||
class DrawingFile(models.Model):
|
||||
"""A file holding a Therion (several types) or a Tunnel drawing
|
||||
"""
|
||||
dwgpath = models.CharField(max_length=200)
|
||||
dwgname = models.CharField(max_length=200)
|
||||
dwgwallets = models.ManyToManyField("Wallet") # implicitly links via folders to scans to SVX files
|
||||
scans = models.ManyToManyField("SingleScan") # implicitly links via scans to SVX files
|
||||
dwgcontains = models.ManyToManyField("DrawingFile") # case when its a frame type
|
||||
filesize = models.IntegerField(default=0)
|
||||
npaths = models.IntegerField(default=0)
|
||||
survexfiles = models.ManyToManyField("SurvexFile") # direct link to SVX files - not populated yet
|
||||
"""A file holding a Therion (several types) or a Tunnel drawing"""
|
||||
|
||||
dwgpath = models.CharField(max_length=200)
|
||||
dwgname = models.CharField(max_length=200)
|
||||
dwgwallets = models.ManyToManyField("Wallet") # implicitly links via folders to scans to SVX files
|
||||
scans = models.ManyToManyField("SingleScan") # implicitly links via scans to SVX files
|
||||
dwgcontains = models.ManyToManyField("DrawingFile") # case when its a frame type
|
||||
filesize = models.IntegerField(default=0)
|
||||
npaths = models.IntegerField(default=0)
|
||||
survexfiles = models.ManyToManyField("SurvexFile") # direct link to SVX files - not populated yet
|
||||
|
||||
class Meta:
|
||||
ordering = ('dwgpath',)
|
||||
ordering = ("dwgpath",)
|
||||
|
||||
def __str__(self):
|
||||
return "Drawing File: " + str(self.dwgname) + " (" + str(self.filesize) + " bytes)"
|
||||
|
||||
return "Drawing File: " + str(self.dwgname) + " (" + str(self.filesize) + " bytes)"
|
||||
|
||||
@@ -7,7 +7,7 @@ from decimal import Decimal, getcontext
|
||||
from subprocess import call
|
||||
from urllib.parse import urljoin
|
||||
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
getcontext().prec = 2 # use 2 significant figures for decimal calculations
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import admin
|
||||
@@ -28,11 +28,13 @@ the django Object Relational Mapping (ORM).
|
||||
There are more subclasses define in models_caves.py models_survex.py etc.
|
||||
"""
|
||||
|
||||
|
||||
class TroggleModel(models.Model):
|
||||
"""This class is for adding fields and methods which all of our models will have.
|
||||
"""
|
||||
"""This class is for adding fields and methods which all of our models will have."""
|
||||
|
||||
new_since_parsing = models.BooleanField(default=False, editable=False)
|
||||
non_public = models.BooleanField(default=False)
|
||||
|
||||
def object_name(self):
|
||||
return self._meta.object_name
|
||||
|
||||
@@ -42,128 +44,135 @@ class TroggleModel(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
class DataIssue(TroggleModel):
|
||||
"""When importing cave data any validation problems produce a message which is
|
||||
recorded as a DataIssue. The django admin system automatically prodiuces a page listing
|
||||
"""When importing cave data any validation problems produce a message which is
|
||||
recorded as a DataIssue. The django admin system automatically prodiuces a page listing
|
||||
these at /admin/core/dataissue/
|
||||
This is a use of the NOTIFICATION pattern:
|
||||
This is a use of the NOTIFICATION pattern:
|
||||
https://martinfowler.com/eaaDev/Notification.html
|
||||
|
||||
|
||||
We have replaced all assertions in the code with messages and local fix-ups or skips:
|
||||
https://martinfowler.com/articles/replaceThrowWithNotification.html
|
||||
|
||||
|
||||
See also the use of stash_data_issue() & store_data_issues() in parsers/survex.py which defer writing to the database until the end of the import.
|
||||
"""
|
||||
|
||||
date = models.DateTimeField(auto_now_add=True, blank=True)
|
||||
parser = models.CharField(max_length=50, blank=True, null=True)
|
||||
message = models.CharField(max_length=800, blank=True, null=True)
|
||||
url = models.CharField(max_length=300, blank=True, null=True) # link to offending object
|
||||
url = models.CharField(max_length=300, blank=True, null=True) # link to offending object
|
||||
|
||||
class Meta:
|
||||
ordering = ['date']
|
||||
ordering = ["date"]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.parser} - {self.message}"
|
||||
|
||||
#
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
year = models.CharField(max_length=20, unique=True)
|
||||
name = models.CharField(max_length=100)
|
||||
logbookfile = models.CharField(max_length=100, blank=True, null=True)
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return self.year
|
||||
|
||||
class Meta:
|
||||
ordering = ('-year',)
|
||||
get_latest_by = 'year'
|
||||
|
||||
ordering = ("-year",)
|
||||
get_latest_by = "year"
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))
|
||||
|
||||
return urljoin(settings.URL_ROOT, reverse("expedition", args=[self.year]))
|
||||
|
||||
|
||||
# class ExpeditionDay(TroggleModel):
|
||||
# """Exists only on Expedition now. Removed links from logbookentry, personlogentry, survex stuff etc.
|
||||
# """
|
||||
# expedition = models.ForeignKey("Expedition",on_delete=models.CASCADE)
|
||||
# date = models.DateField()
|
||||
# """Exists only on Expedition now. Removed links from logbookentry, personlogentry, survex stuff etc.
|
||||
# """
|
||||
# expedition = models.ForeignKey("Expedition",on_delete=models.CASCADE)
|
||||
# date = models.DateField()
|
||||
|
||||
# class Meta:
|
||||
# ordering = ('date',)
|
||||
# class Meta:
|
||||
# ordering = ('date',)
|
||||
|
||||
|
||||
class Person(TroggleModel):
|
||||
"""single Person, can go on many years
|
||||
"""
|
||||
first_name = models.CharField(max_length=100)
|
||||
last_name = models.CharField(max_length=100)
|
||||
fullname = models.CharField(max_length=200)
|
||||
nickname = models.CharField(max_length=200)
|
||||
is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
|
||||
mug_shot = models.CharField(max_length=100, blank=True,null=True)
|
||||
blurb = models.TextField(blank=True,null=True)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
"""single Person, can go on many years"""
|
||||
|
||||
first_name = models.CharField(max_length=100)
|
||||
last_name = models.CharField(max_length=100)
|
||||
fullname = models.CharField(max_length=200)
|
||||
nickname = models.CharField(max_length=200)
|
||||
is_vfho = models.BooleanField(
|
||||
help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.",
|
||||
default=False,
|
||||
)
|
||||
mug_shot = models.CharField(max_length=100, blank=True, null=True)
|
||||
blurb = models.TextField(blank=True, null=True)
|
||||
orderref = models.CharField(max_length=200) # for alphabetic
|
||||
|
||||
def get_absolute_url(self):
|
||||
return urljoin(settings.URL_ROOT,reverse('person',kwargs={'first_name':self.first_name,'last_name':self.last_name}))
|
||||
return urljoin(
|
||||
settings.URL_ROOT, reverse("person", kwargs={"first_name": self.first_name, "last_name": self.last_name})
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "People"
|
||||
ordering = ('orderref',) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
ordering = ("orderref",) # "Wookey" makes too complex for: ('last_name', 'first_name')
|
||||
|
||||
def __str__(self):
|
||||
if self.last_name:
|
||||
return f"{self.first_name} {self.last_name}"
|
||||
return self.first_name
|
||||
|
||||
|
||||
def notability(self):
|
||||
"""This is actually recency: all recent cavers, weighted by number of expos
|
||||
"""
|
||||
"""This is actually recency: all recent cavers, weighted by number of expos"""
|
||||
notability = Decimal(0)
|
||||
max_expo_val = 0
|
||||
|
||||
max_expo_year = Expedition.objects.all().aggregate(models.Max('year'))
|
||||
max_expo_val = int(max_expo_year['year__max']) + 1
|
||||
max_expo_year = Expedition.objects.all().aggregate(models.Max("year"))
|
||||
max_expo_val = int(max_expo_year["year__max"]) + 1
|
||||
|
||||
for personexpedition in self.personexpedition_set.all():
|
||||
if not personexpedition.is_guest:
|
||||
if not personexpedition.is_guest:
|
||||
notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
|
||||
return notability

def bisnotable(self):
"""Boolean: is this person notable?
"""
return self.notability() > Decimal(1)/Decimal(3)

"""Boolean: is this person notable?"""
return self.notability() > Decimal(1) / Decimal(3)

def surveyedleglength(self):
return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

def first(self):
return self.personexpedition_set.order_by('-expedition')[0]
return self.personexpedition_set.order_by("-expedition")[0]

def last(self):
return self.personexpedition_set.order_by('expedition')[0]
return self.personexpedition_set.order_by("expedition")[0]


class PersonExpedition(TroggleModel):
"""Person's attendance to one Expo
"""
expedition = models.ForeignKey(Expedition,on_delete=models.CASCADE)
person = models.ForeignKey(Person,on_delete=models.CASCADE)
slugfield = models.SlugField(max_length=50,blank=True, null=True) # 2022 to be used in future
"""Person's attendance to one Expo"""

is_guest = models.BooleanField(default=False)
nickname = models.CharField(max_length=100,blank=True, null=True) # removbe this
expedition = models.ForeignKey(Expedition, on_delete=models.CASCADE)
person = models.ForeignKey(Person, on_delete=models.CASCADE)
slugfield = models.SlugField(max_length=50, blank=True, null=True) # 2022 to be used in future

is_guest = models.BooleanField(default=False)
nickname = models.CharField(max_length=100, blank=True, null=True) # removbe this

class Meta:
ordering = ('-expedition',)
#order_with_respect_to = 'expedition'
ordering = ("-expedition",)
# order_with_respect_to = 'expedition'

def __str__(self):
return f"{self.person}: ({self.expedition})"

#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09

# why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
def name(self):
if self.nickname:
return f"{self.person.first_name} ({self.nickname}) {self.person.last_name}"
@@ -172,12 +181,19 @@ class PersonExpedition(TroggleModel):
return self.person.first_name

def get_absolute_url(self):
return urljoin(settings.URL_ROOT, reverse('personexpedition',kwargs={'first_name':self.person.first_name,'last_name':self.person.last_name,'year':self.expedition.year}))

return urljoin(
settings.URL_ROOT,
reverse(
"personexpedition",
kwargs={
"first_name": self.person.first_name,
"last_name": self.person.last_name,
"year": self.expedition.year,
},
),
)

def surveyedleglength(self):
"""Survey length for this person on all survex trips on this expedition
"""
survexblocks = [personrole.survexblock for personrole in self.survexpersonrole_set.all() ]
return sum([survexblock.legslength for survexblock in set(survexblocks)])


"""Survey length for this person on all survex trips on this expedition"""
survexblocks = [personrole.survexblock for personrole in self.survexpersonrole_set.all()]
return sum([survexblock.legslength for survexblock in set(survexblocks)])
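# Note on the set() above (example roles assumed, not part of this changeset): if one person is
# recorded with two roles in the same SurvexBlock, survexpersonrole_set yields that block twice,
# and set(survexblocks) stops its legslength being counted twice in the total.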

@@ -11,40 +11,41 @@ from django.conf import settings
from django.db import models
from django.urls import reverse

#from troggle.core.models.survex import SurvexBlock
# from troggle.core.models.survex import SurvexBlock
# from troggle.core.models.troggle import DataIssue # circular import. Hmm


class Wallet(models.Model):
'''We do not keep the JSON values in the database, we query them afresh each time,
"""We do not keep the JSON values in the database, we query them afresh each time,
but we will change this when we need to do a Django query on e.g. personame
'''
fpath = models.CharField(max_length=200)
walletname = models.CharField(max_length=200)
walletdate = models.DateField(blank=True, null=True)
walletyear = models.DateField(blank=True, null=True)

"""

fpath = models.CharField(max_length=200)
walletname = models.CharField(max_length=200)
walletdate = models.DateField(blank=True, null=True)
walletyear = models.DateField(blank=True, null=True)

class Meta:
ordering = ('walletname',)

ordering = ("walletname",)

def get_absolute_url(self):
return urljoin(settings.URL_ROOT, reverse('singlewallet', kwargs={"path":re.sub("#", "%23", self.walletname)}))
return urljoin(settings.URL_ROOT, reverse("singlewallet", kwargs={"path": re.sub("#", "%23", self.walletname)}))

def get_json(self):
"""Read the JSON file for the wallet and do stuff
"""
#jsonfile = Path(self.fpath, 'contents.json')

"""Read the JSON file for the wallet and do stuff"""
# jsonfile = Path(self.fpath, 'contents.json')

# Get from git repo instead
# :drawings: walletjson/2022/2022#01/contents.json
# fpath = /mnt/d/EXPO/expofiles/surveyscans/1999/1999#02
fp = Path(self.fpath)
wname = fp.name
wyear = fp.parent.name
wurl = f"/scanupload/{self.walletname}" # .replace('#', ':')

wurl = f"/scanupload/{self.walletname}" # .replace('#', ':')

jsonfile = Path(settings.DRAWINGS_DATA, "walletjson") / wyear / wname / "contents.json"
if not Path(jsonfile).is_file():
#print(f'{jsonfile} is not a file')
# print(f'{jsonfile} is not a file')
return None
else:
with open(jsonfile) as json_f:
@@ -52,65 +53,63 @@ class Wallet(models.Model):
waldata = json.load(json_f)
except:
message = f"! {str(self.walletname)} Failed to load {jsonfile} JSON file"
#print(message)
# print(message)
raise
if waldata["date"]:
datestr = waldata["date"].replace('.','-')
datestr = waldata["date"].replace(".", "-")
try:
thisdate = datetime.date.fromisoformat(datestr)
except ValueError:
# probably a single digit day number. HACKUS MAXIMUS.
# clearly we need to fix this when we first import date strings..
datestr = datestr[:-1] + '0' + datestr[-1]
print(f' - {datestr=} ')
datestr = datestr[:-1] + "0" + datestr[-1]
print(f" - {datestr=} ")
try:
thisdate = datetime.date.fromisoformat(datestr)
self.walletdate = thisdate
self.walletdate = thisdate
self.save()
try:
waldata["date"] = thisdate.isoformat()
except:
message = f"! {str(self.walletname)} Date formatting failure {thisdate}. Failed to load from {jsonfile} JSON file"
from troggle.core.models.troggle import \
DataIssue
DataIssue.objects.update_or_create(parser='scans', message=message, url=wurl)
from troggle.core.models.troggle import DataIssue

DataIssue.objects.update_or_create(parser="scans", message=message, url=wurl)
except:
message = f"! {str(self.walletname)} Date format not ISO {datestr}. Failed to load from {jsonfile} JSON file"
from troggle.core.models.troggle import DataIssue
DataIssue.objects.update_or_create(parser='scans', message=message, url=wurl)

DataIssue.objects.update_or_create(parser="scans", message=message, url=wurl)
return waldata

def year(self):
'''This gets the year syntactically without opening and reading the JSON
'''

def year(self):
"""This gets the year syntactically without opening and reading the JSON"""
if len(self.walletname) < 5:
return None
return None
if self.walletname[4] != "#":
return None
return None
year = int(self.walletname[0:4])
if year < 1975 or year > 2050:
return None
return None
else:
self.walletyear = datetime.date(year, 1, 1)
self.walletyear = datetime.date(year, 1, 1)
self.save()
return str(year)
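# Illustrative examples (wallet names assumed, not part of this changeset): year() returns "2019"
# for a walletname of "2019#02", and None when there is no '#' in the fifth position or the leading
# digits fall outside 1975-2050.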


# Yes this is horribly, horribly inefficient, esp. for a page that have date, people and cave in it
def date(self):
"""Reads all the JSON data just to get the JSNON date.
"""
"""Reads all the JSON data just to get the JSNON date."""
if self.walletdate:
return self.walletdate
if not self.get_json():
return None
jsondata = self.get_json() # use walrus operator?
jsondata = self.get_json() # use walrus operator?

datestr = jsondata["date"]
if not datestr:
return None
else:
datestr = datestr.replace('.','-')
datestr = datestr.replace(".", "-")
try:
samedate = datetime.date.fromisoformat(datestr)
self.walletdate = samedate.isoformat()
@@ -122,13 +121,13 @@ class Wallet(models.Model):
samedate = None
self.save()
return self.walletdate


def people(self):
if not self.get_json():
return None
jsondata = self.get_json()
return jsondata["people"]


def cave(self):
if not self.get_json():
return None
@@ -142,9 +141,8 @@ class Wallet(models.Model):
return jsondata["name"]

def get_fnames(self):
'''Filenames without the suffix, i.e. without the ".jpg"
'''
dirpath = Path(settings.SCANS_ROOT, self.fpath) # does nowt as fpath is a rooted path already
'''Filenames without the suffix, i.e. without the ".jpg"'''
dirpath = Path(settings.SCANS_ROOT, self.fpath) # does nowt as fpath is a rooted path already
files = []
if not self.fpath:
files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
@@ -163,19 +161,18 @@ class Wallet(models.Model):
files.append("FileNotFoundError")
pass
return files


def fixsurvextick(self, tick):
blocks = self.survexblock_set.all()
#blocks = SurvexBlock.objects.filter(scanswallet = self)
# blocks = SurvexBlock.objects.filter(scanswallet = self)
result = tick
for b in blocks:
if b.survexfile: # if any exist in db, no check for validity or a real file. Refactor.
result = "seagreen" # slightly different shade of green
for b in blocks:
if b.survexfile: # if any exist in db, no check for validity or a real file. Refactor.
result = "seagreen" # slightly different shade of green
return result

def get_ticks(self):
"""Reads all the JSON data and sets the colour of the completion tick for each condition
"""
"""Reads all the JSON data and sets the colour of the completion tick for each condition"""
ticks = {}
waldata = self.get_json()
if not waldata:
@@ -189,7 +186,7 @@ class Wallet(models.Model):
ticks["W"] = "black"
return ticks
ticks = {}


# Initially, are there any required survex files present ?
# Note that we can't set the survexblock here on the wallet as that info is only available while parsing the survex file
survexok = "red"
@@ -199,14 +196,14 @@ class Wallet(models.Model):
ticks["S"] = "green"
else:
if waldata["survex file"]:
if not type(waldata["survex file"])==list: # a string also is a sequence type, so do it this way
if not type(waldata["survex file"]) == list: # a string also is a sequence type, so do it this way
waldata["survex file"] = [waldata["survex file"]]
ngood = 0
nbad = 0
ticks["S"] = "purple"
for sx in waldata["survex file"]:
#this logic appears in several places, inc uploads.py). Refactor.
if sx !="":
# this logic appears in several places, inc uploads.py). Refactor.
if sx != "":
if Path(sx).suffix.lower() != ".svx":
sx = sx + ".svx"
if (Path(settings.SURVEX_DATA) / sx).is_file():
@@ -221,9 +218,9 @@ class Wallet(models.Model):
ticks["S"] = "red"
else:
ticks["S"] = "black"

# Cave Description
if waldata["description written"]:

# Cave Description
if waldata["description written"]:
ticks["C"] = "green"
else:
ticks["C"] = survexok
@@ -235,10 +232,9 @@ class Wallet(models.Model):
if not self.year():
ticks["Q"] = "darkgrey"
else:
if int(self.year()) < 2015:
if int(self.year()) < 2015:
ticks["Q"] = "lightgrey"


# Notes, Plan, Elevation; Tunnel
if waldata["electronic survey"]:
ticks["N"] = "green"
@@ -246,9 +242,9 @@ class Wallet(models.Model):
ticks["E"] = "green"
ticks["T"] = "green"
else:

files = self.get_fnames()

# Notes required
notes_scanned = reduce(operator.or_, [f.startswith("note") for f in files], False)
notes_scanned = reduce(operator.or_, [f.endswith("notes") for f in files], notes_scanned)
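# Note: each reduce(operator.or_, [...], start) above is a running any(), so notes_scanned ends up
# True if any filename starts with "note" or ends with "notes".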
@@ -281,15 +277,14 @@ class Wallet(models.Model):
ticks["T"] = "red"
else:
ticks["T"] = "green"

# Website
if waldata["website updated"]:
ticks["W"] = "green"
else:
ticks["W"] = "red"

return ticks
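# Summary inferred from the code above (not part of this changeset): the tick keys seen in this
# hunk appear to be "S" survex file present, "C" cave description written, "Q" QMs, "N"/"E"/"T"
# notes, elevation and tunnel/electronic survey, and "W" website updated; the values are CSS colour
# names used for the completion tick.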


def __str__(self):
return "[" + str(self.walletname) + " (Wallet)]"