mirror of https://expo.survex.com/repositories/troggle/.git
Commit: Better tag locations
@@ -14,7 +14,7 @@ from troggle.core.models.survex import SurvexStation, utmToLatLng
 from troggle.core.models.troggle import DataIssue, TroggleModel
 from troggle.core.utils import TROG, writetrogglefile

-# Use the TROG global object to cache the cave lookup list. No good for multi-user..
+# Use the TROG global object to cache the cave lookup list. No good for multi-user.., or even multi-page. Pointless in fact.
 Gcavelookup = TROG["caves"]["gcavelookup"]
 Gcave_count = TROG["caves"]["gcavecount"]

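Aside (not part of this commit): the comment above refers to a plain module-level cache. A minimal sketch of that pattern, with invented names (_cache, build_lookup), showing why such a cache is per-process and so no help to other workers or to pages served by a different process:

# Illustrative sketch only; the names below are invented, not troggle's API.
_cache = {"caves": {}}

def cached_cave_lookup(build_lookup):
    """Return the cached lookup dict, building it on first use in this process.
    A second server process never sees this copy, which is the limitation
    the comment above points at."""
    if "gcavelookup" not in _cache["caves"]:
        _cache["caves"]["gcavelookup"] = build_lookup()
    return _cache["caves"]["gcavelookup"]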
@@ -63,7 +63,7 @@ class CaveAndEntrance(models.Model):
 # moved to models/logbooks.py to avoid cyclic import problem. No I don't know why either.

 class Cave(TroggleModel):
-    # too much here perhaps,
+    # (far) too much here perhaps,
     areacode = models.CharField(max_length=4, blank=True, null=True) # could use models.IntegerChoices
     subarea = models.CharField(max_length=25, blank=True, null=True) # 9, 8c etc.
     depth = models.CharField(max_length=100, blank=True, null=True)
@@ -72,7 +72,7 @@ class Cave(TroggleModel):
     equipment = models.TextField(blank=True, null=True)
     explorers = models.TextField(blank=True, null=True)
     extent = models.CharField(max_length=100, blank=True, null=True)
-    filename = models.CharField(max_length=200)
+    filename = models.CharField(max_length=200) # if a cave is 'pending' this is not set. Otherwise it is.
     kataster_code = models.CharField(max_length=20, blank=True, null=True)
     kataster_number = models.CharField(max_length=10, blank=True, null=True)
     kataster_status = models.TextField(blank=True, null=True)
@@ -87,12 +87,9 @@ class Cave(TroggleModel):
     unofficial_number = models.CharField(max_length=60, blank=True, null=True)
     url = models.CharField(max_length=300, blank=True, null=True, unique = True)

-    # class Meta:
-    #     unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
-
-    # href = models.CharField(max_length=100)
-
     class Meta:
+        # we do not enforce uniqueness at the db level as that causes confusing errors for users.
+        # unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
         ordering = ("kataster_code", "unofficial_number")

     def slug(self):
@@ -113,10 +110,6 @@ class Cave(TroggleModel):
         else:
             return self.unofficial_number

-    # def reference(self): # tidy this up, no longer used?
-    # REMOVE because of confusion with cave.references which is different
-    #     return f"{self.areacode}-{self.number()}"
-
     def get_absolute_url(self):
         # we do not use URL_ROOT any more.
         if self.kataster_number:
@@ -159,6 +152,13 @@ class Cave(TroggleModel):

     def entrances(self):
         return CaveAndEntrance.objects.filter(cave=self)

+    def no_location(self):
+        no_data = True
+        for e in CaveAndEntrance.objects.filter(cave=self):
+            if e.entrance.best_station:
+                no_data = False
+        return no_data
+
     def singleentrance(self):
         return len(CaveAndEntrance.objects.filter(cave=self)) == 1
@@ -215,9 +215,6 @@ class Cave(TroggleModel):
         content = t.render(c)
         return (filepath, content, "utf8")

-    def getArea(self):
-        return self.areacode
-
 class Entrance(TroggleModel):
     MARKING_CHOICES = (
         ("P", "Paint"),
@@ -338,6 +335,10 @@ class Entrance(TroggleModel):
             return self.exact_station
         if self.other_station:
             return self.other_station

+    def best_station_object(self):
+        bs = self.best_station()
+        return SurvexStation.objects.get(name=bs)
+
     def has_photo(self):
         if self.photo:
@@ -53,7 +53,7 @@ class SurvexStationLookUpManager(models.Manager):
 class SurvexStation(models.Model):
     name = models.CharField(max_length=100)
     # block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)
-    # block not used since 2020. survex stations objects are only used for entrnce locations and all taken from the .3d file
+    # block not used since 2020. survex stations objects are only used for entrance locations and all taken from the .3d file
     objects = SurvexStationLookUpManager() # overwrites SurvexStation.objects and enables lookup()
     x = models.FloatField(blank=True, null=True)
     y = models.FloatField(blank=True, null=True)
@@ -432,7 +432,7 @@ def edit_cave(request, path="", slug=None):
             "cave": cave,
             "message": message,
             #"caveAndEntranceFormSet": ceFormSet,
-            "path": path + "/",
+            "path": path + "/", # used for saving images if attached
         },
     )

@@ -552,7 +552,7 @@ def edit_entrance(request, path="", caveslug=None, entslug=None):
             "entletter": entletter,
             "entletterform": entletterform, # is unset if not being used
             "entlettereditable": entlettereditable,
-            "path": path + "/",
+            "path": path + "/", # used for saving images if attached
         },
     )

@@ -237,7 +237,7 @@ def prospecting_image(request, name):
         (35323.60, 81357.83, 50, "74"), # From Auer map
     ]:
         (N, E, D) = list(map(float, (N, E, D)))
-        maparea = Cave.objects.get(kataster_number=num).getArea().short_name
+        maparea = Cave.objects.get(kataster_number=num).areacode
         lo = mungecoord(N - D, E + D, name, img)
         hi = mungecoord(N + D, E - D, name, img)
         lpos = mungecoord(N - D, E, name, img)
@@ -289,17 +289,17 @@ def eastings(request):
             ts = e.tag_station
             if ts:
                 e.tag_ts = SurvexStation.objects.get(name=ts)
-                print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")
+                #print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")

             es = e.exact_station
             if es:
                 e.tag_es = SurvexStation.objects.get(name=es)
-                print(f"{e} {e.tag_es} {e.tag_es.lat()} {e.tag_es.long()}")
+                #print(f"{e} {e.tag_es} {e.tag_es.lat()} {e.tag_es.long()}")

             os = e.other_station
             if os:
                 e.tag_os = SurvexStation.objects.get(name=os)
-                print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")
+                #print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")

         except:
             e.tag_ss = None
@@ -10,6 +10,7 @@ from django.db import transaction

 from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance, GetCaveLookup
 from troggle.core.models.logbooks import CaveSlug
+from troggle.core.models.survex import SurvexStation
 from troggle.core.models.troggle import DataIssue
 from troggle.settings import CAVEDESCRIPTIONS, ENTRANCEDESCRIPTIONS, EXPOWEB, SURVEX_DATA

@@ -424,15 +425,22 @@ def boolify(boolstrs):

 def validate_station(station):
     """It is possible to break troggle entirely by getting this wrong.
-    These station identifiers are matched against other statsions using .endswith()
+    These station identifiers are matched against other stations using .endswith()
     in parsers/locations.py
     so a simple number here will match hundreds of SUrvexStation objects
     It should be, e.g. "1623.p240"
+
+    We will test them against survex stations after we have loaded them.
     """
     if station == "":
         return True
+
+    # CANNOT test against locations as we have not read the survex files yet. Hmph.
+
+    # Must have the right format in its name
     dot = station.find(".")
     if dot == -1:
+        print(dot)
         # no full stop found. Bad station identifier.
         raise
     else:
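Aside (not part of this commit): to see why the docstring insists on a fully qualified identifier, here is a small self-contained sketch, with invented station names, of how .endswith() matching behaves for a bare number versus a qualified tag:

# Invented station names, purely for illustration.
stations = ["1623.241.240", "1626.107.240", "1623.240.3", "1623.p240"]

def endswith_matches(tag):
    """Stations an .endswith() comparison would pick up for this tag."""
    return [s for s in stations if s.endswith(tag)]

print(endswith_matches("240"))       # three hits - every name ending in the bare number
print(endswith_matches("1623.p240")) # one hit - only the fully qualified tag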
@@ -540,6 +548,7 @@ def read_entrance(filename, ent=None):
         ent.url=url[0]

     for st in [ent.exact_station, ent.other_station, ent.tag_station]:
+        #validate_station(st)
         try:
             validate_station(st)
         except:
@@ -547,9 +556,6 @@ def read_entrance(filename, ent=None):
             #http://localhost:8000/1623/2023-EBH-01/1623-2023-EBH-01:1623-2023-EBH-01_entrance_edit
             DataIssue.objects.create(parser="entrances", message=message, url=f"/1623/{slug}/{slug}:{slug}_entrance_edit")
             print(message)
-            # ent_issues = DataIssue.objects.filter(parser="entrances")
-            # print(f".. We now have {len(ent_issues)} entrance DataIssues")
-            return None
     ent.save()
     return ent

@@ -811,6 +817,8 @@ def read_cave(filename, cave=None):
                 message = f' ! {slug:12} survex filename does not exist :LOSER:"{survex_file[0]}" in "{filename}"'
                 DataIssue.objects.create(parser="caves", message=message, url=f"/{slug[0:4]}/{slug}_cave_edit/")
                 print(message)
+            # else:
+            #     print(f"{slug:12} survex filename UNSET")


         if description_file[0]: # if not an empty string
@@ -28,7 +28,8 @@ todo = """
 """

 class MapLocations(object):
-    """Class used only for identifying the entrance locations"""
+    """Class used only for identifying the entrance locations
+    Formerly used to put locations on a prospecting map"""

     p = [
         ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
@@ -43,37 +44,97 @@ class MapLocations(object):
         ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
         ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
         ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
-    ]
+    ] # 12 fixed points

     def points(self):
+        prior = len(self.p)
         for ent in Entrance.objects.all():
-            if ent.best_station():
-                # print(f"{ent.filename}", end=", ")
-                try:
-                    k = ent.caveandentrance_set.all()[0].cave
-                except:
-                    message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
-                    stash_data_issue(parser="positions", message=message)
-                    print(message)
-                    continue # skip this entrance
-                try:
-                    areaName = k.areacode
-                except:
-                    message = f" ! Failed to get areacode on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
-                    stash_data_issue(parser="positions", message=message)
-                    print(message)
-                    store_data_issues()
-                    raise
-                self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
-        message = f" - {len(self.p)} entrances linked to caves."
+            for st, ent_type in {ent.exact_station: "exact", ent.other_station: "other", ent.tag_station: "tag"}.items():
+                if st != "":
+                    self.p.append((st, str(ent), ent.needs_surface_work(), str(ent)))
+        store_data_issues()
+        found = len(self.p) - prior
+        message = f" - {found} Entrance tags found - not yet validated against survex .pos file."
         print(message)
         return self.p

     def __str__(self):
         return f"{len(self.p)} map locations"

-poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

+def validate_entrance_stations(ent=None):
+    """Now that we have the located positions, we can check if the Entrances had correct tags
+    """
+    bads = 0
+    good = 0
+
+    def tag_lower_case(station):
+
+        so = SurvexStation.objects.filter(name=station.lower())
+        if so.count() == 1:
+            message = f"X - Entrance {ent} station '{station}' should be '{station.lower()}'"
+            stash_data_issue(parser="positions", message=message, url=url)
+            print(message)
+
+    def validate_ent(ent):
+        """For each of the three tag strings in an Entrance object,
+        validate each string as referring to a valid SurvexStation object.
+        But our list of created SurvexStation objects is created by taking a list of strings and using them
+        to select from lines in a .pos file - so this is unnecessarily indirect.
+        """
+        nonlocal bads
+        nonlocal good
+        # {% url "editentrance" ent.entrance.url_parent cave.slug ent.entrance.slug %}
+        # e.g. url = f"/1623/101/1623-101:{ent}_entrance_edit"
+        cavelist = ent.cavelist()
+        if len(cavelist) == 1:
+            cave = cavelist[0]
+            url = f"/{cave.url}"
+        elif len(cavelist) > 1:
+            cave = cavelist[-1]
+            url = f"/{cave.url}"
+        else:
+            print(f"BUGGER {ent} {ent.cavelist()}")
+            url="/caves"
+        for st, ent_type in {ent.exact_station: "exact", ent.other_station: "other", ent.tag_station: "tag"}.items():
+            if st == "":
+                continue
+            try:
+                so = SurvexStation.objects.filter(name=st)
+                if so.count() == 1:
+                    good +=1
+                    # print(f"OK - Entrance {ent} '{ent_type}' station '{st}'")
+                    continue
+                if so.count() != 0:
+                    message =f"{so.count()} found for Entrance {ent} '{ent_type}' station '{st}' {so}"
+                else:
+                    message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'."
+                stash_data_issue(parser="positions", message=message, url=url)
+                print(message)
+                bads +=1
+                tag_lower_case(st)
+                continue
+            except:
+                message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'. EXCEPTION."
+                stash_data_issue(parser="positions", message=message, url=url)
+                print(message)
+                bads +=1
+                continue
+
+    if ent:
+        return validate_ent(ent)
+
+    for ent in Entrance.objects.all():
+        validate_ent(ent)
+
+    print(f" - {good} valid SurvexStation tags of all types found on Entrances.")
+    print(f" - {bads} bad SurvexStation tags of all types found on Entrances.")
+    return True # not necessarily.. but unused return value
+
+
+poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

 def LoadPositions():
     """First load the survex stations for entrances and fixed points (about 600) into the database.
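Aside (not part of this commit): poslineregex, unchanged here apart from being moved below the new function, extracts the three coordinates and the station identifier from one line of cavern's .pos output. A minimal sketch with a made-up sample line:

import re

poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

# Made-up coordinates, in the "( x, y, z ) station" layout the regex expects.
line = "(  36670.37,  83317.43,  1903.97 ) 1623.290.p290a"

r = poslineregex.match(line)
if r:
    x, y, z, sbid = r.groups()  # all four captures are strings
    print(x, y, z, sbid)        # 36670.37 83317.43 1903.97 1623.290.p290a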
@@ -87,6 +148,7 @@ def LoadPositions():
     d3d_t = 0

     DataIssue.objects.filter(parser="positions").delete()
+    SurvexStation.objects.all().delete()


     def runcavern3d():
@@ -179,7 +241,7 @@ def LoadPositions():
         runcavern3d()
     elif d3d_t - svx_t > 0: # stale, 3d older than svx file
         runcavern3d()
-    elif now - d3d_t > 60 * 24 * 60 * 60: # >60 days old, re-run anyway
+    elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway
         runcavern3d()
     elif cav_t - d3d_t > 0: # new version of cavern
         runcavern3d()
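Aside (not part of this commit): the elif chain above decides whether to re-run cavern by comparing timestamps, and the change here tightens the age threshold from 60 days to 1 day. A simplified, hedged sketch of that kind of staleness test (paths are placeholders and the real chain has more branches than shown here):

import os
import time

def three_d_needs_rerun(svx_path, d3d_path, max_age=24 * 60 * 60):
    """Re-run cavern if the .3d file is missing, older than its .svx source,
    or simply more than a day old (the new threshold in this commit)."""
    if not os.path.isfile(d3d_path):
        return True
    svx_t = os.path.getmtime(svx_path)
    d3d_t = os.path.getmtime(d3d_path)
    now = time.time()
    if svx_t > d3d_t:          # .3d is stale relative to the survex source
        return True
    if now - d3d_t > max_age:  # too old, re-run anyway
        return True
    return False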
@@ -193,25 +255,13 @@ def LoadPositions():

     if not Path(pospath).is_file():
         message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
-        # DataIssue.objects.create(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
-        stash_data_issue(parser="positions", message=message)
+        stash_data_issue(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
         print(message)
         return

     posfile = open(pospath)
     posfile.readline() # Drop header

-    # not used survexblock on a SurvexStation since we stopped storing all of them in 2020:
-    # try:
-    #     survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
-    # except:
-    #     try:
-    #         survexblockroot = SurvexBlock.objects.get(id=1)
-    #     except:
-    #         message = " ! FAILED to find root SurvexBlock"
-    #         print(message)
-    #         stash_data_issue(parser="positions", message=message)
-    #         raise
     sbdict = {}
     dups = 0
     lineno = 1 # we dropped the header
@@ -228,33 +278,10 @@ def LoadPositions():
             else:
                 sbdict[sbid] = lineno


             for sid in mappoints:
-                if sbid.endswith(sid):
+                if sbid.endswith(sid) or sbid.endswith(sid.lower()):
                     blockpath = "." + sbid[: -len(sid)].strip(".") # only the most recent one that is mappoints
-                    # print(f"# match {sid} {sbid} {blockpath}")
-
-                    # But why are we doing this? Why do we want the survexblock id for each of these ?
-                    # ..because mostly they don't actually appear in any SVX file. We should match them up
-                    # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
-
-                    # We are reading the .pos file so we only know the SurvexFile not the SurvexBlock.
-
-                    # if False:
-                    #     try:
-                    #         sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
-                    #         if len(sbqs) == 1:
-                    #             sbqs[0]
-                    #         if len(sbqs) > 1:
-                    #             message = f" ! MULTIPLE {len(sbqs):3} SurvexBlocks '{blockpath}' from survex files mention Entrance point '{sbid}' (line {lineno})"
-                    #             print(message)
-                    #             stash_data_issue(parser="positions", message=message)
-                    #             for b in sbqs:
-                    #                 print(f" - {b}")
-                    #             sbqs[0]
-                    #     except:
-                    #         message = f" ! {lineno} FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
-                    #         print(message)
-                    #         stash_data_issue(parser="positions", message=message)
                     try:
                         ss = SurvexStation(name=sbid)
                         ss.x = float(x)
@@ -268,6 +295,9 @@ def LoadPositions():
                         stash_data_issue(parser="positions", message=message)
                         store_data_issues()
                         raise
-    print(f" - {found} SurvexStation entrances found.")
-    print(f" - {dups} Duplicated SurvexStation entrances found")
+    validate_entrance_stations() # do not need to use db here really
+    positions_filename = Path(pospath).name
+    print(f" - {found-12} SurvexStation entrance tags indentified in {lineno:,} lines in {positions_filename}.")
+    if dups > 0:
+        print(f" - {dups} Duplicated SurvexStation entrances found")
     store_data_issues()
@@ -64,7 +64,7 @@ ENTRIES = {
     "2019": 55,
     "2018": 95,
     "2017": 74,
-    "2016": 86,
+    "2016": 87,
     "2015": 80,
     "2014": 67,
     "2013": 52,
@@ -14,6 +14,8 @@ from troggle.core.models.survex import SurvexBlock, SurvexFile, SurvexPersonRole
 from troggle.core.models.wallets import Wallet
 from troggle.core.models.troggle import DataIssue, Expedition
 from troggle.core.utils import chaosmonkey, get_process_memory
+from troggle.core.utils import write_and_commit
+
 from troggle.parsers.caves import create_new_cave, do_ARGE_cave, AREACODES, ARGEAREAS
 from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner

@@ -1278,6 +1280,19 @@ class LoadingSurvex:
             if cave:
                 newfile.cave = cave
                 # print(f"\n - New directory '{newdirectory}' for cave '{cave}'",file=sys.stderr)
+                if not cave.survex_file:
+                    cave.survex_file = svxid + ".svx"
+                    cave.save()
+                    # message = f" - '{cave}' had no survex_file set - setting '{svxid}.svx' writing to {cave.filename})"
+                    message = f" - '{cave}' has no survex_file set - need to set to '{svxid}.svx' in {cave.filename})"
+                    print("\n",message,file=sys.stderr)
+                    stash_data_issue(parser="survex", message=message)
+
+                    # try:
+                    #     cave_file = cave.file_output()
+                    #     write_and_commit([cave_file], f"{cave} Update of cave.survex_file when parsing {svxid}.svx")
+                    # except
+                    #     raise

             if not newfile.primary:
                 message = f" ! .primary NOT SET in new SurvexFile {svxid} "