Mirror of https://expo.survex.com/repositories/troggle/.git (synced 2024-11-21 23:01:52 +00:00)

refactor to reduce db hits and fix *team

This commit is contained in:
parent e6fd1f0ec5
commit 0d9d307490
@@ -19,6 +19,8 @@ In particular, it enables JSON export of any data with 'export_as_json'
and configures the search fields to be used within the control panel.

What is the search path for the css and js inclusions in the Media subclasses though ?!

The page looks for /static/jquery/jquery.min.js
'''

class TroggleModelAdmin(admin.ModelAdmin):
@@ -152,7 +152,7 @@ class SurvexPersonRole(models.Model):
personname = models.CharField(max_length=100)
person = models.ForeignKey('Person', blank=True, null=True,on_delete=models.SET_NULL)
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True,on_delete=models.SET_NULL)
persontrip = models.ForeignKey('PersonTrip', blank=True, null=True,on_delete=models.SET_NULL) # logbook
# persontrip = models.ForeignKey('PersonTrip', blank=True, null=True,on_delete=models.SET_NULL) # logbook thing not a survex thing
expeditionday = models.ForeignKey("ExpeditionDay", null=True,on_delete=models.SET_NULL)

def __str__(self):
@@ -22,9 +22,6 @@ It also scans the Loser repo for all the svx files, which it loads individually

todo = """

-#BUG, if *date comes after *team, the person's date is not set at all.
It needs re-setting at the end of the block. 'Fixed', but fix not working.

- LoadSurvexFile() Creates a new current survexfile and valid .survexdirectory
The survexblock passed-in is not necessarily the parent. FIX THIS.
@@ -47,18 +44,23 @@ debugprinttrigger = "!"

dataissues = []

def stash_data_issue(parser=None, message=None, url=None):
def stash_data_issue(parser=None, message=None, url=None, sb=None):
"""Avoid hitting the database for error messages until the end of the import"""
global dataissues
dataissues.append((parser, message, url))
dataissues.append((parser, message, url, sb))

def store_data_issues():
"""Take the stash and store it permanently in the database instead"""
global dataissues
print(f" - Storing {len(dataissues)} Data Issues into database")

for i in dataissues:
parser, message, url = i
for issue in dataissues:
parser, message, url, sb = issue
if url is None:
if sb is not None:
url = get_offending_filename(sb)
DataIssue.objects.create(parser=parser, message=message, url=url)
dataissues = []
dataissues = [] # in database now, so empty cache

class MapLocations(object):
"""Class used only for identifying the entrance locations"""
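Reviewer's note: the stash/flush pair above is the core of the "reduce db hits" half of this commit — warnings are collected in a module-level list during the parse and written as DataIssue rows only once, at the end. A minimal, Django-free sketch of the same idea, with a create_row callback standing in for DataIssue.objects.create() (all names here are illustrative, not troggle's):

# Sketch of the stash-then-flush pattern; `create_row` stands in for
# DataIssue.objects.create() so the sketch has no Django dependency.
_issue_stash = []  # module-level cache, mirrors `dataissues` above

def stash_issue(parser, message, url=None, sb=None):
    """Remember an import problem in memory instead of writing a row immediately."""
    _issue_stash.append((parser, message, url, sb))

def flush_issues(create_row):
    """Write every stashed problem in one pass at the end of the import."""
    global _issue_stash
    for parser, message, url, sb in _issue_stash:
        if url is None and sb is not None:
            url = "/survexfile/" + sb + ".svx"   # same rule as get_offending_filename()
        create_row(parser=parser, message=message, url=url)
    _issue_stash = []  # rows are in the database now, so empty the cache

With many warnings per import, this turns a database write per warning during parsing into a single flush afterwards.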
@@ -104,14 +106,6 @@ class MapLocations(object):
def __str__(self):
return f"{len(self.p)} map locations"


def get_offending_filename(path):
"""Used to provide the URL for a line in the DataErrors page
whcih reports problems on importing data into troggle
"""
return "/survexfile/" + path + ".svx"


class SurvexLeg:
"""No longer a models.Model subclass, so no longer a database table"""
@@ -119,14 +113,83 @@ class SurvexLeg:
compass = 0.0
clino = 0.0

def get_offending_filename(path):
"""Used to provide the URL for a line in the DataErrors page
which reports problems on importing data into troggle
"""
return "/survexfile/" + path + ".svx"

def get_people_on_trip(survexblock):
trip_people_cache = {}
def get_team_on_trip(survexblock):
"""Uses a cache to avoid a database query if it doesn't need to.
Only used for complete team."""
global trip_people_cache

if survexblock in trip_people_cache:
return trip_people_cache[survexblock]

qpeople = SurvexPersonRole.objects.filter(survexblock=survexblock)
trip_people_cache[survexblock] = qpeople # this is a query list
return qpeople

def get_people_on_trip(survexblock):
"""Gets the displayable names of the people on a survexbock trip.
Only used for complete team."""
qpeople = get_team_on_trip(survexblock)

people = []
for p in qpeople:
people.append(f"{p.personname}")

return list(set(people))

trip_person_cache = {}
def put_person_on_trip(survexblock, personexpedition, tm):
"""Uses a cache to avoid a database query if it doesn't need to.
Only used for a single person"""
global trip_person_cache

if (survexblock, personexpedition) in trip_person_cache:
return True

try:
personrole = SurvexPersonRole.objects.create(
survexblock=survexblock, person = personexpedition.person,
expeditionday = survexblock.expeditionday, personexpedition=personexpedition,
personname=tm
)
except:
message = f"! *team '{tm}' FAIL, already created {survexblock.survexfile.path} ({survexblock}) "
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

trip_person_cache[(survexblock, personexpedition)] = 1
return False

person_pending_cache = {}
def add_to_pending(survexblock, tm):
global person_pending_cache

if survexblock not in person_pending_cache:
person_pending_cache[survexblock] = set()

person_pending_cache[survexblock].add(tm)
# personexpedition = None
# personrole, created = SurvexPersonRole.objects.update_or_create(
# survexblock=survexblock, personexpedition=personexpedition, personname=tm)
# personrole.save()

def get_team_pending(survexblock):
"""A set of *team names before we get to the *date line in a survexblock"""
global person_pending_cache

if survexblock in person_pending_cache:
teamnames = person_pending_cache[survexblock] # a set of names
person_pending_cache[survexblock] = ()
return teamnames
return

class LoadingSurvex:
"""A 'survex block' is a *begin...*end set of cave data.
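Reviewer's note: the three module-level caches added above (trip_people_cache, trip_person_cache, person_pending_cache) all follow the same idea — key on the survexblock (or block/person pair) and only touch the ORM on a cache miss, parking *team names seen before the block's *date in a per-block set. A Django-free sketch of that idea, with hypothetical names and a fetch_team callback standing in for the SurvexPersonRole query:

# Memoisation sketch; `fetch_team(block)` stands in for the expensive
# SurvexPersonRole.objects.filter(...) lookup in the real code.
_team_cache = {}

def cached_team(block, fetch_team):
    """Return the team for a block, hitting the expensive lookup only once per block."""
    if block in _team_cache:
        return _team_cache[block]
    team = fetch_team(block)     # the only expensive call
    _team_cache[block] = team
    return team

# Names seen on a *team line before the block's *date is known are just parked
# in a per-block set, to be resolved later (compare get_team_pending above).
_pending = {}

def park_name(block, name):
    _pending.setdefault(block, set()).add(name)

def take_pending(block):
    """Hand back (and clear) the parked names for a block, or None if there are none."""
    return _pending.pop(block, None)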
@@ -172,6 +235,7 @@ class LoadingSurvex:
rx_comminc = re.compile(r"(?i)^\|\*include[\s]*([-\w/]*).*$") # inserted by linear collate ;*include
rx_commcni = re.compile(r"(?i)^\|\*edulcni[\s]*([-\w/]*).*$") # inserted by linear collate ;*edulcni
rx_include = re.compile(r"(?i)^\s*(\*include[\s].*)$")
rx_include2 = re.compile("(?i)include$")
rx_commref = re.compile(r"(?i)^\s*ref(?:erence)?[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)")
rx_ref_text = re.compile(r'(?i)^\s*\"[^"]*\"\s*$')
rx_star = re.compile(r"(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$")
@@ -180,7 +244,9 @@ class LoadingSurvex:
rx_badmerge = re.compile(r"(?i).*(\>\>\>\>\>)|(\=\=\=\=\=)|(\<\<\<\<\<).*$")
rx_ref2 = re.compile(r"(?i)\s*ref[.;]?")
rx_commteam = re.compile(r"(?i)\s*(Messteam|Zeichner)\s*[:]?(.*)")

rx_quotedtitle = re.compile(r'(?i)^"(.*)"$')


# This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore
datastardefault = {"type": "normal", "from": 0, "to": 1, "tape": 2, "compass": 3, "clino": 4}
@@ -246,7 +312,7 @@ class LoadingSurvex:
)
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
else:
message = (
@@ -254,7 +320,7 @@ class LoadingSurvex:
)
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

def LoadSurvexTeam(self, survexblock, line):
@@ -265,7 +331,7 @@ class LoadingSurvex:
*team gb, bl

personrole is used to record that a person was on a survex trip, NOT the role they played.
(NB PersonTrip is a logbook thing, not a survex thing. Yes they could be merged, maybe.)
(NB PersonTrip is a logbook thing, not a survex thing. )
"""

def record_team_member(tm, survexblock):
@@ -277,46 +343,49 @@ class LoadingSurvex:
# so we can't validate whether the person was on expo or not.
# we will have to attach them to the survexblock anyway, and then do a
# later check on whether they are valid when we get the date.

# We have hundreds of updated Django database updates when the same person is
# on the same trip in multiple roles. We should de-duplicate these ourselves in Python
# instead of using SurvexPersonRole.objects.update_or_create() which is expensive.

expo = survexblock.expedition # may be None if no *date yet
# this syntax was bizarre.. made more obvious

if expo:
if not survexblock.expeditionday: # *date has been set
# should not happen
message = f"! *team {expo.year} expo ok, expedition day not in *team {survexblock.survexfile.path} ({survexblock}) "
message = f"! *team {expo.year} expo ok, expedition day NOT in *team {survexblock.survexfile.path} ({survexblock}) "
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

personexpedition = GetPersonExpeditionNameLookup(expo).get(tm.lower())
if personexpedition:
personrole, created = SurvexPersonRole.objects.update_or_create(
survexblock=survexblock, personexpedition=personexpedition, personname=tm
)
personrole.person = personexpedition.person
personrole.expeditionday = survexblock.expeditionday
put_person_on_trip(survexblock, personexpedition, tm)
self.currentpersonexped.append(personexpedition) # used in push/pop block code
personrole.save()
elif known_foreigner(tm): # note, not using .lower()
message = f"- *team {expo.year} '{tm}' known foreigner on *team {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
# stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))
else:
# we know the date and expo, but can't find the person
message = f"! *team {expo.year} '{tm}' FAIL personexpedition lookup on *team {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
else:
personexpedition = None
personrole, created = SurvexPersonRole.objects.update_or_create(
survexblock=survexblock, personexpedition=personexpedition, personname=tm
)
add_to_pending(survexblock, tm)
# don't know the date yet, so cannot query the table about validity.
# assume the person is valid. It will get picked up with the *date appears
personrole.save()
# There are hundreds of these..
message = (
f"- Team/Date mis-ordered: {line} ({survexblock}) {survexblock.survexfile.path}"
)
print(self.insp + message)
# stash_data_issue(
# parser="survex team", message=message, url=None, sb=(survexblock.survexfile.path)
# )

mteammember = self.rx_teammem.match(line) # matches the role at the beginning
if not mteammember:
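Reviewer's note: the comment at the top of this hunk spells out the intent — rather than an update_or_create() per role line, the new put_person_on_trip() creates the row once and remembers the (block, person) pair. A small Django-free sketch of that de-duplication, with a hypothetical create_role() standing in for SurvexPersonRole.objects.create():

# De-duplication sketch: create each (block, person) role row at most once.
# `create_role` is a stand-in for SurvexPersonRole.objects.create(); the real
# code in this commit also records expeditionday and the raw *team name.
_seen_roles = set()

def record_once(block, person, create_role):
    """Return True if the pair was already recorded, False after creating it."""
    key = (block, person)
    if key in _seen_roles:
        return True             # e.g. the same person listed under several roles
    create_role(block, person)  # one INSERT instead of update_or_create per role line
    _seen_roles.add(key)
    return False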
@@ -332,7 +401,7 @@ class LoadingSurvex:
message = f"! *team {survexblock.survexfile.path} ({survexblock}) Weird '{mteammember.group(1)}' oldstyle line: '{line}'"
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
else:
nullmember = self.rx_teamabs.match(line) # matches empty role line. Ignore these.
@@ -340,7 +409,7 @@ class LoadingSurvex:
message = f"! *team {survexblock.survexfile.path} ({survexblock}) Bad line: '{line}'"
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
else:
for tm in self.rx_person.split(mteammember.group(2)):
@@ -351,7 +420,7 @@ class LoadingSurvex:
message = f"! Weird *team '{mteammember.group(2)}' newstyle line: '{line}' ({survexblock}) {survexblock.survexfile.path}"
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

def LoadSurvexEntrance(self, survexblock, line):
@@ -408,7 +477,7 @@ class LoadingSurvex:
)
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

expo = expeditions[0]
@@ -416,25 +485,61 @@ class LoadingSurvex:
return expo

def LoadSurvexDate(self, survexblock, line):
# we should make this a date RANGE for everything?
"""We now have a valid date for this survexblock, so we now know the expo
it relates to and can use GetPersonExpeditionNameLookup(expo) to check whether
the people are correct.

Note that a *team line can come before AND after a *date line"""

def setdate_on_survexblock(year):
# We are assuming that deferred *team people are in the same block. Otherwise, ouch.
"""Either *date comes before any *team, in which case there are no prior
PersonRoles attached, or
*team came before this *date, in which case the names are only in 'pending'"""
global trip_person_cache

expo = self.get_expo_from_year(year)
survexblock.expedition = expo
survexblock.expeditionday = expo.get_expedition_day(survexblock.date)
survexblock.save()

team = SurvexPersonRole.objects.filter(survexblock=survexblock)
for pr in team:
if not pr.expeditionday: # *date and *team in 'wrong' order. All working now.
team = get_team_on_trip(survexblock) # should be empty, should only be in 'pending'
# team = SurvexPersonRole.objects.filter(survexblock=survexblock)
if len(team) > 0:
message = f"! *team {expo.year} Multiple *date in one block? Already someone on team when *date seen. {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))

if teamnames := get_team_pending(survexblock): # WALRUS https://docs.python.org/3/whatsnew/3.8.html#assignment-expressions
for tm in teamnames:
if known_foreigner(tm):
message = f"- *team {expo.year} '{tm}' known foreigner *date (misordered) {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
# stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))
else:
pe = GetPersonExpeditionNameLookup(expo).get(tm.lower())
if pe:
put_person_on_trip(survexblock, pe, tm)
self.currentpersonexped.append(pe)
else:
message = f"! *team {year} '{tm}' FAIL personexpedition lookup on *date {survexblock.survexfile.path} ({survexblock}) "
print(self.insp + message)
stash_data_issue(
parser="survex",
message=message,
url=None, sb=(survexblock.survexfile.path),
)


# All this next section should not happen unless there are >1 *date lines in a block
for pr in team: # pr is a PersonRole object
if not pr.expeditionday: # *date and *team in 'wrong' order.

pr.expeditionday = survexblock.expeditionday
pr.save()

if not pr.personexpedition: # again, we didn't know the date until now
if not pr.personexpedition:
pe = GetPersonExpeditionNameLookup(expo).get(pr.personname.lower())
if pe:
if pe: # pe is a PersonExpedition
# message = "! {} ({}) Fixing undated personexpedition '{}'".format(survexblock.survexfile.path, survexblock, p.personname)
# print(self.insp+message)
# stash_data_issue(parser='survex', message=message)
@@ -445,21 +550,21 @@ class LoadingSurvex:
elif known_foreigner(pr.personname): # note, not using .lower()
message = f"- *team {expo.year} '{pr.personname}' known foreigner on *date {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
# stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))
else:
message = f"! *team {year} '{pr.personname}' FAIL personexpedition lookup on *date {survexblock.survexfile.path} ({survexblock}) '{pr.personname}'"
print(self.insp + message)
stash_data_issue(
parser="survex",
message=message,
url=get_offending_filename(survexblock.survexfile.path),
url=None, sb=(survexblock.survexfile.path),
)

oline = line
if len(line) > 10:
# message = "! DATE Warning LONG DATE '{}' ({}) {}".format(oline, survexblock, survexblock.survexfile.path)
# print(self.insp+message)
# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
# stash_data_issue(parser='survex', message=message, url=None, sb=(survexblock.survexfile.path))
if line[10] == "-": # ie a range, just look at first date
line = line[0:10]
if len(line) == 10:
@@ -467,40 +572,40 @@ class LoadingSurvex:
# TO DO set to correct Austrian timezone Europe/Vienna ?
# %m and %d need leading zeros. Source svx files require them.
survexblock.date = datetime.strptime(line.replace(".", "-"), "%Y-%m-%d")
setdate_on_survexblock(year)
elif len(line) == 7:
year = line[:4]
perps = get_people_on_trip(survexblock) # What, you don't know Judge Dredd slang ?
message = f"! DATE Warning only accurate to the month, setting to 1st '{oline}' ({survexblock}) {survexblock.survexfile.path} {perps}"
print(self.insp + message)
stash_data_issue(
parser="svxdate", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="svxdate", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexblock.date = datetime.strptime(line.replace(".", "-"), "%Y-%m") # sets to first of month
setdate_on_survexblock(year)
elif len(line) == 4:
year = line[:4]
perps = get_people_on_trip(survexblock)
message = f"! DATE WARNING only accurate to the YEAR, setting to 1st January '{oline}' ({survexblock}) {survexblock.survexfile.path} {perps}"
print(self.insp + message)
stash_data_issue(
parser="svxdate", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="svxdate", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexblock.date = datetime.strptime(line, "%Y") # sets to January 1st
setdate_on_survexblock(year)
else:
# these errors are reporting the wrong survexblock, which is actually a SurvexFile (!)
# see To Do notes on how to trigger this. Still needs investigating..
message = (
f"! DATE Error unrecognised '{oline}-{survexblock}' ({type(survexblock)}) {survexblock.survexfile.path}"
)
print(self.insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
print(f" {type(survexblock)=}") # survexblock.parent fails as a SurvexFile has no .parent ...ugh.
print(f" {survexblock.survexpath=}")
print(f" {survexblock.survexfile=}")
# raise

setdate_on_survexblock(year)

def LoadSurvexLeg(self, survexblock, sline, comment, svxline):
"""This reads compass, clino and tape data but only keeps the tape lengths,
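Reviewer's note: the branch above accepts three *date spellings by length — 10 characters (a full y.m.d date, with ranges truncated to their first date), 7 (year and month, defaulting to the 1st), and 4 (year only, defaulting to 1 January). A small self-contained sketch of just that normalisation, independent of the parser (function name is mine):

from datetime import datetime

def normalise_svx_date(text):
    """Illustrative only: mirrors the length-based *date handling above.

    Returns a datetime, or None for a form the parser would report as an error."""
    if len(text) > 10 and text[10] == "-":   # a range: keep the first date only
        text = text[:10]
    text = text.replace(".", "-")
    if len(text) == 10:
        return datetime.strptime(text, "%Y-%m-%d")
    if len(text) == 7:
        return datetime.strptime(text, "%Y-%m")   # defaults to the 1st of the month
    if len(text) == 4:
        return datetime.strptime(text, "%Y")      # defaults to 1 January
    return None

# e.g. normalise_svx_date("2018.07.14-2018.07.15") -> datetime(2018, 7, 14, 0, 0)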
@@ -549,7 +654,7 @@ class LoadingSurvex:
print(f" Line: {sline}\nsvxline: {svxline}")
message = f" ! Not 5 fields in line '{sline.lower()}' {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}"
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)

datastar = self.datastar # shallow copy: alias but the things inside are the same things
@@ -578,7 +683,7 @@ class LoadingSurvex:
message = f" ! datastar parsing from/to incorrect in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
return
@@ -588,7 +693,7 @@ class LoadingSurvex:
message = f" ! datastar parsing incorrect in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexleg.tape = invalid_tape
return
@@ -603,7 +708,7 @@ class LoadingSurvex:
message = f" ! Units: Length scaled {tape}m '{ls}' in ({survexblock.survexfile.path}) units:{self.units} factor:{self.unitsfactor}x"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
if self.units == "feet":
tape = float(tape) / METRESINFEET
@@ -611,7 +716,7 @@ class LoadingSurvex:
message = f" ! Units: converted to {tape:.3f}m from {self.units} '{ls}' in ({survexblock.survexfile.path})"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexleg.tape = float(tape)
self.legsnumber += 1
@@ -619,7 +724,7 @@ class LoadingSurvex:
message = f" ! Value Error: Tape misread in line'{ls}' in {survexblock.survexfile.path} units:{self.units}"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexleg.tape = invalid_tape
try:
@@ -631,7 +736,7 @@ class LoadingSurvex:
)
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)

try:
@@ -640,7 +745,7 @@ class LoadingSurvex:
message = f" ! Value Error: Compass not found in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
lcompass = invalid_compass
@@ -652,7 +757,7 @@ class LoadingSurvex:
print((" Line:", ls))
message = f" ! Value Error: Clino misread in line '{sline.lower()}' {datastar=} {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}"
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
lclino = invalid_clino
@@ -673,7 +778,7 @@ class LoadingSurvex:
print((" Line:", ls))
message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass, ls, survexblock.survexfile.path)
stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
survexleg.compass = invalid_compass
@@ -765,13 +870,13 @@ class LoadingSurvex:
message = f' ! QM TICK find FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" qmlist:"{qm}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
if len(qm) > 1:
message = f' ! QM TICK MULTIPLE found FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
qm[0].ticked = True
qm[0].save()
@@ -831,7 +936,7 @@ class LoadingSurvex:
message = f" ! QM{qm_no} FAIL to create {qm_nearest} in'{survexblock.survexfile.path}'"
print(insp + message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

def LoadSurvexDataNormal(self, survexblock, args):
@@ -864,7 +969,7 @@ class LoadingSurvex:
print(message)
print(message, file=sys.stderr)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
return
else:
@@ -876,7 +981,7 @@ class LoadingSurvex:
print(message)
print(message, file=sys.stderr)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
return False
@@ -905,7 +1010,7 @@ class LoadingSurvex:
print(message)
print(message, file=sys.stderr)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)
self.datastar["type"] = ls[0]
@@ -1021,7 +1126,7 @@ class LoadingSurvex:
print("\n" + message)
print("\n" + message, file=sys.stderr)
print(f"{self.pending}", end="", file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(includelabel))
stash_data_issue(parser="survex", message=message, url=None, sb=(includelabel))
# print(f' # datastack in LoadSurvexFile:{includelabel}', file=sys.stderr)
# for dict in self.datastack:
# print(f' type: <{dict["type"].upper()} >', file=sys.stderr)
@@ -1140,7 +1245,7 @@ class LoadingSurvex:
message = f' ! QM Unrecognised as valid in "{survexblock.survexfile.path}" QM{qml.group(1)} "{qml.group(2)}" : regex failure, typo?'
print(message)
stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
)

included = self.rx_comminc.match(comment)
@@ -1306,7 +1411,7 @@ class LoadingSurvex:
self.unitsstack.append((self.units, self.unitsfactor))
self.legsnumberstack.append(self.legsnumber)
self.slengthstack.append(self.slength)
self.personexpedstack.append(self.currentpersonexped)
self.personexpedstack.append(self.currentpersonexped) # just one person?!
pushblock()
# PUSH state ++++++++++++++
self.legsnumber = 0
@@ -1351,7 +1456,7 @@ class LoadingSurvex:
raise
# POP state ++++++++++++++
popblock()
self.currentpersonexped = self.personexpedstack.pop()
self.currentpersonexped = self.personexpedstack.pop() # just one person?!
self.legsnumber = self.legsnumberstack.pop()
self.units, self.unitsfactor = self.unitsstack.pop()
self.slength = self.slengthstack.pop()
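Reviewer's note: these two hunks only add a "# just one person?!" query, but they show the parser's state discipline: on *begin the current units, leg count, length and team are pushed onto parallel stacks, and on *end they are popped back. A rough, Django-free sketch of that save/restore pattern, using a single stack of tuples and invented field names rather than troggle's parallel stacks:

# Save/restore of per-block parser state around *begin/*end (sketch only).
class BlockState:
    def __init__(self):
        self.units, self.unitsfactor = "metres", None
        self.legsnumber, self.slength = 0, 0.0
        self.currentpersonexped = []   # team in force for the current block
        self._stack = []

    def begin_block(self):
        # PUSH state: remember the enclosing block's settings
        self._stack.append((self.units, self.unitsfactor, self.legsnumber,
                            self.slength, self.currentpersonexped))
        self.legsnumber, self.slength = 0, 0.0   # the new block starts fresh

    def end_block(self):
        # POP state: restore whatever the enclosing block had
        (self.units, self.unitsfactor, self.legsnumber,
         self.slength, self.currentpersonexped) = self._stack.pop()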
@@ -1364,7 +1469,7 @@ class LoadingSurvex:

# -----------------------------
elif self.rx_title.match(cmd):
quotedtitle = re.match('(?i)^"(.*)"$', args)
quotedtitle = self.rx_quotedtitle.match(args)
if quotedtitle:
survexblock.title = quotedtitle.groups()[0]
else:
@@ -1463,7 +1568,7 @@ class LoadingSurvex:
)
print(message)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
return # skip this survex file and all things *included in it

includestmt = self.rx_include.match(svxline)
@@ -1475,7 +1580,9 @@ class LoadingSurvex:
if star: # yes we are reading a *cmd
cmd, args = star.groups()
cmd = cmd.lower()
if re.match("(?i)include$", cmd):
if self.rx_include2.match(cmd):
# rx_include2 = re.compile("(?i)include$")
# if re.match("(?i)include$", cmd):
includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))

fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
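Reviewer's note: two of the hunks above replace inline re.match() calls with patterns precompiled as class attributes (rx_quotedtitle, rx_include2). It is the same hot-loop economy as the database caching: compile once, reuse for every line. A tiny illustration (pattern and function names here are mine, not troggle's):

import re

# Compiled once at import/class-definition time...
RX_INCLUDE = re.compile(r"(?i)include$")

def is_include(cmd: str) -> bool:
    """...and reused for every *cmd line, instead of re.match('(?i)include$', cmd)."""
    return RX_INCLUDE.match(cmd) is not None

In CPython re.match() also caches compiled patterns internally, so the speed win is modest; the clearer gain is having every pattern declared in one place on the class.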
@@ -1499,7 +1606,7 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
flinear.write(f"{self.depthinclude:2} {indent} *edulcni {pop}\n")
fcollate.write(f";|*edulcni {pop}\n")
# fininclude.close()
@@ -1509,7 +1616,7 @@ class LoadingSurvex:
message = f" ! ERROR *include file '{includepath}' not found, listed in '{fin.name}'"
print(message)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
elif re.match("(?i)begin$", cmd):
self.depthbegin += 1
depth = " " * self.depthbegin
@@ -1533,7 +1640,7 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))

self.depthbegin -= 1
pass
@@ -1561,13 +1668,13 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
# print(message,file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
if self.svxfileslist.count(path) > 2:
message = f" ! ERROR. Should have been caught before this. Survex file already *included 2x. Probably an infinite loop so fix your *include statements that include this. Aborting. {path}"
print(message)
print(message, file=flinear)
# print(message,file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
return
return
try:
@@ -1583,13 +1690,13 @@ class LoadingSurvex:
message = f" ! ERROR *include file '{path}' in '{survexblock}' has UnicodeDecodeError. Omitted."
print(message)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
return # skip this survex file and all things *included in it
except:
message = f" ! ERROR *include file '{path}' in '{survexblock}' has unexpected error. Omitted."
print(message)
print(message, file=sys.stderr)
stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
stash_data_issue(parser="survex", message=message, url=None, sb=(path))
return # skip this survex file and all things *included in it

def checkUniqueness(self, fullpath):
@@ -1960,6 +2067,7 @@ def LoadSurvexBlocks():
DataIssue.objects.filter(parser="svxdate").delete()
DataIssue.objects.filter(parser="survexleg").delete()
DataIssue.objects.filter(parser="survexunits").delete()
DataIssue.objects.filter(parser="survex team").delete()
DataIssue.objects.filter(parser="entrances").delete()
DataIssue.objects.filter(parser="xEntrances").delete()
print(" - survex Data Issues flushed")
@@ -1993,12 +2101,19 @@ def LoadSurvexBlocks():
print(f" - MEMORY start:{memstart:.3f} MB end:{memend:.3f} MB increase={memend - memstart:.3f} MB")

survexblockroot.save()

global person_pending_cache
for sb in person_pending_cache:
if len(person_pending_cache[sb]) > 0:
print(f" ")
message = f" ! PENDING team list not emptied {sb.survexfile.path} {len(person_pending_cache[sb])} people: {person_pending_cache[sb]}"
stash_data_issue(parser="survex", message=message, url=None, sb=(sb.survexfile.path))
print(message)
# duration = time.time() - start
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
store_data_issues()
# duration = time.time() - start
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)

print(" - Loaded All Survex Blocks.")
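Reviewer's note: the end of LoadSurvexBlocks() above is where the two halves of the commit meet — any *team names still parked in person_pending_cache (a block that never got a *date) are reported, and only then does store_data_issues() write the whole stash. A hedged sketch of how a caller is expected to use the new stash_data_issue() signature, passing the survex path as sb= and letting the flush build the URL; the path and message are invented examples, and the import assumes these functions live in troggle's parsers/survex.py:

from troggle.parsers.survex import stash_data_issue, store_data_issues  # assumed module path

# Old style: compute the URL up front on every call.
#   stash_data_issue(parser="survex", message=msg,
#                    url=get_offending_filename("caves-1623/204/trip1"))
# New style in this commit: pass the survex path and defer URL building to
# store_data_issues(), which fills url from sb only for the issues it stores.
stash_data_issue(parser="survex",
                 message="! *team 2018 'A. Person' FAIL personexpedition lookup",  # invented example
                 url=None,
                 sb="caves-1623/204/trip1")  # invented example path

# ...many more stashed issues during the parse...

store_data_issues()   # one pass over the stash, one DataIssue row per entry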