Mirror of https://expo.survex.com/repositories/troggle/.git
Collect data issues and write to db all at once

parent 2704fc42d4
commit e6fd1f0ec5
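The change is a simple two-phase pattern: while parsing, every problem found is appended to an in-memory list with stash_data_issue(), and store_data_issues() then writes the whole list to the database at the end of the run, instead of creating a DataIssue row at the moment each problem is spotted. The sketch below restates the two helpers added in this diff and shows how a caller is expected to use them; parse_something() is a hypothetical stand-in for the real parsing code.

```python
from troggle.core.models.troggle import DataIssue

dataissues = []  # module-level stash of (parser, message, url) tuples

def stash_data_issue(parser=None, message=None, url=None):
    """Record a data issue in memory instead of writing to the database immediately."""
    dataissues.append((parser, message, url))

def store_data_issues():
    """Write every stashed issue to the database, then clear the stash."""
    global dataissues
    print(f" - Storing {len(dataissues)} Data Issues into database")
    for parser, message, url in dataissues:
        DataIssue.objects.create(parser=parser, message=message, url=url)
    dataissues = []

def parse_something():
    # hypothetical caller, mirroring what LoadSurvexBlocks() and LoadPositions() do in this diff
    stash_data_issue(parser="survex", message="! example problem", url="/survexfile/example")
    # ... many more issues stashed while parsing ...
    store_data_issues()  # one write phase at the end, instead of a DB hit per issue
```

The diff itself follows.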
@@ -7,7 +7,6 @@ from django.conf import settings
from troggle.core.models.caves import QM, Cave
from troggle.core.models.troggle import DataIssue
from troggle.core.utils import save_carefully

"""Reads the CSV files containg QMs for a select few caves
See parsers/survex.py for the parser which extracts QMs from the survex files

@@ -23,7 +23,7 @@ It also scans the Loser repo for all the svx files, which it loads individually
todo = """
-#BUG, if *date comes after *team, the person's date is not set at all.
It needs re-setting at the end of the block.
It needs re-setting at the end of the block. 'Fixed', but fix not working.
- LoadSurvexFile() Creates a new current survexfile and valid .survexdirectory
The survexblock passed-in is not necessarily the parent. FIX THIS.
@@ -44,11 +44,24 @@ METRESINFEET = 3.28084
stop_dup_warning = False
debugprint = False # Turns on debug printout for just one *include file
debugprinttrigger = "!"
# debugprinttrigger = "caves-1623/40/old/EisSVH"

+dataissues = []
+def stash_data_issue(parser=None, message=None, url=None):
+global dataissues
+dataissues.append((parser, message, url))
+def store_data_issues():
+global dataissues
+print(f" - Storing {len(dataissues)} Data Issues into database")
+for i in dataissues:
+parser, message, url = i
+DataIssue.objects.create(parser=parser, message=message, url=url)
+dataissues = []

class MapLocations(object):
-"""Class used only for identifying teh entrance locations"""
+"""Class used only for identifying the entrance locations"""
p = [
("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),

@@ -73,14 +86,14 @@ class MapLocations(object):
k = ent.caveandentrance_set.all()[0].cave
except:
message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
continue # skip this entrance
try:
areaName = k.getArea().short_name
except:
message = f" ! Failed to get Area on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
raise
self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
@@ -232,7 +245,7 @@ class LoadingSurvex:
f"! Warning. Unparsed [*{cmd}]: '{line}' {survexblock.survexfile.path} - not an error (probably)"
)
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
else:

@@ -240,7 +253,7 @@ class LoadingSurvex:
f"! Bad unrecognised svx command: [*{cmd}] {line} ({survexblock}) {survexblock.survexfile.path}"
)
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -272,7 +285,7 @@ class LoadingSurvex:
# should not happen
message = f"! *team {expo.year} expo ok, expedition day not in *team {survexblock.survexfile.path} ({survexblock}) "
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -288,12 +301,12 @@ class LoadingSurvex:
elif known_foreigner(tm): # note, not using .lower()
message = f"- *team {expo.year} '{tm}' known foreigner on *team {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
-# DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
+# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
else:
# we know the date and expo, but can't find the person
message = f"! *team {expo.year} '{tm}' FAIL personexpedition lookup on *team {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
else:

@@ -318,7 +331,7 @@ class LoadingSurvex:
else:
message = f"! *team {survexblock.survexfile.path} ({survexblock}) Weird '{mteammember.group(1)}' oldstyle line: '{line}'"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
else:

@@ -326,7 +339,7 @@ class LoadingSurvex:
if not nullmember:
message = f"! *team {survexblock.survexfile.path} ({survexblock}) Bad line: '{line}'"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
else:

@@ -337,7 +350,7 @@ class LoadingSurvex:
if mteammember.group(2).lower() not in ("none", "both"):
message = f"! Weird *team '{mteammember.group(2)}' newstyle line: '{line}' ({survexblock}) {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
@@ -353,7 +366,7 @@ class LoadingSurvex:
else:
message = f"! Bad *ALIAS: '{line}' ({survexblock}) {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message)
+stash_data_issue(parser="survex", message=message)

def LoadSurvexUnits(self, survexblock, line):
# all for 4 survex files with measurements in feet. bugger.

@@ -370,7 +383,7 @@ class LoadingSurvex:
f"! *UNITS NUMERICAL conversion [{factor}x] '{line}' ({survexblock}) {survexblock.survexfile.path}"
)
print(self.insp + message)
-DataIssue.objects.create(parser="survexunits", message=message)
+stash_data_issue(parser="survexunits", message=message)
feet = re.match("(?i).*feet$", line)
metres = re.match("(?i).*(METRIC|METRES|METERS)$", line)

@@ -381,7 +394,7 @@ class LoadingSurvex:
else:
message = f"! *UNITS in YARDS!? - not converted '{line}' ({survexblock}) {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(parser="survexunits", message=message)
+stash_data_issue(parser="survexunits", message=message)

def get_expo_from_year(self, year):
# cacheing to save DB query on every block

@@ -394,7 +407,7 @@ class LoadingSurvex:
f"! More than one expedition in year {year} '{line}' ({survexblock}) {survexblock.survexfile.path}"
)
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
@@ -424,7 +437,7 @@ class LoadingSurvex:
if pe:
# message = "! {} ({}) Fixing undated personexpedition '{}'".format(survexblock.survexfile.path, survexblock, p.personname)
# print(self.insp+message)
-# DataIssue.objects.create(parser='survex', message=message)
+# stash_data_issue(parser='survex', message=message)
pr.personexpedition = pe
pr.person = pr.personexpedition.person
pr.save()

@@ -432,11 +445,11 @@ class LoadingSurvex:
elif known_foreigner(pr.personname): # note, not using .lower()
message = f"- *team {expo.year} '{pr.personname}' known foreigner on *date {survexblock.survexfile.path} ({survexblock}) in '{line}'"
print(self.insp + message)
-# DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
+# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
else:
message = f"! *team {year} '{pr.personname}' FAIL personexpedition lookup on *date {survexblock.survexfile.path} ({survexblock}) '{pr.personname}'"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex",
message=message,
url=get_offending_filename(survexblock.survexfile.path),

@@ -446,7 +459,7 @@ class LoadingSurvex:
if len(line) > 10:
# message = "! DATE Warning LONG DATE '{}' ({}) {}".format(oline, survexblock, survexblock.survexfile.path)
# print(self.insp+message)
-# DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
+# stash_data_issue(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
if line[10] == "-": # ie a range, just look at first date
line = line[0:10]
if len(line) == 10:

@@ -460,7 +473,7 @@ class LoadingSurvex:
perps = get_people_on_trip(survexblock) # What, you don't know Judge Dredd slang ?
message = f"! DATE Warning only accurate to the month, setting to 1st '{oline}' ({survexblock}) {survexblock.survexfile.path} {perps}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="svxdate", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexblock.date = datetime.strptime(line.replace(".", "-"), "%Y-%m") # sets to first of month

@@ -470,7 +483,7 @@ class LoadingSurvex:
perps = get_people_on_trip(survexblock)
message = f"! DATE WARNING only accurate to the YEAR, setting to 1st January '{oline}' ({survexblock}) {survexblock.survexfile.path} {perps}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="svxdate", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexblock.date = datetime.strptime(line, "%Y") # sets to January 1st

@@ -481,7 +494,7 @@ class LoadingSurvex:
f"! DATE Error unrecognised '{oline}-{survexblock}' ({type(survexblock)}) {survexblock.survexfile.path}"
)
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
print(f" {type(survexblock)=}") # survexblock.parent fails as a SurvexFile has no .parent ...ugh.
@@ -535,7 +548,7 @@ class LoadingSurvex:
print(f" Line (split): {ls}, comment: {comment}")
print(f" Line: {sline}\nsvxline: {svxline}")
message = f" ! Not 5 fields in line '{sline.lower()}' {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}"
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -564,7 +577,7 @@ class LoadingSurvex:
except:
message = f" ! datastar parsing from/to incorrect in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
return

@@ -574,7 +587,7 @@ class LoadingSurvex:
except:
message = f" ! datastar parsing incorrect in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexleg.tape = invalid_tape

@@ -589,7 +602,7 @@ class LoadingSurvex:
if debugprint:
message = f" ! Units: Length scaled {tape}m '{ls}' in ({survexblock.survexfile.path}) units:{self.units} factor:{self.unitsfactor}x"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
if self.units == "feet":

@@ -597,7 +610,7 @@ class LoadingSurvex:
if debugprint:
message = f" ! Units: converted to {tape:.3f}m from {self.units} '{ls}' in ({survexblock.survexfile.path})"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexleg.tape = float(tape)

@@ -605,7 +618,7 @@ class LoadingSurvex:
except ValueError:
message = f" ! Value Error: Tape misread in line'{ls}' in {survexblock.survexfile.path} units:{self.units}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexleg.tape = invalid_tape

@@ -617,7 +630,7 @@ class LoadingSurvex:
f" ! Value Error: Tape length not added '{ls}' in {survexblock.survexfile.path} units:{self.units}"
)
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -626,7 +639,7 @@ class LoadingSurvex:
except:
message = f" ! Value Error: Compass not found in line {ls} in {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
lcompass = invalid_compass

@@ -638,7 +651,7 @@ class LoadingSurvex:
print((" datastar:", datastar))
print((" Line:", ls))
message = f" ! Value Error: Clino misread in line '{sline.lower()}' {datastar=} {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}"
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
lclino = invalid_clino

@@ -659,7 +672,7 @@ class LoadingSurvex:
print((" datastar:", datastar))
print((" Line:", ls))
message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass, ls, survexblock.survexfile.path)
-DataIssue.objects.create(
+stash_data_issue(
parser="survexleg", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
survexleg.compass = invalid_compass
@@ -681,7 +694,7 @@ class LoadingSurvex:
if len(args) < 4:
message = f" ! Empty or BAD *REF statement '{args}' in '{survexblock.survexfile.path}'"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
return
argsgps = self.rx_argsref.match(args)

@@ -691,7 +704,7 @@ class LoadingSurvex:
perps = get_people_on_trip(survexblock)
message = f" ! Wallet *REF bad in '{survexblock.survexfile.path}' malformed id '{args}' {perps}"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
return
if not letterx:

@@ -703,18 +716,18 @@ class LoadingSurvex:
if not (int(yr) > 1960 and int(yr) < 2050):
message = " ! Wallet year out of bounds {yr} '{refscan}' {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
refscan = f"{yr}#{letterx}{wallet}"
try:
if int(wallet) > 99:
message = f" ! Wallet *REF {refscan} - very big (more than 99) so probably wrong in '{survexblock.survexfile.path}'"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
except:
message = f" ! Wallet *REF {refscan} - not numeric in '{survexblock.survexfile.path}'"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
manywallets = Wallet.objects.filter(
walletname=refscan

@@ -723,13 +736,13 @@ class LoadingSurvex:
if len(manywallets) > 1:
message = f" ! Wallet *REF {refscan} - more than one found {len(manywallets)} wallets in db with same id {survexblock.survexfile.path}"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
if survexblock.scanswallet:
if survexblock.scanswallet.walletname != refscan:
message = f" ! Wallet *REF {refscan} in {survexblock.survexfile.path} - Already a DIFFERENT wallet is set for this block '{survexblock.scanswallet.walletname}'"
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)
else:
survexblock.scanswallet = manywallets[0] # this is a ForeignKey field
survexblock.save()

@@ -739,7 +752,7 @@ class LoadingSurvex:
perps = get_people_on_trip(survexblock)
message = f" ! Wallet *REF bad in '{survexblock.survexfile.path}' '{refscan}' NOT in database i.e. wallet does not exist {perps}."
print(self.insp + message)
-DataIssue.objects.create(parser="survex", message=message, url=url)
+stash_data_issue(parser="survex", message=message, url=url)

def TickSurvexQM(self, survexblock, qmtick):
"""Interpret the specially formatted comment which is a QM TICKED statement"""
@@ -751,13 +764,13 @@ class LoadingSurvex:
# raise
message = f' ! QM TICK find FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" qmlist:"{qm}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
if len(qm) > 1:
message = f' ! QM TICK MULTIPLE found FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
qm[0].ticked = True

@@ -817,7 +830,7 @@ class LoadingSurvex:
except:
message = f" ! QM{qm_no} FAIL to create {qm_nearest} in'{survexblock.survexfile.path}'"
print(insp + message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -850,7 +863,7 @@ class LoadingSurvex:
)
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
return

@@ -862,7 +875,7 @@ class LoadingSurvex:
message = f" ! - ABORT *data statement has NEWLINE in it in {survexblock.survexfile.path}. Not parsed by troggle. '{args}'"
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
return False

@@ -879,19 +892,19 @@ class LoadingSurvex:
# message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
# print(message)
# print(message,file=sys.stderr)
-# DataIssue.objects.create(parser='survex', message=message)
+# stash_data_issue(parser='survex', message=message)
self.datastar["type"] = ls[0]
elif ls[0] == "cartesian": # We should not ignore this ?! Default for Germans ?
# message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
# print(message)
# print(message,file=sys.stderr)
-# DataIssue.objects.create(parser='survex', message=message)
+# stash_data_issue(parser='survex', message=message)
self.datastar["type"] = ls[0]
else:
message = f" ! - Unrecognised *data statement '{args}' {survexblock.name}|{survexblock.survexpath}"
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)
self.datastar["type"] = ls[0]
@@ -1008,7 +1021,7 @@ class LoadingSurvex:
print("\n" + message)
print("\n" + message, file=sys.stderr)
print(f"{self.pending}", end="", file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(includelabel))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(includelabel))
# print(f' # datastack in LoadSurvexFile:{includelabel}', file=sys.stderr)
# for dict in self.datastack:
# print(f' type: <{dict["type"].upper()} >', file=sys.stderr)

@@ -1043,7 +1056,7 @@ class LoadingSurvex:
message = f" ! 'None' SurvexDirectory returned from GetSurvexDirectory({headpath})"
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=f"/survexfile/{svxid}")
+stash_data_issue(parser="survex", message=message, url=f"/survexfile/{svxid}")
if cave:
newdirectory.cave = cave

@@ -1056,7 +1069,7 @@ class LoadingSurvex:
message = f" ! SurvexDirectory NOT SET in new SurvexFile {svxid} "
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message)
+stash_data_issue(parser="survex", message=message)
self.currentsurvexfile.save() # django insists on this although it is already saved !?
try:
newdirectory.save()

@@ -1126,7 +1139,7 @@ class LoadingSurvex:
else:
message = f' ! QM Unrecognised as valid in "{survexblock.survexfile.path}" QM{qml.group(1)} "{qml.group(2)}" : regex failure, typo?'
print(message)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex", message=message, url=get_offending_filename(survexblock.survexfile.path)
)

@@ -1387,7 +1400,7 @@ class LoadingSurvex:
message = f" ! -ERROR *include command not expected here {path}. Re-run a full Survex import."
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(
+stash_data_issue(
parser="survex",
message=message,
)

@@ -1417,7 +1430,7 @@ class LoadingSurvex:
)
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message)
+stash_data_issue(parser="survex", message=message)
continue # skip this line
# detect a star command

@@ -1450,7 +1463,7 @@ class LoadingSurvex:
)
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
return # skip this survex file and all things *included in it
includestmt = self.rx_include.match(svxline)

@@ -1486,7 +1499,7 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
flinear.write(f"{self.depthinclude:2} {indent} *edulcni {pop}\n")
fcollate.write(f";|*edulcni {pop}\n")
# fininclude.close()

@@ -1496,7 +1509,7 @@ class LoadingSurvex:
message = f" ! ERROR *include file '{includepath}' not found, listed in '{fin.name}'"
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
elif re.match("(?i)begin$", cmd):
self.depthbegin += 1
depth = " " * self.depthbegin
@@ -1520,7 +1533,7 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
self.depthbegin -= 1
pass

@@ -1548,13 +1561,13 @@ class LoadingSurvex:
print(message)
print(message, file=flinear)
# print(message,file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
if self.svxfileslist.count(path) > 2:
message = f" ! ERROR. Should have been caught before this. Survex file already *included 2x. Probably an infinite loop so fix your *include statements that include this. Aborting. {path}"
print(message)
print(message, file=flinear)
# print(message,file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
return
return
try:

@@ -1570,13 +1583,13 @@ class LoadingSurvex:
message = f" ! ERROR *include file '{path}' in '{survexblock}' has UnicodeDecodeError. Omitted."
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
return # skip this survex file and all things *included in it
except:
message = f" ! ERROR *include file '{path}' in '{survexblock}' has unexpected error. Omitted."
print(message)
print(message, file=sys.stderr)
-DataIssue.objects.create(parser="survex", message=message, url=get_offending_filename(path))
+stash_data_issue(parser="survex", message=message, url=get_offending_filename(path))
return # skip this survex file and all things *included in it

def checkUniqueness(self, fullpath):

@@ -1588,7 +1601,7 @@ class LoadingSurvex:
# This is not an error now that we are moving .3d files to the :loser: directory tree
# message = f" ! NON-UNIQUE survex filename, '{fn}' - '{self.uniquename[fn]}' #{len(self.uniquename[fn])}"
# print(message)
-# DataIssue.objects.create(parser='survex', message=message)
+# stash_data_issue(parser='survex', message=message)
message = (
f" NOTE: non-unique survex filename, '{fn}' - '{self.uniquename[fn]}' #{len(self.uniquename[fn])}"
)

@@ -1621,7 +1634,7 @@ class LoadingSurvex:
if sp.returncode != 0:
message = f" ! Error running {settings.CAVERN}: {fullpath}"
url = f"/survexfile{fullpath}.svx".replace(settings.SURVEX_DATA, "")
-DataIssue.objects.create(parser="xEntrances", message=message, url=url)
+stash_data_issue(parser="xEntrances", message=message, url=url)
print(message)
print(
"stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)

@@ -1643,7 +1656,7 @@ class LoadingSurvex:
if not svxpath.is_file():
message = f' ! BAD survex file "{fullpath}" specified in *include in {calledpath} '
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
return

@@ -1657,7 +1670,7 @@ class LoadingSurvex:
sp = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=False, text=True)
if sp.returncode != 0:
message = f' ! Error running "which" on {settings.CAVERN}'
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
print(
"stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)
@@ -1900,8 +1913,7 @@ def FindAndLoadSurvex(survexblockroot):
def MakeSurvexFileRoot():
"""Returns a file_object.path = SURVEX_TOPNAME associated with directory_object.path = SURVEX_DATA"""
# find a cave, any cave..
caves = Cave.objects.all()
-smk = caves.filter(kataster_number="000") # returns a list, a QuerySet
+smk = Cave.objects.filter(kataster_number="000") # returns a list, a QuerySet
fileroot = SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
fileroot.save()

@@ -1925,6 +1937,8 @@ def MakeOmitFileRoot(fn):
def LoadSurvexBlocks():
mem1 = get_process_memory()
print(f" - MEM:{mem1:7.2f} MB now ", file=sys.stderr)
start = time.time()
print(" - Flushing All Survex Blocks...")
# why does this increase memory use by 20 MB ?!

@@ -1940,6 +1954,8 @@ def LoadSurvexBlocks():
print(f" - MEM:{mem1:7.2f} MB now. Foreign key objects loaded on deletion. ", file=sys.stderr)
print(" - Flushing survex Data Issues ")
+global dataissues
+dataissues = []
DataIssue.objects.filter(parser="survex").delete()
DataIssue.objects.filter(parser="svxdate").delete()
DataIssue.objects.filter(parser="survexleg").delete()

@@ -1977,6 +1993,11 @@ def LoadSurvexBlocks():
print(f" - MEMORY start:{memstart:.3f} MB end:{memend:.3f} MB increase={memend - memstart:.3f} MB")
survexblockroot.save()
# duration = time.time() - start
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
+store_data_issues()
# duration = time.time() - start
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
print(" - Loaded All Survex Blocks.")
@@ -2010,7 +2031,7 @@ def LoadPositions():
) # check=False means exception not raised
if sp.returncode != 0:
message = f" ! Error: cavern: creating {file3d} in runcavern3()"
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
# find the errors in the 1623.log file

@@ -2018,17 +2039,17 @@ def LoadPositions():
["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
) # check=False means exception not raised
message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
except:
message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
if file3d.is_file():
message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
if file3d.is_file(): # might be an old one though

@@ -2050,11 +2071,11 @@ def LoadPositions():
)
except:
message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
else:
message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)

@@ -2096,7 +2117,7 @@ def LoadPositions():
if not Path(pospath).is_file():
message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
print(message)
return

@@ -2110,7 +2131,7 @@ def LoadPositions():
except:
message = " ! FAILED to find root SurvexBlock"
print(message)
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
raise
for line in posfile.readlines():
r = poslineregex.match(line)

@@ -2130,16 +2151,16 @@ def LoadPositions():
if len(sbqs) > 1:
message = f" ! MULTIPLE SurvexBlocks {len(sbqs):3} matching Entrance point {blockpath} {sid} '{id}'"
print(message)
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
sbqs[0]
elif len(sbqs) <= 0:
message = f" ! ZERO SurvexBlocks matching Entrance point {blockpath} {sid} '{id}'"
print(message)
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
except:
message = f" ! FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
print(message)
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
try:
ss = SurvexStation(name=id, block=survexblockroot)
ss.x = float(x)

@@ -2150,6 +2171,7 @@ def LoadPositions():
except:
message = f" ! FAIL to create SurvexStation Entrance point {blockpath} {sid}"
print(message)
-DataIssue.objects.create(parser="entrances", message=message)
+stash_data_issue(parser="entrances", message=message)
raise
print(f" - {found} SurvexStation entrances found.")
+store_data_issues()
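One design note on the committed version of store_data_issues(): it still issues one INSERT per stashed issue; what "all at once" buys is deferring every write into a single phase at the end of the run rather than reducing the query count. If a literal single-query write were ever wanted, Django's bulk_create could be swapped in. A minimal sketch, not part of this commit:

```python
def store_data_issues_bulk():
    """Variant sketch: write all stashed issues with one bulk INSERT via Django's bulk_create."""
    global dataissues
    print(f" - Storing {len(dataissues)} Data Issues into database")
    DataIssue.objects.bulk_create(
        [DataIssue(parser=parser, message=message, url=url) for parser, message, url in dataissues]
    )
    dataissues = []
```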