forked from expo/troggle
polishing outputs for importing unseen survex files
parent 7e47fe1f30
commit 29c5c82337
@@ -338,11 +338,16 @@ class LoadingSurvex():
survexblock.date = datetime.strptime(line, '%Y') # sets to January 1st
setdate(year)
else:
# these errors are reporting the wrong survexblock, which is actually a SurvexFile (!)
message = "! DATE Error unrecognised '{}' ({}) {}".format(oline, survexblock, survexblock.survexfile.path)
print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
print(f" {survexblock.parent=}") # fails as SUrvexFile has no .parent ...ugh.
print(f" {survexblock.survexpath=}")
print(f" {survexblock.survexfile=}")
#raise

def LoadSurvexLeg(self, survexblock, sline, comment):
def LoadSurvexLeg(self, survexblock, sline, comment, svxline):
"""This reads compass, clino and tape data but only keeps the tape lengths,
the rest is discarded after error-checking.
Now skipping the error checking - returns as soon as the leg is not one we count.
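The extra prints above are diagnostics for an unrecognised *date line, and the inline comment notes that survexblock can actually be a SurvexFile here, which has no .parent attribute. A minimal, hypothetical sketch (not part of this commit) of a guarded lookup that avoids the AttributeError:

# stand-in class for illustration only; the real SurvexFile lives in troggle's models
class SurvexFile:
    pass

survexblock = SurvexFile()                     # the object that reaches this error branch
parent = getattr(survexblock, "parent", None)  # None instead of raising AttributeError
print(f" survexblock.parent={parent}")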
@@ -376,11 +381,20 @@ class LoadingSurvex():
if self.datastar["type"] != "normal":
return

ls = sline.lower().split()
# NORMAL, so there should be 5 fields
# from the content, this is clearly reading fixedpts/gps/gps00raw.svx, but not reporting it by that name
if len(ls) < 5:
print("! Fewer than 5 fields in NORMAL in ", survexblock.survexfile.path, survexfile, survexfile.parent)
print(" datastar NORMAL:", self.datastar)
print(f" Line (split): {ls}, comment: {comment}")
print(f" Line: {sline}\nsvxline: {svxline}")
message = f' ! Not 5 fields in line \'{sline.lower()}\' {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}'
DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))

datastar = self.datastar # shallow copy: alias but the things inside are the same things
survexleg = SurvexLeg()

ls = sline.lower().split()

# skip all splay legs
try:
if ls[datastar["from"]] == ".." or ls[datastar["from"]] == ".":
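The new block above reports survex "normal" data lines that do not split into the expected five fields. A small self-contained sketch of the same check, using a made-up line in the usual from, to, tape, compass, clino order:

# illustrative only: a fabricated "normal" data line and the length check the hunk adds
sline = "1 2 12.35 045 -03"
ls = sline.lower().split()
if len(ls) < 5:
    print("! Fewer than 5 fields in NORMAL data line:", ls)
else:
    tape = float(ls[2])  # LoadSurvexLeg keeps only the tape length, per its docstring
    print("tape length:", tape)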
@@ -789,14 +803,18 @@ class LoadingSurvex():
# Yes we didn't find this cave, but we know it is a pending one. So not an error.
# print(f'! ALREADY PENDING {caveid}',file=sys.stderr)
return
id = caveid[5:]
if id in self.pending:
print(f'! ALREADY PENDING {id}',file=sys.stderr)
return

message = f" ! Warning: cave identifier '{caveid}' (guessed from file path) is not a known cave. Need to add to expoweb/cave_data/pending.txt ? In '{includelabel}.svx' at depth:[{len(depth)}]."
print("\n"+message)
print("\n"+message,file=sys.stderr)
DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(includelabel))
print(f' # datastack in LoadSurvexFile:{includelabel} type:', end="",file=sys.stderr)
for dict in self.datastack:
print(f'<{dict["type"].upper()} >', end="",file=sys.stderr)
# print(f' # datastack in LoadSurvexFile:{includelabel}', file=sys.stderr)
# for dict in self.datastack:
# print(f' type: <{dict["type"].upper()} >', file=sys.stderr)


def LoadSurvexFile(self, svxid):
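When a cave identifier guessed from the file path is unknown, the code above now also dumps the types held on the *data stack to stderr on a single line. A hedged stand-alone sketch of that dump (the stack contents are invented; "normal" and "passage" are genuine survex data styles):

import sys

datastack = [{"type": "normal"}, {"type": "passage"}]  # made-up stand-in for self.datastack
print(' # datastack type:', end="", file=sys.stderr)
for d in datastack:
    print(f'<{d["type"].upper()} >', end="", file=sys.stderr)
print(file=sys.stderr)  # terminate the line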
@@ -973,11 +991,12 @@ class LoadingSurvex():
nonlocal blockcount

blockcount +=1
if blockcount % 10 ==0 :
if blockcount % 20 ==0 :
print(".", file=sys.stderr,end='')
if blockcount % 200 ==0 :
if blockcount % 400 ==0 :
print("\n", file=sys.stderr,end='')
print(" - MEM:{:7.3f} MB in use".format(get_process_memory()),file=sys.stderr)
mem=get_process_memory()
print(" - MEM:{:7.3f} MB in use".format(mem),file=sys.stderr)
print(" ", file=sys.stderr,end='')
sys.stderr.flush()

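This hunk halves the progress noise: a dot is printed every 20 blocks instead of every 10, and a newline plus a memory report every 400 blocks instead of every 200. A compact hypothetical sketch of the same pattern outside the class (the function name is made up, and a constant stands in for get_process_memory(), which is not shown in this diff):

import sys

blockcount = 0

def tick():
    global blockcount
    blockcount += 1
    if blockcount % 20 == 0:
        print(".", file=sys.stderr, end='')
    if blockcount % 400 == 0:
        mem = 42.0  # stand-in value for get_process_memory()
        print("\n - MEM:{:7.3f} MB in use".format(mem), file=sys.stderr)
        sys.stderr.flush()

for _ in range(1200):
    tick()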
@@ -1190,7 +1209,7 @@ class LoadingSurvex():
# yes we are reading a *command
starstatement(star)
else: # not a *cmd so we are reading data OR a ";" rx_comment failed. We hope.
self.LoadSurvexLeg(survexblock, sline, comment)
self.LoadSurvexLeg(survexblock, sline, comment, svxline)

self.legsnumber = nlegstotal
self.slength = slengthtotal
@@ -1480,7 +1499,7 @@ def FindAndLoadSurvex(survexblockroot):
flinear.write(" - MEM:{:.3f} MB ADDITIONALLY USED\n".format(mem1-mem0))
flinear.write(" - {:,} survex files in linear include list \n".format(len(svx_scan.svxfileslist)))

print(" - {:,} runs of survex 'cavern' refreshing .3d files".format(svx_scan.caverncount),file=sys.stderr)
print("\n - {:,} runs of survex 'cavern' refreshing .3d files".format(svx_scan.caverncount),file=sys.stderr)
print(" - {:,} survex files from tree in linear include list".format(len(svx_scan.svxfileslist)),file=sys.stderr)

mem1 = get_process_memory()
@@ -1512,9 +1531,14 @@ def FindAndLoadSurvex(survexblockroot):
for o in excpts:
if str(x).strip().startswith(o):
removals.append(x)
# special fix for file not actually in survex format
unseens.remove(Path("fixedpts/gps/gps00raw"))

for x in removals:
unseens.remove(x)
print(f" - {len(unseens)} survex files found which were not included in main tree. ({len(svx_scan.svxfileslist)} in main tree)", file=sys.stderr)
# for x in unseens:
# print(f"'{x}', ", end='', file=sys.stderr)
print(f"\n - {len(unseens)} survex files found which were not included in main tree. ({len(svx_scan.svxfileslist)} in main tree)", file=sys.stderr)
print(f" -- Now loading the previously-omitted survex files.", file=sys.stderr)

with open(Path(settings.SURVEX_DATA, '_unseens.svx'), 'w') as u:
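This is the core of the commit message: survex files never reached from the main include tree are gathered into unseens, known exceptions plus fixedpts/gps/gps00raw (not valid survex format) are dropped, and the remainder is wrapped in a generated _unseens.svx. A hedged sketch of how such a wrapper could be written; the paths, output location and header comment below are illustrative, not taken from the commit:

from pathlib import Path

unseens = {Path("fixedpts/gps/gps10"), Path("caves-1623/999/999")}  # made-up leftovers
with open("/tmp/_unseens.svx", "w") as u:
    u.write("; survex files not reached from the main include tree\n")
    for x in sorted(unseens):
        u.write(f"*include {x}\n")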
@@ -1608,7 +1632,7 @@ def FindAndLoadSurvex(survexblockroot):
# ps = pstats.Stats(pr2, stream=f)
# ps.sort_stats(SortKey.CUMULATIVE)
# ps.print_stats()

mem1 = get_process_memory()
print("\n - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
print(" - MEM:{:7.3f} MB ADDITIONALLY USED".format(mem1-mem0),file=sys.stderr)