mirror of https://expo.survex.com/repositories/troggle/.git synced 2025-12-19 04:47:47 +00:00

cartesian survex blocks now handled

This commit is contained in:
2025-10-31 22:32:14 +02:00
parent c6647907e5
commit 7bbc413902
6 changed files with 92 additions and 37 deletions
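For context, and not part of the commit: the diff below teaches the survex importer to measure legs in cartesian-style *data blocks (common in ARGE surveys). The fragment that follows is an invented illustration of the kind of input now handled, wrapped in a Python string so it can sit alongside the parser sketches further down; the block name, stations, offsets and scale factor are all made up.

    # Invented example input, not taken from the expo dataset.
    EXAMPLE_CARTESIAN_BLOCK = """\
    *begin example
    *data cartesian from to dx dy dz
    *units dx dy dz 0.001 metres
    1 2 3000.0 4000.0 0.0
    *end example
    """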


@@ -1,5 +1,6 @@
import copy
import io
import math
import os
import re
import subprocess
@@ -70,7 +71,7 @@ dup_includes = 0
debugprint = False # Turns on debug printout for just one *include file
debugprinttrigger = "!"
dataissues = []
dataissues = set()
class SurvexLeg:
"""No longer a models.Model subclass, so no longer a database table"""
@@ -108,9 +109,12 @@ def set_walletdate(w):
w.save()
def stash_data_issue(parser=None, message=None, url=None, sb=None):
"""Avoid hitting the database for error messages until the end of the import"""
"""Avoid hitting the database for error messages until the end of the import
use a set, we do not want identically duplicate issues
"""
global dataissues
dataissues.append((parser, message, url, sb))
dataissues.add((parser, message, url, sb))
def store_data_issues():
"""Take the stash and store it permanently in the database instead
@@ -129,8 +133,8 @@ def store_data_issues():
di_list.append(DataIssue(parser=parser, message=message, url=url))
# Now commit to db
DataIssue.objects.bulk_create(di_list)
dataissues = [] # in database now, so empty cache
dataissues = set()
def get_offending_filename(path):
"""Used to provide the URL for a line in the DataErrors page
which reports problems on importing data into troggle
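A minimal aside (not from the repository) on why the stash above became a set rather than a list: identical (parser, message, url, sb) tuples collapse to a single entry, so a warning repeated for the same file is only stored once. The example values are invented.

    dataissues = set()
    dataissues.add(("survexunits", "same warning", None, "some/file.svx"))
    dataissues.add(("survexunits", "same warning", None, "some/file.svx"))
    assert len(dataissues) == 1   # list.append() would have kept both copies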
@@ -285,7 +289,8 @@ class LoadingSurvex:
rx_teamone = re.compile(r"(?i)^\s*(.*)\s*$")
rx_person = re.compile(r"(?i) and |/| / |, | , |&| & | \+ |^both$|^none$")
rx_tapelng = re.compile(r"(?i).*(tape|length).*$")
rx_tapelng = re.compile(r"(?i).*(tape|length).*$") # normal tape
rx_cartlng = re.compile(r"(?i).*([-+]?\d*\.\d+)") # cartesian units and scale
rx_cave = re.compile(r"(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)")
rx_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
@@ -779,17 +784,46 @@ class LoadingSurvex:
def LoadSurvexUnits(self, survexblock, line):
# all for 4 survex files with measurements in feet. bugger.
# Won't need this once we move to using cavern or d3dump output for lengths
# ..and all the cartesian files (ARGE) with scaling factors
tapeunits = self.rx_tapelng.match(line) # tape|length
if not tapeunits:
if not tapeunits:
angle = re.match(r"(?i).*(degrees|grads|percent).*$", line)
if angle:
# we don't care
return
lruds = re.match(r"(?i).*(left|right|up|down).*$", line)
if lruds:
# we don't care
return
scale = self.rx_cartlng.match(line)
if scale:
message = f"! *UNITS SCALE '{line}' ({survexblock}) {survexblock.survexfile.path} {len(scale.groups())} {scale.groups()[0]=}"
print(self.insp + message)
stash_data_issue(parser="survexunits", message=message, url=None, sb=(survexblock.survexfile.path))
self.unitsfactor = float(scale.groups()[0])
else:
message = f"! *UNITS SCALE FAIL '{line}' ({survexblock}) {survexblock.survexfile.path} "
print(self.insp + message)
stash_data_issue(parser="survexunits", message=message, url=None, sb=(survexblock.survexfile.path))
metres = re.match(r"(?i).*(METRIC|METRES|METERS)$", line)
if metres:
self.units = "metres"
else:
message = f"! *UNITS not meters - not converted '{line}' ({survexblock}) {survexblock.survexfile.path}"
print(self.insp + message)
stash_data_issue(parser="survexunits", message=message, url=None, sb=(survexblock.survexfile.path))
return
return
convert = re.match(r"(?i)(\w*)\s*([\.\d]+)\s*(\w*)", line)
if convert:
factor = convert.groups()[1]
self.unitsfactor = float(factor)
if debugprint:
if True:
message = (
f"! *UNITS NUMERICAL conversion [{factor}x] '{line}' ({survexblock}) {survexblock.survexfile.path}"
f" *UNITS NUMERICAL conversion [{factor}x] '{line}' ({survexblock}) {survexblock.survexfile.path}"
)
print(self.insp + message)
stash_data_issue(parser="survexunits", message=message)
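A quick standalone check, not part of the commit, of how the new rx_cartlng pattern pulls a scale factor out of a *units argument line; the sample line is an assumption about what the ARGE files contain. Because the leading .* is greedy, the capture starts at the decimal point (".001" rather than "0.001"), which is harmless for sub-unit factors like 0.001 but would drop the integer part of a factor such as 2.5.

    import re

    rx_cartlng = re.compile(r"(?i).*([-+]?\d*\.\d+)")   # same pattern as above

    line = "dx dy dz 0.001 metres"                      # invented *units arguments
    scale = rx_cartlng.match(line)
    if scale:
        unitsfactor = float(scale.groups()[0])          # ".001" -> 0.001
        print(f"unitsfactor = {unitsfactor}")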
@@ -960,6 +994,8 @@ class LoadingSurvex:
"""This reads compass, clino and tape data but only keeps the tape lengths,
the rest is discarded after error-checking.
Now skipping the error checking - returns as soon as the leg is not one we count.
Much less checking for Cartesian data.
REPLACE ALL THIS by reading the .log output of cavern for the file.
But we need the lengths per Block, not by File. dump3d will do lengths per block.
@@ -979,17 +1015,35 @@ class LoadingSurvex:
if self.datastar["type"] == "passage":
return
if self.datastar["type"] == "cartesian":
# message = f" ! CARTESIAN data in {survexblock.survexfile.path} {self.unitsfactor=}."
# stash_data_issue(
# parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
# )
ls = sline.lower().split()
# CARTESIAN, so there should be 5 fields: from to dx dy dz
# we don't care what they are called, we just use the last 3
if len(ls) != 5:
print(f" Line: {sline}\nsvxline: {svxline}")
message = f" ! Not 5 CARTESIAN fields in line '{sline.lower()}' {self.datastar=} {ls=} in\n{survexblock}\n{survexblock.survexfile}\n{survexblock.survexfile.path}"
stash_data_issue(
parser="survexleg", message=message, url=None, sb=(survexblock.survexfile.path)
)
return
leglength = math.sqrt(float(ls[2])**2 + float(ls[3])**2 + float(ls[4])**2)
if self.unitsfactor:
leglength = leglength * self.unitsfactor
self.legsnumber += 1
survexblock.legslength += leglength
self.slength += leglength
return
if self.datastar["type"] == "nosurvey":
return
if self.datastar["type"] == "diving":
return
if self.datastar["type"] == "cylpolar":
return
if debugprint:
print(
f" !! LEG data lineno:{self.lineno}\n !! sline:'{sline}'\n !! datastar['tape']: {self.datastar['tape']}"
)
if self.datastar["type"] != "normal":
return
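A self-contained restatement, not the troggle code itself, of the leg-length arithmetic added above: the last three of the five cartesian fields are read as dx, dy, dz, combined by Pythagoras, and scaled by the *units factor when one was set.

    import math

    def cartesian_leg_length(sline, unitsfactor=None):
        """Length of one 'from to dx dy dz' cartesian leg, as handled above."""
        ls = sline.lower().split()
        if len(ls) != 5:
            raise ValueError(f"expected 5 cartesian fields, got {len(ls)}: {sline!r}")
        dx, dy, dz = (float(v) for v in ls[2:5])
        length = math.sqrt(dx * dx + dy * dy + dz * dz)
        return length * unitsfactor if unitsfactor else length

    print(cartesian_leg_length("1 2 3000.0 4000.0 0.0", unitsfactor=0.001))   # -> 5.0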
@@ -1174,7 +1228,6 @@ class LoadingSurvex:
# this produces a lot of printout, so don't print it
reftxt = refline.groups()[0] # only one item in this tuple
if reftxt:
print(f"{self.insp} *REF quoted text: '{reftxt}' in {survexblock.survexfile.path}")
# only store it if not an empty string
survexblock.ref_text = reftxt
survexblock.save()
@@ -1265,7 +1318,7 @@ class LoadingSurvex:
if args == "":
# naked '*data' which is relevant only for passages. Ignore. Continue with previous settings.
return
# DEFAULT | NORMAL | CARTESIAN| NOSURVEY |PASSAGE | TOPOFIL | CYLPOLAR | DIVING
# DEFAULT | NORMAL | CARTESIAN| NOSURVEY |PASSAGE | TOPOFIL | CYLPOLAR | DIVING -- all ignored
ls = args.lower().split()
if ls[0] == "default":
self.datastar = copy.deepcopy(self.datastardefault)
@@ -1307,9 +1360,8 @@ class LoadingSurvex:
# print(message,file=sys.stderr)
# stash_data_issue(parser='survex', message=message)
self.datastar["type"] = ls[0]
elif ls[0] == "cartesian": # We should not ignore this ?! Default for Germans ?
elif ls[0] == "cartesian": # Handled in the legs calc.
# message = f" ! - *data {ls[0].upper()} blocks ignored. {survexblock.name}|{args}"
# print(message)
# print(message,file=sys.stderr)
# stash_data_issue(parser='survex', message=message)
self.datastar["type"] = ls[0]
@@ -2964,7 +3016,7 @@ def LoadSurvexBlocks():
print(" - Flushing survex Data Issues ")
global dataissues
dataissues = []
dataissues = set()
DataIssue.objects.filter(parser="survex").delete()
DataIssue.objects.filter(parser="xSvxDate").delete()
DataIssue.objects.filter(parser="survexleg").delete()