mirror of https://expo.survex.com/repositories/troggle/.git synced 2025-12-16 14:07:15 +00:00

regenerating .pos file more carefully

This commit is contained in:
2023-11-11 14:36:03 +02:00
parent 7fb0f0734d
commit 699c19245a
3 changed files with 32 additions and 35 deletions
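For orientation before the diff: the commit only re-runs cavern when the .pos/.3d outputs are missing or look stale, comparing file modification times via pathlib instead of os.path. Below is a minimal standalone sketch of that decision logic; the helper name, example paths and return convention are hypothetical, not troggle code, and (as the comment in the diff itself notes) the .svx mtime check cannot see changes in *include'd files.

import time
from pathlib import Path


def stale_reason(topdata: Path, cavern_binary: Path) -> str:
    # Hypothetical helper mirroring the checks added in this commit;
    # returns a reason to re-run cavern, or "" if outputs look fresh.
    svxpath = topdata.with_suffix(".svx")
    d3dpath = topdata.with_suffix(".3d")
    pospath = topdata.with_suffix(".pos")

    # Missing outputs always trigger a re-run (checked before any stat()).
    for p in (pospath, d3dpath):
        if not p.is_file():
            return f"{p} is missing"

    svx_t = svxpath.stat().st_mtime       # source .svx
    d3d_t = d3dpath.stat().st_mtime       # cavern output
    cav_t = cavern_binary.stat().st_mtime  # cavern executable
    now = time.time()

    if d3d_t < svx_t:
        return "output older than the top-level .svx"
    if now - d3d_t > 24 * 60 * 60:
        return "output more than a day old"
    if d3d_t < cav_t:
        return "cavern binary newer than the output"
    return ""


# Usage with made-up paths:
# if reason := stale_reason(Path("/home/expo/loser/1623"), Path("/usr/bin/cavern")):
#     print("re-running cavern:", reason)

On the public server the commit bypasses these checks and always regenerates (the "if not settings.DEVSERVER:" branch); the mtime tests only apply on a dev server.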


@@ -211,12 +211,10 @@ def LoadPositions():
     SurvexStation.objects.all().delete()
-    def runcavern3d():
+    def runcavern3d(msg=None):
+        if msg:
+            print(" - ", msg)
         outputdir = Path(str(f"{topdata}.svx")).parent
-        # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
-        # format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
         file3d = Path(f"{topdata}.3d")
         try:
             sp = subprocess.run(
@@ -230,7 +228,7 @@ def LoadPositions():
                 stash_data_issue(parser="positions", message=message)
                 print(message)
-                # find the errors in the 1623.log file
+                # find the errors in the .log file
                 sp = subprocess.run(
                     ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
                 ) # check=False means exception not raised
@@ -248,9 +246,8 @@ def LoadPositions():
                 stash_data_issue(parser="positions", message=message)
                 print(message)
-            if file3d.is_file(): # might be an old one though
+            if file3d.is_file(): # might be an old one though, if previous step failed
                 try:
-                    # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
                     sp = subprocess.run(
                         [settings.SURVEXPORT, "--pos", f"{file3d}"],
                         cwd=settings.SURVEX_DATA,
@@ -274,7 +271,7 @@ def LoadPositions():
                 stash_data_issue(parser="positions", message=message)
                 print(message)
-    topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
+    topdata = Path(settings.SURVEX_DATA, settings.SURVEX_TOPNAME)
     print(f" - Generating a list of Pos from {topdata}.3d and then loading...")
     found = 0
@@ -282,29 +279,33 @@ def LoadPositions():
     # cavern defaults to using same cwd as supplied input file
     completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
-    cav_t = os.path.getmtime(completed_process.stdout.strip())
+    cav_t = Path(completed_process.stdout.strip()).stat().st_mtime
     cav_d = datetime.fromtimestamp(cav_t).strftime('%d %b %Y %H:%M:%S')
-    svxpath = topdata + ".svx"
-    d3dpath = topdata + ".3d"
-    pospath = topdata + ".pos"
+    svxpath = topdata.with_suffix(".svx")
+    d3dpath = topdata.with_suffix(".3d")
+    pospath = topdata.with_suffix(".pos")
+    if not settings.DEVSERVER:
+        runcavern3d(f"Regen - on server {settings.DEVSERVER=}") # always regenerate .3d and .pos on the server
+    else:
+        # These basic tests fail to capture the case where a *included svx file has changed,
+        # typically this is one of the fixedpts *fix files.
+        for p in [pospath, d3dpath]:
+            if not p.is_file():
+                runcavern3d(f"Creating {p}.3d, .pos")
+        svx_t = svxpath.stat().st_mtime
+        d3d_t = d3dpath.stat().st_mtime # os.path.getmtime(d3dpath)
+        svx_d = datetime.fromtimestamp(svx_t).strftime('%d %b %Y %H:%M:%S')
+        d3d_d = datetime.fromtimestamp(d3d_t).strftime('%d %b %Y %H:%M:%S')
-    svx_t = os.path.getmtime(svxpath)
-    if os.path.isfile(d3dpath):
-        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
-        d3d_t = os.path.getmtime(d3dpath)
-    now = time.time()
-    if not os.path.isfile(pospath):
-        runcavern3d()
-    if not os.path.isfile(d3dpath):
-        runcavern3d()
-    elif d3d_t - svx_t > 0: # stale, 3d older than svx file
-        runcavern3d()
-    elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway
-        runcavern3d()
-    elif cav_t - d3d_t > 0: # new version of cavern
-        runcavern3d()
+        now = time.time()
+        if d3d_t - svx_t < 0: # stale, 3d older than svx file . But .svx timestamp does not reflect *include timestamps
+            runcavern3d(f"Regen - stale {d3d_d} earlier than {svx_d}")
+        elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway
+            runcavern3d(f"Regen - old")
+        elif d3d_t - cav_t < 0: # new version of cavern
+            runcavern3d(f"Regen - new survex version {d3d_d} earlier than {cav_d} ")
     mappoints = {}
     found_points = {}
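A side note on the pathlib switch in the last hunk: topdata is now a Path built from Path(settings.SURVEX_DATA, settings.SURVEX_TOPNAME), so the old concatenation topdata + ".svx" would no longer work (Path objects do not support +), and with_suffix() takes its place. A small illustration, with made-up values standing in for the real settings:

from pathlib import Path

# Hypothetical stand-ins for settings.SURVEX_DATA and settings.SURVEX_TOPNAME.
topdata = Path("/home/expo/loser", "1623")

print(topdata.with_suffix(".svx"))   # /home/expo/loser/1623.svx
print(topdata.with_suffix(".pos"))   # /home/expo/loser/1623.pos

# Caveat: with_suffix() replaces an existing suffix rather than appending,
# so this pattern relies on the top name itself having no extension.
print(Path("/tmp/report.old").with_suffix(".new"))   # /tmp/report.new

Similarly, os.path.getmtime(x) becomes Path(x).stat().st_mtime, which returns the same float timestamp.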