2020-05-28 01:16:45 +01:00
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import time
|
|
|
|
from datetime import datetime, timedelta
|
2011-07-11 00:01:12 +01:00
|
|
|
from subprocess import call, Popen, PIPE
|
2009-05-13 05:39:52 +01:00
|
|
|
|
2020-02-21 15:57:07 +00:00
|
|
|
from django.utils.timezone import get_current_timezone
|
|
|
|
from django.utils.timezone import make_aware
|
|
|
|
|
2020-05-28 01:16:45 +01:00
|
|
|
import troggle.settings as settings
|
|
|
|
import troggle.core.models as models
|
2020-05-28 04:54:53 +01:00
|
|
|
import troggle.core.models_caves as models_caves
|
|
|
|
import troggle.core.models_survex as models_survex
|
2020-05-28 01:16:45 +01:00
|
|
|
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
|
|
|
from troggle.core.views_caves import MapLocations
|
|
|
|
|
2009-05-13 05:14:03 +01:00
|
|
|
|
2020-05-14 17:21:34 +01:00
|
|
|
"""A 'survex block' is a *begin...*end set of cave data.
|
|
|
|
A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet".
|
|
|
|
"""
|
|
|
|
|
2020-06-13 01:24:46 +01:00
|
|
|
rx_braskets= re.compile(r"[()]")
|
|
|
|
rx_line_length = re.compile(r"[\d\-+.]+$")
|
2020-06-12 00:34:53 +01:00
|
|
|
survexlegsalllength = 0.0
|
|
|
|
survexlegsnumber = 0
|
2020-06-15 03:28:51 +01:00
|
|
|
survexblockroot = None
|
2009-05-13 05:39:52 +01:00
|
|
|
|
2020-02-21 15:57:07 +00:00
|
|
|
def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    """Parse one tape/compass/clino data line of a survex block.

    Accumulates the leg count and total tape length into the module-level
    running totals (survexlegsnumber, survexlegsalllength) and into
    survexblock.totalleglength.

    survexblock -- the SurvexBlock this data line belongs to
    stardata    -- field-order mapping built from the current *data command
    sline       -- the data line, trailing comment already stripped
    comment     -- the comment text (unused here; kept for interface symmetry)
    cave        -- the Cave the line belongs to, or None

    Parse failures are recorded as DataIssue rows rather than raised.
    """
    global survexlegsalllength
    global survexlegsnumber
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()
    #ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
    #ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

    # survexleg = models_survex.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    survexleg = models_survex.SurvexLeg()
    # this next fails for two surface survey svx files which use / for decimal point
    # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
    if stardata["type"] == "normal":
        tape = rx_braskets.sub("",ls[stardata["tape"]])
        tape = tape.replace("/",".")
        try:
            survexleg.tape = float(tape)
            survexlegsnumber += 1
        except ValueError:
            print(("! Tape misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.tape = 0
    # Fetch the clino and compass fields. A short line raises IndexError and a
    # *data spec without the field raises KeyError.
    # Bugfix: the except branches previously assigned the undefined name
    # 'error', which raised NameError instead of recording the problem. Use
    # None as an explicit "unreadable" sentinel and skip angle parsing below.
    try:
        lclino = ls[stardata["clino"]]
    except (IndexError, KeyError):
        print(("! Clino misread in", survexblock.survexfile.path))
        print((" Stardata:", stardata))
        print((" Line:", ls))
        message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
        models.DataIssue.objects.create(parser='survex', message=message)
        lclino = None
    try:
        lcompass = ls[stardata["compass"]]
    except (IndexError, KeyError):
        print(("! Compass misread in", survexblock.survexfile.path))
        print((" Stardata:", stardata))
        print((" Line:", ls))
        message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
        models.DataIssue.objects.create(parser='survex', message=message)
        lcompass = None
    if lclino is None or lcompass is None:
        # Reading was missing/malformed and has already been reported above;
        # leave the angles unset rather than crash on the branches below.
        pass
    elif lclino == "up":
        survexleg.compass = 0.0
        survexleg.clino = 90.0
    elif lclino == "down":
        survexleg.compass = 0.0
        survexleg.clino = -90.0
    elif lclino == "-" or lclino == "level":
        try:
            survexleg.compass = float(lcompass)
        except ValueError:
            print(("! Compass misread in", survexblock.survexfile.path))
            print((" Stardata:", stardata))
            print((" Line:", ls))
            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.compass = 1000
        # NOTE(review): -90.0 for a "level"/"-" clino looks odd (0.0 expected)
        # but is preserved from the original code - confirm before changing.
        survexleg.clino = -90.0
    else:
        assert rx_line_length.match(lcompass), ls
        assert rx_line_length.match(lclino) and lclino != "-", ls
        survexleg.compass = float(lcompass)
        survexleg.clino = float(lclino)

    if cave:
        survexleg.cave = cave

    # only save proper legs
    # No need to save as we are measuring lengths only on parsing now.
    # delete the object so that django autosaving doesn't save it.
    survexleg = None
    #survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
            survexlegsalllength += float(ls[itape])
        except ValueError:
            print("! Length not added")
        except IndexError:
            # line too short to contain a tape field; already reported above
            pass
    # No need to save as we are measuring lengths only on parsing now.
    #survexblock.save()
|
2019-02-24 13:03:34 +00:00
|
|
|
|
|
|
|
|
2009-08-05 11:58:36 +01:00
|
|
|
def LoadSurvexEquate(survexblock, sline):
    """Register every station named on a *equate line with the given block.

    A valid *equate names at least two stations; anything less is a data
    error, hence the assertion.
    """
    names = sline.split()
    assert len(names) > 1
    for name in names:
        survexblock.MakeSurvexStation(name)
|
2009-08-05 11:58:36 +01:00
|
|
|
|
2019-02-24 13:03:34 +00:00
|
|
|
|
2009-08-05 11:58:36 +01:00
|
|
|
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    """Handle a data line inside a *data passage block.

    Passage data is LRUD (left/right/up/down) wall measurements rather than
    tape/compass/clino survey legs, so it is deliberately not imported.
    """
    # do not import *data passage.. data which is LRUD not tape/compass/clino
    pass
|
|
|
|
|
2020-06-13 01:24:46 +01:00
|
|
|
# This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
# Maps survex field-name synonyms onto the canonical keys used in stardata.
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}

# Splits a raw svx line into (data, comment) at the first ';'.
rx_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
# Wallet reference inside a comment: year, optional X marker, wallet number.
rx_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(X)?\s*(\d+)')
# A survex star command: captures (command, rest-of-line), dropping any comment.
rx_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
# years from 1960 to 2039
rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
# rx_starref = re.compile("""?x # VERBOSE mode - can't get this to work
# ^\s*\*ref # look for *ref at start of line
# [\s.:]* # some spaces, stops or colons
# ((?:19[6789]\d)|(?:20[0123]\d)) # a date from 1960 to 2039 - captured as one field
# \s*# # spaces then hash separator
# ?\s*(X) # optional X - captured
# ?\s*(.*?\d+.*?) # maybe a space, then at least one digit in the string - captured
# $(?i)""", re.X) # the end (do the whole thing case insensitively)

# *team line: captures (role, names). Names are then split on rx_team_member.
rx_team = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
rx_team_member = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
# QM (question mark / lead) annotation in a comment; see the spec comments in RecursiveLoad.
rx_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')

# Indent prefix for log messages; grows by "> " per recursion level.
insp = ""
# Counter used to emit a progress dot on stderr every 10 files.
callcount = 0
|
2009-08-01 07:31:27 +01:00
|
|
|
def RecursiveLoad(survexblock, survexfile, fin, textlines):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads in the survex blocks, other data and the wallet references (survexscansfolder) as it
    goes. This part of the data import process is where the maximum memory is used and where it
    crashes on memory-constrained machines.

    survexblock -- the current SurvexBlock being populated
    survexfile  -- the SurvexFile being read
    fin         -- open file handle for survexfile
    textlines   -- accumulator list passed down the recursion (currently unused)
    """
    iblankbegins = 0          # depth of anonymous *begin blocks (no name, no recursion)
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]
    global insp
    global callcount
    global survexlegsnumber

    print(insp+" - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path)
    stamp = datetime.now()
    lineno = 0

    # Emit a progress dot on stderr every 10 files so long runs show life.
    sys.stderr.flush();
    callcount +=1
    if callcount >=10:
        callcount=0
        print(".", file=sys.stderr,end='')

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models_caves.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
    svxlines = ''
    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        lineno += 1
        # break the line at the comment
        sline, comment = rx_comment.match(svxline.strip()).groups()
        # detect ref line pointing to the scans directory
        mref = comment and rx_ref.match(comment)
        if mref:
            yr, letterx, wallet = mref.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet )
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet ; ref {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match rx_qm
        # ;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
        # ;QM1 a hobnob_hallway_2.42 hobnob-hallway_3.42 junction of keyhole passage
        # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
        qmline = comment and rx_qm.match(comment)
        if qmline:
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            # print(insp+'Cave - %s' % survexfile.cave)
            # print(insp+'QM no %d' % int(qm_no))
            # print(insp+'QM grade %s' % qm_grade)
            # print(insp+'QM section %s' % qm_from_section)
            # print(insp+'QM station %s' % qm_from_station)
            # print(insp+'QM res section %s' % qm_resolve_section)
            # print(insp+'QM res station %s' % qm_resolve_station)
            # print(insp+'QM notes %s' % qm_notes)

            # If the QM isn't resolved (has a resolving station) then load it
            # NOTE(review): this 'or' chain is always True (a value can't equal
            # both '-' and 'None'); 'and' or 'not in' was probably intended - confirm.
            if not qm_resolve_section or qm_resolve_section != '-' or qm_resolve_section != 'None':
                from_section = models_survex.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunck, named)
                if len(from_section) > 0:
                    from_station = models_survex.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        qm = models_caves.QM.objects.create(number=qm_no,
                                                            nearest_station=from_station[0],
                                                            grade=qm_grade.upper(),
                                                            location_description=qm_notes)
            else:
                # print(insp+' - QM found but resolved')
                pass

        if not sline:
            continue

        # detect the star ref command
        mstar = rx_starref.match(sline)
        if mstar:
            yr,letterx,wallet = mstar.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr
            assert (int(wallet)<100), "Wallet number more than 100: %s" % wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet)
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet *REF {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            continue

        # detect the star command
        mstar = rx_star.match(sline)
        if not mstar:
            # Not a star command: it is a data line, interpreted per the current *data spec.
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
                pass
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            #Missing "station" in stardata.
            continue

        # detect the star command
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if re.match("include$(?i)", cmd):
            # Recurse into the included file, sharing the current survexblock.
            includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line)))
            print((insp+' - Include path found including - ' + includepath))
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(insp+pos_cave)
                cave = models_caves.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print((insp+' - No match in DB (i) for %s, so loading..' % includepath))
            includesurvexfile = models_survex.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                insp += "> "
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)
                insp = insp[2:]

        elif re.match("begin$(?i)", cmd):
            if line:
                # Named *begin: create a child SurvexBlock and recurse into it,
                # sharing the same file handle (the matching *end returns us here).
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    # print(insp+pos_cave)
                    cave = models_caves.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print((insp+' - No match (b) for %s' % newsvxpath))

                # Snapshot the global leg counter so the matching *end can
                # compute how many legs this block contributed.
                previousnlegs = survexlegsnumber
                name = line.lower()
                print((insp+' - Begin found for: ' + name))
                # survexblockdown = models_survex.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock, survexpath=survexblock.survexpath+"."+name,
                                                            cave=survexfile.cave, survexfile=survexfile, legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                textlinesdown = [ ]
                insp += "> "
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
                insp = insp[2:]
            else:
                # Anonymous *begin: no new block, just track nesting depth.
                iblankbegins += 1

        elif re.match("end$(?i)", cmd):
            if iblankbegins:
                iblankbegins -= 1
            else:
                # .text not used, using it for number of legs per block
                # NOTE(review): previousnlegs is only assigned in the named
                # *begin branch of this same invocation; an *end reached
                # without one (e.g. the end matching the caller's begin) would
                # raise NameError here - verify against real input files.
                legsinblock = survexlegsnumber - previousnlegs
                print(insp+"LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,survexlegsnumber))
                survexblock.legsall = legsinblock
                survexblock.save()
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                return

        elif re.match("date$(?i)", cmd):
            # Only full ISO-ish dates (YYYY.MM.DD or YYYY-MM-DD, 10 chars) are used.
            if len(line) == 10:
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()

        elif re.match("team$(?i)", cmd):
            pass
            # print(insp+' - Team found: ')
            mteammember = rx_team.match(line)
            if mteammember:
                for tm in rx_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person=personexpedition.person
                            personrole.save()

        elif cmd == "title":
            survextitle = models_survex.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()
            pass

        elif cmd == "require":
            # should we check survex version available for processing?
            pass

        elif cmd == "data":
            # Rebuild the stardata field-order map from the *data command.
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line

        elif cmd == "equate":
            LoadSurvexEquate(survexblock, line)

        elif cmd == "set" and re.match("names(?i)", line):
            pass
        elif cmd == "flags":
            # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
            # but this data is only used for sense-checking not to actually calculate anything important
            pass
        elif cmd == "fix":
            survexblock.MakeSurvexStation(line.split()[0])
        elif cmd in ["alias", "calibrate", "cs","entrance", "export", "case",
                     "declination", "infer","instrument", "sd", "units"]:
            # we ignore all these, which is fine.
            pass
        else:
            if cmd not in ["include", "data", "flags", "title", "set", "ref"]:
                message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

    endstamp = datetime.now()
    timetaken = endstamp - stamp
    # print(insp+' - Time to process: ' + str(timetaken))
|
2009-08-05 11:58:36 +01:00
|
|
|
|
2009-05-13 05:39:52 +01:00
|
|
|
def LoadAllSurvexBlocks():
    """Flush all survex-related tables, then re-parse everything from the top
    survex file (settings.SURVEX_TOPNAME) via RecursiveLoad.

    Redirects stdout to loadsurvexblks.log for the duration of the parse;
    progress dots go to stderr.
    """
    global survexlegsalllength
    global survexlegsnumber

    print(' - Flushing All Survex Blocks...')

    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexEquate.objects.all().delete()
    #models_survex.SurvexLeg.objects.all().delete()
    models_survex.SurvexTitle.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()

    print(" - Data flushed")
    # Clear the data issues as we are reloading
    models.DataIssue.objects.filter(parser='survex').delete()
    print(' - Loading All Survex Blocks...')

    print(' - redirecting stdout to loadsurvexblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('loadsurvexblks.log', 'w')

    survexfile = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfile.save()
    survexfile.SetDirectory()

    #Load all
    # this is the first so id=1
    # NOTE(review): this local shadows the module-level survexblockroot;
    # LoadPos() relies on re-fetching it by id=1 instead.
    survexblockroot = models_survex.SurvexBlock(name="rootblock", survexpath="", cave=None, survexfile=survexfile,
                                                legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    #survexblockroot.text = "".join(textlines) these are all blank
    survexblockroot.save()

    # Close the file
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush();

    # Restore sys.stdout to our old saved file handler
    sys.stdout = stdout_orig
    # NOTE(review): the "m" suffix on the leg *count* below looks like a
    # copy-paste from the length line - cosmetic, confirm before changing.
    print(" - total number of survex legs: {}m".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')
|
2011-07-11 00:01:12 +01:00
|
|
|
|
|
|
|
|
2019-02-24 13:03:34 +00:00
|
|
|
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
|
|
|
|
|
2011-07-11 00:01:12 +01:00
|
|
|
def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
    all survey point positions. Then lookup each position by name to see if we have it in the database
    and if we do, then save the x/y/z coordinates.
    If we don't have it in the database, print an error message and discard it.
    This is ONLY ever used for entrance and fixedpts locations for the prospecting map:
    about 600 points out of 32,000.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    # TO DO - remove the cache file apparatus. Not needed. Only laser points and entrances loaded now.

    # Be careful with the cache file.
    # If LoadPos has been run before,
    # but without cave import being run before,
    # then *everything* may be in the fresh 'not found' cache file.

    # cachefile = settings.SURVEX_DATA + "posnotfound.cache"
    # notfoundbefore = {}
    # if os.path.isfile(cachefile):
    #     # this is not a good test. 1623.svx may never change but *included files may have done.
    #     # When the *include is unrolled, we will be able to get a proper timestamp to use
    #     # and can increase the timeout from 3 days to 30 days.
    #     updtsvx = os.path.getmtime(topdata + ".svx")
    #     updtcache = os.path.getmtime(cachefile)
    #     age = updtcache - updtsvx
    #     print(('   svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) ))))

    #     now = time.time()
    #     if now - updtcache > 3*24*60*60:
    #         print("   cache is more than 3 days old. Deleting.")
    #         os.remove(cachefile)
    #     elif age < 0 :
    #         print("   cache is stale.  Deleting.")
    #         os.remove(cachefile)
    #     else:
    #         print("   cache is fresh. Reading...")
    #         try:
    #             with open(cachefile, "r") as f:
    #                 for line in f:
    #                     l = line.rstrip()
    #                     if l in notfoundbefore:
    #                         notfoundbefore[l] +=1 # should not be duplicates
    #                         print("   DUPLICATE ", line, notfoundbefore[l])
    #                     else:
    #                         notfoundbefore[l] =1
    #         except:
    #             print("   FAILURE READ opening cache file %s" % (cachefile))
    #             raise

    # notfoundnow =[]
    found = 0
    skip = {}
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
    #print(" - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")

    # Only station ids appearing in the prospecting-map point list are kept.
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid]=True

    posfile = open("%s.pos" % (topdata))
    posfile.readline() #Drop header

    survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            # NOTE(review): 'id' shadows the builtin here; local to this loop only.
            x, y, z, id = r.groups()
            # if id in notfoundbefore:
            #     skip[id] = 1
            # else:
            for sid in mappoints:
                if id.endswith(sid):
                    # notfoundnow.append(id)
                    # Now that we don't import any stations, we create it rather than look it up
                    # ss = models_survex.SurvexStation.objects.lookup(id)

                    # need to set block_id which means doing a search on all the survex blocks..
                    # remove dot at end and add one at beginning
                    blockpath = "." + id[:-len(sid)].strip(".")
                    try:
                        sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                        if len(sbqs)==1:
                            sb = sbqs[0]
                        if len(sbqs)>1:
                            message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = sbqs[0]
                        elif len(sbqs)<=0:
                            message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = survexblockroot
                    except:
                        # NOTE(review): bare except; if the filter itself fails,
                        # 'sb' may be unbound when used below - confirm intent.
                        message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                    try:
                        ss = models_survex.SurvexStation(name=id, block=sb)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise

    #print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
    print(" - {} SurvexStation entrances found.".format(found))

    # if found > 10: # i.e. a previous cave import has been done
    #     try:
    #         with open(cachefile, "w") as f:
    #             c = len(notfoundnow)+len(skip)
    #             for i in notfoundnow:
    #                 pass #f.write("%s\n" % i)
    #             for j in skip:
    #                 pass #f.write("%s\n" % j)  # NB skip not notfoundbefore
    #             print(('   Not-found cache file written: %s entries' % c))
    #     except:
    #         print("   FAILURE WRITE opening cache file %s" % (cachefile))
    #         raise
|