Mirror of https://expo.survex.com/repositories/troggle/.git (synced 2024-11-22 07:11:52 +00:00)
Fixing bugs and better error msgs
This commit is contained in:
parent 8d08a67302
commit 70709c505c
@@ -398,7 +398,8 @@ def survexcaveslist(request):
     It can find caves which have not yet been properly registered in the system by Databasereset.py because
     someone may have uploaded the survex files without doing the rest of the integration process.
     '''
-    # TO DO - filter out the non-public caves from display UNLESS LOGGED INS
+    # TO DO - filter out the non-public caves from display UNLESS LOGGED IN
+    # This is very impenetrable code, original from Aaron Curtis I think.
     onefilecaves = [ ]
     multifilecaves = [ ]
     subdircaves = [ ]
@@ -422,6 +423,8 @@ def survexcaveslist(request):
         # This all assumes that the first .svx file has the same name as the cave name,
         # which usually but not always true. e.g. caves-1623/78/allkaese.svx not caves-1623/78/78.svx
         # which is why we now also pass through the cavedir
+
+        # Still fails for loutitohoehle etc even though this is set correctly when the pending cave is created
         cavesdir = get_survexareapath(area)
         gcavedir = os.path.join(cavesdir, cavedir)
         if os.path.isdir(gcavedir) and cavedir[0] != ".":
@@ -493,11 +496,12 @@ def survexcavesingle(request, survex_cave):

 def check_cave_registered(area, survex_cave):
     '''Checks whether a cave has been properly registered when it is found in the Loser repo
-    This should be called by Databasereset not here in a view
+    This should really be called by Databasereset not here in a view
     Currently Caves are only registered if they are listed in :expoweb: settings.CAVEDESCRIPTIONS
     so we need to add in any more here.

     This function runs but does not seem to be used?!
+    A serious bodge anyway.
     '''
     try:
         cave = Cave.objects.get(kataster_number=survex_cave)
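check_cave_registered() does a straight kataster-number lookup; the rest of its body is not shown in this hunk. A hedged sketch of how such a lookup is usually written with the Django ORM (the Cave model is the one referenced in the diff; the exception handling shown here is an assumption, not necessarily what troggle does):

from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist

def check_cave_registered_sketch(area, survex_cave):
    '''Sketch only: return the registered Cave for this kataster number, or None.'''
    try:
        # Cave is the model imported in the real module; assumed available here
        return Cave.objects.get(kataster_number=survex_cave)
    except ObjectDoesNotExist:
        return None   # not registered: no cave_data XML entry yet
    except MultipleObjectsReturned:
        return None   # ambiguous: more than one cave claims this number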
@@ -86,11 +86,47 @@ def do_pending_cave(k, url, area):
     Note that at this point in importing the data we have not yet seen the survex files, so we can't
     look inside the relevant survex file to find the year and so we con't provide helpful links.
     '''
+    def get_survex_file(k):
+        '''Guesses at and finds a survex file for this pending cave.
+        Convoluted. Sorry. Needs rewriting
+        '''
+        if k[0:3] == "162":
+            id = Path(k[5:])
+        else:
+            id = Path(k)
+
+        survex_file = f"caves-{area.short_name}/{id}/{id}.svx"
+        if Path(settings.SURVEX_DATA, survex_file).is_file():
+            return survex_file
+        else:
+            survex_file = f"caves-{area.short_name}/{id}.svx"
+            if Path(settings.SURVEX_DATA, survex_file).is_file():
+                return survex_file
+
+        survex_file = ""
+        d = Path(settings.SURVEX_DATA, f"caves-{area.short_name}/{id}")
+        if d.is_dir():
+            prime_suspect = ""
+            dir = d.iterdir()
+            for f in dir:
+                if f.suffix == ".svx":
+                    survex_file = f.relative_to(settings.SURVEX_DATA)
+                    chk = min(5, len(f.name)-1)
+                    if str(f.name)[:chk].lower() == str(id.name)[:chk].lower(): # bodge which mostly works
+                        prime_suspect = survex_file
+            if prime_suspect:
+                survex_file = prime_suspect
+                # message = f" ! {k:14} Found a survex file which might be the right one: {survex_file}"
+                # DataIssue.objects.create(parser='caves', message=message, url=url)
+                # print(message)
+        return survex_file
+
     slug = k

     g = GetCaveLookup()
+    with transaction.atomic():
         if slug in g:
-            message = f" ! {k} cave listed in pendingcaves.txt already exists."
+            message = f" ! {k:18} cave listed in pendingcaves.txt already exists."
             DataIssue.objects.create(parser='caves', message=message, url=url)
             print(message)
             return
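The nested get_survex_file() helper added above tries three guesses in order: caves-<area>/<id>/<id>.svx, then caves-<area>/<id>.svx, then any .svx file inside caves-<area>/<id>/, preferring one whose first few characters match the cave id. A standalone sketch of that search order, with SURVEX_DATA and the area short name passed in explicitly (the names and wrapper function here are illustrative, not troggle's):

from pathlib import Path

SURVEX_DATA = Path("/path/to/loser")   # assumption: stands in for settings.SURVEX_DATA

def guess_survex_file(k, area_short_name):
    """Sketch of the guessing order used by the new get_survex_file() helper."""
    cave_id = Path(k[5:]) if k[0:3] == "162" else Path(k)

    candidate = f"caves-{area_short_name}/{cave_id}/{cave_id}.svx"
    if (SURVEX_DATA / candidate).is_file():
        return candidate

    candidate = f"caves-{area_short_name}/{cave_id}.svx"
    if (SURVEX_DATA / candidate).is_file():
        return candidate

    found = ""
    cave_dir = SURVEX_DATA / f"caves-{area_short_name}/{cave_id}"
    if cave_dir.is_dir():
        prime_suspect = ""
        for f in cave_dir.iterdir():
            if f.suffix == ".svx":
                found = str(f.relative_to(SURVEX_DATA))
                chk = min(5, len(f.name) - 1)
                if f.name[:chk].lower() == cave_id.name[:chk].lower():  # same bodge as the commit
                    prime_suspect = found
        if prime_suspect:
            found = prime_suspect
    return found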
@@ -119,18 +155,19 @@ def do_pending_cave(k, url, area):
     default_note += f"<br><br>\n\n - Finally, you need to find a nerd to edit the file '<var>expoweb/cave_data/pending.txt</var>' "
     default_note += f"to remove the line <br><var>{slug}</var><br> as it is no longer 'pending' but 'done. Well Done."

+    survex_file = get_survex_file(k)

     cave = Cave(
             unofficial_number = k,
             underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
-            survex_file = f"caves-{area.short_name}/{k[5:]}/{k[5:]}.svx",
+            survex_file = survex_file,
             url = url,
             notes = default_note)
     if cave:
         cave.save() # must save to have id before foreign keys work. This is also a ManyToMany key.
         cave.area.add(area)
         cave.save()
-        message = f" ! {k:14} {cave.underground_description} url: {url}"
+        message = f" ! {k:18} {cave.underground_description} url: {url}"
         DataIssue.objects.create(parser='caves', message=message, url=url)
         print(message)

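The ordering of the save() calls matters here: area is a ManyToMany field, so the Cave must be saved (and so acquire a primary key) before cave.area.add(area) can create the link row. A sketch of just that ordering, using the field names from the diff (the wrapper function itself is hypothetical, and Cave is assumed to be the model used in the real module):

def create_pending_cave(k, url, area, survex_file, default_note):
    """Sketch: build a placeholder Cave and attach its Area."""
    cave = Cave(
        unofficial_number = k,
        underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
        survex_file = survex_file,
        url = url,
        notes = default_note)
    cave.save()           # must save first: the M2M table needs the cave's primary key
    cave.area.add(area)   # now the area link can be recorded
    cave.save()
    return cave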
@@ -391,7 +428,7 @@ def readcave(filename):


     if description_file[0]: # if not an empty string
-        message = f' - {slug:12} complex description filename "{description_file[0]}" inside "{CAVEDESCRIPTIONS}/{filename}"'
+        message = f' - {slug:12} Note (not an error): complex description filename "{description_file[0]}" inside "{CAVEDESCRIPTIONS}/{filename}"'
         DataIssue.objects.create(parser='caves ok', message=message, url=f'/{slug}_cave_edit/')
         print(message)

@@ -434,7 +471,7 @@ def readcaves():
     with open(fpending, "r") as fo:
         cids = fo.readlines()
         for cid in cids:
-            pending.add(cid.rstrip('\n'))
+            pending.add(cid.strip().rstrip('\n').upper())

     with transaction.atomic():
         print(" - Deleting Caves and Entrances")
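The only functional change in this hunk is that each line of pendingcaves.txt is now stripped of surrounding whitespace and upper-cased before being added to the pending set, so entries such as ' 1623-abc ' and '1623-ABC' collapse to one key. The pattern in isolation (the file path is a stand-in for the real fpending):

pending = set()
with open("pendingcaves.txt") as fo:   # stand-in for the real fpending path
    for cid in fo.readlines():
        # strip() already removes the trailing newline; rstrip('\n') kept to mirror the commit
        pending.add(cid.strip().rstrip('\n').upper())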
@@ -460,16 +497,12 @@ def readcaves():
         print(" - Creating Areas 1623, 1624, 1627 and 1626")
         # This crashes on the server with MariaDB even though a null parent is explicitly allowed.
         area_1623= Area.objects.create(short_name = "1623", super=None)
-        print(" - Saving Area 1623")
         area_1623.save()
         area_1624= Area.objects.create(short_name = "1624", super=None)
-        print(" - Saving Area 1624")
         area_1624.save()
         area_1626= Area.objects.create(short_name = "1626", super=None)
-        print(" - Saving Area 1626")
         area_1626.save()
         area_1627= Area.objects.create(short_name = "1627", super=None)
-        print(" - Saving Area 1627")
         area_1627.save()


@@ -495,6 +528,7 @@ def readcaves():
     print (" - Setting pending caves")
     # Do this last, so we can detect if they are created and no longer 'pending'

+    with transaction.atomic():
         for k in pending:

             if k[0:3] == "162":
@@ -512,6 +546,8 @@ def readcaves():
                 area = area_1624
             if areanum == "1626":
                 area = area_1626
+            if areanum == "1627":
+                area = area_1627
             try:
                 do_pending_cave(k, url, area)
             except:
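This hunk simply teaches readcaves() about area 1627 alongside 1623, 1624 and 1626. The same dispatch could be written as a lookup table; a sketch follows (the helper function and its fallback are assumptions, while the four Area objects are the ones created earlier in this diff):

def area_for(areanum, area_1623, area_1624, area_1626, area_1627):
    """Sketch: map a 4-digit kataster area prefix to its Area object."""
    table = {
        "1623": area_1623,
        "1624": area_1624,
        "1626": area_1626,
        "1627": area_1627,   # the case added by this commit
    }
    return table.get(areanum, area_1623)   # fallback choice is an assumption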
@ -523,7 +523,7 @@ class LoadingSurvex():
|
|||||||
yr, letterx, wallet = argsgps.groups()
|
yr, letterx, wallet = argsgps.groups()
|
||||||
else:
|
else:
|
||||||
perps = get_people_on_trip(survexblock)
|
perps = get_people_on_trip(survexblock)
|
||||||
message = f" ! Wallet *REF '{args}' malformed id in '{survexblock.survexfile.path}' {perps}"
|
message = f" ! Wallet *REF bad in '{survexblock.survexfile.path}' malformed id '{args}' {perps}"
|
||||||
print(self.insp+message)
|
print(self.insp+message)
|
||||||
DataIssue.objects.create(parser='survex', message=message, url=url)
|
DataIssue.objects.create(parser='survex', message=message, url=url)
|
||||||
return
|
return
|
||||||
@@ -549,19 +549,25 @@ class LoadingSurvex():
             message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
             print(self.insp+message)
             DataIssue.objects.create(parser='survex', message=message, url=url)

         manywallets = Wallet.objects.filter(walletname=refscan) # assumes all wallets found in earlier pass of data import
         if manywallets:
-            survexblock.scanswallet = manywallets[0] # this is a ForeignKey field
-            message = f' - Wallet {manywallets[0]=} successfully found in db. *ref in {survexblock.survexfile.path}'
-            # print(self.insp+message)
-            survexblock.save()
             if len(manywallets) > 1:
-                message = " ! Wallet *REF {} - more than one found {} wallets in block {}".format(refscan, len(manywallets), survexblock.survexfile.path)
+                message = " ! Wallet *REF {} - more than one found {} wallets in db with same id {}".format(refscan, len(manywallets), survexblock.survexfile.path)
+                print(self.insp+message)
+                DataIssue.objects.create(parser='survex', message=message, url=url)
+
+            if survexblock.scanswallet:
+                if survexblock.scanswallet.walletname != refscan:
+                    message = f" ! Wallet *REF {refscan} in {survexblock.survexfile.path} - Already a DIFFERENT wallet is set for this block '{survexblock.scanswallet.walletname}'"
                     print(self.insp+message)
                     DataIssue.objects.create(parser='survex', message=message, url=url)
+            else:
+                survexblock.scanswallet = manywallets[0] # this is a ForeignKey field
+                survexblock.save()
         else:
             perps = get_people_on_trip(survexblock)
-            message = f" ! Wallet *REF '{refscan}' in '{survexblock.survexfile.path}' {perps} NOT in database i.e. wallet does not exist."
+            message = f" ! Wallet *REF bad in '{survexblock.survexfile.path}' '{refscan}' NOT in database i.e. wallet does not exist {perps}."
             print(self.insp+message)
             DataIssue.objects.create(parser='survex', message=message, url=url)

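The rewritten branch changes when scanswallet is assigned: duplicate wallet names are reported, a block that already points at a different wallet is left alone (with a warning), and the ForeignKey is only set when the block has no wallet yet. Condensed into one hypothetical helper, with the DataIssue logging reduced to print:

def attach_wallet(survexblock, refscan, manywallets):
    """Sketch of the assignment rules introduced by this commit."""
    if not manywallets:
        print(f" ! Wallet *REF '{refscan}' NOT in database i.e. wallet does not exist")
        return
    if len(manywallets) > 1:
        print(f" ! Wallet *REF {refscan} - {len(manywallets)} wallets in db with same id")
    if survexblock.scanswallet:
        if survexblock.scanswallet.walletname != refscan:
            print(f" ! A DIFFERENT wallet '{survexblock.scanswallet.walletname}' is already set for this block")
    else:
        survexblock.scanswallet = manywallets[0]   # ForeignKey field
        survexblock.save()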
@@ -994,7 +1000,7 @@ class LoadingSurvex():
         blockcount +=1
         if blockcount % 20 ==0 :
             print(".", file=sys.stderr,end='')
-        if blockcount % 400 ==0 :
+        if blockcount % 800 ==0 :
             print("\n", file=sys.stderr,end='')
             mem=get_process_memory()
             print(" - MEM:{:7.3f} MB in use".format(mem),file=sys.stderr)
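The only change here is the reporting cadence while the loader walks the collated survex blocks: a dot is still printed every 20 blocks, but the newline-plus-memory line now appears every 800 blocks instead of every 400, halving the noise on stderr. The pattern in isolation (the wrapper function is hypothetical; get_process_memory is passed in):

import sys

def report_progress(blockcount, get_process_memory):
    """Sketch of the progress display used during block loading."""
    if blockcount % 20 == 0:
        print(".", file=sys.stderr, end='')
    if blockcount % 800 == 0:     # was 400 before this commit
        print("\n", file=sys.stderr, end='')
        mem = get_process_memory()
        print(" - MEM:{:7.3f} MB in use".format(mem), file=sys.stderr)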
@@ -1603,16 +1609,12 @@ def FindAndLoadSurvex(survexblockroot):



     # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
     # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
     # look in MapLocations() for how we find the entrances

     print('\n - Loading All Survex Blocks (LinearLoad)',file=sys.stderr)
     svx_load = LoadingSurvex()
-    mem1 = get_process_memory()
-    print(" - MEM:{:7.2f} MB after creating empty loading object.".format(mem1),file=sys.stderr)

     svx_load.survexdict[survexfileroot.survexdirectory] = []
     svx_load.survexdict[survexfileroot.survexdirectory].append(survexfileroot)
@@ -1624,6 +1626,8 @@ def FindAndLoadSurvex(survexblockroot):
         svxlines = fcollate.read().splitlines()
         #pr2 = cProfile.Profile()
         #pr2.enable()
+        mem1 = get_process_memory()
+        print(f" - MEM:{mem1:7.2f} MB immediately after reading '{collatefilename}' into memory.",file=sys.stderr)
         print(" ", file=sys.stderr,end='')
         #----------------------------------------------------------------
         svx_load.LinearLoad(survexblockroot,survexfileroot.path, svxlines)
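The memory snapshot is now taken immediately after the collated file is read into memory rather than just after the loading object is created, which makes the later "ADDITIONALLY USED" figure more meaningful. get_process_memory() itself is not part of this diff; one common Linux approach (an assumption, not necessarily troggle's implementation) is:

import resource

def get_process_memory():
    """Approximate memory use in MB; on Linux ru_maxrss is the peak RSS in kilobytes."""
    return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024.0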
@@ -1633,6 +1637,7 @@ def FindAndLoadSurvex(survexblockroot):
     # ps = pstats.Stats(pr2, stream=f)
     # ps.sort_stats(SortKey.CUMULATIVE)
     # ps.print_stats()
+    svxlines = [] # empty 30MB of stashed file
     mem1 = get_process_memory()
     print("\n - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
     print(" - MEM:{:7.3f} MB ADDITIONALLY USED".format(mem1-mem0),file=sys.stderr)
@@ -1748,24 +1753,24 @@ def LoadPositions():
                                 capture_output=True, check=False, text=True) #check=False means exception not raised
             if sp.returncode != 0:
                 message = f' ! Error: cavern: creating {file3d} in runcavern3()'
-                DataIssue.objects.create(parser='survex', message=message)
+                DataIssue.objects.create(parser='entrances', message=message)
                 print(message)

             # find the errors in the 1623.log file
             sp = subprocess.run(["grep", "error:", f"{topdata}.log"],
                                 capture_output=True, check=False, text=True) #check=False means exception not raised
-            message = f' ! Error: cavern: {sp.stdout}'
-            DataIssue.objects.create(parser='survex', message=message)
+            message = f' ! Error: cavern: {sp.stdout} creating {file3d} '
+            DataIssue.objects.create(parser='entrances', message=message)
             print(message)

         except:
-            message = " ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
-            DataIssue.objects.create(parser='survex', message=message)
+            message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
+            DataIssue.objects.create(parser='entrances', message=message)
             print(message)

             if file3d.is_file():
-                message = " ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
-                DataIssue.objects.create(parser='survex', message=message)
+                message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
+                DataIssue.objects.create(parser='entrances', message=message)
                 print(message)

         if file3d.is_file(): # might be an old one though
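runcavern3() runs cavern with check=False, so a failure is detected from the return code rather than an exception, and it then greps the generated .log for "error:" lines; this commit adds the .3d filename to the grep message and reclassifies the resulting DataIssues from parser='survex' to parser='entrances'. A self-contained sketch of the detection pattern (the cavern command line is not shown in the hunk, so its arguments here are an assumption):

import subprocess

def check_cavern_run(topdata, file3d):
    """Sketch: run cavern, then mine its log for error lines."""
    sp = subprocess.run(["cavern", "-o", str(file3d), f"{topdata}.svx"],
                        capture_output=True, check=False, text=True)  # check=False: no exception on failure
    if sp.returncode != 0:
        print(f' ! Error: cavern: creating {file3d} in runcavern3()')

    sp = subprocess.run(["grep", "error:", f"{topdata}.log"],
                        capture_output=True, check=False, text=True)
    if sp.stdout:
        print(f' ! Error: cavern: {sp.stdout} creating {file3d} ')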
@@ -1776,7 +1781,7 @@ def LoadPositions():
             if sp.returncode != 0:
                 print(f' ! Error: survexport creating {topdata}.pos in runcavern3().\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
         except:
-            message = " ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
+            message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
             DataIssue.objects.create(parser='entrances', message=message)
             print(message)
         else:
@@ -1837,9 +1842,9 @@ def LoadPositions():
     try:
         survexblockroot = SurvexBlock.objects.get(id=1)
     except:
-        message = ' ! FAILED to find root SurvexBlock'
+        message = f' ! FAILED to find root SurvexBlock'
         print(message)
-        DataIssue.objects.create(parser='survex', message=message)
+        DataIssue.objects.create(parser='entrances', message=message)
         raise
     for line in posfile.readlines():
         r = poslineregex.match(line)
@@ -1859,17 +1864,17 @@ def LoadPositions():
                 if len(sbqs)>1:
                     message = " ! MULTIPLE SurvexBlocks {:3} matching Entrance point {} {} '{}'".format(len(sbqs), blockpath, sid, id)
                     print(message)
-                    DataIssue.objects.create(parser='survex', message=message)
+                    DataIssue.objects.create(parser='entrances', message=message)
                     sb = sbqs[0]
                 elif len(sbqs)<=0:
                     message = " ! ZERO SurvexBlocks matching Entrance point {} {} '{}'".format(blockpath, sid, id)
                     print(message)
-                    DataIssue.objects.create(parser='survex', message=message)
+                    DataIssue.objects.create(parser='entrances', message=message)
                     sb = survexblockroot
             except:
                 message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                 print(message)
-                DataIssue.objects.create(parser='survex', message=message)
+                DataIssue.objects.create(parser='entrances', message=message)
             try:
                 ss = SurvexStation(name=id, block=survexblockroot)
                 ss.x = float(x)
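LoadPositions() reads the .pos file produced by survexport, where each line has the form "( easting, northing, altitude ) survex.station.name", and poslineregex picks out the three coordinates and the station id before the SurvexStation is created or matched to a block. A hedged sketch of that parse with an illustrative regex (not troggle's actual poslineregex):

import re

# illustrative pattern, not troggle's poslineregex
POSLINE = re.compile(r"^\(\s*([+-]?[\d.]+),\s*([+-]?[\d.]+),\s*([+-]?[\d.]+)\s*\)\s+(\S+)")

def parse_pos_line(line):
    """Return (x, y, z, station_id) for a .pos line, or None if it does not match."""
    r = POSLINE.match(line)
    if not r:
        return None
    x, y, z, station_id = r.groups()
    return float(x), float(y), float(z), station_id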
@@ -1880,7 +1885,7 @@ def LoadPositions():
             except:
                 message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                 print(message)
-                DataIssue.objects.create(parser='survex', message=message)
+                DataIssue.objects.create(parser='entrances', message=message)
                 raise
     print(" - {} SurvexStation entrances found.".format(found))

@@ -24,7 +24,8 @@ All the processing to extract the survex subdriectories and survex files is done
 <a href="/handbook/computing/repos.html"><var>:loser:</var></a> repository
 but whoever was
 responsible has not yet created the appropriate entries in the XML file in the
-<a href="/handbook/computing/repos.html"><var>:expoweb:</var></a> repository.
+<a href="/handbook/computing/repos.html"><var>:expoweb:</var></a> repository
+AND no one has put the name of the cave in the expoweb/cave_data/pendingcaves.txt list.
 It is the XML file which registers the cave description and ties together
 the survex files with everything else.
 <p>The process for registering a new cave is documented in