Clean DataIssues output, new therionrefs.log file

Philip Sargent 2022-07-21 19:01:04 +03:00
parent f895a7e44c
commit 931c33cfdb
3 changed files with 35 additions and 15 deletions

.gitignore

@@ -127,3 +127,4 @@ media/jslib/openlayers/Lang/zh-CN.js
 media/jslib/openlayers/Lang/zh-TW.js
 _test_response.html
 _deploy/wsl/localsettingsWSL.py.bak
+therionrefs.log


@@ -26,6 +26,8 @@ todo='''- db Update does not work when a cave id is in the pending list but a pr
 So we will need a separate file-editing capability just for this configuration file ?!
 - crashes on MariaDB on server when deleting Caves and complains Area needs a non null parent, But this is not true.
+The only solution we have found is to let it crash, then stop and restart MariaDB (requires a logon able to sudo)
+and then restart the databasereset.py again. (status as of July 2022)
 '''
 entrances_xslug = {}
 caves_xslug = {}
@@ -47,7 +49,7 @@ def dummy_entrance(k, slug, msg="DUMMY"):
 slug = slug, primary = False)
 except:
 message = f" ! {k:11s} {msg} entrance create failure"
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}')
+DataIssue.objects.create(parser='caves', message=message, url=f'{slug}')
 print(message)
 ent.cached_primary_slug = slug
@@ -56,7 +58,7 @@ def dummy_entrance(k, slug, msg="DUMMY"):
 return ent
 else:
 message = f" ! {k:11s} {msg} cave SLUG '{slug}' create failure"
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}')
+DataIssue.objects.create(parser='caves', message=message, url=f'{slug}')
 print(message)
 raise
@@ -69,12 +71,13 @@ def set_dummy_entrance(id, slug, cave, msg="DUMMY"):
 letter = ""
 entrances_xslug[slug] = entrance
 ce = CaveAndEntrance.objects.update_or_create(cave = cave, entrance_letter = "", entrance = entrance)
-message = f' ! Warning: Dummy Entrance created for {id}, slug:"{slug}" '
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}')
+message = f' ! Warning: Dummy Entrance created for {id}'
+DataIssue.objects.create(parser='caves', message=message, url=f'{cave.url}')
 print(message)
 except:
 message = f' ! Entrance Dummy setting failure, slug:"{slug}" cave id :"{id}" '
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}')
+DataIssue.objects.create(parser='caves', message=message, url=f'{cave.url}')
 print(message)
 def do_pending_cave(k, url, area_1623):
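
These call sites all repeat the same create-then-print pair while the commit converges the url argument on the cave's own URL. A minimal sketch of a wrapper that would keep that reporting in one place; store_data_issue is a hypothetical helper, not something added by this commit, and the import path is an assumption to be matched against whatever the parser already imports:

from troggle.core.models.troggle import DataIssue   # import path assumed; use the parser's existing import

def store_data_issue(message, url, parser='caves'):
    # Record the problem for the data-issues report page and echo it to the terminal,
    # the same pair of statements repeated at each call site above.
    DataIssue.objects.create(parser=parser, message=message, url=url)
    print(message)

# Usage, matching the dummy-entrance warning above:
# store_data_issue(f' ! Warning: Dummy Entrance created for {id}', cave.url)
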
@@ -82,19 +85,28 @@ def do_pending_cave(k, url, area_1623):
 default for a PENDING cave, should be overwritten in the db later if a real cave of the same name exists
 in expoweb/cave_data/1623-"k".html
 '''
+slug = "1623-" + k
 default_note = f"_Survex file found in loser repo but no description in expoweb <br><br><br>\n"
 default_note += f"INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then "
 default_note += f"<br><br>\n\n - (1) search in the survex file for the *ref to find a "
 default_note += f"relevant wallet, e.g.<a href='/survey_scans/2009%252311/'>2009#11</a> and read the notes image files <br>\n - "
 default_note += f"<br><br>\n\n - (2) search in the Expo for that year e.g. <a href='/expedition/{k[0:4]}'>{k[0:4]}</a> to find a "
-default_note += f"relevant logbook entry, then <br>\n - "
+default_note += f"relevant logbook entry, remember that the date may have been recorded incorrectly, "
+default_note += f"so check for trips i.e. logbook entries involving the same people as were listed in the survex file, "
+default_note += f"and you should also check the scanned copy of the logbook (linked from each logbook entry page) "
+default_note += f"just in case a vital trip was not transcribed, then <br>\n - "
 default_note += f"click on 'Edit this cave' and copy the information you find in the survex file and the logbook"
+default_note += f"and delete all the text in the 'Notes' section - which is the text you are reading now."
+default_note += f"<br><br>\n\n - Only two fields on this form are essential. "
+default_note += f"Documentation of all the fields on 'Edit this cave' form is in <a href='/handbook/survey/caveentryfields.html'>handbook/survey/caveentryfields</a>"
 default_note += f"<br><br>\n\n - "
-default_note += f"When you Submit it will create a file file in expoweb/cave_data/ "
+default_note += f"When you Submit it will create a new file in expoweb/cave_data/ "
 default_note += f"<br><br>\n\n - Now you can edit the entrance info: click on Edit below for the dummy entrance. "
 default_note += f"and then Submit to save it (if you forget to do this, a dummy entrance will be created for your new cave description)."
+default_note += f"<br><br>\n\n - Finally, you need to find a nerd to edit the file '<var>expoweb/cave_data/pending.txt</var>' "
+default_note += f"to remove the line <br><var>{slug}</var><br> as it is no longer 'pending' but 'done. Well Done."
-slug = "1623-" + k
 cave = Cave(
 unofficial_number = k,
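
Note on the moved assignment: slug = "1623-" + k now comes before the note-building block because the new closing lines interpolate {slug}; left in its old position below the block, the first reference would raise a NameError. A minimal standalone illustration of that ordering rule (the key value is invented for the example):

k = "258"                     # hypothetical pending-cave key, for illustration only

try:
    note = f"remove the line 1623-{slug} from pending.txt"   # slug not assigned yet
except NameError as err:
    print(err)                # name 'slug' is not defined

slug = "1623-" + k            # assign first, as the commit now does
note = f"remove the line {slug} from pending.txt"
print(note)                   # remove the line 1623-258 from pending.txt
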
@@ -424,7 +436,7 @@ def readcave(filename):
 ce = CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
 except:
 message = f' ! Entrance setting failure, slug:"{slug}" #entrances:{len(entrances)} {entrance} letter:"{letter}" cave:"{c}" filename:"cave_data/{filename}"'
-DataIssue.objects.create(parser='caves', message=message)
+DataIssue.objects.create(parser='caves', message=message, url=f'{c.url}_edit/')
 print(message)
 if survex_file[0]:
@@ -436,18 +448,18 @@ def readcave(filename):
 if description_file[0]: # if not an empty string
 message = f' - {slug:12} complex description filename "{description_file[0]}" inside "{CAVEDESCRIPTIONS}/{filename}"'
-DataIssue.objects.create(parser='caves ok', message=message, url=f'{slug}/edit/')
+DataIssue.objects.create(parser='caves ok', message=message, url=f'/{slug}_cave_edit/')
 print(message)
 if not (Path(EXPOWEB) / description_file[0]).is_file():
 message = f' ! {slug:12} description filename "{EXPOWEB}/{description_file[0]}" does not refer to a real file'
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
+DataIssue.objects.create(parser='caves', message=message, url=f'/{slug}_cave_edit/')
 print(message)
 #c.description_file="" # done only once, to clear out cruft.
 #c.save()
 else: # more than one item in long list
 message = f' ! ABORT loading this cave. in "{filename}"'
-DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slugs}/edit/')
+DataIssue.objects.create(parser='caves', message=message, url=f'/{slug}_cave_edit/')
 print(message)
 def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):


@@ -128,15 +128,19 @@ def settherionfileinfo(filetuple):
 # scrap blownout -projection plan -scale [-81.0 -42.0 216.0 -42.0 0.0 0.0 7.5438 0.0 m]
 for xth_me in rx_xth_me.findall(ttext):
+# WORK IN PROGRESS. Do not clutter up the DataIssues list with this
 message = f'! Un-parsed image filename: {therionfile.dwgname} : {xth_me.split()[-3]} - {therionfile.dwgpath}'
 # print(message)
-DataIssue.objects.create(parser='xTherion', message=message, url=f'/dwgdataraw/{therionfile.dwgpath}')
+# DataIssue.objects.create(parser='xTherion', message=message, url=f'/dwgdataraw/{therionfile.dwgpath}')
+with open('therionrefs.log', 'a') as lg:
+lg.write(message + '\n')
 findimageinsert(therionfile, xth_me)
 for inp in rx_input.findall(ttext):
 # if this 'input' is a .th2 file we have already seen, then we can assign this as a sub-file
 # but we would need to disentangle to get the current path properly
-message = f'! Un-set Therion .th2 input: - {therionfile.dwgname} : {inp} - {therionfile.dwgpath}'
+message = f'! Un-set (?) Therion .th2 input: - {therionfile.dwgname} : {inp} - {therionfile.dwgpath}'
 #print(message)
 DataIssue.objects.create(parser='xTherion', message=message, url=f'/dwgdataraw/{therionfile.dwgpath}')
 findimportinsert(therionfile, inp)
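
The change above stops filing the un-parsed xth_me image references as DataIssue rows and appends them to a plain therionrefs.log file instead, one line per reference, while this parsing is work in progress. A minimal sketch of that append pattern as a standalone helper; log_therion_ref is a hypothetical name, not in the commit:

from pathlib import Path

THERION_LOG = Path('therionrefs.log')   # created in the current working directory, as in the diff

def log_therion_ref(message):
    # Append one message per line; opening in 'a' mode creates the file on first use.
    with open(THERION_LOG, 'a') as lg:
        lg.write(message + '\n')

log_therion_ref('! Un-parsed image filename: example : example.jpg - drawings/example.th2')
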
@@ -207,6 +211,9 @@ def load_drawings_files():
 DataIssue.objects.filter(parser='Therion').delete()
 DataIssue.objects.filter(parser='xTherion').delete()
 DataIssue.objects.filter(parser='Tunnel').delete()
+if(os.path.isfile('therionrefs.log')):
+os.remove('therionrefs.log')
 drawingsdirs = [ "" ]
 while drawingsdirs:
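
The added guard deletes any stale therionrefs.log before a re-parse, alongside clearing the Therion/xTherion/Tunnel DataIssue rows, so the log only ever reflects the latest run. For comparison, the same remove-if-present step can be written with pathlib (missing_ok needs Python 3.8+); this is an equivalent sketch, not the code in the commit:

from pathlib import Path

# Remove the previous run's log, if any, so the fresh parse starts with a clean file.
Path('therionrefs.log').unlink(missing_ok=True)
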