forked from expo/troggle

replace assert() with message logging

parent 2467065ac3
commit daf58e9e45
@@ -1,7 +1,6 @@
 import string
 import os
 import datetime
-import logging
 import re
 import json
 from subprocess import call
@@ -1,7 +1,6 @@
 import string
 import os
 import datetime
-import logging
 import re
 import resource
 from subprocess import call
@@ -51,7 +50,7 @@ class DataIssue(TroggleModel):
 This is a use of the NOTIFICATION pattern:
 https://martinfowler.com/eaaDev/Notification.html
 
-And we need to use it to replace all assertions in the code too:
+We have replaced all assertions in the code with messages and local fix-ups or skips:
 https://martinfowler.com/articles/replaceThrowWithNotification.html
 """
 date = models.DateTimeField(auto_now_add=True, blank=True)
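
The hunks that follow all apply the replacement shape this docstring describes. A minimal sketch of that shape, assuming the DataIssue model above and a loop body that can skip a bad record (the continue recovery is an assumption; the real hunks pick a skip, a local fix-up, or a fall-through case by case):

    # Before: a hard assert that aborted the whole import run.
    # assert len(tripheadp) == 3, (tripheadp, triptext)

    # After: record the problem as a DataIssue notification, then recover locally.
    if len(tripheadp) != 3:
        message = " ! - Bad no of items in tripdate in logbook: " + tripdate
        DataIssue.objects.create(parser='logbooks', message=message)
        continue  # hypothetical recovery step
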
@@ -1,6 +1,5 @@
 import sys
 import re
-import logging
 
 from django.conf import settings
 from django.shortcuts import render
@@ -1,10 +1,10 @@
 import string
 import os
 import datetime
-import logging
 import re
 import resource
 import random
+import logging
 from subprocess import call
 
 from urllib.parse import urljoin
@@ -35,7 +35,6 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
 if mul:
 tripperson = mul.group(1).strip()
 if tripperson and tripperson[0] != '*':
-#assert tripperson in personyearmap, "'%s' << %s\n\n %s" % (tripperson, trippeople, personyearmap)
 tripperson = re.sub(round_bracket_regex, "", tripperson).strip()
 personyear = GetPersonExpeditionNameLookup(expedition).get(tripperson.lower())
 if not personyear:
@@ -160,7 +159,6 @@ def Parselogwikitxt(year, expedition, txt):
 for triphead, triptext in trippara:
 logbook_entry_count += 1
 tripheadp = triphead.split("|")
-# assert len(tripheadp) == 3, (tripheadp, triptext)
 if not (len(tripheadp) == 3):
 message = " ! - Bad no of items in tripdate in logbook: " + tripdate + " - " + tripheadp
 DataIssue.objects.create(parser='logbooks', message=message)
@@ -169,6 +167,12 @@ def Parselogwikitxt(year, expedition, txt):
 tripdate, tripplace, trippeople = tripheadp
 tripsplace = tripplace.split(" - ")
 tripcave = tripsplace[0].strip()
+if len(tripsplace) == 1:
+    tripsplace = tripsplace[0]
+else:
+    tripsplace = tripsplace[1]
+
+print(f"! LOGBOOK {year} {logbook_entry_count:2} {len(triptext):4} '{tripsplace}'")
 
 tul = re.findall(r"T/?U:?\s*(\d+(?:\.\d*)?|unknown)\s*(hrs|hours)?", triptext)
 if tul:
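
For context, the wiki-format trip header unpacked here is pipe-separated, with the place optionally split on " - "; a hypothetical header (the values are invented) goes through these lines as follows:

    triphead = "2009-07-21|204 - Swings and Roundabouts|Becka, Wookey"  # hypothetical example
    tripdate, tripplace, trippeople = triphead.split("|")
    tripsplace = tripplace.split(" - ")          # ['204', 'Swings and Roundabouts']
    tripcave = tripsplace[0].strip()             # '204'
    tripsplace = tripsplace[1] if len(tripsplace) > 1 else tripsplace[0]
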
@@ -193,7 +197,7 @@ def Parselogwikitxt(year, expedition, txt):
 def EnterLogIntoObjStore(year, date, tripcave, triptitle, text, trippeople, tu, formattype, tripid1, seq):
 # This will need additional functions to replicate the persontrip calculation and storage. For the
 # moment we leave all that to be done in the django db
-global trips # should be a singleton class object in models.py eventually
+global trips # should be a singleton TROG eventually
 global logdataissues
 
 if tripid1 is None or tripid1 =="":
@@ -354,7 +358,6 @@ def Parseloghtml03(year, expedition, txt):
 logbook_entry_count += 1
 
 s = re.match(r"(?s)\s*<p>(.*?)</p>(.*)$", trippara)
-#assert s, trippara
 if not ( s ) :
 message = " ! - Skipping logentry on failure to parse Parseloghtml03: {} {} {}...".format(tripentry,s,trippara[:300])
 DataIssue.objects.create(parser='logbooks', message=message)
@@ -213,7 +213,11 @@ class LoadingSurvex():
 expo = self.expos[year]
 else:
 expeditions = Expedition.objects.filter(year=year)
-assert len(expeditions) == 1
+if len(expeditions) != 1 :
+    message = f"! More than one expedition in year {year} '{line}' ({survexblock}) {survexblock.survexfile.path}"
+    print((self.insp+message))
+    DataIssue.objects.create(parser='survexunits', message=message)
+
 expo= expeditions[0]
 self.expos[year]= expo
 
@@ -411,7 +415,11 @@ class LoadingSurvex():
 letterx = "X"
 if len(wallet)<2:
 wallet = "0" + wallet
-assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr
+if not (int(yr)>1960 and int(yr)<2039):
+    message = " ! Wallet year out of bounds {yr} '{refscan}' {survexblock.survexfile.path}"
+    print((self.insp+message))
+    DataIssue.objects.create(parser='survex', message=message)
+
 refscan = "%s#%s%s" % (yr, letterx, wallet)
 try:
 if int(wallet)>100:
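
The message / print / DataIssue.objects.create triple recurs in each of these replacements; a hypothetical helper (not part of this commit) that the pattern could be factored into:

    def store_data_issue(parser, message, insp=""):
        # Hypothetical wrapper for the notification triple written inline above.
        print(insp + message)
        DataIssue.objects.create(parser=parser, message=message)

    # e.g. instead of the three inline lines:
    # store_data_issue('survexunits', message, self.insp)
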
@@ -1,7 +1,6 @@
 import sys
 import os
 import types
-import logging
 import stat
 import csv
 import re
@@ -46,9 +45,12 @@ def listdir(*directories):
 def GetListDir(sdir):
 res = [ ]
 if sdir[:7] == "http://":
-assert False, "Not written"
-s = urllib.request.urlopen(sdir)
-else:
+# s = urllib.request.urlopen(sdir)
+message = f"! Requesting loading from http:// NOT IMPLEMENTED. [{sdir}]"
+print(message)
+DataIssue.objects.create(parser='Drawings', message=message)
+sdir[:7] = ""
+
 for f in os.listdir(sdir):
 if f[0] != ".":
 ff = os.path.join(sdir, f)
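
One caveat on the new branch: slice assignment such as sdir[:7] = "" only works on mutable sequences, so it raises a TypeError when sdir is an ordinary string; a sketch of the prefix strip presumably intended (assuming sdir is a str):

    if sdir.startswith("http://"):
        sdir = sdir[len("http://"):]  # strings are immutable, so rebind rather than slice-assign
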
@@ -67,7 +69,6 @@ def LoadListScansFile(scansfolder):
 
 c=0
 for (fyf, ffyf, fisdiryf) in gld:
-#assert not fisdiryf, ffyf
 if re.search(r"\.(?:png|jpg|jpeg|pdf|svg|gif)(?i)$", fyf):
 singlescan = SingleScan(ffile=ffyf, name=fyf, scansfolder=scansfolder)
 singlescan.save()
@@ -106,7 +107,6 @@ def LoadListScans():
 print("%s" % f, end=' ')
 for fy, ffy, fisdiry in GetListDir(ff):
 if fisdiry:
-assert fisdiry, ffy
 scansfolder = ScansFolder(fpath=ffy, walletname=fy)
 scansfolder.save()
 LoadListScansFile(scansfolder)
@@ -120,20 +120,25 @@ def LoadListScans():
 
 def find_tunnel_scan(tunnelfile, path):
 '''Is given a line of text 'path' which may or may not contain a recognisable name of a scanned file
-which we have already seen when we imported all the files we could find in teh surveyscans direstories
+which we have already seen when we imported all the files we could find in the surveyscans direstories
 '''
 scansfolder, scansfile = None, None
 mscansdir = re.search(r"(\d\d\d\d#X?\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg|pdf|jpeg))$", path)
 if mscansdir:
 scansfolderl = ScansFolder.objects.filter(walletname=mscansdir.group(1))
+# This should properly detect if a list of folders is returned and do something sensible, not just pick the first.
 if len(scansfolderl):
-assert len(scansfolderl) == 1
 scansfolder = scansfolderl[0]
+if len(scansfolderl) > 1:
+    message = "! More than one scan FOLDER matches filter query. [{}]: {} {} {} {}".format(scansfilel[0], mscansdir.group(1), mscansdir.group(2), tunnelfile.tunnelpath, path)
+    print(message)
+    DataIssue.objects.create(parser='Tunnel', message=message)
+
 if scansfolder:
 scansfilel = scansfolder.singlescan_set.filter(name=mscansdir.group(2))
 if len(scansfilel):
 if len(scansfilel) > 1:
-message = "! More than one image filename matches filter query. [{}]: {} {} {} {}".format(scansfilel[0], mscansdir.group(1), mscansdir.group(2), tunnelfile.tunnelpath, path)
+message = "! More than one image FILENAME matches filter query. [{}]: {} {} {} {}".format(scansfilel[0], mscansdir.group(1), mscansdir.group(2), tunnelfile.tunnelpath, path)
 print(message)
 DataIssue.objects.create(parser='Tunnel', message=message)
 scansfile = scansfilel[0]
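
The search pattern in find_tunnel_scan expects a wallet-style folder name followed by an image file name; a hypothetical path (the names are invented) that it would recognise:

    import re

    path = "2018#X07/notes1.jpg"   # hypothetical scanned-file reference in a tunnel drawing
    mscansdir = re.search(r"(\d\d\d\d#X?\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg|pdf|jpeg))$", path)
    mscansdir.group(1), mscansdir.group(2)   # ('2018#X07', 'notes1.jpg')
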
@@ -76,6 +76,7 @@ LOGBOOK_PARSER_SETTINGS = {
 "2009": ("2009/2009logbook.txt", "Parselogwikitxt"),
 "2008": ("2008/2008logbook.txt", "Parselogwikitxt"),
 "2007": ("2007/logbook.html", "Parseloghtmltxt"),
+"2006": ("2006/logbook.html", "Parseloghtmltxt"),
 # "2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
 "2006": ("2006/logbook.html", "Parseloghtmltxt"),
 "2005": ("2005/logbook.html", "Parseloghtmltxt"),
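
After this hunk the literal holds two identical "2006" entries (one newly added, one already present further down). In a Python dict literal a repeated key silently keeps the later value, so the duplicate is harmless here but redundant, as this sketch with made-up values shows:

    entries = {
        "2006": ("2006/logbook/logbook_06.txt", "Parselogwikitxt"),
        "2006": ("2006/logbook.html", "Parseloghtmltxt"),
    }
    entries["2006"]   # ('2006/logbook.html', 'Parseloghtmltxt') - the later duplicate key wins
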