forked from expo/troggle
Replace assert() with DataIssue message
This commit is contained in:
parent c81f17c24b
commit 0f024b27f0
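The change is the same in every hunk below: an assert that used to abort a parse or a request is replaced (or commented out) and a DataIssue record is written instead, so bad data is reported rather than crashing the run. A minimal sketch of the pattern, assuming a DataIssue model with the parser and message fields used by the calls below (the import path and the helper name are assumptions, not troggle code):

    # Sketch of the pattern this commit applies; not literal troggle code.
    from troggle.core.models import DataIssue  # import path assumed

    def note_issue(condition, parser, message):
        """Record a DataIssue instead of raising AssertionError when a check fails."""
        if not condition:
            DataIssue.objects.create(parser=parser, message=message)
        return condition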
@@ -118,7 +118,7 @@ class PageTests(TestCase):
         h1 = re.search(r'<h1 id="site-name">Troggle administration</h1>', content)
 
     def test_page_admindocs(self):
-        response = self.client.get('/admin/login/models/')
+        response = self.client.get('/admin/login/models/')
         content = response.content.decode()
         self.assertEqual(response.status_code, 200)
         h1 = re.search(r'<h1>Model documentation</h1>', content)
@@ -112,8 +112,12 @@ class Expedition(TroggleModel):
     def get_expedition_day(self, date):
         expeditiondays = self.expeditionday_set.filter(date=date)
         if expeditiondays:
-            assert len(expeditiondays) == 1
-            return expeditiondays[0]
+            if len(expeditiondays) == 1:
+                return expeditiondays[0]
+            else:
+                message ='! - more than one datum in an expeditionday: {}'.format(date)
+                DataIssue.objects.create(parser='expedition', message=message)
+                return expeditiondays[0]
         res = ExpeditionDay(expedition=self, date=date)
         res.save()
         return res
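With this hunk, get_expedition_day() no longer raises AssertionError when two ExpeditionDay rows share a date; it records the duplicate and returns the first row. A hedged usage sketch ('expo' stands for any Expedition instance and is not from the commit):

    import datetime
    # Duplicate ExpeditionDay rows for a date no longer abort the caller.
    day = expo.get_expedition_day(datetime.date(2019, 8, 1))   # first matching ExpeditionDay
    dups = DataIssue.objects.filter(parser='expedition')       # any duplicate is recorded here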
@@ -59,8 +59,9 @@ def tunnelfileupload(request, path):
     print((project, user, tunnelversion))
 
 
-    assert len(list(request.FILES.values())) == 1, "only one file to upload"
+    if not (len(list(request.FILES.values())) == 1): # "only one file to upload"
+        return HttpResponse(content="Error: more than one file selected for upload", content_type="text/plain")
 
     uploadedfile = list(request.FILES.values())[0]
 
     if uploadedfile.field_name != "sketch":
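Note that request.FILES is a Django MultiValueDict, so len(list(request.FILES.values())) counts distinct file field names rather than files; several files posted under one field name would still pass. A sketch of a stricter count inside the view, using the standard getlist() API (not part of this commit):

    # Count every uploaded file, not one per field name.
    total_files = sum(len(request.FILES.getlist(name)) for name in request.FILES)
    if total_files != 1:
        return HttpResponse(content="Error: more than one file selected for upload",
                            content_type="text/plain")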
@@ -140,70 +140,13 @@ def ajax_QM_number(request):
     return HttpResponse(res)
 
 
-# def logbook_entry_suggestions(request):
-    # """
-    # Generates a html box with suggestions about what to do with QMs
-    # in logbook entry text.
-    # """
-    # unwiki_QM_pattern=r"(?P<whole>(?P<explorer_code>[ABC]?)(?P<cave>\d*)-?(?P<year>\d\d\d?\d?)-(?P<number>\d\d)(?P<grade>[ABCDXV]?))"
-    # unwiki_QM_pattern=re.compile(unwiki_QM_pattern)
-    # #wikilink_QM_pattern=settings.QM_PATTERN
-
-    # slug=request.POST['slug']
-    # date=request.POST['date']
-    # lbo=LogbookEntry.objects.get(slug=slug, date=date)
-
-    # #unwiki_QMs=re.findall(unwiki_QM_pattern,lbo.text)
-    # unwiki_QMs=[m.groupdict() for m in unwiki_QM_pattern.finditer(lbo.text)]
-
-    # print(unwiki_QMs)
-    # for qm in unwiki_QMs:
-        # #try:
-            # if len(qm['year'])==2:
-                # if int(qm['year'])<50:
-                    # qm['year']='20'+qm['year']
-                # else:
-                    # qm['year']='19'+qm['year']
-
-            # if lbo.date.year!=int(qm['year']):
-                # try:
-                    # lbo=LogbookEntry.objects.get(date__year=qm['year'],title__icontains="placeholder for QMs in")
-                # except:
-                    # print(("failed to get placeholder for year "+str(qm['year'])))
-
-            # temp_QM=QM(found_by=lbo,number=qm['number'],grade=qm['grade'])
-            # temp_QM.grade=qm['grade']
-            # qm['wikilink']=temp_QM.wiki_link()
-        # #except:
-            # #print 'failed'
-
-    # print(unwiki_QMs)
-
-
-    # #wikilink_QMs=re.findall(wikilink_QM_pattern,lbo.text)
-    # attached_QMs=lbo.QMs_found.all()
-    # unmentioned_attached_QMs=''#not implemented, fill this in by subtracting wiklink_QMs from attached_QMs
-
-    # #Find unattached_QMs. We only look at the QMs with a proper wiki link.
-    # #for qm in wikilink_QMs:
-        # #Try to look up the QM.
-
-    # print('got 208')
-    # any_suggestions=True
-    # print('got 210')
-    # return render(request,'suggestions.html',
-                  # {
-                   # 'unwiki_QMs':unwiki_QMs,
-                   # 'any_suggestions':any_suggestions
-                  # })
+print(" - newFile() is next in troggle/core/views_other.py")
 
 @login_required_if_public
 def newFile(request, pslug = None):
     if pslug:
         previousfile = LogbookEntry.objects.get(slug = pslug, date = previousdate, expedition = expedition)
-        assert previousfile.filename
+        #assert previousfile.filename
     if request.method == 'POST': # If the form has been submitted...
         tripForm = TripForm(request.POST) # A form bound to the POST data
         personTripFormSet = PersonTripFormSet(request.POST)
@@ -235,10 +178,7 @@ def newFile(request, pslug = None):
     else:
         fileform = UploadFileForm() # An unbound form
 
-    return render(request, 'editfile.html', {
-        'fileForm': fileform,
-
-        })
+    return render(request, 'editfile.html', {'fileForm': fileform, })
 
 @login_required_if_public
 def deleteFile(request, expeditionyear, date = None, slug = None):
@@ -272,7 +272,7 @@ def identifycavedircontents(gcavedir):
     subdirs = [ ]
     subsvx = [ ]
     primesvx = None
-    for f in os.listdir(gcavedir):
+    for f in os.listdir(gcavedir): # These may get outdated as data gets tidied up. This should not be in the code!
         if name == "204" and (f in ["skel.svx", "template.svx", "204withents.svx"]):
             pass
         elif name == "136" and (f in ["136-noents.svx"]):
@@ -289,10 +289,11 @@ def identifycavedircontents(gcavedir):
         if nf.lower() == name.lower() or nf[:3] == "all" or (name, nf) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]:
             if primesvx:
                 if nf[:3] == "all":
-                    assert primesvx[:3] != "all", (name, nf, primesvx, gcavedir, subsvx)
+                    #assert primesvx[:3] != "all", (name, nf, primesvx, gcavedir, subsvx)
                     primesvx = nf
                 else:
-                    assert primesvx[:3] == "all", (name, nf, primesvx, gcavedir, subsvx)
+                    #assert primesvx[:3] == "all", (name, nf, primesvx, gcavedir, subsvx)
+                    pass
             else:
                 primesvx = nf
         else:
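Commenting the asserts out here drops the primesvx consistency check entirely rather than converting it. A sketch of keeping the check in the same DataIssue style as the other files (the parser name 'surveys' and the wording are assumptions, not from this commit):

    # Report, rather than assert, when a second 'all*' prime svx file turns up.
    if primesvx[:3] == "all":
        message = " ! - two 'all' prime svx files in {}: {} and {}".format(gcavedir, primesvx, nf)
        DataIssue.objects.create(parser='surveys', message=message)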
@@ -126,17 +126,26 @@ def ParseDate(tripdate, year):
     mdatestandard = re.match(r"(\d\d\d\d)-(\d\d)-(\d\d)", tripdate)
     mdategoof = re.match(r"(\d\d?)/0?(\d)/(20|19)?(\d\d)", tripdate)
     if mdatestandard:
-        assert mdatestandard.group(1) == year, (tripdate, year)
-        year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
+        if not (mdatestandard.group(1) == year):
+            message = " ! - Bad date (year) in logbook: " + tripdate + " - " + year
+            DataIssue.objects.create(parser='logbooks', message=message)
+            logdataissues["tripdate"]=message
+            return datetime.date('1970', '01', '01')
+        else:
+            year, month, day = int(mdatestandard.group(1)), int(mdatestandard.group(2)), int(mdatestandard.group(3))
     elif mdategoof:
-        assert not mdategoof.group(3) or mdategoof.group(3) == year[:2], mdategoof.groups()
-        yadd = int(year[:2]) * 100
-        day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
+        if not (not mdategoof.group(3) or mdategoof.group(3) == year[:2]):
+            message = " ! - Bad date mdategoof.group(3) in logbook: " + tripdate + " - " + mdategoof.group(3)
+            DataIssue.objects.create(parser='logbooks', message=message)
+            logdataissues["tripdate"]=message
+            return datetime.date('1970', '01', '01')
+        else:
+            yadd = int(year[:2]) * 100
+            day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
     else:
+        message = " ! - Bad date in logbook: " + tripdate + " - " + year
+        DataIssue.objects.create(parser='logbooks', message=message)
+        logdataissues["tripdate"]=message
         assert False, tripdate
 
     return datetime.date(year, month, day)
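One detail in this hunk: datetime.date() takes integers, so the fallback return datetime.date('1970', '01', '01') raises TypeError instead of producing a sentinel date. A sketch of the intended fallback inside ParseDate:

    # datetime.date requires int arguments; the string form raises TypeError.
    return datetime.date(1970, 1, 1)   # sentinel date for unparseable trip dates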
@@ -150,7 +159,12 @@ def Parselogwikitxt(year, expedition, txt):
     for triphead, triptext in trippara:
         logbook_entry_count += 1
         tripheadp = triphead.split("|")
-        assert len(tripheadp) == 3, (tripheadp, triptext)
+        # assert len(tripheadp) == 3, (tripheadp, triptext)
+        if not (len(tripheadp) == 3):
+            message = " ! - Bad no of items in tripdate in logbook: " + tripdate + " - " + tripheadp
+            DataIssue.objects.create(parser='logbooks', message=message)
+            logdataissues["tripdate"]=message
+
         tripdate, tripplace, trippeople = tripheadp
         tripsplace = tripplace.split(" - ")
         tripcave = tripsplace[0].strip()
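As written, the new branch records the issue and then falls through: the message line concatenates a str with the list tripheadp (TypeError), and the unpack on the next line expects exactly three items (ValueError). A sketch that skips the malformed entry after recording it; continue and the format() call are the only departures from the hunk above:

    if len(tripheadp) != 3:
        message = " ! - Bad number of items in tripheader in logbook: {}".format(tripheadp)
        DataIssue.objects.create(parser='logbooks', message=message)
        logdataissues["tripdate"] = message
        continue   # skip this entry instead of crashing on the unpack below
    tripdate, tripplace, trippeople = tripheadp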
@@ -339,7 +353,14 @@ def Parseloghtml03(year, expedition, txt):
         logbook_entry_count += 1
 
         s = re.match("(?s)\s*<p>(.*?)</p>(.*)$", trippara)
-        assert s, trippara
+        #assert s, trippara
+        if not ( s ) :
+            message = " ! - Skipping logentry on failure to parse Parseloghtml03: {} {} {}...".format(tripentry,s,trippara[:300])
+            DataIssue.objects.create(parser='logbooks', message=message)
+            logdataissues[tripentry]=message
+            print(message)
+            break
+
         tripheader, triptext = s.group(1), s.group(2)
         tripheader = re.sub(r"&nbsp;", " ", tripheader)
         tripheader = re.sub(r"\s+", " ", tripheader).strip()
@@ -595,7 +616,7 @@ def parseAutoLogBookEntry(filename):
         try:
             # this is a slow and uncertain function:
             cave = getCaveByReference(caveRef)
-        except AssertionError:
+        except:
             cave = None
             errors.append(" - Cave not found in database")
         else:
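For reference, the DataIssue.objects.create(parser=..., message=...) calls throughout this commit assume a model roughly like the sketch below; the field names are inferred from the calls, not copied from troggle:

    from django.db import models

    class DataIssue(models.Model):
        # Field names inferred from the create() calls in this commit.
        date    = models.DateTimeField(auto_now_add=True)
        parser  = models.CharField(max_length=50, blank=True, null=True)
        message = models.CharField(max_length=400, blank=True, null=True)

        def __str__(self):
            return "{} - {}".format(self.parser, self.message)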