
Make import robust against duplicate kataster numbers

This commit is contained in:
Philip Sargent 2020-06-07 17:49:58 +01:00
parent fcfda644d3
commit 9237a6262e
3 changed files with 40 additions and 24 deletions

View File

@@ -68,7 +68,8 @@ def numericalcmp(x, y):
def caveKey(x):
    """python3 function for sort.
    Note that cave kataster numbers are not generally integers
    Note that cave kataster numbers are not generally integers.
    This needs to be fixed to make a decent sort order.
    """
    return x.kataster_number
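
Not part of this commit, but as an illustration of the "decent sort order" the new docstring asks for: a natural sort would split the kataster number into digit and non-digit chunks and compare the digit chunks numerically. A minimal sketch in plain Python, with no troggle dependencies (kataster_sort_key is a hypothetical helper, not code from this repository):

import re

def kataster_sort_key(kataster_number):
    # Hypothetical natural-sort key: re.split with a capturing group yields
    # alternating non-digit and digit chunks; digit chunks are compared as
    # integers, so "9" sorts before "40" and "40a" sorts just after "40".
    parts = re.split(r'(\d+)', kataster_number or "")
    return [int(p) if p.isdigit() else p for p in parts]

# e.g. sorted(["115", "40a", "9", "40"], key=kataster_sort_key)
# gives ['9', '40', '40a', '115'] rather than the plain-string order
# ['115', '40', '40a', '9'].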

View File

@@ -122,6 +122,7 @@ def readcave(filename):
        url = getXML(cavecontents, "url", maxItems = 1, context = context)
        entrances = getXML(cavecontents, "entrance", context = context)
        if len(non_public) == 1 and len(slugs) >= 1 and len(official_name) == 1 and len(areas) >= 1 and len(kataster_code) == 1 and len(kataster_number) == 1 and len(unofficial_number) == 1 and len(explorers) == 1 and len(underground_description) == 1 and len(equipment) == 1 and len(references) == 1 and len(survey) == 1 and len(kataster_status) == 1 and len(underground_centre_line) == 1 and len(notes) == 1 and len(length) == 1 and len(depth) == 1 and len(extent) == 1 and len(survex_file) == 1 and len(description_file ) == 1 and len(url) == 1 and len(entrances) >= 1:
            try:
                c, state = models_caves.Cave.objects.update_or_create(non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
                    official_name = official_name[0],
                    kataster_code = kataster_code[0],
@@ -142,6 +143,20 @@ def readcave(filename):
                    description_file = description_file[0],
                    url = url[0],
                    filename = filename)
            except:
                # need to cope with duplicates
                print(" ! FAILED to get only one cave when updating using: "+filename)
                kaves = models_caves.Cave.objects.all().filter(kataster_number=kataster_number[0])
                for k in kaves:
                    message = " ! - DUPLICATES in db. kataster:"+ str(k.kataster_number) + ", slug:" + str(k.slug())
                    models.DataIssue.objects.create(parser='caves', message=message)
                    print(message)
                for k in kaves:
                    if k.slug() != None:
                        print(" ! - OVERWRITING this one: slug:"+ str(k.slug()))
                        k.notes = "DUPLICATE kataster number found on import. Please fix\n" + k.notes
                        c = k
            for area_slug in areas:
                area = models_caves.Area.objects.filter(short_name = area_slug)
                if area:
@@ -157,7 +172,7 @@ def readcave(filename):
                        slug = slug,
                        primary = primary)
                except:
                    message = " ! Can't find text (slug): %s, skipping %s" % (slug, context)
                    message = " ! Cave update/create failure: %s, skipping file %s" % (slug, context)
                    models.DataIssue.objects.create(parser='caves', message=message)
                    print(message)
@@ -169,7 +184,7 @@ def readcave(filename):
                    entrance = models_caves.Entrance.objects.get(entranceslug__slug = slug)
                    ce = models_caves.CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
                except:
                    message = " ! Entrance text (slug) %s missing %s" % (slug, context)
                    message = " ! Entrance setting failure, slug: %s letter: %s" % (slug, letter)
                    models.DataIssue.objects.create(parser='caves', message=message)
                    print(message)
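
For context on the broad except added in the hunk above: Cave.objects.update_or_create() performs a get() on its lookup arguments before deciding whether to update or create, so if the database already holds more than one Cave row matching those lookups, Django raises MultipleObjectsReturned, presumably the failure this commit is recovering from when kataster numbers are duplicated. A minimal sketch of the same recovery with the exception caught narrowly, using a hypothetical helper rather than the committed code:

from django.core.exceptions import MultipleObjectsReturned

def upsert_cave(Cave, kataster_number, **fields):
    # Hypothetical illustration of the commit's strategy: on duplicates,
    # pick one existing row to overwrite and carry on instead of aborting
    # the whole import.
    try:
        cave, created = Cave.objects.update_or_create(
            kataster_number=kataster_number, defaults=fields)
    except MultipleObjectsReturned:
        cave = Cave.objects.filter(kataster_number=kataster_number).first()
        created = False
    return cave, created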

View File

@@ -23,7 +23,7 @@ actualurlpatterns = patterns('',
    url(r'^troggle$', views_other.frontpage, name="frontpage"),
    url(r'^caves/?$', views_caves.caveindex, name="caveindex"),
    url(r'^caves$', views_caves.caveindex, name="caveindex"),
    url(r'^people/?$', views_logbooks.personindex, name="personindex"),
    url(r'^newqmnumber/?$', views_other.ajax_QM_number, ),
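
A side note on the urls.py hunk: the two caveindex patterns differ only in the optional trailing slash, r'^caves/?$' matching both "caves" and "caves/" while r'^caves$' matches only the slash-less form. A quick check with plain re, outside Django and purely illustrative:

import re

with_optional_slash = re.compile(r'^caves/?$')
exact_only = re.compile(r'^caves$')

# The optional-slash pattern accepts both request paths; the exact pattern
# accepts only the form without the trailing slash.
assert with_optional_slash.match('caves') and with_optional_slash.match('caves/')
assert exact_only.match('caves') and not exact_only.match('caves/')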