forked from expo/troggle
add 'lastvisit' field to entrances, for the date
parsers/caves.py
@@ -222,6 +222,7 @@ def readentrance(filename):
         explorers = getXML(entrancecontents, "explorers", maxItems = 1, context = context)
         map_description = getXML(entrancecontents, "map_description", maxItems = 1, context = context)
         location_description = getXML(entrancecontents, "location_description", maxItems = 1, context = context)
+        lastvisit = getXML(entrancecontents, "last visit date", maxItems = 1, minItems = 0, context = context)
         approach = getXML(entrancecontents, "approach", maxItems = 1, context = context)
         underground_description = getXML(entrancecontents, "underground_description", maxItems = 1, context = context)
         photo = getXML(entrancecontents, "photo", maxItems = 1, context = context)
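The new getXML() call reads an optional <last visit date> element from the entrance description file: minItems = 0 marks the element as optional, and (together with the getXML() change at the end of this commit) an absent element comes back as [""] rather than being logged as a DataIssue, so indexing lastvisit[0] further down never raises IndexError. A minimal illustration of the calling pattern, not part of the commit, with invented example values:

    # illustration only; real entrance files contain many more elements
    entrancecontents = "<name>Example Entrance</name><last visit date>2019-07-20</last visit date>"
    lastvisit = getXML(entrancecontents, "last visit date", maxItems = 1, minItems = 0, context = "example")
    # lastvisit == ["2019-07-20"]; if the element were missing it would be [""] instead of an error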
@@ -238,59 +239,61 @@ def readentrance(filename):
         other_description = getXML(entrancecontents, "other_description", maxItems = 1, context = context)
         bearings = getXML(entrancecontents, "bearings", maxItems = 1, context = context)
         url = getXML(entrancecontents, "url", maxItems = 1, context = context)
-        if len(non_public) == 1 and len(slugs) >= 1 and len(name) >= 1 and len(entrance_description) == 1 and len(explorers) == 1 and len(map_description) == 1 and len(location_description) == 1 and len(approach) == 1 and len(underground_description) == 1 and len(marking) == 1 and len(marking_comment) == 1 and len(findability) == 1 and len(findability_description) == 1 and len(alt) == 1 and len(northing) == 1 and len(easting) == 1 and len(tag_station) == 1 and len(exact_station) == 1 and len(other_station) == 1 and len(other_description) == 1 and len(bearings) == 1 and len(url) == 1:
-            e, state = Entrance.objects.update_or_create(name = name[0],
-                            non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
-                            entrance_description = entrance_description[0],
-                            explorers = explorers[0],
-                            map_description = map_description[0],
-                            location_description = location_description[0],
-                            approach = approach[0],
-                            underground_description = underground_description[0],
-                            photo = photo[0],
-                            marking = marking[0],
-                            marking_comment = marking_comment[0],
-                            findability = findability[0],
-                            findability_description = findability_description[0],
-                            alt = alt[0],
-                            northing = northing[0],
-                            easting = easting[0],
-                            tag_station = tag_station[0],
-                            exact_station = exact_station[0],
-                            other_station = other_station[0],
-                            other_description = other_description[0],
-                            bearings = bearings[0],
-                            url = url[0],
-                            filename = filename,
-                            cached_primary_slug = slugs[0])
-            primary = True
-            for slug in slugs:
-                #print("entrance slug:{} filename:{}".format(slug, filename))
-                try:
-                    cs = EntranceSlug.objects.update_or_create(entrance = e,
-                                    slug = slug,
-                                    primary = primary)
-                except:
-                    # need to cope with duplicates
-                    message = f" ! FAILED to get precisely one ENTRANCE when updating using: cave_entrance/{filename}"
-                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
-                    kents = EntranceSlug.objects.all().filter(entrance = e,
-                                    slug = slug,
-                                    primary = primary)
-                    for k in kents:
-                        message = " ! - DUPLICATE in db. entrance:"+ str(k.entrance) + ", slug:" + str(k.slug())
-                        DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
-                        print(message)
-                    for k in kents:
-                        if k.slug() != None:
-                            print(" ! - OVERWRITING this one: slug:"+ str(k.slug()))
-                            k.notes = "DUPLICATE entrance found on import. Please fix\n" + k.notes
-                            c = k
-                primary = False
-        else: # more than one item in long list
-            message = f' ! {slug:12} ABORT loading this entrance. in "{filename}"'
-            DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
-            print(message)
+        #if len(non_public) == 1 and len(slugs) >= 1 and len(name) >= 1 and len(entrance_description) == 1 and len(explorers) == 1 and len(map_description) == 1 and len(location_description) == 1 and len(lastvisit) == 1 and len(approach) == 1 and len(underground_description) == 1 and len(marking) == 1 and len(marking_comment) == 1 and len(findability) == 1 and len(findability_description) == 1 and len(alt) == 1 and len(northing) == 1 and len(easting) == 1 and len(tag_station) == 1 and len(exact_station) == 1 and len(other_station) == 1 and len(other_description) == 1 and len(bearings) == 1 and len(url) == 1:
+        e, state = Entrance.objects.update_or_create(name = name[0],
+                        non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
+                        entrance_description = entrance_description[0],
+                        explorers = explorers[0],
+                        map_description = map_description[0],
+                        location_description = location_description[0],
+                        lastvisit = lastvisit[0],
+                        approach = approach[0],
+                        underground_description = underground_description[0],
+                        photo = photo[0],
+                        marking = marking[0],
+                        marking_comment = marking_comment[0],
+                        findability = findability[0],
+                        findability_description = findability_description[0],
+                        alt = alt[0],
+                        northing = northing[0],
+                        easting = easting[0],
+                        tag_station = tag_station[0],
+                        exact_station = exact_station[0],
+                        other_station = other_station[0],
+                        other_description = other_description[0],
+                        bearings = bearings[0],
+                        url = url[0],
+                        filename = filename,
+                        cached_primary_slug = slugs[0])
+        primary = True
+        for slug in slugs:
+            #print("entrance slug:{} filename:{}".format(slug, filename))
+            try:
+                cs = EntranceSlug.objects.update_or_create(entrance = e,
+                                slug = slug,
+                                primary = primary)
+            except:
+                # need to cope with duplicates
+                message = f" ! FAILED to get precisely one ENTRANCE when updating using: cave_entrance/{filename}"
+                DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
+                kents = EntranceSlug.objects.all().filter(entrance = e,
+                                slug = slug,
+                                primary = primary)
+                for k in kents:
+                    message = " ! - DUPLICATE in db. entrance:"+ str(k.entrance) + ", slug:" + str(k.slug())
+                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
+                    print(message)
+                for k in kents:
+                    if k.slug() != None:
+                        print(" ! - OVERWRITING this one: slug:"+ str(k.slug()))
+                        k.notes = "DUPLICATE entrance found on import. Please fix\n" + k.notes
+                        c = k
+            primary = False
+        # else: # more than one item in long list. But this is not an error, and the max and min have been checked by getXML
+            # slug = Path(filename).stem
+            # message = f' ! ABORT loading this entrance. in "{filename}"'
+            # DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
+            # print(message)
 
 def readcave(filename):
     '''Assumes any area it hasn't seen before is a subarea of 1623
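Apart from the new lastvisit = lastvisit[0] argument, the bulk of this hunk is an indentation change: the long all-fields-present check is kept only as a comment (now also mentioning lastvisit), the Entrance.objects.update_or_create() body moves out of it, and the matching abort branch is likewise retained only as comments, now deriving slug from the filename stem, since missing or repeated elements are already reported by getXML().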
@@ -458,6 +461,9 @@ def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True,
             "max": maxItems} + " in file " + context
         DataIssue.objects.create(parser='caves', message=message)
         print(message)
+    if minItems == 0:
+        if not items:
+            items = [ "" ]
     return items
 
 def readcaves():
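Taken together, the two getXML() changes make the new element genuinely optional: a tag that is allowed to be absent yields a single empty string instead of an empty list. A self-contained sketch of that behaviour, a simplified stand-in rather than troggle's actual getXML() (which also records DataIssue rows and takes a printwarnings flag):

    import re

    def getXML_sketch(text, itemname, minItems=1, maxItems=None, context=""):
        # find every <itemname>...</itemname> block in the text
        items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
        if len(items) < minItems:
            print(f" ! {len(items)} x {itemname} found, at least {minItems} expected in {context}")
        if maxItems is not None and len(items) > maxItems:
            print(f" ! {len(items)} x {itemname} found, no more than {maxItems} expected in {context}")
        if minItems == 0:
            if not items:
                items = [""]    # the new fallback: an optional, absent element reads as an empty string
        return items

    # an entrance file that has never recorded a visit still loads cleanly:
    print(getXML_sketch("<entrance><name>example</name></entrance>", "last visit date", minItems=0, maxItems=1))
    # -> ['']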