import os
import re
from pathlib import Path

from django.conf import settings
from django.db import transaction

from troggle.settings import SURVEX_DATA, EXPOWEB

from troggle.core.models.troggle import DataIssue
from troggle.core.models.caves import Area, Cave, Entrance, CaveSlug, EntranceSlug, CaveAndEntrance

'''Reads all the cave description data by parsing the xml files (stored as e.g. :EXPOWEB:/cave_data/1623-161.html )
and creating the various Cave, Entrance and necessary Area objects.

BUT in Django 2.0 and later we cannot do any queries on data we have just entered,
because this is all happening inside one transaction. Bummer.

django.db.transaction.TransactionManagementError:
An error occurred in the current transaction. You can't execute queries until the end of the 'atomic' block.
'''
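
# The workaround used in readcaves() below (a sketch of the pattern, not new
# behaviour): each phase runs in its own atomic block, so objects saved in an
# earlier block are committed before a later block queries them.
#
#   with transaction.atomic():
#       cave.save()              # committed when this block exits
#   with transaction.atomic():
#       Cave.objects.get(...)    # the saved cave is now queryable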

entrances_xslug = {}
caves_xslug = {}
areas_xslug = {}

def readcaves():
    '''Reads the xml-format HTML files in the EXPOWEB repo, not from the loser repo.
    '''
    with transaction.atomic():
        print(" - Deleting Caves and Entrances")
        Cave.objects.all().delete()
        Entrance.objects.all().delete()
        # Clear the cave data issues and the caves as we are reloading
        DataIssue.objects.filter(parser='caves').delete()
        DataIssue.objects.filter(parser='entrances').delete()

        area_1623 = Area.objects.update_or_create(short_name = "1623", parent = None)
        # update_or_create() returns a tuple (object, created), not a single object,
        # e.g. (<Area: 1623>, True) - hence the [0] indexing where the Area is used below.
        #print(f' ! - READ CAVES: {area_1623}')
        area_1626 = Area.objects.update_or_create(short_name = "1626", parent = None)

        print(" - Setting pending caves")
        # Do this first, so that these empty entries are overwritten as they get properly created.

        # For those caves which do not have cave_data/1623-xxx.html XML files even though they exist and have surveys.
        # This also needs to be done *before* entrances, so that the entrance-cave links work properly.
        pending = ["2007-05", "2007-06", "2007-12", "2009-01", "2009-02",
                   "2010-06", "2010-07", "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
                   "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
                   "2018-pf-01", "2018-pf-02", "haldenloch"]
        for k in pending:
            url = "1623/" + k
            try:
                cave = Cave(
                    unofficial_number = k,
                    # official_name = "",
                    underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
                    survex_file = "caves-1623/" + k + "/" + k + ".svx",
                    url = url,
                    notes = "_Survex file found in loser repo but no description in expoweb <br>\n" +
                        "INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then <br>\n" +
                        "search in the Expo for that year e.g. <a href='/expedition/2007'>2007</a> to find a relevant logbook entry, then <br>\n" +
                        "click on 'New Entrance' at the bottom of this page as we need to create the entrance *first*.")
                if cave:
                    cave.save() # must save to have an id before foreign keys can work; 'area' is a ManyToMany field.
                    #print(f' ! - READ CAVES: cave {k} {cave}')
                    cave.area.add(area_1623[0])
                    cave.save()
                    message = f" ! {k:12} {cave.underground_description}"
                    DataIssue.objects.create(parser='caves', message=message, url=url)
                    print(message)
                    try: # Now create a cave slug ID
                        cs = CaveSlug.objects.update_or_create(cave = cave,
                                        slug = "1623-PENDING-" + k,
                                        primary = False)
                    except:
                        message = f" ! {k:11s} PENDING cave slug create failure"
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                else:
                    message = f' ! {k:11s} PENDING cave create failure'
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)

            except:
                message = " ! Error. Cannot create pending cave, pending-id:{}".format(k)
                DataIssue.objects.create(parser='caves', message=message)
                print(message)
                raise

    with transaction.atomic():
        print(" - Reading Entrances from entrance descriptions xml files")
        print(" - settings.ENTRANCEDESCRIPTIONS: ", settings.ENTRANCEDESCRIPTIONS)
        for filename in next(os.walk(settings.ENTRANCEDESCRIPTIONS))[2]: # there should be a better way of getting a list of files; see the sketch below
            if filename.endswith('.html'):
                readentrance(filename)

        print(" - Reading Caves from cave descriptions xml files")
        for filename in next(os.walk(settings.CAVEDESCRIPTIONS))[2]: # there should be a better way of getting a list of files; see the sketch below
            if filename.endswith('.html'):
                readcave(filename)

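# A tidier way to list those files (an untested sketch, assuming a flat directory
# of .html files; Path is already imported above):
#   for p in Path(settings.CAVEDESCRIPTIONS).glob('*.html'):
#       readcave(p.name)
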
def readentrance(filename):
    global entrances_xslug
    global caves_xslug
    global areas_xslug

    # Note: these are HTML files in the EXPOWEB repo, not from the loser repo.
    with open(os.path.join(settings.ENTRANCEDESCRIPTIONS, filename)) as f:
        contents = f.read()
    context = filename
    #print("Reading file ENTRANCE {} / {}".format(settings.ENTRANCEDESCRIPTIONS, filename))
    entrancecontentslist = getXML(contents, "entrance", maxItems = 1, context = context)
    if len(entrancecontentslist) != 1:
        message = f'! BAD ENTRANCE at "{filename}"'
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    else:
        entrancecontents = entrancecontentslist[0]
        non_public = getXML(entrancecontents, "non_public", maxItems = 1, context = context)
        name = getXML(entrancecontents, "name", maxItems = 1, context = context)
        slugs = getXML(entrancecontents, "slug", context = context)
        entrance_description = getXML(entrancecontents, "entrance_description", maxItems = 1, context = context)
        explorers = getXML(entrancecontents, "explorers", maxItems = 1, context = context)
        map_description = getXML(entrancecontents, "map_description", maxItems = 1, context = context)
        location_description = getXML(entrancecontents, "location_description", maxItems = 1, context = context)
        approach = getXML(entrancecontents, "approach", maxItems = 1, context = context)
        underground_description = getXML(entrancecontents, "underground_description", maxItems = 1, context = context)
        photo = getXML(entrancecontents, "photo", maxItems = 1, context = context)
        marking = getXML(entrancecontents, "marking", maxItems = 1, context = context)
        marking_comment = getXML(entrancecontents, "marking_comment", maxItems = 1, context = context)
        findability = getXML(entrancecontents, "findability", maxItems = 1, context = context)
        findability_description = getXML(entrancecontents, "findability_description", maxItems = 1, context = context)
        alt = getXML(entrancecontents, "alt", maxItems = 1, context = context)
        northing = getXML(entrancecontents, "northing", maxItems = 1, context = context)
        easting = getXML(entrancecontents, "easting", maxItems = 1, context = context)
        tag_station = getXML(entrancecontents, "tag_station", maxItems = 1, context = context)
        exact_station = getXML(entrancecontents, "exact_station", maxItems = 1, context = context)
        other_station = getXML(entrancecontents, "other_station", maxItems = 1, context = context)
        other_description = getXML(entrancecontents, "other_description", maxItems = 1, context = context)
        bearings = getXML(entrancecontents, "bearings", maxItems = 1, context = context)
        url = getXML(entrancecontents, "url", maxItems = 1, context = context)
        if (len(non_public) == 1 and len(slugs) >= 1 and len(name) >= 1
                and len(entrance_description) == 1 and len(explorers) == 1
                and len(map_description) == 1 and len(location_description) == 1
                and len(approach) == 1 and len(underground_description) == 1
                and len(marking) == 1 and len(marking_comment) == 1
                and len(findability) == 1 and len(findability_description) == 1
                and len(alt) == 1 and len(northing) == 1 and len(easting) == 1
                and len(tag_station) == 1 and len(exact_station) == 1
                and len(other_station) == 1 and len(other_description) == 1
                and len(bearings) == 1 and len(url) == 1):
            e, state = Entrance.objects.update_or_create(name = name[0],
                            non_public = {"True": True, "False": False, "true": True, "false": False}[non_public[0]],
                            entrance_description = entrance_description[0],
                            explorers = explorers[0],
                            map_description = map_description[0],
                            location_description = location_description[0],
                            approach = approach[0],
                            underground_description = underground_description[0],
                            photo = photo[0],
                            marking = marking[0],
                            marking_comment = marking_comment[0],
                            findability = findability[0],
                            findability_description = findability_description[0],
                            alt = alt[0],
                            northing = northing[0],
                            easting = easting[0],
                            tag_station = tag_station[0],
                            exact_station = exact_station[0],
                            other_station = other_station[0],
                            other_description = other_description[0],
                            bearings = bearings[0],
                            url = url[0],
                            filename = filename,
                            cached_primary_slug = slugs[0])
            primary = True
            for slug in slugs:
                #print("entrance slug:{} filename:{}".format(slug, filename))
                try:
                    cs = EntranceSlug.objects.update_or_create(entrance = e,
                                    slug = slug,
                                    primary = primary)
                except:
                    # need to cope with duplicates
                    print(" ! FAILED to get only one ENTRANCE when updating using: " + filename)
                    kents = EntranceSlug.objects.all().filter(entrance = e,
                                    slug = slug,
                                    primary = primary)
                    for k in kents:
                        message = " ! - DUPLICATE in db. entrance:" + str(k.entrance) + ", slug:" + str(k.slug())
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                    for k in kents:
                        if k.slug() is not None:
                            print(" ! - OVERWRITING this one: slug:" + str(k.slug()))
                            k.notes = "DUPLICATE entrance found on import. Please fix\n" + k.notes
                            c = k
                primary = False
        else: # more than one item in long list
            slug = slugs[0] if slugs else filename # 'slug' is otherwise unset on this abort path
            message = f' ! {slug:12} ABORT loading this entrance. in "{filename}"'
            DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
            print(message)


def readcave(filename):
    '''Assumes any area it hasn't seen before is a subarea of 1623.
    '''
    global entrances_xslug
    global caves_xslug
    global areas_xslug

    # Note: these are HTML files in the EXPOWEB repo, not from the loser repo.
    with open(os.path.join(settings.CAVEDESCRIPTIONS, filename)) as f:
        contents = f.read()
    context = filename
    cavecontentslist = getXML(contents, "cave", maxItems = 1, context = context)
    if len(cavecontentslist) != 1:
        message = f'! BAD CAVE at "{filename}"'
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    else:
        cavecontents = cavecontentslist[0]
        non_public = getXML(cavecontents, "non_public", maxItems = 1, context = context)
        slugs = getXML(cavecontents, "caveslug", maxItems = 1, context = context)
        official_name = getXML(cavecontents, "official_name", maxItems = 1, context = context)
        areas = getXML(cavecontents, "area", context = context)
        kataster_code = getXML(cavecontents, "kataster_code", maxItems = 1, context = context)
        kataster_number = getXML(cavecontents, "kataster_number", maxItems = 1, context = context)
        unofficial_number = getXML(cavecontents, "unofficial_number", maxItems = 1, context = context)
        explorers = getXML(cavecontents, "explorers", maxItems = 1, context = context)
        underground_description = getXML(cavecontents, "underground_description", maxItems = 1, context = context)
        equipment = getXML(cavecontents, "equipment", maxItems = 1, context = context)
        references = getXML(cavecontents, "references", maxItems = 1, context = context)
        survey = getXML(cavecontents, "survey", maxItems = 1, context = context)
        kataster_status = getXML(cavecontents, "kataster_status", maxItems = 1, context = context)
        underground_centre_line = getXML(cavecontents, "underground_centre_line", maxItems = 1, context = context)
        notes = getXML(cavecontents, "notes", maxItems = 1, context = context)
        length = getXML(cavecontents, "length", maxItems = 1, context = context)
        depth = getXML(cavecontents, "depth", maxItems = 1, context = context)
        extent = getXML(cavecontents, "extent", maxItems = 1, context = context)
        survex_file = getXML(cavecontents, "survex_file", maxItems = 1, context = context)
        description_file = getXML(cavecontents, "description_file", maxItems = 1, context = context)
        url = getXML(cavecontents, "url", maxItems = 1, context = context)
        entrances = getXML(cavecontents, "entrance", context = context)

        if (len(non_public) == 1 and len(slugs) >= 1 and len(official_name) == 1
                and len(areas) >= 1 and len(kataster_code) == 1 and len(kataster_number) == 1
                and len(unofficial_number) == 1 and len(explorers) == 1
                and len(underground_description) == 1 and len(equipment) == 1
                and len(references) == 1 and len(survey) == 1 and len(kataster_status) == 1
                and len(underground_centre_line) == 1 and len(notes) == 1
                and len(length) == 1 and len(depth) == 1 and len(extent) == 1
                and len(survex_file) == 1 and len(description_file) == 1
                and len(url) == 1 and len(entrances) >= 1):
            try:
                c, state = Cave.objects.update_or_create(non_public = {"True": True, "False": False, "true": True, "false": False}[non_public[0]],
                                official_name = official_name[0],
                                kataster_code = kataster_code[0],
                                kataster_number = kataster_number[0],
                                unofficial_number = unofficial_number[0],
                                explorers = explorers[0],
                                underground_description = underground_description[0],
                                equipment = equipment[0],
                                references = references[0],
                                survey = survey[0],
                                kataster_status = kataster_status[0],
                                underground_centre_line = underground_centre_line[0],
                                notes = notes[0],
                                length = length[0],
                                depth = depth[0],
                                extent = extent[0],
                                survex_file = survex_file[0],
                                description_file = description_file[0],
                                url = url[0],
                                filename = filename)
            except:
                print(" ! FAILED to get only one CAVE when updating using: " + filename)
                kaves = Cave.objects.all().filter(kataster_number = kataster_number[0])
                for k in kaves:
                    message = " ! - DUPLICATES in db. kataster:" + str(k.kataster_number) + ", slug:" + str(k.slug())
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)
                for k in kaves:
                    if k.slug() is not None:
                        print(" ! - OVERWRITING this one: slug:" + str(k.slug()))
                        k.notes = "DUPLICATE kataster number found on import. Please fix\n" + k.notes
                        c = k
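
            # Note: update_or_create() treats every keyword above as part of the
            # lookup, so the except fires when the combined match is not unique
            # (e.g. MultipleObjectsReturned) or a unique constraint is violated.
            # The usual Django idiom separates the key from the updatable fields -
            # a sketch only, and the choice of key field here is an assumption:
            #   c, state = Cave.objects.update_or_create(
            #       filename=filename,
            #       defaults={"official_name": official_name[0], ...})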
            for area_slug in areas:
                if area_slug in areas_xslug:
                    newArea = areas_xslug[area_slug]
                else:
                    area = Area.objects.filter(short_name = area_slug)
                    if area:
                        newArea = area[0]
                    else:
                        newArea = Area(short_name = area_slug, parent = Area.objects.get(short_name = "1623"))
                        newArea.save()
                    areas_xslug[area_slug] = newArea
                c.area.add(newArea)
            primary = True
            for slug in slugs:
                if slug in caves_xslug:
                    cs = caves_xslug[slug]
                else:
                    try:
                        cs = CaveSlug.objects.update_or_create(cave = c,
                                        slug = slug,
                                        primary = primary)
                        caves_xslug[slug] = cs
                    except:
                        message = " ! Cave update/create failure: %s, skipping file %s" % (slug, context)
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                primary = False

            for entrance in entrances:
                eslug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]
                letter = getXML(entrance, "letter", maxItems = 1, context = context)[0]
                try:
                    if eslug in entrances_xslug:
                        entrance = entrances_xslug[eslug]
                    else:
                        entrance = Entrance.objects.get(entranceslug__slug = eslug)
                        entrances_xslug[eslug] = entrance
                    ce = CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
                except:
                    message = f' ! Entrance setting failure, slug:"{eslug}" letter:"{letter}" cave:"{c}" filename:"{filename}"'
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)

            if survex_file[0]:
                if not (Path(SURVEX_DATA) / survex_file[0]).is_file():
                    message = f' ! {slug:12} survex filename does not exist :LOSER:"{survex_file[0]}" in "{filename}"'
                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
                    print(message)

            if description_file[0]:
                if not (Path(EXPOWEB) / description_file[0]).is_file():
                    message = f' ! {slug:12} description filename does not exist :EXPOWEB:"{description_file[0]}" in "{filename}"'
                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
                    print(message)
                #c.description_file="" # done only once, to clear out cruft.
                #c.save()

        else: # more than one item in long list
            slug = slugs[0] if slugs else filename # 'slug' is otherwise unset on this abort path
            message = f' ! {slug:12} ABORT loading this cave. in "{filename}"'
            DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
            print(message)


def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
    '''Returns a list of the text contents of all <itemname>...</itemname> elements in text,
    logging a DataIssue when fewer than minItems or more than maxItems are found.
    '''
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
    if len(items) < minItems and printwarnings:
        message = " ! %(count)i x %(itemname)s found, at least %(min)i expected. Load ABORT. " % {"count": len(items),
                    "itemname": itemname,
                    "min": minItems} + " in file " + context
        DataIssue.objects.create(parser='caves', message=message, url="" + context)
        print(message)

    if maxItems is not None and len(items) > maxItems and printwarnings:
        message = " ! %(count)i x %(itemname)s found, no more than %(max)i expected in this XML unit. Load ABORT. " % {"count": len(items),
                    "itemname": itemname,
                    "max": maxItems} + " in file " + context
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    return items
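
# Example of getXML() behaviour (an illustrative sketch, not part of the loader):
#   getXML("<cave><name>161</name></cave>", "name", maxItems=1, context="demo")
#   returns ['161']; a count outside [minItems, maxItems] also creates a DataIssue.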