import os
import re
from pathlib import Path
from django.conf import settings
from django.db import transaction
from troggle.settings import SURVEX_DATA, EXPOWEB
from troggle.core.models.troggle import DataIssue
from troggle.core.models.caves import Area, Cave, Entrance, CaveSlug, EntranceSlug, CaveAndEntrance
'''Reads all the cave description data by parsing the xml files (stored as e.g. :EXPOWEB:/cave-data/1623-161.html)
and creating the various Cave, Entrance and necessary Area objects.
BUT in Django 2.0 and later we cannot do any queries on data we have just entered
because this is all happening inside one transaction. Bummer.
django.db.transaction.TransactionManagementError:
An error occurred in the current transaction. You can't execute queries until the end of the 'atomic' block.
'''
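# Module-level caches (intent inferred from their use in readcave()/readentrance() below):
# they let the parsers reuse Area, CaveSlug and Entrance objects already seen during this
# import run instead of querying the database again for every file.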
entrances_xslug = {}
caves_xslug = {}
areas_xslug = {}
def readcaves():
    '''Reads the xml-format HTML files in the EXPOWEB repo, not from the loser repo.
    '''
    with transaction.atomic():
        print(" - Deleting Caves and Entrances")
        Cave.objects.all().delete()
        Entrance.objects.all().delete()
        # Clear the cave data issues and the caves as we are reloading
        DataIssue.objects.filter(parser='caves').delete()
        DataIssue.objects.filter(parser='entrances').delete()

        area_1623 = Area.objects.update_or_create(short_name = "1623", parent = None)
        # This returns a tuple, not a single object! i.e. (<Area: 1623>, True)
        #print(f' ! - READ CAVES: {area_1623}')
        area_1626 = Area.objects.update_or_create(short_name = "1626", parent = None)

        print(" - Setting pending caves")
        # Do this first, so that these empty entries are overwritten as they get properly created.
        # These are caves which have surveys but no cave_data/1623-xxx.html XML file yet.
        # It also needs to be done *before* the entrances are read, so that the entrance-cave links work properly.
        pending = ["2007-05", "2007-06", "2007-12", "2009-01", "2009-02",
                   "2010-06", "2010-07", "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
                   "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
                   "2018-pf-01", "2018-pf-02", "haldenloch"]
        for k in pending:
            url = "1623/" + k
            try:
                cave = Cave(
                    unofficial_number = k,
                    # official_name = "",
                    underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
                    survex_file = "caves-1623/" + k + "/" + k + ".svx",
                    url = url,
                    notes = "_Survex file found in loser repo but no description in expoweb <br>\n" +
                            "INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then <br>\n" +
                            "search in the Expo for that year e.g. <a href='/expedition/2007'>2007</a> to find a relevant logbook entry, then <br>\n" +
                            "click on 'New Entrance' at the bottom of this page as we need to create the entrance *first*.")
                if cave:
                    cave.save() # must save to have id before foreign keys work. This is also a ManyToMany key.
                    #print(f' ! - READ CAVES: cave {k} {cave}')
                    cave.area.add(area_1623[0])
                    cave.save()
                    message = f" ! {k:12} {cave.underground_description}"
                    DataIssue.objects.create(parser='caves', message=message, url=url)
                    print(message)

                    try: # Now create a cave slug ID
                        cs = CaveSlug.objects.update_or_create(cave = cave,
                                                               slug = "1623-PENDING-" + k,
                                                               primary = False)
                    except:
                        message = " ! {:11s} PENDING cave slug create failure".format(k)
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                else:
                    message = f' ! {k:11s} PENDING cave slug create failure'
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)
            except:
                message = " ! Error. Cannot create pending cave, pending-id:{}".format(k)
                DataIssue.objects.create(parser='caves', message=message)
                print(message)
                raise
    with transaction.atomic():
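        # Entrances are parsed before caves: readcave() looks up already-created Entrance
        # objects (via their slugs) when it wires up the CaveAndEntrance links.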
print(" - Reading Entrances from entrance descriptions xml files")
print(" - settings.CAVEDESCRIPTIONS: ", settings.CAVEDESCRIPTIONS)
for filename in next(os.walk(settings.ENTRANCEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readentrance(filename)
print(" - Reading Caves from cave descriptions xml files")
for filename in next(os.walk(settings.CAVEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readcave(filename)
def readentrance(filename):
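    '''Reads a single entrance-description XML/HTML file from :EXPOWEB: and creates or
    updates the corresponding Entrance and EntranceSlug objects.
    '''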
    global entrances_xslug
    global caves_xslug
    global areas_xslug

    # Note: these are HTML files in the EXPOWEB repo, not from the loser repo.
    with open(os.path.join(settings.ENTRANCEDESCRIPTIONS, filename)) as f:
        contents = f.read()
    context = filename
    #print("Reading file ENTRANCE {} / {}".format(settings.ENTRANCEDESCRIPTIONS, filename))
    entrancecontentslist = getXML(contents, "entrance", maxItems = 1, context = context)
    if len(entrancecontentslist) != 1:
        message = f'! BAD ENTRANCE at "{filename}"'
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    else:
        entrancecontents = entrancecontentslist[0]
        non_public = getXML(entrancecontents, "non_public", maxItems = 1, context = context)
        name = getXML(entrancecontents, "name", maxItems = 1, context = context)
        slugs = getXML(entrancecontents, "slug", context = context)
        entrance_description = getXML(entrancecontents, "entrance_description", maxItems = 1, context = context)
        explorers = getXML(entrancecontents, "explorers", maxItems = 1, context = context)
        map_description = getXML(entrancecontents, "map_description", maxItems = 1, context = context)
        location_description = getXML(entrancecontents, "location_description", maxItems = 1, context = context)
        approach = getXML(entrancecontents, "approach", maxItems = 1, context = context)
        underground_description = getXML(entrancecontents, "underground_description", maxItems = 1, context = context)
        photo = getXML(entrancecontents, "photo", maxItems = 1, context = context)
        marking = getXML(entrancecontents, "marking", maxItems = 1, context = context)
        marking_comment = getXML(entrancecontents, "marking_comment", maxItems = 1, context = context)
        findability = getXML(entrancecontents, "findability", maxItems = 1, context = context)
        findability_description = getXML(entrancecontents, "findability_description", maxItems = 1, context = context)
        alt = getXML(entrancecontents, "alt", maxItems = 1, context = context)
        northing = getXML(entrancecontents, "northing", maxItems = 1, context = context)
        easting = getXML(entrancecontents, "easting", maxItems = 1, context = context)
        tag_station = getXML(entrancecontents, "tag_station", maxItems = 1, context = context)
        exact_station = getXML(entrancecontents, "exact_station", maxItems = 1, context = context)
        other_station = getXML(entrancecontents, "other_station", maxItems = 1, context = context)
        other_description = getXML(entrancecontents, "other_description", maxItems = 1, context = context)
        bearings = getXML(entrancecontents, "bearings", maxItems = 1, context = context)
        url = getXML(entrancecontents, "url", maxItems = 1, context = context)
        if (len(non_public) == 1 and len(slugs) >= 1 and len(name) >= 1 and len(entrance_description) == 1
                and len(explorers) == 1 and len(map_description) == 1 and len(location_description) == 1
                and len(approach) == 1 and len(underground_description) == 1 and len(marking) == 1
                and len(marking_comment) == 1 and len(findability) == 1 and len(findability_description) == 1
                and len(alt) == 1 and len(northing) == 1 and len(easting) == 1 and len(tag_station) == 1
                and len(exact_station) == 1 and len(other_station) == 1 and len(other_description) == 1
                and len(bearings) == 1 and len(url) == 1):
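            # All expected tags are present with the right multiplicity, so build (or refresh)
            # the Entrance and then one EntranceSlug per <slug> tag, the first slug being primary.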
            e, state = Entrance.objects.update_or_create(name = name[0],
                            non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
                            entrance_description = entrance_description[0],
                            explorers = explorers[0],
                            map_description = map_description[0],
                            location_description = location_description[0],
                            approach = approach[0],
                            underground_description = underground_description[0],
                            photo = photo[0],
                            marking = marking[0],
                            marking_comment = marking_comment[0],
                            findability = findability[0],
                            findability_description = findability_description[0],
                            alt = alt[0],
                            northing = northing[0],
                            easting = easting[0],
                            tag_station = tag_station[0],
                            exact_station = exact_station[0],
                            other_station = other_station[0],
                            other_description = other_description[0],
                            bearings = bearings[0],
                            url = url[0],
                            filename = filename,
                            cached_primary_slug = slugs[0])
            primary = True
            for slug in slugs:
                #print("entrance slug:{} filename:{}".format(slug, filename))
                try:
                    cs = EntranceSlug.objects.update_or_create(entrance = e,
                                    slug = slug,
                                    primary = primary)
                except:
                    # need to cope with duplicates
                    print(" ! FAILED to get only one ENTRANCE when updating using: " + filename)
                    kents = EntranceSlug.objects.all().filter(entrance = e,
                                    slug = slug,
                                    primary = primary)
                    for k in kents:
                        # EntranceSlug.slug is assumed to be a plain field here (not a method like Cave.slug())
                        message = " ! - DUPLICATE in db. entrance:" + str(k.entrance) + ", slug:" + str(k.slug)
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                    for k in kents:
                        if k.slug != None:
                            print(" ! - OVERWRITING this one: slug:" + str(k.slug))
                            k.notes = "DUPLICATE entrance found on import. Please fix\n" + k.notes
                            c = k
                primary = False
        else: # more than one item in long list
            message = f' ! {slug:12} ABORT loading this entrance. in "{filename}"'
            DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
            print(message)

def readcave(filename):
    '''Reads a single cave-description XML/HTML file from :EXPOWEB:.
    Assumes any area it hasn't seen before is a subarea of 1623.
    '''
    global entrances_xslug
    global caves_xslug
    global areas_xslug

    # Note: these are HTML files in the EXPOWEB repo, not from the loser repo.
    with open(os.path.join(settings.CAVEDESCRIPTIONS, filename)) as f:
        contents = f.read()
    context = filename
    cavecontentslist = getXML(contents, "cave", maxItems = 1, context = context)
    if len(cavecontentslist) != 1:
        message = f'! BAD CAVE at "{filename}"'
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    else:
        cavecontents = cavecontentslist[0]
        non_public = getXML(cavecontents, "non_public", maxItems = 1, context = context)
        slugs = getXML(cavecontents, "caveslug", maxItems = 1, context = context)
        official_name = getXML(cavecontents, "official_name", maxItems = 1, context = context)
        areas = getXML(cavecontents, "area", context = context)
        kataster_code = getXML(cavecontents, "kataster_code", maxItems = 1, context = context)
        kataster_number = getXML(cavecontents, "kataster_number", maxItems = 1, context = context)
        unofficial_number = getXML(cavecontents, "unofficial_number", maxItems = 1, context = context)
        explorers = getXML(cavecontents, "explorers", maxItems = 1, context = context)
        underground_description = getXML(cavecontents, "underground_description", maxItems = 1, context = context)
        equipment = getXML(cavecontents, "equipment", maxItems = 1, context = context)
        references = getXML(cavecontents, "references", maxItems = 1, context = context)
        survey = getXML(cavecontents, "survey", maxItems = 1, context = context)
        kataster_status = getXML(cavecontents, "kataster_status", maxItems = 1, context = context)
        underground_centre_line = getXML(cavecontents, "underground_centre_line", maxItems = 1, context = context)
        notes = getXML(cavecontents, "notes", maxItems = 1, context = context)
        length = getXML(cavecontents, "length", maxItems = 1, context = context)
        depth = getXML(cavecontents, "depth", maxItems = 1, context = context)
        extent = getXML(cavecontents, "extent", maxItems = 1, context = context)
        survex_file = getXML(cavecontents, "survex_file", maxItems = 1, context = context)
        description_file = getXML(cavecontents, "description_file", maxItems = 1, context = context)
        url = getXML(cavecontents, "url", maxItems = 1, context = context)
        entrances = getXML(cavecontents, "entrance", context = context)
        if (len(non_public) == 1 and len(slugs) >= 1 and len(official_name) == 1 and len(areas) >= 1
                and len(kataster_code) == 1 and len(kataster_number) == 1 and len(unofficial_number) == 1
                and len(explorers) == 1 and len(underground_description) == 1 and len(equipment) == 1
                and len(references) == 1 and len(survey) == 1 and len(kataster_status) == 1
                and len(underground_centre_line) == 1 and len(notes) == 1 and len(length) == 1
                and len(depth) == 1 and len(extent) == 1 and len(survex_file) == 1
                and len(description_file) == 1 and len(url) == 1 and len(entrances) >= 1):
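            # All expected tags are present, so create/update the Cave, then attach its
            # Areas, CaveSlugs and CaveAndEntrance links, and sanity-check the file paths.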
            try:
                c, state = Cave.objects.update_or_create(non_public = {"True": True, "False": False, "true": True, "false": False,}[non_public[0]],
                                official_name = official_name[0],
                                kataster_code = kataster_code[0],
                                kataster_number = kataster_number[0],
                                unofficial_number = unofficial_number[0],
                                explorers = explorers[0],
                                underground_description = underground_description[0],
                                equipment = equipment[0],
                                references = references[0],
                                survey = survey[0],
                                kataster_status = kataster_status[0],
                                underground_centre_line = underground_centre_line[0],
                                notes = notes[0],
                                length = length[0],
                                depth = depth[0],
                                extent = extent[0],
                                survex_file = survex_file[0],
                                description_file = description_file[0],
                                url = url[0],
                                filename = filename)
            except:
                print(" ! FAILED to get only one CAVE when updating using: " + filename)
                kaves = Cave.objects.all().filter(kataster_number=kataster_number[0])
                for k in kaves:
                    message = " ! - DUPLICATES in db. kataster:" + str(k.kataster_number) + ", slug:" + str(k.slug())
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)
                for k in kaves:
                    if k.slug() != None:
                        print(" ! - OVERWRITING this one: slug:" + str(k.slug()))
                        k.notes = "DUPLICATE kataster number found on import. Please fix\n" + k.notes
                        c = k
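            # Attach the cave to each named Area, creating any unseen area as a child of 1623
            # (via the module-level areas_xslug cache), then create one CaveSlug per <caveslug> tag.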
            for area_slug in areas:
                if area_slug in areas_xslug:
                    newArea = areas_xslug[area_slug]
                else:
                    area = Area.objects.filter(short_name = area_slug)
                    if area:
                        newArea = area[0]
                    else:
                        newArea = Area(short_name = area_slug, parent = Area.objects.get(short_name = "1623"))
                        newArea.save()
                    areas_xslug[area_slug] = newArea
                c.area.add(newArea)

            primary = True
            for slug in slugs:
                if slug in caves_xslug:
                    cs = caves_xslug[slug]
                else:
                    try:
                        cs = CaveSlug.objects.update_or_create(cave = c,
                                        slug = slug,
                                        primary = primary)
                        caves_xslug[slug] = cs
                    except:
                        message = " ! Cave update/create failure: %s, skipping file %s" % (slug, context)
                        DataIssue.objects.create(parser='caves', message=message)
                        print(message)
                primary = False
            for entrance in entrances:
                eslug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]
                letter = getXML(entrance, "letter", maxItems = 1, context = context)[0]
                try:
                    if eslug in entrances_xslug:
                        entrance = entrances_xslug[eslug]
                    else:
                        entrance = Entrance.objects.get(entranceslug__slug = eslug)
                        entrances_xslug[eslug] = entrance
                    ce = CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
                except:
                    message = f' ! Entrance setting failure, slug:"{slug}" letter:"{letter}" cave:"{c}" filename:"{filename}"'
                    DataIssue.objects.create(parser='caves', message=message)
                    print(message)

            if survex_file[0]:
                if not (Path(SURVEX_DATA) / survex_file[0]).is_file():
                    message = f' ! {slug:12} survex filename does not exist :LOSER:"{survex_file[0]}" in "{filename}"'
                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
                    print(message)

            if description_file[0]:
                if not (Path(EXPOWEB) / description_file[0]).is_file():
                    message = f' ! {slug:12} description filename does not exist :{EXPOWEB}:"{description_file[0]}" in "{filename}"'
                    DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
                    print(message)
                #c.description_file="" # done only once, to clear out cruft.
                #c.save()
        else: # more than one item in long list
            message = f' ! ABORT loading this cave. in "{filename}"'
            DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
            print(message)

def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True, context = ""):
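    '''Extracts the text inside each <itemname>...</itemname> pair using a non-greedy
    re.S (DOTALL) regex, and (when printwarnings is set) records a DataIssue if fewer
    than minItems or more than maxItems matches are found. Note that a regex like this
    cannot cope with nested tags of the same name.
    '''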
    items = re.findall("<%(itemname)s>(.*?)</%(itemname)s>" % {"itemname": itemname}, text, re.S)
    if len(items) < minItems and printwarnings:
        message = " ! %(count)i x %(itemname)s found, at least %(min)i expected. Load ABORT. " % {"count": len(items),
                      "itemname": itemname,
                      "min": minItems} + " in file " + context
        DataIssue.objects.create(parser='caves', message=message, url="" + context)
        print(message)

    if maxItems is not None and len(items) > maxItems and printwarnings:
        message = " ! %(count)i x %(itemname)s found, no more than %(max)i expected in this XML unit. Load ABORT. " % {"count": len(items),
                      "itemname": itemname,
                      "max": maxItems} + " in file " + context
        DataIssue.objects.create(parser='caves', message=message)
        print(message)
    return items