mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-25 08:41:51 +00:00

Making entrances work for pending caves

Philip Sargent 2021-04-26 02:10:45 +01:00
parent a656ada67a
commit 72fa8a5883
7 changed files with 143 additions and 61 deletions

View File

@@ -60,7 +60,7 @@ class SimpleTest(SimpleTestCase):
import troggle.core.views.expo
from troggle.core.models.troggle import Expedition
from troggle.core.models.caves import CaveSlug, Cave, CaveAndEntrance, QM, EntranceSlug, Entrance, Area, SurvexStation
from troggle.core.forms import CaveForm, CaveAndEntranceFormSet, VersionControlCommentForm, EntranceForm, EntranceLetterForm
from troggle.core.forms import CaveForm, CaveAndEntranceFormSet, EntranceForm, EntranceLetterForm
from troggle.core.views.login import login_required_if_public
from django.contrib.auth.decorators import login_required
from django.conf import settings

View File

@@ -54,10 +54,10 @@ class CaveForm(ModelForm):
self._errors["url"] = self.error_class(["This field cannot start with a /."])
return self.cleaned_data
class VersionControlCommentForm(forms.Form):
'''Was appended to all forms. Not used currently
'''
description_of_change = forms.CharField(required = True, widget=forms.Textarea(attrs={'rows':2}))
# class VersionControlCommentForm(forms.Form):
# '''Was appended to all forms. Not used currently
# '''
# description_of_change = forms.CharField(required = True, widget=forms.Textarea(attrs={'rows':2}))
class EntranceForm(ModelForm):

View File

@@ -5,6 +5,7 @@ import re
import json
from subprocess import call
from collections import defaultdict
from pathlib import Path
from urllib.parse import urljoin
@@ -22,6 +23,20 @@ from django.template import Context, loader
from troggle.core.models.troggle import TroggleModel, Person, Expedition, DataIssue
from troggle.core.models.survex import SurvexStation
'''The model declarations for Areas, Caves and Entrances. Also LogbookEntry, QM, PersonTrip
'''
todo='''- Move utility function into utils.py
- Find out why we have separate objects CaveSlug and EntranceSlug and why
these are not just a single field on the Model. Do we ever need more
than one slug per cave or entrance? Surely that would break everything??
- Move PersonTrip to be with Person and Expedition elsewhere
- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
'''
class Area(TroggleModel):
short_name = models.CharField(max_length=100)
name = models.CharField(max_length=200, blank=True, null=True)
@@ -344,6 +359,9 @@ class Entrance(TroggleModel):
self.save()
return self.cached_primary_slug
def get_file_path(self):
return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)
def writeDataFile(self):
try:
f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")

View File

@@ -18,7 +18,8 @@ import troggle.settings as settings
from troggle.core.views import expo
from troggle.core.models.troggle import Expedition, DataIssue
from troggle.core.models.caves import CaveSlug, Cave, CaveAndEntrance, QM, EntranceSlug, Entrance, Area, SurvexStation, GetCaveLookup
from troggle.core.forms import CaveForm, CaveAndEntranceFormSet, VersionControlCommentForm, EntranceForm, EntranceLetterForm
from troggle.core.forms import CaveForm, CaveAndEntranceFormSet, EntranceForm, EntranceLetterForm
#from troggle.core.forms import VersionControlCommentForm
from .login import login_required_if_public
'''Manages the complex procedures to assemble a cave description out of the components
@@ -323,7 +324,7 @@ def edit_cave(request, slug=None):
if request.POST:
form = CaveForm(request.POST, instance=cave)
ceFormSet = CaveAndEntranceFormSet(request.POST)
versionControlForm = VersionControlCommentForm(request.POST)
#versionControlForm = VersionControlCommentForm(request.POST)
if form.is_valid() and ceFormSet.is_valid():
#print(f'! POST is valid. {cave}')
cave = form.save(commit = False)
@@ -397,8 +398,6 @@ def edit_entrance(request, caveslug=None, slug=None):
slugname = cave.slug() + entletter.cleaned_data["entrance_letter"]
else:
slugname = cave.slug()
# Converting a PENDING cave to a real cave by saving this form
slugname = slugname.replace('-PENDING-', '-')
entrance.cached_primary_slug = slugname
entrance.filename = slugname + ".html"
entrance.save()

View File

@@ -32,7 +32,7 @@ todo = '''
- But how does this interact with troggle/logbooksdump.py ?
- deleted nefile() - check on deleted UploadFileForm using the editfile.html template which is about re-submitting
- deleted newfile() - check on deleted UploadFileForm using the editfile.html template which is about re-submitting
a LBE aka TripReport
'''
@@ -44,12 +44,16 @@ def todos(request, module):
from troggle.core.TESTS.tests import todo as tests
from troggle.core.views.logbooks import todo as viewlogbooks
from troggle.parsers.logbooks import todo as parserslogbooks
from troggle.parsers.survex import todo as parserssurvex
from troggle.core.models.caves import todo as modelcaves
from troggle.core.forms import todo as forms
from troggle.core.templatetags.wiki_markup import todo as wiki
tododict = {'views/other': todo,
'tests': tests,
'views/logbooks': viewlogbooks,
'parsers/logbooks': parserslogbooks,
'parsers/survex': parserssurvex,
'core/models/caves': modelcaves,
'core/forms': forms,
'core/templatetags/wiki_markup': wiki}
return render(request,'core/todos.html', {'tododict': tododict})

View File

@@ -23,9 +23,104 @@ entrances_xslug = {}
caves_xslug = {}
areas_xslug = {}
def do_pending_cave(k, url, area_1623):
'''
default for a PENDING cave, should be overwritten in the db later if a real cave of the same name exists
in expoweb/cave_data/1623-"k".html
'''
default_note = f"_Survex file found in loser repo but no description in expoweb <br><br><br>\n"
default_note += f"INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then "
default_note += f"<br>\n - search in the Expo for that year e.g. <a href='/expedition/{k[0:4]}'>{k[0:4]}</a> to find a "
default_note += f"relevant logbook entry, then <br>\n - "
default_note += f"click on 'Edit this cave' and copy the information you find in the survex file and the logbook"
default_note += f"<br>\n - "
default_note += f"When you Submit it will create a file file in expoweb/cave_data/ "
default_note += f"<br>\n - Now you can edit the entrance info: click on Edit below for the dummy entrance. "
default_note += f"and then Submit to save it. NB your entrance info will not be visible after a reboot of the server until a programmer has edited parser/caves.py (python code) to remove the cave form the [pending] list. But it won't be lost."
slug = "1623-" + k
cave = Cave(
unofficial_number = k,
underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
survex_file = "caves-1623/" + k + "/" + k +".svx",
url = url,
notes = default_note)
if cave:
cave.save() # must save to have id before foreign keys work. This is also a ManyToMany key.
cave.area.add(area_1623[0])
cave.save()
message = f" ! {k:12} {cave.underground_description}"
DataIssue.objects.create(parser='caves', message=message, url=url)
print(message)
try: # Now create a cave slug ID
cs = CaveSlug.objects.update_or_create(cave = cave,
slug = slug, primary = False)
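# NB update_or_create() returns a (CaveSlug, created) tuple, not a bare CaveSlug object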
except:
message = f" ! {k:11s} PENDING cave SLUG create failure"
DataIssue.objects.create(parser='caves', message=message)
print(message)
else:
message = f' ! {k:11s} PENDING cave create failure'
DataIssue.objects.create(parser='caves', message=message)
print(message)
ent = Entrance(
name = k,
entrance_description = "Dummy entrance: auto-created when registering a new cave." +
"This file WILL NOT BE LOADED while the cave is in the pending[] list in parsers/caves.py",
marking = '?')
if ent:
ent.save() # must save to have id before foreign keys work.
try: # Now create an entrance slug ID
es = EntranceSlug.objects.update_or_create(entrance = ent,
slug = slug, primary = False)
except:
message = f" ! {k:11s} PENDING entrance create failure"
DataIssue.objects.create(parser='caves', message=message)
print(message)
# Now we will actually write this default entrance slugfile. Yes it's naughty, but an extra
# entrance file is less hassle than a missing one! And people always forget to save the entrance file
# when they are saving the edited cave file.
# This WILL then cause an error when the parser tries to import this file, as we get an import failure because
# the entrance already exists in the db.
ent.cached_primary_slug = slug
ent.filename = slug + ".html"
ent.save()
if not ent.get_file_path().is_file():
# don't overwrite if by some chance it exists
# but the pre-existing file won't be parsed later until the cave is removed from the pending list
ent.writeDataFile()
else:
message = f" ! {k:11s} PENDING cave SLUG '{slug}' create failure"
DataIssue.objects.create(parser='caves', message=message)
print(message)
try:
ceinsts = CaveAndEntrance.objects.update_or_create(cave = cave, entrance_letter = "", entrance = ent)
for ceinst in ceinsts:
if str(ceinst) == str(cave): # magic runes... why is the next value a Bool?
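# (update_or_create() above returned a (CaveAndEntrance, created) tuple, so iterating over ceinsts yields the object and then the created Bool)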
ceinst.cave = cave
ceinst.save()
break
except:
message = f" ! {k:11s} PENDING entrance + cave UNION create failure '{cave}' [{ent}]"
DataIssue.objects.create(parser='caves', message=message)
print(message)
def readcaves():
'''Reads the xml-format HTML files in the EXPOWEB repo, not from the loser repo.
'''
# For those caves which do not have cave_data/1623-xxx.html XML files even though they exist and have surveys
pending = ["2007-06", "2009-02",
"2010-06", "2010-07", "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
"2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
"2018-pf-01", "2018-pf-02"]
with transaction.atomic():
print(" - Deleting Caves and Entrances")
Cave.objects.all().delete()
@@ -33,7 +128,6 @@ def readcaves():
# Clear the cave data issues and the caves as we are reloading
DataIssue.objects.filter(parser='caves').delete()
DataIssue.objects.filter(parser='entrances').delete()
area_1623 = Area.objects.update_or_create(short_name = "1623", parent = None)
# update_or_create() returns a tuple (object, created), not a single object, i.e. (<Area: 1623>, True)
@@ -44,64 +138,26 @@ def readcaves():
print (" - Setting pending caves")
# Do this first, so that these empty entries are overwritten as they get properly created.
# For those caves which do not have cave_data/1623-xxx.html XML files even though they exist and have surveys
pending = ["2007-06", "2009-01", "2009-02",
"2010-06", "2010-07", "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
"2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
"2018-pf-01", "2018-pf-02", "haldenloch"]
for k in pending:
url = "1623/" + k
url = "1623/" + k # Note we are not appending the .htm as we are modern folks now.
try:
# default for a PENDING cave, overwritten if a real cave exists
cave = Cave(
unofficial_number = k,
# official_name = "",
underground_description = "Pending cave write-up - creating as empty object. No XML file available yet.",
survex_file = "caves-1623/" + k + "/" + k +".svx",
url = url,
notes=f"_Survex file found in loser repo but no description in expoweb <br><br><br>\n"+
f"INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then " +
f"<br>\n - search in the Expo for that year e.g. <a href='/expedition/{k[0:4]}'>{k[0:4]}</a> to find a relevant logbook entry, then \n - " +
f"click on 'Edit this cave' and copy the information you find in the survex file and the logbook"+
f"<br>\n - " +
f"When you Submit it will create a file file in expoweb/cave_data/ " +
f"<br>\n - but you have not finished yet. You MUST go and create the entrance: click on New Entrance. Then this will no longer be 'Pending' once the flag has been removed from the code")
if cave:
cave.save() # must save to have id before foreign keys work. This is also a ManyToMany key.
#print(f' ! - READ CAVES: cave {k} {cave}')
cave.area.add(area_1623[0])
cave.save()
message = f" ! {k:12} {cave.underground_description}"
DataIssue.objects.create(parser='caves', message=message, url=url)
print(message)
try: # Now create a cave slug ID
cs = CaveSlug.objects.update_or_create(cave = cave,
slug = "1623-" + k,
primary = False)
except:
message = " ! {:11s} {} PENDING cave slug create failure".format(k)
DataIssue.objects.create(parser='caves', message=message)
print(message)
else:
message = f' ! {k:11s} PENDING cave slug create failure'
DataIssue.objects.create(parser='caves', message=message)
print(message)
do_pending_cave(k, url, area_1623)
except:
message = " ! Error. Cannot create pending cave, pending-id:{}".format(k)
message = " ! Error. Cannot create pending cave and entrance, pending-id:{}".format(k)
DataIssue.objects.create(parser='caves', message=message)
print(message)
raise
with transaction.atomic():
print(" - Reading Entrances from entrance descriptions xml files")
print(" - settings.CAVEDESCRIPTIONS: ", settings.CAVEDESCRIPTIONS)
print(" - Reading Entrances from entrance descriptions xml files")
for filename in next(os.walk(settings.ENTRANCEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
if filename.endswith('.html'):
readentrance(filename)
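# Path(filename).stem[5:] strips the leading '1623-' so the name can be matched against the pending list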
if Path(filename).stem[5:] in pending:
print(f'Skipping pending entrance dummy file <{filename}>')
else:
readentrance(filename)
print(" - Reading Caves from cave descriptions xml files")
for filename in next(os.walk(settings.CAVEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
@@ -182,13 +238,14 @@ def readentrance(filename):
primary = primary)
except:
# need to cope with duplicates
print(" ! FAILED to get only one ENTRANCE when updating using: "+filename)
message = f" ! FAILED to get precisely one ENTRANCE when updating using: cave_entrance/{filename}"
DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
kents = EntranceSlug.objects.all().filter(entrance = e,
slug = slug,
primary = primary)
for k in kents:
message = " ! - DUPLICATE in db. entrance:"+ str(k.entrance) + ", slug:" + str(k.slug())
DataIssue.objects.create(parser='caves', message=message)
DataIssue.objects.create(parser='caves', message=message, url=f'/cave/{slug}/edit/')
print(message)
for k in kents:
if k.slug() != None:
@@ -300,7 +357,7 @@ def readcave(filename):
primary = primary)
caves_xslug[slug] = cs
except Exception as ex:
message = " ! Cave update/create failure : %s, skipping file %s\nException: %s" % (slug, context, ex.__class__)
message = " ! Cave update/create failure : %s, skipping file cave_data/%s with exception\nException: %s" % (slug, context, ex.__class__)
DataIssue.objects.create(parser='caves', message=message)
print(message)

View File

@@ -23,6 +23,10 @@ from troggle.core.models.survex import SurvexPersonRole, ScansFolder, SurvexDire
It also does NOT scan the Loser repo for all the svx files - though it should!
'''
todo = '''Also walk the entire tree in the :loser: repo looking for unconnected survex files
- add them to the system so that they can be reported-on
- produce a parser report and create a troggle report page (some are OK, e.g. futility series replaced by ARGE survey in 115)
'''
survexblockroot = None
ROOTBLOCK = "rootblock"
METRESINFEET = 3.28084
@@ -658,7 +662,7 @@ class LoadingSurvex():
#print("\n"+message)
#print("\n"+message,file=sys.stderr)
return
message = " ! {} is not a cave. (while creating '{}' sfile & sdirectory)".format(headpath, includelabel)
message = f" ! {headpath} is not a fully-registered cave. (while creating '{includelabel}' sfile & sdirectory in survex parsing)"
print("\n"+message)
print("\n"+message,file=sys.stderr)
DataIssue.objects.create(parser='survex', message=message)