2011-07-11 02:10:22 +01:00
|
|
|
import datetime
|
|
|
|
import difflib
|
2023-01-19 18:35:56 +00:00
|
|
|
import os
|
|
|
|
import re
|
2022-08-16 18:02:28 +01:00
|
|
|
import socket
|
2023-01-19 18:35:56 +00:00
|
|
|
from pathlib import Path
|
2023-02-27 16:42:08 +00:00
|
|
|
from collections import namedtuple
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2020-05-28 04:54:53 +01:00
|
|
|
from django import forms
|
2023-02-26 22:13:37 +00:00
|
|
|
from django.db import models
|
2023-02-27 22:23:24 +00:00
|
|
|
from django.db.models import Q
|
2023-02-26 22:13:37 +00:00
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
|
2023-01-30 23:04:11 +00:00
|
|
|
from django.http import HttpResponse
|
2021-03-28 03:48:24 +01:00
|
|
|
from django.shortcuts import render
|
2021-03-26 19:42:58 +00:00
|
|
|
from django.views.decorators.csrf import ensure_csrf_cookie
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2023-01-19 18:35:56 +00:00
|
|
|
import troggle.settings as settings
|
2023-02-27 16:42:08 +00:00
|
|
|
from troggle.core.models.logbooks import LogbookEntry
|
2023-09-06 11:20:29 +01:00
|
|
|
from troggle.core.models.caves import Cave, GetCaveLookup
|
2023-09-05 19:46:10 +01:00
|
|
|
from troggle.core.models.survex import SurvexFile, SurvexBlock, SurvexDirectory
|
2023-02-27 16:42:08 +00:00
|
|
|
from troggle.core.models.wallets import Wallet
|
2023-01-30 23:04:11 +00:00
|
|
|
from troggle.core.utils import only_commit
|
2023-03-05 23:06:06 +00:00
|
|
|
from troggle.parsers.survex import parse_one_file
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
"""Everything that views survexfiles
|
2021-04-13 01:37:42 +01:00
|
|
|
but also displays data on a cave or caves when there is ambiguity
|
2023-01-30 19:04:36 +00:00
|
|
|
"""
|
2021-04-13 01:37:42 +01:00
|
|
|
|
2023-02-26 22:13:37 +00:00
|
|
|
todo = """- survexcavesingle is not properly producing any result for Homecoming, 1626-359, 2018-dm-07
|
2021-05-02 22:48:25 +01:00
|
|
|
even though there are dozens of surveys.
|
2023-02-26 22:13:37 +00:00
|
|
|
|
2023-03-05 23:55:00 +00:00
|
|
|
- REFACTOR the very impenetrable code for scanning subdirectories, replace with modern python pathlib
|
|
|
|
|
|
|
|
- filter out the non-public caves from display UNLESS LOGGED IN
|
|
|
|
|
2023-02-26 22:13:37 +00:00
|
|
|
- Never actual uses the object for the survexfile, works entirely from the filepath! Make it check and validate
|
|
|
|
|
2023-03-05 23:55:00 +00:00
|
|
|
- the primary survex file in each cave directory should be in a configuration, not buried in the code...
|
|
|
|
|
2023-03-06 04:52:41 +00:00
|
|
|
- Save and re-parse an edited survexfile which already exists in the db, and update
|
2023-03-05 23:55:00 +00:00
|
|
|
all its dependencies (work in progress)
|
2023-03-23 19:05:25 +00:00
|
|
|
|
|
|
|
- overlapping and cross-calling when things fail make this hard to understand, e.g. svx() and
|
|
|
|
survexcavessingle() can get called for a survex file depending on whether the URL ends in ".svx" or not,
|
|
|
|
but each tries to handle the other case too.
|
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
"""
|
2021-05-02 22:48:25 +01:00
|
|
|
|
2023-03-23 19:05:25 +00:00
|
|
|
SVXPATH = Path(settings.SURVEX_DATA)
|
2020-05-28 04:54:53 +01:00
|
|
|
|
2023-03-14 16:10:57 +00:00
|
|
|
# NB this template text must be identical to that in :loser:/templates/template.svx
|
2020-07-01 00:20:27 +01:00
|
|
|
survextemplatefile = """; *** THIS IS A TEMPLATE FILE NOT WHAT YOU MIGHT BE EXPECTING ***
|
|
|
|
|
|
|
|
*** DO NOT SAVE THIS FILE WITHOUT RENAMING IT !! ***
|
2020-06-02 21:38:29 +01:00
|
|
|
;[Stuff in square brackets is example text to be replaced with real data,
|
|
|
|
; removing the square brackets]
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
*begin [surveyname]
|
|
|
|
|
2020-06-02 21:38:29 +01:00
|
|
|
; stations linked into other surveys (or likely to)
|
|
|
|
*export [1 8 12 34]
|
|
|
|
|
|
|
|
; Cave:
|
|
|
|
; Area in cave/QM:
|
|
|
|
*title ""
|
2020-07-01 00:20:27 +01:00
|
|
|
*date [2040.07.04] ; <-- CHANGE THIS DATE
|
|
|
|
*team Insts [Fred Fossa]
|
|
|
|
*team Notes [Brenda Badger]
|
|
|
|
*team Pics [Luke Lynx]
|
|
|
|
*team Tape [Albert Aadvark]
|
2020-06-02 21:38:29 +01:00
|
|
|
*instrument [SAP #+Laser Tape/DistoX/Compass # ; Clino #]
|
|
|
|
; Calibration: [Where, readings]
|
2020-07-01 00:20:27 +01:00
|
|
|
*ref [2040#00] ; <-- CHANGE THIS TOO
|
2020-06-02 21:38:29 +01:00
|
|
|
; the #number is on the clear pocket containing the original notes
|
|
|
|
|
|
|
|
; if using a tape:
|
|
|
|
*calibrate tape +0.0 ; +ve if tape was too short, -ve if too long
|
|
|
|
|
|
|
|
; Centreline data
|
|
|
|
*data normal from to length bearing gradient ignoreall
|
|
|
|
[ 1 2 5.57 034.5 -12.8 ]
|
|
|
|
|
|
|
|
;-----------
|
|
|
|
;recorded station details (leave commented out)
|
|
|
|
;(NP=Nail Polish, LHW/RHW=Left/Right Hand Wall)
|
|
|
|
;Station Left Right Up Down Description
|
|
|
|
;[Red] nail varnish markings
|
|
|
|
[;1 0.8 0 5.3 1.6 ; NP on boulder. pt 23 on foo survey ]
|
|
|
|
[;2 0.3 1.2 6 1.2 ; NP '2' LHW ]
|
|
|
|
[;3 1.3 0 3.4 0.2 ; Rock on floor - not refindable ]
|
|
|
|
|
|
|
|
|
|
|
|
;LRUDs arranged into passage tubes
|
|
|
|
;new *data command for each 'passage',
|
|
|
|
;repeat stations and adjust numbers as needed
|
|
|
|
*data passage station left right up down
|
|
|
|
;[ 1 0.8 0 5.3 1.6 ]
|
|
|
|
;[ 2 0.3 1.2 6 1.2 ]
|
|
|
|
*data passage station left right up down
|
|
|
|
;[ 1 1.3 1.5 5.3 1.6 ]
|
|
|
|
;[ 3 2.4 0 3.4 0.2 ]
|
|
|
|
|
|
|
|
|
|
|
|
;-----------
|
2023-03-22 23:28:26 +00:00
|
|
|
;Question Mark List ;(keep initial semi-colon on each line)
|
2020-06-02 21:38:29 +01:00
|
|
|
; The nearest-station is the name of the survey and station which are nearest to
|
|
|
|
; the QM. The resolution-station is either '-' to indicate that the QM hasn't
|
|
|
|
; been checked; or the name of the survey and station which push that QM. If a
|
|
|
|
; QM doesn't go anywhere, set the resolution-station to be the same as the
|
|
|
|
; nearest-station. Include any relevant details of how to find or push the QM in
|
|
|
|
; the textual description.
|
2023-03-17 14:33:30 +00:00
|
|
|
;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
|
2020-06-02 21:38:29 +01:00
|
|
|
;[ QM1 A surveyname.3 - description of QM ]
|
|
|
|
;[ QM2 B surveyname.5 - description of QM ]
|
|
|
|
|
2023-03-14 16:10:57 +00:00
|
|
|
;TICKed off QMs
|
|
|
|
; in the past, if another survey existed, the resolution-station
|
|
|
|
; field was filled in, e.g.
|
|
|
|
;[ QM2 B surveyname.5 anothersurvey.7 description of QM and description of progress ]
|
|
|
|
|
|
|
|
; or we can use the trial format
|
|
|
|
;Serial number TICK date resolution description
|
|
|
|
;[QM2 TICK 2022-07-20 This is an example ticked QM]
|
|
|
|
|
2020-06-02 21:38:29 +01:00
|
|
|
;------------
|
|
|
|
;Cave description ;(leave commented-out)
|
2023-03-14 16:10:57 +00:00
|
|
|
;Freeform text describing this section of the cave
|
2020-06-02 21:38:29 +01:00
|
|
|
|
|
|
|
*end [surveyname]
|
|
|
|
"""
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2023-03-06 04:52:41 +00:00
|
|
|
def get_survexfile(filename):
    """Get the SurvexFile object from the survex path for the file, in a robust way.

    Returns False when no SurvexFile has been created in the db yet for this path.
    If several objects share the same path (a parser bug), returns the first one
    and reports the duplicates to the console.
    """
    refs = SurvexFile.objects.filter(path=filename)
    if len(refs) == 0:  # new survex file, not created in db yet
        survexfile = False
    elif len(refs) == 1:
        # Use the row we already fetched; the old code issued a redundant
        # second query: SurvexFile.objects.get(path=filename)
        survexfile = refs[0]
    else:
        survexfile = refs[0]
        # OK this is due to a bug in the import file parsing, whoops. Now fixed ?!
        print("BUG - to be fixed in the survex parser - not critical..")
        print(f"Number of SurvexFile objects found: {len(refs)}")
        for s in refs:
            print(s.path, s.survexdirectory, s.cave)
    # print(type(survexfile), filename)
    return survexfile
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
class SvxForm(forms.Form):
    """Two-pane form, upper half is the raw survex file, lower half (with green background)
    is the output : of running 'cavern' on the survex file, of running a 'difference', of
    checking that there are no square brackets left.
    """

    dirname = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
    filename = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
    datetime = forms.DateTimeField(widget=forms.TextInput(attrs={"readonly": True}))
    outputtype = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
    code = forms.CharField(widget=forms.Textarea(attrs={"cols": 140, "rows": 36}))
    # NOTE(review): models.ForeignKey is a *model* field declared on a forms.Form; it is
    # not a form field, it just ends up as a plain class attribute used as a cache slot
    # for the SurvexFile object. Confirm and consider replacing with `survexfile = None`.
    survexfile = models.ForeignKey(SurvexFile, blank=True, null=True, on_delete=models.SET_NULL)  # 1:1 ?

    # Set True by GetDiscCode() when it falls back to the boilerplate template
    template = False

    def GetDiscCode(self):
        """Return the text of the survex file named in the form data.

        Falls back to the boilerplate template (and sets self.template) when
        the file does not exist on disc yet.
        """
        fname = SVXPATH / (self.data["filename"] + ".svx")
        if not fname.is_file():
            print(">>> >>> WARNING - svx file not found, showing TEMPLATE SVX", fname, flush=True)
            self.template = True
            self.survexfile = False
            return survextemplatefile
        if not self.survexfile:
            self.survexfile = get_survexfile(self.data["filename"])
        try:
            # `with` guarantees the handle is closed even if read() raises
            # (the old code leaked the handle on exception)
            with open(fname, "r", encoding="utf8", newline="") as fin:
                svxtext = fin.read()
        except UnicodeDecodeError:  # was a bare except:; only decode errors warrant the retry
            # hack for legacy files. Replace this with something better.
            with open(fname, "r", encoding="iso-8859-1", newline="") as fin:
                svxtext = fin.read()
        return svxtext

    def DiffCode(self, rcode):
        """Return a list of non-blank unified-diff lines between the on-disc file and rcode."""
        code = self.GetDiscCode()
        difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
        difflist = [diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline)]
        return difflist

    def SaveCode(self, rcode):
        """Validate rcode and write it to disc, then commit to git and re-parse.

        Returns a human-readable status or error message for display in the lower pane.
        """
        fname = SVXPATH / (self.data["filename"] + ".svx")
        if not fname.is_file():
            # A brand-new file created from the template: all the [..] guidance must be gone
            if re.search(r"\[|\]", rcode):
                errmsg = "Error: remove all []s from the text.\nEverything inside [] are only template guidance.\n\n"
                errmsg += "All [] must be edited out and replaced with real data before you can save this file.\n"
                return errmsg
        mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
        if not mbeginend:
            return "Error: no begin/end block here"
        if mbeginend.group(1) != mbeginend.group(2):
            return "Error: mismatching begin/end labels"

        # Make this create new survex folders if needed
        try:
            fout = open(fname, "w", encoding="utf8", newline="\n")
        except FileNotFoundError:
            # parent directory missing: create it, then retry
            pth = os.path.dirname(self.data["filename"])
            newpath = SVXPATH / pth
            if not os.path.exists(newpath):
                os.makedirs(newpath)
            fout = open(fname, "w", encoding="utf8", newline="\n")
        except PermissionError:
            return (
                "CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file. Ask a nerd to fix this."
            )

        # javascript seems to insert CRLF on WSL1 whatever you say. So fix that:
        fout.write(rcode.replace("\r", ""))
        fout.write("\n")
        fout.close()

        if socket.gethostname() == "expo":
            comment = f"Online survex edit: {self.data['filename']}.svx"
        else:
            comment = f"Online survex edit: {self.data['filename']}.svx on dev machine '{socket.gethostname()}' "
        only_commit(fname, comment)

        msg = "SAVED and committed to git (if there were differences)"
        # TODO: should only call this if something actually changed
        if parse_one_file(self.data["filename"]):
            return msg
        else:
            return msg + "\nBUT PARSING failed. Do a completely new databaseReset."

    def Process(self):
        """Run cavern on the form's survex file and return a trimmed copy of its log."""
        print(">>>>....\n....Processing\n")
        froox = os.fspath(SVXPATH / (self.data["filename"] + ".svx"))
        froog = os.fspath(SVXPATH / (self.data["filename"] + ".log"))
        cwd = os.getcwd()
        # cavern must run in the survex file's directory so relative *include paths resolve
        os.chdir(os.path.split(froox)[0])
        os.system(settings.CAVERN + " --log " + froox)
        os.chdir(cwd)

        # TODO: update this to use the new syntax, e.g.
        # sp = subprocess.run([settings.CAVERN, "--log", f'--output={outputdir}', f'{fullpath}.svx'],
        #                     capture_output=True, check=False, text=True)
        # if sp.returncode != 0:
        #     message = f' ! Error running {settings.CAVERN}: {fullpath}'
        #     DataIssue.objects.create(parser='entrances', message=message)

        filepatherr = Path(SVXPATH / str(self.data["filename"] + ".err"))
        if filepatherr.is_file():
            if filepatherr.stat().st_size == 0:
                filepatherr.unlink()  # delete empty closure error file

        with open(froog, "r", encoding="utf8") as fin:
            log = fin.read()
        # Strip cavern's progress chatter but keep any ERROR MESSAGES.
        # BUG FIX: a missing comma previously fused "Concatenating traverses..." with
        # "Simplifying network..." into one string, so neither was ever removed alone.
        for s in [
            "Removing trailing traverses...\n\n",
            "Concatenating traverses...\n\n",
            "Simplifying network...\n\n",
            "Calculating network...\n\n",
            "Calculating traverses...\n\n",
            "Calculating trailing traverses...\n\n",
            "Calculating statistics...\n\n",
        ]:
            log = log.replace(s, "")
        return log
|
|
|
|
|
|
|
|
|
2021-03-26 19:42:58 +00:00
|
|
|
@ensure_csrf_cookie
def svx(request, survex_file):
    """Displays a single survex file in a textarea window (using a javascript online editor to enable
    editing) with buttons which allow SAVE, check for DIFFerences from saved, and RUN (which runs the
    cavern executable and displays the output below the main textarea).

    Requires CSRF to be set up correctly, and requires permission to write to the filesystem.

    Originally the non-existence of difflist was used as a marker to say that the file had been saved
    and that thus there were no differences. This is inadequate, as a new file which has not been saved
    also has no difflist.

    Needs refactoring. Too many piecemeal edits and odd state dependencies.

    On GET does the SAME THING as svxcavesingle but is called when the .svx suffix is MISSING
    """
    warning = False

    print(survex_file)
    if survex_file.lower().endswith(".svx"):
        # cope with ".svx.svx" bollox
        survex_file = survex_file[:-4]
        print(survex_file)

    # get the basic data from the file given in the URL
    dirname = os.path.split(survex_file)[0]  # replace with proper pathlib function..
    dirname += "/"
    nowtime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    outputtype = "normal"
    form = SvxForm({"filename": survex_file, "dirname": dirname, "datetime": nowtime, "outputtype": outputtype})

    # if the form has been returned
    difflist = []
    logmessage = ""
    message = ""

    if request.method == "POST":  # If the form has been submitted...
        rform = SvxForm(request.POST)
        if rform.is_valid():  # All validation rules pass (how do we check it against the filename and users?)
            rcode = rform.cleaned_data["code"]
            outputtype = rform.cleaned_data["outputtype"]  # used by CodeMirror ajax I think
            difflist = form.DiffCode(rcode)
            sfile = form.survexfile

            if "revert" in rform.data:
                pass

            if "process" in rform.data:
                if difflist:
                    message = "SAVE FILE FIRST"
                    form.data["code"] = rcode
                elif sfile:
                    logmessage = form.Process()
                    if logmessage:
                        message = f"OUTPUT FROM PROCESSING\n{logmessage}"
                else:
                    message = "SAVE VALID FILE FIRST"
                    form.data["code"] = rcode

            if "save" in rform.data:
                if request.user.is_authenticated:
                    if difflist:
                        message = form.SaveCode(rcode)
                    else:
                        message = "NO DIFFERENCES - so not saving the file"
                else:
                    message = "You do not have authority to save this file. Please log in."
                if message != "SAVED":
                    form.data["code"] = rcode

            if "diff" in rform.data:
                print("Differences: ")
                form.data["code"] = rcode

    # GET, also fall-through after POST-specific handling
    svxfile = get_survexfile(survex_file)

    if "code" not in form.data:
        # fresh GET: load the file (or the template) from disc
        form.data["code"] = form.GetDiscCode()
        if form.template:
            warning = True
        if not difflist:
            if svxfile:
                difflist.append("No differences from last saved file.")
            else:
                difflist.append("No differences from last saved file (or from initial template).")
    if message:
        difflist.insert(0, message)

    svxincludes = re.findall(r"(?i)\*include\s+(\S+)", form.data["code"] or "")

    # collect all the survex blocks which actually have a valid date
    if svxfile:
        has_3d = (Path(SVXPATH) / Path(survex_file + ".3d")).is_file()
        try:
            svxblocks = svxfile.survexblock_set.filter(date__isnull=False).order_by('date')
        except Exception:  # was a bare except:; kept broad but explicit
            svxblocks = []
        try:
            svxblocksall = svxfile.survexblock_set.all()
            svxlength = 0.0
            for b in svxblocksall:
                svxlength += b.legslength
                # print(svxlength, b, b.legsall)
        except AttributeError:  # some survexfiles just *include files and have no blocks themselves
            svxblocksall = []
            svxlength = 0.0  # BUG FIX: was left unset on this path, causing a NameError below
    else:
        svxblocks = []
        svxblocksall = []
        svxlength = 0.0
        has_3d = False
        if not difflist:
            difflist = ["Survex file does not exist yet"]

    events = events_on_dates(svxblocks)

    vmap = {
        "settings": settings,
        "warning": warning,
        "has_3d": has_3d,
        "survexfile": svxfile,
        "svxlength": svxlength,
        "svxblocks": svxblocks,
        "svxincludes": svxincludes,
        "difflist": difflist,
        "logmessage": logmessage,
        "form": form,
        "events": events,
    }

    if outputtype == "ajax":  # used by CodeMirror ajax I think
        return render(request, "svxfiledifflistonly.html", vmap)

    return render(request, "svxfile.html", vmap)
|
|
|
|
|
2023-02-27 22:23:24 +00:00
|
|
|
SameDateEvents = namedtuple('SameDateEvents', ['trips', 'svxfiles', 'wallets', 'blocks'])


def events_on_dates(svxblocks):
    """Returns a dictionary indexed by date. For each date there is a named tuple of 4
    collections: logbookentries (trips), survexfiles (NB files, not blocks), wallets,
    and the names of the survex blocks surveyed on that date.
    (The old docstring said "3 lists" but the tuple has always had 4 fields.)
    """
    # deduplicate but maintain date order
    dates = list(dict.fromkeys(b.date for b in svxblocks))

    events = {}
    for date in dates:
        trips = LogbookEntry.objects.filter(date=date)

        svxfiles = SurvexFile.objects.filter(survexblock__date=date).distinct()

        # https://stackoverflow.com/questions/739776/how-do-i-do-an-or-filter-in-a-django-query
        wallets = Wallet.objects.filter(Q(survexblock__date=date) | Q(walletdate=date)).distinct()

        blocks = [b.name for b in svxblocks if b.date == date]

        events[date] = SameDateEvents(trips=trips, svxfiles=svxfiles, wallets=wallets, blocks=blocks)
        # print(events)
    return events
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2022-03-11 16:22:37 +00:00
|
|
|
# The cavern running function. This is NOT where it is run inside the form! see SvxForm.Process() for that
|
2011-07-11 02:10:22 +01:00
|
|
|
def process(survex_file):
    """This runs cavern only where a .3d, .log or .err file is requested.

    Runs cavern with cwd set to the survex file's directory (so relative *include
    paths resolve) instead of the old os.chdir()/os.system() dance, which mutated
    the whole web-server process's working directory and broke on paths with spaces.
    """
    import subprocess  # local import: this module did not previously import subprocess

    filepathsvx = SVXPATH / str(survex_file + ".svx")
    subprocess.run(
        [settings.CAVERN, "--log", os.fspath(filepathsvx)],
        cwd=os.path.split(os.fspath(SVXPATH / survex_file))[0],
        check=False,  # cavern failure is reported via its .log file, not an exception
    )

    filepatherr = Path(SVXPATH / str(survex_file + ".err"))
    if filepatherr.is_file():
        if filepatherr.stat().st_size == 0:
            filepatherr.unlink()  # delete empty closure error file
|
|
|
|
|
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
def threed(request, survex_file):
    """Serve the compiled .3d file; if it is missing, run cavern and serve the log instead.

    Fixes: a dead no-op expression building the .log path, and file handles that were
    opened but never closed (now read via pathlib, which closes them).
    """
    filepath3d = SVXPATH / str(survex_file + ".3d")
    if filepath3d.is_file():
        return HttpResponse(filepath3d.read_bytes(), content_type="application/x-aven")
    else:
        process(survex_file)  # should not need to do this if it already exists, as it should.
        log = (SVXPATH / str(survex_file + ".log")).read_text(encoding="utf-8")
        return HttpResponse(log, content_type="text")
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2022-03-11 16:22:37 +00:00
|
|
|
def svxlog(request, survex_file):
    """Used for rendering .log files from survex outputtype.

    Runs cavern first if the log does not exist yet. The log is now read via
    pathlib with an explicit encoding; the old open() leaked the file handle.
    """
    filepathlog = SVXPATH / str(survex_file + ".log")
    if not filepathlog.is_file():
        process(survex_file)
    log = filepathlog.read_text(encoding="utf-8")
    return HttpResponse(log, content_type="text/plain; charset=utf-8")  # default: "text/html; charset=utf-8"
|
|
|
|
|
2020-06-24 01:57:20 +01:00
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
def err(request, survex_file):
    """Serve the survex closure-error (.err) file, regenerating it with cavern first."""
    filepatherr = SVXPATH / str(survex_file + ".err")
    # probably not there because it was empty, but re-run anyway.
    # BUG FIX: cavern was previously run twice when the file was missing
    # (once conditionally, then once unconditionally) - once is enough.
    process(survex_file)
    if filepatherr.is_file():
        err = filepatherr.read_text()  # old open() leaked the file handle
        return HttpResponse(err, content_type="text/plain; charset=utf-8")
    else:
        return HttpResponse(
            f"No closure errors. \nEmpty {filepatherr} file produced. \nSee the .log file.",
            content_type="text/plain; charset=utf-8",
        )
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
|
|
|
|
def identifycavedircontents(gcavedir):
    """
    Find the primary survex file in each cave directory.
    This should be in a configuration, not buried in the code...

    Returns (subdirs, subsvx): the non-hidden subdirectory names, and the .svx
    file stems sorted alphabetically with the primary file moved to the front.

    For gods sake someone refactor this monstrosity using pathlib
    """
    name = os.path.split(gcavedir)[1]
    # Hand-maintained exclusions: files to ignore in particular cave directories.
    # These may get outdated as data gets tidied up. This should not be in the code!
    excluded = {
        "204": ("skel.svx", "template.svx", "204withents.svx"),
        "136": ("136-noents.svx",),
        "115": ("115cufix.svx", "115fix.svx"),
    }
    subdirs = []
    subsvx = []
    primesvx = None
    for entry in os.listdir(gcavedir):
        if entry in excluded.get(name, ()):
            continue
        if os.path.isdir(os.path.join(gcavedir, entry)):
            if not entry.startswith("."):
                subdirs.append(entry)
            continue
        if not entry.endswith(".svx"):
            # .3d/.log/.err/.txt/.tmp/.diff/.spec/backup files etc - ignore
            continue
        stem = entry[:-4]
        is_primary_candidate = (
            stem.lower() == name.lower()
            or stem.startswith("all")
            or (name, stem) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]
        )
        if not is_primary_candidate:
            subsvx.append(stem)
        elif not primesvx or stem.startswith("all"):
            # first candidate wins, except an "all*" file outranks a previous winner
            primesvx = stem
    subsvx.sort()
    if primesvx:
        subsvx.insert(0, primesvx)
    return subdirs, subsvx
|
2020-05-28 01:16:45 +01:00
|
|
|
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2021-03-26 02:01:29 +00:00
|
|
|
def get_survexareapath(area):
    """Return the survex cave directory Path for an area code, e.g. "1623" -> .../caves-1623."""
    return SVXPATH / f"caves-{area}"
|
2023-01-30 19:04:36 +00:00
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
# direct local non-database browsing through the svx file repositories
|
2021-03-26 02:01:29 +00:00
|
|
|
# every time the page is viewed! Should cache this.
|
2011-07-11 02:10:22 +01:00
|
|
|
def survexcaveslist(request):
    """This reads the entire list of caves in the Loser repo directory and produces a complete report.
    It can find caves which have not yet been properly registered in the system by Databasereset.py because
    someone may have uploaded the survex files with git without doing the rest of the integration process.

    But maybe we don't care if someone has done that!
    In which case we don't need any of this reading the filesystem, we can generate it all from
    what is already in the db, and just construct: onefilecaves, multifilecaves, subdircaves.

    It uses very impenetrable code in identifycavedircontents()

    Renders "svxfilecavelist.html" with three classifications of cave directory:
    - onefilecaves:   caves whose directory holds exactly one .svx file
    - multifilecaves: caves with several .svx files but no subdirectories
    - subdircaves:    caves whose directory contains subdirectories of .svx files
    """
    # TO DO - filter out the non-public caves from display UNLESS LOGGED IN
    # This is very impenetrable code, original from Aaron Curtis I think.
    onefilecaves = []
    multifilecaves = []
    subdircaves = []
    fnumlist = []

    for area in ["1623", "1626", "1624", "1627"]:
        cavesdir = get_survexareapath(area)
        # Sort key is (area, -leading-number, name): within each area, directories
        # with larger leading cave numbers sort first; non-numeric names count as 0.
        arealist = sorted([(area, -int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir)])
        fnumlist += arealist

    # print(fnumlist)

    # go through the list and identify the contents of each cave directory
    for area, num, cavedir in fnumlist:

        # these have sub dirs /cucc/ /arge/ /old/ but that is no reason to hide them in this webpage
        # so these are now treated the same as 142 and 113 which also have a /cucc/ sub dir
        # if cavedir in ["144", "40"]:
        # continue

        # This all assumes that the first .svx file has the same name as the cave name,
        # which usually but not always true. e.g. caves-1623/78/allkaese.svx not caves-1623/78/78.svx
        # which is why we now also pass through the cavedir

        # Still fails for loutitohoehle etc even though this is set correctly when the pending cave is created
        # NOTE(review): cavesdir is recomputed here although it was computed in the first loop;
        # harmless, since area is carried in the fnumlist tuple.
        cavesdir = get_survexareapath(area)
        gcavedir = os.path.join(cavesdir, cavedir)
        if os.path.isdir(gcavedir) and cavedir[0] != ".":  # skip hidden dirs such as .git
            subdirs, subsvx = identifycavedircontents(gcavedir)

            check_cave_registered(
                area, cavedir
            )  # should do this only once per database load or it will be slow
            # (path-relative-to-repo, bare-name) pairs for every .svx file in this cave dir
            survdirobj = []
            for lsubsvx in subsvx:
                survdirobj.append(("caves-" + area + "/" + cavedir + "/" + lsubsvx, lsubsvx))

            # caves with subdirectories
            if subdirs:
                subsurvdirs = []
                for subdir in subdirs:
                    dsubdirs, dsubsvx = identifycavedircontents(os.path.join(gcavedir, subdir))
                    # assert not dsubdirs # handle case of empty sub directory
                    lsurvdirobj = []
                    for lsubsvx in dsubsvx:
                        lsurvdirobj.append(("caves-" + area + "/" + cavedir + "/" + subdir + "/" + lsubsvx, lsubsvx))
                    if len(dsubsvx) >= 1:
                        # (subdir, primary-file-pair, ALL file-pairs) - [0:] is a full copy
                        subsurvdirs.append(
                            (subdir, lsurvdirobj[0], lsurvdirobj[0:])
                        )  # list now includes the first item too
                if survdirobj:
                    subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
                else:
                    # cave dir has subdirectories but no top-level .svx files at all
                    print(f" ! Subdirectory containing empty subdirectory {subdirs} in {gcavedir}")

            # multifile caves
            elif len(survdirobj) > 1:
                multifilecaves.append((survdirobj[0], cavedir, survdirobj[1:]))
            # single file caves
            elif len(survdirobj) == 1:
                onefilecaves.append(survdirobj[0])

    return render(
        request,
        "svxfilecavelist.html",
        {
            "settings": settings,
            "onefilecaves": onefilecaves,
            "multifilecaves": multifilecaves,
            "subdircaves": subdircaves,
        },
    )
|
|
|
|
|
2023-09-05 19:46:10 +01:00
|
|
|
def survexdir(request):
    """Report on all the SurvexDirectory objects.

    We are trying to find out how mismatches have crept in,
    and whether the whole SurvexDirectory class is actually redundant,
    as the info it holds is always embedded in the survexFile path directories.

    Annotates each object in-place with boolean flags (matchbad, pathbad,
    primarybad) consumed by the "survexdir.html" template.
    """
    dirs = SurvexDirectory.objects.all()  # .order_by("cave")
    for d in dirs:
        # matchbad: primary file does not live under the directory's own path
        d.matchbad = not f"{d.primarysurvexfile}".startswith(str(d.path))
        # pathbad: the primary .svx file does not exist on disk
        d.pathbad = not Path(settings.SURVEX_DATA, f"{d.primarysurvexfile}.svx").is_file()

    files = SurvexFile.objects.all().order_by("cave")
    for sf in files:
        # matchbad: file path is not under its SurvexDirectory's path
        sf.matchbad = not f"{sf.path}".startswith(str(sf.survexdirectory.path))
        # primarybad: directory path disagrees with the parent dir of the primary file
        sf.primarybad = True
        if sf.primary:
            sf.pathparent = Path(sf.primary.path).parent
            if str(sf.survexdirectory.path) == str(sf.pathparent):
                sf.primarybad = False
        # pathbad: the .svx file itself is missing on disk
        sf.pathbad = not Path(settings.SURVEX_DATA, f"{sf.path}.svx").is_file()

    return render(request, "survexdir.html", {"survexdirs": dirs, "survexfiles": files})
|
2023-09-05 19:46:10 +01:00
|
|
|
|
2023-09-06 11:20:29 +01:00
|
|
|
def get_directories(cave):
    """Return the distinct SurvexDirectory objects used by a cave's survex files.

    Built from the cave's survexfile_set; the result list has no duplicates
    and its ordering is arbitrary (derived from a set).
    """
    return list({sf.survexdirectory for sf in cave.survexfile_set.all()})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def survexcavesingle(request, cave_shortname):
    """Parse all the survex files of a single cave and show that the data is
    consistent and that all the files and people can be found.

    Falls back to a kataster-number search when the short name is ambiguous,
    and to a 404 page when nothing matches at all.
    """
    lookup = GetCaveLookup()
    if cave_shortname in lookup:
        # Unambiguous short name: render this one cave.
        cave = lookup[cave_shortname]
        # print(f"survexcavesingle {cave_shortname=} => {cave=}")
        cave.sds = get_directories(cave)
        return render(request, "svxcaves.html", {"settings": settings, "caves": [cave]})

    # not a cave or an ambiguous short name, e.g. "122"

    # if (SVXPATH / Path(cave_shortname + ".svx")).is_file():
    # return svx(request, cave_shortname)

    matches = Cave.objects.filter(kataster_number=cave_shortname)
    if len(matches) > 0:
        # print(f"many {cave_shortname=} => {matches=}")
        for c in matches:
            c.sds = get_directories(c)
            # print(f"many {c=} => {c.sds=}")
        return render(request, "svxcaves.html", {"settings": settings, "caves": matches})

    return render(request, "errors/svxcaves404.html", {"settings": settings, "cave": cave_shortname})
|
2021-03-25 16:15:26 +00:00
|
|
|
|
2021-03-26 02:01:29 +00:00
|
|
|
def check_cave_registered(area, survex_cave):
    """Check whether a cave has been properly registered when it is found in the Loser repo.
    This should really be called by databaseReset not here in a view.
    Currently Caves are only registered if they are listed in :expoweb: settings.CAVEDESCRIPTIONS
    so we need to add in any more here.

    This function is used in survexcaveslist(request)
    A serious bodge.

    Returns the str() of the matching Cave, or None when no properly
    registered cave can be identified.
    """
    # First try the kataster number as a unique key.
    try:
        return str(Cave.objects.get(kataster_number=survex_cave))
    except MultipleObjectsReturned:
        # Several caves share this kataster number: pick the one in this area.
        wanted = area + "-" + survex_cave
        for candidate in Cave.objects.filter(kataster_number=survex_cave):
            if str(candidate) == wanted:
                return str(candidate)  # just get the first that matches
        return None  # many returned but none in correct area
    except ObjectDoesNotExist:
        pass

    # Fall back to the unofficial number, which should be unique.
    try:
        unregistered = Cave.objects.get(unofficial_number=survex_cave)
    except ObjectDoesNotExist:
        return None
    # Only counts as registered if it also carries a kataster number.
    return str(unregistered) if unregistered.kataster_number else None
|