troggle-unchained/core/views_survex.py

import re
import os
import datetime
import difflib
from pathlib import Path
from django import forms
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, render
#from django.core.context_processors import csrf
from django.template.context_processors import csrf
from django.http import HttpResponse, Http404
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
import troggle.settings as settings
import parsers.survex
from troggle.core.models import Expedition, Person, PersonExpedition
from troggle.core.models_survex import SurvexBlock, SurvexPersonRole, SurvexFile, SurvexDirectory
from troggle.core.models_caves import Cave, PersonTrip, LogbookEntry
from troggle.parsers.people import GetPersonExpeditionNameLookup
survexdatasetpath = Path(settings.SURVEX_DATA)
survextemplatefile = """; *** THIS IS A TEMPLATE FILE NOT WHAT YOU MIGHT BE EXPECTING ***
*** DO NOT SAVE THIS FILE WITHOUT RENAMING IT !! ***
;[Stuff in square brackets is example text to be replaced with real data,
; removing the square brackets]
*begin [surveyname]
; stations linked into other surveys (or likely to)
*export [1 8 12 34]
; Cave:
; Area in cave/QM:
*title ""
*date [2040.07.04] ; <-- CHANGE THIS DATE
*team Insts [Fred Fossa]
*team Notes [Brenda Badger]
*team Pics [Luke Lynx]
*team Tape [Albert Aadvark]
*instrument [SAP #+Laser Tape/DistoX/Compass # ; Clino #]
; Calibration: [Where, readings]
*ref [2040#00] ; <-- CHANGE THIS TOO
; the #number is on the clear pocket containing the original notes
; if using a tape:
*calibrate tape +0.0 ; +ve if tape was too short, -ve if too long
; Centreline data
*data normal from to length bearing gradient ignoreall
[ 1 2 5.57 034.5 -12.8 ]
;-----------
;recorded station details (leave commented out)
;(NP=Nail Polish, LHW/RHW=Left/Right Hand Wall)
;Station Left Right Up Down Description
;[Red] nail varnish markings
[;1 0.8 0 5.3 1.6 ; NP on boulder. pt 23 on foo survey ]
[;2 0.3 1.2 6 1.2 ; NP '2' LHW ]
[;3 1.3 0 3.4 0.2 ; Rock on floor - not refindable ]
;LRUDs arranged into passage tubes
;new *data command for each 'passage',
;repeat stations and adjust numbers as needed
*data passage station left right up down
;[ 1 0.8 0 5.3 1.6 ]
;[ 2 0.3 1.2 6 1.2 ]
*data passage station left right up down
;[ 1 1.3 1.5 5.3 1.6 ]
;[ 3 2.4 0 3.4 0.2 ]
;-----------
;Question Mark List ;(leave commented-out)
; The nearest-station is the name of the survey and station which are nearest to
; the QM. The resolution-station is either '-' to indicate that the QM hasn't
; been checked; or the name of the survey and station which push that QM. If a
; QM doesn't go anywhere, set the resolution-station to be the same as the
; nearest-station. Include any relevant details of how to find or push the QM in
; the textual description.
;Serial number grade(A/B/C/X) nearest-station resolution-station description
;[ QM1 A surveyname.3 - description of QM ]
;[ QM2 B surveyname.5 - description of QM ]
;------------
;Cave description ;(leave commented-out)
;freeform text describing this section of the cave
*end [surveyname]
"""
class SvxForm(forms.Form):
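    '''Form for displaying and editing a survex file in the browser: the filename, directory,
    timestamp and output type are carried as read-only fields alongside the editable file text.'''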
dirname = forms.CharField(widget=forms.TextInput(attrs={"readonly":True}))
filename = forms.CharField(widget=forms.TextInput(attrs={"readonly":True}))
datetime = forms.DateTimeField(widget=forms.TextInput(attrs={"readonly":True}))
outputtype = forms.CharField(widget=forms.TextInput(attrs={"readonly":True}))
code = forms.CharField(widget=forms.Textarea(attrs={"cols":150, "rows":36}))
def GetDiscCode(self):
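        '''Reads the .svx file named in the form data from disc; if it does not exist yet,
        returns the blank survex template instead.'''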
fname = survexdatasetpath / (self.data['filename'] + ".svx")
        if not os.path.isfile(fname):
            print(">>> >>> WARNING - svx file not found, showing TEMPLATE SVX", fname, flush=True)
return survextemplatefile
fin = open(fname, "rt",encoding='utf8',newline='')
svxtext = fin.read()
fin.close()
return svxtext
def DiffCode(self, rcode):
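        '''Returns a unified diff between the .svx file on disc and the code submitted in the form.'''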
code = self.GetDiscCode()
difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
difflist = [ diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline) ]
return difflist
def SaveCode(self, rcode):
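        '''Writes the submitted code back to the .svx file, creating any missing directories,
        after some sanity checks when the file is being created for the first time.'''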
fname = survexdatasetpath / (self.data['filename'] + ".svx")
if not os.path.isfile(fname):
if re.search(r"\[|\]", rcode):
return "Error: remove all []s from the text. They are only template guidance."
mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
if not mbeginend:
return "Error: no begin/end block here"
if mbeginend.group(1) != mbeginend.group(2):
return "Error: mismatching begin/end labels"
# Make this create new survex folders if needed
try:
fout = open(fname, "wt", encoding='utf8',newline='\n')
except FileNotFoundError:
pth = os.path.dirname(self.data['filename'])
newpath = survexdatasetpath / pth
if not os.path.exists(newpath):
os.makedirs(newpath)
fout = open(fname, "wt", encoding='utf8',newline='\n')
# javascript seems to insert CRLF on WSL1 whatever you say. So fix that:
res = fout.write(rcode.replace("\r",""))
fout.close()
return "SAVED ."
def Process(self):
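        '''Runs cavern on the .svx file and returns the tail of its log, from "Survey contains" onwards.'''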
print("....\n\n\n....Processing\n\n\n")
froox = os.fspath(survexdatasetpath / (self.data['filename'] + ".svx"))
froog = os.fspath(survexdatasetpath / (self.data['filename'] + ".log"))
cwd = os.getcwd()
os.chdir(os.path.split(froox)[0])
os.system(settings.CAVERN + " --log " + froox )
os.chdir(cwd)
fin = open(froog, "rt",encoding='utf8')
log = fin.read()
fin.close()
log = re.sub("(?s).*?(Survey contains)", "\\1", log)
return log
def svx(request, survex_file):
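    '''View and edit a survex file in the browser: handles the revert, process, save and diff
    buttons on the form, then re-renders the file with any diff output or cavern log.'''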
# get the basic data from the file given in the URL
dirname = os.path.split(survex_file)[0]
dirname += "/"
nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
outputtype = "normal"
form = SvxForm({'filename':survex_file, 'dirname':dirname, 'datetime':nowtime, 'outputtype':outputtype})
# if the form has been returned
difflist = [ ]
logmessage = ""
message = ""
if request.method == 'POST': # If the form has been submitted...
rform = SvxForm(request.POST) #
if rform.is_valid(): # All validation rules pass (how do we check it against the filename and users?)
rcode = rform.cleaned_data['code']
outputtype = rform.cleaned_data['outputtype']
difflist = form.DiffCode(rcode)
#print "ssss", rform.data
if "revert" in rform.data:
pass
if "process" in rform.data:
if not difflist:
message = "OUTPUT FROM PROCESSING"
logmessage = form.Process()
print(logmessage)
else:
message = "SAVE FILE FIRST"
form.data['code'] = rcode
if "save" in rform.data:
                if request.user.is_authenticated:
message = form.SaveCode(rcode)
else:
message = "You do not have authority to save this file"
                if not message.startswith("SAVED"):
form.data['code'] = rcode
if "diff" in rform.data:
form.data['code'] = rcode
#process(survex_file)
if 'code' not in form.data:
form.data['code'] = form.GetDiscCode()
if not difflist:
difflist.append("none")
if message:
difflist.insert(0, message)
#print [ form.data['code'] ]
    svxincludes = re.findall(r'(?i)\*include\s+(\S+)', form.data['code'] or "")
vmap = {'settings': settings,
            'has_3d': os.path.isfile(survexdatasetpath / (survex_file + ".3d")),
'title': survex_file,
'svxincludes': svxincludes,
'difflist': difflist,
'logmessage':logmessage,
'form':form}
vmap.update(csrf(request))
if outputtype == "ajax":
return render_to_response('svxfiledifflistonly.html', vmap)
return render_to_response('svxfile.html', vmap)
def svxraw(request, survex_file):
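    '''Serves the raw text of a .svx file.'''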
    svx = open(survexdatasetpath / (survex_file + ".svx"), "rt", encoding='utf8')
return HttpResponse(svx, content_type="text")
# The cavern running function
def process(survex_file):
cwd = os.getcwd()
os.chdir(os.path.split(os.fspath(survexdatasetpath / survex_file))[0])
    os.system(settings.CAVERN + " --log " + os.fspath(survexdatasetpath / (survex_file + ".svx")))
os.chdir(cwd)
def threed(request, survex_file):
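    '''Runs cavern and serves the resulting .3d file, or the .log file if processing failed.'''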
process(survex_file)
try:
        threed = open(survexdatasetpath / (survex_file + ".3d"), "rt", encoding='utf8')
return HttpResponse(threed, content_type="model/3d")
except:
        log = open(survexdatasetpath / (survex_file + ".log"), "rt", encoding='utf8')
return HttpResponse(log, content_type="text")
def log(request, survex_file):
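    '''Runs cavern and serves the resulting .log file as plain text.'''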
process(survex_file)
    log = open(survexdatasetpath / (survex_file + ".log"), "rt", encoding='utf8')
return HttpResponse(log, content_type="text")
def err(request, survex_file):
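    '''Runs cavern and serves the resulting .err file as plain text.'''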
process(survex_file)
    err = open(survexdatasetpath / (survex_file + ".err"), "rt", encoding='utf8')
return HttpResponse(err, content_type="text")
def identifycavedircontents(gcavedir):
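    '''Returns (subdirs, subsvx) for a cave directory: its subdirectories and its .svx filenames
    (without extension), with the primary survex file - the one named after the cave or starting
    with "all" - sorted to the front of the subsvx list.'''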
# find the primary survex file in each cave directory
name = os.path.split(gcavedir)[1]
subdirs = [ ]
subsvx = [ ]
primesvx = None
for f in os.listdir(gcavedir):
if name == "204" and (f in ["skel.svx", "template.svx", "204withents.svx"]):
pass
elif name == "136" and (f in ["136-noents.svx"]):
pass
elif name == "115" and (f in ["115cufix.svx", "115fix.svx"]):
pass
elif os.path.isdir(os.path.join(gcavedir, f)):
if f[0] != ".":
subdirs.append(f)
elif f[-4:] == ".svx":
nf = f[:-4]
if nf.lower() == name.lower() or nf[:3] == "all" or (name, nf) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]:
if primesvx:
if nf[:3] == "all":
assert primesvx[:3] != "all", (name, nf, primesvx, gcavedir, subsvx)
primesvx = nf
else:
assert primesvx[:3] == "all", (name, nf, primesvx, gcavedir, subsvx)
else:
primesvx = nf
else:
subsvx.append(nf)
else:
pass
#assert re.match(".*?(?:.3d|.log|.err|.txt|.tmp|.diff|.e?spec|~)$", f), (gcavedir, f)
subsvx.sort()
#assert primesvx, (gcavedir, subsvx)
if primesvx:
subsvx.insert(0, primesvx)
return subdirs, subsvx
def get_survexareapath(area):
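    '''Returns the path to the caves-<area> directory in the survex data repository.'''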
return survexdatasetpath / str("caves-" + area)
# direct local non-database browsing through the svx file repositories
# every time the page is viewed! Should cache this.
def survexcaveslist(request):
'''This reads the entire list of caves in the Loser repo directory and produces a complete report.
It can find caves which have not yet been properly registered in the system by Databasereset.py because
someone may have uploaded the survex files without doing the rest of the integration process.
'''
    # TO DO - filter out the non-public caves from display UNLESS LOGGED IN
onefilecaves = [ ]
multifilecaves = [ ]
subdircaves = [ ]
fnumlist = [ ]
for area in ["1623", "1626", "1624", "1627"]:
cavesdir = get_survexareapath(area)
arealist = sorted([ (area, -int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ])
fnumlist += arealist
print(fnumlist)
# go through the list and identify the contents of each cave directory
for area, num, cavedir in fnumlist:
# these have sub dirs /cucc/ /arge/ /old/ but that is no reason to hide them in this webpage
# so these are now treated the same as 142 and 113 which also have a /cucc/ sub dir
#if cavedir in ["144", "40"]:
# continue
# This all assumes that the first .svx file has the same name as the cave name,
# which usually but not always true. e.g. caves-1623/78/allkaese.svx not caves-1623/78/78.svx
# which is why we now also pass through the cavedir
cavesdir = get_survexareapath(area)
gcavedir = os.path.join(cavesdir, cavedir)
if os.path.isdir(gcavedir) and cavedir[0] != ".":
subdirs, subsvx = identifycavedircontents(gcavedir)
caveid = check_cave_registered(area, cavedir) # should do this only once per database load or it will be slow
survdirobj = [ ]
for lsubsvx in subsvx:
survdirobj.append(("caves-" +area+ "/" +cavedir+"/"+lsubsvx, lsubsvx))
# caves with subdirectories
if subdirs:
subsurvdirs = [ ]
for subdir in subdirs:
dsubdirs, dsubsvx = identifycavedircontents(os.path.join(gcavedir, subdir))
# assert not dsubdirs # handle case of empty sub directory
lsurvdirobj = [ ]
for lsubsvx in dsubsvx:
lsurvdirobj.append(("caves-" +area+ "/" +cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
if len(dsubsvx) >= 1:
subsurvdirs.append((subdir,lsurvdirobj[0], lsurvdirobj[0:])) # list now includes the first item too
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
# multifile caves
elif len(survdirobj) > 1:
multifilecaves.append((survdirobj[0], cavedir, survdirobj[1:]))
# single file caves
elif len(survdirobj) == 1:
onefilecaves.append(survdirobj[0])
return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
def survexcavesingle(request, survex_cave):
    '''Parses all the survex files of a single cave and shows that they are consistent and that all
    the files and people can be found. Should explicitly fix the kataster number thing:
    kataster numbers are not unique across areas. Fix this.
'''
sc = survex_cave
try:
cave = Cave.objects.get(kataster_number=sc) # This may not be unique.
return render_to_response('svxcavesingle.html', {'settings': settings, "cave":cave })
except ObjectDoesNotExist:
# can get here if the survex file is in a directory labelled with unofficial number not kataster number.
# maybe - and _ mixed up, or CUCC-2017- instead of 2017-CUCC-, or CUCC2015DL01 . Let's not get carried away..
for unoff in [sc, sc.replace('-','_'), sc.replace('_','-'), sc.replace('-',''), sc.replace('_','')]:
try:
cave = Cave.objects.get(unofficial_number=unoff) # return on first one we find
return render_to_response('svxcavesingle.html', {'settings': settings, "cave":cave })
except ObjectDoesNotExist:
continue # next attempt in for loop
return render_to_response('svxcavesingle404.html', {'settings': settings, "cave":sc })
except MultipleObjectsReturned:
caves = Cave.objects.filter(kataster_number=survex_cave)
return render_to_response('svxcaveseveral.html', {'settings': settings, "caves":caves })
except:
return render_to_response('svxcavesingle404.html', {'settings': settings, "cave":sc })
def check_cave_registered(area, survex_cave):
'''Checks whether a cave has been properly registered when it is found in the Loser repo
This should be called by Databasereset not here in a view
Currently Caves are only registered if they are listed in :expoweb: settings.CAVEDESCRIPTIONS
so we need to add in any more here.
This function runs but does not seem to be used?!
'''
try:
cave = Cave.objects.get(kataster_number=survex_cave)
return str(cave)
except MultipleObjectsReturned:
caves = Cave.objects.filter(kataster_number=survex_cave)
for c in caves:
if str(c) == area + "-" + survex_cave :
return str(c) # just get the first that matches
return None # many returned but none in correct area
except ObjectDoesNotExist:
pass
try:
cave = Cave.objects.get(unofficial_number=survex_cave) # should be unique!
if cave.kataster_number:
return str(cave)
else:
return None
except ObjectDoesNotExist:
pass
return None