2011-07-11 02:10:22 +01:00
|
|
|
import os
|
|
|
|
import re
|
2022-07-27 23:48:22 +01:00
|
|
|
import json
|
2022-08-01 00:50:19 +01:00
|
|
|
import operator
|
2022-09-20 20:52:31 +01:00
|
|
|
import datetime
|
2022-07-27 23:48:22 +01:00
|
|
|
from urllib.parse import urljoin
|
|
|
|
from pathlib import Path
|
2022-08-01 00:50:19 +01:00
|
|
|
from functools import reduce
|
2020-05-28 01:16:45 +01:00
|
|
|
|
|
|
|
from django.db import models
|
|
|
|
from django.conf import settings
|
2020-06-18 21:50:16 +01:00
|
|
|
from django.urls import reverse
|
2011-07-11 02:10:22 +01:00
|
|
|
|
2022-09-20 20:52:31 +01:00
|
|
|
# from troggle.core.models.troggle import DataIssue # circular import. Hmm
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
class SurvexDirectory(models.Model):
    """A directory in the survex data tree, usually associated with one cave.

    One SurvexFile in the directory is recorded as 'primary': the file which
    includes the others.
    """
    path = models.CharField(max_length=200)
    cave = models.ForeignKey('Cave', blank=True, null=True, on_delete=models.SET_NULL)
    primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True, on_delete=models.SET_NULL)
    # could also include files in directory but not referenced

    class Meta:
        ordering = ('id',)
        verbose_name_plural = "Survex directories"

    def __str__(self):
        # primarysurvexfile is nullable (SET_NULL), so guard against
        # AttributeError on .path when no primary file has been set.
        if self.primarysurvexfile is not None:
            primary = str(self.primarysurvexfile.path)
        else:
            primary = "None"
        return "[SurvexDirectory:" + str(self.path) + " | Primary svx:" + primary + ".svx ]"
|
2020-06-28 01:50:34 +01:00
|
|
|
|
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
class SurvexFile(models.Model):
    """A single survex (.svx) file.

    `path` is relative to settings.SURVEX_DATA and stored without the
    ".svx" suffix.
    """
    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True, on_delete=models.SET_NULL)
    cave = models.ForeignKey('Cave', blank=True, null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ('id',)

    # Don't change from the default as that breaks troggle webpages and internal referencing!
    # def __str__(self):
    #     return "[SurvexFile:"+str(self.path) + "-" + str(self.survexdirectory) + "-" + str(self.cave)+"]"

    def _fullpath(self):
        """Absolute filesystem path of the .svx file (shared by exists/OpenFile)."""
        return os.path.join(settings.SURVEX_DATA, self.path + ".svx")

    def exists(self):
        """True if the .svx file is present on disk."""
        return os.path.isfile(self._fullpath())

    def OpenFile(self):
        """Open the .svx file for reading. The caller is responsible for closing it."""
        return open(self._fullpath())

    def SetDirectory(self):
        """Attach this file to its SurvexDirectory, creating the directory
        record (with this file as primary) if it does not exist yet.

        Saves both the (possibly new) directory and self.
        """
        dirpath = os.path.split(self.path)[0]
        # pointless search every time we import a survex file if we know there are no duplicates..
        # don't use this for initial import.
        survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
        if survexdirectorylist:
            self.survexdirectory = survexdirectorylist[0]
        else:
            survexdirectory = SurvexDirectory(path=dirpath, cave=self.cave, primarysurvexfile=self)
            survexdirectory.save()
            self.survexdirectory = survexdirectory
        self.save()

    def __str__(self):
        return self.path
|
2020-05-28 01:16:45 +01:00
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
class SurvexStationLookUpManager(models.Manager):
    """Manager that resolves a full dotted survex name to a station."""

    def lookup(self, name):
        """Return the station matching *name*.

        Everything before the final "." is the dotted block path; the
        remainder is the station name, matched case-insensitively.
        """
        blockpath, _, station = name.rpartition(".")
        parent_block = SurvexBlock.objects.lookup(blockpath)
        return self.get(block=parent_block, name__iexact=station)
|
2011-07-11 02:10:22 +01:00
|
|
|
|
|
|
|
class SurvexStation(models.Model):
    """A named survey station, belonging to a survex block, with optional
    computed x/y/z coordinates."""
    name = models.CharField(max_length=100)
    block = models.ForeignKey('SurvexBlock', null=True, on_delete=models.SET_NULL)
    # equate = models.ForeignKey('SurvexEquate', blank=True, null=True,on_delete=models.SET_NULL)
    objects = SurvexStationLookUpManager()
    x = models.FloatField(blank=True, null=True)
    y = models.FloatField(blank=True, null=True)
    z = models.FloatField(blank=True, null=True)

    def path(self):
        """Full dotted survex path of this station, built by walking up the
        chain of parent blocks and prefixing each non-empty block name."""
        r = self.name
        b = self.block
        # block is nullable (SET_NULL): the original `while True` loop crashed
        # with AttributeError on a detached station; a None-guard fixes that
        # and also terminates naturally at the root (parent is None).
        while b is not None:
            if b.name:
                r = b.name + "." + r
            b = b.parent
        return r

    class Meta:
        ordering = ('id',)

    def __str__(self):
        return self.name and str(self.name) or 'no name'
|
|
|
|
|
2011-07-11 02:10:22 +01:00
|
|
|
#
|
|
|
|
# Single SurvexBlock
|
|
|
|
#
|
|
|
|
class SurvexBlockLookUpManager(models.Manager):
    """Manager that resolves a dotted block path to a SurvexBlock."""

    def lookup(self, name):
        """Starting from the root block (the one whose survexfile is
        settings.SURVEX_TOPNAME), descend one dotted component at a time.
        An empty *name* returns the root block itself.
        """
        parts = name.split(".") if name != "" else []
        node = SurvexBlock.objects.get(parent=None, survexfile__path=settings.SURVEX_TOPNAME)
        for part in parts:
            node = SurvexBlock.objects.get(parent=node, name__iexact=part)
        return node
|
|
|
|
|
|
|
|
class SurvexBlock(models.Model):
    """A single *begin/*end block parsed from a survex file."""
    objects = SurvexBlockLookUpManager()
    name = models.CharField(max_length=100)
    title = models.CharField(max_length=200)
    parent = models.ForeignKey('SurvexBlock', blank=True, null=True, on_delete=models.SET_NULL)
    cave = models.ForeignKey('Cave', blank=True, null=True, on_delete=models.SET_NULL)

    date = models.DateField(blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True, on_delete=models.SET_NULL)
    expedition = models.ForeignKey('Expedition', blank=True, null=True, on_delete=models.SET_NULL)

    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True, on_delete=models.SET_NULL)
    survexpath = models.CharField(max_length=200) # the path for the survex stations

    scanswallet = models.ForeignKey("Wallet", null=True, on_delete=models.SET_NULL) # only ONE wallet per block. The most recent seen overwrites.. ugh.

    legsall = models.IntegerField(null=True) # summary data for this block
    legslength = models.FloatField(null=True)

    class Meta:
        ordering = ('id',)

    # NOTE: a second, earlier __str__ returning "[SurvexBlock:<name>-path:...]"
    # was dead code - Python keeps only the last definition in a class body -
    # so it has been removed; this is the definition that was in effect.
    def __str__(self):
        return self.name and str(self.name) or 'no name'

    def isSurvexBlock(self): # Function used in templates
        return True

    def GetPersonroles(self):
        '''Return [{person, expeditionyear}, ...] for everyone with a role
        on this block, ordered by personexpedition.
        But apparently never used !?
        '''
        res = [ ]
        for personrole in self.survexpersonrole_set.order_by('personexpedition'):
            res.append({'person':personrole.personexpedition.person, 'expeditionyear':personrole.personexpedition.expedition.year})
        return res

    def DayIndex(self):
        """Position of this block among all blocks surveyed on the same
        expedition day (assumes expeditionday is set - TODO confirm)."""
        return list(self.expeditionday.survexblock_set.all()).index(self)
|
|
|
|
|
|
|
|
class SurvexPersonRole(models.Model):
    """Records one person's involvement in the survey trip of one block."""
    survexblock = models.ForeignKey('SurvexBlock', on_delete=models.CASCADE)
    # increasing levels of precision, Surely we only need survexblock and person now that we have no link to a logbook entry?
    personname = models.CharField(max_length=100)
    person = models.ForeignKey('Person', blank=True, null=True, on_delete=models.SET_NULL)
    personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True, on_delete=models.SET_NULL)
    persontrip = models.ForeignKey('PersonTrip', blank=True, null=True, on_delete=models.SET_NULL) # logbook
    expeditionday = models.ForeignKey("ExpeditionDay", null=True, on_delete=models.SET_NULL)

    def __str__(self):
        return f"{self.personname} - {self.survexblock}"
|
2020-05-28 01:16:45 +01:00
|
|
|
|
2021-04-26 18:18:16 +01:00
|
|
|
class Wallet(models.Model):
    '''We do not keep the JSON values in the database, we query them afresh each time,
    but we will change this when we need to do a Django query on e.g. personame
    '''
    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)

    class Meta:
        ordering = ('walletname',)

    def get_absolute_url(self):
        # '#' in wallet names must be %-escaped for the URL
        return urljoin(settings.URL_ROOT, reverse('singlewallet', kwargs={"path":re.sub("#", "%23", self.walletname)}))

    def get_json(self):
        """Load and return this wallet's contents.json from the drawings repo,
        or None if the file does not exist.

        Normalises a malformed single-digit day number in the "date" field;
        re-raises on unparseable JSON.
        """
        #jsonfile = Path(self.fpath, 'contents.json')

        # Get from git repo instead
        # :drawings: walletjson/2022/2022#01/contents.json
        # fpath = /mnt/d/EXPO/expofiles/surveyscans/1999/1999#02
        fp = Path(self.fpath)
        wname = fp.name
        wyear = fp.parent.name
        wurl = f"/scanupload/{self.walletname}" # .replace('#', ':')

        jsonfile = Path(settings.DRAWINGS_DATA, "walletjson") / wyear / wname / "contents.json"
        if not Path(jsonfile).is_file():
            #print(f'{jsonfile} is not a file')
            return None
        with open(jsonfile) as json_f:
            try:
                waldata = json.load(json_f)
            except Exception:  # was a bare except: - keep KeyboardInterrupt etc. uncaught
                message = f"! {str(self.walletname)} Failed to load {jsonfile} JSON file"
                #print(message)
                raise
        # .get(): tolerate a missing "date" key (previously raised KeyError)
        if waldata.get("date"):
            datestr = waldata["date"].replace('.','-')
            try:
                thisdate = datetime.date.fromisoformat(datestr)
            except ValueError:
                # probably a single digit day number. HACKUS MAXIMUS.
                # clearly we need to fix this when we first import date strings..
                datestr = datestr[:-1] + '0' + datestr[-1]
                print(f' - {datestr=} ')
                try:
                    thisdate = datetime.date.fromisoformat(datestr)
                    try:
                        waldata["date"] = thisdate.isoformat()
                    except Exception:
                        message = f"! {str(self.walletname)} Date formatting failure {thisdate}. Failed to load from {jsonfile} JSON file"
                        from troggle.core.models.troggle import DataIssue
                        DataIssue.objects.update_or_create(parser='scans', message=message, url=wurl)
                except Exception:
                    # error message typo fixed: was "Failed to load fro,"
                    message = f"! {str(self.walletname)} Date format not ISO {datestr}. Failed to load from {jsonfile} JSON file"
                    from troggle.core.models.troggle import DataIssue
                    DataIssue.objects.update_or_create(parser='scans', message=message, url=wurl)
        return waldata

    def year(self):
        """Return the year "YYYY" from a walletname of the form "YYYY#NN",
        or None when the name does not follow that convention."""
        if len(self.walletname) < 5:
            return None
        if self.walletname[4] != "#":
            return None
        try:
            year = int(self.walletname[0:4])
        except ValueError:
            # non-numeric prefix: previously crashed with ValueError
            return None
        if year < 1976 or year > 2050:
            return None
        else:
            return str(year)

    # Yes this is horribly, horribly inefficient, esp. for a page that have date, people and cave in it
    def date(self):
        """ISO-format date string from the wallet JSON, or None."""
        # parse the JSON once, not twice as before
        jsondata = self.get_json()
        if not jsondata:
            return None
        datestr = jsondata.get("date")
        if not datestr:
            return None
        datestr = datestr.replace('.','-')
        try:
            samedate = datetime.date.fromisoformat(datestr)
        except ValueError:
            try:
                samedate = datetime.date.fromisoformat(datestr[:10])
            except ValueError:
                samedate = None
        # guard: the original crashed with AttributeError when both parses failed
        return samedate.isoformat() if samedate else None

    def people(self):
        """List of people from the wallet JSON, or None if no JSON."""
        jsondata = self.get_json()
        if not jsondata:
            return None
        return jsondata["people"]

    def cave(self):
        """Cave identifier from the wallet JSON, or None if no JSON."""
        jsondata = self.get_json()
        if not jsondata:
            return None
        return jsondata["cave"]

    def name(self):
        """Wallet descriptive name from the wallet JSON, or None if no JSON."""
        jsondata = self.get_json()
        if not jsondata:
            return None
        return jsondata["name"]

    def get_fnames(self):
        '''Filenames without the suffix, i.e. without the ".jpg"
        '''
        dirpath = Path(settings.SCANS_ROOT, self.fpath) # does nowt as fpath is a rooted path already
        files = []
        if not self.fpath:
            files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
            return files
        if not dirpath.is_dir():
            files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
            return files
        try:
            for f in dirpath.iterdir():
                if f.is_file():
                    files.append(Path(f.name).stem)
                else:
                    # directories are marked with surrounding dashes
                    files.append(f"-{Path(f.name).stem}-")
        except FileNotFoundError:
            files.append("FileNotFoundError")
        return files

    def fixsurvextick(self, tick):
        """Upgrade the survex tick colour when any survex block in the
        database already refers to this wallet."""
        blocks = SurvexBlock.objects.filter(scanswallet = self)
        result = tick
        for b in blocks:
            if b.survexfile: # if any exist in db, no check for validity or a real file. Refactor.
                result = "seagreen" # slightly different shade of green
        return result

    def get_ticks(self):
        """Return a dict of tick-colour strings, keyed by category letter:
        S survex, C cave description, Q QMs, N notes, P plan, E elevation,
        T tunnel/therion drawing, W website."""
        waldata = self.get_json()
        if not waldata:
            # no contents.json at all: everything is 'unknown'
            return dict.fromkeys(("S", "C", "Q", "N", "P", "E", "T", "W"), "black")

        ticks = {}

        # Initially, are there any required survex files present ?
        # Note that we can't set the survexblock here on the wallet as that info is only available while parsing the survex file
        survexok = "red"
        ticks["S"] = "red"
        if waldata["survex not required"]:
            survexok = "green"
            ticks["S"] = "green"
        else:
            if waldata["survex file"]:
                if not type(waldata["survex file"])==list: # a string also is a sequence type, so do it this way
                    waldata["survex file"] = [waldata["survex file"]]
                ngood = 0
                nbad = 0
                ticks["S"] = "purple"
                for sx in waldata["survex file"]:
                    #this logic appears in several places, inc uploads.py). Refactor.
                    if sx !="":
                        if Path(sx).suffix.lower() != ".svx":
                            sx = sx + ".svx"
                        if (Path(settings.SURVEX_DATA) / sx).is_file():
                            ngood += 1
                        else:
                            nbad += 1
                if nbad == 0 and ngood >= 1:
                    ticks["S"] = "green"
                elif nbad >= 1 and ngood >= 1:
                    ticks["S"] = "orange"
                elif nbad >= 1 and ngood == 0:
                    ticks["S"] = "red"
                else:
                    ticks["S"] = "black"

        # Cave Description
        if waldata["description written"]:
            ticks["C"] = "green"
        else:
            ticks["C"] = survexok
        # QMs
        if waldata["qms written"]:
            ticks["Q"] = "green"
        else:
            ticks["Q"] = survexok
        # NOTE(review): year override applied unconditionally here - old
        # wallets are not expected to have QM data; confirm intended nesting.
        if not self.year():
            ticks["Q"] = "darkgrey"
        else:
            if int(self.year()) < 2015:
                ticks["Q"] = "lightgrey"

        # Notes, Plan, Elevation; Tunnel
        if waldata["electronic survey"]:
            ticks["N"] = "green"
            ticks["P"] = "green"
            ticks["E"] = "green"
            ticks["T"] = "green"
        else:
            files = self.get_fnames()

            # Notes required
            notes_scanned = reduce(operator.or_, [f.startswith("note") for f in files], False)
            notes_scanned = reduce(operator.or_, [f.endswith("notes") for f in files], notes_scanned)
            if notes_scanned:
                ticks["N"] = "green"
            else:
                ticks["N"] = "red"

            # Plan drawing required
            plan_scanned = reduce(operator.or_, [f.startswith("plan") for f in files], False)
            plan_scanned = reduce(operator.or_, [f.endswith("plan") for f in files], plan_scanned)
            plan_drawing_required = not (plan_scanned or waldata["plan drawn"] or waldata["plan not required"])
            if plan_drawing_required:
                ticks["P"] = "red"
            else:
                ticks["P"] = "green"

            # Elev drawing required
            elev_scanned = reduce(operator.or_, [f.startswith("elev") for f in files], False)
            elev_scanned = reduce(operator.or_, [f.endswith("elev") for f in files], elev_scanned)
            elev_scanned = reduce(operator.or_, [f.endswith("elevation") for f in files], elev_scanned)
            elev_drawing_required = not (elev_scanned or waldata["elev drawn"] or waldata["elev not required"])
            if elev_drawing_required:
                ticks["E"] = "red"
            else:
                ticks["E"] = "green"

            # Tunnel / Therion
            if elev_drawing_required or plan_drawing_required:
                ticks["T"] = "red"
            else:
                ticks["T"] = "green"

        # Website
        if waldata["website updated"]:
            ticks["W"] = "green"
        else:
            ticks["W"] = "red"

        return ticks

    def __str__(self):
        return "[" + str(self.walletname) + " (Wallet)]"
|
2020-05-28 01:16:45 +01:00
|
|
|
|
2020-06-24 00:18:01 +01:00
|
|
|
class SingleScan(models.Model):
    """One scanned image file stored inside a wallet."""
    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    wallet = models.ForeignKey("Wallet", null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ('name',)

    def get_absolute_url(self):
        """URL of the single-scan page; '#' in wallet names is %-escaped."""
        escaped_wallet = re.sub("#", "%23", self.wallet.walletname)
        kwargs = {"path": escaped_wallet, "file": self.name}
        return urljoin(settings.URL_ROOT, reverse('scansingle', kwargs=kwargs))

    def __str__(self):
        return "Survey Scan Image: " + str(self.name) + " in " + str(self.wallet)
|
2020-05-28 01:16:45 +01:00
|
|
|
|
2021-04-26 18:08:42 +01:00
|
|
|
class DrawingFile(models.Model):
    """A Tunnel or Therion drawing file, linked to the wallets and scans
    it references."""
    dwgpath = models.CharField(max_length=200)
    dwgname = models.CharField(max_length=200)
    dwgwallets = models.ManyToManyField("Wallet") # implicitly links via folders to scans to SVX files
    scans = models.ManyToManyField("SingleScan") # implicitly links via scans to SVX files
    dwgcontains = models.ManyToManyField("DrawingFile") # case when its a frame type
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survexfiles = models.ManyToManyField("SurvexFile") # direct link to SVX files - not populated yet

    class Meta:
        ordering = ('dwgpath',)

    def __str__(self):
        return f"Drawing File: {self.dwgname} ({self.filesize} bytes)"
|
|
|
|
|